| code (string, lengths 22–1.05M) | apis (list, lengths 1–3.31k) | extract_api (string, lengths 75–3.25M) |
|---|---|---|
# -*- coding: utf-8 -*-
# 导入模块
import sys
from PyQt5.QtWidgets import QMainWindow , QApplication
from PyQt5.QtCore import Qt
### 自定义窗口类
### Custom window class
class MyWindow( QMainWindow):
    '''Custom window: frameless, filling the screen's available area.'''

    ### Constructor
    def __init__(self,parent=None):
        '''Constructor'''
        # Invoke the parent class constructor
        super(MyWindow,self).__init__(parent)
        # Set window flags (frameless window)
        self.setWindowFlags( Qt.FramelessWindowHint)
        # For visibility, give the window a background color (via QSS)
        self.setStyleSheet('''background-color:blue; ''')

    ### Overridden method
    def showMaximized(self):
        '''"Maximize" by resizing to the screen's available geometry and showing.'''
        # Get the desktop widget
        # NOTE(review): QApplication.desktop() is deprecated in newer Qt5 —
        # QScreen.availableGeometry() is the modern replacement; confirm target Qt version.
        desktop = QApplication.desktop()
        # Get the usable screen size (excludes taskbars/docks)
        rect = desktop.availableGeometry()
        # Resize/position the window to cover that area
        self.setGeometry(rect)
        # Show the window
        self.show()
### Main entry point
if __name__ == "__main__":
    '''主函数'''
    # Create the application object
    app = QApplication(sys.argv)
    # Create the main window
    window = MyWindow()
    # Show it via the custom maximized override
    window.showMaximized()
    # Run the application event loop; exit with its return code
    sys.exit(app.exec_())
|
[
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QApplication.desktop"
] |
[((838, 860), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (850, 860), False, 'from PyQt5.QtWidgets import QMainWindow, QApplication\n'), ((589, 611), 'PyQt5.QtWidgets.QApplication.desktop', 'QApplication.desktop', ([], {}), '()\n', (609, 611), False, 'from PyQt5.QtWidgets import QMainWindow, QApplication\n')]
|
# -*- coding: utf-8 -*-
# (C) Copyright IBM Corp. 2021.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Examples for GlobalCatalogV1
"""
import os
import io
import uuid
import pytest
from ibm_cloud_sdk_core import ApiException, read_external_sources
from ibm_platform_services.global_catalog_v1 import *
#
# This file provides an example of how to use the Global Catalog service.
#
# The following configuration properties are assumed to be defined:
#
# GLOBAL_CATALOG_URL=<service url>
# GLOBAL_CATALOG_AUTH_TYPE=iam
# GLOBAL_CATALOG_APIKEY=<IAM apikey>
# GLOBAL_CATALOG_AUTH_URL=<IAM token service URL - omit this if using the production environment>
#
# These configuration properties can be exported as environment variables, or stored
# in a configuration file and then:
# export IBM_CREDENTIALS_FILE=<name of configuration file>
#
# Name of the credentials/configuration file consumed via IBM_CREDENTIALS_FILE.
config_file = 'global_catalog.env'

# Populated by setup_class() / the create-entry example below.
global_catalog_service = None
catalog_entry_id = None
##############################################################################
# Start of Examples for Service: GlobalCatalogV1
##############################################################################
# region
class TestGlobalCatalogV1Examples():
    """
    Example Test Class for GlobalCatalogV1

    Each test demonstrates one Global Catalog operation. The
    ``# begin-...`` / ``# end-...`` markers delimit snippets extracted by
    documentation tooling, so the code between them must stay self-contained.
    NOTE(review): ``json`` is used but not imported here — presumably provided
    by the wildcard import of ``global_catalog_v1``; confirm.
    """

    @classmethod
    def setup_class(cls):
        # Build the service client once for the whole class, but only when
        # the external configuration file is present.
        global global_catalog_service
        if os.path.exists(config_file):
            os.environ['IBM_CREDENTIALS_FILE'] = config_file

            # begin-common
            global_catalog_service = GlobalCatalogV1.new_instance()
            # end-common
            assert global_catalog_service is not None

        print('Setup complete.')

    # Class-level marker: skip every example when external config is missing.
    needscredentials = pytest.mark.skipif(
        not os.path.exists(config_file),
        reason="External configuration not available, skipping...")

    @needscredentials
    def test_create_catalog_entry_example(self):
        """
        create_catalog_entry request example
        """
        global catalog_entry_id
        try:
            # begin-create_catalog_entry

            overview_model_EN = {
                'display_name': 'Example Web Starter',
                'description': 'Use the Example service in your applications',
                'long_description': 'This is a starter that helps you use the Example service within your applications.',
            }
            image_model = {
                'image': 'https://somehost.com/examplewebstarter/cachedIcon/large/0',
                'small_image': 'https://somehost.com/examplewebstarter/cachedIcon/small/0',
                'medium_image': 'https://somehost.com/examplewebstarter/cachedIcon/medium/0',
                'feature_image': 'https://somehost.com/examplewebstarter/cachedIcon/large/0',
            }
            provider_model = {
                'email': '<EMAIL>',
                'name': 'Example Starter Co., Inc.',
                'contact': 'Example Starter Developer Relations',
                'support_email': '<EMAIL>',
                'phone': '800-555-1234',
            }
            metadata_model = {
                'version': '1.0.0',
            }

            # Random id so repeated runs do not collide with prior entries.
            catalog_entry_id = str(uuid.uuid4())

            catalog_entry = global_catalog_service.create_catalog_entry(
                name='exampleWebStarter123',
                kind=CatalogEntry.KindEnum.TEMPLATE,
                overview_ui={
                    'en': overview_model_EN
                },
                images=image_model,
                disabled=False,
                tags=['example-tag-1', 'example-tag-2'],
                provider=provider_model,
                id=catalog_entry_id,
                active=True,
                metadata=metadata_model,
            ).get_result()

            print('\ncreate_catalog_entry() result:\n' + json.dumps(catalog_entry, indent=2))

            # end-create_catalog_entry
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_get_catalog_entry_example(self):
        """
        get_catalog_entry request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-get_catalog_entry

            catalog_entry = global_catalog_service.get_catalog_entry(
                id=catalog_entry_id,
                complete=True,
            ).get_result()

            print('\nget_catalog_entry() result:\n' + json.dumps(catalog_entry, indent=2))

            # end-get_catalog_entry
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_update_catalog_entry_example(self):
        """
        update_catalog_entry request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-update_catalog_entry

            overview_model_EN = {
                'display_name': 'Example Web Starter V2',
                'description': 'Use the Example V2 service in your applications',
                'long_description': 'This is a starter that helps you use the Example V2 service within your applications.',
            }
            image_model = {
                'image': 'https://somehost.com/examplewebstarter/cachedIcon/large/0',
                'small_image': 'https://somehost.com/examplewebstarter/cachedIcon/small/0',
                'medium_image': 'https://somehost.com/examplewebstarter/cachedIcon/medium/0',
                'feature_image': 'https://somehost.com/examplewebstarter/cachedIcon/large/0',
            }
            provider_model = {
                'email': '<EMAIL>',
                'name': 'Example Starter Co., Inc.',
                'contact': 'Example Starter Developer Relations',
                'support_email': '<EMAIL>',
                'phone': '800-555-1234',
            }
            metadata_model = {
                'version': '2.0.0',
            }

            # update is a full replacement of the entry's fields.
            catalog_entry = global_catalog_service.update_catalog_entry(
                id=catalog_entry_id,
                name='exampleWebStarter123',
                kind=CatalogEntry.KindEnum.TEMPLATE,
                overview_ui={
                    'en': overview_model_EN,
                },
                images=image_model,
                disabled=False,
                tags=['example-tag-1', 'example-tag-2', 'new-example-tag-3'],
                provider=provider_model,
                active=True,
                metadata=metadata_model,
            ).get_result()

            print('\nupdate_catalog_entry() result:\n' + json.dumps(catalog_entry, indent=2))

            # end-update_catalog_entry
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_list_catalog_entries_example(self):
        """
        list_catalog_entries request example
        """
        try:
            # begin-list_catalog_entries

            entry_search_result = global_catalog_service.list_catalog_entries(
                offset=0,
                limit=10,
                q='kind:template tag:example-tag-1',
                complete=True,
            ).get_result()

            print('\nlist_catalog_entries() result:\n' + json.dumps(entry_search_result, indent=2))

            # end-list_catalog_entries
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_get_child_objects_example(self):
        """
        get_child_objects request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-get_child_objects

            # kind='*' matches children of every kind.
            entry_search_result = global_catalog_service.get_child_objects(
                id=catalog_entry_id,
                kind='*',
                offset=0,
                limit=10,
                complete=True,
            ).get_result()

            print('\nget_child_objects() result:\n' + json.dumps(entry_search_result, indent=2))

            # end-get_child_objects
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_restore_catalog_entry_example(self):
        """
        restore_catalog_entry request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-restore_catalog_entry

            response = global_catalog_service.restore_catalog_entry(
                id=catalog_entry_id,
            ).get_result()

            print('\nrestore_catalog_entry() result:\n' + json.dumps(response, indent=2))

            # end-restore_catalog_entry
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_get_visibility_example(self):
        """
        get_visibility request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-get_visibility

            visibility = global_catalog_service.get_visibility(
                id=catalog_entry_id,
            ).get_result()

            print('\nget_visibility() result:\n' + json.dumps(visibility, indent=2))

            # end-get_visibility
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_update_visibility_example(self):
        """
        update_visibility request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-update_visibility

            response = global_catalog_service.update_visibility(
                id=catalog_entry_id,
                extendable=False,
            ).get_result()

            print('\nupdate_visibility() result:\n' + json.dumps(response, indent=2))

            # end-update_visibility
        except ApiException as e:
            # Deliberately tolerated: update_visibility can be forbidden for
            # some accounts, so this example logs instead of failing.
            print(
                'update_visibility() returned the following error: {0}'.format(str(e.message)))

    @needscredentials
    def test_get_pricing_example(self):
        """
        get_pricing request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-get_pricing

            pricing_get = global_catalog_service.get_pricing(
                id=catalog_entry_id,
            ).get_result()

            print('\nget_pricing() result:\n' + json.dumps(pricing_get, indent=2))

            # end-get_pricing
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_get_audit_logs_example(self):
        """
        get_audit_logs request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-get_audit_logs

            audit_search_result = global_catalog_service.get_audit_logs(
                id=catalog_entry_id,
                offset=0,
                limit=10,
            ).get_result()

            print('\nget_audit_logs() result:\n' + json.dumps(audit_search_result, indent=2))

            # end-get_audit_logs
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_upload_artifact_example(self):
        """
        upload_artifact request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-upload_artifact

            # In-memory file-like object stands in for an artifact on disk.
            artifact_contents = io.BytesIO(
                b'This is an example artifact associated with a catalog entry.')

            response = global_catalog_service.upload_artifact(
                object_id=catalog_entry_id,
                artifact_id='artifact.txt',
                artifact=artifact_contents,
                content_type='text/plain',
            ).get_result()

            print('\nupload_artifact() result:\n' + json.dumps(response, indent=2))

            # end-upload_artifact
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_get_artifact_example(self):
        """
        get_artifact request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-get_artifact

            # Raw response kept (no .get_result() chaining) so headers can be read.
            response = global_catalog_service.get_artifact(
                object_id=catalog_entry_id,
                artifact_id='artifact.txt',
            )
            content_type = response.get_headers().get('content-type')
            result = response.get_result()
            print('\nget_artifact() result:\n')
            print('Artifact content-type: {0}'.format(content_type))
            print('Artifact contents: {0}'.format(str(result.content)))

            # end-get_artifact
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_list_artifacts_example(self):
        """
        list_artifacts request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-list_artifacts

            artifacts = global_catalog_service.list_artifacts(
                object_id=catalog_entry_id).get_result()

            print('\nlist_artifacts() result:\n' + json.dumps(artifacts, indent=2))

            # end-list_artifacts
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_delete_artifact_example(self):
        """
        delete_artifact request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-delete_artifact

            response = global_catalog_service.delete_artifact(
                object_id=catalog_entry_id,
                artifact_id='artifact.txt',
            ).get_result()

            print('\ndelete_artifact() result:\n' + json.dumps(response, indent=2))

            # end-delete_artifact
        except ApiException as e:
            pytest.fail(str(e))

    @needscredentials
    def test_delete_catalog_entry_example(self):
        """
        delete_catalog_entry request example
        """
        global catalog_entry_id
        assert catalog_entry_id is not None
        try:
            # begin-delete_catalog_entry

            response = global_catalog_service.delete_catalog_entry(
                id=catalog_entry_id).get_result()

            print('\ndelete_catalog_entry() result:\n' + json.dumps(response, indent=2))

            # end-delete_catalog_entry
        except ApiException as e:
            pytest.fail(str(e))
# endregion
##############################################################################
# End of Examples for Service: GlobalCatalogV1
##############################################################################
|
[
"io.BytesIO",
"uuid.uuid4",
"os.path.exists"
] |
[((1839, 1866), 'os.path.exists', 'os.path.exists', (['config_file'], {}), '(config_file)\n', (1853, 1866), False, 'import os\n'), ((2196, 2223), 'os.path.exists', 'os.path.exists', (['config_file'], {}), '(config_file)\n', (2210, 2223), False, 'import os\n'), ((11914, 11989), 'io.BytesIO', 'io.BytesIO', (["b'This is an example artifact associated with a catalog entry.'"], {}), "(b'This is an example artifact associated with a catalog entry.')\n", (11924, 11989), False, 'import io\n'), ((3635, 3647), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (3645, 3647), False, 'import uuid\n')]
|
import sympy
from sympy import Function, dsolve, Symbol

# symbols: time and damping ratio
t = Symbol('t', positive=True)
# raw string avoids the invalid "\z" escape-sequence warning
zeta = Symbol(r'\zeta', constant=True, positive=True)

# unknown function u(t)
u = Function('u')(t)

# assumed values: initial displacement, initial velocity, natural frequency
u0 = 1
v0 = 0
wn = 4.
# damped natural frequency (kept at module scope; not used below)
wd = wn*sympy.sqrt(1-zeta**2)

# initial conditions u(0) = u0, u'(0) = v0
ics = {u.subs(t, 0): u0, u.diff(t).subs(t, 0): v0}
# solve the damped free-vibration ODE: u'' + 2*zeta*wn*u' + wn**2*u = 0
sol = dsolve(u.diff(t, t) + 2*zeta*wn*u.diff(t) + wn**2*u, ics=ics)

import matplotlib
# select an interactive backend before plotting
matplotlib.use('TkAgg')
from sympy.plotting import plot3d
# surface of the response u(t) over time and damping ratio
p1 = plot3d(sol.rhs, (t, 0, 10), (zeta, 0.05, 0.7),
       show=False,
       nb_of_points_x=500,
       nb_of_points_y=10,
       xlabel='$t$',
       ylabel=r'$\zeta$',
       zlabel='$u(t)$',
       )
p1.show()
|
[
"sympy.Symbol",
"sympy.sqrt",
"matplotlib.use",
"sympy.Function",
"sympy.plotting.plot3d"
] |
[((71, 97), 'sympy.Symbol', 'Symbol', (['"""t"""'], {'positive': '(True)'}), "('t', positive=True)\n", (77, 97), False, 'from sympy import Function, dsolve, Symbol\n'), ((105, 151), 'sympy.Symbol', 'Symbol', (['"""\\\\zeta"""'], {'constant': '(True)', 'positive': '(True)'}), "('\\\\zeta', constant=True, positive=True)\n", (111, 151), False, 'from sympy import Function, dsolve, Symbol\n'), ((401, 424), 'matplotlib.use', 'matplotlib.use', (['"""TkAgg"""'], {}), "('TkAgg')\n", (415, 424), False, 'import matplotlib\n'), ((465, 617), 'sympy.plotting.plot3d', 'plot3d', (['sol.rhs', '(t, 0, 10)', '(zeta, 0.05, 0.7)'], {'show': '(False)', 'nb_of_points_x': '(500)', 'nb_of_points_y': '(10)', 'xlabel': '"""$t$"""', 'ylabel': '"""$\\\\zeta$"""', 'zlabel': '"""$u(t)$"""'}), "(sol.rhs, (t, 0, 10), (zeta, 0.05, 0.7), show=False, nb_of_points_x=\n 500, nb_of_points_y=10, xlabel='$t$', ylabel='$\\\\zeta$', zlabel='$u(t)$')\n", (471, 617), False, 'from sympy.plotting import plot3d\n'), ((175, 188), 'sympy.Function', 'Function', (['"""u"""'], {}), "('u')\n", (183, 188), False, 'from sympy import Function, dsolve, Symbol\n'), ((240, 265), 'sympy.sqrt', 'sympy.sqrt', (['(1 - zeta ** 2)'], {}), '(1 - zeta ** 2)\n', (250, 265), False, 'import sympy\n')]
|
import datetime
from django.db import models
from django.utils.translation import ugettext as _
from django.core.validators import RegexValidator
from base.models import (Address, Center, Participant)
class EventCategory(models.Model):
    """A named category that events are grouped under."""
    # NOTE(review): ugettext (non-lazy) evaluates at import time; Django
    # convention for model help_text is ugettext_lazy — confirm intent.
    category = models.CharField(max_length=50, default="", help_text=_("Event Category"))

    def __str__(self):
        return "Event Category: {}".format(self.category)
class Event(models.Model):
    """Event represents a particular Event.

    venue field is a foreign key to :model:`base.Address`
    """
    # Choices: registration years from 2016 through four years past the
    # current year (computed once, at class definition / import time).
    YEAR_CHOICES = []
    curr_year = datetime.datetime.now().year
    for r in range(2016, curr_year + 5):
        YEAR_CHOICES.append((r,r))
    GENDER_FEMALE = 'female'
    GENDER_MALE = 'male'
    GENDER_CHOICES = (
        (GENDER_FEMALE, 'Female'),
        (GENDER_MALE, 'Male'))

    # Identity and location
    name = models.CharField(max_length=50, help_text=_("Event Name"))
    venue = models.ForeignKey(Address, on_delete=models.CASCADE)
    center = models.ForeignKey(Center, on_delete=models.CASCADE, help_text=_("Center"))
    category = models.ForeignKey(EventCategory, on_delete=models.CASCADE, help_text=_("Event Category"), default="")

    # Schedule
    year = models.PositiveIntegerField(choices=YEAR_CHOICES, default=curr_year,
                                       help_text=_('year'))
    start_date = models.DateField(help_text=_("Event Start Date"))
    end_date = models.DateField(help_text=_("Event End Date"))
    last_date_of_registration = models.DateField(
        help_text=_("Last Date of Registration"))

    # Fees and logistics
    fees = models.DecimalField(max_digits=10, decimal_places=2,
                               help_text=_("Event Fees"))
    late_fees = models.DecimalField(max_digits=10, decimal_places=2,
                                    help_text=_("Late Registration Fees"))
    accommodation_provided = models.BooleanField(help_text=_("Is Accommodation Provided?"))
    event_code = models.CharField(max_length=100, unique=True, help_text=_("Event Code"))
    gender = models.CharField(max_length=6, choices=GENDER_CHOICES, blank=True)

    # This represents age-group
    min_age = models.PositiveIntegerField(help_text=_("Age Group lower limit"))
    max_age = models.PositiveIntegerField(help_text=_("Age Group Upper limit"))

    # Free-form notes and status
    rules = models.TextField(help_text=_("Any Rules"), blank=True)
    remarks = models.TextField(help_text=_("Any Remarks"), blank=True)
    active = models.BooleanField(help_text=_("Is event active?"))

    # Point-of-contact details
    poc_name = models.CharField(max_length=50, help_text="Name of point of contact", blank=True)
    poc_number = models.CharField(max_length=50, help_text="Contact number of POC", blank=True)
    is_global_poc = models.BooleanField(help_text=_("Is global PoC?"), default=False)
class EventParticipant(models.Model):
    """EventParticipant stores information about a participant for the Event.

    The EventParticipant table contains information about an event participant
    for an event.
    event field is a foreign key to :model:`events.Event`
    participant field is a foreign key to :model:`base.Participant`
    home_center field is a foreign key to :model:`base.Center`
    event_center field is optional and a foreign key to :model:`base.Center`
    """
    # Choices for the participant's role at the event
    ROLE_PARTICIPANT = 'participant'
    ROLE_HELPER = 'helper'
    ROLE_COORDINATOR = 'coordinator'
    ROLE_CHOICES = (
        (ROLE_PARTICIPANT, 'Participant'),
        (ROLE_HELPER, 'Helper'),
        (ROLE_COORDINATOR, 'Coordinator'))

    # Core relations
    event = models.ForeignKey(Event, on_delete=models.CASCADE)
    participant = models.ForeignKey(Participant, on_delete=models.CASCADE)
    registration_no = models.CharField(max_length=100, unique=True, help_text=_("Registration Number"))
    home_center = models.ForeignKey(Center, on_delete=models.CASCADE, related_name='home_center',
                                    help_text=_("Home Center"))
    # Optional: the center the participant attends the event at, when
    # different from their home center.
    event_center = models.ForeignKey(Center, on_delete=models.CASCADE, blank=True, null=True,
                                     related_name='event_center', help_text=_("Event Center"))

    # Logistics and payment
    accommodation = models.BooleanField(help_text=_("Is Accommodation Required?"))
    payment_status = models.BooleanField(help_text=_("Has paid?"))
    amount_paid = models.DecimalField(max_digits=10, decimal_places=2, help_text=_("Amount Paid"))
    cashier = models.CharField(max_length=50, help_text=_("Cashier"), blank=True)

    # Event-specific details
    big_buddy = models.CharField(max_length=50, help_text=_("Big Buddy"), blank=True)
    goal_achievement = models.CharField(max_length=100, help_text=_("Goal Achievement"), blank=True)
    role = models.CharField(max_length=12, choices=ROLE_CHOICES, help_text=_("Role"))
    # NOTE(review): semantics of the integer status codes are not visible
    # here — confirm against the registration workflow.
    registration_status = models.PositiveSmallIntegerField(default=0, help_text=_("Registration Status"))

    # Bookkeeping timestamps
    created_on = models.DateTimeField(auto_now_add=True, help_text=_("Event Participant Created on"))
    updated_on = models.DateTimeField(auto_now=True, help_text=_("Event Participant Updated on"))
    skill = models.TextField(blank=True, help_text=_("Skill"))
|
[
"django.db.models.ForeignKey",
"django.utils.translation.ugettext",
"django.db.models.CharField",
"datetime.datetime.now"
] |
[((946, 998), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Address'], {'on_delete': 'models.CASCADE'}), '(Address, on_delete=models.CASCADE)\n', (963, 998), False, 'from django.db import models\n'), ((2049, 2115), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(6)', 'choices': 'GENDER_CHOICES', 'blank': '(True)'}), '(max_length=6, choices=GENDER_CHOICES, blank=True)\n', (2065, 2115), False, 'from django.db import models\n'), ((2528, 2614), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'help_text': '"""Name of point of contact"""', 'blank': '(True)'}), "(max_length=50, help_text='Name of point of contact', blank\n =True)\n", (2544, 2614), False, 'from django.db import models\n'), ((2627, 2705), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'help_text': '"""Contact number of POC"""', 'blank': '(True)'}), "(max_length=50, help_text='Contact number of POC', blank=True)\n", (2643, 2705), False, 'from django.db import models\n'), ((3573, 3623), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Event'], {'on_delete': 'models.CASCADE'}), '(Event, on_delete=models.CASCADE)\n', (3590, 3623), False, 'from django.db import models\n'), ((3642, 3698), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Participant'], {'on_delete': 'models.CASCADE'}), '(Participant, on_delete=models.CASCADE)\n', (3659, 3698), False, 'from django.db import models\n'), ((606, 629), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (627, 629), False, 'import datetime\n'), ((306, 325), 'django.utils.translation.ugettext', '_', (['"""Event Category"""'], {}), "('Event Category')\n", (307, 325), True, 'from django.utils.translation import ugettext as _\n'), ((917, 932), 'django.utils.translation.ugettext', '_', (['"""Event Name"""'], {}), "('Event Name')\n", (918, 932), True, 'from django.utils.translation import ugettext as _\n'), ((1074, 1085), 
'django.utils.translation.ugettext', '_', (['"""Center"""'], {}), "('Center')\n", (1075, 1085), True, 'from django.utils.translation import ugettext as _\n'), ((1171, 1190), 'django.utils.translation.ugettext', '_', (['"""Event Category"""'], {}), "('Event Category')\n", (1172, 1190), True, 'from django.utils.translation import ugettext as _\n'), ((1326, 1335), 'django.utils.translation.ugettext', '_', (['"""year"""'], {}), "('year')\n", (1327, 1335), True, 'from django.utils.translation import ugettext as _\n'), ((1381, 1402), 'django.utils.translation.ugettext', '_', (['"""Event Start Date"""'], {}), "('Event Start Date')\n", (1382, 1402), True, 'from django.utils.translation import ugettext as _\n'), ((1446, 1465), 'django.utils.translation.ugettext', '_', (['"""Event End Date"""'], {}), "('Event End Date')\n", (1447, 1465), True, 'from django.utils.translation import ugettext as _\n'), ((1559, 1589), 'django.utils.translation.ugettext', '_', (['"""Last Date of Registration"""'], {}), "('Last Date of Registration')\n", (1560, 1589), True, 'from django.utils.translation import ugettext as _\n'), ((1697, 1712), 'django.utils.translation.ugettext', '_', (['"""Event Fees"""'], {}), "('Event Fees')\n", (1698, 1712), True, 'from django.utils.translation import ugettext as _\n'), ((1825, 1852), 'django.utils.translation.ugettext', '_', (['"""Late Registration Fees"""'], {}), "('Late Registration Fees')\n", (1826, 1852), True, 'from django.utils.translation import ugettext as _\n'), ((1913, 1944), 'django.utils.translation.ugettext', '_', (['"""Is Accommodation Provided?"""'], {}), "('Is Accommodation Provided?')\n", (1914, 1944), True, 'from django.utils.translation import ugettext as _\n'), ((2019, 2034), 'django.utils.translation.ugettext', '_', (['"""Event Code"""'], {}), "('Event Code')\n", (2020, 2034), True, 'from django.utils.translation import ugettext as _\n'), ((2200, 2226), 'django.utils.translation.ugettext', '_', (['"""Age Group lower limit"""'], {}), 
"('Age Group lower limit')\n", (2201, 2226), True, 'from django.utils.translation import ugettext as _\n'), ((2280, 2306), 'django.utils.translation.ugettext', '_', (['"""Age Group Upper limit"""'], {}), "('Age Group Upper limit')\n", (2281, 2306), True, 'from django.utils.translation import ugettext as _\n'), ((2348, 2362), 'django.utils.translation.ugettext', '_', (['"""Any Rules"""'], {}), "('Any Rules')\n", (2349, 2362), True, 'from django.utils.translation import ugettext as _\n'), ((2417, 2433), 'django.utils.translation.ugettext', '_', (['"""Any Remarks"""'], {}), "('Any Remarks')\n", (2418, 2433), True, 'from django.utils.translation import ugettext as _\n'), ((2490, 2511), 'django.utils.translation.ugettext', '_', (['"""Is event active?"""'], {}), "('Is event active?')\n", (2491, 2511), True, 'from django.utils.translation import ugettext as _\n'), ((2756, 2775), 'django.utils.translation.ugettext', '_', (['"""Is global PoC?"""'], {}), "('Is global PoC?')\n", (2757, 2775), True, 'from django.utils.translation import ugettext as _\n'), ((3777, 3801), 'django.utils.translation.ugettext', '_', (['"""Registration Number"""'], {}), "('Registration Number')\n", (3778, 3801), True, 'from django.utils.translation import ugettext as _\n'), ((3947, 3963), 'django.utils.translation.ugettext', '_', (['"""Home Center"""'], {}), "('Home Center')\n", (3948, 3963), True, 'from django.utils.translation import ugettext as _\n'), ((4134, 4151), 'django.utils.translation.ugettext', '_', (['"""Event Center"""'], {}), "('Event Center')\n", (4135, 4151), True, 'from django.utils.translation import ugettext as _\n'), ((4203, 4234), 'django.utils.translation.ugettext', '_', (['"""Is Accommodation Required?"""'], {}), "('Is Accommodation Required?')\n", (4204, 4234), True, 'from django.utils.translation import ugettext as _\n'), ((4287, 4301), 'django.utils.translation.ugettext', '_', (['"""Has paid?"""'], {}), "('Has paid?')\n", (4288, 4301), True, 'from django.utils.translation 
import ugettext as _\n'), ((4384, 4400), 'django.utils.translation.ugettext', '_', (['"""Amount Paid"""'], {}), "('Amount Paid')\n", (4385, 4400), True, 'from django.utils.translation import ugettext as _\n'), ((4458, 4470), 'django.utils.translation.ugettext', '_', (['"""Cashier"""'], {}), "('Cashier')\n", (4459, 4470), True, 'from django.utils.translation import ugettext as _\n'), ((4542, 4556), 'django.utils.translation.ugettext', '_', (['"""Big Buddy"""'], {}), "('Big Buddy')\n", (4543, 4556), True, 'from django.utils.translation import ugettext as _\n'), ((4636, 4657), 'django.utils.translation.ugettext', '_', (['"""Goal Achievement"""'], {}), "('Goal Achievement')\n", (4637, 4657), True, 'from django.utils.translation import ugettext as _\n'), ((4746, 4755), 'django.utils.translation.ugettext', '_', (['"""Role"""'], {}), "('Role')\n", (4747, 4755), True, 'from django.utils.translation import ugettext as _\n'), ((4837, 4861), 'django.utils.translation.ugettext', '_', (['"""Registration Status"""'], {}), "('Registration Status')\n", (4838, 4861), True, 'from django.utils.translation import ugettext as _\n'), ((4930, 4963), 'django.utils.translation.ugettext', '_', (['"""Event Participant Created on"""'], {}), "('Event Participant Created on')\n", (4931, 4963), True, 'from django.utils.translation import ugettext as _\n'), ((5028, 5061), 'django.utils.translation.ugettext', '_', (['"""Event Participant Updated on"""'], {}), "('Event Participant Updated on')\n", (5029, 5061), True, 'from django.utils.translation import ugettext as _\n'), ((5114, 5124), 'django.utils.translation.ugettext', '_', (['"""Skill"""'], {}), "('Skill')\n", (5115, 5124), True, 'from django.utils.translation import ugettext as _\n')]
|
#!/usr/bin/env python
# coding=utf-8
# pylint: disable=broad-except,unused-argument,line-too-long, unused-variable
# Copyright (c) 2016-2018, F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module contains the logic to scan for patched TMOS disk images
and then upload to IBM Cloud Object Storage
"""
import os
import sys
import time
import datetime
import logging
import json
import ibm_boto3
import urlparse
import requests
from ibm_botocore.client import Config, ClientError
# Disk-image file extensions recognized as uploadable TMOS images.
IMAGE_TYPES = ['.qcow2', '.vhd', '.vmdk']
# Runtime configuration; presumably populated by an initializer outside this
# view (environment variables or CLI) — all default to empty/None here.
IBM_COS_REGIONS = []
TMOS_IMAGE_DIR = None
COS_API_KEY = None
COS_RESOURCE_CRN = None
COS_IMAGE_LOCATION = None
COS_AUTH_ENDPOINT = None
COS_ENDPOINT = None
UPDATE_IMAGES = None
DELETE_ALL = None
# Module logger: DEBUG level, streamed to stdout.
LOG = logging.getLogger('ibmcloud_cos_image_uploader')
LOG.setLevel(logging.DEBUG)
FORMATTER = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
LOGSTREAM = logging.StreamHandler(sys.stdout)
LOGSTREAM.setFormatter(FORMATTER)
LOG.addHandler(LOGSTREAM)
def get_patched_images(tmos_image_dir):
    """Return paths of patched TMOS disk images found one directory level below *tmos_image_dir*."""
    LOG.debug('searching for images in %s', tmos_image_dir)
    image_paths = []
    for entry in os.listdir(tmos_image_dir):
        entry_path = "%s/%s" % (tmos_image_dir, entry)
        # only per-image subdirectories are scanned; stray files are ignored
        if not os.path.isdir(entry_path):
            continue
        for candidate in os.listdir(entry_path):
            if os.path.splitext(candidate)[1] in IMAGE_TYPES:
                image_paths.append("%s/%s" % (entry_path, candidate))
    return image_paths
def get_bucket_name(image_path, location):
    """Derive the COS bucket name for a patched image: flattened directory name plus region suffix."""
    relative = image_path.replace(TMOS_IMAGE_DIR, '')
    flattened = os.path.dirname(relative).replace(os.path.sep, '')
    base = os.path.splitext(flattened)[0]
    # bucket names use '-' and lower case
    return "%s-%s" % (base.replace('_', '-').lower(), location)
def get_object_name(image_path, location):
    """Derive the COS object name for a patched image from its flattened directory name."""
    flattened = os.path.dirname(image_path.replace(TMOS_IMAGE_DIR, '')).replace(os.path.sep, '')
    # DATASTOR disks get a distinguishing suffix
    if 'DATASTOR' in image_path:
        return "%s_DATASTOR" % flattened
    return flattened
def get_cos_client(location):
    """Build an IBM COS low-level client bound to the given region."""
    endpoint_url = "https://s3.%s.cloud-object-storage.appdomain.cloud" % location
    return ibm_boto3.client(
        "s3",
        ibm_api_key_id=COS_API_KEY,
        ibm_service_instance_id=COS_RESOURCE_CRN,
        ibm_auth_endpoint=COS_AUTH_ENDPOINT,
        config=Config(signature_version="oauth"),
        endpoint_url=endpoint_url)
def get_cos_resource(location):
    """Build an IBM COS high-level resource object bound to the given region."""
    endpoint_url = "https://s3.%s.cloud-object-storage.appdomain.cloud" % location
    return ibm_boto3.resource(
        "s3",
        ibm_api_key_id=COS_API_KEY,
        ibm_service_instance_id=COS_RESOURCE_CRN,
        ibm_auth_endpoint=COS_AUTH_ENDPOINT,
        config=Config(signature_version="oauth"),
        endpoint_url=endpoint_url)
def assure_bucket(bucket_name, location):
    """Ensure *bucket_name* exists in *location*, creating it public-read if absent.

    Returns True on success, False when the COS API reports an error.
    """
    cos_res = get_cos_resource(location)
    try:
        if any(existing.name == bucket_name for existing in cos_res.buckets.all()):
            return True
        LOG.debug('creating bucket %s', bucket_name)
        cos_res.Bucket(bucket_name).create(ACL='public-read')
        return True
    except ClientError as client_error:
        LOG.error('client error assuring bucket %s: %s',
                  bucket_name, client_error)
        return False
    except Exception as ex:
        LOG.error('exception occurred assuring bucket %s: %s', bucket_name, ex)
        return False
def assure_object(file_path, bucket_name, object_name, location):
    """Ensure file_path is uploaded as bucket_name/object_name in location.

    If the object already exists it is kept — unless the UPDATE_IMAGES
    module flag is set, in which case it is deleted and re-uploaded.
    Returns True on success (existing or uploaded), False on any error.
    """
    cos_res = get_cos_resource(location)
    try:
        # scan the bucket for an existing object with the same key
        for obj in cos_res.Bucket(bucket_name).objects.all():
            if obj.key == object_name:
                if UPDATE_IMAGES:
                    # force a re-upload by removing the stale object
                    obj.delete()
                else:
                    return True
        LOG.debug('starting upload of image %s to %s/%s',
                  file_path, bucket_name, object_name)
        # multipart upload: 2MB chunks, multipart kicks in above 10GB
        part_size = 1024 * 1024 * 2
        file_threshold = 1024 * 1024 * 1024 * 10
        transfer_config = ibm_boto3.s3.transfer.TransferConfig(
            multipart_threshold=file_threshold,
            multipart_chunksize=part_size
        )
        cos_client = get_cos_client(location)
        transfer_mgr = ibm_boto3.s3.transfer.TransferManager(
            cos_client, config=transfer_config)
        upload = transfer_mgr.upload(file_path, bucket_name, object_name, extra_args={'ACL': 'public-read'})
        # upload() is asynchronous; result() blocks until it completes
        upload.result()
        LOG.debug('upload complete for %s/%s', bucket_name, object_name)
        return True
    except ClientError as ce:
        LOG.error('client error assuring object %s/%s: %s',
                  bucket_name, object_name, ce)
        return False
    except Exception as ex:
        LOG.error('exception occurred assuring object %s/%s: %s',
                  bucket_name, object_name, ex)
        return False
def assure_cos_image(image_path, location):
    """Upload a patched image and its sidecar files (.md5, .384.sig) to COS."""
    bucket_name = get_bucket_name(image_path, location)
    object_name = get_object_name(image_path, location)
    LOG.debug('checking IBM COS Object: %s/%s exists',
              bucket_name, object_name)
    if not assure_bucket(bucket_name, location):
        return
    assure_object(image_path, bucket_name, object_name, location)
    # sidecar files are optional; upload each one only when present locally
    for suffix in ('.md5', '.384.sig'):
        sidecar_path = "%s%s" % (image_path, suffix)
        if os.path.exists(sidecar_path):
            sidecar_object_name = "%s%s" % (object_name, suffix)
            assure_object(sidecar_path, bucket_name,
                          sidecar_object_name, location)
def delete_all():
    """Delete every image object and bucket this tool manages from COS.

    Only buckets whose name contains the region string are touched; errors
    in one region are logged and do not stop the other regions.
    """
    LOG.debug('deleting images in: %s', IBM_COS_REGIONS)
    for location in IBM_COS_REGIONS:
        LOG.debug("deleting images in %s region" % location)
        cos_res = get_cos_resource(location)
        try:
            for bucket in cos_res.buckets.all():
                # bucket names embed the region (see get_bucket_name)
                if location in bucket.name:
                    LOG.debug('deleting bucket: %s', bucket.name)
                    # a bucket must be emptied before it can be deleted
                    for obj in cos_res.Bucket(bucket.name).objects.all():
                        LOG.debug('deleting object: %s', obj.key)
                        obj.delete()
                    bucket.delete()
        except ClientError as client_error:
            LOG.error('client error deleting all resources: %s', client_error)
        except Exception as ex:
            LOG.error('exception occurred deleting all resources: %s', ex)
def upload_patched_images():
    """Scan for patched TMOS images and assure each one in every COS region."""
    LOG.debug('uploading images in %s', IBM_COS_REGIONS)
    patched_images = get_patched_images(TMOS_IMAGE_DIR)
    for image_path in patched_images:
        for region in IBM_COS_REGIONS:
            assure_cos_image(image_path, region)
def inventory():
    """Build the image-catalog JSON and publish it to every COS region.

    Writes TMOS_IMAGE_DIR/ibmcos_images.json locally, then uploads it as
    f5-image-catalog.json to a per-region catalog bucket. Sets the module
    flag UPDATE_IMAGES so the catalog upload overwrites any stale copy.
    """
    global UPDATE_IMAGES
    inventory_file = "%s/ibmcos_images.json" % (TMOS_IMAGE_DIR)
    # start from a clean local file
    if os.path.exists(inventory_file):
        os.unlink(inventory_file)
    # NOTE(review): this local dict shadows the function's own name
    inventory = {}
    for location in IBM_COS_REGIONS:
        inventory[location] = []
        cos_res = get_cos_resource(location)
        try:
            for bucket in cos_res.buckets.all():
                # bucket names embed the region (see get_bucket_name)
                if location in bucket.name:
                    for obj in cos_res.Bucket(bucket.name).objects.all():
                        LOG.debug('inventory add %s/%s', bucket.name, obj.key)
                        # only catalog real image objects, not sidecars
                        if os.path.splitext(obj.key)[1] in IMAGE_TYPES:
                            inv_obj = {
                                'image_name': bucket.name.replace('.', '-'),
                                'image_sql_url': "cos://%s/%s/%s" % (location, bucket.name, obj.key),
                                'md5_sql_url': "cos://%s/%s/%s.md5" % (location, bucket.name, obj.key)
                            }
                            inventory[location].append(inv_obj)
        except ClientError as client_error:
            LOG.error('client error creating inventory of resources: %s', client_error)
        except Exception as ex:
            LOG.error('exception creating inventory of resources: %s', ex)
    # write it locally
    with open(inventory_file, 'w') as ivf:
        ivf.write(json.dumps(inventory))
    # store in each location
    if not DELETE_ALL:
        # force re-upload so the published catalog is always current
        UPDATE_IMAGES = True
        for location in IBM_COS_REGIONS:
            bucket_name = "f5-image-catalog-%s" % location
            public_url = "https://%s.s3.%s.cloud-object-storage.appdomain.cloud/f5-image-catalog.json" % (bucket_name, location)
            LOG.debug('writing image catalog to: %s', public_url)
            assure_bucket(bucket_name, location)
            assure_object(inventory_file, bucket_name, "f5-image-catalog.json", location)
def initialize():
    """Load the uploader's configuration from environment variables.

    Populates the module-level globals: image directory, COS credentials,
    region list (comma-separated in COS_IMAGE_LOCATION) and the boolean
    UPDATE_IMAGES / DELETE_ALL flags.
    """
    global TMOS_IMAGE_DIR, IBM_COS_REGIONS, COS_API_KEY, COS_RESOURCE_CRN, COS_IMAGE_LOCATION, COS_AUTH_ENDPOINT, UPDATE_IMAGES, DELETE_ALL
    TMOS_IMAGE_DIR = os.getenv('TMOS_IMAGE_DIR', None)
    COS_API_KEY = os.getenv('COS_API_KEY', None)
    COS_RESOURCE_CRN = os.getenv('COS_RESOURCE_CRN', None)
    COS_IMAGE_LOCATION = os.getenv('COS_IMAGE_LOCATION', 'us-south')
    IBM_COS_REGIONS = [region.strip() for region in COS_IMAGE_LOCATION.split(',')]
    COS_AUTH_ENDPOINT = os.getenv(
        'COS_AUTH_ENDPOINT', 'https://iam.cloud.ibm.com/identity/token')
    # env flags arrive as strings; normalize them to booleans
    UPDATE_IMAGES = os.getenv('UPDATE_IMAGES', 'false').lower() == 'true'
    DELETE_ALL = os.getenv('DELETE_ALL', 'false').lower() == 'true'
if __name__ == "__main__":
    # record wall-clock start for the duration report at the end
    START_TIME = time.time()
    LOG.debug('process start time: %s', datetime.datetime.fromtimestamp(
        START_TIME).strftime("%A, %B %d, %Y %I:%M:%S"))
    initialize()
    # validate required environment configuration before doing any work
    ERROR_MESSAGE = ''
    ERROR = False
    if not COS_API_KEY:
        ERROR = True
        ERROR_MESSAGE += "please set env COS_API_KEY for your IBM COS resource\n"
    if not COS_RESOURCE_CRN:
        ERROR = True
        ERROR_MESSAGE += "please set env COS_RESOURCE_CRN for your IBM COS resource\n"
    # the image dir is only needed when uploading, not when deleting
    if not TMOS_IMAGE_DIR and not DELETE_ALL:
        ERROR = True
        ERROR_MESSAGE += "please set env TMOS_IMAGE_DIR to scan for patched TMOS images\n"
    if ERROR:
        LOG.error('\n\n%s\n', ERROR_MESSAGE)
        sys.exit(1)
    # DELETE_ALL is mutually exclusive with upload + catalog publication
    if DELETE_ALL:
        delete_all()
    else:
        upload_patched_images()
        inventory()
    STOP_TIME = time.time()
    DURATION = STOP_TIME - START_TIME
    LOG.debug(
        'process end time: %s - ran %s (seconds)',
        datetime.datetime.fromtimestamp(
            STOP_TIME).strftime("%A, %B %d, %Y %I:%M:%S"),
        DURATION
    )
|
[
"ibm_boto3.s3.transfer.TransferManager",
"os.unlink",
"os.path.isdir",
"logging.StreamHandler",
"os.path.exists",
"json.dumps",
"time.time",
"logging.Formatter",
"ibm_botocore.client.Config",
"os.path.splitext",
"datetime.datetime.fromtimestamp",
"sys.exit",
"ibm_boto3.s3.transfer.TransferConfig",
"os.getenv",
"os.listdir",
"logging.getLogger"
] |
[((1255, 1303), 'logging.getLogger', 'logging.getLogger', (['"""ibmcloud_cos_image_uploader"""'], {}), "('ibmcloud_cos_image_uploader')\n", (1272, 1303), False, 'import logging\n'), ((1344, 1417), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (1361, 1417), False, 'import logging\n'), ((1435, 1468), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (1456, 1468), False, 'import logging\n'), ((1721, 1747), 'os.listdir', 'os.listdir', (['tmos_image_dir'], {}), '(tmos_image_dir)\n', (1731, 1747), False, 'import os\n'), ((6434, 6458), 'os.path.exists', 'os.path.exists', (['md5_path'], {}), '(md5_path)\n', (6448, 6458), False, 'import os\n'), ((6629, 6653), 'os.path.exists', 'os.path.exists', (['sig_path'], {}), '(sig_path)\n', (6643, 6653), False, 'import os\n'), ((8117, 8147), 'os.path.exists', 'os.path.exists', (['inventory_file'], {}), '(inventory_file)\n', (8131, 8147), False, 'import os\n'), ((10168, 10201), 'os.getenv', 'os.getenv', (['"""TMOS_IMAGE_DIR"""', 'None'], {}), "('TMOS_IMAGE_DIR', None)\n", (10177, 10201), False, 'import os\n'), ((10220, 10250), 'os.getenv', 'os.getenv', (['"""COS_API_KEY"""', 'None'], {}), "('COS_API_KEY', None)\n", (10229, 10250), False, 'import os\n'), ((10274, 10309), 'os.getenv', 'os.getenv', (['"""COS_RESOURCE_CRN"""', 'None'], {}), "('COS_RESOURCE_CRN', None)\n", (10283, 10309), False, 'import os\n'), ((10335, 10378), 'os.getenv', 'os.getenv', (['"""COS_IMAGE_LOCATION"""', '"""us-south"""'], {}), "('COS_IMAGE_LOCATION', 'us-south')\n", (10344, 10378), False, 'import os\n'), ((10478, 10552), 'os.getenv', 'os.getenv', (['"""COS_AUTH_ENDPOINT"""', '"""https://iam.cloud.ibm.com/identity/token"""'], {}), "('COS_AUTH_ENDPOINT', 'https://iam.cloud.ibm.com/identity/token')\n", (10487, 10552), False, 'import os\n'), ((10582, 10617), 'os.getenv', 'os.getenv', 
(['"""UPDATE_IMAGES"""', '"""false"""'], {}), "('UPDATE_IMAGES', 'false')\n", (10591, 10617), False, 'import os\n'), ((10744, 10776), 'os.getenv', 'os.getenv', (['"""DELETE_ALL"""', '"""false"""'], {}), "('DELETE_ALL', 'false')\n", (10753, 10776), False, 'import os\n'), ((10923, 10934), 'time.time', 'time.time', ([], {}), '()\n', (10932, 10934), False, 'import time\n'), ((11747, 11758), 'time.time', 'time.time', ([], {}), '()\n', (11756, 11758), False, 'import time\n'), ((1827, 1858), 'os.path.isdir', 'os.path.isdir', (['patched_dir_path'], {}), '(patched_dir_path)\n', (1840, 1858), False, 'import os\n'), ((5140, 5247), 'ibm_boto3.s3.transfer.TransferConfig', 'ibm_boto3.s3.transfer.TransferConfig', ([], {'multipart_threshold': 'file_threshold', 'multipart_chunksize': 'part_size'}), '(multipart_threshold=file_threshold,\n multipart_chunksize=part_size)\n', (5176, 5247), False, 'import ibm_boto3\n'), ((5348, 5421), 'ibm_boto3.s3.transfer.TransferManager', 'ibm_boto3.s3.transfer.TransferManager', (['cos_client'], {'config': 'transfer_config'}), '(cos_client, config=transfer_config)\n', (5385, 5421), False, 'import ibm_boto3\n'), ((8157, 8182), 'os.unlink', 'os.unlink', (['inventory_file'], {}), '(inventory_file)\n', (8166, 8182), False, 'import os\n'), ((11616, 11627), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (11624, 11627), False, 'import sys\n'), ((1893, 1921), 'os.listdir', 'os.listdir', (['patched_dir_path'], {}), '(patched_dir_path)\n', (1903, 1921), False, 'import os\n'), ((3223, 3256), 'ibm_botocore.client.Config', 'Config', ([], {'signature_version': '"""oauth"""'}), "(signature_version='oauth')\n", (3229, 3256), False, 'from ibm_botocore.client import Config, ClientError\n'), ((3741, 3774), 'ibm_botocore.client.Config', 'Config', ([], {'signature_version': '"""oauth"""'}), "(signature_version='oauth')\n", (3747, 3774), False, 'from ibm_botocore.client import Config, ClientError\n'), ((9387, 9408), 'json.dumps', 'json.dumps', (['inventory'], {}), 
'(inventory)\n', (9397, 9408), False, 'import json\n'), ((10975, 11018), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['START_TIME'], {}), '(START_TIME)\n', (11006, 11018), False, 'import datetime\n'), ((11871, 11913), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['STOP_TIME'], {}), '(STOP_TIME)\n', (11902, 11913), False, 'import datetime\n'), ((1942, 1973), 'os.path.splitext', 'os.path.splitext', (['patched_image'], {}), '(patched_image)\n', (1958, 1973), False, 'import os\n'), ((8603, 8628), 'os.path.splitext', 'os.path.splitext', (['obj.key'], {}), '(obj.key)\n', (8619, 8628), False, 'import os\n')]
|
from qtpy.QtCore import Qt, Signal
from qtpy.QtWidgets import (
QHBoxLayout,
QLabel,
QPushButton,
QTableWidget,
QTableWidgetItem,
QVBoxLayout,
QWidget,
)
from ...settings import get_settings
from ...utils.translations import trans
class Extension2ReaderTable(QWidget):
    """Table showing extension to reader mappings with a removal button.

    Widget presented in the preferences-plugin dialog. Mappings are read
    from (and written back to) get_settings().plugins.extension2reader."""
    # emitted value is unused here; kept for the preferences dialog API
    valueChanged = Signal(int)
    def __init__(self, parent=None):
        super().__init__(parent=parent)
        self._table = QTableWidget()
        self._table.setShowGrid(False)
        self._populate_table()
        layout = QVBoxLayout()
        layout.addWidget(self._table)
        self.setLayout(layout)
    def _populate_table(self):
        """Add a row for each extension-to-reader mapping in settings."""
        # column indices used throughout the widget
        self._extension_col = 0
        self._reader_col = 1
        header_strs = [trans._('Extension'), trans._('Reader Plugin')]
        self._table.setColumnCount(2)
        self._table.setColumnWidth(self._extension_col, 100)
        self._table.setColumnWidth(self._reader_col, 150)
        self._table.verticalHeader().setVisible(False)
        self._table.setMinimumHeight(120)
        extension2reader = get_settings().plugins.extension2reader
        if len(extension2reader) > 0:
            self._table.setRowCount(len(extension2reader))
            self._table.horizontalHeader().setStretchLastSection(True)
            self._table.horizontalHeader().setStyleSheet(
                'border-bottom: 2px solid white;'
            )
            self._table.setHorizontalHeaderLabels(header_strs)
            for row, (extension, plugin_name) in enumerate(
                extension2reader.items()
            ):
                # extension cell: plain, non-interactive text
                item = QTableWidgetItem(extension)
                item.setFlags(Qt.NoItemFlags)
                self._table.setItem(row, self._extension_col, item)
                # reader cell: composite widget = plugin label + remove button
                plugin_widg = QWidget()
                # need object name to easily find row
                plugin_widg.setObjectName(f'{extension}')
                plugin_widg.setLayout(QHBoxLayout())
                plugin_widg.layout().setContentsMargins(0, 0, 0, 0)
                plugin_label = QLabel(plugin_name)
                # need object name to easily work out which button was clicked
                remove_btn = QPushButton('x', objectName=f'{extension}')
                remove_btn.setFixedWidth(30)
                remove_btn.setStyleSheet('margin: 4px;')
                remove_btn.setToolTip(
                    'Remove this extension to reader association'
                )
                remove_btn.clicked.connect(self._remove_extension_assignment)
                plugin_widg.layout().addWidget(plugin_label)
                plugin_widg.layout().addWidget(remove_btn)
                self._table.setCellWidget(row, self._reader_col, plugin_widg)
        else:
            # Display that there are no extensions with reader associations
            self._table.setRowCount(1)
            self._table.setHorizontalHeaderLabels(header_strs)
            self._table.setColumnHidden(self._reader_col, True)
            self._table.setColumnWidth(self._extension_col, 200)
            item = QTableWidgetItem(trans._('No extensions found.'))
            item.setFlags(Qt.NoItemFlags)
            self._table.setItem(0, 0, item)
    def _remove_extension_assignment(self, event):
        """Delete extension-to-reader mapping from settings; remove its row."""
        # the clicked button's object name is the extension (set above)
        extension_to_remove = self.sender().objectName()
        current_settings = get_settings().plugins.extension2reader
        # need explicit assignment to new object here for persistence
        get_settings().plugins.extension2reader = {
            k: v
            for k, v in current_settings.items()
            if k != extension_to_remove
        }
        for i in range(self._table.rowCount()):
            # find the row by the object name given to its cell widget
            row_widg_name = self._table.cellWidget(
                i, self._reader_col
            ).objectName()
            if row_widg_name == extension_to_remove:
                self._table.removeRow(i)
                return
|
[
"qtpy.QtWidgets.QHBoxLayout",
"qtpy.QtWidgets.QLabel",
"qtpy.QtWidgets.QTableWidget",
"qtpy.QtWidgets.QVBoxLayout",
"qtpy.QtWidgets.QWidget",
"qtpy.QtWidgets.QPushButton",
"qtpy.QtCore.Signal",
"qtpy.QtWidgets.QTableWidgetItem"
] |
[((446, 457), 'qtpy.QtCore.Signal', 'Signal', (['int'], {}), '(int)\n', (452, 457), False, 'from qtpy.QtCore import Qt, Signal\n'), ((559, 573), 'qtpy.QtWidgets.QTableWidget', 'QTableWidget', ([], {}), '()\n', (571, 573), False, 'from qtpy.QtWidgets import QHBoxLayout, QLabel, QPushButton, QTableWidget, QTableWidgetItem, QVBoxLayout, QWidget\n'), ((662, 675), 'qtpy.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (673, 675), False, 'from qtpy.QtWidgets import QHBoxLayout, QLabel, QPushButton, QTableWidget, QTableWidgetItem, QVBoxLayout, QWidget\n'), ((1797, 1824), 'qtpy.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['extension'], {}), '(extension)\n', (1813, 1824), False, 'from qtpy.QtWidgets import QHBoxLayout, QLabel, QPushButton, QTableWidget, QTableWidgetItem, QVBoxLayout, QWidget\n'), ((1970, 1979), 'qtpy.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (1977, 1979), False, 'from qtpy.QtWidgets import QHBoxLayout, QLabel, QPushButton, QTableWidget, QTableWidgetItem, QVBoxLayout, QWidget\n'), ((2245, 2264), 'qtpy.QtWidgets.QLabel', 'QLabel', (['plugin_name'], {}), '(plugin_name)\n', (2251, 2264), False, 'from qtpy.QtWidgets import QHBoxLayout, QLabel, QPushButton, QTableWidget, QTableWidgetItem, QVBoxLayout, QWidget\n'), ((2373, 2416), 'qtpy.QtWidgets.QPushButton', 'QPushButton', (['"""x"""'], {'objectName': 'f"""{extension}"""'}), "('x', objectName=f'{extension}')\n", (2384, 2416), False, 'from qtpy.QtWidgets import QHBoxLayout, QLabel, QPushButton, QTableWidget, QTableWidgetItem, QVBoxLayout, QWidget\n'), ((2130, 2143), 'qtpy.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (2141, 2143), False, 'from qtpy.QtWidgets import QHBoxLayout, QLabel, QPushButton, QTableWidget, QTableWidgetItem, QVBoxLayout, QWidget\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 11 14:03:30 2020
@author: acpotter
"""
#%% -- IMPORTS --
import sys
sys.path.append("..") # import one subdirectory up in files
# external packages
import numpy as np
import qiskit as qk
import networkx as nx
#import tenpy
# custom things
#import mps
#%%
class ParamCircuit(object):
    """
    Parameterized circuit: a circuit object together with the names of its
    free parameters.
    """
    def __init__(self, circ, param_names):
        self.circ = circ
        self.param_names = param_names

    def bind_parameters(self, params):
        """
        Resolve symbolic circuit parameters to numerical values.

        inputs:
            params: dictionary {parameter name: value}
        outputs:
            circuit with the parameters bound

        Fix: qiskit's QuantumCircuit.bind_parameters returns a *new* bound
        circuit rather than mutating in place; the previous version discarded
        that result and returned the still-symbolic self.circ. Returning the
        bound circuit directly also matches QKParamCircuit's behavior.
        """
        return self.circ.bind_parameters(params)
class QKParamCircuit(ParamCircuit):
    """
    ParamCircuit backed by a qiskit QuantumCircuit.
    """
    def __init__(self, circ, param_names):
        self.circ = circ
        self.param_names = param_names
        self.circuit_format = 'qiskit'

    def bind_parameters(self, params):
        """Return a copy of the circuit with its parameters bound to values."""
        return self.circ.bind_parameters(params)

    def unitary(self, params):
        """
        input: params = dictionary of qiskit circuit parameters
        output: unitary matrix for the bound circuit (numpy array)
        """
        resolved_circ = self.bind_parameters(params)
        backend = qk.Aer.get_backend('unitary_simulator')
        sim_result = qk.execute(resolved_circ, backend).result()
        return sim_result.get_unitary(resolved_circ)
#%% -- ISOTENSOR CLASS --
class IsoTensor(object):
    """
    Node of an isometric tensor-network, generated by a parameterized unitary.

    Works equally for a tensor network state (TNS) or operator (TNO);
    for TNS: the physical register is implicitly assumed to start from the
    reference state |00..0>.
    Intention: the circuit object is intended to be easily adaptable to work
    equally with cirq, qiskit, etc...
    """
    def __init__(self,
                 name, # label for the tensor
                 qregs, # list of quantum registers
                 pcirc, # parameterized circuit object
                 #param_names, # list of circuit parameter names (str's)
                 meas_list=[], # list of tuples: (qreg, creg, measurement circuit, cbits)
                 circuit_format:str='qiskit', # string specifying circuit type
                 thermal = False, # whether to prepend the thermal-flip circuit
                 thermal_prob = 0 # per-site chance of flipping a physical site
                 ):
        # NOTE(review): meas_list=[] is a mutable default argument; safe only
        # as long as no caller mutates it in place.
        self.name=name
        self.qregs=qregs
        # Hilbert-space dimension of each register (2^n_qubits)
        self.regdims = [2**len(reg) for reg in qregs]
        self.circ= pcirc.circ
        self.param_names = pcirc.param_names
#        self.param_names=param_names
        self.circuit_format=circuit_format
        self.meas_list=meas_list
        # thermal_prob is indexed per site in resolve_circuit (self.p[i]),
        # so when thermal=True it is presumably a sequence — TODO confirm
        self.p =thermal_prob
        self.thermal = thermal
    def __str__(self):
        return self.name
    # NOTE(review): likely meant to be __repr__; as written this dunder is
    # never invoked by Python itself.
    def __rep__(self):
        return self.name
    ## Resolve Circuit Parameters ##
    def resolve_circuit(self,params,include_measurements=True):
        """
        resolves parameters in circuit
        inputs:
            params: dictionary of parameter names and values
            include_measurements, bool, whether or not to include measurement and reset
        outputs:
            resolved circuit
        """
        if self.circuit_format == 'qiskit':
            cres = self.circ.bind_parameters(params)
            if include_measurements:
                for qreg,creg,mcirc,cbits in self.meas_list:
                    cres = cres.combine(mcirc)
                    cres.add_register(creg)
                    # add the measurement circuit
                    cres.measure(qreg,cbits)
                    # reset so the register can be reused downstream
                    cres.reset(qreg)
            if self.thermal: #do a pre-measurement circuit to flip a site to |1> with prob. p
                pre_cir = qk.QuantumCircuit()
                for reg in self.qregs: pre_cir.add_register(reg)
                if include_measurements:
                    for qreg,creg,mcirc,cbits in self.meas_list:
                        pre_cir.add_register(creg)
                cdict = {}
                for i in range(len(self.qregs[0])):#need to match register to combine
                    # one single-bit classical register per physical site
                    cdict['c_pre'+str(i)] = qk.ClassicalRegister(1,'c_pre'+str(i))
                    cres.add_register(cdict['c_pre'+str(i)])
                    pre_cir.add_register(cdict['c_pre'+str(i)])
                    # rotate by 2*arcsin(sqrt(p)) so measuring |1> has prob. p
                    pre_cir.rx(2*np.arcsin(np.sqrt(abs(self.p[i]))),self.qregs[0][i])
                    pre_cir.measure(self.qregs[0][i],cdict['c_pre'+str(i)])
                    pre_cir.reset(self.qregs[0][i])
                    # classically-controlled flip reproduces the measured bit
                    pre_cir.x(self.qregs[0][i]).c_if(cdict['c_pre'+str(i)], 1)
                # thermal preparation runs before the main circuit
                cres = pre_cir.combine(cres)
            return cres
        else:
            raise NotImplementedError()
    def bind_params(self,params):
        """
        inputs:
            - params: dictionary {'name':value} for parameters in circuit
        outputs:
            - circuit with symbolic parameters set to numerical values
        """
        if self.circuit_format == 'qiskit':
            return self.circ.bind_parameters(params)
        else:
            raise NotImplementedError()
    ## Compute unitaries ##
    def unitary(self,params):
        """
        inputs:
            - params: dictionary {'name':value} for parameters in circuit
        outputs:
            - unitary for circuit, as numpy array with shape regdims (output legs),regdims (input legs)
        """
        if self.circuit_format == 'qiskit':
            return self.unitary_qiskit(params)
        elif self.circuit_format == 'cirq':
            return self.unitary_cirq(params)
        else:
            raise NotImplementedError('only qiskit implemented')
    def unitary_qiskit(self,params):
        """
        inputs:
            - params, dictionary {parameter:value} for parameters in circuit
              note: parameter key type depends on type of circuit
              for qiskit: parameter keys are qiskit circuit parameters
              for cirq: they are sympy symbols
        """
        # setup unitary simulator and compute unitary
        bound_circ = self.circ.bind_parameters(params)
        simulator = qk.Aer.get_backend('unitary_simulator')
        result = qk.execute(bound_circ,simulator).result()
        u = result.get_unitary(bound_circ)
        # need to re-size and re-order to be compatible with expected indexing
        # note: qiskit writes bases in opposite order of usual convention
        # e.g. for 3-qubit register: [q0,q1,q2],
        # the state 011 refers to: q0=1, q1=1, q2=0
        u = u.reshape(self.regdims[::-1]+self.regdims[::-1]) # reshape as tensor
        # reverse the register order separately on output and input legs
        nreg = len(self.qregs)
        old_order = list(range(2*nreg))
        new_order = old_order.copy()
        new_order[0:nreg] = old_order[0:nreg][::-1]
        new_order[nreg::] = old_order[nreg::][::-1]
        u = np.moveaxis(u,old_order,new_order)
        return u
    def unitary_cirq(self,params):
        """ unitary constructor for cirq-based circuits """
        # NOTE(review): `cirq` is never imported in this module, and this
        # method reads self.circuit while __init__ sets self.circ — this
        # code path will raise NameError/AttributeError as written; confirm
        # whether cirq support is still intended.
        qubit_order = [q for qreg in self.qregs for q in qreg] # order to return the qubit unitary
        # resolve the symbolic circuit parameters to numerical values
        resolver = cirq.ParamResolver(params)
        resolved_circuit = cirq.resolve_parameters(self.circuit, resolver)
        u = resolved_circuit.unitary(qubit_order = qubit_order)
        return u.reshape(self.regdims) # reshape as a multi-l
#%%
class IsoNetwork(object):
    """
    NetworkX directed graph with:
        nodes = IsoTensors
        edges have list of qubits

    To Do:
        - add global measurement register names list
        - create to_qasm function that traverses the graph and assembles
          together the qasm files for each node, adding the appropriate header
          and defining qubits and measurement registers one time in the beginning
    """
    def __init__(self,nodes=[],
                 edges=[],
                 qregs=[],
                 circuit_format='qiskit'
                 ):
        """
        nodes, list of IsoTensors
        edges, list of tuples (output node, input node, list of qubits passed along edge)
        qregs, list of qubit registers
            (for cirq: each qubit register is list of qubits,
             for qiskit, each qreg is a QuantumRegister object)
        cregs, list of classical registers
#        meas_dict, dictionary of classical registers to
#            hold measurement values for each node that gets measured
#            keys=MeasurementNode, values = list of tuples:
#                (qreg to be measured, creg that stores outcome, circuit to transform qubits to measurement basis)
#            note: keys of this define which nodes get measured
        param_assignments,
            dict with key = node, value = list of parameter objects for that node
            for qiskit: parameters are inbuilt circuit parameter
            for cirq: parameters are sympy symbols
        measurement_nodes, list of IsoTensors that get measured
            i.e. have at least one output leg that terminates in a measurement
            actual basis for measurement only specified at qasm output/simulator step

        NOTE(review): nodes/edges/qregs use mutable default arguments ([]);
        safe only as long as they are never mutated in place.
        """
        self.circuit_format=circuit_format
        # construct graph and check that is a DAG
        # check for repeated node names
        self.graph = nx.DiGraph()
        self.graph.add_nodes_from(nodes)
        self.graph.add_edges_from(edges)
        # check that graph is directed & acyclic (DAG)
        if nx.algorithms.dag.is_directed_acyclic_graph(self.graph) != True:
            raise RuntimeError('Graph must be directed and acyclic')
        # store node information
        self.nodes = nodes
        self.qregs = qregs
        # self.creg_dict = creg_dict
        self.node_names = [node.name for node in nodes]
        if len(self.node_names) != len(set(self.node_names)):
            raise ValueError('Tensor nodes must have unique names')
        # store variational parameter info
        self.param_assignments = {}
        for node in nodes:
            self.param_assignments[node]=node.param_names
        # self.param_assignments = param_assignments
        # topologically sort nodes in order of execution
        self.sorted_nodes = [node for node in nx.topological_sort(self.graph)]
    ## Circuit Construction Methods ##
    def construct_circuit(self,param_dict,include_measurements=True):
        """
        input:
            param_dict, dict of {parameter:value}
            include_measurements, bool, whether to include measurement/reset
        output:
            circuit for the whole network
        """
        if self.circuit_format=='qiskit':
            return self.construct_cirquit_qiskit(param_dict,include_measurements)
        else:
            raise NotImplementedError
    # NOTE(review): method name misspells "circuit"; kept as-is because it
    # is part of the public interface.
    def construct_cirquit_qiskit(self,param_dict,include_measurements=True):
        """
        construct circuit for network using qiskit
        """
        self.circ = qk.QuantumCircuit()
        # add quantum and classical registers
        for reg in self.qregs: self.circ.add_register(reg)
        #for reg in list(self.creg_dict.values()): self.circ.add_register(reg)
        # append each node's resolved circuit in topological (execution) order
        for node in self.sorted_nodes:
            # pick out only the parameters assigned to this node
            node_dict = {k:param_dict[k] for k in self.param_assignments[node]}
            node_circ = node.resolve_circuit(node_dict,include_measurements)
            self.circ = self.circ.combine(node_circ)
        return self.circ
    def to_qasm(self,param_dict):
        # serialize the fully-constructed network circuit to OpenQASM text
        if self.circuit_format=='qiskit':
            return self.construct_circuit(param_dict).qasm()
        else:
            raise NotImplementedError()
#%%
|
[
"sys.path.append",
"numpy.moveaxis",
"qiskit.QuantumCircuit",
"qiskit.execute",
"networkx.topological_sort",
"networkx.algorithms.dag.is_directed_acyclic_graph",
"networkx.DiGraph",
"qiskit.Aer.get_backend"
] |
[((140, 161), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (155, 161), False, 'import sys\n'), ((1513, 1552), 'qiskit.Aer.get_backend', 'qk.Aer.get_backend', (['"""unitary_simulator"""'], {}), "('unitary_simulator')\n", (1531, 1552), True, 'import qiskit as qk\n'), ((6841, 6880), 'qiskit.Aer.get_backend', 'qk.Aer.get_backend', (['"""unitary_simulator"""'], {}), "('unitary_simulator')\n", (6859, 6880), True, 'import qiskit as qk\n'), ((7544, 7580), 'numpy.moveaxis', 'np.moveaxis', (['u', 'old_order', 'new_order'], {}), '(u, old_order, new_order)\n', (7555, 7580), True, 'import numpy as np\n'), ((10105, 10117), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (10115, 10117), True, 'import networkx as nx\n'), ((11725, 11744), 'qiskit.QuantumCircuit', 'qk.QuantumCircuit', ([], {}), '()\n', (11742, 11744), True, 'import qiskit as qk\n'), ((10275, 10330), 'networkx.algorithms.dag.is_directed_acyclic_graph', 'nx.algorithms.dag.is_directed_acyclic_graph', (['self.graph'], {}), '(self.graph)\n', (10318, 10330), True, 'import networkx as nx\n'), ((1570, 1603), 'qiskit.execute', 'qk.execute', (['bound_circ', 'simulator'], {}), '(bound_circ, simulator)\n', (1580, 1603), True, 'import qiskit as qk\n'), ((4412, 4431), 'qiskit.QuantumCircuit', 'qk.QuantumCircuit', ([], {}), '()\n', (4429, 4431), True, 'import qiskit as qk\n'), ((6898, 6931), 'qiskit.execute', 'qk.execute', (['bound_circ', 'simulator'], {}), '(bound_circ, simulator)\n', (6908, 6931), True, 'import qiskit as qk\n'), ((11065, 11096), 'networkx.topological_sort', 'nx.topological_sort', (['self.graph'], {}), '(self.graph)\n', (11084, 11096), True, 'import networkx as nx\n')]
|
from tkinter import Tk, Entry, Button
import threading
# run-flag shared between the GUI thread and the counting worker thread;
# cleared by stop() to end the worker's loop
flag = True
# root window with a single text-entry field that takes initial focus
master = Tk()
e = Entry(master)
e.pack()
e.focus_set()
def enterName():
    """Print the text currently typed into the entry widget."""
    current_text = e.get()
    print(current_text)
def stop():
    """Signal the counting worker thread to exit its loop."""
    global flag
    flag = False
def exitApp():
    """Destroy the root window, which ends the Tk mainloop."""
    master.destroy()
def cycle():
    """Start a worker thread that counts upward by 2 until stop() is called.

    NOTE(review): each press of the start button spawns a fresh thread, and
    the loop busy-spins with no sleep — it will print continuously and
    consume a full CPU core until stop() clears the flag.
    """
    def callback():
        global flag
        a = 0
        # re-arm the flag so a restart after stop() works
        flag = True
        while flag:
            a = a + 2
            print(a)
    t1 = threading.Thread(target=callback)
    t1.start()
# wire the three buttons to their handlers and start the Tk event loop
b1 = Button(master, text="start", width=10, command=cycle)
b1.pack()
b2 = Button(master, text="stop", width=10, command=stop)
b2.pack()
b3 = Button(master, text="exit", width=10, command=exitApp)
b3.pack()
# blocks until exitApp() destroys the window
master.mainloop()
|
[
"tkinter.Button",
"threading.Thread",
"tkinter.Entry",
"tkinter.Tk"
] |
[((77, 81), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (79, 81), False, 'from tkinter import Tk, Entry, Button\n'), ((86, 99), 'tkinter.Entry', 'Entry', (['master'], {}), '(master)\n', (91, 99), False, 'from tkinter import Tk, Entry, Button\n'), ((455, 508), 'tkinter.Button', 'Button', (['master'], {'text': '"""start"""', 'width': '(10)', 'command': 'cycle'}), "(master, text='start', width=10, command=cycle)\n", (461, 508), False, 'from tkinter import Tk, Entry, Button\n'), ((524, 575), 'tkinter.Button', 'Button', (['master'], {'text': '"""stop"""', 'width': '(10)', 'command': 'stop'}), "(master, text='stop', width=10, command=stop)\n", (530, 575), False, 'from tkinter import Tk, Entry, Button\n'), ((591, 645), 'tkinter.Button', 'Button', (['master'], {'text': '"""exit"""', 'width': '(10)', 'command': 'exitApp'}), "(master, text='exit', width=10, command=exitApp)\n", (597, 645), False, 'from tkinter import Tk, Entry, Button\n'), ((400, 433), 'threading.Thread', 'threading.Thread', ([], {'target': 'callback'}), '(target=callback)\n', (416, 433), False, 'import threading\n')]
|
import ipaddress
import os
import errno
import logging
import sys
from typing import List, Union
import csv, io, json
dir_path = os.path.dirname(os.path.realpath(__file__))
def is_ip(string: str) -> bool:
    """Return True when *string* parses as a valid IPv4 or IPv6 address."""
    try:
        ipaddress.ip_address(string)
    except ValueError:
        return False
    return True
# Create dir if not exists
def check_directory(path: str):
    """Create the parent directory of *path* if it does not already exist.

    Improvements over the previous check-then-create version:
    * os.makedirs(..., exist_ok=True) removes the TOCTOU race the old
      errno.EEXIST guard worked around;
    * a bare filename (empty dirname) is now a no-op instead of raising
      FileNotFoundError from os.makedirs('').
    """
    directory = os.path.dirname(path)
    if directory:
        os.makedirs(directory, exist_ok=True)
def is_int(v: any) -> bool:
    """Return True when *v* represents an integer value.

    Accepts signed integers and decimals whose fractional part is all
    zeros (e.g. "5", "-5", "1.0"); rejects other decimals, ranges
    containing "..", and non-numeric text.
    """
    text = str(v).strip()
    if text == "0":
        return True
    if ".." in text:
        # range-like values ("1..2") are tested as-is and always fail
        candidate = text
    else:
        # drop sign, trailing zeros, and a trailing decimal point so
        # "-5" and "1.0" reduce to plain digit strings
        candidate = text.lstrip("-+").rstrip("0").rstrip(".")
    return candidate.isdigit()
def get_platforms(path="templates") -> list:
    """Return the list of platform template names under *path*.

    Fix: the previous guard (`if not os.path.exists(os.path.dirname(path))`)
    appears copied from check_directory and inverted the intent — the
    function silently returned None whenever the path's parent directory
    existed, violating the declared `-> list` return type. The pointless
    `except Exception as e: raise e` wrapper is also removed; a missing
    directory now raises FileNotFoundError directly to the caller.
    """
    return os.listdir(path)
def configure_logging(logger, debug=""):
    """Attach a stdout handler and a file handler to *logger* and return it.

    The file handler writes to auto-nornir.log next to this module; only
    the file handler gets the timestamped formatter (the stdout formatter
    line is deliberately commented out).

    NOTE(review): calling this more than once on the same logger adds
    duplicate handlers (duplicated log lines) — confirm callers invoke it
    exactly once.

    :param logger: logging.Logger to configure
    :param debug: any truthy value enables DEBUG level, else INFO
    :returns: the configured logger
    """
    if debug:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)
    ch = logging.StreamHandler(sys.stdout)
    fh = logging.FileHandler(f"{dir_path}/auto-nornir.log")
    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )
    # ch.setFormatter(formatter)
    fh.setFormatter(formatter)
    logger.addHandler(ch)
    logger.addHandler(fh)
    return logger
class HumanBytes:
    """Format byte counts as human-readable strings.

    USAGE
        print(HumanBytes.format(2251799813685247)) # 2 pebibytes
        print(HumanBytes.format(2000000000000000, True)) # 2 petabytes
        print(HumanBytes.format(1099511627776)) # 1 tebibyte
        print(HumanBytes.format(1000000000000, True)) # 1 terabyte
        print(HumanBytes.format(1000000000, True)) # 1 gigabyte
        print(HumanBytes.format(4318498233, precision=3)) # 4.022 gibibytes
        print(HumanBytes.format(4318498233, True, 3)) # 4.318 gigabytes
        print(HumanBytes.format(-4318498233, precision=2)) # -4.02 gibibytes
    """

    METRIC_LABELS: List[str] = ["B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
    BINARY_LABELS: List[str] = [
        "B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB",
    ]
    PRECISION_OFFSETS: List[float] = [0.5, 0.05, 0.005, 0.0005]  # PREDEFINED FOR SPEED.
    PRECISION_FORMATS: List[str] = [
        "{}{:.0f} {}", "{}{:.1f} {}", "{}{:.2f} {}", "{}{:.3f} {}",
    ]  # PREDEFINED FOR SPEED.

    @staticmethod
    def format(num: Union[int, float], metric: bool = False, precision: int = 1) -> str:
        """Human-readable formatting of bytes, using binary (powers of 1024)
        or metric (powers of 1000) representation.
        """
        assert isinstance(num, (int, float)), "num must be an int or float"
        assert isinstance(metric, bool), "metric must be a bool"
        assert (
            isinstance(precision, int) and precision >= 0 and precision <= 3
        ), "precision must be an int (range 0-3)"
        labels = HumanBytes.METRIC_LABELS if metric else HumanBytes.BINARY_LABELS
        step = 1000 if metric else 1024
        # Stop one rounding-offset short of the step so a value that would
        # round up to e.g. "1024.0 KiB" is promoted to "1.0 MiB" instead.
        threshold = step - HumanBytes.PRECISION_OFFSETS[precision]
        sign = "-" if num < 0 else ""
        magnitude = abs(num)
        chosen = labels[-1]
        for label in labels:
            if magnitude < threshold:
                chosen = label
                break
            if label != labels[-1]:
                # Shrink only while larger units remain; the accumulated
                # float error stays far below the displayed precision.
                magnitude /= step
        return HumanBytes.PRECISION_FORMATS[precision].format(
            sign, magnitude, chosen
        )
# TODO: This is not good at all. Only works on not nested jsons
def json_to_csv(js):
csv = []
keys = []
for key in js[0].keys():
keys.append(key)
for host in js:
for key in keys:
if key != "groups":
csv.append(str(host[key]))
csv.append(",")
csv.pop()
csv.append("\n")
csv = "".join(csv)
keys = ",".join(keys) + "\n"
csv_text = keys + csv
return csv_text
def csv_to_json(csv_text):
reader = csv.DictReader(io.StringIO(csv_text))
json_data = json.dumps(list(reader))
return json_data
|
[
"io.StringIO",
"logging.FileHandler",
"csv.pop",
"os.path.realpath",
"logging.StreamHandler",
"os.path.dirname",
"ipaddress.ip_address",
"logging.Formatter",
"csv.append",
"os.listdir"
] |
[((147, 173), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (163, 173), False, 'import os\n'), ((1168, 1201), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (1189, 1201), False, 'import logging\n'), ((1211, 1261), 'logging.FileHandler', 'logging.FileHandler', (['f"""{dir_path}/auto-nornir.log"""'], {}), "(f'{dir_path}/auto-nornir.log')\n", (1230, 1261), False, 'import logging\n'), ((1278, 1351), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (1295, 1351), False, 'import logging\n'), ((226, 254), 'ipaddress.ip_address', 'ipaddress.ip_address', (['string'], {}), '(string)\n', (246, 254), False, 'import ipaddress\n'), ((4928, 4937), 'csv.pop', 'csv.pop', ([], {}), '()\n', (4935, 4937), False, 'import csv, io, json\n'), ((4946, 4962), 'csv.append', 'csv.append', (['"""\n"""'], {}), "('\\n')\n", (4956, 4962), False, 'import csv, io, json\n'), ((5123, 5144), 'io.StringIO', 'io.StringIO', (['csv_text'], {}), '(csv_text)\n', (5134, 5144), False, 'import csv, io, json\n'), ((406, 427), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (421, 427), False, 'import os\n'), ((891, 912), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (906, 912), False, 'import os\n'), ((947, 963), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (957, 963), False, 'import os\n'), ((467, 488), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (482, 488), False, 'import os\n'), ((4904, 4919), 'csv.append', 'csv.append', (['""","""'], {}), "(',')\n", (4914, 4919), False, 'import csv, io, json\n')]
|
#-----------------------------------------------------
# Mimas: conference submission and review system
# (c) <NAME> 2016-2020 http://www.allankelly.net
# Licensed under MIT License, see LICENSE file
# -----------------------------------------------------
# schedule.py
#
# System imports
import datetime
# Google imports
import logging
from google.appengine.ext import ndb
# Local imports
class Slot():
def __init__(self, start, end, type):
self.start_time = start
self.end_time = end
self.slot_type = type # Tracks or Plenary
class ScheduleDay():
def __init__(self):
self.day_tracks = []
self.day_slots = {}
class Schedule(ndb.Model):
setup_days_db = ndb.PickleProperty()
assignment_db = ndb.PickleProperty() # map: Dayname -> Track -> Slot -> SubKey
def __init__(self, *args, **kwargs):
super(Schedule, self).__init__(*args, **kwargs)
self.setup_days_db = {}
self.assignment_db = {}
def day_names(self):
return self.setup_days_db.keys()
def add_day(self, day_name):
self.setup_days_db[day_name] = ScheduleDay()
self.put()
def get_day(self, day_name):
return self.setup_days_db[day_name]
def delete_day(self, day_name):
if self.setup_days_db.has_key(day_name):
del self.setup_days_db[day_name]
self.put()
def tracks(self, day_name):
if self.setup_days_db.has_key(day_name):
return self.setup_days_db[day_name].day_tracks
return []
def add_track(self, day_name, track):
self.setup_days_db[day_name].day_tracks.append(track)
self.put()
def del_track(self, day_name, track):
self.setup_days_db[day_name].day_tracks.remove(track)
self.put()
def slots(self, day_name):
if self.setup_days_db.has_key(day_name):
return self.setup_days_db[day_name].day_slots
return []
def orderd_slot_keys(self, day_name):
if self.setup_days_db.has_key(day_name):
keys = self.setup_days_db[day_name].day_slots.keys()
keys.sort()
return keys
return []
def add_slot(self, day_name, slot):
self.setup_days_db[day_name].day_slots[slot.start_time]=slot
self.put()
def delete_slot_by_start_time(self, day_name, start_time):
self.setup_days_db[day_name].day_slots.pop(start_time, None)
self.put()
def get_assignment(self, day, track, slot):
if self.assignment_db.has_key(day):
if self.assignment_db[day].has_key(track):
if self.assignment_db[day][track].has_key(slot):
return self.assignment_db[day][track][slot]
return "Empty"
def assign_talk(self, sub_key, day, track, slot):
if not(self.assignment_db.has_key(day)):
self.assignment_db[day] = {}
if not(self.assignment_db[day].has_key(track)):
self.assignment_db[day][track] = {}
self.assignment_db[day][track][slot] = sub_key
self.put()
def clear_talk(self, day, track, slot):
if not(self.assignment_db.has_key(day)):
return
if not(self.assignment_db[day].has_key(track)):
return
del self.assignment_db[day][track][slot]
self.put()
def get_assigned_submissions(self):
submissions = []
for day in self.assignment_db:
for track in self.assignment_db[day]:
for slot in self.assignment_db[day][track]:
submissions.append(self.assignment_db[day][track][slot])
return submissions
def make_schedule(conf_key):
sched = Schedule(parent=conf_key)
sched.put()
return [sched.key]
def get_conference_schedule(conf_key):
sched_keys = Schedule.query(ancestor=conf_key).fetch(keys_only=True)
if len(sched_keys) == 0:
sched_keys = make_schedule(conf_key)
return sched_keys[0]
def talkTitle(safeKey):
if safeKey=="Empty":
return "Empty"
sub = ndb.Key(urlsafe=safeKey).get()
return sub.title()
|
[
"google.appengine.ext.ndb.PickleProperty",
"google.appengine.ext.ndb.Key"
] |
[((711, 731), 'google.appengine.ext.ndb.PickleProperty', 'ndb.PickleProperty', ([], {}), '()\n', (729, 731), False, 'from google.appengine.ext import ndb\n'), ((752, 772), 'google.appengine.ext.ndb.PickleProperty', 'ndb.PickleProperty', ([], {}), '()\n', (770, 772), False, 'from google.appengine.ext import ndb\n'), ((4064, 4088), 'google.appengine.ext.ndb.Key', 'ndb.Key', ([], {'urlsafe': 'safeKey'}), '(urlsafe=safeKey)\n', (4071, 4088), False, 'from google.appengine.ext import ndb\n')]
|
"""
Provide access to the tag definitions and utilities.
Reads and parses into a dict the json tag def file. Provides access to this dict.
Takes a key_dict and applies tags per the tag definations.
"""
import logging
import json
import os
import re
import sys
import stat_key_browser
KEY_TAG_DEFS_FILENAME = 'key_tags.json'
EXTRA_ATTRS = 'xtra_attrs'
def dedupe_list(l):
s = set(l)
deduped_l = [x for x in s]
return deduped_l
class Tagger(object):
def __init__(self, defs=None):
if defs is None:
def_path = self.get_defs_path()
try:
with open(def_path, 'r') as def_file:
defs = json.load(def_file)
except IOError as err:
logging.error('Unable to open {0}: {1}'.format(def_path, err))
logging.error("Try running 'make tags' to create the tag file")
sys.exit(1)
self.tag_defs = defs
def _add_tags(self, key, tags):
key.setdefault('tags', [])
key['tags'] += tags
def _dedupe_tag_lists(self, key_dict):
for data in key_dict.values():
if 'tags' in data:
data['tags'] = dedupe_list(data['tags'])
return key_dict
def _pop_keys(self, dictionary, *args):
di = dictionary.copy()
for key in args:
try:
di.pop(key)
except KeyError:
pass
return di
def _get_extra_attrs(self, defin):
arb_attrs = self._pop_keys(defin.copy(), 'keys', 're-keys', 'tags')
for (extra_attr, val) in arb_attrs.items():
if len(val) != 1:
msg = 'Extra attibute must have a single value. {0} has value {1}'
raise ValueError(msg.format(extra_attr, val))
return arb_attrs
def _add_extra_attrs(self, key, extra_attrs):
"""Add extra attrs to a key."""
for (attr_name, val) in extra_attrs.items():
key.setdefault(EXTRA_ATTRS, {})
key[EXTRA_ATTRS][attr_name] = '\n'.join(val)
def get_defs_path(self):
"""Return path to tag definitions file."""
basedir = stat_key_browser.__path__[0]
defs_path = os.path.join(basedir, 'data', KEY_TAG_DEFS_FILENAME)
logging.debug('Expect key tag definitions at ', path=defs_path)
return defs_path
def tag_list(self):
"""Return a list of all the tags that appear in the definations."""
tags = []
for defin in self.tag_defs:
tags += defin['tags']
tags = dedupe_list(tags)
tags.sort()
return tags
def tag_keys(self, key_dict):
"""Apply tags to keys in key_dict."""
for defin in self.tag_defs:
extra_attrs = self._get_extra_attrs(defin)
for key in defin.get('keys', []):
self._add_tags(key_dict[key], defin['tags'])
self._add_extra_attrs(key_dict[key], extra_attrs)
if 're-keys' in defin:
for (key, data) in key_dict.items():
for re_key in defin['re-keys']:
if re.search(re_key, key):
self._add_tags(data, defin['tags'])
self._add_extra_attrs(key_dict[key], extra_attrs)
# Fix multiply matching keys that have duplicated tags.
key_dict = self._dedupe_tag_lists(key_dict)
return key_dict
|
[
"logging.error",
"json.load",
"logging.debug",
"re.search",
"os.path.join",
"sys.exit"
] |
[((2208, 2260), 'os.path.join', 'os.path.join', (['basedir', '"""data"""', 'KEY_TAG_DEFS_FILENAME'], {}), "(basedir, 'data', KEY_TAG_DEFS_FILENAME)\n", (2220, 2260), False, 'import os\n'), ((2269, 2332), 'logging.debug', 'logging.debug', (['"""Expect key tag definitions at """'], {'path': 'defs_path'}), "('Expect key tag definitions at ', path=defs_path)\n", (2282, 2332), False, 'import logging\n'), ((667, 686), 'json.load', 'json.load', (['def_file'], {}), '(def_file)\n', (676, 686), False, 'import json\n'), ((817, 880), 'logging.error', 'logging.error', (['"""Try running \'make tags\' to create the tag file"""'], {}), '("Try running \'make tags\' to create the tag file")\n', (830, 880), False, 'import logging\n'), ((897, 908), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (905, 908), False, 'import sys\n'), ((3132, 3154), 're.search', 're.search', (['re_key', 'key'], {}), '(re_key, key)\n', (3141, 3154), False, 'import re\n')]
|
import movie_service
import requests.exceptions
def print_header():
print("------------------------------------------------")
print(" MOVIE SEARCH APP")
print("------------------------------------------------")
def run_search_loop():
exit_cmds = ['x', 'exit', 'quit', 'q']
search_term = "Search term"
while search_term.lower() not in exit_cmds:
search_term = input("\nWhat movie do you want to search for? ")
if search_term not in exit_cmds:
try:
movies = movie_service.search_movie(search_term)
movie_service.print_movies(movies)
except requests.exceptions.ConnectionError:
print('Error: your connection is down.')
except ValueError:
print('ValueError: Inappropriate argument value.')
except Exception as e:
print(type(e))
print(e.__cause__)
print('exiting...')
def main():
print_header()
run_search_loop()
if __name__ == '__main__':
main()
|
[
"movie_service.search_movie",
"movie_service.print_movies"
] |
[((544, 583), 'movie_service.search_movie', 'movie_service.search_movie', (['search_term'], {}), '(search_term)\n', (570, 583), False, 'import movie_service\n'), ((600, 634), 'movie_service.print_movies', 'movie_service.print_movies', (['movies'], {}), '(movies)\n', (626, 634), False, 'import movie_service\n')]
|
import os.path
import threading
import wx
from ..dofile.do_file_collection import DoFileCollection
EVT_COMPLETE_ID = wx.NewId()
def evt_complete(win, func):
win.Connect(-1, -1, EVT_COMPLETE_ID, func)
class CompleteEvent(wx.PyEvent):
def __init__(self, message, success):
super().__init__()
self.SetEventType(EVT_COMPLETE_ID)
self.message = message
self.success = success
class Worker(threading.Thread):
def __init__(self, panel, xlsform_path, settings_path, output_dir):
super().__init__()
self.panel = panel
self.xlsform_path = xlsform_path
self.settings_path = settings_path
self.filename = os.path.splitext(os.path.basename(xlsform_path))[0] + '.do'
self.output_path = os.path.join(output_dir, self.filename)
def run(self):
do_files = DoFileCollection.from_file(self.xlsform_path,
settings_path=self.settings_path)
do_files.write_out(self.output_path)
message = f'Do file saved to "{self.output_path}"\n'
wx.PostEvent(self.panel, CompleteEvent(message, True))
|
[
"wx.NewId"
] |
[((121, 131), 'wx.NewId', 'wx.NewId', ([], {}), '()\n', (129, 131), False, 'import wx\n')]
|
import asyncio
from pubgate.crypto.key import get_key
from pubgate.utils.networking import deliver
class UserUtils:
@property
def key(self):
return get_key(self.uri)
@property
def following(self): return f"{self.uri}/following"
@property
def followers(self): return f"{self.uri}/followers"
@property
def inbox(self): return f"{self.uri}/inbox"
@property
def outbox(self): return f"{self.uri}/outbox"
async def forward_to_followers(self, activity):
recipients = await self.followers_get()
try:
recipients.remove(activity["actor"])
except ValueError:
pass
asyncio.ensure_future(deliver(self.key, activity, recipients))
|
[
"pubgate.crypto.key.get_key",
"pubgate.utils.networking.deliver"
] |
[((168, 185), 'pubgate.crypto.key.get_key', 'get_key', (['self.uri'], {}), '(self.uri)\n', (175, 185), False, 'from pubgate.crypto.key import get_key\n'), ((693, 732), 'pubgate.utils.networking.deliver', 'deliver', (['self.key', 'activity', 'recipients'], {}), '(self.key, activity, recipients)\n', (700, 732), False, 'from pubgate.utils.networking import deliver\n')]
|
import pytest
from tests.tools.tools import load_test_images
from img_metadata_lib.image import fetch_image
from img_metadata_lib.image import extract_metadata
@pytest.fixture(params=load_test_images())
def image(request):
return request.param
def test_extract_metadata_returns_dict(image):
assert isinstance(extract_metadata(image), dict)
|
[
"tests.tools.tools.load_test_images",
"img_metadata_lib.image.extract_metadata"
] |
[((186, 204), 'tests.tools.tools.load_test_images', 'load_test_images', ([], {}), '()\n', (202, 204), False, 'from tests.tools.tools import load_test_images\n'), ((322, 345), 'img_metadata_lib.image.extract_metadata', 'extract_metadata', (['image'], {}), '(image)\n', (338, 345), False, 'from img_metadata_lib.image import extract_metadata\n')]
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from v2x_solution.users import models as user_models
@python_2_unicode_compatible
class TimeStampedModel(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
@python_2_unicode_compatible
class Road(TimeStampedModel):
""" Road Model """
name = models.CharField(max_length=140)
location = models.CharField(max_length=140)
speed = models.SmallIntegerField(null=True)
def __str__(self):
return '{} - {}'.format(self.name, self.location)
@python_2_unicode_compatible
class Situation(TimeStampedModel):
""" Situation Model """
road = models.ForeignKey(Road, null=True, on_delete=models.CASCADE)
isimpassable = models.BooleanField()
message = models.CharField(max_length=140)
startTime = models.DateTimeField()
endTime = models.DateTimeField()
creator = models.ForeignKey(user_models.User, null=True, on_delete=models.CASCADE)
def __str__(self):
return '{} - {}'.format(self.isimpassable, self.message)
|
[
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.SmallIntegerField",
"django.db.models.DateTimeField"
] |
[((230, 269), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (250, 269), False, 'from django.db import models\n'), ((287, 322), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (307, 322), False, 'from django.db import models\n'), ((461, 493), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(140)'}), '(max_length=140)\n', (477, 493), False, 'from django.db import models\n'), ((509, 541), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(140)'}), '(max_length=140)\n', (525, 541), False, 'from django.db import models\n'), ((554, 589), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (578, 589), False, 'from django.db import models\n'), ((782, 842), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Road'], {'null': '(True)', 'on_delete': 'models.CASCADE'}), '(Road, null=True, on_delete=models.CASCADE)\n', (799, 842), False, 'from django.db import models\n'), ((862, 883), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (881, 883), False, 'from django.db import models\n'), ((898, 930), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(140)'}), '(max_length=140)\n', (914, 930), False, 'from django.db import models\n'), ((947, 969), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (967, 969), False, 'from django.db import models\n'), ((984, 1006), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (1004, 1006), False, 'from django.db import models\n'), ((1021, 1093), 'django.db.models.ForeignKey', 'models.ForeignKey', (['user_models.User'], {'null': '(True)', 'on_delete': 'models.CASCADE'}), '(user_models.User, null=True, on_delete=models.CASCADE)\n', (1038, 1093), False, 'from django.db import models\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# nnutil2 - Tensorflow utilities for training neural networks
# Copyright (c) 2019, <NAME> <<EMAIL>>
#
# This file is part of 'nnutil2'.
#
# This file may be modified and distributed under the terms of the 3-clause BSD
# license. See the LICENSE file for details.
import unittest
import tensorflow as tf
import nnutil2 as nnu
class LinalgSymmetrize(tf.test.TestCase):
def setUp(self):
pass
def test_linalg_symmetrize_1(self):
N = 32
batch_size = 4
shape = (batch_size, N, N)
A = tf.random.normal(shape=shape)
A_sym = nnu.linalg.symmetrize(A, axis=[-1, -2])
self.assertAllClose(A_sym, tf.linalg.matrix_transpose(A_sym))
A_sym2 = nnu.linalg.symmetrize(A_sym, axis=[-1, -2])
self.assertAllClose(A_sym, A_sym2)
def test_linalg_antisymmetrize_1(self):
N = 32
batch_size = 4
shape = (batch_size, N, N)
A = tf.random.normal(shape=shape)
A_ant = nnu.linalg.antisymmetrize(A, axis=[-1, -2])
self.assertAllClose(A_ant, -tf.linalg.matrix_transpose(A_ant))
A_ant2 = nnu.linalg.antisymmetrize(A_ant, axis=[-1, -2])
self.assertAllClose(A_ant, A_ant2)
if __name__ == '__main__':
tf.test.main()
|
[
"tensorflow.test.main",
"tensorflow.random.normal",
"nnutil2.linalg.antisymmetrize",
"tensorflow.linalg.matrix_transpose",
"nnutil2.linalg.symmetrize"
] |
[((1281, 1295), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (1293, 1295), True, 'import tensorflow as tf\n'), ((582, 611), 'tensorflow.random.normal', 'tf.random.normal', ([], {'shape': 'shape'}), '(shape=shape)\n', (598, 611), True, 'import tensorflow as tf\n'), ((629, 668), 'nnutil2.linalg.symmetrize', 'nnu.linalg.symmetrize', (['A'], {'axis': '[-1, -2]'}), '(A, axis=[-1, -2])\n', (650, 668), True, 'import nnutil2 as nnu\n'), ((757, 800), 'nnutil2.linalg.symmetrize', 'nnu.linalg.symmetrize', (['A_sym'], {'axis': '[-1, -2]'}), '(A_sym, axis=[-1, -2])\n', (778, 800), True, 'import nnutil2 as nnu\n'), ((978, 1007), 'tensorflow.random.normal', 'tf.random.normal', ([], {'shape': 'shape'}), '(shape=shape)\n', (994, 1007), True, 'import tensorflow as tf\n'), ((1025, 1068), 'nnutil2.linalg.antisymmetrize', 'nnu.linalg.antisymmetrize', (['A'], {'axis': '[-1, -2]'}), '(A, axis=[-1, -2])\n', (1050, 1068), True, 'import nnutil2 as nnu\n'), ((1158, 1205), 'nnutil2.linalg.antisymmetrize', 'nnu.linalg.antisymmetrize', (['A_ant'], {'axis': '[-1, -2]'}), '(A_ant, axis=[-1, -2])\n', (1183, 1205), True, 'import nnutil2 as nnu\n'), ((704, 737), 'tensorflow.linalg.matrix_transpose', 'tf.linalg.matrix_transpose', (['A_sym'], {}), '(A_sym)\n', (730, 737), True, 'import tensorflow as tf\n'), ((1105, 1138), 'tensorflow.linalg.matrix_transpose', 'tf.linalg.matrix_transpose', (['A_ant'], {}), '(A_ant)\n', (1131, 1138), True, 'import tensorflow as tf\n')]
|
import os
from pydantic.main import ModelMetaclass
from confme.utils.dict_util import flatten, InfiniteDict
from confme.utils.typing import get_schema
def env_overwrite(config_cls: ModelMetaclass):
# extract possible parameters
config_dict = get_schema(config_cls)
parameters, _ = flatten(config_dict)
# make env variables case insensitive
keys, values = zip(*os.environ.items())
keys = [k.casefold() for k in keys]
# find passed arguments and fill it into the dict structure
infinite_dict = InfiniteDict()
for p in parameters:
if p.casefold() in keys:
i = keys.index(p.casefold())
infinite_dict.expand(p.split('.'), values[i])
return infinite_dict
|
[
"os.environ.items",
"confme.utils.typing.get_schema",
"confme.utils.dict_util.InfiniteDict",
"confme.utils.dict_util.flatten"
] |
[((254, 276), 'confme.utils.typing.get_schema', 'get_schema', (['config_cls'], {}), '(config_cls)\n', (264, 276), False, 'from confme.utils.typing import get_schema\n'), ((297, 317), 'confme.utils.dict_util.flatten', 'flatten', (['config_dict'], {}), '(config_dict)\n', (304, 317), False, 'from confme.utils.dict_util import flatten, InfiniteDict\n'), ((530, 544), 'confme.utils.dict_util.InfiniteDict', 'InfiniteDict', ([], {}), '()\n', (542, 544), False, 'from confme.utils.dict_util import flatten, InfiniteDict\n'), ((385, 403), 'os.environ.items', 'os.environ.items', ([], {}), '()\n', (401, 403), False, 'import os\n')]
|
import subprocess
from threading import Thread
import file_handler
class Worker(Thread):
def __init__(self, directory, parent):
Thread.__init__(self)
self.daemon = True
self.directory = directory
self.parent = parent
self.done = False
def run(self):
process = subprocess.Popen(['ls', '-lah', self.directory], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.communicate()[0].split('\n')
if process.returncode != 0:
self.done = True
else:
self.send_to_file(output)
for line in output[3:]:
f = file_handler.File(line)
if f.is_directory and not f.is_empty:
self.parent.add_to_queue(self.directory + f.name)
self.done = True
def send_to_file(self, output):
if len(output) < 3:
return
first_file = file_handler.File(output[3])
first_file.path = self.directory + first_file.name
files = [first_file]
should_use_wildcard = True
permissions = first_file.permissions + first_file.owner + first_file.group
for line in output[3:]:
f = file_handler.File(line)
f.path = self.directory + f.name
if f.name == '':
continue
files.append(f)
if permissions != f.permissions + f.owner + f.group:
should_use_wildcard = False
if should_use_wildcard:
first_file.path = self.directory + '*'
self.parent.write(first_file)
else:
for f in files:
self.parent.write(f)
|
[
"file_handler.File",
"threading.Thread.__init__",
"subprocess.Popen"
] |
[((143, 164), 'threading.Thread.__init__', 'Thread.__init__', (['self'], {}), '(self)\n', (158, 164), False, 'from threading import Thread\n'), ((320, 420), 'subprocess.Popen', 'subprocess.Popen', (["['ls', '-lah', self.directory]"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['ls', '-lah', self.directory], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n", (336, 420), False, 'import subprocess\n'), ((922, 950), 'file_handler.File', 'file_handler.File', (['output[3]'], {}), '(output[3])\n', (939, 950), False, 'import file_handler\n'), ((1205, 1228), 'file_handler.File', 'file_handler.File', (['line'], {}), '(line)\n', (1222, 1228), False, 'import file_handler\n'), ((644, 667), 'file_handler.File', 'file_handler.File', (['line'], {}), '(line)\n', (661, 667), False, 'import file_handler\n')]
|
from modules.Fastx import *
from subprocess import run
import argparse
import struct
import mmap
import sys
import os
def load_query_ids(query_ids_path):
ids = list()
with open(query_ids_path, 'r') as file:
for line in file:
if line == '\n':
continue
ids.append(line.strip())
return ids
def main(fastq_path, query_ids_path):
faidx_path = build_index(fastq_path)
name_to_offset, index_elements = load_fastq_index(faidx_path=faidx_path)
queries = load_query_ids(query_ids_path=query_ids_path)
output_path = os.path.splitext(fastq_path)[0] + "_" + os.path.splitext(os.path.basename(query_ids_path))[0] + ".fastq"
sys.stderr.write("Writing to: " + output_path + '\n')
with open(fastq_path, 'rb') as input_file, open(output_path, 'wb') as output_file:
mm = mmap.mmap(input_file.fileno(), 0, prot=mmap.PROT_READ)
for name in queries:
print("fetching %s" % name)
if name in name_to_offset:
offset_index = name_to_offset[name]
else:
exit("ERROR: read name not found in fastq index")
if offset_index < len(index_elements):
index_element = index_elements[offset_index]
else:
exit("ERROR: attempted to access fastq index element " + offset_index + " which is greater than the "
"size of the list of indexes")
s = extract_bytes_from_file(mmap_file_object=mm,
offset=index_element.sequence_offset,
n_bytes=index_element.length)
q = extract_bytes_from_file(mmap_file_object=mm,
offset=index_element.quality_offset,
n_bytes=index_element.length)
output_file.write(b'@')
output_file.write(name.encode('utf-8'))
output_file.write(b'\n')
output_file.write(s)
output_file.write(b'\n')
output_file.write(b'+')
output_file.write(b'\n')
output_file.write(q)
output_file.write(b'\n')
return
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--fastq",
type=str,
required=True,
help="path of file containing FASTA/FASTQ sequence"
)
parser.add_argument(
"--ids",
type=str,
required=True,
help="path of file containing 1 id per line to be queried"
)
args = parser.parse_args()
main(
fastq_path=args.fastq,
query_ids_path=args.ids,
)
|
[
"os.path.splitext",
"sys.stderr.write",
"argparse.ArgumentParser",
"os.path.basename"
] |
[((700, 753), 'sys.stderr.write', 'sys.stderr.write', (["('Writing to: ' + output_path + '\\n')"], {}), "('Writing to: ' + output_path + '\\n')\n", (716, 753), False, 'import sys\n'), ((2270, 2295), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2293, 2295), False, 'import argparse\n'), ((591, 619), 'os.path.splitext', 'os.path.splitext', (['fastq_path'], {}), '(fastq_path)\n', (607, 619), False, 'import os\n'), ((648, 680), 'os.path.basename', 'os.path.basename', (['query_ids_path'], {}), '(query_ids_path)\n', (664, 680), False, 'import os\n')]
|
import discord
from discord.ext import commands
import json
import datetime
import re
with open('Percorso specificato (usate il \)', 'r') as settings:
options = json.load(settings)
client = commands.Bot(command_prefix="!")
@client.event
async def on_ready():
print(f"SwapiTeams Automoderation Bot, activated! Loggato come: {client.user}. Ore: {datetime.datetime.utcnow()}. Contact SwapiTeams for support: https://discord.gg/CJ8t5sgBaA. Versione: {options['version']}")
if options['automod'] == True:
print(f"Caricato il settings.json | Automod: Attivato")
elif options['automod'] == False:
print(f"Caricato il settings.json | Automod: Disattivato")
else:
print("Non sono riuscito a caricare il settings.json")
def msg_cont(message, word):
return re.search(fr'\b({word})\b', message) is not None
@client.event
async def on_message(message):
bannedwords = options['bannedwords']
if options['automod'] == True:
if bannedwords != None and (isinstance(message.channel, discord.channel.DMChannel) == False):
for bannedword in bannedwords:
if msg_cont(message.content.lower(), bannedword):
await message.delete()
embed = discord.Embed(
title="Automod",
description=f"{message.author} non puoi scrivere questa parola",
color=0x1d1d1d
)
await message.channel.send(embed=embed)
print(f"{message.author} ha scritto {message.content} in {message.channel}")
await client.process_commands(message)
client.run(f"{options['token']}")
|
[
"json.load",
"discord.Embed",
"datetime.datetime.utcnow",
"discord.ext.commands.Bot",
"re.search"
] |
[((210, 242), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': '"""!"""'}), "(command_prefix='!')\n", (222, 242), False, 'from discord.ext import commands\n'), ((178, 197), 'json.load', 'json.load', (['settings'], {}), '(settings)\n', (187, 197), False, 'import json\n'), ((825, 862), 're.search', 're.search', (['f"""\\\\b({word})\\\\b"""', 'message'], {}), "(f'\\\\b({word})\\\\b', message)\n", (834, 862), False, 'import re\n'), ((373, 399), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (397, 399), False, 'import datetime\n'), ((1290, 1405), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Automod"""', 'description': 'f"""{message.author} non puoi scrivere questa parola"""', 'color': '(1907997)'}), "(title='Automod', description=\n f'{message.author} non puoi scrivere questa parola', color=1907997)\n", (1303, 1405), False, 'import discord\n')]
|
# -*- coding: utf-8 -*-
""" Test Access to Fomabin
"""
from unittest import TestCase
import os
from wordweaver.data import data_dir
from wordweaver.fst.utils.foma_access import foma_access
from wordweaver.log import logger
class TestFoma_access_shell(TestCase):
path_to_foma = None
foma_shell = None
fomabin_name = 'toy-kawe-stressed-markup.fomabin'
def setUp(self):
self.path_to_foma = os.path.join(data_dir, 'fomabins')
self.foma_shell = foma_access(os.path.join(self.path_to_foma, self.fomabin_name))
def test_up(self):
verb = "^PP-^ke^R-^'níkhons^H^"
res = self.foma_shell.up(verb)
if len(res) != 1:
self.fail("Expected one answer for '^PP-^ke^R-^'níkhons^H^'")
else:
for r in res:
logger.debug(r)
def test_down(self):
tags = 'Verb+Active+AgentSg1+PatSg3Neuter+7nikhon-r+Habitual'
res = self.foma_shell.down(tags)
if len(res) != 1:
self.fail("Excpected a single answer for Verb+Active+AgentSg1+PatSg3Neuter+7nikhon-r+Habitual")
elif res[0] != "^PP-^ke^R-^'níkhons^H^":
self.fail('Expected "^PP-^ke^R-^\'níkhons^H^", got ' + res[0])
else:
logger.debug(res[0])
def test_execute_foma_command(self):
res = self.foma_shell.execute_foma_command('random_upper')
for r in res:
logger.debug(r)
|
[
"os.path.join",
"wordweaver.log.logger.debug"
] |
[((416, 450), 'os.path.join', 'os.path.join', (['data_dir', '"""fomabins"""'], {}), "(data_dir, 'fomabins')\n", (428, 450), False, 'import os\n'), ((489, 539), 'os.path.join', 'os.path.join', (['self.path_to_foma', 'self.fomabin_name'], {}), '(self.path_to_foma, self.fomabin_name)\n', (501, 539), False, 'import os\n'), ((1401, 1416), 'wordweaver.log.logger.debug', 'logger.debug', (['r'], {}), '(r)\n', (1413, 1416), False, 'from wordweaver.log import logger\n'), ((800, 815), 'wordweaver.log.logger.debug', 'logger.debug', (['r'], {}), '(r)\n', (812, 815), False, 'from wordweaver.log import logger\n'), ((1237, 1257), 'wordweaver.log.logger.debug', 'logger.debug', (['res[0]'], {}), '(res[0])\n', (1249, 1257), False, 'from wordweaver.log import logger\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import logging as log
import yaml
import os
def get_settings_yml_file():
yml_file = None
config_file = "configs/settings.yaml"
try:
with open(config_file, 'r') as yml:
yml_file = yaml.load(yml, Loader=yaml.SafeLoader)
except KeyError:
log.error("Couldn't find {}", config_file)
exit()
return yml_file
def config_path():
yml_file = get_settings_yml_file()
try:
return yml_file['config']['path']
except KeyError:
log.error("No config path in settings file")
return "Missing key!"
def repo_bin():
yml_file = get_settings_yml_file()
try:
return yml_file['repo']['bin']
except KeyError:
log.error("No repo bin in settings file")
return "Missing key!"
def repo_reference():
yml_file = get_settings_yml_file()
try:
return yml_file['repo']['reference']
except KeyError:
log.error("No repo reference in settings file")
return "Missing key!"
def aarch32_toolchain_path():
yml_file = get_settings_yml_file()
try:
return yml_file['toolchain']['aarch32_path']
except KeyError:
log.error("No aarch32 toolchain in settings file")
return "Missing key!"
def aarch64_toolchain_path():
yml_file = get_settings_yml_file()
try:
return yml_file['toolchain']['aarch64_path']
except KeyError:
log.error("No aarch64 toolchain in settings file")
return "Missing key!"
def aarch32_prefix():
yml_file = get_settings_yml_file()
try:
return yml_file['toolchain']['aarch32_prefix']
except KeyError:
log.error("No aarch32 prefix in settings file")
return "Missing key!"
def aarch64_prefix():
yml_file = get_settings_yml_file()
try:
return yml_file['toolchain']['aarch64_prefix']
except KeyError:
log.error("No aarch64 prefix in settings file")
return "Missing key!"
def workspace_path():
yml_file = get_settings_yml_file()
try:
return yml_file['workspace']['path']
except KeyError:
log.error("No workspace path in settings file")
return "Missing key!"
def log_dir():
try:
if os.environ['IBART_LOG_DIR']:
return os.environ['IBART_LOG_DIR']
except KeyError:
pass
yml_file = get_settings_yml_file()
try:
return yml_file['log']['dir']
except KeyError:
log.error("No log dir in settings file")
return "Missing key!"
def log_file():
try:
if os.environ['IBART_CORE_LOG']:
return os.environ['IBART_CORE_LOG']
except KeyError:
pass
yml_file = get_settings_yml_file()
try:
return yml_file['log']['file']
except KeyError:
log.error("No log file specified in settings file or env")
return "Missing key!"
def db_file():
try:
if os.environ['IBART_DB_FILE']:
return os.environ['IBART_DB_FILE']
except KeyError:
pass
yml_file = get_settings_yml_file()
try:
return yml_file['db']['file']
except KeyError:
log.error("No db file specified in settings file or env")
return "Missing key!"
def jobdefs_path():
try:
if os.environ['IBART_JOBDEFS']:
return os.environ['IBART_JOBDEFS']
except KeyError:
pass
yml_file = get_settings_yml_file()
try:
return yml_file['jobs']['path']
except KeyError:
log.error("No jobdefs folder specified in settings file or env")
return "Missing key!"
def remote_jobs():
yml_file = get_settings_yml_file()
my_jobs = []
try:
yml_iter = yml_file['jobs']['remotedefs']
for i in yml_iter:
my_jobs.append("{}".format(i))
except KeyError:
log.error("No remote jobdefs in settings file")
return "Missing key!"
return my_jobs
###############################################################################
# Everything below this line is just for debugging this
###############################################################################
def foo():
yml_file = get_settings_yml_file()
try:
return yml_file['foo']['aarch64_path']
except KeyError:
return "Missing key!"
def initialize():
log.info("Configure settings")
log.debug("config: {}".format(config_path()))
log.debug("repo binary: {}".format(repo_bin()))
log.debug("repo reference: {}".format(repo_reference()))
log.debug("aarch32_toolchain_path: {}".format(aarch32_toolchain_path()))
log.debug("aarch64_toolchain_path: {}".format(aarch64_toolchain_path()))
log.debug("aarch32_prefix: {}".format(aarch32_prefix()))
log.debug("aarch64_prefix: {}".format(aarch64_prefix()))
log.debug("workspace_path: {}".format(workspace_path()))
log.debug("log_dir: {}".format(log_dir()))
log.debug("log_file: {}".format(log_file()))
log.debug("db_file: {}".format(db_file()))
log.debug("config_path: {}".format(config_path()))
log.debug("remote_jobs: {}".format(remote_jobs()))
def initialize_logger():
LOG_FMT = ("[%(levelname)s] %(funcName)s():%(lineno)d %(message)s")
log.basicConfig(
# filename="core.log",
level=log.DEBUG,
format=LOG_FMT,
filemode='w')
if __name__ == "__main__":
initialize_logger()
initialize()
foo()
|
[
"logging.info",
"yaml.load",
"logging.error",
"logging.basicConfig"
] |
[((4369, 4399), 'logging.info', 'log.info', (['"""Configure settings"""'], {}), "('Configure settings')\n", (4377, 4399), True, 'import logging as log\n'), ((5258, 5320), 'logging.basicConfig', 'log.basicConfig', ([], {'level': 'log.DEBUG', 'format': 'LOG_FMT', 'filemode': '"""w"""'}), "(level=log.DEBUG, format=LOG_FMT, filemode='w')\n", (5273, 5320), True, 'import logging as log\n'), ((261, 299), 'yaml.load', 'yaml.load', (['yml'], {'Loader': 'yaml.SafeLoader'}), '(yml, Loader=yaml.SafeLoader)\n', (270, 299), False, 'import yaml\n'), ((329, 371), 'logging.error', 'log.error', (['"""Couldn\'t find {}"""', 'config_file'], {}), '("Couldn\'t find {}", config_file)\n', (338, 371), True, 'import logging as log\n'), ((548, 592), 'logging.error', 'log.error', (['"""No config path in settings file"""'], {}), "('No config path in settings file')\n", (557, 592), True, 'import logging as log\n'), ((757, 798), 'logging.error', 'log.error', (['"""No repo bin in settings file"""'], {}), "('No repo bin in settings file')\n", (766, 798), True, 'import logging as log\n'), ((975, 1022), 'logging.error', 'log.error', (['"""No repo reference in settings file"""'], {}), "('No repo reference in settings file')\n", (984, 1022), True, 'import logging as log\n'), ((1215, 1265), 'logging.error', 'log.error', (['"""No aarch32 toolchain in settings file"""'], {}), "('No aarch32 toolchain in settings file')\n", (1224, 1265), True, 'import logging as log\n'), ((1458, 1508), 'logging.error', 'log.error', (['"""No aarch64 toolchain in settings file"""'], {}), "('No aarch64 toolchain in settings file')\n", (1467, 1508), True, 'import logging as log\n'), ((1695, 1742), 'logging.error', 'log.error', (['"""No aarch32 prefix in settings file"""'], {}), "('No aarch32 prefix in settings file')\n", (1704, 1742), True, 'import logging as log\n'), ((1929, 1976), 'logging.error', 'log.error', (['"""No aarch64 prefix in settings file"""'], {}), "('No aarch64 prefix in settings file')\n", (1938, 1976), True, 
'import logging as log\n'), ((2153, 2200), 'logging.error', 'log.error', (['"""No workspace path in settings file"""'], {}), "('No workspace path in settings file')\n", (2162, 2200), True, 'import logging as log\n'), ((2494, 2534), 'logging.error', 'log.error', (['"""No log dir in settings file"""'], {}), "('No log dir in settings file')\n", (2503, 2534), True, 'import logging as log\n'), ((2832, 2890), 'logging.error', 'log.error', (['"""No log file specified in settings file or env"""'], {}), "('No log file specified in settings file or env')\n", (2841, 2890), True, 'import logging as log\n'), ((3184, 3241), 'logging.error', 'log.error', (['"""No db file specified in settings file or env"""'], {}), "('No db file specified in settings file or env')\n", (3193, 3241), True, 'import logging as log\n'), ((3542, 3606), 'logging.error', 'log.error', (['"""No jobdefs folder specified in settings file or env"""'], {}), "('No jobdefs folder specified in settings file or env')\n", (3551, 3606), True, 'import logging as log\n'), ((3872, 3919), 'logging.error', 'log.error', (['"""No remote jobdefs in settings file"""'], {}), "('No remote jobdefs in settings file')\n", (3881, 3919), True, 'import logging as log\n')]
|
from importlib import import_module
import os
import re
from django.core.management.base import CommandError
from django.core.management.templates import TemplateCommand
class Command(TemplateCommand):
help = (
"Creates a Django app directory structure for the given app name in "
"the current directory or optionally in the given directory."
)
missing_args_message = "You must provide an application name."
def handle(self, **options):
app_name, target = options.pop('name'), options.pop('directory')
self.validate_name(app_name, "app")
# Check that the app_name cannot be imported.
try:
import_module(app_name)
except ImportError:
pass
else:
raise CommandError(
"%r conflicts with the name of an existing Python module and "
"cannot be used as an app name. Please try another name." % app_name
)
super(Command, self).handle('app', app_name, target, **options)
filepath_proj = os.path.join(os.getcwd(),os.getcwd().split(os.sep)[-1])
filepath_app = os.path.join(os.getcwd(), app_name)
filepath_base = os.getcwd()
template_path = os.path.join(filepath_base, 'templates', app_name)
css_path = os.path.join(filepath_base, 'static', 'css', app_name)
js_path = os.path.join(filepath_base, 'static', 'js', app_name)
try:
os.mkdir(template_path)
os.mkdir(css_path)
os.mkdir(js_path)
except:
raise
css_template = '/* Blank CSS Sheet - Reset CSS with CTRL + F5 to Clear Browser Cache*/'
with open(os.path.join(css_path, "{}.css".format(app_name)), 'w') as f:
f.write(css_template)
f.close()
js_template = '// Blank Javascript File - Reset Javascript with CTRL + F5 to Clear Browser Cache'
with open(os.path.join(js_path, '{}.js'.format(app_name)), 'w') as f:
f.write(js_template)
f.close()
base_template = '''<!DOCTYPE html>
{{% load static %}}
<html>
<head>
<title></title>
<link rel='stylesheet' href='{{% static "sitepackages/bootstrap.min.css" %}}'>
<link rel='stylesheet' href='{{% static "{}" %}}'>
<meta name='viewport' content='width=device-width, initial-scale=1, shrink-to-fit=no'>
</head>
<body>
<!-- Insert repeated body code here -->
<div>
{{% block body_block %}}
<!-- Anything outside of this will be inherited if you extend -->
{{% endblock %}}
</div>
<script src='{{% static "sitepackages/jquery.min.js" %}}'></script>
<script src='{{% static "sitepackages/popper.min.js" %}}'></script>
<script src='{{% static "sitepackages/bootstrap.min.js" %}}'></script>
<script src='{{% static "{}" %}}'></script>
</body>
</html>'''.format(os.path.join('css',app_name,'{}.css'.format(app_name)), os.path.join('js',app_name,'{}.js'.format(app_name)))
extension_template = '''{{% extends "{}" %}}
{{% load {}_custom_tags %}}
{{% block body_block %}}
<!-- Add in your body html for this page here -->
{{% endblock %}}'''.format(app_name, os.path.join(app_name,'{}_base.html'.format(app_name)))
with open(os.path.join(template_path, '{}_base.html'.format(app_name)), 'w') as f:
f.write(base_template)
f.close()
with open(os.path.join(template_path, '{}_extension.html'.format(app_name)), 'w') as f:
f.write(extension_template)
f.close()
with open(os.path.join(filepath_proj,'settings.py'), 'r') as f:
file_string = f.read()
f.close()
pattern = re.compile(r"INSTALLED_APPS\s=\s\[\n*\s*")
matches = pattern.finditer(file_string)
for match in matches:
stop = match.span()[-1]
new_file_string = file_string[:stop] + "'" + app_name + "'" + ',' + '\n\t' + file_string[stop:]
with open(os.path.join(filepath_proj,'settings.py'), 'w') as f:
f.write(new_file_string)
f.close()
with open(os.path.join(filepath_app, 'forms.py'), 'w') as f:
form_text = '''from django import forms
from django.core import validators
from django.contrib.auth.models import User
# from {}.models import model_name(s)
# In the HTML don't forget to add csrf_token!!
# class Form_Name(forms.Form):
# name = forms.CharField()
# email = forms.EmailField()
# text = forms.CharField(widget = forms.Textarea)
# botcatcher = forms.CharField(required = False, widget = forms.HiddenInput, validators=[validators.MaxLengthValidator(0)])
# class Form_From_Model(forms.ModelForm):
# class Meta:
# model = model_name
# ##Several options for how to specify fields:
# #Option 1
# fields = '__all__'
# #Option 2
# exclude = ['field_one', 'field_two'] ##include but all specified
# #Option 3
# fields = ('field_one', 'field_two') ##include only specified'''.format(app_name)
f.write(form_text)
f.close()
with open(os.path.join(filepath_app, 'urls.py'), 'w') as f:
urls_text = '''from django.conf.urls import url
from ''' + app_name + ' import views' + '\n\n' + '''app_name = '{}'
urlpatterns = [
]'''.format(app_name)
f.write(urls_text)
f.close()
with open(os.path.join(filepath_proj, 'urls.py'), 'r') as f:
main_urls_text = f.read()
f.close()
pattern = re.compile(r'from\s(.*|\s*)admin')
matches = pattern.finditer(main_urls_text)
for match in matches:
stop = match.span()[-1]
newstring = main_urls_text[:stop] + '\n' + 'from ' + app_name + ' import views' + main_urls_text[stop:]
with open(os.path.join(filepath_proj, 'urls.py'), 'w') as f:
f.write(newstring)
f.close()
try:
os.mkdir(os.path.join(filepath_app, 'templatetags'))
except:
raise
if os.path.exists(os.path.join(filepath_app, 'templatetags')) == True:
with open(os.path.join(filepath_app, 'templatetags', '__init__.py'), 'w') as f:
f.write('')
f.close()
with open(os.path.join(filepath_app, 'templatetags', '{}_custom_tags.py'.format(app_name)), 'w') as f:
tag_template = '''from django import template
register = template.Library()
#<EMAIL>(name = 'filtername')
#def example(value,arg):
#Do something to value based on arg here#
##Ex. return value.replace(arg,'')##
#return value'''
f.write(tag_template)
f.close()
with open(os.path.join(filepath_app, 'views.py'), 'r') as f:
app_views_text = f.read()
f.close()
pattern = re.compile(r'from django.shortcuts import render')
matches = pattern.finditer(app_views_text)
for match in matches:
stop = match.span()[-1]
newstring = app_views_text[:stop] + '''\nfrom django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth import authenticate, login, logout''' + '\n' + '#from {}.forms import Form_Name(s)\n'.format(app_name) + '#from {}.models import Model_Name(s)\n\n'.format(app_name) + '#Remember to add LOGIN_URL = "/app_name/user_login" to settings.py if you are adding login' + app_views_text[stop:] + '''\n# def index(request):
# return render(request, '{}')
# def formview(request):
# form = Form_Name()
# if request.method == 'POST':
# form = Form_Name(request.POST)
# if form.is_valid():
# form.save(commit = True)
# return index(request)
# else:
# print('Error')
# return render(request, '{}', {{'form':form}})
# def modelview(request):
# example_data = Model_Name.objects.all()
# context_dict = {{'data_list':example_data}}
# return render(request, '{}', context = context_dict)'''.format(os.path.join(app_name,'index.html'), os.path.join(app_name,'form_page.html'), os.path.join(app_name,'model_page.html'))
with open(os.path.join(filepath_app, 'views.py'), 'w') as f:
f.write(newstring)
f.close()
with open(os.path.join(filepath_app, 'models.py'), 'r') as f:
app_models_text = f.read()
f.close()
newstring = app_models_text + '''\n# class Model_Name(models.Model):
# f_name = models.CharField(max_length = 256)
# l_name = models.CharField(max_length = 256)
# def __str__(self):
# return str(self.f_name + ' ' + self.l_name)'''
with open(os.path.join(filepath_app, 'models.py'), 'w') as f:
f.write(newstring)
f.close()
with open(os.path.join(filepath_base, 'auto_populate.py'), 'w') as f:
generator_template = '''import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', '{}.settings')
import django
django.setup()
import faker
import random
from {}.models import #Model_Name(s)
f = faker.Faker()
def populate(N):
for entry in range(N):
# fake_fname = f.first_name()
# fake_lname = f.last_name()
# fake_email = f.free_email()
# added_object = Model_Name.objects.get_or_create(f_name = fake_fname, l_name = fake_lname, email = fake_email)[0]
if __name__ == '__main__':
print('populating data')
populate(N = )
print('data population complete')'''.format(os.getcwd().split(os.sep)[-1], app_name)
f.write(generator_template)
f.close()
|
[
"os.mkdir",
"importlib.import_module",
"os.getcwd",
"django.core.management.base.CommandError",
"os.path.join",
"re.compile"
] |
[((1200, 1211), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1209, 1211), False, 'import os\n'), ((1237, 1287), 'os.path.join', 'os.path.join', (['filepath_base', '"""templates"""', 'app_name'], {}), "(filepath_base, 'templates', app_name)\n", (1249, 1287), False, 'import os\n'), ((1307, 1361), 'os.path.join', 'os.path.join', (['filepath_base', '"""static"""', '"""css"""', 'app_name'], {}), "(filepath_base, 'static', 'css', app_name)\n", (1319, 1361), False, 'import os\n'), ((1380, 1433), 'os.path.join', 'os.path.join', (['filepath_base', '"""static"""', '"""js"""', 'app_name'], {}), "(filepath_base, 'static', 'js', app_name)\n", (1392, 1433), False, 'import os\n'), ((3727, 3773), 're.compile', 're.compile', (['"""INSTALLED_APPS\\\\s=\\\\s\\\\[\\\\n*\\\\s*"""'], {}), "('INSTALLED_APPS\\\\s=\\\\s\\\\[\\\\n*\\\\s*')\n", (3737, 3773), False, 'import re\n'), ((5585, 5620), 're.compile', 're.compile', (['"""from\\\\s(.*|\\\\s*)admin"""'], {}), "('from\\\\s(.*|\\\\s*)admin')\n", (5595, 5620), False, 'import re\n'), ((6899, 6948), 're.compile', 're.compile', (['"""from django.shortcuts import render"""'], {}), "('from django.shortcuts import render')\n", (6909, 6948), False, 'import re\n'), ((670, 693), 'importlib.import_module', 'import_module', (['app_name'], {}), '(app_name)\n', (683, 693), False, 'from importlib import import_module\n'), ((771, 923), 'django.core.management.base.CommandError', 'CommandError', (["('%r conflicts with the name of an existing Python module and cannot be used as an app name. Please try another name.'\n % app_name)"], {}), "(\n '%r conflicts with the name of an existing Python module and cannot be used as an app name. 
Please try another name.'\n % app_name)\n", (783, 923), False, 'from django.core.management.base import CommandError\n'), ((1074, 1085), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1083, 1085), False, 'import os\n'), ((1153, 1164), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1162, 1164), False, 'import os\n'), ((1460, 1483), 'os.mkdir', 'os.mkdir', (['template_path'], {}), '(template_path)\n', (1468, 1483), False, 'import os\n'), ((1496, 1514), 'os.mkdir', 'os.mkdir', (['css_path'], {}), '(css_path)\n', (1504, 1514), False, 'import os\n'), ((1527, 1544), 'os.mkdir', 'os.mkdir', (['js_path'], {}), '(js_path)\n', (1535, 1544), False, 'import os\n'), ((3601, 3643), 'os.path.join', 'os.path.join', (['filepath_proj', '"""settings.py"""'], {}), "(filepath_proj, 'settings.py')\n", (3613, 3643), False, 'import os\n'), ((4015, 4057), 'os.path.join', 'os.path.join', (['filepath_proj', '"""settings.py"""'], {}), "(filepath_proj, 'settings.py')\n", (4027, 4057), False, 'import os\n'), ((4143, 4181), 'os.path.join', 'os.path.join', (['filepath_app', '"""forms.py"""'], {}), "(filepath_app, 'forms.py')\n", (4155, 4181), False, 'import os\n'), ((5165, 5202), 'os.path.join', 'os.path.join', (['filepath_app', '"""urls.py"""'], {}), "(filepath_app, 'urls.py')\n", (5177, 5202), False, 'import os\n'), ((5455, 5493), 'os.path.join', 'os.path.join', (['filepath_proj', '"""urls.py"""'], {}), "(filepath_proj, 'urls.py')\n", (5467, 5493), False, 'import os\n'), ((5869, 5907), 'os.path.join', 'os.path.join', (['filepath_proj', '"""urls.py"""'], {}), "(filepath_proj, 'urls.py')\n", (5881, 5907), False, 'import os\n'), ((6008, 6050), 'os.path.join', 'os.path.join', (['filepath_app', '"""templatetags"""'], {}), "(filepath_app, 'templatetags')\n", (6020, 6050), False, 'import os\n'), ((6113, 6155), 'os.path.join', 'os.path.join', (['filepath_app', '"""templatetags"""'], {}), "(filepath_app, 'templatetags')\n", (6125, 6155), False, 'import os\n'), ((6769, 6807), 'os.path.join', 'os.path.join', 
(['filepath_app', '"""views.py"""'], {}), "(filepath_app, 'views.py')\n", (6781, 6807), False, 'import os\n'), ((8175, 8211), 'os.path.join', 'os.path.join', (['app_name', '"""index.html"""'], {}), "(app_name, 'index.html')\n", (8187, 8211), False, 'import os\n'), ((8212, 8252), 'os.path.join', 'os.path.join', (['app_name', '"""form_page.html"""'], {}), "(app_name, 'form_page.html')\n", (8224, 8252), False, 'import os\n'), ((8253, 8294), 'os.path.join', 'os.path.join', (['app_name', '"""model_page.html"""'], {}), "(app_name, 'model_page.html')\n", (8265, 8294), False, 'import os\n'), ((8314, 8352), 'os.path.join', 'os.path.join', (['filepath_app', '"""views.py"""'], {}), "(filepath_app, 'views.py')\n", (8326, 8352), False, 'import os\n'), ((8437, 8476), 'os.path.join', 'os.path.join', (['filepath_app', '"""models.py"""'], {}), "(filepath_app, 'models.py')\n", (8449, 8476), False, 'import os\n'), ((8862, 8901), 'os.path.join', 'os.path.join', (['filepath_app', '"""models.py"""'], {}), "(filepath_app, 'models.py')\n", (8874, 8901), False, 'import os\n'), ((8986, 9033), 'os.path.join', 'os.path.join', (['filepath_base', '"""auto_populate.py"""'], {}), "(filepath_base, 'auto_populate.py')\n", (8998, 9033), False, 'import os\n'), ((6188, 6245), 'os.path.join', 'os.path.join', (['filepath_app', '"""templatetags"""', '"""__init__.py"""'], {}), "(filepath_app, 'templatetags', '__init__.py')\n", (6200, 6245), False, 'import os\n'), ((1086, 1097), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1095, 1097), False, 'import os\n'), ((9678, 9689), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (9687, 9689), False, 'import os\n')]
|
import logging
from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage
from rest_framework import serializers
from rest_framework.relations import PrimaryKeyRelatedField
from dora.structures.models import Structure, StructureMember
from .models import (
AccessCondition,
BeneficiaryAccessMode,
CoachOrientationMode,
ConcernedPublic,
Credential,
LocationKind,
Requirement,
Service,
ServiceCategories,
ServiceKind,
ServiceSubCategories,
)
logger = logging.getLogger(__name__)
class CreatablePrimaryKeyRelatedField(PrimaryKeyRelatedField):
def __init__(self, **kwargs):
self.max_length = kwargs.pop("max_length", None)
super().__init__(**kwargs)
def use_pk_only_optimization(self):
return True
def to_internal_value(self, data):
if isinstance(data, int):
return super().to_internal_value(data)
# If we receive a string instead of a primary key, search
# by value, and create a new object if not found
name = data.strip()
if name == "":
raise ValidationError("Cette valeur est vide")
if self.max_length is not None and len(name) > self.max_length:
raise ValidationError(
f"Cette valeur doit avoir moins de {self.max_length} caractères"
)
if self.root.instance:
structure = self.root.instance.structure
else:
structure_slug = self.root.initial_data["structure"]
structure = Structure.objects.get(slug=structure_slug)
if not structure:
raise ValidationError("La structure ne peut pas être vide")
queryset = self.queryset
# find if it already exists in the same structure
obj = queryset.filter(name=name, structure=structure).first()
if not obj:
# then in the global repository
obj = queryset.filter(name=name, structure=None).first()
if not obj:
# otherwise create it
obj = queryset.create(name=name, structure=structure)
return obj
class StructureSerializer(serializers.ModelSerializer):
has_admin = serializers.SerializerMethodField()
class Meta:
model = Structure
fields = [
"slug",
"name",
"short_desc",
"address1",
"address2",
"postal_code",
"city",
"url",
"siret",
"has_admin",
]
def get_has_admin(self, structure):
return structure.membership.filter(is_admin=True, user__is_staff=False).exists()
class ServiceSerializer(serializers.ModelSerializer):
is_available = serializers.SerializerMethodField()
forms_info = serializers.SerializerMethodField()
structure = serializers.SlugRelatedField(
queryset=Structure.objects.all(), slug_field="slug"
)
structure_info = StructureSerializer(source="structure", read_only=True)
kinds_display = serializers.SerializerMethodField()
category_display = serializers.SerializerMethodField()
subcategories_display = serializers.SerializerMethodField()
access_conditions = CreatablePrimaryKeyRelatedField(
many=True,
queryset=AccessCondition.objects.all(),
max_length=140,
required=False,
)
access_conditions_display = serializers.SerializerMethodField()
concerned_public = CreatablePrimaryKeyRelatedField(
many=True,
queryset=ConcernedPublic.objects.all(),
max_length=140,
required=False,
)
concerned_public_display = serializers.SerializerMethodField()
requirements = CreatablePrimaryKeyRelatedField(
many=True,
queryset=Requirement.objects.all(),
max_length=140,
required=False,
)
requirements_display = serializers.SerializerMethodField()
credentials = CreatablePrimaryKeyRelatedField(
many=True,
queryset=Credential.objects.all(),
max_length=140,
required=False,
)
credentials_display = serializers.SerializerMethodField()
location_kinds_display = serializers.SerializerMethodField()
beneficiaries_access_modes_display = serializers.SerializerMethodField()
coach_orientation_modes_display = serializers.SerializerMethodField()
department = serializers.SerializerMethodField()
can_write = serializers.SerializerMethodField()
class Meta:
model = Service
fields = [
"slug",
"name",
"short_desc",
"full_desc",
"kinds",
"category",
"subcategories",
"access_conditions",
"concerned_public",
"is_cumulative",
"has_fee",
"fee_details",
"beneficiaries_access_modes",
"beneficiaries_access_modes_other",
"coach_orientation_modes",
"coach_orientation_modes_other",
"requirements",
"credentials",
"forms",
"online_form",
"contact_name",
"contact_phone",
"contact_email",
"is_contact_info_public",
"location_kinds",
"remote_url",
"address1",
"address2",
"postal_code",
"city_code",
"city",
"geom",
"recurrence",
"suspension_date",
"structure",
"creation_date",
"modification_date",
"is_draft",
"is_available",
"forms_info",
"structure",
"structure_info",
"kinds_display",
"category_display",
"subcategories_display",
"access_conditions_display",
"concerned_public_display",
"requirements_display",
"credentials_display",
"location_kinds_display",
"beneficiaries_access_modes_display",
"coach_orientation_modes_display",
"department",
"can_write",
]
lookup_field = "slug"
def get_is_available(self, obj):
return True
def get_forms_info(self, obj):
forms = [{"name": form, "url": default_storage.url(form)} for form in obj.forms]
return forms
def get_kinds_display(self, obj):
return [ServiceKind(kind).label for kind in obj.kinds]
def get_location_kinds_display(self, obj):
return [LocationKind(kind).label for kind in obj.location_kinds]
def get_category_display(self, obj):
return ServiceCategories(obj.category).label if obj.category else ""
def get_subcategories_display(self, obj):
try:
return [ServiceSubCategories(cat).label for cat in obj.subcategories]
except ValueError:
logger.exception(
"Incorrect Service sub-category", extra={"values": obj.subcategories}
)
return []
def get_beneficiaries_access_modes_display(self, obj):
return [
BeneficiaryAccessMode(mode).label for mode in obj.beneficiaries_access_modes
]
def get_coach_orientation_modes_display(self, obj):
return [
CoachOrientationMode(mode).label for mode in obj.coach_orientation_modes
]
def get_access_conditions_display(self, obj):
return [item.name for item in obj.access_conditions.all()]
def get_concerned_public_display(self, obj):
return [item.name for item in obj.concerned_public.all()]
def get_requirements_display(self, obj):
return [item.name for item in obj.requirements.all()]
def get_credentials_display(self, obj):
return [item.name for item in obj.credentials.all()]
def get_department(self, obj):
code = obj.postal_code
return code[:3] if code.startswith("97") else code[:2]
def get_can_write(self, obj):
user = self.context.get("request").user
return obj.can_write(user)
# def validate_structure(self, value):
# user = self.context.get("request").user
# if (
# not user.is_staff
# and not StructureMember.objects.filter(
# structure_id=value.id, user_id=user.id
# ).exists()
# ):
# raise serializers.ValidationError(
# "Vous n’appartenez pas à cette structure", "not_member_of_struct"
# )
# return value
def validate(self, data):
user = self.context.get("request").user
structure = data.get("structure") or self.instance.structure
user_structures = StructureMember.objects.filter(user_id=user.id).values_list(
"structure_id", flat=True
)
if "structure" in data:
if not user.is_staff and data["structure"].id not in user_structures:
raise serializers.ValidationError(
{"structure": "Vous n’appartenez pas à cette structure"},
"not_member_of_struct",
)
assert structure.id is None or structure.id in user_structures or user.is_staff
if "access_conditions" in data:
self._validate_custom_choice(
"access_conditions", data, user, user_structures, structure
)
if "concerned_public" in data:
self._validate_custom_choice(
"concerned_public", data, user, user_structures, structure
)
if "requirements" in data:
self._validate_custom_choice(
"requirements", data, user, user_structures, structure
)
if "credentials" in data:
self._validate_custom_choice(
"credentials", data, user, user_structures, structure
)
return data
def _validate_custom_choice(self, field, data, user, user_structures, structure):
values = data[field]
for val in values:
if val.structure_id is not None and val.structure_id != structure.id:
raise serializers.ValidationError(
{field: "Ce choix n'est pas disponible dans cette structure"},
"unallowed_custom_choices_bad_struc",
)
return values
class AnonymousServiceSerializer(ServiceSerializer):
contact_name = serializers.SerializerMethodField()
contact_phone = serializers.SerializerMethodField()
contact_email = serializers.SerializerMethodField()
is_contact_info_public = serializers.SerializerMethodField()
def get_contact_name(self, obj):
return obj.contact_name if obj.is_contact_info_public else ""
def get_contact_phone(self, obj):
return obj.contact_phone if obj.is_contact_info_public else ""
def get_contact_email(self, obj):
return obj.contact_email if obj.is_contact_info_public else ""
def get_is_contact_info_public(self, obj):
return True if obj.is_contact_info_public else None
class ServiceListSerializer(ServiceSerializer):
class Meta:
model = Service
fields = [
"slug",
"name",
"structure",
"structure_info",
"postal_code",
"city",
"department",
"is_draft",
"modification_date",
"category_display",
"short_desc",
]
lookup_field = "slug"
class FeedbackSerializer(serializers.Serializer):
full_name = serializers.CharField()
email = serializers.EmailField()
message = serializers.CharField()
|
[
"rest_framework.serializers.EmailField",
"dora.structures.models.Structure.objects.get",
"django.core.exceptions.ValidationError",
"rest_framework.serializers.SerializerMethodField",
"django.core.files.storage.default_storage.url",
"dora.structures.models.StructureMember.objects.filter",
"rest_framework.serializers.CharField",
"dora.structures.models.Structure.objects.all",
"logging.getLogger",
"rest_framework.serializers.ValidationError"
] |
[((543, 570), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (560, 570), False, 'import logging\n'), ((2223, 2258), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (2256, 2258), False, 'from rest_framework import serializers\n'), ((2762, 2797), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (2795, 2797), False, 'from rest_framework import serializers\n'), ((2815, 2850), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (2848, 2850), False, 'from rest_framework import serializers\n'), ((3060, 3095), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (3093, 3095), False, 'from rest_framework import serializers\n'), ((3119, 3154), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (3152, 3154), False, 'from rest_framework import serializers\n'), ((3183, 3218), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (3216, 3218), False, 'from rest_framework import serializers\n'), ((3429, 3464), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (3462, 3464), False, 'from rest_framework import serializers\n'), ((3673, 3708), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (3706, 3708), False, 'from rest_framework import serializers\n'), ((3905, 3940), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (3938, 3940), False, 'from rest_framework import serializers\n'), ((4134, 4169), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (4167, 4169), False, 'from rest_framework import 
serializers\n'), ((4199, 4234), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (4232, 4234), False, 'from rest_framework import serializers\n'), ((4276, 4311), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (4309, 4311), False, 'from rest_framework import serializers\n'), ((4350, 4385), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (4383, 4385), False, 'from rest_framework import serializers\n'), ((4403, 4438), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (4436, 4438), False, 'from rest_framework import serializers\n'), ((4455, 4490), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (4488, 4490), False, 'from rest_framework import serializers\n'), ((10483, 10518), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (10516, 10518), False, 'from rest_framework import serializers\n'), ((10539, 10574), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (10572, 10574), False, 'from rest_framework import serializers\n'), ((10595, 10630), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (10628, 10630), False, 'from rest_framework import serializers\n'), ((10660, 10695), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (10693, 10695), False, 'from rest_framework import serializers\n'), ((11632, 11655), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (11653, 11655), False, 'from rest_framework import serializers\n'), ((11668, 11692), 'rest_framework.serializers.EmailField', 'serializers.EmailField', ([], {}), '()\n', 
(11690, 11692), False, 'from rest_framework import serializers\n'), ((11707, 11730), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (11728, 11730), False, 'from rest_framework import serializers\n'), ((1142, 1182), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Cette valeur est vide"""'], {}), "('Cette valeur est vide')\n", (1157, 1182), False, 'from django.core.exceptions import ValidationError\n'), ((1274, 1360), 'django.core.exceptions.ValidationError', 'ValidationError', (['f"""Cette valeur doit avoir moins de {self.max_length} caractères"""'], {}), "(\n f'Cette valeur doit avoir moins de {self.max_length} caractères')\n", (1289, 1360), False, 'from django.core.exceptions import ValidationError\n'), ((1574, 1616), 'dora.structures.models.Structure.objects.get', 'Structure.objects.get', ([], {'slug': 'structure_slug'}), '(slug=structure_slug)\n', (1595, 1616), False, 'from dora.structures.models import Structure, StructureMember\n'), ((1661, 1714), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""La structure ne peut pas être vide"""'], {}), "('La structure ne peut pas être vide')\n", (1676, 1714), False, 'from django.core.exceptions import ValidationError\n'), ((2914, 2937), 'dora.structures.models.Structure.objects.all', 'Structure.objects.all', ([], {}), '()\n', (2935, 2937), False, 'from dora.structures.models import Structure, StructureMember\n'), ((6344, 6369), 'django.core.files.storage.default_storage.url', 'default_storage.url', (['form'], {}), '(form)\n', (6363, 6369), False, 'from django.core.files.storage import default_storage\n'), ((8758, 8805), 'dora.structures.models.StructureMember.objects.filter', 'StructureMember.objects.filter', ([], {'user_id': 'user.id'}), '(user_id=user.id)\n', (8788, 8805), False, 'from dora.structures.models import Structure, StructureMember\n'), ((9004, 9117), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', 
(["{'structure': 'Vous n’appartenez pas à cette structure'}", '"""not_member_of_struct"""'], {}), "({'structure':\n 'Vous n’appartenez pas à cette structure'}, 'not_member_of_struct')\n", (9031, 9117), False, 'from rest_framework import serializers\n'), ((10198, 10334), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['{field: "Ce choix n\'est pas disponible dans cette structure"}', '"""unallowed_custom_choices_bad_struc"""'], {}), '({field:\n "Ce choix n\'est pas disponible dans cette structure"},\n \'unallowed_custom_choices_bad_struc\')\n', (10225, 10334), False, 'from rest_framework import serializers\n')]
|
import versioneer
from setuptools import setup
with open('README.rst', 'r') as fh:
long_description = fh.read()
with open('requirements.txt') as fh:
requirements = fh.readlines()
setup(
name='docser',
packages=['docser'],
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='A simple server for hosting Sphinx documentation',
long_description=long_description,
long_description_content_type='text/x-rst',
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/chrisbrake/docser',
keywords=['docser', 'Sphinx', 'documentation'],
classifiers=[
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
],
install_requires=requirements
)
|
[
"versioneer.get_version",
"versioneer.get_cmdclass"
] |
[((254, 278), 'versioneer.get_version', 'versioneer.get_version', ([], {}), '()\n', (276, 278), False, 'import versioneer\n'), ((293, 318), 'versioneer.get_cmdclass', 'versioneer.get_cmdclass', ([], {}), '()\n', (316, 318), False, 'import versioneer\n')]
|
import flask
import threading
import requests
import json
import sshclient
import deployer_helper
from flask import request
from pathlib import Path
def req_handler(app,port):
@app.route('/deployment/dodeploy', methods=['POST'])
def dodeploy():
try :
req = request.get_json()
print(req)
ip = req["serverip"]
sshport = req['sshPort']
machine_username = req['machineusername']
machine_password = req['password']
serviceid = req['serviceid']
username = req['username']
application_name = req['applicationname']
service_name = req['servicename']
if username != 'admin':
config_path = '/userservice/'+ username + '/' + application_name + '/config.json'
filename = deployer_helper.getFileName(config_path, service_name)
smres = deployer_helper.getSensorTopic(username,application_name,service_name,serviceid,config_path)
deployer_helper.notifyActionManager(username,application_name,service_name,serviceid,config_path,smres['sensor_host'])
sensortopic = smres['temporary_topic']
print("Returned Sensor topic by sensor manager is ",sensortopic)
deployer_helper.generateDokerFile(config_path, service_name, sensortopic, serviceid)
file_path = '/userservice/'+username + '/' + application_name + '/' + service_name + '/' + filename
else:
filename = service_name + '.py'
file_path = '/userservice/bootstrap/init/' + service_name +'/' + filename
sensortopic = "None"
print("file path : ",file_path)
containerid = sshclient.deployService(username, machine_username, machine_password,ip,port,serviceid,service_name,file_path, filename,sensortopic)
containerid = containerid[:-1]
URL = "http://localhost:8080/servicelcm/service/deploymentStatus"
req = {
'serviceId' : serviceid,
'username' : username,
'serviceName' : service_name,
'status' : 'success',
'ip' : ip,
'port' : 55555,
'containerId' : containerid,
'applicationName' : application_name
}
print(req)
requests.post(url = URL, json = req)
except Exception as error:
print("Error ",error)
URL = "http://localhost:8080/servicelcm/service/deploymentStatus"
req = {
'serviceId' : serviceid,
'username' : username,
'serviceName' : service_name,
'status' : 'success',
'ip' : ip,
'port' : 55555,
'containerId' : containerid,
'applicationName' : application_name
}
requests.post(url = URL, json = req)
res = {'status' : 'ok'}
return flask.jsonify(res)
app.run(host = '0.0.0.0',port = port)
def main():
app = flask.Flask('Deoployment Manger')
port = 8888 #deployer port
req_t = threading.Thread(target = req_handler, args = (app,port))
req_t.start()
req_t.join()
return
if __name__ == '__main__':
main()
|
[
"threading.Thread",
"deployer_helper.getFileName",
"flask.Flask",
"deployer_helper.getSensorTopic",
"sshclient.deployService",
"flask.jsonify",
"deployer_helper.notifyActionManager",
"requests.post",
"deployer_helper.generateDokerFile",
"flask.request.get_json"
] |
[((2567, 2600), 'flask.Flask', 'flask.Flask', (['"""Deoployment Manger"""'], {}), "('Deoployment Manger')\n", (2578, 2600), False, 'import flask\n'), ((2638, 2692), 'threading.Thread', 'threading.Thread', ([], {'target': 'req_handler', 'args': '(app, port)'}), '(target=req_handler, args=(app, port))\n', (2654, 2692), False, 'import threading\n'), ((2488, 2506), 'flask.jsonify', 'flask.jsonify', (['res'], {}), '(res)\n', (2501, 2506), False, 'import flask\n'), ((265, 283), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (281, 283), False, 'from flask import request\n'), ((1509, 1651), 'sshclient.deployService', 'sshclient.deployService', (['username', 'machine_username', 'machine_password', 'ip', 'port', 'serviceid', 'service_name', 'file_path', 'filename', 'sensortopic'], {}), '(username, machine_username, machine_password, ip,\n port, serviceid, service_name, file_path, filename, sensortopic)\n', (1532, 1651), False, 'import sshclient\n'), ((2003, 2035), 'requests.post', 'requests.post', ([], {'url': 'URL', 'json': 'req'}), '(url=URL, json=req)\n', (2016, 2035), False, 'import requests\n'), ((706, 760), 'deployer_helper.getFileName', 'deployer_helper.getFileName', (['config_path', 'service_name'], {}), '(config_path, service_name)\n', (733, 760), False, 'import deployer_helper\n'), ((776, 876), 'deployer_helper.getSensorTopic', 'deployer_helper.getSensorTopic', (['username', 'application_name', 'service_name', 'serviceid', 'config_path'], {}), '(username, application_name, service_name,\n serviceid, config_path)\n', (806, 876), False, 'import deployer_helper\n'), ((873, 1000), 'deployer_helper.notifyActionManager', 'deployer_helper.notifyActionManager', (['username', 'application_name', 'service_name', 'serviceid', 'config_path', "smres['sensor_host']"], {}), "(username, application_name,\n service_name, serviceid, config_path, smres['sensor_host'])\n", (908, 1000), False, 'import deployer_helper\n'), ((1111, 1199), 
'deployer_helper.generateDokerFile', 'deployer_helper.generateDokerFile', (['config_path', 'service_name', 'sensortopic', 'serviceid'], {}), '(config_path, service_name, sensortopic,\n serviceid)\n', (1144, 1199), False, 'import deployer_helper\n'), ((2413, 2445), 'requests.post', 'requests.post', ([], {'url': 'URL', 'json': 'req'}), '(url=URL, json=req)\n', (2426, 2445), False, 'import requests\n')]
|
import binascii
import sys
from uuid import UUID
import msgpack
signed = 0x02
chained = 0x03
usage = " usage:\n" \
" python3 unpack.py [ <binary-file-name> | <UPP(hex)> | <UPP(base64)> ]"
if len(sys.argv) < 2:
print(usage)
sys.exit(1)
upp = b''
arg = sys.argv[1]
# try to get UPP from binary file
try:
with open(arg, "rb") as f:
upp = f.read()
except OSError:
pass
if not upp:
# try to parse argument as hex string representation of UPP
try:
upp = binascii.unhexlify(arg)
except binascii.Error:
pass
if not upp:
# try to parse argument as base64 string representation of UPP
try:
upp = binascii.a2b_base64(arg)
except Exception:
print("unable to parse UPP from argument: \"{}\"".format(arg))
print(usage)
sys.exit(1)
if not (upp[0] == 0x95 or upp[0] == 0x96):
print("invalid UPP")
print(usage)
sys.exit(1)
# unpack msgpack formatted UPP
if upp[1] >> 4 == 2: # version 2
unpacked = msgpack.unpackb(upp)
elif upp[1] >> 4 == 1: # version 1 (legacy)
unpacked = msgpack.unpackb(upp, raw=True)
else:
print("unsupported UPP version")
print(usage)
sys.exit(1)
print(" hex: {}".format(binascii.hexlify(upp).decode()))
print("base64: {}".format(binascii.b2a_base64(upp).decode()))
version = unpacked[0]
print("- Version: 0x{:02x}".format(version))
uuid = UUID(binascii.hexlify(unpacked[1]).decode())
print("- UUID: {}".format(str(uuid)))
if version & 0x0F == chained:
prev_sign = unpacked[2]
print("- prev.Sign.: {}".format(binascii.b2a_base64(prev_sign, newline=False).decode()))
print(" [hex]: {:s} ({:d} bytes)".format(binascii.hexlify(prev_sign).decode(), len(prev_sign)))
payload_type = unpacked[-3]
print("- Type: 0x{:02x}".format(payload_type))
payload = unpacked[-2]
if type(payload) is bytes:
print("- Payload: {:s}".format(binascii.b2a_base64(payload, newline=False).decode()))
print(" [hex]: {:s} ({:d} bytes)".format(binascii.hexlify(payload).decode(), len(payload)))
else:
print("- Payload: {:s}".format(repr(payload)))
signature = unpacked[-1]
print("- Signature: {:s}".format(binascii.b2a_base64(signature, newline=False).decode()))
print(" [hex]: {:s} ({:d} bytes)".format(binascii.hexlify(signature).decode(), len(signature)))
|
[
"binascii.hexlify",
"msgpack.unpackb",
"binascii.unhexlify",
"binascii.b2a_base64",
"binascii.a2b_base64",
"sys.exit"
] |
[((243, 254), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (251, 254), False, 'import sys\n'), ((919, 930), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (927, 930), False, 'import sys\n'), ((1012, 1032), 'msgpack.unpackb', 'msgpack.unpackb', (['upp'], {}), '(upp)\n', (1027, 1032), False, 'import msgpack\n'), ((503, 526), 'binascii.unhexlify', 'binascii.unhexlify', (['arg'], {}), '(arg)\n', (521, 526), False, 'import binascii\n'), ((670, 694), 'binascii.a2b_base64', 'binascii.a2b_base64', (['arg'], {}), '(arg)\n', (689, 694), False, 'import binascii\n'), ((1093, 1123), 'msgpack.unpackb', 'msgpack.unpackb', (['upp'], {'raw': '(True)'}), '(upp, raw=True)\n', (1108, 1123), False, 'import msgpack\n'), ((1188, 1199), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1196, 1199), False, 'import sys\n'), ((817, 828), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (825, 828), False, 'import sys\n'), ((1406, 1435), 'binascii.hexlify', 'binascii.hexlify', (['unpacked[1]'], {}), '(unpacked[1])\n', (1422, 1435), False, 'import binascii\n'), ((1227, 1248), 'binascii.hexlify', 'binascii.hexlify', (['upp'], {}), '(upp)\n', (1243, 1248), False, 'import binascii\n'), ((1286, 1310), 'binascii.b2a_base64', 'binascii.b2a_base64', (['upp'], {}), '(upp)\n', (1305, 1310), False, 'import binascii\n'), ((2196, 2241), 'binascii.b2a_base64', 'binascii.b2a_base64', (['signature'], {'newline': '(False)'}), '(signature, newline=False)\n', (2215, 2241), False, 'import binascii\n'), ((2300, 2327), 'binascii.hexlify', 'binascii.hexlify', (['signature'], {}), '(signature)\n', (2316, 2327), False, 'import binascii\n'), ((1585, 1630), 'binascii.b2a_base64', 'binascii.b2a_base64', (['prev_sign'], {'newline': '(False)'}), '(prev_sign, newline=False)\n', (1604, 1630), False, 'import binascii\n'), ((1693, 1720), 'binascii.hexlify', 'binascii.hexlify', (['prev_sign'], {}), '(prev_sign)\n', (1709, 1720), False, 'import binascii\n'), ((1919, 1962), 'binascii.b2a_base64', 'binascii.b2a_base64', 
(['payload'], {'newline': '(False)'}), '(payload, newline=False)\n', (1938, 1962), False, 'import binascii\n'), ((2025, 2050), 'binascii.hexlify', 'binascii.hexlify', (['payload'], {}), '(payload)\n', (2041, 2050), False, 'import binascii\n')]
|
import logging
from elram.config import load_config
from elram.repository.models import User, database, Event, Attendance, Account, Transaction
CONFIG = load_config()
logger = logging.getLogger('main')
def populate_db(data):
models_mapping = {
'users': User,
'accounts': Account,
}
for model_key, model_data in data.items():
model_class = models_mapping.get(model_key)
if model_class is None:
logger.error('No model class found', extra={'model_key': model_key})
continue
models = (model_class(**data) for data in model_data)
model_class.bulk_create(models)
logger.info(
'Records created',
extra={'model': model_class.__name__, 'records': len(model_data)},
)
def init_db(db_name, user, password, host, port):
database.init(database=db_name, user=user, password=password, host=host, port=port)
database.connect()
database.create_tables([User, Event, Attendance, Account, Transaction])
return database
|
[
"elram.repository.models.database.connect",
"elram.repository.models.database.init",
"elram.repository.models.database.create_tables",
"logging.getLogger",
"elram.config.load_config"
] |
[((155, 168), 'elram.config.load_config', 'load_config', ([], {}), '()\n', (166, 168), False, 'from elram.config import load_config\n'), ((178, 203), 'logging.getLogger', 'logging.getLogger', (['"""main"""'], {}), "('main')\n", (195, 203), False, 'import logging\n'), ((843, 930), 'elram.repository.models.database.init', 'database.init', ([], {'database': 'db_name', 'user': 'user', 'password': 'password', 'host': 'host', 'port': 'port'}), '(database=db_name, user=user, password=password, host=host,\n port=port)\n', (856, 930), False, 'from elram.repository.models import User, database, Event, Attendance, Account, Transaction\n'), ((931, 949), 'elram.repository.models.database.connect', 'database.connect', ([], {}), '()\n', (947, 949), False, 'from elram.repository.models import User, database, Event, Attendance, Account, Transaction\n'), ((954, 1025), 'elram.repository.models.database.create_tables', 'database.create_tables', (['[User, Event, Attendance, Account, Transaction]'], {}), '([User, Event, Attendance, Account, Transaction])\n', (976, 1025), False, 'from elram.repository.models import User, database, Event, Attendance, Account, Transaction\n')]
|
from openalpr import Alpr
import re
import os
class Plate:
def __init__(self):
self.alpr = Alpr("eu","/etc/openalpr/conf", "/usr/share/openalpr/runtime_data")
if not self.alpr.is_loaded():
print("Erro ao carregar o ALPR..")
sys.exit(1)
self.alpr.set_top_n(10)
self.alpr.set_default_region("")
def plate_ocr(self, placa):
results = self.alpr.recognize_file(placa)
i = 0
plate = ""
for plate in results['results']:
i += 1
for candidate in plate['candidates']:
if candidate ['matches_template']:
prefix = "*"
teste = candidate['plate']
x = re.search('^[A-Z]{3}[0-9]{1}[A-Z]{1}[0-9]{2}', teste)
if (x):
plate = candidate['plate']
#return plate
break
self.alpr.unload()
if(plate != ""):
print(plate)
return plate
#placa = Plate()
#placa01 = placa.plate_ocr('/home/pi/Pictures/Mercosul/img01.jpeg')
#print(placa01)
|
[
"re.search",
"openalpr.Alpr"
] |
[((112, 180), 'openalpr.Alpr', 'Alpr', (['"""eu"""', '"""/etc/openalpr/conf"""', '"""/usr/share/openalpr/runtime_data"""'], {}), "('eu', '/etc/openalpr/conf', '/usr/share/openalpr/runtime_data')\n", (116, 180), False, 'from openalpr import Alpr\n'), ((736, 789), 're.search', 're.search', (['"""^[A-Z]{3}[0-9]{1}[A-Z]{1}[0-9]{2}"""', 'teste'], {}), "('^[A-Z]{3}[0-9]{1}[A-Z]{1}[0-9]{2}', teste)\n", (745, 789), False, 'import re\n')]
|
# This is where the models go!
from django.db import models
from django.urls import reverse
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
# Using the user: user = models.ForeignKey(settings.AUTH_USER_MODEL)
CurrentUser = None
CurrentTeam = None
class Team(models.Model):
users = models.ManyToManyField(settings.AUTH_USER_MODEL,
blank=True,
through='django_teams.TeamStatus',
related_name='team_member')
name = models.CharField(max_length=255)
private = models.BooleanField(default=False)
description = models.TextField(null=True, blank=True)
def get_absolute_url(self):
return reverse('team-detail', kwargs={'pk': self.pk})
def __str__(self):
return self.name
def add_user(self, user, team_role=1):
TeamStatus(user=user, team=self, role=team_role).save()
def approve_user(self, user):
ts = TeamStatus.objects.get(user=user, team=self)
if ts.role == 1:
ts.role = 10
ts.save()
def approved_objects(self):
return Ownership.objects.select_related('team').filter(team=self, approved=True)
@staticmethod
def get_current_team():
if CurrentTeam is not None:
return CurrentTeam
return None
class TeamStatus(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
team = models.ForeignKey('django_teams.Team', on_delete=models.DO_NOTHING)
comment = models.CharField(max_length=255, default='', null=True, blank=True)
TEAM_ROLES = (
(1, 'Requesting Access'),
(10, 'Team Member'),
(20, 'Team Leader'),
)
role = models.IntegerField(choices=TEAM_ROLES)
def approve(self):
self.role = 10
self.save()
def __str__(self):
return "%s requesting to join %s" % (self.user.__unicode__(), self.team.__unicode__())
class Ownership(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
approved = models.BooleanField(default=False)
team = models.ForeignKey('django_teams.Team', on_delete=models.DO_NOTHING)
@staticmethod
def check_permission(item):
content_type = ContentType.objects.get_for_model(item)
res = Ownership.objects.filter(team=Team.get_current_team(), content_type=content_type, object_id=item.id)
return len(res) > 0
@staticmethod
def grant_ownership(team, item):
content_type = ContentType.objects.get_for_model(item)
res = Ownership.objects.get_or_create(team=team, content_type=content_type, object_id=item.id)
if res[1]:
res[0].save()
|
[
"django.db.models.TextField",
"django.db.models.ManyToManyField",
"django.contrib.contenttypes.fields.GenericForeignKey",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.PositiveIntegerField",
"django.db.models.BooleanField",
"django.contrib.contenttypes.models.ContentType.objects.get_for_model",
"django.urls.reverse",
"django.db.models.IntegerField"
] |
[((397, 525), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['settings.AUTH_USER_MODEL'], {'blank': '(True)', 'through': '"""django_teams.TeamStatus"""', 'related_name': '"""team_member"""'}), "(settings.AUTH_USER_MODEL, blank=True, through=\n 'django_teams.TeamStatus', related_name='team_member')\n", (419, 525), False, 'from django.db import models\n'), ((637, 669), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (653, 669), False, 'from django.db import models\n'), ((684, 718), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (703, 718), False, 'from django.db import models\n'), ((737, 776), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (753, 776), False, 'from django.db import models\n'), ((1495, 1564), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE'}), '(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n', (1512, 1564), False, 'from django.db import models\n'), ((1576, 1643), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""django_teams.Team"""'], {'on_delete': 'models.DO_NOTHING'}), "('django_teams.Team', on_delete=models.DO_NOTHING)\n", (1593, 1643), False, 'from django.db import models\n'), ((1658, 1725), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'default': '""""""', 'null': '(True)', 'blank': '(True)'}), "(max_length=255, default='', null=True, blank=True)\n", (1674, 1725), False, 'from django.db import models\n'), ((1856, 1895), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'TEAM_ROLES'}), '(choices=TEAM_ROLES)\n', (1875, 1895), False, 'from django.db import models\n'), ((2134, 2190), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ContentType'], {'on_delete': 'models.CASCADE'}), '(ContentType, 
on_delete=models.CASCADE)\n', (2151, 2190), False, 'from django.db import models\n'), ((2207, 2236), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (2234, 2236), False, 'from django.db import models\n'), ((2258, 2304), 'django.contrib.contenttypes.fields.GenericForeignKey', 'GenericForeignKey', (['"""content_type"""', '"""object_id"""'], {}), "('content_type', 'object_id')\n", (2275, 2304), False, 'from django.contrib.contenttypes.fields import GenericForeignKey\n'), ((2320, 2354), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2339, 2354), False, 'from django.db import models\n'), ((2366, 2433), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""django_teams.Team"""'], {'on_delete': 'models.DO_NOTHING'}), "('django_teams.Team', on_delete=models.DO_NOTHING)\n", (2383, 2433), False, 'from django.db import models\n'), ((825, 871), 'django.urls.reverse', 'reverse', (['"""team-detail"""'], {'kwargs': "{'pk': self.pk}"}), "('team-detail', kwargs={'pk': self.pk})\n", (832, 871), False, 'from django.urls import reverse\n'), ((2508, 2547), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['item'], {}), '(item)\n', (2541, 2547), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((2771, 2810), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['item'], {}), '(item)\n', (2804, 2810), False, 'from django.contrib.contenttypes.models import ContentType\n')]
|
from openpyxl.styles import colors, Font
from examples.c09_0_font_styles.openpyxl import index
from openpyxl import Workbook
from base_test_cases import ExcelTest
class TestOpenPyXLFontStyles(ExcelTest):
def test_font_color(self):
wb = Workbook()
ws = wb.active
a1 = ws['A1']
a1_font = a1.font
default_color = colors.Color(
indexed=None, type='theme', rgb=None, tint=0.0, theme=1, auto=None)
self.assertEqual(a1_font.color, default_color)
a1.font = Font(color="FF000000")
self.assertEqual(a1.font.color.rgb, "FF000000")
a1 = index.set_font_color_red(wb)
self.assertEqual(a1.font.color.rgb, colors.RED)
def test_font_size(self):
wb = Workbook()
ws = wb.active
a1 = ws['A1']
a1_font = a1.font
default_size = 11
self.assertEqual(a1_font.size, default_size)
new_size = 20
a1 = index.set_font_size(wb, new_size)
self.assertEqual(a1.font.size, new_size)
def test_font_style(self):
wb = Workbook()
ws = wb.active
a1 = ws['A1']
a1_font = a1.font
default_bold = False
default_italic = False
default_underline = None
default_style = 'Calibri'
self.assertEqual(a1_font.bold, default_bold)
self.assertEqual(a1_font.italic, default_italic)
self.assertEqual(a1_font.underline, default_underline)
self.assertEqual(a1_font.name, default_style)
new_style = "Helvetica"
a1 = index.set_font_style(wb, new_style)
self.assertEqual(a1.font.name, new_style)
self.assertTrue(a1.font.bold)
self.assertTrue(a1.font.italic)
four_kinds_of_underlines = [
'single', 'singleAccounting',
'double', 'doubleAccounting']
self.assertEqual(a1.font.underline, four_kinds_of_underlines[0])
|
[
"examples.c09_0_font_styles.openpyxl.index.set_font_size",
"openpyxl.Workbook",
"openpyxl.styles.Font",
"examples.c09_0_font_styles.openpyxl.index.set_font_color_red",
"openpyxl.styles.colors.Color",
"examples.c09_0_font_styles.openpyxl.index.set_font_style"
] |
[((251, 261), 'openpyxl.Workbook', 'Workbook', ([], {}), '()\n', (259, 261), False, 'from openpyxl import Workbook\n'), ((357, 442), 'openpyxl.styles.colors.Color', 'colors.Color', ([], {'indexed': 'None', 'type': '"""theme"""', 'rgb': 'None', 'tint': '(0.0)', 'theme': '(1)', 'auto': 'None'}), "(indexed=None, type='theme', rgb=None, tint=0.0, theme=1, auto=None\n )\n", (369, 442), False, 'from openpyxl.styles import colors, Font\n'), ((524, 546), 'openpyxl.styles.Font', 'Font', ([], {'color': '"""FF000000"""'}), "(color='FF000000')\n", (528, 546), False, 'from openpyxl.styles import colors, Font\n'), ((616, 644), 'examples.c09_0_font_styles.openpyxl.index.set_font_color_red', 'index.set_font_color_red', (['wb'], {}), '(wb)\n', (640, 644), False, 'from examples.c09_0_font_styles.openpyxl import index\n'), ((745, 755), 'openpyxl.Workbook', 'Workbook', ([], {}), '()\n', (753, 755), False, 'from openpyxl import Workbook\n'), ((941, 974), 'examples.c09_0_font_styles.openpyxl.index.set_font_size', 'index.set_font_size', (['wb', 'new_size'], {}), '(wb, new_size)\n', (960, 974), False, 'from examples.c09_0_font_styles.openpyxl import index\n'), ((1069, 1079), 'openpyxl.Workbook', 'Workbook', ([], {}), '()\n', (1077, 1079), False, 'from openpyxl import Workbook\n'), ((1550, 1585), 'examples.c09_0_font_styles.openpyxl.index.set_font_style', 'index.set_font_style', (['wb', 'new_style'], {}), '(wb, new_style)\n', (1570, 1585), False, 'from examples.c09_0_font_styles.openpyxl import index\n')]
|
"""
Script that trains Tensorflow singletask models on QM7 dataset.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import deepchem as dc
import numpy as np
from qm7_datasets import load_qm7b_from_mat
np.random.seed(123)
qm7_tasks, datasets, transformers = load_qm7b_from_mat(split='stratified')
train_dataset, valid_dataset, test_dataset = datasets
fit_transformers = [dc.trans.CoulombFitTransformer(train_dataset)]
regression_metric = [dc.metrics.Metric(dc.metrics.mean_absolute_error, mode="regression"),
dc.metrics.Metric(dc.metrics.pearson_r2_score, mode="regression")]
model = dc.models.TensorflowMultiTaskFitTransformRegressor(
n_tasks=len(qm7_tasks), n_features=[23, 23], learning_rate=0.001 , momentum=.8, batch_size=25,
weight_init_stddevs=[1/np.sqrt(400),1/np.sqrt(100),1/np.sqrt(100)],
bias_init_consts=[0.,0.,0.], layer_sizes=[400,100,100],
dropouts=[0.01,0.01,0.01], fit_transformers=fit_transformers, n_evals=10, seed=123)
# Fit trained model
model.fit(train_dataset, nb_epoch=50)
model.save()
train_scores = model.evaluate(train_dataset, regression_metric, transformers)
print("Train scores [kcal/mol]")
print(train_scores)
valid_scores = model.evaluate(valid_dataset, regression_metric, transformers)
print("Valid scores [kcal/mol]")
print(valid_scores)
test_scores = model.evaluate(test_dataset, regression_metric, transformers)
print("Test scores [kcal/mol]")
print(test_scores)
|
[
"numpy.random.seed",
"deepchem.trans.CoulombFitTransformer",
"qm7_datasets.load_qm7b_from_mat",
"deepchem.metrics.Metric",
"numpy.sqrt"
] |
[((279, 298), 'numpy.random.seed', 'np.random.seed', (['(123)'], {}), '(123)\n', (293, 298), True, 'import numpy as np\n'), ((335, 373), 'qm7_datasets.load_qm7b_from_mat', 'load_qm7b_from_mat', ([], {'split': '"""stratified"""'}), "(split='stratified')\n", (353, 373), False, 'from qm7_datasets import load_qm7b_from_mat\n'), ((448, 493), 'deepchem.trans.CoulombFitTransformer', 'dc.trans.CoulombFitTransformer', (['train_dataset'], {}), '(train_dataset)\n', (478, 493), True, 'import deepchem as dc\n'), ((516, 584), 'deepchem.metrics.Metric', 'dc.metrics.Metric', (['dc.metrics.mean_absolute_error'], {'mode': '"""regression"""'}), "(dc.metrics.mean_absolute_error, mode='regression')\n", (533, 584), True, 'import deepchem as dc\n'), ((601, 666), 'deepchem.metrics.Metric', 'dc.metrics.Metric', (['dc.metrics.pearson_r2_score'], {'mode': '"""regression"""'}), "(dc.metrics.pearson_r2_score, mode='regression')\n", (618, 666), True, 'import deepchem as dc\n'), ((854, 866), 'numpy.sqrt', 'np.sqrt', (['(400)'], {}), '(400)\n', (861, 866), True, 'import numpy as np\n'), ((869, 881), 'numpy.sqrt', 'np.sqrt', (['(100)'], {}), '(100)\n', (876, 881), True, 'import numpy as np\n'), ((884, 896), 'numpy.sqrt', 'np.sqrt', (['(100)'], {}), '(100)\n', (891, 896), True, 'import numpy as np\n')]
|
# coding: utf-8
import unittest
from tapioca_asana import Asana
class TestTapiocaAsana(unittest.TestCase):
def setUp(self):
self.wrapper = Asana()
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"tapioca_asana.Asana"
] |
[((197, 212), 'unittest.main', 'unittest.main', ([], {}), '()\n', (210, 212), False, 'import unittest\n'), ((156, 163), 'tapioca_asana.Asana', 'Asana', ([], {}), '()\n', (161, 163), False, 'from tapioca_asana import Asana\n')]
|
#!/usr/bin/env python3
import sys
from PyQt5.QtWidgets import QVBoxLayout,QWidget
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
import matplotlib.pyplot as plt
import random
import numpy as np
class HistogramWidget(QWidget):
def __init__(self, statNames=None, histograms=None, binWidth=None):
super().__init__()
self._statNames = statNames
self._histograms = histograms
self._binWidth = binWidth
self.figure = plt.figure()
self.canvas = FigureCanvas(self.figure)
self.toolbar = NavigationToolbar(self.canvas, self)
layout = QVBoxLayout()
layout.addWidget(self.toolbar)
layout.addWidget(self.canvas)
self.setLayout(layout)
self.plot()
def plot(self, statNames=None, histograms=None, binWidth=None):
self._statNames = statNames
self._histograms = histograms
self._binWidth = binWidth
self.figure.clear()
if self._statNames is None or self._histograms is None or len(self._histograms) == 0:
return
ax = self.figure.add_subplot(111)
ax.set_title("Histograms")
legend = []
for i in range(len(self._statNames)):
label = self._statNames[i]
values = self._histograms[i]
first_nonzero_index = next((i for i, x in enumerate(values) if x!=0), None)
last_nonzero_index = next((len(values) - idx for idx, item in enumerate(reversed(values), 1) if item), None)
bins = [j for j in range((first_nonzero_index-10000), last_nonzero_index-10000)]
values = values[first_nonzero_index:last_nonzero_index]
# this is for merging bins according to bin width
mergedValues = [sum(values[i:i + self._binWidth]) for i in range(0, len(values), self._binWidth)]
mergedBins = [bins[i]/10 for i in range(0, len(bins), self._binWidth)]
left, right = mergedBins[:-1], mergedBins[1:]
X = np.array([left, right]).T.flatten()
X = np.append(X, mergedBins[len(mergedBins)-1])
Y = np.array([mergedValues, mergedValues]).T.flatten()[:-1]
legend.append(label)
ax.plot(X, Y)
ax.legend(legend)
self.canvas.draw()
|
[
"matplotlib.backends.backend_qt5agg.FigureCanvasQTAgg",
"PyQt5.QtWidgets.QVBoxLayout",
"matplotlib.pyplot.figure",
"numpy.array",
"matplotlib.backends.backend_qt5agg.NavigationToolbar2QT"
] |
[((584, 596), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (594, 596), True, 'import matplotlib.pyplot as plt\n'), ((619, 644), 'matplotlib.backends.backend_qt5agg.FigureCanvasQTAgg', 'FigureCanvas', (['self.figure'], {}), '(self.figure)\n', (631, 644), True, 'from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas\n'), ((668, 704), 'matplotlib.backends.backend_qt5agg.NavigationToolbar2QT', 'NavigationToolbar', (['self.canvas', 'self'], {}), '(self.canvas, self)\n', (685, 704), True, 'from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar\n'), ((722, 735), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (733, 735), False, 'from PyQt5.QtWidgets import QVBoxLayout, QWidget\n'), ((2109, 2132), 'numpy.array', 'np.array', (['[left, right]'], {}), '([left, right])\n', (2117, 2132), True, 'import numpy as np\n'), ((2221, 2259), 'numpy.array', 'np.array', (['[mergedValues, mergedValues]'], {}), '([mergedValues, mergedValues])\n', (2229, 2259), True, 'import numpy as np\n')]
|
#!/usr/bin/python
import spacy
import json
import numpy as np
import rospy
from std_msgs.msg import String
from spacy.matcher import PhraseMatcher
from spacy.matcher import Matcher
from spacy.tokens import Span
from spacy.lang.en import English
class Greet_Visitors:
def __init__(self):
rospy.init_node('Greet_Visitors')
self.tts_pub = rospy.Publisher('/hri/tts_input', String, queue_size=1,latch=True)
def subscribe_greet(self):
#print("subscriber is called")
greet_subscriber = rospy.Subscriber('/hri/greet_input', String,self.greet_callback)
location_subscriber = rospy.Subscriber('/hri/location_input', String,self.loaction_callback)
def greet_callback(self, msg):
#parse text and execute main code
self.text = msg.data
dictMsg={}
dictMsg["person"]=self.recognised_visitor()
if(self.recognised_visitor() == "plumber"):
dictMsg["room"]=self.ask_plumber()
dictWrapper=dictMsg
jsonStr = json.dumps(dictWrapper)
print(jsonStr)
output_pub = rospy.Publisher('/hri/greet_output', String, queue_size=1,latch=True)
output_pub.publish(jsonStr) #Publish what the component sees for debugging (as there is a delay due to system performance)
def location_callback(self, msg):
#parse text and execute main code
self.text = msg.data
dictMsg={}
dictMsg["person"]=self.recognised_room()
dictWrapper=dictMsg
jsonStr = json.dumps(dictWrapper)
print(jsonStr)
output_pub = rospy.Publisher('/hri/location_output', String, queue_size=1,latch=True)
output_pub.publish(jsonStr) #Publish what the component sees for debugging (as there is a delay due to system performance)
def string_to_tts(self, string):
self.tts_pub.publish(string) #Publish what the component sees for debugging (as there is a delay due to system performance)
def string_to_obj(self, string):
return json.dumps(string)
def obj_to_string(self, obj):
return json.loads(obj)
def recognised_visitor(self):
if(self.return_people() is not None):
return self.return_people().text
else:
return "unrecognised"
def recognised_room(self):
if(self.return_rooms() is not None):
return self.return_rooms().text
else:
return "unrecognised"
def ask_plumber(self):
if(self.return_rooms() is not None):
return self.return_rooms().text
def return_rooms(self):
nlp = spacy.load("en_core_web_sm")
doc = nlp(self.text)
rooms = ["kitchen", "bedroom", "bathroom", "hallway", "living room"]
room_patterns = list(nlp.pipe(rooms))
roomMatcher = PhraseMatcher(nlp.vocab)
roomMatcher.add("ROOM", [*room_patterns])
for match_id, start, end in roomMatcher(doc):
# Create a Span with the label for "GPE"
roomSpan = Span(doc, start, end, label="ROOM")
return roomSpan
def return_people(self):
nlp = spacy.load("en_core_web_sm")
doc = nlp(self.text)
people = ["doctor", "<NAME>", "postman", "<NAME>", "plumber"]
people_patterns = list(nlp.pipe(people))
peopleMatcher = PhraseMatcher(nlp.vocab)
peopleMatcher.add("PEOPLE", [*people_patterns])
for match_id, start, end in peopleMatcher(doc):
peopleSpan = Span(doc, start, end, label="PEOPLE")
return peopleSpan
greet_visitors = Greet_Visitors()
greet_visitors.subscribe_greet()
rospy.spin()
|
[
"rospy.Subscriber",
"json.loads",
"rospy.Publisher",
"json.dumps",
"spacy.tokens.Span",
"spacy.load",
"rospy.init_node",
"rospy.spin",
"spacy.matcher.PhraseMatcher"
] |
[((3704, 3716), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (3714, 3716), False, 'import rospy\n'), ((301, 334), 'rospy.init_node', 'rospy.init_node', (['"""Greet_Visitors"""'], {}), "('Greet_Visitors')\n", (316, 334), False, 'import rospy\n'), ((358, 425), 'rospy.Publisher', 'rospy.Publisher', (['"""/hri/tts_input"""', 'String'], {'queue_size': '(1)', 'latch': '(True)'}), "('/hri/tts_input', String, queue_size=1, latch=True)\n", (373, 425), False, 'import rospy\n'), ((524, 589), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/hri/greet_input"""', 'String', 'self.greet_callback'], {}), "('/hri/greet_input', String, self.greet_callback)\n", (540, 589), False, 'import rospy\n'), ((619, 690), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/hri/location_input"""', 'String', 'self.loaction_callback'], {}), "('/hri/location_input', String, self.loaction_callback)\n", (635, 690), False, 'import rospy\n'), ((1040, 1063), 'json.dumps', 'json.dumps', (['dictWrapper'], {}), '(dictWrapper)\n', (1050, 1063), False, 'import json\n'), ((1108, 1178), 'rospy.Publisher', 'rospy.Publisher', (['"""/hri/greet_output"""', 'String'], {'queue_size': '(1)', 'latch': '(True)'}), "('/hri/greet_output', String, queue_size=1, latch=True)\n", (1123, 1178), False, 'import rospy\n'), ((1556, 1579), 'json.dumps', 'json.dumps', (['dictWrapper'], {}), '(dictWrapper)\n', (1566, 1579), False, 'import json\n'), ((1624, 1697), 'rospy.Publisher', 'rospy.Publisher', (['"""/hri/location_output"""', 'String'], {'queue_size': '(1)', 'latch': '(True)'}), "('/hri/location_output', String, queue_size=1, latch=True)\n", (1639, 1697), False, 'import rospy\n'), ((2051, 2069), 'json.dumps', 'json.dumps', (['string'], {}), '(string)\n', (2061, 2069), False, 'import json\n'), ((2124, 2139), 'json.loads', 'json.loads', (['obj'], {}), '(obj)\n', (2134, 2139), False, 'import json\n'), ((2660, 2688), 'spacy.load', 'spacy.load', (['"""en_core_web_sm"""'], {}), "('en_core_web_sm')\n", (2670, 2688), False, 'import 
spacy\n'), ((2866, 2890), 'spacy.matcher.PhraseMatcher', 'PhraseMatcher', (['nlp.vocab'], {}), '(nlp.vocab)\n', (2879, 2890), False, 'from spacy.matcher import PhraseMatcher\n'), ((3198, 3226), 'spacy.load', 'spacy.load', (['"""en_core_web_sm"""'], {}), "('en_core_web_sm')\n", (3208, 3226), False, 'import spacy\n'), ((3402, 3426), 'spacy.matcher.PhraseMatcher', 'PhraseMatcher', (['nlp.vocab'], {}), '(nlp.vocab)\n', (3415, 3426), False, 'from spacy.matcher import PhraseMatcher\n'), ((3072, 3107), 'spacy.tokens.Span', 'Span', (['doc', 'start', 'end'], {'label': '"""ROOM"""'}), "(doc, start, end, label='ROOM')\n", (3076, 3107), False, 'from spacy.tokens import Span\n'), ((3566, 3603), 'spacy.tokens.Span', 'Span', (['doc', 'start', 'end'], {'label': '"""PEOPLE"""'}), "(doc, start, end, label='PEOPLE')\n", (3570, 3603), False, 'from spacy.tokens import Span\n')]
|
# This script is to extract any files inside of a .unitypackage file.
# Please make sure you only use this on .unitypackage files you own.
# This will create a folder with the exact same name as the input file.
# Have fun!
# Used for creating the temp folder name.
from hashlib import md5
# Uncompressing .unityasset files.
import tarfile
# Gnarly file handling stuff.
from pathlib import Path
from shutil import copy2, rmtree
# Getting input from the user.
import argparse
from sys import exit
parser = argparse.ArgumentParser()
parser.add_argument('file', action='store', help='Path to the .unityasset file')
# parser.add_argument('-o', '--override', metavar='folder', action='store', default=None, help='Override default export location')
args = parser.parse_args()
unity_file = Path(args.file)
# There is probably a better way to just get the name of a file, but I'm lazy and want this to work.
unity_file_name = args.file.split('.')[:-1]
unity_file_name = '.'.join(unity_file_name)
# Does file exist?
if not unity_file.exists():
print(f'File "{unity_file}" does not exist!')
exit(1)
# Is file Valid? (Doesn't actually check magic nor the contents of the tar archive. This is just a dumb check)
if not unity_file.name.split('.')[-1] == 'unitypackage':
print(f'File "{unity_file.name}" is not valid.')
exit(1)
# Make target directory.
# Check if the target directory exists.
new_directory = Path(unity_file_name)
if new_directory.exists():
# We don't want to overwrite a previous export.
print('The target directory exists... Could it be possible that you\'ve already exported this asset file?')
exit(1)
new_directory.mkdir(0o755, parents=True, exist_ok=False)
print('Initialized working environment.') # We just created the folders and checked some stuff, it's not like anything fancy really happened.
# Generate the temp directory name and pathlib object. This is an MD5 hash of the input filename.
tmp_dir_name = md5(unity_file_name.encode()).hexdigest()
tmp_dir = Path(tmp_dir_name)
# Extract to the temp directory.
unity_tar_file = tarfile.open(unity_file, mode='r:gz')
unity_tar_file.extractall(tmp_dir)
print(f'Read and extracted "{unity_file_name}"')
index=0
print('Processing extracted files...')
# Iterate through the directories.
for asset_directory in tmp_dir.iterdir():
index=index+1
# Generate links to asset elements known to be present.
asset_file = Path(asset_directory, 'asset')
# Check if it exists.
if not asset_file.exists():
# No need to make a fuss if it doesn't. Just continue.
continue
# Get original filename and directory structure.
asset_path_name = Path(asset_directory, 'pathname')
with open(asset_path_name, 'r') as f:
pathname = f.read().split('/')
new_asset_name = pathname[-1]
new_dir_name = '/'.join(pathname[:-1])
new_dir = Path(new_directory, new_dir_name)
new_dir.mkdir(mode=0o755, parents=True, exist_ok=True)
print(f'Found and copied {index} files...', end='\r')
copy2(asset_file, Path(new_dir, new_asset_name))
print('\nDone processing the extracted files.')
rmtree(tmp_dir)
print('Cleaned up environment, enjoy!')
|
[
"argparse.ArgumentParser",
"pathlib.Path",
"tarfile.open",
"shutil.rmtree",
"sys.exit"
] |
[((504, 529), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (527, 529), False, 'import argparse\n'), ((783, 798), 'pathlib.Path', 'Path', (['args.file'], {}), '(args.file)\n', (787, 798), False, 'from pathlib import Path\n'), ((1401, 1422), 'pathlib.Path', 'Path', (['unity_file_name'], {}), '(unity_file_name)\n', (1405, 1422), False, 'from pathlib import Path\n'), ((1983, 2001), 'pathlib.Path', 'Path', (['tmp_dir_name'], {}), '(tmp_dir_name)\n', (1987, 2001), False, 'from pathlib import Path\n'), ((2052, 2089), 'tarfile.open', 'tarfile.open', (['unity_file'], {'mode': '"""r:gz"""'}), "(unity_file, mode='r:gz')\n", (2064, 2089), False, 'import tarfile\n'), ((3040, 3055), 'shutil.rmtree', 'rmtree', (['tmp_dir'], {}), '(tmp_dir)\n', (3046, 3055), False, 'from shutil import copy2, rmtree\n'), ((1083, 1090), 'sys.exit', 'exit', (['(1)'], {}), '(1)\n', (1087, 1090), False, 'from sys import exit\n'), ((1311, 1318), 'sys.exit', 'exit', (['(1)'], {}), '(1)\n', (1315, 1318), False, 'from sys import exit\n'), ((1609, 1616), 'sys.exit', 'exit', (['(1)'], {}), '(1)\n', (1613, 1616), False, 'from sys import exit\n'), ((2385, 2415), 'pathlib.Path', 'Path', (['asset_directory', '"""asset"""'], {}), "(asset_directory, 'asset')\n", (2389, 2415), False, 'from pathlib import Path\n'), ((2606, 2639), 'pathlib.Path', 'Path', (['asset_directory', '"""pathname"""'], {}), "(asset_directory, 'pathname')\n", (2610, 2639), False, 'from pathlib import Path\n'), ((2795, 2828), 'pathlib.Path', 'Path', (['new_directory', 'new_dir_name'], {}), '(new_directory, new_dir_name)\n', (2799, 2828), False, 'from pathlib import Path\n'), ((2959, 2988), 'pathlib.Path', 'Path', (['new_dir', 'new_asset_name'], {}), '(new_dir, new_asset_name)\n', (2963, 2988), False, 'from pathlib import Path\n')]
|
#!/usr/bin/env python3
"""
Cross validator
Usage:
crossvalidator.py (--model=<model>) [--tub=<tub1,tub2,..tubn>] [--type=(linear|categorical)] [--output=<csv-filename>]
Options:
-h --help Show this screen.
--tub TUBPATHS List of paths to tubs. Comma separated. Use quotes to use wildcards. ie "~/tubs/*"
--type TYPE Either categorical or linear [default: 'linear']
--output CSVFILE Csv filename
"""
import os
from docopt import docopt
import math
import donkeycar as dk
import statistics
import numpy as np
import csv
from donkeycar.parts.keras import KerasCategorical, KerasLinear
from donkeycar.parts.datastore import TubGroup
def print_mean(l, message):
mean = statistics.mean(l)
stdev = statistics.stdev(l)
print(message, " mean: ", mean, " and standard dev: ", stdev)
def print_lse(correct, estimates, message):
print(message, " lse: ", lse(correct, estimates))
def lse(correct, estimates):
sum = 0.0
for index in range(len(correct)):
c = correct[index]
e = estimates[index]
sum += math.pow(c - e, 2)
return sum / len(correct)
def validate(model_path=None, tub_names=None, model_type='linear', output=None):
print("Using a model of type: ", model_type)
if model_type == "categorical":
kl = KerasCategorical()
elif model_type == "linear":
kl = KerasLinear()
if model_path:
kl.load(model_path)
print('tub_names', tub_names)
tubgroup = TubGroup(tub_names)
# See Also: ShowPredictionPlots
if not output:
output = model_path + ".validator.csv"
print('saving to output file: ', output)
with open(output, 'w') as csvfile:
w = csv.writer(csvfile, delimiter=',',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
w.writerow(['Angle', 'Angle estimate', 'Angle error', 'Throttle', 'Throttle estimate', 'Throttle error'])
for tub in tubgroup.tubs:
num_records = tub.get_num_records()
print('cross validation set size: %d' % num_records)
correct_angles = []
correct_throttles = []
estimate_angles = []
estimate_throttles = []
error_angles = []
error_throttles = []
for iRec in tub.get_index(shuffled=False):
record = tub.get_record(iRec)
img = record["cam/image_array"]
user_angle = float(record["user/angle"])
user_throttle = float(record["user/throttle"])
pilot_angle, pilot_throttle = kl.run(img)
correct_angles.append(user_angle)
correct_throttles.append(user_throttle)
estimate_angles.append(pilot_angle.item())
estimate_throttles.append(pilot_throttle.item())
error_angle = user_angle - pilot_angle.item()
error_angles.append(error_angle)
error_throttle = user_throttle - pilot_throttle.item()
error_throttles.append(error_throttle)
w.writerow([user_angle, pilot_angle, error_angle, user_throttle, pilot_throttle, error_throttle])
print_mean(correct_angles, "Correct angle")
print_mean(estimate_angles, "Estimate angle")
print_mean(correct_throttles, "Correct throttle")
print_mean(estimate_throttles, "Estimate throttle")
print_mean(error_angles, "Error angle")
print_mean(error_throttles, "Error throttle")
print_lse(correct_angles, estimate_angles, "Angle LSE")
print_lse(correct_throttles, estimate_throttles, "Throttle LSE")
if __name__ == '__main__':
args = docopt(__doc__)
validate(model_path=args['--model'], tub_names=args['--tub'], model_type=args['--type'], output=args['--output'])
|
[
"csv.writer",
"math.pow",
"docopt.docopt",
"statistics.stdev",
"donkeycar.parts.keras.KerasLinear",
"statistics.mean",
"donkeycar.parts.datastore.TubGroup",
"donkeycar.parts.keras.KerasCategorical"
] |
[((718, 736), 'statistics.mean', 'statistics.mean', (['l'], {}), '(l)\n', (733, 736), False, 'import statistics\n'), ((749, 768), 'statistics.stdev', 'statistics.stdev', (['l'], {}), '(l)\n', (765, 768), False, 'import statistics\n'), ((1494, 1513), 'donkeycar.parts.datastore.TubGroup', 'TubGroup', (['tub_names'], {}), '(tub_names)\n', (1502, 1513), False, 'from donkeycar.parts.datastore import TubGroup\n'), ((3737, 3752), 'docopt.docopt', 'docopt', (['__doc__'], {}), '(__doc__)\n', (3743, 3752), False, 'from docopt import docopt\n'), ((1087, 1105), 'math.pow', 'math.pow', (['(c - e)', '(2)'], {}), '(c - e, 2)\n', (1095, 1105), False, 'import math\n'), ((1317, 1335), 'donkeycar.parts.keras.KerasCategorical', 'KerasCategorical', ([], {}), '()\n', (1333, 1335), False, 'from donkeycar.parts.keras import KerasCategorical, KerasLinear\n'), ((1715, 1791), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quotechar': '"""|"""', 'quoting': 'csv.QUOTE_MINIMAL'}), "(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)\n", (1725, 1791), False, 'import csv\n'), ((1382, 1395), 'donkeycar.parts.keras.KerasLinear', 'KerasLinear', ([], {}), '()\n', (1393, 1395), False, 'from donkeycar.parts.keras import KerasCategorical, KerasLinear\n')]
|
###################################################
# Copyright (c) 2019 #
# Authors: @iArunava <<EMAIL>> #
# @AvivSham <<EMAIL>> #
# #
# License: BSD License 3.0 #
# #
# The Code in this file is distributed for free #
# usage and modification with proper linkage back #
# to this repository. #
###################################################
import torch
import torch.nn as nn
class InitialBlock(nn.Module):
def __init__ (self,in_channels = 3,out_channels = 13):
super().__init__()
self.maxpool = nn.MaxPool2d(kernel_size=2,
stride = 2,
padding = 0)
self.conv = nn.Conv2d(in_channels,
out_channels,
kernel_size = 3,
stride = 2,
padding = 1)
self.prelu = nn.PReLU(16)
self.batchnorm = nn.BatchNorm2d(out_channels)
def forward(self, x):
main = self.conv(x)
main = self.batchnorm(main)
side = self.maxpool(x)
x = torch.cat((main, side), dim=1)
x = self.prelu(x)
return x
|
[
"torch.nn.PReLU",
"torch.nn.Conv2d",
"torch.cat",
"torch.nn.BatchNorm2d",
"torch.nn.MaxPool2d"
] |
[((720, 768), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)', 'padding': '(0)'}), '(kernel_size=2, stride=2, padding=0)\n', (732, 768), True, 'import torch.nn as nn\n'), ((872, 944), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels'], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)'}), '(in_channels, out_channels, kernel_size=3, stride=2, padding=1)\n', (881, 944), True, 'import torch.nn as nn\n'), ((1103, 1115), 'torch.nn.PReLU', 'nn.PReLU', (['(16)'], {}), '(16)\n', (1111, 1115), True, 'import torch.nn as nn\n'), ((1142, 1170), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_channels'], {}), '(out_channels)\n', (1156, 1170), True, 'import torch.nn as nn\n'), ((1334, 1364), 'torch.cat', 'torch.cat', (['(main, side)'], {'dim': '(1)'}), '((main, side), dim=1)\n', (1343, 1364), False, 'import torch\n')]
|
# Generated by Django 3.1.4 on 2021-01-05 20:37
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
('seller', '0003_auto_20210105_2035'),
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.TextField(blank=True)),
('price', models.DecimalField(decimal_places=2, max_digits=50)),
('title', models.CharField(max_length=255)),
('uuid', models.UUIDField(default=uuid.uuid4, editable=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('seller', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='seller.seller')),
],
),
]
|
[
"django.db.models.TextField",
"django.db.models.CharField",
"django.db.models.DateTimeField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.DecimalField",
"django.db.models.UUIDField"
] |
[((395, 488), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (411, 488), False, 'from django.db import migrations, models\n'), ((519, 547), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (535, 547), False, 'from django.db import migrations, models\n'), ((576, 628), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(50)'}), '(decimal_places=2, max_digits=50)\n', (595, 628), False, 'from django.db import migrations, models\n'), ((657, 689), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (673, 689), False, 'from django.db import migrations, models\n'), ((717, 769), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)'}), '(default=uuid.uuid4, editable=False)\n', (733, 769), False, 'from django.db import migrations, models\n'), ((803, 842), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (823, 842), False, 'from django.db import migrations, models\n'), ((876, 911), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (896, 911), False, 'from django.db import migrations, models\n'), ((941, 1028), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""seller.seller"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'seller.seller')\n", (958, 1028), False, 'from django.db import migrations, models\n')]
|
import os
import click
ROOT_DIRECTORY = os.path.abspath(
os.path.join(os.path.dirname(__file__), os.pardir)
)
PLUGIN_DIRECTORY = os.path.join(ROOT_DIRECTORY, 'detectem/plugins')
PLUGIN_DIRECTORIES = [
d for d in os.listdir(PLUGIN_DIRECTORY)
if os.path.isdir(os.path.join(PLUGIN_DIRECTORY, d)) and d != '__pycache__'
]
@click.command()
@click.option(
'--matcher',
type=click.Choice(['url', 'body', 'header', 'xpath']),
required=True,
help='Set the matcher type.',
)
@click.option(
'--category',
type=click.Choice(PLUGIN_DIRECTORIES),
required=True,
help='Set plugin category.',
)
@click.argument('name')
def main(name, category, matcher):
create_plugin_file(name, category, matcher)
create_test_file(name, matcher)
def create_plugin_file(name, category, matcher):
plugin_template = '''
from detectem.plugin import Plugin
class {title}Plugin(Plugin):
name = '{name}'
homepage = ''
matchers = [
{{'{matcher}': 'Plugin signature v(?P<version>[0-9\.]+)'}},
]
"""
js_matchers = [
{{'check': '', 'version': ''}},
]
"""
'''.format(name=name, title=name.title(), matcher=matcher).lstrip()
plugin_filename = name + '.py'
plugin_filepath = os.path.join(
PLUGIN_DIRECTORY, category, plugin_filename
)
if os.path.exists(plugin_filepath):
raise FileExistsError('Plugin file already exists.')
with open(plugin_filepath, mode='w') as f:
f.write(plugin_template)
print('Created plugin file at {}'.format(plugin_filepath))
def create_test_file(name, matcher):
test_template = '''
- plugin: {name}
matches:
- {matcher}:
version:
'''.format(name=name, matcher=matcher).lstrip()
test_filename = name + '.yml'
test_filepath = os.path.join(
ROOT_DIRECTORY, 'tests', 'plugins', 'fixtures', test_filename
)
if os.path.exists(test_filepath):
raise FileExistsError('Test file already exists.')
with open(test_filepath, mode='w') as f:
f.write(test_template)
print('Created test file at {}'.format(test_filepath))
if __name__ == "__main__":
main()
|
[
"click.argument",
"os.path.dirname",
"os.path.exists",
"click.command",
"click.Choice",
"os.path.join",
"os.listdir"
] |
[((135, 183), 'os.path.join', 'os.path.join', (['ROOT_DIRECTORY', '"""detectem/plugins"""'], {}), "(ROOT_DIRECTORY, 'detectem/plugins')\n", (147, 183), False, 'import os\n'), ((335, 350), 'click.command', 'click.command', ([], {}), '()\n', (348, 350), False, 'import click\n'), ((628, 650), 'click.argument', 'click.argument', (['"""name"""'], {}), "('name')\n", (642, 650), False, 'import click\n'), ((1250, 1307), 'os.path.join', 'os.path.join', (['PLUGIN_DIRECTORY', 'category', 'plugin_filename'], {}), '(PLUGIN_DIRECTORY, category, plugin_filename)\n', (1262, 1307), False, 'import os\n'), ((1330, 1361), 'os.path.exists', 'os.path.exists', (['plugin_filepath'], {}), '(plugin_filepath)\n', (1344, 1361), False, 'import os\n'), ((1798, 1873), 'os.path.join', 'os.path.join', (['ROOT_DIRECTORY', '"""tests"""', '"""plugins"""', '"""fixtures"""', 'test_filename'], {}), "(ROOT_DIRECTORY, 'tests', 'plugins', 'fixtures', test_filename)\n", (1810, 1873), False, 'import os\n'), ((1896, 1925), 'os.path.exists', 'os.path.exists', (['test_filepath'], {}), '(test_filepath)\n', (1910, 1925), False, 'import os\n'), ((76, 101), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (91, 101), False, 'import os\n'), ((222, 250), 'os.listdir', 'os.listdir', (['PLUGIN_DIRECTORY'], {}), '(PLUGIN_DIRECTORY)\n', (232, 250), False, 'import os\n'), ((392, 440), 'click.Choice', 'click.Choice', (["['url', 'body', 'header', 'xpath']"], {}), "(['url', 'body', 'header', 'xpath'])\n", (404, 440), False, 'import click\n'), ((539, 571), 'click.Choice', 'click.Choice', (['PLUGIN_DIRECTORIES'], {}), '(PLUGIN_DIRECTORIES)\n', (551, 571), False, 'import click\n'), ((272, 305), 'os.path.join', 'os.path.join', (['PLUGIN_DIRECTORY', 'd'], {}), '(PLUGIN_DIRECTORY, d)\n', (284, 305), False, 'import os\n')]
|
import torch
import torch.nn as nn
from utils.tools import transpose1323
from my_packages.DepthProjection.models.HG_model import HGModel
class DepthProjectionModule(nn.Module):
def __init__(self):
super(DepthProjectionModule, self).__init__()
self.model = HGModel("my_packages/DepthProjection/pretrained/best_generalization_net_G.pth")
def forward(self, input):
input = transpose1323(input)
data1 = self.model(input[0:1])
data2 = self.model(input[1:2])
p = torch.mean(torch.stack([data1, data2]), dim=0)
p = torch.squeeze(p[0])
return p
|
[
"my_packages.DepthProjection.models.HG_model.HGModel",
"utils.tools.transpose1323",
"torch.stack",
"torch.squeeze"
] |
[((278, 357), 'my_packages.DepthProjection.models.HG_model.HGModel', 'HGModel', (['"""my_packages/DepthProjection/pretrained/best_generalization_net_G.pth"""'], {}), "('my_packages/DepthProjection/pretrained/best_generalization_net_G.pth')\n", (285, 357), False, 'from my_packages.DepthProjection.models.HG_model import HGModel\n'), ((405, 425), 'utils.tools.transpose1323', 'transpose1323', (['input'], {}), '(input)\n', (418, 425), False, 'from utils.tools import transpose1323\n'), ((575, 594), 'torch.squeeze', 'torch.squeeze', (['p[0]'], {}), '(p[0])\n', (588, 594), False, 'import torch\n'), ((527, 554), 'torch.stack', 'torch.stack', (['[data1, data2]'], {}), '([data1, data2])\n', (538, 554), False, 'import torch\n')]
|
import urllib.parse as urlparse
from urllib.parse import parse_qs
import utils.helpers as hp
import pandas as pd
import model.queries as qrs
import view.templates as tmpl
import numpy as np
import plotly.graph_objects as go
import plotly as py
import plotly.express as px
from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot
import dash_table
import dash
import dash_core_components as dcc
import dash_bootstrap_components as dbc
import dash_html_components as html
from view.problematic_pairs import ProblematicPairsPage
from model.DataLoader import GeneralDataLoader
class PairPlotsPage():
indx_dict = {'ps_packetloss': 'Packet loss', 'ps_owd': 'One-way delay',
'ps_retransmits': 'Retransmits', 'ps_throughput': 'Throughput'}
def __init__(self):
self.parent = ProblematicPairsPage()
self.root_parent = GeneralDataLoader()
def getData(self, src, dest):
time_list = hp.GetTimeRanges(self.root_parent.dateFrom, self.root_parent.dateTo)
df = pd.DataFrame(qrs.queryAllValues(self._idx, src, dest, time_list))
df.rename(columns={hp.getValueField(self._idx): 'value'}, inplace=True)
if len(df) > 0:
df['log_value'] = np.log(df['value'].replace(0, np.nan))
df['sqrt'] = df['value']**(1/2)
return df
def buildGraph(self, df, host_src, host_dest):
fig = go.Figure()
title = f'{self.indx_dict[self._idx]}: {host_src} ⇒ {host_dest}'
title = title if len(title)<80 else "<br>".join([f'{self.indx_dict[self._idx]}: ', f'{host_src} ⇒ {host_dest}'])
if len(df) > 0:
df = df.sort_values('timestamp', ascending=False)
df['dt'] = pd.to_datetime(df['timestamp'], unit='ms')
fig.add_trace(go.Scatter(x=df['dt'], y=df['value'],
mode='markers',
marker=dict(
color='navy'),
name='measures',
yaxis="y1"),
)
fig.add_trace(go.Scatter(x=df['dt'], y=df['sqrt'],
mode='markers',
marker=dict(
color='#F03A47'),
name='sqrt',
yaxis="y2",
visible='legendonly'),
)
fig.add_trace(go.Scatter(x=df['dt'], y=df['log_value'],
mode='markers',
marker=dict(
color='#00BCD4'),
name='log',
yaxis="y3",
visible='legendonly'),
)
fig.update_layout(
xaxis=dict(
domain=[0.05, 0.9]
),
yaxis1=dict(
title="measures",
anchor="free",
side="left",
position=0.05,
titlefont=dict(
color="navy"
),
tickfont=dict(
color="navy"
)
),
yaxis2=dict(
title="sqrt",
anchor="x",
overlaying="y",
side="right",
titlefont=dict(
color="#F03A47"
),
tickfont=dict(
color="#F03A47"
),
),
yaxis3=dict(
title="log",
anchor="free",
overlaying="y",
side="right",
position=0.98,
titlefont=dict(
color="#00BCD4"
),
tickfont=dict(
color="#00BCD4"
),
)
)
fig.update_layout(title=title,
template = 'plotly_white')
else:
fig.update_layout(title=title,
template = 'plotly_white',
annotations = [
{
"text": "No data found",
"xref": "paper",
"yref": "paper",
"showarrow": False,
"font": {
"size": 18
}
}
])
return fig
    def defaultLayout(self):
        """Build the default search panel: three dropdowns (index, source,
        destination), a PLOT button and per-column result counters.

        The src/dest dropdowns start empty and are populated by callbacks
        once an index has been selected.
        """
        return html.Div([
            dbc.Row([
                dbc.Col([
                    # Column labels above the dropdowns.
                    dbc.Row([
                        dbc.Col(html.Label('Index:'), width=3, className='dd-fields'),
                        dbc.Col(html.Label('Source:'), width=3, className='dd-fields'),
                        dbc.Col(html.Label('Destination:'), width=3, className='dd-fields'),
                        dbc.Col(width=1),
                    ], no_gutters=True, justify="center", className='dd-field-names'),
                    # Dropdowns plus the PLOT button.
                    dbc.Row([
                        dbc.Col(dcc.Dropdown(
                            id='idx-dropdown',
                            options=[{'label':idx, 'value':idx} for idx in hp.INDECES],
                        ), width=3, className='dd-fields'),
                        dbc.Col(dcc.Dropdown(
                            id='src-dropdown',
                            placeholder="First select an index",
                        ), width=3, className='dd-fields'),
                        dbc.Col(dcc.Dropdown(
                            id='dest-dropdown',
                            placeholder="First select an index",
                        ), width=3, className='dd-fields'),
                        # Pattern-matching id so one callback can serve
                        # plot buttons coming from several pages.
                        dbc.Col(dbc.Button('Plot'.upper(),
                                    id={
                                        'type': 'plot',
                                        'index': 'default'
                                    }, className='plot-input-button', n_clicks=0), width=1),
                    ], align="center", no_gutters=True, justify="center"),
                    # Counters: how many pairs / sources / destinations matched.
                    dbc.Row([
                        dbc.Col(html.P(id='total-pairs', className='dd-count'), width=3, className='dd-fields'),
                        dbc.Col(html.P(id='total-srcs', className='dd-count'), width=3, className='dd-fields'),
                        dbc.Col(html.P(id='total-dests', className='dd-count'), width=4, className='dd-fields'),
                    ], no_gutters=True, justify="center"),
                ], width=12, className='fields-wrapper'),
            ], justify="center", className='dd-container boxwithshadow'),
        ])
def phraseProblem(self, ptype, idx):
if ptype == 'high_sigma' or ptype == 'all_packets_lost':
phrase = 'overall'
elif ptype == 'has_bursts':
phrase = 'periods of'
if idx == 'ps_throughput':
return (f'The pair shows {phrase} low throughout')
if idx == 'ps_retransmits':
return (f'The pair shows {phrase} high number of retransmitted packages')
if idx == 'ps_owd':
return (f'The pair shows {phrase} high latency')
if idx == 'ps_packetloss':
return (f'The pair shows {phrase} high packet loss')
    def createCards(self):
        """Build the three header cards for the pair page.

        Returns a tuple of three dbc.Col elements: (1) the issue found
        for the currently viewed index, (2) source/destination host
        details, (3) any other issues recorded for the same pair.
        """
        other_issues_div = html.Div('None', className="card-text")
        # Only build issue texts if this (src, dest) pair appears in the
        # precomputed problems table at all.
        if self.parent.problems[['src', 'dest']].isin({'src': [self._src], 'dest': [self._dest]}).any().all():
            data = self.parent.problems[(self.parent.problems['src']==self._src) &
                              (self.parent.problems['dest']==self._dest)].set_index('idx').to_dict('index')
            # Problem-type flags that warrant a descriptive sentence.
            watch4 = ['high_sigma', 'all_packets_lost', 'has_bursts']
            '''Store the sentences in a dictionary'''
            ddict = {}
            no_issues = []
            for idx in data:
                for k, v in data[idx].items():
                    if k in watch4 and v == 1:
                        ddict[idx] = {'text':self.phraseProblem(k, idx), 'avg':data[idx]['value']}
                # No flagged problem for this index: show a "None found" entry.
                if idx not in ddict:
                    ddict[idx] = {'text':f'{self.parent.indx_dict[idx]}: None found', 'avg':data[idx]['value']}
            '''Search for other problems for the same pair and show them. Otherwise return None'''
            other_indeces = [item for item in ddict.keys() if item != self._idx]
            if len(other_indeces) > 0:
                other_issues_div = html.Div([
                        html.Div([
                            html.Div(ddict[item]['text'], className="card-text"),
                            html.H3(f"{int(round(ddict[item]['avg'], 0))} {hp.getValueUnit(item)}", className="card-text")
                        ]) for item in other_indeces
                    ])
            itext = html.Div(ddict[self._idx]['text'], className="card-text")
            ival = html.H2(f"{int(round(ddict[self._idx]['avg'], 0))} {hp.getValueUnit(self._idx)}", className="card-text")
        else:
            itext = html.Div('None found', className="card-text")
            ival = html.Div(className="card-text")
        # TOFIX: the case when dest -> src exists in problems is not covered
        # Look up host metadata for both endpoints by IP.
        src = self.root_parent.all_df[(self.root_parent.all_df['ip']==self._src)]
        dest = self.root_parent.all_df[(self.root_parent.all_df['ip']==self._dest)]
        # NOTE: this returns a *tuple* of three dbc.Col elements, which the
        # caller splices into a dbc.Row.
        return dbc.Col(
                html.Div([
                    html.H2('Issue for this type of measure', className="card-title"),
                    itext,
                    ival
                ], className='issue ppage-header'), width=3), dbc.Col(
                dbc.Row([
                  dbc.Col([
                    html.Div([
                        html.H2('SOURCE', className="card-title"),
                        html.Div(src['host'].values, className="card-text"),
                        html.Div(src['ip'].values, className="card-text"),
                        html.Div(src['site'].values, className="card-text")
                    ], className='src-issue ppage-header'),
                  ], width=6, className='issue-wrapper src'),dbc.Col(
                    html.Div([
                        html.H2('DESTINATION', className="card-title"),
                        html.Div(dest['host'].values, className="card-text"),
                        html.Div(dest['ip'].values, className="card-text"),
                        html.Div(dest['site'].values, className="card-text")
                    ], className='dest-issue ppage-header'), width=6, className='issue-wrapper dest')
                ], justify="center", align="center", className='issue-wrapper')
             , width=6), dbc.Col(
                html.Div([
                    html.H2('Other issues for the same pair', className="card-title"),
                    other_issues_div
                ], className='other-issue ppage-header')
             , width=3)
    def specificPairLayout(self, url):
        """Build the page for a single pair described by the URL query.

        Expects the query string to contain src, dest, idx, src_host and
        dest_host. Renders the issue header cards plus two plots: the
        requested direction and the reversed one.
        """
        data = parse_qs(urlparse.urlparse(url).query)
        # self._src and self._dest keep the original (IP) values from the URL
        self._src = data['src'][0]
        self._dest = data['dest'][0]
        self._idx = data['idx'][0]
        host_src = data['src_host'][0]
        host_dest = data['dest_host'][0]
        # Query both directions so the user can compare them side by side.
        pair = self.getData(self._src, self._dest)
        reversed_pair = self.getData(self._dest, self._src)
        return html.Div([
            dbc.Row(
                self.createCards(), className='issue-header boxwithshadow', no_gutters=True, justify='center'
            ),
            dbc.Row([
                dbc.Col(
                    html.Div([
                        dcc.Graph(figure=self.buildGraph(pair, host_src, host_dest))
                    ], className='pair-plot boxwithshadow')
                ),
                dbc.Col(
                    html.Div([
                        dcc.Graph(figure=self.buildGraph(reversed_pair, host_dest, host_src))
                    ], className='pair-plot boxwithshadow')
                )
            ], className='page-cont')
        ])
|
[
"utils.helpers.getValueField",
"utils.helpers.GetTimeRanges",
"dash_html_components.H2",
"dash_html_components.Div",
"plotly.graph_objects.Figure",
"dash_html_components.Label",
"view.problematic_pairs.ProblematicPairsPage",
"dash_bootstrap_components.Col",
"model.DataLoader.GeneralDataLoader",
"pandas.to_datetime",
"utils.helpers.getValueUnit",
"dash_core_components.Dropdown",
"dash_html_components.P",
"urllib.parse.urlparse",
"model.queries.queryAllValues"
] |
[((818, 840), 'view.problematic_pairs.ProblematicPairsPage', 'ProblematicPairsPage', ([], {}), '()\n', (838, 840), False, 'from view.problematic_pairs import ProblematicPairsPage\n'), ((868, 887), 'model.DataLoader.GeneralDataLoader', 'GeneralDataLoader', ([], {}), '()\n', (885, 887), False, 'from model.DataLoader import GeneralDataLoader\n'), ((944, 1012), 'utils.helpers.GetTimeRanges', 'hp.GetTimeRanges', (['self.root_parent.dateFrom', 'self.root_parent.dateTo'], {}), '(self.root_parent.dateFrom, self.root_parent.dateTo)\n', (960, 1012), True, 'import utils.helpers as hp\n'), ((1395, 1406), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (1404, 1406), True, 'import plotly.graph_objects as go\n'), ((8067, 8106), 'dash_html_components.Div', 'html.Div', (['"""None"""'], {'className': '"""card-text"""'}), "('None', className='card-text')\n", (8075, 8106), True, 'import dash_html_components as html\n'), ((1040, 1091), 'model.queries.queryAllValues', 'qrs.queryAllValues', (['self._idx', 'src', 'dest', 'time_list'], {}), '(self._idx, src, dest, time_list)\n', (1058, 1091), True, 'import model.queries as qrs\n'), ((1712, 1754), 'pandas.to_datetime', 'pd.to_datetime', (["df['timestamp']"], {'unit': '"""ms"""'}), "(df['timestamp'], unit='ms')\n", (1726, 1754), True, 'import pandas as pd\n'), ((9602, 9659), 'dash_html_components.Div', 'html.Div', (["ddict[self._idx]['text']"], {'className': '"""card-text"""'}), "(ddict[self._idx]['text'], className='card-text')\n", (9610, 9659), True, 'import dash_html_components as html\n'), ((9818, 9863), 'dash_html_components.Div', 'html.Div', (['"""None found"""'], {'className': '"""card-text"""'}), "('None found', className='card-text')\n", (9826, 9863), True, 'import dash_html_components as html\n'), ((9883, 9914), 'dash_html_components.Div', 'html.Div', ([], {'className': '"""card-text"""'}), "(className='card-text')\n", (9891, 9914), True, 'import dash_html_components as html\n'), ((11965, 11987), 
'urllib.parse.urlparse', 'urlparse.urlparse', (['url'], {}), '(url)\n', (11982, 11987), True, 'import urllib.parse as urlparse\n'), ((1120, 1147), 'utils.helpers.getValueField', 'hp.getValueField', (['self._idx'], {}), '(self._idx)\n', (1136, 1147), True, 'import utils.helpers as hp\n'), ((9731, 9757), 'utils.helpers.getValueUnit', 'hp.getValueUnit', (['self._idx'], {}), '(self._idx)\n', (9746, 9757), True, 'import utils.helpers as hp\n'), ((10244, 10309), 'dash_html_components.H2', 'html.H2', (['"""Issue for this type of measure"""'], {'className': '"""card-title"""'}), "('Issue for this type of measure', className='card-title')\n", (10251, 10309), True, 'import dash_html_components as html\n'), ((11691, 11756), 'dash_html_components.H2', 'html.H2', (['"""Other issues for the same pair"""'], {'className': '"""card-title"""'}), "('Other issues for the same pair', className='card-title')\n", (11698, 11756), True, 'import dash_html_components as html\n'), ((9304, 9356), 'dash_html_components.Div', 'html.Div', (["ddict[item]['text']"], {'className': '"""card-text"""'}), "(ddict[item]['text'], className='card-text')\n", (9312, 9356), True, 'import dash_html_components as html\n'), ((11093, 11139), 'dash_html_components.H2', 'html.H2', (['"""DESTINATION"""'], {'className': '"""card-title"""'}), "('DESTINATION', className='card-title')\n", (11100, 11139), True, 'import dash_html_components as html\n'), ((11173, 11225), 'dash_html_components.Div', 'html.Div', (["dest['host'].values"], {'className': '"""card-text"""'}), "(dest['host'].values, className='card-text')\n", (11181, 11225), True, 'import dash_html_components as html\n'), ((11259, 11309), 'dash_html_components.Div', 'html.Div', (["dest['ip'].values"], {'className': '"""card-text"""'}), "(dest['ip'].values, className='card-text')\n", (11267, 11309), True, 'import dash_html_components as html\n'), ((11343, 11395), 'dash_html_components.Div', 'html.Div', (["dest['site'].values"], {'className': '"""card-text"""'}), 
"(dest['site'].values, className='card-text')\n", (11351, 11395), True, 'import dash_html_components as html\n'), ((5328, 5344), 'dash_bootstrap_components.Col', 'dbc.Col', ([], {'width': '(1)'}), '(width=1)\n', (5335, 5344), True, 'import dash_bootstrap_components as dbc\n'), ((10577, 10618), 'dash_html_components.H2', 'html.H2', (['"""SOURCE"""'], {'className': '"""card-title"""'}), "('SOURCE', className='card-title')\n", (10584, 10618), True, 'import dash_html_components as html\n'), ((10652, 10703), 'dash_html_components.Div', 'html.Div', (["src['host'].values"], {'className': '"""card-text"""'}), "(src['host'].values, className='card-text')\n", (10660, 10703), True, 'import dash_html_components as html\n'), ((10737, 10786), 'dash_html_components.Div', 'html.Div', (["src['ip'].values"], {'className': '"""card-text"""'}), "(src['ip'].values, className='card-text')\n", (10745, 10786), True, 'import dash_html_components as html\n'), ((10820, 10871), 'dash_html_components.Div', 'html.Div', (["src['site'].values"], {'className': '"""card-text"""'}), "(src['site'].values, className='card-text')\n", (10828, 10871), True, 'import dash_html_components as html\n'), ((5044, 5064), 'dash_html_components.Label', 'html.Label', (['"""Index:"""'], {}), "('Index:')\n", (5054, 5064), True, 'import dash_html_components as html\n'), ((5139, 5160), 'dash_html_components.Label', 'html.Label', (['"""Source:"""'], {}), "('Source:')\n", (5149, 5160), True, 'import dash_html_components as html\n'), ((5235, 5261), 'dash_html_components.Label', 'html.Label', (['"""Destination:"""'], {}), "('Destination:')\n", (5245, 5261), True, 'import dash_html_components as html\n'), ((5523, 5620), 'dash_core_components.Dropdown', 'dcc.Dropdown', ([], {'id': '"""idx-dropdown"""', 'options': "[{'label': idx, 'value': idx} for idx in hp.INDECES]"}), "(id='idx-dropdown', options=[{'label': idx, 'value': idx} for\n idx in hp.INDECES])\n", (5535, 5620), True, 'import dash_core_components as dcc\n'), ((5796, 
5864), 'dash_core_components.Dropdown', 'dcc.Dropdown', ([], {'id': '"""src-dropdown"""', 'placeholder': '"""First select an index"""'}), "(id='src-dropdown', placeholder='First select an index')\n", (5808, 5864), True, 'import dash_core_components as dcc\n'), ((6046, 6115), 'dash_core_components.Dropdown', 'dcc.Dropdown', ([], {'id': '"""dest-dropdown"""', 'placeholder': '"""First select an index"""'}), "(id='dest-dropdown', placeholder='First select an index')\n", (6058, 6115), True, 'import dash_core_components as dcc\n'), ((6782, 6828), 'dash_html_components.P', 'html.P', ([], {'id': '"""total-pairs"""', 'className': '"""dd-count"""'}), "(id='total-pairs', className='dd-count')\n", (6788, 6828), True, 'import dash_html_components as html\n'), ((6903, 6948), 'dash_html_components.P', 'html.P', ([], {'id': '"""total-srcs"""', 'className': '"""dd-count"""'}), "(id='total-srcs', className='dd-count')\n", (6909, 6948), True, 'import dash_html_components as html\n'), ((7023, 7069), 'dash_html_components.P', 'html.P', ([], {'id': '"""total-dests"""', 'className': '"""dd-count"""'}), "(id='total-dests', className='dd-count')\n", (7029, 7069), True, 'import dash_html_components as html\n'), ((9441, 9462), 'utils.helpers.getValueUnit', 'hp.getValueUnit', (['item'], {}), '(item)\n', (9456, 9462), True, 'import utils.helpers as hp\n')]
|
from tkinter import *
from chatbot import Botler
BG_COLOR = "#272727"
TEXT_COLOR = "#FAFAFA"
FONT = "Helvetica 14"
FONT_BOLD = "Helvetica 13 bold"
class ChatApplication:
    """Tkinter front end for the Botler chatbot."""

    def __init__(self):
        """Build the window and create the chatbot backend."""
        self._init_window()
        self.chat = Botler()

    def _init_window(self):
        """Create and lay out every widget of the chat window."""
        self.window = Tk()
        self.window.title("Botler the Butler")
        self.window.resizable(width=False, height=False)
        self.window.configure(width=420, height=720, bg=BG_COLOR)
        # Conversation history: read-only except while a message is inserted.
        self.text_widget = Text(self.window, width=20, height=2, bg=BG_COLOR,
                                fg=TEXT_COLOR, font=FONT, padx=5, pady=5)
        self.text_widget.place(relheight=0.9, relwidth=1)
        self.text_widget.configure(cursor="arrow", state=DISABLED)
        # Scrollbar attached to the conversation history.
        scroll = Scrollbar(self.text_widget)
        scroll.place(relheight=1, relx=0.974)
        scroll.configure(command=self.text_widget.yview)
        # Bottom strip hosting the entry box and the send button.
        bottom = Label(self.window, bg=BG_COLOR, height=80)
        bottom.place(relwidth=1, rely=0.9)
        # Message entry; Return submits, as does the button below.
        self.msg_entry = Entry(bottom, bg=BG_COLOR, fg=TEXT_COLOR, font=FONT)
        self.msg_entry.place(relwidth=0.74, relheight=0.04, rely=0.008, relx=0.011)
        self.msg_entry.focus()
        self.msg_entry.bind("<Return>", self._on_enter_pressed)
        send_btn = Button(bottom, text="Send", font=FONT_BOLD, width=20,
                          bg=BG_COLOR,
                          command=lambda: self._on_enter_pressed(None))
        send_btn.place(relx=0.77, rely=0.008, relheight=0.04, relwidth=0.22)

    def run(self):
        """Enter the Tk main loop (blocks until the window is closed)."""
        self.window.mainloop()

    def _on_enter_pressed(self, event):
        """Handle Return/Send: echo the user's message, then the bot reply."""
        msg = self.msg_entry.get()
        if not msg:
            return
        self._insert_message(msg, "You")
        reply = self.chat.generate_response(msg.lower())
        self._insert_message(reply, self.chat.name)

    def _insert_message(self, msg, sender):
        """Append one '<sender>: <msg>' line to the history and clear the entry."""
        if not msg:
            return
        self.msg_entry.delete(0, END)
        self.text_widget.configure(state=NORMAL)
        self.text_widget.insert(END, f"{sender}: {msg}\n\n")
        self.text_widget.configure(state=DISABLED)
        self.text_widget.see(END)
|
[
"chatbot.Botler"
] |
[((332, 340), 'chatbot.Botler', 'Botler', ([], {}), '()\n', (338, 340), False, 'from chatbot import Botler\n')]
|
import logging
from pylons import request, response, session, tmpl_context as c
from pylons.controllers.util import abort
# added for auth
from authkit.authorize.pylons_adaptors import authorize
from authkit.permissions import RemoteUser, ValidAuthKitUser, UserIn
from pypesvds.lib.base import BaseController, render
log = logging.getLogger(__name__)
class IndexController(BaseController):
    """Controller serving the landing page and the sign-out page."""

    @authorize(ValidAuthKitUser())
    def index(self):
        """Render the main page; any valid AuthKit user may view it."""
        return render('/pypesvds.mako')

    def signout(self):
        """Render the sign-in page shown after logging out."""
        return render('/signin.html')
|
[
"pypesvds.lib.base.render",
"authkit.permissions.ValidAuthKitUser",
"logging.getLogger"
] |
[((327, 354), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (344, 354), False, 'import logging\n'), ((573, 597), 'pypesvds.lib.base.render', 'render', (['"""/pypesvds.mako"""'], {}), "('/pypesvds.mako')\n", (579, 597), False, 'from pypesvds.lib.base import BaseController, render\n'), ((410, 428), 'authkit.permissions.ValidAuthKitUser', 'ValidAuthKitUser', ([], {}), '()\n', (426, 428), False, 'from authkit.permissions import RemoteUser, ValidAuthKitUser, UserIn\n'), ((637, 659), 'pypesvds.lib.base.render', 'render', (['"""/signin.html"""'], {}), "('/signin.html')\n", (643, 659), False, 'from pypesvds.lib.base import BaseController, render\n')]
|
import json
import pytest
from toucan_connectors.google_analytics.google_analytics_connector import (
GoogleAnalyticsConnector, GoogleAnalyticsDataSource)
def test_google_analytics(mocker):
    """Check that the connector turns a canned Reporting API response
    into a DataFrame of the expected shape, with the credentials, the
    API client and the query call all mocked out.
    """
    gac = GoogleAnalyticsConnector(
        type="GoogleAnalytics",
        name="Test",
        credentials={
            "type": "test",
            "project_id": "test",
            "private_key_id": "test",
            "private_key": "test",
            "client_email": "test",
            "client_id": "test",
            "auth_uri": "test",
            "token_uri": "test",
            "auth_provider_x509_cert_url": "test",
            "client_x509_cert_url": "test"
        }
    )
    gads = GoogleAnalyticsDataSource(
        name="Test", domain="test",
        report_request={
            "viewId": "0123456789",
            "dateRanges": [
                {"startDate": "2018-06-01", "endDate": "2018-07-01"}
            ]
        })
    # FIX: close the fixture file deterministically instead of leaking
    # the handle from json.load(open(...)).
    with open('tests/google_analytics/fixtures/reports.json') as fixture_file:
        fixture = json.load(fixture_file)
    module = 'toucan_connectors.google_analytics.google_analytics_connector'
    mocker.patch(f'{module}.ServiceAccountCredentials.from_json_keyfile_dict')
    mocker.patch(f'{module}.build')
    mocker.patch(f'{module}.get_query_results').return_value = fixture['reports'][0]
    df = gac.get_df(gads)
    # 3 rows × 11 columns expected from the canned report.
    assert df.shape == (3, 11)
@pytest.mark.skip(reason="This uses a live instance")
def test_live_instance():
    """Integration test against a real Google Analytics view.

    Skipped by default: the credentials below are intentionally blank
    placeholders and must be filled in before running this manually.
    """
    gac = GoogleAnalyticsConnector(
        type="GoogleAnalytics",
        name="Test",
        credentials={
            "type": "",
            "project_id": "",
            "private_key_id": "",
            "private_key": "",
            "client_email": "",
            "client_id": "",
            "auth_uri": "",
            "token_uri": "",
            "auth_provider_x509_cert_url": "",
            "client_x509_cert_url": ""
        }
    )
    gads = GoogleAnalyticsDataSource(
        name="Test", domain="test",
        report_request={
            "viewId": "119151898",
            "pageSize": 100,
            # sortOrder is templated and filled from `parameters` below.
            "orderBys": [
                {
                    "fieldName": "ga:date",
                    "orderType": "VALUE",
                    "sortOrder": "%(sortOrder)s"
                }
            ],
            "dimensions": [
                {"name": "ga:hostname"},
                {"name": "ga:date"},
                {"name": "ga:dimension1"},
                {"name": "ga:deviceCategory"},
                {"name": "ga:eventLabel"}
            ],
            "dateRanges": [
                {"startDate": "2018-06-01", "endDate": "2018-07-01"}
            ],
            "metrics": [
                {"expression": "ga:sessions"},
                {"expression": "ga:sessionDuration"}
            ]
        },
        parameters={'sortOrder': 'DESCENDING'}
    )
    df = gac.get_df(gads)
    assert df.shape == (230, 11)
|
[
"toucan_connectors.google_analytics.google_analytics_connector.GoogleAnalyticsDataSource",
"pytest.mark.skip",
"toucan_connectors.google_analytics.google_analytics_connector.GoogleAnalyticsConnector"
] |
[((1363, 1415), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""This uses a live instance"""'}), "(reason='This uses a live instance')\n", (1379, 1415), False, 'import pytest\n'), ((208, 544), 'toucan_connectors.google_analytics.google_analytics_connector.GoogleAnalyticsConnector', 'GoogleAnalyticsConnector', ([], {'type': '"""GoogleAnalytics"""', 'name': '"""Test"""', 'credentials': "{'type': 'test', 'project_id': 'test', 'private_key_id': 'test',\n 'private_key': 'test', 'client_email': 'test', 'client_id': 'test',\n 'auth_uri': 'test', 'token_uri': 'test', 'auth_provider_x509_cert_url':\n 'test', 'client_x509_cert_url': 'test'}"}), "(type='GoogleAnalytics', name='Test', credentials={\n 'type': 'test', 'project_id': 'test', 'private_key_id': 'test',\n 'private_key': 'test', 'client_email': 'test', 'client_id': 'test',\n 'auth_uri': 'test', 'token_uri': 'test', 'auth_provider_x509_cert_url':\n 'test', 'client_x509_cert_url': 'test'})\n", (232, 544), False, 'from toucan_connectors.google_analytics.google_analytics_connector import GoogleAnalyticsConnector, GoogleAnalyticsDataSource\n'), ((700, 873), 'toucan_connectors.google_analytics.google_analytics_connector.GoogleAnalyticsDataSource', 'GoogleAnalyticsDataSource', ([], {'name': '"""Test"""', 'domain': '"""test"""', 'report_request': "{'viewId': '0123456789', 'dateRanges': [{'startDate': '2018-06-01',\n 'endDate': '2018-07-01'}]}"}), "(name='Test', domain='test', report_request={\n 'viewId': '0123456789', 'dateRanges': [{'startDate': '2018-06-01',\n 'endDate': '2018-07-01'}]})\n", (725, 873), False, 'from toucan_connectors.google_analytics.google_analytics_connector import GoogleAnalyticsConnector, GoogleAnalyticsDataSource\n'), ((1452, 1744), 'toucan_connectors.google_analytics.google_analytics_connector.GoogleAnalyticsConnector', 'GoogleAnalyticsConnector', ([], {'type': '"""GoogleAnalytics"""', 'name': '"""Test"""', 'credentials': "{'type': '', 'project_id': '', 'private_key_id': '', 'private_key': 
'',\n 'client_email': '', 'client_id': '', 'auth_uri': '', 'token_uri': '',\n 'auth_provider_x509_cert_url': '', 'client_x509_cert_url': ''}"}), "(type='GoogleAnalytics', name='Test', credentials={\n 'type': '', 'project_id': '', 'private_key_id': '', 'private_key': '',\n 'client_email': '', 'client_id': '', 'auth_uri': '', 'token_uri': '',\n 'auth_provider_x509_cert_url': '', 'client_x509_cert_url': ''})\n", (1476, 1744), False, 'from toucan_connectors.google_analytics.google_analytics_connector import GoogleAnalyticsConnector, GoogleAnalyticsDataSource\n'), ((1904, 2479), 'toucan_connectors.google_analytics.google_analytics_connector.GoogleAnalyticsDataSource', 'GoogleAnalyticsDataSource', ([], {'name': '"""Test"""', 'domain': '"""test"""', 'report_request': "{'viewId': '119151898', 'pageSize': 100, 'orderBys': [{'fieldName':\n 'ga:date', 'orderType': 'VALUE', 'sortOrder': '%(sortOrder)s'}],\n 'dimensions': [{'name': 'ga:hostname'}, {'name': 'ga:date'}, {'name':\n 'ga:dimension1'}, {'name': 'ga:deviceCategory'}, {'name':\n 'ga:eventLabel'}], 'dateRanges': [{'startDate': '2018-06-01', 'endDate':\n '2018-07-01'}], 'metrics': [{'expression': 'ga:sessions'}, {\n 'expression': 'ga:sessionDuration'}]}", 'parameters': "{'sortOrder': 'DESCENDING'}"}), "(name='Test', domain='test', report_request={\n 'viewId': '119151898', 'pageSize': 100, 'orderBys': [{'fieldName':\n 'ga:date', 'orderType': 'VALUE', 'sortOrder': '%(sortOrder)s'}],\n 'dimensions': [{'name': 'ga:hostname'}, {'name': 'ga:date'}, {'name':\n 'ga:dimension1'}, {'name': 'ga:deviceCategory'}, {'name':\n 'ga:eventLabel'}], 'dateRanges': [{'startDate': '2018-06-01', 'endDate':\n '2018-07-01'}], 'metrics': [{'expression': 'ga:sessions'}, {\n 'expression': 'ga:sessionDuration'}]}, parameters={'sortOrder':\n 'DESCENDING'})\n", (1929, 2479), False, 'from toucan_connectors.google_analytics.google_analytics_connector import GoogleAnalyticsConnector, GoogleAnalyticsDataSource\n')]
|
#! /usr/bin/env python3
#
# Authors: <NAME>, <NAME>, <NAME>
# (c) 2021
from pathlib import Path
from datetime import datetime
import os.path
import requests
import yaml
import json
from pyspectator.processor import Cpu
from crontab import CronTab
import sys
# constants: keys expected in the YAML config file
CONFIG_FILE = 'config.yaml'
MAX_CPU_TEMP = 'maxCpuTemp'
CHECK_INTERVAL = 'checkInterval'
TELEGRAM_CHAT = 'telegramChatID'
TELEGRAM_API = 'telegramApiUrl'
TELEGRAM_TOKEN = 'telegramToken'
# initialize main variables
maxCpuTemp = None
checkInterval = None
telegramChatID = None
telegramToken = None
time = str(datetime.now())  # timestamp used as the log-entry key
log = {}
warnings = []
warningMessage = ''
codePath = str(Path(__file__).parent.absolute()) + '/'
if os.path.isfile(codePath + CONFIG_FILE):
    # read config file
    try:
        with open(codePath + CONFIG_FILE, 'r') as yamlFile:
            # SafeLoader is sufficient for a plain key/value config and
            # does not execute arbitrary tags (CLoader was unsafe here).
            config = yaml.load(yamlFile, Loader=yaml.SafeLoader)
            maxCpuTemp = config.get(MAX_CPU_TEMP)
            checkInterval = config.get(CHECK_INTERVAL)
            telegramChatID = config.get(TELEGRAM_CHAT)
            telegramToken = config.get(TELEGRAM_TOKEN)
    except BaseException as err:
        print('Error:', err)
else:
    sys.exit('config file missing')
# In case something went wrong, fall back to sane defaults
# (isinstance(None, float) is False, so the None check is implied).
if not isinstance(maxCpuTemp, float):
    maxCpuTemp = 80.0
if not isinstance(checkInterval, int):
    checkInterval = 10
# Telegram settings are mandatory: without them the script cannot report.
if not isinstance(telegramChatID, str) or not isinstance(telegramToken, str):
    sys.exit('telegram config missing')
# update cronjob, if the user has changed interval time
myCron = CronTab(user=True)
intTime = '*/' + str(checkInterval)
for job in myCron:
    if job.comment == 'hardwareCheck' and str(job.minute) != intTime:
        job.minute.every(checkInterval)
        myCron.write()
# read cpu-temperature
cpu = Cpu(monitoring_latency=1)
temperature = cpu.temperature
log['cpu-temp'] = temperature
# check if cpu-temperature exceeds max
if temperature > maxCpuTemp:
    warnings.append('Temperature is too high: ' +
                    str(temperature) + ' (max: ' + str(maxCpuTemp) + ')')
# save data to logfile (read, merge, rewrite in place)
try:
    with open(codePath + 'log.json', 'r+') as logFile:
        data = json.load(logFile)
        data.update({time: log})
        logFile.seek(0)
        json.dump(data, logFile, indent=2, ensure_ascii=False)
except BaseException as err:
    print('Error:', err)
# write telegram message
if len(warnings) > 0:
    warnings.insert(0, 'Your Computer has occurred a problem:')
    warningMessage = '\n'.join(warnings)
    send_text = ('https://api.telegram.org/' + telegramToken +
                 '/sendMessage?chat_id=' + telegramChatID +
                 '&parse_mode=Markdown&text=' + warningMessage)
    try:
        response = requests.get(send_text)
    except requests.exceptions.RequestException as err:
        # FIX: `except requests.exceptions` caught a *module*, which
        # raises "catching classes that do not inherit from
        # BaseException" at runtime; RequestException is the real base.
        print('Error:', err)
|
[
"json.dump",
"yaml.load",
"pyspectator.processor.Cpu",
"json.load",
"crontab.CronTab",
"pathlib.Path",
"requests.get",
"datetime.datetime.now",
"sys.exit"
] |
[((1902, 1920), 'crontab.CronTab', 'CronTab', ([], {'user': '(True)'}), '(user=True)\n', (1909, 1920), False, 'from crontab import CronTab\n'), ((2139, 2164), 'pyspectator.processor.Cpu', 'Cpu', ([], {'monitoring_latency': '(1)'}), '(monitoring_latency=1)\n', (2142, 2164), False, 'from pyspectator.processor import Cpu\n'), ((581, 595), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (593, 595), False, 'from datetime import datetime\n'), ((1309, 1340), 'sys.exit', 'sys.exit', (['"""config file missing"""'], {}), "('config file missing')\n", (1317, 1340), False, 'import sys\n'), ((1800, 1835), 'sys.exit', 'sys.exit', (['"""telegram config missing"""'], {}), "('telegram config missing')\n", (1808, 1835), False, 'import sys\n'), ((2507, 2525), 'json.load', 'json.load', (['logFile'], {}), '(logFile)\n', (2516, 2525), False, 'import json\n'), ((2591, 2645), 'json.dump', 'json.dump', (['data', 'logFile'], {'indent': '(2)', 'ensure_ascii': '(False)'}), '(data, logFile, indent=2, ensure_ascii=False)\n', (2600, 2645), False, 'import json\n'), ((3052, 3075), 'requests.get', 'requests.get', (['send_text'], {}), '(send_text)\n', (3064, 3075), False, 'import requests\n'), ((852, 892), 'yaml.load', 'yaml.load', (['yamlFile'], {'Loader': 'yaml.CLoader'}), '(yamlFile, Loader=yaml.CLoader)\n', (861, 892), False, 'import yaml\n'), ((655, 669), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (659, 669), False, 'from pathlib import Path\n')]
|
from bs4 import BeautifulSoup
# fix: InsecureRequestWarning: Unverified HTTPS request is being made to host
import requests.packages.urllib3
# 測試table
# import prettytable as pt
# Bank of Taiwan real-time exchange-rate page (traditional-Chinese UI).
url = "https://rate.bot.com.tw/xrt?Lang=zh-TW"
# Browser-like request headers. NOTE(review): 'herders' is presumably a
# typo for 'headers'; kept because the name is referenced below.
herders = {
    'User-Agent': 'Mozilla/5.0 (Macintosh Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.101 Safari/537.36'
}
# Currencies shown when no explicit currency is requested.
defaultCurrency = ["美金 (USD)", "日圓 (JPY)", "英鎊 (GBP)", "人民幣 (CNY)", "歐元 (EUR)"]
# Full list of currency labels exactly as they appear in the BoT table.
allCurrency = [
    "美金 (USD)",
    "港幣 (HKD)",
    "英鎊 (GBP)",
    "澳幣 (AUD)",
    "加拿大幣 (CAD)",
    "新加坡幣 (SGD)",
    "瑞士法郎 (CHF)",
    "日圓 (JPY)",
    "南非幣 (ZAR)",
    "瑞典幣 (SEK)",
    "紐元 (NZD)",
    "泰幣 (THB)",
    "菲國比索 (PHP)",
    "印尼幣 (IDR)",
    "歐元 (EUR)",
    "韓元 (KRW)",
    "越南盾 (VND)",
    "馬來幣 (MYR)",
    "人民幣 (CNY)",
]
def getBoTExchange(msg=""):
    """Fetch the Bank of Taiwan exchange-rate table.

    Args:
        msg: optional currency filter, e.g. "USD" or "美金"; matched
            (case-insensitively for the code) against the known labels
            in `allCurrency`. When empty, the default currency list is
            returned.

    Returns:
        dict mapping currency label to
        [cash buy, cash sell, spot buy, spot sell] strings,
        or None when the currency is unknown or scraping fails.
    """
    try:
        if msg != "":
            msg = msg.upper()
            # FIX: `found` must be initialized here — previously an
            # unknown currency raised UnboundLocalError, silently
            # swallowed by the old bare `except`.
            found = False
            for label in allCurrency:
                if msg in label:
                    msg = label
                    found = True
                    break
            if not found:
                return None
        # suppress InsecureRequestWarning since verify=False is used below
        requests.packages.urllib3.disable_warnings()
        res = requests.get(url, headers=herders, verify=False)
        res.encoding = 'UTF-8'
        soup = BeautifulSoup(res.text, "lxml")
        # Quote timestamp — parsed but currently unused by callers.
        time = soup.find(
            "span", class_="time").text.strip()
        queryResult = {}
        for row in soup.select("table.table tbody tr"):
            cells = row.select("td")
            currency = row.select("td div.visible-phone")[0].text.strip()
            # td[1..4] = cash buy, cash sell, spot buy, spot sell.
            queryResult[currency] = [cells[i].text.strip() for i in range(1, 5)]
        if msg == "":
            # No filter: return only the default set of currencies.
            result = {d: queryResult[d] for d in defaultCurrency}
        else:
            result = {msg: queryResult[msg]}
        return result if len(result) > 0 else None
    except Exception:
        # Narrowed from a bare `except`: network/parse problems → None.
        return None
def toMsg(source=None):
    """Format an exchange-rate dict (as returned by getBoTExchange)
    into a plain-text table followed by the source URL.

    Args:
        source: dict mapping currency label to
            [cash buy, cash sell, spot buy, spot sell] strings.

    Returns:
        The formatted message string, or None when source is None or
        malformed (mirrors the original "return None on error").
    """
    if source is None:
        return None
    try:
        lines = ["|幣別\t\t|即期買\t|即期賣\t|"]
        for currency, rates in source.items():
            # Only the spot rates (index 2 and 3) are shown.
            lines.append("|%s | %s | %s |" % (currency, rates[2], rates[3]))
        # str.join instead of repeated += (quadratic string build).
        return "\n".join(lines) + "\nhttps://rate.bot.com.tw/xrt?Lang=zh-TW"
    except (TypeError, IndexError, KeyError, AttributeError):
        return None
if __name__ == "__main__":
    # Manual smoke test: fetch live rates and print the formatted table.
    print(toMsg(getBoTExchange()))
|
[
"bs4.BeautifulSoup"
] |
[((1347, 1378), 'bs4.BeautifulSoup', 'BeautifulSoup', (['res.text', '"""lxml"""'], {}), "(res.text, 'lxml')\n", (1360, 1378), False, 'from bs4 import BeautifulSoup\n')]
|
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
from datetime import datetime, timedelta
import time
import io
import logging
from crea.blockchain import Blockchain
from crea.block import Block
from crea.account import Account
from crea.amount import Amount
from crea.witness import Witness
from creabase import operations
from crea.transactionbuilder import TransactionBuilder
from creagraphenebase.account import PasswordKey, PrivateKey, PublicKey
from crea.crea import Crea
from crea.utils import parse_time, formatTimedelta
from creaapi.exceptions import NumRetriesReached
from crea.nodelist import NodeList
from creabase.transactions import getBlockParams
# Module-level logger; basicConfig enables INFO output for the demo run.
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
# example wif — placeholder for the signing key; never commit a real key
wif = "<KEY>"
if __name__ == "__main__":
stm_online = Crea()
ref_block_num, ref_block_prefix = getBlockParams(stm_online)
print("ref_block_num %d - ref_block_prefix %d" % (ref_block_num, ref_block_prefix))
stm = Crea(offline=True)
op = operations.Transfer({'from': 'creabot',
'to': 'holger80',
'amount': "0.001 CBD",
'memo': ""})
tb = TransactionBuilder(crea_instance=stm)
tb.appendOps([op])
tb.appendWif(wif)
tb.constructTx(ref_block_num=ref_block_num, ref_block_prefix=ref_block_prefix)
tx = tb.sign(reconstruct_tx=False)
print(tx.json())
|
[
"creabase.operations.Transfer",
"logging.basicConfig",
"creabase.transactions.getBlockParams",
"crea.crea.Crea",
"crea.transactionbuilder.TransactionBuilder",
"logging.getLogger"
] |
[((741, 768), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (758, 768), False, 'import logging\n'), ((769, 808), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (788, 808), False, 'import logging\n'), ((884, 890), 'crea.crea.Crea', 'Crea', ([], {}), '()\n', (888, 890), False, 'from crea.crea import Crea\n'), ((929, 955), 'creabase.transactions.getBlockParams', 'getBlockParams', (['stm_online'], {}), '(stm_online)\n', (943, 955), False, 'from creabase.transactions import getBlockParams\n'), ((1055, 1073), 'crea.crea.Crea', 'Crea', ([], {'offline': '(True)'}), '(offline=True)\n', (1059, 1073), False, 'from crea.crea import Crea\n'), ((1084, 1181), 'creabase.operations.Transfer', 'operations.Transfer', (["{'from': 'creabot', 'to': 'holger80', 'amount': '0.001 CBD', 'memo': ''}"], {}), "({'from': 'creabot', 'to': 'holger80', 'amount':\n '0.001 CBD', 'memo': ''})\n", (1103, 1181), False, 'from creabase import operations\n'), ((1277, 1314), 'crea.transactionbuilder.TransactionBuilder', 'TransactionBuilder', ([], {'crea_instance': 'stm'}), '(crea_instance=stm)\n', (1295, 1314), False, 'from crea.transactionbuilder import TransactionBuilder\n')]
|
"""Script containing the abstract policy class."""
import numpy as np
import tensorflow as tf
from hbaselines.utils.tf_util import get_trainable_vars
from hbaselines.utils.tf_util import get_target_updates
class ActorCriticPolicy(object):
    """Base Actor Critic Policy.

    Attributes
    ----------
    sess : tf.compat.v1.Session
        the current TensorFlow session
    ob_space : gym.spaces.*
        the observation space of the environment
    ac_space : gym.spaces.*
        the action space of the environment
    co_space : gym.spaces.*
        the context space of the environment
    buffer_size : int
        the max number of transitions to store
    batch_size : int
        SGD batch size
    actor_lr : float
        actor learning rate
    critic_lr : float
        critic learning rate
    verbose : int
        the verbosity level: 0 none, 1 training information, 2 tensorflow debug
    tau : float
        target update rate
    gamma : float
        discount factor
    layer_norm : bool
        enable layer normalisation
    layers : list of int or None
        the size of the Neural network for the policy
    act_fun : tf.nn.*
        the activation function to use in the neural network
    use_huber : bool
        specifies whether to use the huber distance function as the loss for
        the critic. If set to False, the mean-squared error metric is used
        instead
    """

    def __init__(self,
                 sess,
                 ob_space,
                 ac_space,
                 co_space,
                 buffer_size,
                 batch_size,
                 actor_lr,
                 critic_lr,
                 verbose,
                 tau,
                 gamma,
                 layer_norm,
                 layers,
                 act_fun,
                 use_huber):
        """Instantiate the base policy object.

        Parameters
        ----------
        sess : tf.compat.v1.Session
            the current TensorFlow session
        ob_space : gym.spaces.*
            the observation space of the environment
        ac_space : gym.spaces.*
            the action space of the environment
        co_space : gym.spaces.*
            the context space of the environment
        buffer_size : int
            the max number of transitions to store
        batch_size : int
            SGD batch size
        actor_lr : float
            actor learning rate
        critic_lr : float
            critic learning rate
        verbose : int
            the verbosity level: 0 none, 1 training information, 2 tensorflow
            debug
        tau : float
            target update rate
        gamma : float
            discount factor
        layer_norm : bool
            enable layer normalisation
        layers : list of int or None
            the size of the Neural network for the policy
        act_fun : tf.nn.*
            the activation function to use in the neural network
        use_huber : bool
            specifies whether to use the huber distance function as the loss
            for the critic. If set to False, the mean-squared error metric is
            used instead
        """
        self.sess = sess
        self.ob_space = ob_space
        self.ac_space = ac_space
        self.co_space = co_space
        self.buffer_size = buffer_size
        self.batch_size = batch_size
        self.actor_lr = actor_lr
        self.critic_lr = critic_lr
        self.verbose = verbose
        self.layers = layers
        self.tau = tau
        self.gamma = gamma
        self.layer_norm = layer_norm
        self.act_fun = act_fun
        self.use_huber = use_huber
        # NOTE: a leftover debug statement (`print(locals())`) was removed
        # here; it dumped every constructor argument to stdout on each
        # instantiation.

    def initialize(self):
        """Initialize the policy.

        This is used at the beginning of training by the algorithm, after the
        model parameters have been initialized.
        """
        raise NotImplementedError

    def update(self, update_actor=True, **kwargs):
        """Perform a gradient update step.

        Parameters
        ----------
        update_actor : bool
            specifies whether to update the actor policy. The critic policy is
            still updated if this value is set to False.

        Returns
        -------
        float
            critic loss
        float
            actor loss
        """
        raise NotImplementedError

    def get_action(self, obs, context, apply_noise, random_actions, env_num=0):
        """Call the actor methods to compute policy actions.

        Parameters
        ----------
        obs : array_like
            the observation
        context : array_like or None
            the contextual term. Set to None if no context is provided by the
            environment.
        apply_noise : bool
            whether to add Gaussian noise to the output of the actor. Defaults
            to False
        random_actions : bool
            if set to True, actions are sampled randomly from the action space
            instead of being computed by the policy. This is used for
            exploration purposes.
        env_num : int
            the environment number. Used to handle situations when multiple
            parallel environments are being used.

        Returns
        -------
        array_like
            computed action by the policy
        """
        raise NotImplementedError

    def store_transition(self, obs0, context0, action, reward, obs1, context1,
                         done, is_final_step, env_num=0, evaluate=False):
        """Store a transition in the replay buffer.

        Parameters
        ----------
        obs0 : array_like
            the last observation
        context0 : array_like or None
            the last contextual term. Set to None if no context is provided by
            the environment.
        action : array_like
            the action
        reward : float
            the reward
        obs1 : array_like
            the current observation
        context1 : array_like or None
            the current contextual term. Set to None if no context is provided
            by the environment.
        done : float
            is the episode done
        is_final_step : bool
            whether the time horizon was met in the step corresponding to the
            current sample. This is used by the TD3 algorithm to augment the
            done mask.
        env_num : int
            the environment number. Used to handle situations when multiple
            parallel environments are being used.
        evaluate : bool
            whether the sample is being provided by the evaluation environment.
            If so, the data is not stored in the replay buffer.
        """
        raise NotImplementedError

    def get_td_map(self):
        """Return dict map for the summary (to be run in the algorithm)."""
        raise NotImplementedError

    @staticmethod
    def _get_obs(obs, context, axis=0):
        """Return the processed observation.

        If the contextual term is not None, this will look as follows:

                                      -----------------
                    processed_obs = | obs | context |
                                      -----------------

        Otherwise, this method simply returns the observation.

        Parameters
        ----------
        obs : array_like
            the original observation
        context : array_like or None
            the contextual term. Set to None if no context is provided by the
            environment.
        axis : int
            the axis to concatenate the observations and contextual terms by

        Returns
        -------
        array_like
            the processed observation
        """
        if context is not None and context[0] is not None:
            # For axis-0 concatenation the context is flattened so its shape
            # is compatible with a 1-D observation vector.
            context = context.flatten() if axis == 0 else context
            obs = np.concatenate((obs, context), axis=axis)
        return obs

    @staticmethod
    def _get_ob_dim(ob_space, co_space):
        """Return the processed observation dimension.

        If the context space is not None, it is included in the computation of
        this term.

        Parameters
        ----------
        ob_space : gym.spaces.*
            the observation space of the environment
        co_space : gym.spaces.*
            the context space of the environment

        Returns
        -------
        tuple
            the true observation dimension
        """
        ob_dim = ob_space.shape
        if co_space is not None:
            # Element-wise sum of the observation and context shapes.
            ob_dim = tuple(map(sum, zip(ob_dim, co_space.shape)))
        return ob_dim

    @staticmethod
    def _setup_target_updates(model_scope, target_scope, scope, tau, verbose):
        """Create the soft and initial target updates.

        The initial model parameters are assumed to be stored under the scope
        name "model", while the target policy parameters are assumed to be
        under the scope name "target".

        If an additional outer scope was provided when creating the policies,
        they can be passed under the `scope` parameter.

        Parameters
        ----------
        model_scope : str
            the scope of the model parameters
        target_scope : str
            the scope of the target parameters
        scope : str or None
            the outer scope, set to None if not available
        tau : float
            target update rate
        verbose : int
            the verbosity level: 0 none, 1 training information, 2 tensorflow
            debug

        Returns
        -------
        tf.Operation
            initial target updates, to match the target with the model
        tf.Operation
            soft target update operations
        """
        if scope is not None:
            model_scope = scope + '/' + model_scope
            target_scope = scope + '/' + target_scope
        return get_target_updates(
            get_trainable_vars(model_scope),
            get_trainable_vars(target_scope),
            tau, verbose)

    @staticmethod
    def _remove_fingerprint(val, ob_dim, fingerprint_dim, additional_dim):
        """Remove the fingerprint from the input.

        This is a hacky procedure to remove the fingerprint elements from the
        computation. The fingerprint elements are the last few elements of the
        observation dimension, before any additional concatenated observations
        (e.g. contexts or actions).

        Parameters
        ----------
        val : tf.Variable
            the original input
        ob_dim : int
            number of environmental observation elements
        fingerprint_dim : int
            number of fingerprint elements
        additional_dim : int
            number of additional elements that were added to the input variable

        Returns
        -------
        tf.Variable
            the input with the fingerprints zeroed out
        """
        # Element-wise mask: 1 keeps a component, 0 zeroes the fingerprint.
        return val * tf.constant([1.0] * (ob_dim - fingerprint_dim) +
                                 [0.0] * fingerprint_dim +
                                 [1.0] * additional_dim)
|
[
"hbaselines.utils.tf_util.get_trainable_vars",
"tensorflow.constant",
"numpy.concatenate"
] |
[((7880, 7921), 'numpy.concatenate', 'np.concatenate', (['(obs, context)'], {'axis': 'axis'}), '((obs, context), axis=axis)\n', (7894, 7921), True, 'import numpy as np\n'), ((9920, 9951), 'hbaselines.utils.tf_util.get_trainable_vars', 'get_trainable_vars', (['model_scope'], {}), '(model_scope)\n', (9938, 9951), False, 'from hbaselines.utils.tf_util import get_trainable_vars\n'), ((9965, 9997), 'hbaselines.utils.tf_util.get_trainable_vars', 'get_trainable_vars', (['target_scope'], {}), '(target_scope)\n', (9983, 9997), False, 'from hbaselines.utils.tf_util import get_trainable_vars\n'), ((10939, 11042), 'tensorflow.constant', 'tf.constant', (['([1.0] * (ob_dim - fingerprint_dim) + [0.0] * fingerprint_dim + [1.0] *\n additional_dim)'], {}), '([1.0] * (ob_dim - fingerprint_dim) + [0.0] * fingerprint_dim + \n [1.0] * additional_dim)\n', (10950, 11042), True, 'import tensorflow as tf\n')]
|
import numpy as np
import opt_prob
import scipy.optimize

# -- problem setup
name = '2.4 GOLDPR'
problem = opt_prob.Cons(name)

def cns(x):
    # scipy's SLSQP expects inequality constraints of the form g(x) >= 0,
    # hence the sign flip of the problem's native constraint values.
    flipped = -1.0 * np.array(problem.cns(x))
    return flipped.tolist()

# -- start optimization
midpoint = (np.array(problem.lb) + np.array(problem.ub)) / 2.0
x0 = midpoint.tolist()
bounds = list(zip(problem.lb, problem.ub))
ineq_cons = {'type': 'ineq', 'fun': cns}
method = 'SLSQP'
options = {'disp': True}
res = scipy.optimize.minimize(problem.obj, x0, method=method, bounds=bounds,
                        constraints=ineq_cons, options=options)
print(res)
|
[
"numpy.array",
"opt_prob.Cons"
] |
[((108, 127), 'opt_prob.Cons', 'opt_prob.Cons', (['name'], {}), '(name)\n', (121, 127), False, 'import opt_prob\n'), ((234, 254), 'numpy.array', 'np.array', (['problem.lb'], {}), '(problem.lb)\n', (242, 254), True, 'import numpy as np\n'), ((257, 277), 'numpy.array', 'np.array', (['problem.ub'], {}), '(problem.ub)\n', (265, 277), True, 'import numpy as np\n')]
|
from tacotron2.text.vi_number_and_units import normalize_vi
from random import randrange

print("Start generate vietnamese number strings")
out_path = "training_data/vietnamese_number.txt"
with open(out_path, 'w', encoding='utf-8') as out_file:
    # Emit 20k normalized spellings of random 10-digit numbers.
    for _ in range(20000):
        number = randrange(1000000000, 2000000000)
        out_file.write(normalize_vi(str(number)) + '\n')
print("done!")
|
[
"random.randrange"
] |
[((260, 293), 'random.randrange', 'randrange', (['(1000000000)', '(2000000000)'], {}), '(1000000000, 2000000000)\n', (269, 293), False, 'from random import randrange\n')]
|
import re
import nltk
# Toggle verbose debug output in tokenise()/regexReplace().
debug = False
# Running comma-separated trail of the replacement descriptions applied
# (appended to by regexReplace).
# NOTE(review): the name shadows the builtin `list`; renaming it would also
# require updating regexReplace(), which declares `global list`.
list = ''
def tokenise(doc):
    """Run NLTK named-entity recognition over *doc* and replace every detected
    entity string with its entity-category label (e.g. PERSON, GPE)."""
    tokens = nltk.word_tokenize(doc)
    tagged = nltk.pos_tag(tokens)
    chunked = nltk.ne_chunk(tagged)

    named_entities = []
    for subtree in chunked:
        # Named-entity chunks are Tree objects and expose .label().
        if not hasattr(subtree, 'label'):
            continue
        entity_name = ' '.join(leaf[0] for leaf in subtree.leaves())
        entity_type = subtree.label()
        named_entities.append((entity_name, entity_type))
        doc = doc.replace(entity_name, entity_type)

    if (debug) : print(named_entities)
    if (debug) : print('%-20s "%s"' % ('NER', doc))
    return doc
def regexReplace(str, token, desc, regex):
    """Replace every case-insensitive match of *regex* in *str* with *token*.

    Also appends *desc* to the module-level audit trail `list` and, when the
    module-level `debug` flag is set, prints the result whenever a
    substitution actually happened.
    """
    global list
    list = list + ', ' + desc
    # BUGFIX: the original code called re.sub(r, token, str, re.I), which
    # passes re.I (== 2) as the positional `count` argument -- capping the
    # number of replacements at 2 and never enabling IGNORECASE.  Compiling
    # the pattern with re.I and substituting without a count fixes both.
    pattern = re.compile(regex, re.I)
    cleanStr = pattern.sub(token, str)
    if (debug):
        if (str != cleanStr):
            print('%-20s "%s"' % (desc, cleanStr))
    return cleanStr
def replacePPI(str):
    """Redact personally-identifiable information in *str*.

    Applies a fixed chain of regex substitutions (via regexReplace), replacing
    emails, postcodes, card numbers, phone numbers, SSNs and account numbers
    with category tokens.  Order matters: the more specific patterns (cards,
    postcodes) run before the broad phone-number patterns.
    """
    str = regexReplace(str, '<EMAIL>', 'email address', '[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+')
    str = regexReplace(str, 'UKPOSTCODE', 'uk postcode', '(gir ?0aa|GIR ?0AA|[a-pr-uwyzA-PR-UWYZ]([0-9]{1,2}|([a-hk-yA-HK-Y][0-9]([0-9abehmnprv-yABEHMNPRV-Y])?)|[0-9][a-hjkps-uwA-HJKPS-UW]) ?[0-9][abd-hjlnp-uw-zABD-HJLNP-UW-Z]{2})')
    # Amex numbers look like US phone numbers
    str = regexReplace(str, 'CARDNUM', 'Amex', '3[47][0-9]{13}')
    str = regexReplace(str, 'CARDNUM', 'BCGlobal', '(6541|6556)[0-9]{12}')
    str = regexReplace(str, 'CARDNUM', 'Carte Blanche Card', '389[0-9]{11}')
    str = regexReplace(str, 'CARDNUM', 'Diners Club Card', '3(?:0[0-5]|[68][0-9])[0-9]{11}')
    str = regexReplace(str, 'CARDNUM', 'Discover Card', '65[4-9][0-9]{13}|64[4-9][0-9]{13}|6011[0-9]{12}|(622(?:12[6-9]|1[3-9][0-9]|[2-8][0-9][0-9]|9[01][0-9]|92[0-5])[0-9]{10})')
    str = regexReplace(str, 'CARDNUM', 'Insta Payment Card', '63[7-9][0-9]{13}')
    str = regexReplace(str, 'CARDNUM', 'JCB Card', '(?:2131|1800|35\d{3})\d{11}$')
    str = regexReplace(str, 'CARDNUM', 'KoreanLocalCard', '9[0-9]{15}')
    str = regexReplace(str, 'CARDNUM', 'Laser Card', '(6304|6706|6709|6771)[0-9]{12,15}')
    str = regexReplace(str, 'CARDNUM', 'Maestro Card', '(5018|5020|5038|6304|6759|6761|6763)[0-9]{8,15}')
    str = regexReplace(str, 'CARDNUM', 'Mastercard', '5[1-5][0-9]{14}')
    str = regexReplace(str, 'CARDNUM', 'Solo Card', '(6334|6767)[0-9]{12}|(6334|6767)[0-9]{14}|(6334|6767)[0-9]{15}')
    str = regexReplace(str, 'CARDNUM', 'Switch Card', '(4903|4905|4911|4936|6333|6759)[0-9]{12}|(4903|4905|4911|4936|6333|6759)[0-9]{14}|(4903|4905|4911|4936|6333|6759)[0-9]{15}|564182[0-9]{10}|564182[0-9]{12}|564182[0-9]{13}|633110[0-9]{10}|633110[0-9]{12}|633110[0-9]{13}')
    str = regexReplace(str, 'CARDNUM', 'Union Pay Card', '(62[0-9]{14,17})')
    str = regexReplace(str, 'CARDNUM', 'Visa Card', '4[0-9]{12}(?:[0-9]{3})?')
    str = regexReplace(str, 'CARDNUM', 'Visa Master Card', '(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14})')
    str = regexReplace(str, 'ZIPCODEUS' , 'zip code', '[0-9]{5}(-[0-9]{4})?')
    str = regexReplace(str, 'POSTCODECA', 'Canada postcode', '[abceghj-nprstvxyABCEGHJ-NPRSTVXY]{1}[0-9]{1}[abceghj-nprstv-zABCEGHJ-NPRSTV-Z]{1}[ ]?[0-9]{1}[abceghj-nprstv-zABCEGHJ-NPRSTV-Z]{1}[0-9]{1}')
    ### after all the more specific matches
    # Problem with chomping leading and training space
    str = regexReplace(str, ' USPHONE ', 'US phone', '(1?\W*([2-9][0-8][0-9])\W*([2-9][0-9]{2})\W*([0-9]{4})(\se?x?t?(\d*))?)')
    str = regexReplace(str, 'USPHONE', 'US phone', '(\s|^)(?:(?:\+?1\s*(?:[.-]\s*)?)?(?:\(\s*([2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9])\s*\)|([2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9]))\s*(?:[.-]\s*)?)?([2-9]1[02-9]|[2-9][02-9]1|[2-9][02-9]{2})\s*(?:[.-]\s*)?([0-9]{4})(?:\s*(?:#|x\.?|ext\.?|extension)\s*(\d+))?(\s|$)')
    str = regexReplace(str, 'SSN', 'ssn', '(?!219-09-9999|078-05-1120)(?!666|000|9\d{2})\d{3}-(?!00)\d{2}-(?!0{4})\d{4}')
    str = regexReplace(str, 'UKPHONE', 'uk phone', '(?:(?:\(?(?:0(?:0|11)\)?[\s-]?\(?|\+)44\)?[\s-]?(?:\(?0\)?[\s-]?)?)|(?:\(?0))(?:(?:\d{5}\)?[\s-]?\d{4,5})|(?:\d{4}\)?[\s-]?(?:\d{5}|\d{3}[\s-]?\d{3}))|(?:\d{3}\)?[\s-]?\d{3}[\s-]?\d{3,4})|(?:\d{2}\)?[\s-]?\d{4}[\s-]?\d{4}))(?:[\s-]?(?:x|ext\.?|\#)\d{3,4})?')
    # BUGFIX: the original pattern '\d{5-12' is malformed -- '{5-12' is not a
    # valid quantifier (and the brace is unclosed), so it matched the literal
    # text "{5-12" after one digit.  '\d{5,12}' matches 5-12 digit runs.
    str = regexReplace(str, 'ACCOUNTNO', 'account number', '\d{5,12}')
    return str
def getSubstituteText(key, type):
    """Return substitute text for a redacted token.

    Currently a stub that ignores both arguments and always returns the
    empty string.  NOTE(review): the parameter name `type` shadows the
    builtin.
    """
    return ""
|
[
"nltk.ne_chunk",
"nltk.pos_tag",
"re.sub",
"nltk.word_tokenize",
"re.compile"
] |
[((87, 110), 'nltk.word_tokenize', 'nltk.word_tokenize', (['doc'], {}), '(doc)\n', (105, 110), False, 'import nltk\n'), ((134, 161), 'nltk.pos_tag', 'nltk.pos_tag', (['tokenized_doc'], {}), '(tokenized_doc)\n', (146, 161), False, 'import nltk\n'), ((185, 216), 'nltk.ne_chunk', 'nltk.ne_chunk', (['tagged_sentences'], {}), '(tagged_sentences)\n', (198, 216), False, 'import nltk\n'), ((790, 807), 're.compile', 're.compile', (['regex'], {}), '(regex)\n', (800, 807), False, 'import re\n'), ((823, 850), 're.sub', 're.sub', (['r', 'token', 'str', 're.I'], {}), '(r, token, str, re.I)\n', (829, 850), False, 'import re\n')]
|
"""OmegaConfParser example."""
import fromconfig
import random
def random_hex() -> str:
    """Return the hexadecimal string form of the hash of a fresh random float."""
    sample = random.random()
    return hex(hash(sample))
if __name__ == "__main__":
    raw_config = {
        "host": "localhost",
        "port": "8008",
        "url": "${host}:${port}",
        "path": "models/${now:}/${random_hex:}",  # built-in `now` resolver + custom resolver
        "resolvers": {"random_hex": random_hex},  # make the custom resolver available
    }
    omega_parser = fromconfig.parser.OmegaConfParser()
    resolved = omega_parser(raw_config)
    print(resolved)
    assert resolved["url"] == "localhost:8008"
|
[
"random.random",
"fromconfig.parser.OmegaConfParser"
] |
[((452, 487), 'fromconfig.parser.OmegaConfParser', 'fromconfig.parser.OmegaConfParser', ([], {}), '()\n', (485, 487), False, 'import fromconfig\n'), ((111, 126), 'random.random', 'random.random', ([], {}), '()\n', (124, 126), False, 'import random\n')]
|
"""This module holds functions for miri data trending
All functions in this module are tailored for the miri datatrending application.
Detailed descriptions are given for every function individually.
-------
- <NAME>
Use
---
Dependencies
------------
MIRI_trend_requestsDRAFT1900201.docx
References
----------
Notes
-----
"""
import jwql.instrument_monitors.nirspec_monitors.data_trending.utils.mnemonics as mn
import jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition as cond
import statistics
import sqlite3
import warnings
import numpy as np
from collections import defaultdict
def extract_data(condition, mnemonic):
    '''Function extracts data from given mnemonic at a given condition

    Parameters
    ----------
    condition : object
        condition object that holds one or more subconditions
    mnemonic : AstropyTable
        holds single table with mnemonic data

    Return
    ------
    temp : list or None
        holds data that applies to given condition; None when nothing matched
    '''
    # Keep only the values whose timestamp satisfies the condition.
    applicable = [float(row['value']) for row in mnemonic
                  if condition.state(float(row['time']))]
    # An empty result is reported as None rather than an empty list.
    return applicable if applicable else None
def lamp_distinction(caa_flag, lamp_sel, lamp_curr, lamp_volt):
    """Distincts over all calibration lamps and returns representative current means
    each

    Parameters
    ----------
    caa_flag : AstropyTable
        CAA on/off flag mnemonic (rows with 'value' == 'ON' start a lamp window)
    lamp_sel : AstropyTable
        lamp selection mnemonic; the latest entry at or before the ON flag
        names the active lamp
    lamp_curr : AstropyTable
        lamp current mnemonic
    lamp_volt : AstropyTable
        lamp voltage mnemonic

    Return
    ------
    lamp_values : defaultdict(list)
        per lamp name, a list of records
        [start_time, end_time, n_curr, mean_curr, stdev_curr,
         n_volt, mean_volt, stdev_volt]
    """
    #initilize empty dict
    lamp_values = defaultdict(list)
    for index, flag in enumerate(caa_flag):
        if flag['value'] == 'ON':
            #initialize lamp value to default
            current_lamp = "default"
            #find current lamp value (latest selection at or before the flag)
            for lamp in lamp_sel:
                if lamp['time'] <= flag['time']:
                    current_lamp = lamp['value']
            #go to next Value if dummy lamps are activated
            if (current_lamp == 'NO_LAMP') or (current_lamp == 'DUMMY'):
                continue
            #define on_time of current lamp
            # NOTE(review): this only checks the entry immediately after the
            # 'ON' flag.  If that entry is not 'OFF', `i += 1` has no effect
            # (nothing loops back to re-check), so `end_time` is left over
            # from a previous iteration -- or undefined on the first pass,
            # raising NameError below.  Looks like an unfinished forward
            # scan; confirm intended behavior before relying on this.
            try:
                start_time = flag['time']
                i = 1
                if caa_flag[index+i]['value'] == 'OFF':
                    end_time = caa_flag[index+1]['time']
                else:
                    i += 1
            except IndexError:
                break
            #append and evaluate current and voltage values
            temp_curr = []
            temp_volt = []
            #append current values to list (assumes lamp_curr sorted by time)
            for curr in lamp_curr:
                if curr['time'] >= start_time:
                    if curr['time'] < end_time:
                        temp_curr.append(float(curr['value']))
                    else:
                        break
            #append voltage values to list (assumes lamp_volt sorted by time)
            for volt in lamp_volt:
                if volt['time'] >= start_time :
                    if volt['time'] < end_time:
                        temp_volt.append(float(volt['value']))
                    else:
                        break
            lamp_data = []
            #append current values
            # NOTE(review): statistics.mean/stdev raise StatisticsError when
            # fewer than 1 (mean) / 2 (stdev) samples fall into the window.
            lamp_data.append(start_time)
            lamp_data.append(end_time)
            lamp_data.append(len(temp_curr))
            lamp_data.append(statistics.mean(temp_curr))
            lamp_data.append(statistics.stdev(temp_curr))
            #append voltage values
            lamp_data.append(len(temp_volt))
            lamp_data.append(statistics.mean(temp_volt))
            lamp_data.append(statistics.stdev(temp_volt))
            lamp_values[current_lamp].append(( lamp_data ))
    return lamp_values
def extract_filterpos(move_stat, wheel_pos, wheel_val):
    '''Extract wheel position values recorded at successful move events

    For every "SUCCESS" entry in ``move_stat`` the most recent position
    label and sensor value (at or before the event time) are looked up and
    collected under that label.

    Parameters
    ----------
    move_stat : AstropyTable
        move status mnemonic; rows with value "SUCCESS" trigger extraction
    wheel_pos : AstropyTable
        wheel position label mnemonic
    wheel_val : AstropyTable
        wheel position sensor value mnemonic

    Return
    ------
    pos_values : dict
        holds (time, value) tuples with corresponding position label as key
    '''
    #initilize empty dict for assigned position values
    pos_values = defaultdict(list)

    for stat in move_stat:
        # Only successful moves yield a trustworthy position reading.
        if stat['value'] != "SUCCESS":
            continue

        current_pos = "default"
        pos_val = 0
        pos_time = 0

        # Latest position label at or before the move event
        # (tables are assumed sorted by time, hence the early break).
        for pos in wheel_pos:
            if pos['time'] <= stat['time']:
                current_pos = pos['value']
            if pos['time'] > stat['time']:
                break

        # Latest sensor reading at or before the move event.
        for val in wheel_val:
            if val['time'] <= stat['time']:
                pos_val = val['value']
                pos_time = val['time']
            if val['time'] > stat['time']:
                break

        # NOTE: a leftover debug print of (current_pos, pos_val, pos_time)
        # was removed here.
        if current_pos != 'default':
            pos_values[current_pos].append((pos_time, pos_val))

    return pos_values
def once_a_day_routine(mnemonic_data):
    '''Routine for processing a 15min data file once a day

    Parameters
    ----------
    mnemonic_data : dict
        dict holds time and value in a astropy table with corresponding
        identifier as key

    Return
    ------
    return_data : dict
        Holds extracted data with applied conditions
    '''
    #abbreviate attribute
    m = mnemonic_data
    return_data = dict()

    def _collect(condition, identifiers):
        # Extract every identifier's data under `condition` into return_data;
        # identifiers with no applicable data are reported and skipped.
        for identifier in identifiers:
            data = extract_data(condition, m.mnemonic(identifier))
            if data is not None:
                return_data[identifier] = data
            else:
                print("no data for {}".format(identifier))

    # The three sections below were previously copy-pasted; they differ only
    # in the condition and the mnemonic list.

    # condition 1: exposure not started
    _collect(cond.condition([cond.unequal(m.mnemonic('INRSD_EXP_STAT'), 'STARTED')]),
             mn.mnemonic_cond_1)

    # condition 2: no calibration lamp selected
    _collect(cond.condition([cond.equal(m.mnemonic('INRSH_LAMP_SEL'), 'NO_LAMP')]),
             mn.mnemonic_cond_2)

    # condition 3: no wheel move in progress
    _collect(cond.condition([cond.unequal(m.mnemonic('INRSM_MOVE_STAT'), 'STARTED')]),
             mn.mnemonic_cond_3)

    return return_data
def whole_day_routine(mnemonic_data):
    '''Proposed routine for processing a 15min data file once a day

    Parameters
    ----------
    mnemonic_data : dict
        dict holds time and value in a astropy table with corresponding
        identifier as key

    Return
    ------
    return_data : dict
        holds extracted data with the filter / CAA power conditions applied
    data_lamps : dict
        per calibration lamp, current/voltage statistics
        (see lamp_distinction)
    '''
    #abbreviate attribute
    m = mnemonic_data
    return_data = dict()

    def _collect(condition, identifiers):
        # Extract every identifier's data under `condition` into return_data;
        # identifiers with no applicable data are reported and skipped.
        for identifier in identifiers:
            data = extract_data(condition, m.mnemonic(identifier))
            if data is not None:
                return_data[identifier] = data
            else:
                print("no data for {}".format(identifier))

    # condition: filter wheel at position 10 (numeric comparison)
    _collect(cond.condition([cond.equal(m.mnemonic('ICTM_RT_FILTER'), 10, stringval=False)]),
             mn.mnemonic_ft10)

    # condition: CAA power on
    _collect(cond.condition([cond.equal(m.mnemonic('INRSH_CAA_PWRF_ST'), 'ON')]),
             mn.mnemonic_caa)

    # Per-lamp current/voltage statistics over the lamp-on windows.
    data_lamps = lamp_distinction(m.mnemonic('INRSI_CAA_ON_FLAG'),
                                  m.mnemonic('INRSH_LAMP_SEL'),
                                  m.mnemonic('INRSI_C_CAA_CURRENT'),
                                  m.mnemonic('INRSI_C_CAA_VOLTAGE'))

    return return_data, data_lamps
def wheelpos_routine(mnemonic_data):
    '''Proposed routine for positionsensors each day

    Parameters
    ----------
    mnemonic_data : dict
        dict holds time and value in a astropy table with correspining identifier as key

    Return
    ------
    FW : dict
        holds filter wheel (FWA) position values and times with
        corresponding position label as key
    GWX : dict
        holds grating wheel (GWA) x-position values and times with
        corresponding position label as key
    GWY : dict
        holds grating wheel (GWA) y-position values and times with
        corresponding position label as key
    '''
    #abbreviate attribute
    m = mnemonic_data
    # Filter wheel assembly position at each successful move.
    FW = extract_filterpos( m.mnemonic('INRSI_FWA_MOVE_ST'),
                            m.mnemonic('INRSI_FWA_MECH_POS'),
                            m.mnemonic('INRSI_C_FWA_POSITION'))
    # Grating wheel assembly x position at each successful move.
    GWX = extract_filterpos(m.mnemonic('INRSI_GWA_MOVE_ST'),
                            m.mnemonic('INRSI_GWA_MECH_POS'),
                            m.mnemonic('INRSI_C_GWA_X_POSITION'))
    # Grating wheel assembly y position at each successful move.
    GWY = extract_filterpos(m.mnemonic('INRSI_GWA_MOVE_ST'),
                            m.mnemonic('INRSI_GWA_MECH_POS'),
                            m.mnemonic('INRSI_C_GWA_Y_POSITION'))
    return FW, GWX, GWY
# Import-only module; no standalone behavior when executed directly.
if __name__ =='__main__':
    pass
|
[
"collections.defaultdict",
"statistics.stdev",
"jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition.condition",
"statistics.mean"
] |
[((1720, 1737), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1731, 1737), False, 'from collections import defaultdict\n'), ((4648, 4665), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (4659, 4665), False, 'from collections import defaultdict\n'), ((6411, 6436), 'jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition.condition', 'cond.condition', (['con_set_1'], {}), '(con_set_1)\n', (6425, 6436), True, 'import jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition as cond\n'), ((6980, 7005), 'jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition.condition', 'cond.condition', (['con_set_2'], {}), '(con_set_2)\n', (6994, 7005), True, 'import jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition as cond\n'), ((7552, 7577), 'jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition.condition', 'cond.condition', (['con_set_3'], {}), '(con_set_3)\n', (7566, 7577), True, 'import jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition as cond\n'), ((8640, 8669), 'jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition.condition', 'cond.condition', (['con_set_ft_10'], {}), '(con_set_ft_10)\n', (8654, 8669), True, 'import jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition as cond\n'), ((9220, 9247), 'jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition.condition', 'cond.condition', (['con_set_caa'], {}), '(con_set_caa)\n', (9234, 9247), True, 'import jwql.instrument_monitors.nirspec_monitors.data_trending.utils.condition as cond\n'), ((3546, 3572), 'statistics.mean', 'statistics.mean', (['temp_curr'], {}), '(temp_curr)\n', (3561, 3572), False, 'import statistics\n'), ((3604, 3631), 'statistics.stdev', 'statistics.stdev', (['temp_curr'], {}), '(temp_curr)\n', (3620, 3631), False, 'import statistics\n'), ((3745, 3771), 'statistics.mean', 'statistics.mean', (['temp_volt'], 
{}), '(temp_volt)\n', (3760, 3771), False, 'import statistics\n'), ((3803, 3830), 'statistics.stdev', 'statistics.stdev', (['temp_volt'], {}), '(temp_volt)\n', (3819, 3830), False, 'import statistics\n')]
|
from __future__ import print_function
import argparse
import collections
from ete3 import PhyloTree
def printTSV(myDict, colList=None):
    """Pretty-print a list of dictionaries (myDict) as a tab-separated table.

    If column names (colList) aren't specified, they are taken from the first
    dictionary (in arbitrary order).  ``None`` values render as empty cells.
    """
    if not colList:
        colList = list(myDict[0].keys() if myDict else [])
    rows = [colList]
    for record in myDict:
        rows.append(['' if record[col] is None else str(record[col])
                     for col in colList])
    for row in rows:
        print(*row, sep="\t")
def main():
    """Parse arguments, read the gene trees and print the copy-number table."""
    arg_parser = argparse.ArgumentParser(description='Gene Copy Number Finder')
    arg_parser.add_argument('--genetree', required=True, help='GeneTree in nhx format')
    arg_parser.add_argument('--speciesorder', required=True, help='Comma-separated species list')
    opts = arg_parser.parse_args()

    species_list = [token.strip() for token in opts.speciesorder.split(",")]

    table = []
    with open(opts.genetree, "r") as handle:
        # One gene tree per line.
        for line in handle:
            # TreeBest can emit empty NHX feature blocks that break ete3.
            line = line.replace('[&&NHX]', '')
            genetree = PhyloTree(line)
            leaves = genetree.get_leaf_names()
            leaves_parts = [leaf.split("_") for leaf in leaves]
            for idx, parts in enumerate(leaves_parts):
                if len(parts) != 2:
                    raise Exception("Leaf node '%s' is not in gene_species format" % leaves[idx])
            species_counter = collections.Counter(parts[1] for parts in leaves_parts)
            # Reference species: first entry of species_list present in the tree.
            for ref_species in species_list:
                if ref_species in species_counter:
                    break
            else:
                raise Exception("None of the specified species was found in the GeneTree '%s'" % line)
            # Record the gene name of the (first) leaf for the reference species.
            for parts in leaves_parts:
                if parts[1] == ref_species:
                    species_counter['gene'] = parts[0]
                    break
            table.append(species_counter)

    printTSV(table, ["gene"] + species_list)


if __name__ == "__main__":
    main()
|
[
"collections.Counter",
"ete3.PhyloTree",
"argparse.ArgumentParser"
] |
[((674, 736), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Gene Copy Number Finder"""'}), "(description='Gene Copy Number Finder')\n", (697, 736), False, 'import argparse\n'), ((1381, 1396), 'ete3.PhyloTree', 'PhyloTree', (['line'], {}), '(line)\n', (1390, 1396), False, 'from ete3 import PhyloTree\n'), ((1787, 1822), 'collections.Counter', 'collections.Counter', (['leaves_species'], {}), '(leaves_species)\n', (1806, 1822), False, 'import collections\n')]
|
'''
Advent of Code - 2018
--- Day 4: Repose Record ---
Released under the MIT License <http://opensource.org/licenses/mit-license.php>
'''
import os
from collections import defaultdict
from datetime import datetime
import re
def sleep_pattern(events):
    """Build a per-guard histogram of minutes asleep from the raw event log.

    Events are sorted chronologically (their timestamp prefix sorts
    lexicographically), then replayed: a "Guard" line selects the guard, a
    "falls asleep" line opens a nap, and a "wakes up" line closes it.
    """
    minutes_asleep = defaultdict(lambda: [0] * 60)
    for record in sorted(events):
        stamp, action = record[1:].split("] ")
        moment = datetime.strptime(stamp, '%Y-%m-%d %H:%M')
        if "Guard" in action:
            guard = int(re.findall("[\d]+", action)[0])
        elif "asleep" in action:
            nap_start = moment.minute
        elif "wakes" in action:
            for minute in range(nap_start, moment.minute):
                minutes_asleep[guard][minute] += 1
    return minutes_asleep
def part1(sleep):
    """Strategy 1: guard with the most total sleep, times their sleepiest minute."""
    sleepiest_guard = max(sleep, key=lambda g: sum(sleep[g]))
    minutes = sleep[sleepiest_guard]
    return sleepiest_guard * minutes.index(max(minutes))
def part2(sleep):
    """Strategy 2: guard most frequently asleep on any single minute, times that minute."""
    champion = max(sleep, key=lambda g: max(sleep[g]))
    minutes = sleep[champion]
    return champion * minutes.index(max(minutes))
# Entry point: read the puzzle input and report both answers.
if __name__ == '__main__':
    with open('../input/d04.txt', mode='r') as f:
        _input = f.readlines()
    records = sleep_pattern(_input)
    print('Part One: {}'.format(part1(records)))
    print('Part Two: {}'.format(part2(records)))
|
[
"datetime.datetime.strptime",
"re.findall"
] |
[((408, 449), 'datetime.datetime.strptime', 'datetime.strptime', (['time', '"""%Y-%m-%d %H:%M"""'], {}), "(time, '%Y-%m-%d %H:%M')\n", (425, 449), False, 'from datetime import datetime\n'), ((505, 533), 're.findall', 're.findall', (['"""[\\\\d]+"""', 'action'], {}), "('[\\\\d]+', action)\n", (515, 533), False, 'import re\n')]
|
"""Initialisation procedures."""
# pylint: disable=import-outside-toplevel
import numpy as np
import scipy.integrate as sci
import probnum.filtsmooth as pnfs
import probnum.statespace as pnss
from probnum import randvars
# In the initialisation-via-RK function below, this value is added to the marginal stds of the initial derivatives that are known.
# If we put in zero, there are linalg errors (because a zero-cov RV is conditioned on a dirac likelihood).
# This value is chosen such that its square-root is a really small damping factor).
SMALL_VALUE = 1e-28
def initialize_odefilter_with_rk(
    f, y0, t0, prior, initrv, df=None, h0=1e-2, method="DOP853"
):
    r"""Initialize an ODE filter by fitting the prior process to a few steps of an approximate ODE solution computed with Scipy's RK.
    It goes as follows:
    1. The ODE integration problem is set up on the interval ``[t0, t0 + (2*order+1)*h0]``
    and solved with a call to ``scipy.integrate.solve_ivp``. The solver is uses adaptive steps with ``atol=rtol=1e-12``,
    but is forced to pass through the
    events ``(t0, t0+h0, t0 + 2*h0, ..., t0 + (2*order+1)*h0)``.
    The result is a vector of time points and states, with at least ``(2*order+1)``.
    Potentially, the adaptive steps selected many more steps, but because of the events, fewer steps cannot have happened.
    2. A prescribed prior is fitted to the first ``(2*order+1)`` (t, y) pairs of the solution. ``order`` is the order of the prior.
    3. The value of the resulting posterior at time ``t=t0`` is an estimate of the state and all its derivatives.
    The resulting marginal standard deviations estimate the error. This random variable is returned.
    Parameters
    ----------
    f
        ODE vector field.
    y0
        Initial value.
    t0
        Initial time point.
    prior
        Prior distribution used for the ODE solver. For instance an integrated Brownian motion prior (``IBM``).
    initrv
        Initial random variable.
    df
        Jacobian of the ODE vector field. Optional. If specified, more components of the result will be exact.
    h0
        Maximum step-size to use for computing the approximate ODE solution. The smaller, the more accurate, but also, the smaller, the less stable.
        The best value here depends on the ODE problem, and probably the chosen method. Optional. Default is ``1e-2``.
    method
        Which solver to use. This is communicated as a string that is compatible with ``scipy.integrate.solve_ivp(..., method=method)``.
        Optional. Default is `DOP853`.
    Returns
    -------
    Normal
        Estimated (improved) initial random variable. Compatible with the specified prior.
    Examples
    --------
    >>> from dataclasses import astuple
    >>> from probnum.randvars import Normal
    >>> from probnum.statespace import IBM
    >>> from probnum.problems.zoo.diffeq import vanderpol
    Compute the initial values of the van-der-Pol problem as follows
    >>> f, t0, tmax, y0, df, *_ = astuple(vanderpol())
    >>> print(y0)
    [2. 0.]
    >>> prior = IBM(ordint=3, spatialdim=2)
    >>> initrv = Normal(mean=np.zeros(prior.dimension), cov=np.eye(prior.dimension))
    >>> improved_initrv = initialize_odefilter_with_rk(f, y0, t0, prior=prior, initrv=initrv, df=df)
    >>> print(prior.proj2coord(0) @ improved_initrv.mean)
    [2. 0.]
    >>> print(np.round(improved_initrv.mean, 1))
    [ 2. 0. -2. 58.2 0. -2. 60. -1745.7]
    >>> print(np.round(np.log10(improved_initrv.std), 1))
    [-13.8 -11.3 -9. -1.5 -13.8 -11.3 -9. -1.5]
    """
    y0 = np.asarray(y0)
    ode_dim = y0.shape[0] if y0.ndim > 0 else 1
    order = prior.ordint
    # Measurement model: noiseless projection onto the zeroth coordinate
    # (the solution y itself), in square-root implementation for stability.
    proj_to_y = prior.proj2coord(0)
    zeros_shift = np.zeros(ode_dim)
    zeros_cov = np.zeros((ode_dim, ode_dim))
    measmod = pnss.DiscreteLTIGaussian(
        proj_to_y,
        zeros_shift,
        zeros_cov,
        proc_noise_cov_cholesky=zeros_cov,
        forward_implementation="sqrt",
        backward_implementation="sqrt",
    )
    # order + 1 would suffice in theory, 2*order + 1 is for good measure
    # (the "+1" is a safety factor for order=1)
    num_steps = 2 * order + 1
    t_eval = np.arange(t0, t0 + (num_steps + 1) * h0, h0)
    sol = sci.solve_ivp(
        f,
        (t0, t0 + (num_steps + 1) * h0),
        y0=y0,
        atol=1e-12,
        rtol=1e-12,
        t_eval=t_eval,
        method=method,
    )
    # Keep only the first num_steps (t, y) pairs for the filter fit.
    ts = sol.t[:num_steps]
    ys = sol.y[:, :num_steps].T
    # Condition the initial RV on what is known exactly: y0, f(t0, y0) and,
    # if a Jacobian is given, the second derivative df(t0, y0) @ f(t0, y0).
    initmean = initrv.mean.copy()
    initmean[0 :: (order + 1)] = y0
    initmean[1 :: (order + 1)] = f(t0, y0)
    initcov_diag = np.diag(initrv.cov).copy()
    # SMALL_VALUE instead of 0.0 avoids singular covariances downstream
    # (see the module-level comment on SMALL_VALUE).
    initcov_diag[0 :: (order + 1)] = SMALL_VALUE
    initcov_diag[1 :: (order + 1)] = SMALL_VALUE
    if df is not None:
        if order > 1:
            initmean[2 :: (order + 1)] = df(t0, y0) @ f(t0, y0)
            initcov_diag[2 :: (order + 1)] = SMALL_VALUE
    initcov = np.diag(initcov_diag)
    initcov_cholesky = np.diag(np.sqrt(initcov_diag))
    initrv = randvars.Normal(initmean, initcov, cov_cholesky=initcov_cholesky)
    # Fit the prior process to the RK skeleton; the smoothed state at t0 is
    # the improved initial random variable.
    kalman = pnfs.Kalman(prior, measmod, initrv)
    out = kalman.filtsmooth(ys, ts)
    estimated_initrv = out.state_rvs[0]
    return estimated_initrv
def initialize_odefilter_with_taylormode(f, y0, t0, prior, initrv):
    """Initialize an ODE filter with Taylor-mode automatic differentiation.
    This requires JAX. For an explanation of what happens ``under the hood``, see [1]_.
    References
    ----------
    .. [1] <NAME>. and <NAME>., Stable implementation of probabilistic ODE solvers,
        *arXiv:2012.10106*, 2020.
    The implementation is inspired by the implementation in
    https://github.com/jacobjinkelly/easy-neural-ode/blob/master/latent_ode.py
    Parameters
    ----------
    f
        ODE vector field.
    y0
        Initial value.
    t0
        Initial time point.
    prior
        Prior distribution used for the ODE solver. For instance an integrated Brownian motion prior (``IBM``).
    initrv
        Initial random variable.
    Returns
    -------
    Normal
        Estimated initial random variable. Compatible with the specified prior.
    Examples
    --------
    >>> import sys, pytest
    >>> if sys.platform.startswith('win'):
    ...     pytest.skip('this doctest does not work on Windows')
    >>> from dataclasses import astuple
    >>> from probnum.randvars import Normal
    >>> from probnum.problems.zoo.diffeq import threebody_jax, vanderpol_jax
    >>> from probnum.statespace import IBM
    Compute the initial values of the restricted three-body problem as follows
    >>> f, t0, tmax, y0, df, *_ = astuple(threebody_jax())
    >>> print(y0)
    [ 0.994 0. 0. -2.00158511]
    >>> prior = IBM(ordint=3, spatialdim=4)
    >>> initrv = Normal(mean=np.zeros(prior.dimension), cov=np.eye(prior.dimension))
    >>> improved_initrv = initialize_odefilter_with_taylormode(f, y0, t0, prior, initrv)
    >>> print(prior.proj2coord(0) @ improved_initrv.mean)
    [ 0.994 0. 0. -2.00158511]
    >>> print(improved_initrv.mean)
    [ 9.94000000e-01 0.00000000e+00 -3.15543023e+02 0.00000000e+00
    0.00000000e+00 -2.00158511e+00 0.00000000e+00 9.99720945e+04
    0.00000000e+00 -3.15543023e+02 0.00000000e+00 6.39028111e+07
    -2.00158511e+00 0.00000000e+00 9.99720945e+04 0.00000000e+00]
    Compute the initial values of the van-der-Pol oscillator as follows
    >>> f, t0, tmax, y0, df, *_ = astuple(vanderpol_jax())
    >>> print(y0)
    [2. 0.]
    >>> prior = IBM(ordint=3, spatialdim=2)
    >>> initrv = Normal(mean=np.zeros(prior.dimension), cov=np.eye(prior.dimension))
    >>> improved_initrv = initialize_odefilter_with_taylormode(f, y0, t0, prior, initrv)
    >>> print(prior.proj2coord(0) @ improved_initrv.mean)
    [2. 0.]
    >>> print(improved_initrv.mean)
    [ 2. 0. -2. 60. 0. -2. 60. -1798.]
    >>> print(improved_initrv.std)
    [0. 0. 0. 0. 0. 0. 0. 0.]
    """
    try:
        # JAX is an optional dependency: import lazily, fail with install hint.
        import jax.numpy as jnp
        from jax.config import config
        from jax.experimental.jet import jet
        config.update("jax_enable_x64", True)
    except ImportError as err:
        raise ImportError(
            "Cannot perform Taylor-mode initialisation without optional "
            "dependencies jax and jaxlib. Try installing them via `pip install jax jaxlib`."
        ) from err
    order = prior.ordint
    def total_derivative(z_t):
        """Total derivative."""
        # Split the augmented state back into the ODE state z and the time t.
        z, t = jnp.reshape(z_t[:-1], z_shape), z_t[-1]
        dz = jnp.ravel(f(t, z))
        dt = jnp.array([1.0])
        dz_t = jnp.concatenate((dz, dt))
        return dz_t
    z_shape = y0.shape
    # Augment the state with time so total_derivative is autonomous.
    z_t = jnp.concatenate((jnp.ravel(y0), jnp.array([t0])))
    derivs = []
    derivs.extend(y0)
    if order == 0:
        all_derivs = pnss.Integrator._convert_derivwise_to_coordwise(
            np.asarray(jnp.array(derivs)), ordint=0, spatialdim=len(y0)
        )
        return randvars.Normal(
            np.asarray(all_derivs),
            cov=np.asarray(jnp.diag(jnp.zeros(len(derivs)))),
            cov_cholesky=np.asarray(jnp.diag(jnp.zeros(len(derivs)))),
        )
    # First Taylor coefficient via a single jet evaluation.
    (dy0, [*yns]) = jet(total_derivative, (z_t,), ((jnp.ones_like(z_t),),))
    derivs.extend(dy0[:-1])
    if order == 1:
        all_derivs = pnss.Integrator._convert_derivwise_to_coordwise(
            np.asarray(jnp.array(derivs)), ordint=1, spatialdim=len(y0)
        )
        return randvars.Normal(
            np.asarray(all_derivs),
            cov=np.asarray(jnp.diag(jnp.zeros(len(derivs)))),
            cov_cholesky=np.asarray(jnp.diag(jnp.zeros(len(derivs)))),
        )
    # Each pass feeds the previous coefficients back into jet to obtain the
    # next-higher Taylor coefficient of the ODE solution.
    for _ in range(1, order):
        (dy0, [*yns]) = jet(total_derivative, (z_t,), ((dy0, *yns),))
        derivs.extend(yns[-2][:-1])
    all_derivs = pnss.Integrator._convert_derivwise_to_coordwise(
        jnp.array(derivs), ordint=order, spatialdim=len(y0)
    )
    # The Taylor expansion is exact, hence the zero covariance.
    return randvars.Normal(
        np.asarray(all_derivs),
        cov=np.asarray(jnp.diag(jnp.zeros(len(derivs)))),
        cov_cholesky=np.asarray(jnp.diag(jnp.zeros(len(derivs)))),
    )
|
[
"jax.config.config.update",
"jax.numpy.array",
"jax.numpy.ones_like",
"jax.numpy.reshape",
"jax.experimental.jet.jet",
"probnum.randvars.Normal",
"jax.numpy.concatenate",
"numpy.asarray",
"scipy.integrate.solve_ivp",
"numpy.zeros",
"numpy.arange",
"jax.numpy.ravel",
"numpy.diag",
"probnum.statespace.DiscreteLTIGaussian",
"probnum.filtsmooth.Kalman",
"numpy.sqrt"
] |
[((3620, 3634), 'numpy.asarray', 'np.asarray', (['y0'], {}), '(y0)\n', (3630, 3634), True, 'import numpy as np\n'), ((3762, 3779), 'numpy.zeros', 'np.zeros', (['ode_dim'], {}), '(ode_dim)\n', (3770, 3779), True, 'import numpy as np\n'), ((3796, 3824), 'numpy.zeros', 'np.zeros', (['(ode_dim, ode_dim)'], {}), '((ode_dim, ode_dim))\n', (3804, 3824), True, 'import numpy as np\n'), ((3839, 4004), 'probnum.statespace.DiscreteLTIGaussian', 'pnss.DiscreteLTIGaussian', (['proj_to_y', 'zeros_shift', 'zeros_cov'], {'proc_noise_cov_cholesky': 'zeros_cov', 'forward_implementation': '"""sqrt"""', 'backward_implementation': '"""sqrt"""'}), "(proj_to_y, zeros_shift, zeros_cov,\n proc_noise_cov_cholesky=zeros_cov, forward_implementation='sqrt',\n backward_implementation='sqrt')\n", (3863, 4004), True, 'import probnum.statespace as pnss\n'), ((4217, 4261), 'numpy.arange', 'np.arange', (['t0', '(t0 + (num_steps + 1) * h0)', 'h0'], {}), '(t0, t0 + (num_steps + 1) * h0, h0)\n', (4226, 4261), True, 'import numpy as np\n'), ((4272, 4387), 'scipy.integrate.solve_ivp', 'sci.solve_ivp', (['f', '(t0, t0 + (num_steps + 1) * h0)'], {'y0': 'y0', 'atol': '(1e-12)', 'rtol': '(1e-12)', 't_eval': 't_eval', 'method': 'method'}), '(f, (t0, t0 + (num_steps + 1) * h0), y0=y0, atol=1e-12, rtol=\n 1e-12, t_eval=t_eval, method=method)\n', (4285, 4387), True, 'import scipy.integrate as sci\n'), ((4947, 4968), 'numpy.diag', 'np.diag', (['initcov_diag'], {}), '(initcov_diag)\n', (4954, 4968), True, 'import numpy as np\n'), ((5036, 5101), 'probnum.randvars.Normal', 'randvars.Normal', (['initmean', 'initcov'], {'cov_cholesky': 'initcov_cholesky'}), '(initmean, initcov, cov_cholesky=initcov_cholesky)\n', (5051, 5101), False, 'from probnum import randvars\n'), ((5115, 5150), 'probnum.filtsmooth.Kalman', 'pnfs.Kalman', (['prior', 'measmod', 'initrv'], {}), '(prior, measmod, initrv)\n', (5126, 5150), True, 'import probnum.filtsmooth as pnfs\n'), ((5000, 5021), 'numpy.sqrt', 'np.sqrt', (['initcov_diag'], {}), 
'(initcov_diag)\n', (5007, 5021), True, 'import numpy as np\n'), ((8173, 8210), 'jax.config.config.update', 'config.update', (['"""jax_enable_x64"""', '(True)'], {}), "('jax_enable_x64', True)\n", (8186, 8210), False, 'from jax.config import config\n'), ((8645, 8661), 'jax.numpy.array', 'jnp.array', (['[1.0]'], {}), '([1.0])\n', (8654, 8661), True, 'import jax.numpy as jnp\n'), ((8677, 8702), 'jax.numpy.concatenate', 'jnp.concatenate', (['(dz, dt)'], {}), '((dz, dt))\n', (8692, 8702), True, 'import jax.numpy as jnp\n'), ((9773, 9818), 'jax.experimental.jet.jet', 'jet', (['total_derivative', '(z_t,)', '((dy0, *yns),)'], {}), '(total_derivative, (z_t,), ((dy0, *yns),))\n', (9776, 9818), False, 'from jax.experimental.jet import jet\n'), ((9930, 9947), 'jax.numpy.array', 'jnp.array', (['derivs'], {}), '(derivs)\n', (9939, 9947), True, 'import jax.numpy as jnp\n'), ((10025, 10047), 'numpy.asarray', 'np.asarray', (['all_derivs'], {}), '(all_derivs)\n', (10035, 10047), True, 'import numpy as np\n'), ((4640, 4659), 'numpy.diag', 'np.diag', (['initrv.cov'], {}), '(initrv.cov)\n', (4647, 4659), True, 'import numpy as np\n'), ((8560, 8590), 'jax.numpy.reshape', 'jnp.reshape', (['z_t[:-1]', 'z_shape'], {}), '(z_t[:-1], z_shape)\n', (8571, 8590), True, 'import jax.numpy as jnp\n'), ((8774, 8787), 'jax.numpy.ravel', 'jnp.ravel', (['y0'], {}), '(y0)\n', (8783, 8787), True, 'import jax.numpy as jnp\n'), ((8789, 8804), 'jax.numpy.array', 'jnp.array', (['[t0]'], {}), '([t0])\n', (8798, 8804), True, 'import jax.numpy as jnp\n'), ((9063, 9085), 'numpy.asarray', 'np.asarray', (['all_derivs'], {}), '(all_derivs)\n', (9073, 9085), True, 'import numpy as np\n'), ((9551, 9573), 'numpy.asarray', 'np.asarray', (['all_derivs'], {}), '(all_derivs)\n', (9561, 9573), True, 'import numpy as np\n'), ((8959, 8976), 'jax.numpy.array', 'jnp.array', (['derivs'], {}), '(derivs)\n', (8968, 8976), True, 'import jax.numpy as jnp\n'), ((9283, 9301), 'jax.numpy.ones_like', 'jnp.ones_like', (['z_t'], {}), 
'(z_t)\n', (9296, 9301), True, 'import jax.numpy as jnp\n'), ((9447, 9464), 'jax.numpy.array', 'jnp.array', (['derivs'], {}), '(derivs)\n', (9456, 9464), True, 'import jax.numpy as jnp\n')]
|
import os
import platform
import subprocess
# Create the virtual environment (venv) and activate it.
# NOTE(review): Python 2 printed `python --version` to stderr, so on a
# Python-2 `python` this stdout check would be empty and we fall back to
# python3 — confirm that is the intended behaviour.
version = subprocess.run(["python", "--version"], capture_output=True, text=True)
if "python 3" in version.stdout.lower():
    python = "python"
else:
    python = "python3"
createvenv = os.system(f"{python} -m venv venv")
# NOTE(review): `source` is a POSIX-shell builtin; on Windows os.system runs
# cmd.exe, so this branch likely fails — verify on a Windows host.
if platform.system() == 'Windows':
    activatevenv = os.system("source venv/Scripts/activate && pip install django")
else:
    activatevenv = os.system("source venv/bin/activate && pip install django")
# Each os.system call runs in its own subshell, so activation does not
# persist between calls; the `&&` chaining keeps pip/django-admin in the venv.
nomprojet = input('Entrez le nom de votre projet django : ')
activatevenv = os.system(f"source venv/bin/activate && django-admin startproject {nomprojet}")
creationrequirement = os.system(f"source venv/bin/activate && cd {nomprojet} && pip freeze >> requirements.txt")
|
[
"platform.system",
"subprocess.run",
"os.system"
] |
[((123, 194), 'subprocess.run', 'subprocess.run', (["['python', '--version']"], {'capture_output': '(True)', 'text': '(True)'}), "(['python', '--version'], capture_output=True, text=True)\n", (137, 194), False, 'import subprocess\n'), ((311, 346), 'os.system', 'os.system', (['f"""{python} -m venv venv"""'], {}), "(f'{python} -m venv venv')\n", (320, 346), False, 'import os\n'), ((354, 371), 'platform.system', 'platform.system', ([], {}), '()\n', (369, 371), False, 'import platform\n'), ((406, 469), 'os.system', 'os.system', (['"""source venv/Scripts/activate && pip install django"""'], {}), "('source venv/Scripts/activate && pip install django')\n", (415, 469), False, 'import os\n'), ((509, 569), 'os.system', 'os.system', (['"""source venv/bin/activate && pip install django"""'], {}), "('source venv/bin/activate && pip install django')\n", (518, 569), False, 'import os\n'), ((656, 742), 'os.system', 'os.system', (['f"""source venv/bin/activate && django-admin startproject {nomprojet}"""'], {}), "(\n f'source venv/bin/activate && django-admin startproject {nomprojet}')\n", (665, 742), False, 'import os\n'), ((765, 865), 'os.system', 'os.system', (['f"""source venv/bin/activate && cd {nomprojet} && pip freeze >> requirements.txt"""'], {}), "(\n f'source venv/bin/activate && cd {nomprojet} && pip freeze >> requirements.txt'\n )\n", (774, 865), False, 'import os\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# 序列化
# import pickle
# d = dict(name='Bob', age=20, score=88)
# print(pickle.dumps(d))
# f = open('dump.txt', 'wb')
# pickle.dump(d, f)
# f.close()
# f = open('dump.txt', 'rb')
# d = pickle.load(f)
# f.close()
# print(d)
# JSON
# import json
# d = dict(name='Bob', age=20, score=88)
# print(json.dumps(d)) # <class 'str'>
#
# json_str = '{"age": 20, "score": 88, "name": "Bob"}'
# print(json.loads(json_str)) # <class 'dict'>
# JSON进阶
import json
# class Student(object):
# def __init__(self, name, age, score):
# self.name = name
# self.age = age
# self.score = score
#
# s = Student('Bob', 20, 88)
# print(json.dumps(s)) # TypeError
# class -> {}
# def student2dict(std):
# return {
# 'name': std.name,
# 'age': std.age,
# 'score': std.score
# }
#
# print(json.dumps(s, default=student2dict))
# class的__dict__就是一个dict,用来存储实例变量
# print(json.dumps(s, default=lambda obj: obj.__dict__))
# loads也需要函数转换
# def dict2student(d):
# return Student(d['name'], d['age'], d['score'])
#
# json_str = '{"age": 20, "score": 88, "name": "Bob"}'
# print(json.loads(json_str, object_hook=dict2student)) # 反序列化的Student实例对象
# test
import json
obj = dict(name='小明', age=20)
# ensure_ascii=False keeps the non-ASCII name readable in the output
# instead of escaping it to \uXXXX sequences.
s = json.dumps(obj, ensure_ascii=False)
print(s)
|
[
"json.dumps"
] |
[((1280, 1315), 'json.dumps', 'json.dumps', (['obj'], {'ensure_ascii': '(False)'}), '(obj, ensure_ascii=False)\n', (1290, 1315), False, 'import json\n')]
|
# Solution of;
# Project Euler Problem 134: Prime pair connection
# https://projecteuler.net/problem=134
#
# Consider the consecutive primes p1 = 19 and p2 = 23. It can be verified that
# 1219 is the smallest number such that the last digits are formed by p1
# whilst also being divisible by p2. In fact, with the exception of p1 = 3 and
# p2 = 5, for every pair of consecutive primes, p2 > p1, there exist values of
# n for which the last digits are formed by p1 and n is divisible by p2. Let S
# be the smallest of these values of n. Find ∑ S for every pair of consecutive
# primes with 5 ≤ p1 ≤ 1000000.
#
# by lcsm29 http://github.com/lcsm29/project-euler
import timed
def dummy(n):
    """No-op stand-in passed to the timing harness (solution not written)."""
    return None
if __name__ == '__main__':
    # Timing-harness parameters: input size, iteration count, Euler problem id.
    n = 1000
    i = 10000
    prob_id = 134
    timed.caller(dummy, n, i, prob_id)
|
[
"timed.caller"
] |
[((784, 818), 'timed.caller', 'timed.caller', (['dummy', 'n', 'i', 'prob_id'], {}), '(dummy, n, i, prob_id)\n', (796, 818), False, 'import timed\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 16 12:05:08 2018
@author: Alexandre
"""
###############################################################################
import numpy as np
###############################################################################
from pyro.dynamic import pendulum
from pyro.control import nonlinear
from pyro.control import robotcontrollers
from pyro.planning import plan
from pyro.analysis import simulation
###############################################################################
sys = pendulum.SinglePendulum()  # NOTE(review): `sys` shadows the stdlib module name
###############################################################################
# Planning: reuse a trajectory previously produced by the RRT planner.
traj = plan.load_trajectory('rrt.npy')
q_goal = np.array([-3.14])
###############################################################################
# P (proportional-only joint controller)
kp = 5
kd = 0
ki = 0
p_ctl = robotcontrollers.JointPID( 1 , kp , ki, kd)
p_ctl.rbar = q_goal
# PD
kp = 5
kd = 2
ki = 0
pd_ctl = robotcontrollers.JointPID( 1 , kp , ki, kd)
pd_ctl.rbar = q_goal
# PID
kp = 5
kd = 2
ki = 1
pid_ctl = robotcontrollers.JointPID( 1 , kp , ki, kd)
pid_ctl.rbar = q_goal
# Computed Torque (model-based feedback linearisation)
ctc_ctl = nonlinear.ComputedTorqueController( sys )
ctc_ctl.rbar = q_goal
ctc_ctl.w0 = 2.0
ctc_ctl.zeta = 0.8
# Sliding Mode
sld_ctl = nonlinear.SlidingModeController( sys )
sld_ctl.lam = 1
sld_ctl.gain = 5
sld_ctl.rbar = q_goal
# OpenLoop with traj (replays the planned inputs with no feedback)
traj_ctl = plan.OpenLoopController( traj )
# Computed Torque with traj
traj_ctc_ctl = nonlinear.ComputedTorqueController( sys , traj )
traj_ctc_ctl.rbar = q_goal
traj_ctc_ctl.w0 = 2.0
traj_ctc_ctl.zeta = 0.8
# Sliding Mode with traj
traj_sld_ctl = nonlinear.SlidingModeController( sys , traj )
traj_sld_ctl.lam = 1
traj_sld_ctl.gain = 5
traj_sld_ctl.rbar = q_goal
###############################################################################
# Controller selection: uncomment exactly one of the candidates below.
#ctl = p_ctl
#ctl = pd_ctl
#ctl = pid_ctl
#ctl = ctc_ctl
#ctl = sld_ctl
#ctl = traj_ctl
#ctl = traj_ctc_ctl
ctl = traj_sld_ctl
###############################################################################
# New closed-loop dynamic system (controller composed with the plant)
cl_sys = ctl + sys
# Simulation: start at rest from q0, integrate tf seconds with Euler steps.
q0 = 0
tf = 10
cl_sys.sim = simulation.CLosedLoopSimulation( cl_sys , tf , tf * 1000 + 1 , 'euler' )
cl_sys.sim.x0 = np.array([q0,0])
cl_sys.sim.compute()
cl_sys.sim.plot('xu')
cl_sys.animate_simulation()
cl_sys.sim.phase_plane_trajectory(0,1)
|
[
"pyro.control.nonlinear.ComputedTorqueController",
"pyro.control.nonlinear.SlidingModeController",
"pyro.analysis.simulation.CLosedLoopSimulation",
"pyro.control.robotcontrollers.JointPID",
"pyro.dynamic.pendulum.SinglePendulum",
"pyro.planning.plan.load_trajectory",
"numpy.array",
"pyro.planning.plan.OpenLoopController"
] |
[((537, 562), 'pyro.dynamic.pendulum.SinglePendulum', 'pendulum.SinglePendulum', ([], {}), '()\n', (560, 562), False, 'from pyro.dynamic import pendulum\n'), ((666, 697), 'pyro.planning.plan.load_trajectory', 'plan.load_trajectory', (['"""rrt.npy"""'], {}), "('rrt.npy')\n", (686, 697), False, 'from pyro.planning import plan\n'), ((707, 724), 'numpy.array', 'np.array', (['[-3.14]'], {}), '([-3.14])\n', (715, 724), True, 'import numpy as np\n'), ((845, 885), 'pyro.control.robotcontrollers.JointPID', 'robotcontrollers.JointPID', (['(1)', 'kp', 'ki', 'kd'], {}), '(1, kp, ki, kd)\n', (870, 885), False, 'from pyro.control import robotcontrollers\n'), ((950, 990), 'pyro.control.robotcontrollers.JointPID', 'robotcontrollers.JointPID', (['(1)', 'kp', 'ki', 'kd'], {}), '(1, kp, ki, kd)\n', (975, 990), False, 'from pyro.control import robotcontrollers\n'), ((1058, 1098), 'pyro.control.robotcontrollers.JointPID', 'robotcontrollers.JointPID', (['(1)', 'kp', 'ki', 'kd'], {}), '(1, kp, ki, kd)\n', (1083, 1098), False, 'from pyro.control import robotcontrollers\n'), ((1158, 1197), 'pyro.control.nonlinear.ComputedTorqueController', 'nonlinear.ComputedTorqueController', (['sys'], {}), '(sys)\n', (1192, 1197), False, 'from pyro.control import nonlinear\n'), ((1293, 1329), 'pyro.control.nonlinear.SlidingModeController', 'nonlinear.SlidingModeController', (['sys'], {}), '(sys)\n', (1324, 1329), False, 'from pyro.control import nonlinear\n'), ((1421, 1450), 'pyro.planning.plan.OpenLoopController', 'plan.OpenLoopController', (['traj'], {}), '(traj)\n', (1444, 1450), False, 'from pyro.planning import plan\n'), ((1502, 1547), 'pyro.control.nonlinear.ComputedTorqueController', 'nonlinear.ComputedTorqueController', (['sys', 'traj'], {}), '(sys, traj)\n', (1536, 1547), False, 'from pyro.control import nonlinear\n'), ((1673, 1715), 'pyro.control.nonlinear.SlidingModeController', 'nonlinear.SlidingModeController', (['sys', 'traj'], {}), '(sys, traj)\n', (1704, 1715), False, 'from pyro.control 
import nonlinear\n'), ((2186, 2253), 'pyro.analysis.simulation.CLosedLoopSimulation', 'simulation.CLosedLoopSimulation', (['cl_sys', 'tf', '(tf * 1000 + 1)', '"""euler"""'], {}), "(cl_sys, tf, tf * 1000 + 1, 'euler')\n", (2217, 2253), False, 'from pyro.analysis import simulation\n'), ((2275, 2292), 'numpy.array', 'np.array', (['[q0, 0]'], {}), '([q0, 0])\n', (2283, 2292), True, 'import numpy as np\n')]
|
"""Abstract datasource for recipes"""
from os import path
import json
class Datasource:
    """A recipe data source backed by a local JSON file mirrored from a URL."""

    def __init__(self, data):
        """Initialize from a mapping with 'name', 'text', 'url' and 'local' keys."""
        self.name = data["name"]
        self.text = data["text"]
        self.url = data["url"]
        self.local = data["local"]

    def validate(self):
        """Prove that data source is valid.

        Returns True when the local file exists; otherwise raises an
        Exception telling the user where to download the file from.
        """
        if not path.exists(self.local):
            # Join the two halves with a space; the original concatenation
            # produced the run-together message "...;please download...".
            errmsg_part1 = "missing local data source file %s; "
            errmsg_part2 = "please download it from %s"
            err = errmsg_part1 + errmsg_part2
            raise Exception(err % (self.local, self.url))
        return True

    def load_local_json(self):
        """Load and return the JSON structure from the local file."""
        # Context manager closes the handle promptly; the previous bare
        # open() call leaked it.
        with open(self.local) as handle:
            return json.load(handle)
|
[
"os.path.exists"
] |
[((350, 373), 'os.path.exists', 'path.exists', (['self.local'], {}), '(self.local)\n', (361, 373), False, 'from os import path\n')]
|
import geoplot as gplt
import geopandas as gpd
import matplotlib.pyplot as plt
import pandas as pd
from src import constants
# data from cook county open data and us census
township_boundaries = gpd.read_file(constants.TOWNSHIP_POLYGONS_GEOJSON)
zip_code_data = gpd.read_file(constants.TOWNSHIP_POINTS_GEOJSON)
# Draw the township polygons first and keep the axes for layering.
ax = gplt.polyplot(township_boundaries)
# Overlay the zip-code points (marker size 1) on the same axes.
gplt.pointplot(zip_code_data,s=1, ax=ax)
# set title
plt.title("Example Plot: IL State-wide Zip Codes and Cook Count Township Boundaries")
# display plot
plt.show()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"geoplot.pointplot",
"geoplot.polyplot",
"geopandas.read_file"
] |
[((197, 247), 'geopandas.read_file', 'gpd.read_file', (['constants.TOWNSHIP_POLYGONS_GEOJSON'], {}), '(constants.TOWNSHIP_POLYGONS_GEOJSON)\n', (210, 247), True, 'import geopandas as gpd\n'), ((264, 312), 'geopandas.read_file', 'gpd.read_file', (['constants.TOWNSHIP_POINTS_GEOJSON'], {}), '(constants.TOWNSHIP_POINTS_GEOJSON)\n', (277, 312), True, 'import geopandas as gpd\n'), ((357, 391), 'geoplot.polyplot', 'gplt.polyplot', (['township_boundaries'], {}), '(township_boundaries)\n', (370, 391), True, 'import geoplot as gplt\n'), ((429, 470), 'geoplot.pointplot', 'gplt.pointplot', (['zip_code_data'], {'s': '(1)', 'ax': 'ax'}), '(zip_code_data, s=1, ax=ax)\n', (443, 470), True, 'import geoplot as gplt\n'), ((482, 572), 'matplotlib.pyplot.title', 'plt.title', (['"""Example Plot: IL State-wide Zip Codes and Cook Count Township Boundaries"""'], {}), "(\n 'Example Plot: IL State-wide Zip Codes and Cook Count Township Boundaries')\n", (491, 572), True, 'import matplotlib.pyplot as plt\n'), ((583, 593), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (591, 593), True, 'import matplotlib.pyplot as plt\n')]
|
import os
from mlflow_turing_scoring_server import scoring_server
from mlflow.pyfunc import load_model
# Build the WSGI app by loading the MLflow model from the path the scoring
# server stored in the environment.  NOTE(review): _SERVER_MODEL_PATH is a
# private attribute of scoring_server — confirm it is stable across versions.
app = scoring_server.init(load_model(os.environ[scoring_server._SERVER_MODEL_PATH]))
|
[
"mlflow.pyfunc.load_model"
] |
[((130, 187), 'mlflow.pyfunc.load_model', 'load_model', (['os.environ[scoring_server._SERVER_MODEL_PATH]'], {}), '(os.environ[scoring_server._SERVER_MODEL_PATH])\n', (140, 187), False, 'from mlflow.pyfunc import load_model\n')]
|
import setuptools
# Use the project readme as the long description shown on PyPI.
with open("readme.md", "r") as fh:
    long_description = fh.read()
setuptools.setup(
    name="sharpcvi2",
    version="1.0.0",
    author="CVI2: Computer Vision, Imaging and Machine Intelligence Research Group",
    author_email="<EMAIL>",
    description="Routines for the SHARP Challenge, ECCV 2020",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://cvi2.uni.lu/sharp2020/",
    # Automatically pick up every importable package in the repository.
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.6',
)
|
[
"setuptools.find_packages"
] |
[((470, 496), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (494, 496), False, 'import setuptools\n')]
|
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import torch
from torch import nn
import torch.nn.functional as F
class ConvNet(nn.Module):
    """Two-stage CNN (conv-relu-pool twice) followed by a two-layer classifier.

    Produces log-probabilities over 10 classes for 3-channel inputs.
    """

    def __init__(self, input_shape=(1, 3, 28, 28)):
        super(ConvNet, self).__init__()
        # Feature extractor: 3 -> 128 -> 256 channels, halving the spatial
        # size after each conv block.
        self.cnn = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=128, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.MaxPool2d(2),
        )
        # Probe the extractor with a dummy batch to size the first linear layer.
        out_shape = self._calculate_cnn_output_shape(input_shape=input_shape)
        flattened_size = out_shape[1] * out_shape[2] * out_shape[3]
        self.dense = nn.Sequential(
            nn.Linear(flattened_size, 128),
            nn.ReLU(),
            nn.Linear(128, 10),
        )

    def forward(self, x):
        features = self.cnn(x)
        flat = features.view(features.shape[0], -1)
        return F.log_softmax(self.dense(flat), dim=-1)

    def _calculate_cnn_output_shape(self, input_shape=(10, 3, 28, 28)):
        # Shape inference only: run a random tensor through the conv stack.
        probe = torch.rand(input_shape)
        return list(self.cnn(probe).size())
if __name__ == "__main__":
net = ConvNet()
imgs = torch.rand(10, 3, 28, 28)
with torch.no_grad():
print(net(imgs))
|
[
"torch.nn.ReLU",
"torch.nn.Conv2d",
"torch.nn.Linear",
"torch.rand",
"torch.nn.MaxPool2d",
"torch.no_grad"
] |
[((1340, 1365), 'torch.rand', 'torch.rand', (['(10)', '(3)', '(28)', '(28)'], {}), '(10, 3, 28, 28)\n', (1350, 1365), False, 'import torch\n'), ((1187, 1210), 'torch.rand', 'torch.rand', (['input_shape'], {}), '(input_shape)\n', (1197, 1210), False, 'import torch\n'), ((1375, 1390), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1388, 1390), False, 'import torch\n'), ((296, 364), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(3)', 'out_channels': '(128)', 'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels=3, out_channels=128, kernel_size=3, padding=1)\n', (305, 364), False, 'from torch import nn\n'), ((399, 408), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (406, 408), False, 'from torch import nn\n'), ((443, 458), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (455, 458), False, 'from torch import nn\n'), ((493, 563), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(128)', 'out_channels': '(256)', 'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels=128, out_channels=256, kernel_size=3, padding=1)\n', (502, 563), False, 'from torch import nn\n'), ((598, 607), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (605, 607), False, 'from torch import nn\n'), ((642, 657), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (654, 657), False, 'from torch import nn\n'), ((816, 850), 'torch.nn.Linear', 'nn.Linear', (['cnn_flattened_size', '(128)'], {}), '(cnn_flattened_size, 128)\n', (825, 850), False, 'from torch import nn\n'), ((887, 896), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (894, 896), False, 'from torch import nn\n'), ((933, 951), 'torch.nn.Linear', 'nn.Linear', (['(128)', '(10)'], {}), '(128, 10)\n', (942, 951), False, 'from torch import nn\n')]
|
import gzip
import Data_reader
##REQUIRES filename is a tabix file, names is the header of the file
##MODIFIES nothing
##EFFECTS finds the position of the minimum pvalue
def find_min_pvals(filename, filetype, num_minimums, region_buffer):
    """Scan the file and return the num_minimums smallest p-values, kept at
    least region_buffer positions apart, sorted by increasing p-value."""
    #create a file reader from the file
    file_reader = Data_reader.Data_reader.factory(filename, filetype)
    #skip the header
    file_reader.skip_header()
    #create the minimums dictionary
    minimums = create_baseline_minimums(num_minimums)
    #find the highest of the minimums (the entry a new candidate must beat)
    highest_min, highest_min_index = find_highest_min(minimums, num_minimums)
    #loops through the lines in the file
    line = file_reader.get_line()
    # The previous condition `line != '#Genomic' or line != ''` was a
    # tautology (always true); termination is handled by the break below.
    while True:
        if line == '' or line.split()[0] == '#Genomic':
            break
        #if the pvalue is not available
        if file_reader.get_pval() == 'NA':
            line = file_reader.get_line()
            continue
        #if the pvalue is not smaller than the highest kept minimum, skip it
        elif float(file_reader.get_pval()) >= highest_min:
            line = file_reader.get_line()
            continue
        #lastly, we must check other attributes of this pval if we want to add it to the dictionary
        else:
            #determine if this pvalue shares a region with another minimum
            # (int() replaces the Python-2-only long() builtin, which is a
            # NameError on Python 3)
            shares_region, shared_index = index_of_shared_region(minimums, num_minimums, int(file_reader.get_pos()), region_buffer)
            #if it does share a region:
            if shares_region:
                #determine which is smaller, and place the smaller minimum in the list
                if float(file_reader.get_pval()) < minimums['value'][shared_index]:
                    minimums = replace_minimum(minimums, int(file_reader.get_pos()), float(file_reader.get_pval()), int(file_reader.get_chrom()), shared_index)
                    highest_min, highest_min_index = find_highest_min(minimums, num_minimums)
                else:
                    line = file_reader.get_line()
                    continue
            #if it does not share a region, replace the previous highest minimum with the new minimum
            else:
                minimums = replace_minimum(minimums, int(file_reader.get_pos()), float(file_reader.get_pval()), int(file_reader.get_chrom()), highest_min_index)
                highest_min, highest_min_index = find_highest_min(minimums, num_minimums)
        line = file_reader.get_line()
    minimums = sort_minimums(minimums, num_minimums)
    return minimums
##REQUIRES minimums has at least two minimums
##MODIFIES minimums
##EFFECTS sorts (decreasing order) the dictionary of minimums based on pvalue
def sort_minimums(minimums, num_minimums):
    """Return a new minimums dict sorted by increasing p-value.

    Repeatedly extracts the current smallest entry; like the original, it
    destructively marks consumed entries in *minimums* by setting their
    p-value to 1.  (The original loop variable shadowed the builtin `min`;
    a plain index is used instead.)
    """
    new_minimums = create_baseline_minimums(num_minimums)
    for index in range(len(minimums['value'])):
        best = find_min_of_mins(minimums)
        new_minimums['position'][index] = minimums['position'][best]
        new_minimums['value'][index] = minimums['value'][best]
        new_minimums['chromosome'][index] = minimums['chromosome'][best]
        # Mark the consumed entry so the next pass skips it.
        minimums['value'][best] = 1
    return new_minimums
def replace_minimum(minimums, position, pvalue, chromosome, index):
    """Overwrite the minimum stored at slot *index* in place.

    Returns the same (mutated) dict so callers can chain the result.
    """
    new_entry = {'position': position, 'value': pvalue, 'chromosome': chromosome}
    for field, field_value in new_entry.items():
        minimums[field][index] = field_value
    return minimums
def index_of_shared_region(minimums, num_minimums, position, region_buffer):
    """Return (True, i) when *position* lies strictly within region_buffer
    of the i-th stored minimum (first match wins), else (False, -1)."""
    stored_positions = minimums['position'][:num_minimums]
    for idx, stored_pos in enumerate(stored_positions):
        if abs(position - stored_pos) < region_buffer:
            return True, idx
    return False, -1
def find_highest_min(minimums, num_minimums):
    """Return (value, index) of the largest stored p-value among the first
    num_minimums entries -- i.e. the weakest minimum, the next candidate
    for replacement.  Ties keep the earliest index.

    Fix: the original initialized only current_max (to 0) and raised
    UnboundLocalError for current_position whenever no value exceeded 0
    (e.g. num_minimums == 0).  Returns (0, -1) for an empty selection.
    """
    if num_minimums <= 0:
        return 0, -1
    values = minimums['value']
    best_index = 0
    for idx in range(1, num_minimums):
        if values[idx] > values[best_index]:
            best_index = idx
    return values[best_index], best_index
def create_baseline_minimums(num_minimums):
    """Create a fresh minimums dict with num_minimums placeholder slots:
    sentinel position -1000000, p-value 1 (the worst possible value, so
    any real minimum replaces it), chromosome 0."""
    return {
        'position': [-1000000] * num_minimums,
        'value': [1] * num_minimums,
        'chromosome': [0] * num_minimums,
    }
def find_min_of_mins(minimums):
    """Return the index of the smallest p-value in minimums['value'].

    Ties keep the earliest index.  Fixes over the original: no longer
    shadows the builtin ``min`` with the loop variable, and no longer
    raises UnboundLocalError when every value is >= 1 (the old sentinel
    start of 1 combined with a strict comparison).
    """
    values = minimums['value']
    return min(range(len(values)), key=values.__getitem__)
def create_hits(minimums, max_hits=10):
    """Build the top-hits list for the flask UI.

    Each entry is a ``["chrom:pos", "chrom:pos"]`` pair (label repeated,
    matching the original output shape).  The original hard-coded exactly
    10 iterations and crashed with IndexError when fewer than 10 minimums
    were stored; *max_hits* (default 10, backward compatible) is now
    capped by the number of available entries.
    """
    hits = []
    count = min(max_hits, len(minimums['position']))
    for x in range(count):
        label = str(minimums['chromosome'][x]) + ":" + str(minimums['position'][x])
        hits.append([label, label])
    return hits
def get_basic_region(filename, filetype):
    """Return a default display region "chrom:start-end" spanning 200 kb
    from the first variant in the file.

    NOTE(review): assumes the Data_reader factory understands *filetype*
    and that get_pos() returns an int-convertible position -- confirm
    against the Data_reader module.
    """
    reader = Data_reader.Data_reader.factory(filename, filetype)
    reader.skip_header()
    # Advance to the first data line so chrom/pos are populated.
    line = reader.get_line()
    chrom = reader.get_chrom()
    position = reader.get_pos()
    region_end = int(position) + 200000
    return "{}:{}-{}".format(chrom, position, region_end)
|
[
"Data_reader.Data_reader.factory"
] |
[((298, 349), 'Data_reader.Data_reader.factory', 'Data_reader.Data_reader.factory', (['filename', 'filetype'], {}), '(filename, filetype)\n', (329, 349), False, 'import Data_reader\n'), ((5362, 5413), 'Data_reader.Data_reader.factory', 'Data_reader.Data_reader.factory', (['filename', 'filetype'], {}), '(filename, filetype)\n', (5393, 5413), False, 'import Data_reader\n')]
|
"""
Classes for running lm inference
"""
import os
import torch
from torch.utils.data import (DataLoader, RandomSampler, SequentialSampler,
TensorDataset)
from reflex.models.reflex import Reflex
from dataclasses import dataclass
from reflex.utils import load_file, to_list
from reflex.structs import Sample
from reflex.models.pmi_filter import WordEmbeddingsPMIFilter
from reflex.squad_utils import convert_examples_to_features, read_input_examples, RawResult, get_predictions
from reflex.metrics import calculate_relation_metrics
from tqdm import tqdm
class ReflexRunner:
    """Runs Reflex relation-extraction inference over a set of relations.

    For each relation listed in ``relations_filepath`` it loads the matching
    ``<relation>.jsonl`` file from ``data_directory``, optionally filters
    contexts with a PMI/word-embeddings filter, runs batched model
    predictions, and aggregates EM/F1 metrics across all relations.
    """
    def __init__(self,
                 model_dir,
                 model_name,
                 device,
                 relations_filepath,
                 data_directory,
                 batch_size,
                 must_choose_answer,
                 l,
                 we_model,
                 spacy_model,
                 k,
                 expand,
                 hyperparams=None):
        # l is the PMI-filter threshold; we_model/spacy_model back the filter.
        self.context_filter = WordEmbeddingsPMIFilter(we_model, spacy_model, l)
        self.model = Reflex(model_dir, model_name, device, k, self.context_filter.nlp)
        self.relations_filepath = relations_filepath # path to relations file
        self.data_directory = data_directory # data directory path
        self.batch_size = batch_size
        self.must_choose_answer = must_choose_answer # For datasets where there is always an answer, setting this to true will ensure that QA models that can return "answer doesn't exist" will always return a span in the context
        # Optional per-relation tuned settings: relation name -> (l, expand, _).
        self.hyperparams = hyperparams
        self.expand = expand
        # When an override flag is True, the per-relation hyperparams do NOT
        # replace the manually configured expand / l values in predict().
        self.override_expand = False
        self.e_list = []  # accumulated per-sample error entries across relations
        self.override_l = False
    def update_l(self, l):
        """Rebuild the context filter with threshold *l*, reusing its models."""
        self.context_filter = WordEmbeddingsPMIFilter(self.context_filter.word_emb, self.context_filter.nlp, l)
    def predict(self):
        """Run inference for every relation.

        Returns a tuple ``(mean EM, mean F1, per_relation_metrics)`` where the
        means are averaged over all relations in the relations file.
        """
        # Load relations file
        relations = load_file(self.relations_filepath)
        # Iterate through relations file and predict for each relation
        aggregate_em = aggregate_f1 = 0
        per_relation_metrics = {}
        for relation in relations:
            # Check for per relation tuned hyperparams
            if self.hyperparams is not None:
                l, expand, _ = self.hyperparams[relation['relation']]
                if not self.override_l:
                    self.update_l(l)
                if not self.override_expand:
                    self.expand = expand
            data_file = os.path.join(self.data_directory, relation['relation']) + '.jsonl'
            data = load_file(data_file)
            # Adding to set filters any accidental duplicates
            samples_set = set()
            for d in data:
                samples_set.add(Sample(d['subject'], d['context'], d['object'], None, relation['template']))
            samples = list(samples_set)
            init_len = len(samples)
            # final_len defaults to 1 so the inference branch below still runs
            # when the filtering step is skipped entirely.
            final_len = 1
            if self.must_choose_answer:
                print('Must choose answer is True. Skipping filtering step')
            else:
                print('Starting filtering')
                samples = self.context_filter.filter(samples)
                final_len = len(samples)
                print(f'Filtering finished. Filtered {init_len - final_len}.')
            all_results = []
            if final_len != 0:
                print(f'Loaded relation {relation["relation"]}. There are {len(samples)} test samples')
                print('Batching samples')
                batches, samples = self.model.batch(samples, self.batch_size)
                print('Starting inference')
                for batch in tqdm(batches):
                    results = self.model.predict(batch, self.expand)
                    all_results.extend(results)
            else:
                print('All samples were filtered. Skipping inference.')
                # Now we need to readd all the filtered samples
                filtered_samples = [s for s in samples_set if s not in samples]
                samples = list(samples)
                samples.extend(filtered_samples)
                # Predict empty string for every sample
                filtered_predictions = [''] * len(filtered_samples)
                all_results.extend(filtered_predictions)
            relation_em, relation_f1, per_relation_metrics, _, relation_e_list = calculate_relation_metrics(samples, all_results, per_relation_metrics, relation, single_error_list=None, reflex_e_list=True)
            self.e_list.extend(relation_e_list)
            aggregate_em += relation_em
            aggregate_f1 += relation_f1
        aggregate_em /= len(relations)
        aggregate_f1 /= len(relations)
        return aggregate_em, aggregate_f1, per_relation_metrics
|
[
"tqdm.tqdm",
"reflex.models.pmi_filter.WordEmbeddingsPMIFilter",
"reflex.metrics.calculate_relation_metrics",
"reflex.models.reflex.Reflex",
"reflex.utils.load_file",
"reflex.structs.Sample",
"os.path.join"
] |
[((1043, 1092), 'reflex.models.pmi_filter.WordEmbeddingsPMIFilter', 'WordEmbeddingsPMIFilter', (['we_model', 'spacy_model', 'l'], {}), '(we_model, spacy_model, l)\n', (1066, 1092), False, 'from reflex.models.pmi_filter import WordEmbeddingsPMIFilter\n'), ((1114, 1179), 'reflex.models.reflex.Reflex', 'Reflex', (['model_dir', 'model_name', 'device', 'k', 'self.context_filter.nlp'], {}), '(model_dir, model_name, device, k, self.context_filter.nlp)\n', (1120, 1179), False, 'from reflex.models.reflex import Reflex\n'), ((1815, 1901), 'reflex.models.pmi_filter.WordEmbeddingsPMIFilter', 'WordEmbeddingsPMIFilter', (['self.context_filter.word_emb', 'self.context_filter.nlp', 'l'], {}), '(self.context_filter.word_emb, self.context_filter.\n nlp, l)\n', (1838, 1901), False, 'from reflex.models.pmi_filter import WordEmbeddingsPMIFilter\n'), ((1971, 2005), 'reflex.utils.load_file', 'load_file', (['self.relations_filepath'], {}), '(self.relations_filepath)\n', (1980, 2005), False, 'from reflex.utils import load_file, to_list\n'), ((2629, 2649), 'reflex.utils.load_file', 'load_file', (['data_file'], {}), '(data_file)\n', (2638, 2649), False, 'from reflex.utils import load_file, to_list\n'), ((4404, 4532), 'reflex.metrics.calculate_relation_metrics', 'calculate_relation_metrics', (['samples', 'all_results', 'per_relation_metrics', 'relation'], {'single_error_list': 'None', 'reflex_e_list': '(True)'}), '(samples, all_results, per_relation_metrics,\n relation, single_error_list=None, reflex_e_list=True)\n', (4430, 4532), False, 'from reflex.metrics import calculate_relation_metrics\n'), ((2543, 2598), 'os.path.join', 'os.path.join', (['self.data_directory', "relation['relation']"], {}), "(self.data_directory, relation['relation'])\n", (2555, 2598), False, 'import os\n'), ((3714, 3727), 'tqdm.tqdm', 'tqdm', (['batches'], {}), '(batches)\n', (3718, 3727), False, 'from tqdm import tqdm\n'), ((2803, 2878), 'reflex.structs.Sample', 'Sample', (["d['subject']", "d['context']", 
"d['object']", 'None', "relation['template']"], {}), "(d['subject'], d['context'], d['object'], None, relation['template'])\n", (2809, 2878), False, 'from reflex.structs import Sample\n')]
|
# testsqlite.py
# Tiny vocabulary store backed by SQLite (database file: vocab.db).
import sqlite3
conn = sqlite3.connect('vocab.db')
c = conn.cursor()
# Ensure the vocab table exists before any insert/select below.
c.execute("""CREATE TABLE IF NOT EXISTS vocab (
            ID INTEGER PRIMARY KEY AUTOINCREMENT,
            vocab text,
            meaning text,
            score int)""")
def insert_vocab(vocab,meaning):
    """Insert one (vocab, meaning) row; ID autoincrements, score starts at 0."""
    ID = None  # None lets SQLite assign the AUTOINCREMENT primary key
    score = 0
    with conn:
        c.execute("""INSERT INTO vocab VALUES (?,?,?,?)""",
        (ID,vocab,meaning,score))
        # NOTE(review): commit() is redundant inside "with conn" -- the
        # connection context manager commits on normal exit -- but harmless.
        conn.commit()
        print('Data was inserted')
def view_vocab():
    """Print and return every row in the vocab table."""
    with conn:
        c.execute("SELECT * FROM vocab")
        allvocab = c.fetchall()
        print(allvocab)
        return allvocab
view_vocab()
#insert_vocab('Cat','แมว')
|
[
"sqlite3.connect"
] |
[((44, 71), 'sqlite3.connect', 'sqlite3.connect', (['"""vocab.db"""'], {}), "('vocab.db')\n", (59, 71), False, 'import sqlite3\n')]
|
from collections import deque
class TreeNode(object):
    """Minimal binary-tree node: a payload plus left/right child links."""
    def __init__(self, item):
        self.val, self.left, self.right = item, None, None
# Codec
class DFS_Serialize:
    """Recursive DFS (preorder) serialization and deserialization of a
    binary tree.  Empty children are encoded as '#' and the fields are
    comma-separated; an empty tree serializes to ''.

    refs:
    https://blog.csdn.net/Shenpibaipao/article/details/108378093
    https://zhuanlan.zhihu.com/p/164408048
    """
    def serialize(self, root):
        """Encode *root* as a comma-separated preorder string ('' when empty)."""
        if root is None:
            return ''
        self.preorder_list = []
        self.__serialize_preorder(root)
        return ','.join(str(token) for token in self.preorder_list)
    def __serialize_preorder(self, p):
        """Append the preorder encoding of subtree *p* ('#' for None nodes)."""
        if p is None:
            self.preorder_list.append('#')
            return
        self.preorder_list.append(p.val)
        self.__serialize_preorder(p.left)
        self.__serialize_preorder(p.right)
    def deserialize(self, str1):
        """Rebuild and return the tree from a string made by serialize()."""
        if len(str1) == 0:
            return None
        tokens = deque(token.strip() for token in str1.split(','))
        return self.__preorder_deSerialize(tokens)
    def __preorder_deSerialize(self, preorder):
        """Consume tokens from *preorder* and return the subtree they describe."""
        token = preorder.popleft()
        if token == '#':
            return None
        node = TreeNode(token)
        node.left = self.__preorder_deSerialize(preorder)
        node.right = self.__preorder_deSerialize(preorder)
        return node
# Codec
class DFS_Serialize_Stack:
    """Iterative (non-recursive) DFS serialization and deserialization of
    a binary tree.  '#' encodes an empty child; fields are comma-separated.
    """
    def serialize(self, root):
        """Encode the tree as a comma-separated preorder string ('' when empty)."""
        if root is None:
            return ''
        preorder_list = []
        p = root
        stack = []
        stack.append(p)
        while len(stack) > 0:
            current = stack.pop()
            if current is not None: # current node is not an empty node
                preorder_list.append(current.val)
                # Push right before left so the left child is popped first.
                if current.right is not None:
                    stack.append(current.right)
                else:
                    stack.append(None) # push an empty-node marker
                if current.left is not None:
                    stack.append(current.left)
                else:
                    stack.append(None)
            else: # current node is an empty node
                preorder_list.append('#') # '#' marks an empty node
        preorder_list = [str(ele) for ele in preorder_list] # leetcode tree nodes are ints
        preorder_str = ','.join(preorder_list)
        return preorder_str
    def deserialize(self, preorder_str):
        """Rebuild the tree from a preorder string (non-recursive).

        ref: https://blog.csdn.net/cyuyanenen/article/details/51589945
        :param preorder_str: output of serialize()
        :return: root TreeNode, or None for the empty string
        """
        if len(preorder_str) == 0:
            return None
        preorder = preorder_str.split(',')
        preorder = [ele.strip() for ele in preorder]
        i = 0
        root = TreeNode(preorder[i])
        stack = []
        stack.append(root)
        i += 1
        flag = 1
        """
        flag = 1: the next token creates the current node's left child,
        flag = 2: the next token creates the right child,
        flag = 3: both children of the current node are finished, so pop
                  until the popped node is no longer the right child of the
                  node now on top of the stack.
        """
        while i < len(preorder):
            if flag == 1:
                if preorder[i] == '#':
                    flag = 2
                else:
                    child_left = TreeNode(preorder[i])
                    current = stack[-1]
                    current.left = child_left
                    stack.append(child_left)
                    flag = 1
            elif flag == 2:
                if preorder[i] == '#':
                    flag = 3
                else:
                    child_right = TreeNode(preorder[i])
                    current = stack[-1]
                    current.right = child_right
                    stack.append(child_right)
                    flag = 1
            elif flag == 3:
                top_ele = stack.pop()
                while len(stack) > 0 and stack[-1].right == top_ele:
                    top_ele = stack.pop()
                # Re-examine the same token with flag = 2 (i is bumped below).
                i -= 1
                flag = 2
            i += 1
        return root
# Codec
class BFS_Serialize:
    """Level-order (BFS) serialization and deserialization of a binary
    tree.  '#' encodes an empty child; fields are comma-separated.
    """
    def serialize(self, root):
        """Encode the tree as a comma-separated level-order string ('' when empty)."""
        if root is None:
            return ''
        h_list = []
        p = root
        queue = deque()
        queue.append(p)
        while len(queue) > 0:
            current = queue.popleft()
            if current is not None: # current node is not an empty node
                h_list.append(current.val)
                if current.left is not None:
                    queue.append(current.left)
                else:
                    queue.append(None)
                if current.right is not None:
                    queue.append(current.right)
                else:
                    queue.append(None) # enqueue an empty-node marker
            else: # current node is an empty node
                h_list.append('#') # h_list uses '#' for empty nodes
        h_list = [str(ele) for ele in h_list] # leetcode tree nodes are ints
        h_str = ','.join(h_list) # ',' is the field separator
        return h_str
    def deserialize(self, h_str):
        """Rebuild the tree from a level-order string (non-recursive).

        Note: the two "i += 1" bumps per dequeued node rely on serialize()
        always emitting both child slots, so indexing stays in bounds.
        :param h_str: output of serialize()
        :return: root TreeNode, or None for the empty string
        """
        if len(h_str) == 0:
            return None
        h_list = h_str.split(',')
        h_list = [ele.strip() for ele in h_list]
        i = 0
        root = TreeNode(h_list[i])
        i += 1
        queue = deque()
        queue.append(root)
        while i < len(h_list) and len(queue) > 0:
            current = queue.popleft()
            if h_list[i] != '#':
                left_child = TreeNode(h_list[i])
                current.left = left_child
                queue.append(left_child)
            i += 1
            if h_list[i] != '#':
                right_child = TreeNode(h_list[i])
                current.right = right_child
                queue.append(right_child)
            i += 1
        return root
class Solution1(object):
    """
    Binary tree represented with linked nodes (TreeNode objects).
    """
    def buildTree(self, preorder,inorder):
        """
        Build the tree from its preorder and inorder traversal results.
        :type preorder: ['a','b','c','e','d']
        :type inorder: ['c','b','e','a','d']
        :rtype: TreeNode
        """
        self.preorder = deque(preorder)
        self.inorder = deque(inorder)
        return self._buildTree(0, len(inorder))
    def _buildTree(self, start, end):
        # Recursive helper: rebuild the subtree whose inorder slice is [start, end).
        if start<end:
            root_val=self.preorder.popleft()
            print("root: ",root_val )
            root=TreeNode(root_val)
            index=self.inorder.index(root_val,start,end) # search for root_val within index range [start, end)
            root.left=self._buildTree(start,index)
            root.right=self._buildTree(index+1,end)
            return root
    def pre_order(self,root):
        # Recursive preorder traversal that prints each value.
        if root is not None:
            print(root.val)
            self.pre_order(root.left)
            self.pre_order(root.right)
        return
    def in_order_depreatured(self,root):
        """
        Non-recursive inorder traversal (older, flawed variant).
        :param root:
        :return:
        """
        stack=[root]
        p=root
        res=[]
        while len(stack)!=0 :
            while (p!=None) and (p.left!=None) and (p.val not in res): # do not re-push nodes that were already visited
                p = p.left
                stack.append(p)
            p=stack.pop()
            res.append(p.val)
            if p.right!=None:
                p=p.right
                stack.append(p)
        return res
    def in_order(self, root):
        """
        Non-recursive inorder traversal.
        :param root:
        :return:
        """
        stack = []
        p = root
        res = []
        while p!=None or len(stack)!=0:
            if p!=None: # push while p is not None
                stack.append(p)
                p=p.left # move to the left child
            else: # pop when p is None
                p=stack.pop() # visit the middle node
                res.append(p.val)
                p=p.right # move the pointer to the right subtree
        return res
    def _depth_recursion(self,root):
        # Recursive tree height: 1 + max(child heights); empty tree is 0.
        if root is None:
            return 0
        left_depth= self._depth_recursion(root.left)
        right_depth=self._depth_recursion(root.right)
        return max(left_depth,right_depth)+1
    def _depth(self, root):
        """
        Level-order traversal that processes one level per pass and
        returns the height of the tree.
        :type root: TreeNode
        :rtype: int
        """
        Queue = deque()
        Queue.append(root)
        depth = 0
        while (len(Queue) != 0):
            depth += 1
            n = len(Queue)
            for i in range(n): # Stratified according to depth
                target = Queue.popleft()
                print(target.val)
                print('depth: ', depth)
                if target.left != None:
                    Queue.append(target.left)
                if target.right != None:
                    Queue.append(target.right)
        return depth
class Solution2(object):
    """
    Binary tree represented with array (sequential/heap-style) storage,
    1-indexed: the children of node i live at 2*i and 2*i+1.
    """
    def pre_order(self, tree_array):
        """
        Preorder traversal: root -> left -> right.
        :param tree_array:
        :return:
        """
        stack=[]
        i=1
        node=[tree_array[i],i]
        stack.append(node)
        result=[]
        while ( len(stack)!=0 ):
            current=stack.pop()
            # print(current)
            result.append(current[0])
            i=current[1]
            if 2*i+1<len(tree_array) and tree_array[2*i+1]!=None: # bounds check on tree_array: 2*i+1<len(tree_array)
                node=[tree_array[2*i+1],2*i+1]
                stack.append(node)
            if 2*i<len(tree_array) and tree_array[2*i]!=None:
                node = [tree_array[2 * i ], 2 * i]
                stack.append(node)
        return result
    def post_order(self, tree_array):
        """
        Preorder traversal: root -> left -> right.
        Preorder reversed: root -> right -> left.
        That, reversed again: left -> right -> root (postorder).
        https://www.cnblogs.com/bjwu/p/9284534.html
        :param tree_array:
        :return:
        """
        stack=[]
        i=1
        node=[tree_array[i],i]
        stack.append(node)
        result=[]
        while ( len(stack)!=0 ):
            current=stack.pop()
            # print(current)
            result.append(current[0])
            i=current[1]
            if 2*i<len(tree_array) and tree_array[2*i]!=None:
                node = [tree_array[2 * i ], 2 * i]
                stack.append(node)
            if 2*i+1<len(tree_array) and tree_array[2*i+1]!=None: # bounds check on tree_array: 2*i+1<len(tree_array)
                node=[tree_array[2*i+1],2*i+1]
                stack.append(node)
        return result[::-1] # the reversed output is the postorder traversal
    def in_order_deprecated(self, tree_array):
        # Non-recursive inorder over the array representation (older variant).
        stack=[]
        i=1
        result=[]
        while ( i < len(tree_array) and tree_array[i] != None) or (len(stack) != 0): # (i < len(tree_array) and tree_array[i] != None) is equivalent to p != None
            while (i < len(tree_array) and tree_array[i] != None):
                node = [tree_array[i], i]
                stack.append(node)
                i = 2 * i # push the whole left spine
            if (len(stack) != 0) :
                current = stack.pop() #
                # print(current)
                result.append(current[0])
                i = current[1]
                i= 2*i+1 # try to visit the right subtree
        return result
    def in_order(self, tree_array):
        """
        Easier-to-follow non-recursive inorder traversal.
        :param tree_array:
        :return:
        """
        stack=[]
        i=1
        result=[]
        while ( i < len(tree_array) and tree_array[i] != None) or (len(stack) != 0):
            if (i < len(tree_array) and tree_array[i] != None):
                node = [tree_array[i], i]
                stack.append(node)
                i = 2 * i # push the whole left spine
            else:
                current = stack.pop() #
                # print(current)
                result.append(current[0])
                i = current[1]
                i= 2*i+1 # try to visit the right subtree
        return result
    def hierarchy_order(self, tree_array):
        """
        Level-order (breadth-first) traversal of the tree.
        :param tree_array:
        :return:
        """
        fifo=deque()
        i=1
        node=[tree_array[i],i]
        fifo.appendleft(node)
        result=[]
        while ( len(fifo)!=0 ):
            current=fifo.pop()
            # print(current)
            result.append(current[0])
            i=current[1]
            if 2*i<len(tree_array) and tree_array[2*i]!=None: # left child
                node = [tree_array[2 * i ], 2 * i]
                fifo.appendleft(node)
            if 2*i+1<len(tree_array) and tree_array[2*i+1]!=None: # right child
                node=[tree_array[2*i+1],2*i+1]
                fifo.appendleft(node)
        return result
class Test:
    """Smoke tests / demos for the serializers and the two tree representations."""
    def test_DFS(self):
        # Round-trip several preorder encodings through the iterative DFS codec.
        sol = DFS_Serialize_Stack()
        preorder = '1,2,#,#,3,4,#,5,#,#,#'
        tree = sol.deserialize(preorder)
        print(sol.serialize(tree))
        assert sol.serialize(tree) == preorder
        preorder = ''
        tree = sol.deserialize(preorder)
        assert sol.serialize(tree) == preorder
        preorder = '9,3,4,#,5,#,#,1,#,#,#'
        tree = sol.deserialize(preorder)
        print(sol.serialize(tree))
        assert sol.serialize(tree) == preorder
    def test_BFS(self):
        # Round-trip level-order encodings through the BFS codec.
        sol = BFS_Serialize()
        preorder = '8,6,10,5,7,9,11,#,#,#,#,#,#,#,#'
        tree = sol.deserialize(preorder)
        print(sol.serialize(tree))
        assert sol.serialize(tree) == preorder
        preorder = ''
        tree = sol.deserialize(preorder)
        assert sol.serialize(tree) == preorder
        preorder = '8,6,10,#,#,9,11,#,#,#,#'
        tree = sol.deserialize(preorder)
        print(sol.serialize(tree))
        assert sol.serialize(tree) == preorder
    def test_solution1(self):
        # solution1
        # Build a linked tree from preorder+inorder, then exercise traversals.
        preorder=['a','b','c','e','d']
        inorder= ['c','b','e','a','d']
        preorder=['A','B','D','F','G','C','E','H']
        inorder=['F','D','G','B','A','E','H','C']
        postorder= ['F','G','D','B','H','E','C','A']
        solution=Solution1()
        root=solution.buildTree(preorder,inorder)
        solution.pre_order(root)
        print(solution.in_order(root))
        print(solution._depth(root))
        print(solution._depth_recursion(root))
    def test_solution2(self):
        # Exercise the array-backed traversals (index 0 unused; None = no node).
        tree_array=[None,'A','B','C','D',None,'E',None,'F','G',None,None,None,'H']
        solution2 = Solution2()
        print('preorder: ',solution2.pre_order(tree_array))
        print('inorder: ',solution2.in_order(tree_array))
        print('postorder: ', solution2.post_order(tree_array))
        print('hierarchy_order: ', solution2.hierarchy_order(tree_array))
if __name__ == "__main__":
    # Run the BFS round-trip checks when executed as a script.
    Test().test_BFS()
|
[
"collections.deque"
] |
[((1795, 1810), 'collections.deque', 'deque', (['preorder'], {}), '(preorder)\n', (1800, 1810), False, 'from collections import deque\n'), ((4838, 4845), 'collections.deque', 'deque', ([], {}), '()\n', (4843, 4845), False, 'from collections import deque\n'), ((5941, 5948), 'collections.deque', 'deque', ([], {}), '()\n', (5946, 5948), False, 'from collections import deque\n'), ((6766, 6781), 'collections.deque', 'deque', (['preorder'], {}), '(preorder)\n', (6771, 6781), False, 'from collections import deque\n'), ((6805, 6819), 'collections.deque', 'deque', (['inorder'], {}), '(inorder)\n', (6810, 6819), False, 'from collections import deque\n'), ((8836, 8843), 'collections.deque', 'deque', ([], {}), '()\n', (8841, 8843), False, 'from collections import deque\n'), ((12553, 12560), 'collections.deque', 'deque', ([], {}), '()\n', (12558, 12560), False, 'from collections import deque\n')]
|
import pytest
@pytest.fixture(scope='session')
def session_capabilities(session_capabilities):
    """Enable full browser console logging for the whole test session.

    Both the legacy ``loggingPrefs`` and the W3C ``goog:loggingPrefs``
    capability keys are set so old and new chromedriver versions comply.
    """
    for key in ('loggingPrefs', 'goog:loggingPrefs'):
        session_capabilities[key] = {'browser': 'ALL'}
    return session_capabilities
@pytest.fixture
def chrome_options(chrome_options):
    """Force Chrome to run headless (no visible browser window)."""
    options = chrome_options
    options.headless = True
    return options
|
[
"pytest.fixture"
] |
[((17, 48), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (31, 48), False, 'import pytest\n')]
|
from rlberry.agents import A2CAgent
from rlberry.envs.classic_control import MountainCar
from rlberry.envs.benchmarks.ball_exploration import PBall2D
from rlberry.seeding import seeding
# Whether to open the rendering window after each agent is trained.
render = True
# Fix the global RNG seed so runs are reproducible.
seeding.set_global_seed(1223)
# Train A2C on two environments with per-environment budgets:
# PBall2D: 400 episodes / horizon 256; MountainCar: 40000 episodes / horizon 512.
for env, n_episodes, horizon in zip([PBall2D(), MountainCar()],
                                    [400, 40000], [256, 512]):
    print("Running A2C on %s" % env.name)
    agent = A2CAgent(env, n_episodes=n_episodes, horizon=horizon,
                     gamma=0.99, learning_rate=0.001, k_epochs=4)
    agent.fit()
    if render:
        env.enable_rendering()
    # Roll out the learned policy for 200 steps, rendering each frame.
    state = env.reset()
    for tt in range(200):
        action = agent.policy(state)
        next_state, reward, done, _ = env.step(action)
        state = next_state
        env.render()
|
[
"rlberry.agents.A2CAgent",
"rlberry.seeding.seeding.set_global_seed",
"rlberry.envs.benchmarks.ball_exploration.PBall2D",
"rlberry.envs.classic_control.MountainCar"
] |
[((201, 230), 'rlberry.seeding.seeding.set_global_seed', 'seeding.set_global_seed', (['(1223)'], {}), '(1223)\n', (224, 230), False, 'from rlberry.seeding import seeding\n'), ((413, 515), 'rlberry.agents.A2CAgent', 'A2CAgent', (['env'], {'n_episodes': 'n_episodes', 'horizon': 'horizon', 'gamma': '(0.99)', 'learning_rate': '(0.001)', 'k_epochs': '(4)'}), '(env, n_episodes=n_episodes, horizon=horizon, gamma=0.99,\n learning_rate=0.001, k_epochs=4)\n', (421, 515), False, 'from rlberry.agents import A2CAgent\n'), ((269, 278), 'rlberry.envs.benchmarks.ball_exploration.PBall2D', 'PBall2D', ([], {}), '()\n', (276, 278), False, 'from rlberry.envs.benchmarks.ball_exploration import PBall2D\n'), ((280, 293), 'rlberry.envs.classic_control.MountainCar', 'MountainCar', ([], {}), '()\n', (291, 293), False, 'from rlberry.envs.classic_control import MountainCar\n')]
|
from collections import deque
# Read grid dimensions and the key word that will be repeated across the grid.
rows, cols = [int(token) for token in input().split()]
string = input()
# Fill the grid left-to-right, restarting the key word whenever it runs out,
# then reverse every odd-indexed row for the boustrophedon ("snake") layout.
key_word = deque(string)
matrix = []
for row_index in range(rows):
    row = []
    for _ in range(cols):
        if not key_word:
            key_word = deque(string)
        row.append(key_word.popleft())
    if row_index % 2 == 1:
        row.reverse()
    matrix.append(row)
print('\n'.join(''.join(map(str, row)) for row in matrix))
|
[
"collections.deque"
] |
[((287, 300), 'collections.deque', 'deque', (['string'], {}), '(string)\n', (292, 300), False, 'from collections import deque\n'), ((407, 420), 'collections.deque', 'deque', (['string'], {}), '(string)\n', (412, 420), False, 'from collections import deque\n')]
|
import os
import sys
import django
# Project root sits two directory levels above this file; put it on the
# import path, point Django at the settings module, and initialize apps.
_here = os.path.abspath(__file__)
PROJDIR = os.path.dirname(os.path.dirname(_here))
sys.path.insert(0, PROJDIR)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "foodbank.settings")
django.setup()
|
[
"os.path.abspath",
"django.setup",
"os.environ.setdefault",
"sys.path.insert"
] |
[((107, 134), 'sys.path.insert', 'sys.path.insert', (['(0)', 'PROJDIR'], {}), '(0, PROJDIR)\n', (122, 134), False, 'import sys\n'), ((135, 203), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""foodbank.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'foodbank.settings')\n", (156, 203), False, 'import os\n'), ((205, 219), 'django.setup', 'django.setup', ([], {}), '()\n', (217, 219), False, 'import django\n'), ((79, 104), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (94, 104), False, 'import os\n')]
|
import traceback
import pytest
from plenum.test.testing_utils import setupTestLogging
# Configure test logging once, before any pytest runs are kicked off.
setupTestLogging()
def run(test, stopOnFail=True, maxTimes=None):
    """Run a pytest target repeatedly and report pass/fail statistics.

    :param test: pytest target (path or nodeid) handed to ``pytest.main``
    :param stopOnFail: stop looping after the first failing run
    :param maxTimes: maximum number of runs; ``None`` means loop forever
    """
    count = 0
    passes = 0
    fails = 0
    while maxTimes is None or count < maxTimes:
        exitcode = pytest.main(test)
        count += 1
        if exitcode:
            fails += 1
            # Fix: pytest.main signals failure through its exit code, it does
            # not raise -- the old traceback.print_exc() here had no active
            # exception and printed "NoneType: None". Report the code instead.
            print("Test failed! (pytest exit code: {})".format(exitcode))
            if stopOnFail:
                break
        else:
            passes += 1
            print("Test passed.")
        print("current stats: successes: {} fails: {}".format(passes, fails))
# Soak-test the Delta instance-change suite: up to 100 runs, keep going on failure.
run("monitoring/test_instance_change_with_Delta.py",
    stopOnFail=False, maxTimes=100)
|
[
"plenum.test.testing_utils.setupTestLogging",
"traceback.print_exc",
"pytest.main"
] |
[((89, 107), 'plenum.test.testing_utils.setupTestLogging', 'setupTestLogging', ([], {}), '()\n', (105, 107), False, 'from plenum.test.testing_utils import setupTestLogging\n'), ((267, 284), 'pytest.main', 'pytest.main', (['test'], {}), '(test)\n', (278, 284), False, 'import pytest\n'), ((394, 415), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (413, 415), False, 'import traceback\n')]
|
from dataclasses import dataclass
from datasets import load_dataset
from torch.utils.data import DataLoader
from transformers import PreTrainedTokenizerBase
from bsmetadata.input_pipeline import DataConfig
@dataclass
class DataCollatorForCLM:
    """Collate raw text examples into padded causal-LM batches.

    Tokenizes the "text" field of each example, pads/truncates to a fixed
    length, and clones input_ids into labels with padding positions masked
    to -100 so they are excluded from the LM loss (assumes the tokenizer
    defines pad_token_id).
    """
    tokenizer: PreTrainedTokenizerBase
    pad_to_multiple_of: int = 16
    def __call__(self, batch):
        texts = [example["text"] for example in batch]
        encoded = self.tokenizer(
            texts,
            truncation=True,
            padding="max_length",
            max_length=512,  # TODO: make this configurable
            return_tensors="pt",
            pad_to_multiple_of=self.pad_to_multiple_of,
        )
        labels = encoded["input_ids"].clone()
        # Mask padding so it does not contribute to the loss.
        labels[labels == self.tokenizer.pad_token_id] = -100
        encoded["labels"] = labels
        return encoded
def get_dataloaders(tokenizer, cfg: DataConfig):
    """Build (train_dataloader, {"val": eval_dataloader}) over wikitext-2
    using the CLM collator and the batch sizes from *cfg*."""
    datasets = load_dataset("wikitext", "wikitext-2-raw-v1")
    collator = DataCollatorForCLM(tokenizer)
    train_dataloader = DataLoader(
        datasets["train"],
        shuffle=True,
        collate_fn=collator,
        batch_size=cfg.per_device_train_batch_size,
        num_workers=1,
    )
    eval_dataloader = DataLoader(
        datasets["validation"],
        collate_fn=collator,
        batch_size=cfg.per_device_eval_batch_size,
        num_workers=1,
    )
    return train_dataloader, {"val": eval_dataloader}
|
[
"datasets.load_dataset",
"torch.utils.data.DataLoader"
] |
[((965, 1010), 'datasets.load_dataset', 'load_dataset', (['"""wikitext"""', '"""wikitext-2-raw-v1"""'], {}), "('wikitext', 'wikitext-2-raw-v1')\n", (977, 1010), False, 'from datasets import load_dataset\n'), ((1084, 1216), 'torch.utils.data.DataLoader', 'DataLoader', (["datasets['train']"], {'shuffle': '(True)', 'collate_fn': 'data_collator', 'batch_size': 'cfg.per_device_train_batch_size', 'num_workers': '(1)'}), "(datasets['train'], shuffle=True, collate_fn=data_collator,\n batch_size=cfg.per_device_train_batch_size, num_workers=1)\n", (1094, 1216), False, 'from torch.utils.data import DataLoader\n'), ((1282, 1405), 'torch.utils.data.DataLoader', 'DataLoader', (["datasets['validation']"], {'collate_fn': 'data_collator', 'batch_size': 'cfg.per_device_eval_batch_size', 'num_workers': '(1)'}), "(datasets['validation'], collate_fn=data_collator, batch_size=cfg\n .per_device_eval_batch_size, num_workers=1)\n", (1292, 1405), False, 'from torch.utils.data import DataLoader\n')]
|
import argparse
from tqdm import tqdm
from spacy.tokens import DocBin
import spacy_stanza
# Stanza-backed English spaCy pipeline shared by swap_one(); GPU requested up front.
nlp = spacy_stanza.load_pipeline("en", use_gpu=True)
def swap_one(document, summary):
    """Parse one (document, summary) pair with the module-level pipeline.

    Very long documents can hit the parser's recursion limit; in that case
    only the first 2000 characters of the document are parsed.
    """
    try:
        source_doc = nlp(document)
    except RecursionError:
        source_doc = nlp(document[:2000])
    summary_doc = nlp(summary)
    return source_doc, summary_doc
def main():
    """CLI: annotate paired document/summary files and dump spaCy DocBins.

    Reads one document and one summary per line, parses each pair with
    swap_one(), and writes <out_prefix>.source / <out_prefix>.target as
    serialized DocBin archives.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('document')
    parser.add_argument('summary')
    parser.add_argument('out_prefix')
    args = parser.parse_args()
    with open(args.document) as f:
        documents = [line.strip() for line in f]
    with open(args.summary) as f:
        summaries = [line.strip() for line in f]
    attrs = ['LEMMA', 'POS', 'DEP', 'ENT_IOB', 'ENT_TYPE', 'IS_STOP', 'HEAD']
    source_doc_bin = DocBin(list(attrs))
    summary_doc_bin = DocBin(list(attrs))
    for document, summary in tqdm(zip(documents, summaries)):
        source_doc, summary_doc = swap_one(document, summary)
        source_doc_bin.add(source_doc)
        summary_doc_bin.add(summary_doc)
    with open(args.out_prefix + '.source', 'wb') as f:
        f.write(source_doc_bin.to_bytes())
    with open(args.out_prefix + '.target', 'wb') as f:
        f.write(summary_doc_bin.to_bytes())
if __name__ == '__main__':
    main()
|
[
"spacy_stanza.load_pipeline",
"argparse.ArgumentParser",
"spacy.tokens.DocBin"
] |
[((98, 144), 'spacy_stanza.load_pipeline', 'spacy_stanza.load_pipeline', (['"""en"""'], {'use_gpu': '(True)'}), "('en', use_gpu=True)\n", (124, 144), False, 'import spacy_stanza\n'), ((388, 413), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (411, 413), False, 'import argparse\n'), ((745, 818), 'spacy.tokens.DocBin', 'DocBin', (["['LEMMA', 'POS', 'DEP', 'ENT_IOB', 'ENT_TYPE', 'IS_STOP', 'HEAD']"], {}), "(['LEMMA', 'POS', 'DEP', 'ENT_IOB', 'ENT_TYPE', 'IS_STOP', 'HEAD'])\n", (751, 818), False, 'from spacy.tokens import DocBin\n'), ((841, 914), 'spacy.tokens.DocBin', 'DocBin', (["['LEMMA', 'POS', 'DEP', 'ENT_IOB', 'ENT_TYPE', 'IS_STOP', 'HEAD']"], {}), "(['LEMMA', 'POS', 'DEP', 'ENT_IOB', 'ENT_TYPE', 'IS_STOP', 'HEAD'])\n", (847, 914), False, 'from spacy.tokens import DocBin\n')]
|
from __future__ import absolute_import, division, print_function
from functools import partial
from ...external.qt import QtGui
from ...external.qt.QtCore import Qt
from ...core import message as msg
from ...clients.histogram_client import HistogramClient
from ..widget_properties import (connect_int_spin, ButtonProperty,
FloatLineProperty,
ValueProperty)
from ..glue_toolbar import GlueToolbar
from ..mouse_mode import HRangeMode
from .data_viewer import DataViewer
from .mpl_widget import MplWidget, defer_draw
from ..qtutil import pretty_number, load_ui
__all__ = ['HistogramWidget']
WARN_SLOW = 10000000
def _hash(x):
return str(id(x))
class HistogramWidget(DataViewer):
    """Qt data viewer that renders 1D histograms of a selected component."""
    LABEL = "Histogram"
    # Viewer properties persisted/restored with the saved state.
    _property_set = DataViewer._property_set + \
        'component xlog ylog normed cumulative autoscale xmin xmax nbins'.split(
        )
    # Each descriptor proxies a widget in the option panel (widget_properties).
    xmin = FloatLineProperty('ui.xmin', 'Minimum value')
    xmax = FloatLineProperty('ui.xmax', 'Maximum value')
    normed = ButtonProperty('ui.normalized_box', 'Normalized?')
    autoscale = ButtonProperty('ui.autoscale_box',
                               'Autoscale view to histogram?')
    cumulative = ButtonProperty('ui.cumulative_box', 'Cumulative?')
    nbins = ValueProperty('ui.binSpinBox', 'Number of bins')
    xlog = ButtonProperty('ui.xlog_box', 'Log-scale the x axis?')
    ylog = ButtonProperty('ui.ylog_box', 'Log-scale the y axis?')
    def __init__(self, session, parent=None):
        """Build the viewer: central mpl canvas, option panel, client, toolbar.

        :param session: glue session this viewer belongs to
        :param parent: optional parent Qt widget
        """
        super(HistogramWidget, self).__init__(session, parent)
        self.central_widget = MplWidget()
        self.setCentralWidget(self.central_widget)
        self.option_widget = QtGui.QWidget()
        self.ui = load_ui('histogramwidget', self.option_widget)
        self._tweak_geometry()
        # The client drives the actual histogram drawing on the mpl figure.
        self.client = HistogramClient(self._data,
                                       self.central_widget.canvas.fig,
                                       artist_container=self._container)
        self._init_limits()
        self.make_toolbar()
        self._connect()
        # maps _hash(componentID) -> componentID
        self._component_hashes = {}
@staticmethod
def _get_default_tools():
return []
def _init_limits(self):
validator = QtGui.QDoubleValidator(None)
validator.setDecimals(7)
self.ui.xmin.setValidator(validator)
self.ui.xmax.setValidator(validator)
lo, hi = self.client.xlimits
self.ui.xmin.setText(str(lo))
self.ui.xmax.setText(str(hi))
def _tweak_geometry(self):
self.central_widget.resize(600, 400)
self.resize(self.central_widget.size())
def _connect(self):
ui = self.ui
cl = self.client
ui.attributeCombo.currentIndexChanged.connect(
self._set_attribute_from_combo)
ui.attributeCombo.currentIndexChanged.connect(
self._update_minmax_labels)
connect_int_spin(cl, 'nbins', ui.binSpinBox)
ui.normalized_box.toggled.connect(partial(setattr, cl, 'normed'))
ui.autoscale_box.toggled.connect(partial(setattr, cl, 'autoscale'))
ui.cumulative_box.toggled.connect(partial(setattr, cl, 'cumulative'))
ui.xlog_box.toggled.connect(partial(setattr, cl, 'xlog'))
ui.ylog_box.toggled.connect(partial(setattr, cl, 'ylog'))
ui.xmin.editingFinished.connect(self._set_limits)
ui.xmax.editingFinished.connect(self._set_limits)
@defer_draw
def _set_limits(self):
lo = float(self.ui.xmin.text())
hi = float(self.ui.xmax.text())
self.client.xlimits = lo, hi
def _update_minmax_labels(self):
lo, hi = pretty_number(self.client.xlimits)
self.ui.xmin.setText(lo)
self.ui.xmax.setText(hi)
def make_toolbar(self):
result = GlueToolbar(self.central_widget.canvas, self,
name='Histogram')
for mode in self._mouse_modes():
result.add_mode(mode)
self.addToolBar(result)
return result
def _mouse_modes(self):
axes = self.client.axes
def apply_mode(mode):
return self.apply_roi(mode.roi())
rect = HRangeMode(axes, roi_callback=apply_mode)
return [rect]
@defer_draw
def _update_attributes(self):
"""Repopulate the combo box that selects the quantity to plot"""
combo = self.ui.attributeCombo
component = self.component
new = self.client.component or component
combo.blockSignals(True)
combo.clear()
# implementation note:
# PySide doesn't robustly store python objects with setData
# use _hash(x) instead
model = QtGui.QStandardItemModel()
data_ids = set(_hash(d) for d in self._data)
self._component_hashes = dict((_hash(c), c) for d in self._data
for c in d.components)
found = False
for d in self._data:
if d not in self._container:
continue
item = QtGui.QStandardItem(d.label)
item.setData(_hash(d), role=Qt.UserRole)
assert item.data(Qt.UserRole) == _hash(d)
item.setFlags(item.flags() & ~Qt.ItemIsEnabled)
model.appendRow(item)
for c in d.visible_components:
if not d.get_component(c).numeric:
continue
if c is new:
found = True
item = QtGui.QStandardItem(c.label)
item.setData(_hash(c), role=Qt.UserRole)
model.appendRow(item)
combo.setModel(model)
# separators below data items
for i in range(combo.count()):
if combo.itemData(i) in data_ids:
combo.insertSeparator(i + 1)
combo.blockSignals(False)
if found:
self.component = new
else:
combo.setCurrentIndex(2) # skip first data + separator
self._set_attribute_from_combo()
@property
def component(self):
combo = self.ui.attributeCombo
index = combo.currentIndex()
return self._component_hashes.get(combo.itemData(index), None)
@component.setter
def component(self, component):
combo = self.ui.attributeCombo
if combo.count() == 0: # cold start problem, when restoring
self._update_attributes()
# combo.findData doesn't seem to work robustly
for i in range(combo.count()):
data = combo.itemData(i)
if data == _hash(component):
combo.setCurrentIndex(i)
return
raise IndexError("Component not present: %s" % component)
@defer_draw
def _set_attribute_from_combo(self, *args):
self.client.set_component(self.component)
self.update_window_title()
@defer_draw
def add_data(self, data):
""" Add data item to combo box.
If first addition, also update attributes """
if self.data_present(data):
return True
if data.size > WARN_SLOW and not self._confirm_large_data(data):
return False
self.client.add_layer(data)
self._update_attributes()
self._update_minmax_labels()
return True
def add_subset(self, subset):
pass
def _remove_data(self, data):
""" Remove data item from the combo box """
pass
def data_present(self, data):
return data in self._container
def register_to_hub(self, hub):
super(HistogramWidget, self).register_to_hub(hub)
self.client.register_to_hub(hub)
hub.subscribe(self,
msg.DataCollectionDeleteMessage,
handler=lambda x: self._remove_data(x.data))
hub.subscribe(self,
msg.DataUpdateMessage,
handler=lambda *args: self._update_labels())
hub.subscribe(self,
msg.ComponentsChangedMessage,
handler=lambda x: self._update_attributes())
def unregister(self, hub):
super(HistogramWidget, self).unregister(hub)
self.client.unregister(hub)
hub.unsubscribe_all(self)
@property
def window_title(self):
c = self.client.component
if c is not None:
label = str(c.label)
else:
label = 'Histogram'
return label
def _update_labels(self):
self.update_window_title()
self._update_attributes()
def __str__(self):
return "Histogram Widget"
def options_widget(self):
return self.option_widget
|
[
"functools.partial"
] |
[((3042, 3072), 'functools.partial', 'partial', (['setattr', 'cl', '"""normed"""'], {}), "(setattr, cl, 'normed')\n", (3049, 3072), False, 'from functools import partial\n'), ((3115, 3148), 'functools.partial', 'partial', (['setattr', 'cl', '"""autoscale"""'], {}), "(setattr, cl, 'autoscale')\n", (3122, 3148), False, 'from functools import partial\n'), ((3192, 3226), 'functools.partial', 'partial', (['setattr', 'cl', '"""cumulative"""'], {}), "(setattr, cl, 'cumulative')\n", (3199, 3226), False, 'from functools import partial\n'), ((3264, 3292), 'functools.partial', 'partial', (['setattr', 'cl', '"""xlog"""'], {}), "(setattr, cl, 'xlog')\n", (3271, 3292), False, 'from functools import partial\n'), ((3330, 3358), 'functools.partial', 'partial', (['setattr', 'cl', '"""ylog"""'], {}), "(setattr, cl, 'ylog')\n", (3337, 3358), False, 'from functools import partial\n')]
|
from unittest import TestCase
from rec_to_nwb.processing.tools.file_sorter import FileSorter
class TestFilenameSorter(TestCase):
    """Verify FileSorter orders names by their numeric suffix, not lexically."""

    def setUp(self):
        # Plain strings whose expected sorted order is known up front.
        self.strings_to_sort = ['name01', 'name11', 'name10', 'name02', 'name21']
        self.correct_order_of_strings = ['name01', 'name02', 'name10', 'name11', 'name21']
        # Tetrode files: zero-padded nt01..nt09 followed by nt10..nt63.
        padded = ['20190718_beans_01_s1.nt0' + str(n) + '.mda' for n in range(1, 10)]
        unpadded = ['20190718_beans_01_s1.nt' + str(n) + '.mda' for n in range(10, 64)]
        self.filenames_to_sort = padded + unpadded
        self.file_sorter = FileSorter()
        self.sorted_strings = self.file_sorter.sort_filenames(self.strings_to_sort)
        self.sorted_filenames = self.file_sorter.sort_filenames(self.filenames_to_sort)

    def test_string_sorting(self):
        self.assertEqual(self.sorted_strings, self.correct_order_of_strings)

    def test_filename_sorting(self):
        # Spot-check positions around the padded/unpadded boundary.
        expected_at_index = {
            1: '20190718_beans_01_s1.nt02.mda',
            9: '20190718_beans_01_s1.nt10.mda',
            18: '20190718_beans_01_s1.nt19.mda',
            19: '20190718_beans_01_s1.nt20.mda',
        }
        for index, filename in expected_at_index.items():
            self.assertEqual(self.sorted_filenames[index], filename)
|
[
"rec_to_nwb.processing.tools.file_sorter.FileSorter"
] |
[((563, 575), 'rec_to_nwb.processing.tools.file_sorter.FileSorter', 'FileSorter', ([], {}), '()\n', (573, 575), False, 'from rec_to_nwb.processing.tools.file_sorter import FileSorter\n')]
|
import sqlite3
import io
import json
import os
import shutil
import nska_deserialize as nd
import scripts.artifacts.artGlobals
from packaging import version
from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, logdevinfo, timeline, kmlgen, tsv, is_platform_windows, open_sqlite_db_readonly
def get_imoHD_Chat(files_found, report_folder, seeker):
    """Parse the IMO HD Chat sqlite database and report messages and contacts.

    files_found -- iterable of candidate paths; the first entry ending in
        ``.sqlite`` is opened as the chat DB, the rest are scanned for
        attachment files referenced by the messages.
    report_folder -- destination folder for the HTML/TSV/timeline artifacts.
    seeker -- unused here; kept for the artifact-plugin call convention.
    """
    # BUG FIX: ``sys`` and ``plistlib`` were used below without ever being
    # imported, so every non-NSKeyedArchiver plist raised a NameError.
    import plistlib
    import sys

    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.sqlite'):
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
    case ZIMOCHATMSG.ZTS
        when 0 then ''
        else datetime(ZTS/1000000000,'unixepoch')
    end as "Timestamp",
    ZIMOCONTACT.ZDISPLAY as "Sender Display Name",
    ZIMOCHATMSG.ZALIAS as "Sender Alias",
    ZIMOCONTACT.ZDIGIT_PHONE,
    ZIMOCHATMSG.ZTEXT as "Message",
    case ZIMOCHATMSG.ZISSENT
        when 0 then 'Received'
        when 1 then 'Sent'
    end as "Message Status",
    ZIMOCHATMSG.ZIMDATA
    from ZIMOCHATMSG
    left join ZIMOCONTACT ON ZIMOCONTACT.ZBUID = ZIMOCHATMSG.ZA_UID
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            plist = ''
            timestamp = row[0]
            senderName = row[1]
            senderAlias = row[2]
            senderPhone = row[3]
            message = row[4]
            messageStatus = row[5]
            itemAction = ''
            attachmentURL = ''
            thumb = ''

            if row[6] is not None:
                plist_file_object = io.BytesIO(row[6])
                if row[6].find(b'NSKeyedArchiver') == -1:
                    # Plain binary plist (not an NSKeyedArchiver archive).
                    if sys.version_info >= (3, 9):
                        plist = plistlib.load(plist_file_object)
                    else:
                        import biplist  # third-party fallback for Python < 3.9
                        plist = biplist.readPlist(plist_file_object)
                else:
                    try:
                        plist = nd.deserialize_plist(plist_file_object)
                    except (nd.DeserializeError, nd.biplist.NotBinaryPlistException, nd.biplist.InvalidPlistException,
                            nd.plistlib.InvalidFileException, nd.ccl_bplist.BplistError, ValueError, TypeError, OSError, OverflowError) as ex:
                        logfunc(f'Failed to read plist for {row[0]}, error was:' + str(ex))

                # BUG FIX: if deserialization failed, ``plist`` is still the
                # empty string; indexing it with ['type'] raised a TypeError.
                if isinstance(plist, dict):
                    itemAction = plist.get('type', '')
                    # Check for attachments
                    if plist.get('objects') is not None:
                        attachmentName = plist['objects'][0]['object_id']
                        attachmentURL = "https://cdn.imoim.us/s/object/" + attachmentName + "/"
                        # Copy a locally-recovered attachment next to the
                        # report and embed a thumbnail pointing at it.
                        for match in files_found:
                            if attachmentName in match:
                                shutil.copy2(match, report_folder)
                                data_file_name = os.path.basename(match)
                                thumb = f'<img src="{report_folder}/{data_file_name}"></img>'
                    else:
                        attachmentURL = ''

            data_list.append((timestamp, senderName, senderAlias, senderPhone, message, messageStatus, itemAction, attachmentURL, thumb))

        report = ArtifactHtmlReport('IMO HD Chat - Messages')
        report.start_artifact_report(report_folder, 'IMO HD Chat - Messages')
        report.add_script()
        data_headers = (
            'Timestamp', 'Sender Name', 'Sender Alias', 'Sender Phone', 'Message', 'Message Status', 'Item Action',
            'Attachment URL', 'Attachment')  # Don't remove the comma, that is required to make this a tuple as there is only 1 element
        report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Attachment'])
        report.end_artifact_report()

        tsvname = f'IMO HD Chat - Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'IMO HD Chat - Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        # BUG FIX: this is the *no data* branch; the original logged
        # "data available" here, which was misleading.
        logfunc('No IMO HD Chat - Messages data available')

    cursor.execute('''
    select
    ZPH_NAME,
    ZALIAS,
    ZPHONE,
    "https://cdn.imoim.us/s/object/" || ZICON_ID || "/" as "Profile Pic",
    ZBUID
    from ZIMOCONTACT
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('IMO HD Chat - Contacts')
        report.start_artifact_report(report_folder, 'IMO HD Chat - Contacts')
        report.add_script()
        data_headers = ('Contact Name','Contact Alias','Contact Phone','Profile Pic URL','User ID') # Don't remove the comma, that is required to make this a tuple as there is only 1 element
        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3],row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'IMO HD Chat - Contacts'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'IMO HD Chat - Contacts'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        # BUG FIX: same inverted message on the no-data branch.
        logfunc('No IMO HD Chat - Contacts data available')

    db.close()
|
[
"scripts.ilapfuncs.open_sqlite_db_readonly",
"io.BytesIO",
"os.path.basename",
"scripts.ilapfuncs.tsv",
"scripts.ilapfuncs.timeline",
"shutil.copy2",
"scripts.ilapfuncs.logfunc",
"nska_deserialize.deserialize_plist",
"scripts.artifact_report.ArtifactHtmlReport"
] |
[((576, 611), 'scripts.ilapfuncs.open_sqlite_db_readonly', 'open_sqlite_db_readonly', (['file_found'], {}), '(file_found)\n', (599, 611), False, 'from scripts.ilapfuncs import logfunc, logdevinfo, timeline, kmlgen, tsv, is_platform_windows, open_sqlite_db_readonly\n'), ((3587, 3631), 'scripts.artifact_report.ArtifactHtmlReport', 'ArtifactHtmlReport', (['"""IMO HD Chat - Messages"""'], {}), "('IMO HD Chat - Messages')\n", (3605, 3631), False, 'from scripts.artifact_report import ArtifactHtmlReport\n'), ((4242, 4294), 'scripts.ilapfuncs.tsv', 'tsv', (['report_folder', 'data_headers', 'data_list', 'tsvname'], {}), '(report_folder, data_headers, data_list, tsvname)\n', (4245, 4294), False, 'from scripts.ilapfuncs import logfunc, logdevinfo, timeline, kmlgen, tsv, is_platform_windows, open_sqlite_db_readonly\n'), ((4362, 4422), 'scripts.ilapfuncs.timeline', 'timeline', (['report_folder', 'tlactivity', 'data_list', 'data_headers'], {}), '(report_folder, tlactivity, data_list, data_headers)\n', (4370, 4422), False, 'from scripts.ilapfuncs import logfunc, logdevinfo, timeline, kmlgen, tsv, is_platform_windows, open_sqlite_db_readonly\n'), ((4453, 4501), 'scripts.ilapfuncs.logfunc', 'logfunc', (['"""IMO HD Chat - Messages data available"""'], {}), "('IMO HD Chat - Messages data available')\n", (4460, 4501), False, 'from scripts.ilapfuncs import logfunc, logdevinfo, timeline, kmlgen, tsv, is_platform_windows, open_sqlite_db_readonly\n'), ((4873, 4917), 'scripts.artifact_report.ArtifactHtmlReport', 'ArtifactHtmlReport', (['"""IMO HD Chat - Contacts"""'], {}), "('IMO HD Chat - Contacts')\n", (4891, 4917), False, 'from scripts.artifact_report import ArtifactHtmlReport\n'), ((5523, 5575), 'scripts.ilapfuncs.tsv', 'tsv', (['report_folder', 'data_headers', 'data_list', 'tsvname'], {}), '(report_folder, data_headers, data_list, tsvname)\n', (5526, 5575), False, 'from scripts.ilapfuncs import logfunc, logdevinfo, timeline, kmlgen, tsv, is_platform_windows, 
open_sqlite_db_readonly\n'), ((5643, 5703), 'scripts.ilapfuncs.timeline', 'timeline', (['report_folder', 'tlactivity', 'data_list', 'data_headers'], {}), '(report_folder, tlactivity, data_list, data_headers)\n', (5651, 5703), False, 'from scripts.ilapfuncs import logfunc, logdevinfo, timeline, kmlgen, tsv, is_platform_windows, open_sqlite_db_readonly\n'), ((5724, 5772), 'scripts.ilapfuncs.logfunc', 'logfunc', (['"""IMO HD Chat - Contacts data available"""'], {}), "('IMO HD Chat - Contacts data available')\n", (5731, 5772), False, 'from scripts.ilapfuncs import logfunc, logdevinfo, timeline, kmlgen, tsv, is_platform_windows, open_sqlite_db_readonly\n'), ((1735, 1753), 'io.BytesIO', 'io.BytesIO', (['row[6]'], {}), '(row[6])\n', (1745, 1753), False, 'import io\n'), ((2183, 2222), 'nska_deserialize.deserialize_plist', 'nd.deserialize_plist', (['plist_file_object'], {}), '(plist_file_object)\n', (2203, 2222), True, 'import nska_deserialize as nd\n'), ((3069, 3103), 'shutil.copy2', 'shutil.copy2', (['match', 'report_folder'], {}), '(match, report_folder)\n', (3081, 3103), False, 'import shutil\n'), ((3150, 3173), 'os.path.basename', 'os.path.basename', (['match'], {}), '(match)\n', (3166, 3173), False, 'import os\n')]
|
from concurrent import futures
from functools import wraps
from typing import Callable, Optional
from executor_exporter.exporter import ExecutorExporter
from executor_exporter.proxy import InstrumentedExecutorProxy
class ThreadPoolExecutor(InstrumentedExecutorProxy, futures.ThreadPoolExecutor):
    """Drop-in ``concurrent.futures.ThreadPoolExecutor`` whose worker
    initializations are counted by an :class:`ExecutorExporter`."""

    def __init__(
        self,
        max_workers=None,
        thread_name_prefix="",
        initializer=None,
        initargs=(),
        executor_id: Optional[str] = None,
    ) -> None:
        exporter = ExecutorExporter(futures.ThreadPoolExecutor, executor_id)
        # Wrap the user initializer so each worker start is exported.
        counted_initializer = _wrap_initializer(initializer, exporter)
        delegate = futures.ThreadPoolExecutor(
            max_workers, thread_name_prefix, counted_initializer, initargs
        )
        # ``_max_workers`` is private, but it is the only place where the
        # resolved (possibly defaulted) pool size lives.
        super().__init__(delegate, exporter, delegate._max_workers)
class ProcessPoolExecutor(InstrumentedExecutorProxy, futures.ProcessPoolExecutor):
    """Drop-in ``concurrent.futures.ProcessPoolExecutor`` whose worker
    initializations are counted by an :class:`ExecutorExporter`."""

    def __init__(
        self,
        max_workers=None,
        mp_context=None,
        initializer=None,
        initargs=(),
        executor_id: Optional[str] = None,
    ) -> None:
        exporter = ExecutorExporter(futures.ProcessPoolExecutor, executor_id)
        # Wrap the user initializer so each worker-process start is exported.
        counted_initializer = _wrap_initializer(initializer, exporter)
        delegate = futures.ProcessPoolExecutor(
            max_workers, mp_context, counted_initializer, initargs
        )
        # ``_max_workers`` is private, but it is the only place where the
        # resolved (possibly defaulted) pool size lives.
        super().__init__(delegate, exporter, delegate._max_workers)
def _wrap_initializer(initializer: Callable, exporter: ExecutorExporter):
@wraps(initializer)
def wrapper(*args, **kwargs):
exporter.inc_initialized_workers()
if initializer is not None and not callable(initializer):
initializer(*args, **kwargs)
return wrapper
|
[
"concurrent.futures.ThreadPoolExecutor",
"concurrent.futures.ProcessPoolExecutor",
"executor_exporter.exporter.ExecutorExporter",
"functools.wraps"
] |
[((1669, 1687), 'functools.wraps', 'wraps', (['initializer'], {}), '(initializer)\n', (1674, 1687), False, 'from functools import wraps\n'), ((512, 569), 'executor_exporter.exporter.ExecutorExporter', 'ExecutorExporter', (['futures.ThreadPoolExecutor', 'executor_id'], {}), '(futures.ThreadPoolExecutor, executor_id)\n', (528, 569), False, 'from executor_exporter.exporter import ExecutorExporter\n'), ((652, 738), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', (['max_workers', 'thread_name_prefix', 'initializer', 'initargs'], {}), '(max_workers, thread_name_prefix, initializer,\n initargs)\n', (678, 738), False, 'from concurrent import futures\n'), ((1199, 1257), 'executor_exporter.exporter.ExecutorExporter', 'ExecutorExporter', (['futures.ProcessPoolExecutor', 'executor_id'], {}), '(futures.ProcessPoolExecutor, executor_id)\n', (1215, 1257), False, 'from executor_exporter.exporter import ExecutorExporter\n'), ((1340, 1415), 'concurrent.futures.ProcessPoolExecutor', 'futures.ProcessPoolExecutor', (['max_workers', 'mp_context', 'initializer', 'initargs'], {}), '(max_workers, mp_context, initializer, initargs)\n', (1367, 1415), False, 'from concurrent import futures\n')]
|
#!/usr/bin/python3
import sys
import os
import time
import os as sistema
# Set color
# ANSI terminal escape sequences used to colour console output.
R = '\033[31m' # Red
N = '\033[1;37m' # White
G = '\033[32m' # Green
O = '\033[0;33m' # Orange
B = '\033[1;34m' #Blue
# Switch the terminal foreground to orange for everything printed below,
# then clear the screen before showing the menu.
print (""+O+"")
os.system('clear')
def pedirOpcionCorrecta():
    """Prompt repeatedly until the user types a valid integer; return it."""
    while True:
        try:
            return int(input("Elige una opcion: "))
        except ValueError:
            # Non-numeric input: tell the user and ask again.
            print('Error, Elige una opcion correcta: ')
salir = False
opcion = 0

# Dispatch table: menu option -> (label shown to the user, binary operation).
# Replaces five near-identical if/elif branches from the original.
_OPERACIONES = {
    1: ("Suma", lambda a, b: a + b),
    2: ("Resta", lambda a, b: a - b),
    3: ("Multiplicacion", lambda a, b: a * b),
    4: ("Division", lambda a, b: a / b),
    5: ("Potenciacion", lambda a, b: a ** b),
}

while not salir:
    print ("""
 _ _      _       _         __           __        _
/ )      /_|     / /        / )  /      /_|     / )  / )    /__) /_|
(__ (  | (__ (__ (__/ (__ ( | /(_/ (__/ / ( ( |  """)
    print ("")
    print ("""
      _ _ _ _ _ _ _ _ _ _ _
     |                     |
     |  1. SUMA            |
     |                     |
     |  2. RESTA           |
     |                     |
     |  3. MULTIPLICACION  |
     |                     |
     |  4. DIVISION        |
     |                     |
     |  5. POTENCIACION    |
     |                     |
     |  6. SALIR           |
     |_ _ _ _ _ _ _ _ _ _ _|""")
    print ("")
    opcion = pedirOpcionCorrecta()
    if opcion in _OPERACIONES:
        nombre, operacion = _OPERACIONES[opcion]
        print ("")
        print (nombre)
        print ("")
        a = int(input("Ingresa el primer valor: "))
        b = int(input("Ingresa el segundo valor: "))
        if opcion == 4 and b == 0:
            # BUG FIX: the original crashed with ZeroDivisionError here.
            print ("Error: no se puede dividir entre cero")
        else:
            print ("El resultado es:", operacion(a, b))
        time.sleep(6)
        os.system('clear')
    elif opcion == 6:
        salir = True
    else:
        print ("Introduce un numero entre 1 y 6")

print ("¡Fín!, espero le haya gustado esta herranienta, hasta luego")
print ("")
|
[
"os.system",
"time.sleep"
] |
[((222, 240), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (231, 240), False, 'import os\n'), ((1348, 1361), 'time.sleep', 'time.sleep', (['(6)'], {}), '(6)\n', (1358, 1361), False, 'import time\n'), ((1364, 1382), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1373, 1382), False, 'import os\n'), ((1593, 1606), 'time.sleep', 'time.sleep', (['(6)'], {}), '(6)\n', (1603, 1606), False, 'import time\n'), ((1609, 1627), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1618, 1627), False, 'import os\n'), ((1847, 1860), 'time.sleep', 'time.sleep', (['(6)'], {}), '(6)\n', (1857, 1860), False, 'import time\n'), ((1863, 1881), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1872, 1881), False, 'import os\n'), ((2093, 2106), 'time.sleep', 'time.sleep', (['(6)'], {}), '(6)\n', (2103, 2106), False, 'import time\n'), ((2109, 2127), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (2118, 2127), False, 'import os\n'), ((2346, 2359), 'time.sleep', 'time.sleep', (['(6)'], {}), '(6)\n', (2356, 2359), False, 'import time\n'), ((2362, 2380), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (2371, 2380), False, 'import os\n')]
|
import re

# Pre-compiled email matcher (raw string avoids invalid-escape warnings on
# modern Python; the pattern itself is unchanged).
_EMAIL_RE = re.compile(r'[\w\.]+@[\w\.]+')

# BUG FIX / cleanup: the original never closed the file handle and walked the
# lines via range(len(...)); stream the file with a context manager instead.
# ``search`` replaces ``findall`` because only a yes/no match is needed.
with open("regex.txt", "r") as f:
    for line in f:
        # Print every line containing an email address; the line already
        # ends with a newline, so suppress print()'s own.
        if _EMAIL_RE.search(line):
            print(line, end='')
|
[
"re.findall"
] |
[((143, 188), 're.findall', 're.findall', (['"""[\\\\w\\\\.]+@[\\\\w\\\\.]+"""', 'content[i]'], {}), "('[\\\\w\\\\.]+@[\\\\w\\\\.]+', content[i])\n", (153, 188), False, 'import re\n')]
|
from pysony import SonyAPI, ControlPoint
import time
# Flask is optional: without it the camera liveview still works locally,
# only the web (MJPEG streaming) endpoint is disabled.
flask_app = None
try:
    import flask
    from flask import Flask
    flask_app = Flask(__name__)
except ImportError:
    print("Cannot import `flask`, liveview on web is not available")
if flask_app:
    # Callback returning the newest liveview JPEG frame; assigned in
    # __main__ once the camera stream thread is running.
    flask_app.get_frame_handle = None
    flask_app.config['DEBUG'] = False

    @flask_app.route("/")
    def index():
        """Serve a minimal HTML page that embeds the MJPEG stream."""
        return flask.render_template_string("""
    <html>
      <head>
        <title>SONY Camera LiveView Streaming</title>
      </head>
      <body>
        <h1>SONY LiveView Streaming</h1>
        <img src="{{ url_for('video_feed') }}">
      </body>
    </html>
    """)

    def gen():
        """Yield multipart chunks, one JPEG frame per ``--frame`` boundary."""
        while True:
            if flask_app.get_frame_handle is not None:
                frame = flask_app.get_frame_handle()
                yield (b'--frame\r\n'
                       b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')

    @flask_app.route('/video_feed')
    def video_feed():
        """Stream the camera liveview as motion JPEG."""
        return flask.Response(gen(), mimetype='multipart/x-mixed-replace; boundary=frame')
def liveview():
    """Discover a camera on the network, start its liveview, and return a
    zero-argument callable that yields the latest JPEG frame.

    Exits the process (via ``quit()``) when no camera is discovered.
    """
    # Connect and set-up camera
    search = ControlPoint()
    cameras = search.discover(5)

    if len(cameras):
        camera = SonyAPI(QX_ADDR=cameras[0])
    else:
        print("No camera found, aborting")
        quit()

    mode = camera.getAvailableApiList()

    # some cameras need `startRecMode` before we can use liveview
    # For those camera which doesn't require this, just comment out the following 2 lines
    if 'startRecMode' in (mode['result'])[0]:
        camera.startRecMode()
        time.sleep(2)

    sizes = camera.getLiveviewSize()
    print('Supported liveview size:', sizes)

    # url = camera.liveview("M")
    url = camera.liveview()

    # Background thread keeps pulling frames from the liveview URL.
    lst = SonyAPI.LiveviewStreamThread(url)
    lst.start()
    print('[i] LiveviewStreamThread started.')
    return lst.get_latest_view
if __name__ == "__main__":
handler = liveview()
if flask_app:
flask_app.get_frame_handle = handler
flask_app.run()
|
[
"flask.Flask",
"flask.render_template_string",
"time.sleep",
"pysony.SonyAPI",
"pysony.ControlPoint",
"pysony.SonyAPI.LiveviewStreamThread"
] |
[((138, 153), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (143, 153), False, 'from flask import Flask\n'), ((1217, 1231), 'pysony.ControlPoint', 'ControlPoint', ([], {}), '()\n', (1229, 1231), False, 'from pysony import SonyAPI, ControlPoint\n'), ((1854, 1887), 'pysony.SonyAPI.LiveviewStreamThread', 'SonyAPI.LiveviewStreamThread', (['url'], {}), '(url)\n', (1882, 1887), False, 'from pysony import SonyAPI, ControlPoint\n'), ((394, 753), 'flask.render_template_string', 'flask.render_template_string', (['"""\n <html>\n <head>\n <title>SONY Camera LiveView Streaming</title>\n </head>\n <body>\n <h1>SONY LiveView Streaming</h1>\n <img src="{{ url_for(\'video_feed\') }}">\n </body>\n </html>\n """'], {}), '(\n """\n <html>\n <head>\n <title>SONY Camera LiveView Streaming</title>\n </head>\n <body>\n <h1>SONY LiveView Streaming</h1>\n <img src="{{ url_for(\'video_feed\') }}">\n </body>\n </html>\n """\n )\n', (422, 753), False, 'import flask\n'), ((1305, 1332), 'pysony.SonyAPI', 'SonyAPI', ([], {'QX_ADDR': 'cameras[0]'}), '(QX_ADDR=cameras[0])\n', (1312, 1332), False, 'from pysony import SonyAPI, ControlPoint\n'), ((1685, 1698), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1695, 1698), False, 'import time\n')]
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/ui_remotesensing_dockwidget.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_RemoteSensingDockWidget(object):
    """Auto-generated (PyQt5 uic 5.6) UI for the Remote Sensing dock widget.

    Per the file header, this code is regenerated from the ``.ui`` file and
    manual edits will be lost; only comments are added here.
    """

    def setupUi(self, RemoteSensingDockWidget):
        RemoteSensingDockWidget.setObjectName("RemoteSensingDockWidget")
        RemoteSensingDockWidget.resize(465, 146)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(RemoteSensingDockWidget.sizePolicy().hasHeightForWidth())
        RemoteSensingDockWidget.setSizePolicy(sizePolicy)
        self.verticalLayout = QtWidgets.QVBoxLayout(RemoteSensingDockWidget)
        self.verticalLayout.setObjectName("verticalLayout")
        # Row 1: viewing-direction azimuth/elevation spin boxes.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.lbObsAngle = QtWidgets.QLabel(RemoteSensingDockWidget)
        self.lbObsAngle.setObjectName("lbObsAngle")
        self.horizontalLayout.addWidget(self.lbObsAngle)
        self.dsbObsAngleAzimuth = QtWidgets.QDoubleSpinBox(RemoteSensingDockWidget)
        self.dsbObsAngleAzimuth.setDecimals(0)
        self.dsbObsAngleAzimuth.setMinimum(-180.0)
        self.dsbObsAngleAzimuth.setMaximum(180.0)
        self.dsbObsAngleAzimuth.setSingleStep(15.0)
        self.dsbObsAngleAzimuth.setObjectName("dsbObsAngleAzimuth")
        self.horizontalLayout.addWidget(self.dsbObsAngleAzimuth)
        self.label = QtWidgets.QLabel(RemoteSensingDockWidget)
        self.label.setObjectName("label")
        self.horizontalLayout.addWidget(self.label)
        self.dsbObsAngleElevation = QtWidgets.QDoubleSpinBox(RemoteSensingDockWidget)
        self.dsbObsAngleElevation.setMinimum(-90.0)
        self.dsbObsAngleElevation.setMaximum(90.0)
        self.dsbObsAngleElevation.setObjectName("dsbObsAngleElevation")
        self.horizontalLayout.addWidget(self.dsbObsAngleElevation)
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # Row 2: tangent-point drawing (checkbox, colour button, height box).
        self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_5.setObjectName("horizontalLayout_5")
        self.cbDrawTangents = QtWidgets.QCheckBox(RemoteSensingDockWidget)
        self.cbDrawTangents.setMinimumSize(QtCore.QSize(145, 0))
        self.cbDrawTangents.setObjectName("cbDrawTangents")
        self.horizontalLayout_5.addWidget(self.cbDrawTangents)
        self.btTangentsColour = QtWidgets.QPushButton(RemoteSensingDockWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btTangentsColour.sizePolicy().hasHeightForWidth())
        self.btTangentsColour.setSizePolicy(sizePolicy)
        self.btTangentsColour.setMinimumSize(QtCore.QSize(135, 0))
        self.btTangentsColour.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.btTangentsColour.setObjectName("btTangentsColour")
        self.horizontalLayout_5.addWidget(self.btTangentsColour)
        self.dsbTangentHeight = QtWidgets.QDoubleSpinBox(RemoteSensingDockWidget)
        self.dsbTangentHeight.setMinimumSize(QtCore.QSize(0, 0))
        self.dsbTangentHeight.setPrefix("")
        self.dsbTangentHeight.setObjectName("dsbTangentHeight")
        self.horizontalLayout_5.addWidget(self.dsbTangentHeight)
        spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_5.addItem(spacerItem1)
        self.verticalLayout.addLayout(self.horizontalLayout_5)
        # Row 3: solar-angle display (checkbox, angle type, celestial body).
        self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_6.setObjectName("horizontalLayout_6")
        self.cbShowSolarAngle = QtWidgets.QCheckBox(RemoteSensingDockWidget)
        self.cbShowSolarAngle.setMinimumSize(QtCore.QSize(145, 0))
        self.cbShowSolarAngle.setObjectName("cbShowSolarAngle")
        self.horizontalLayout_6.addWidget(self.cbShowSolarAngle)
        self.cbSolarAngleType = QtWidgets.QComboBox(RemoteSensingDockWidget)
        self.cbSolarAngleType.setObjectName("cbSolarAngleType")
        self.cbSolarAngleType.addItem("")
        self.cbSolarAngleType.addItem("")
        self.cbSolarAngleType.addItem("")
        self.horizontalLayout_6.addWidget(self.cbSolarAngleType)
        self.cbSolarBody = QtWidgets.QComboBox(RemoteSensingDockWidget)
        self.cbSolarBody.setMinimumSize(QtCore.QSize(170, 0))
        self.cbSolarBody.setObjectName("cbSolarBody")
        self.cbSolarBody.addItem("")
        self.cbSolarBody.addItem("")
        self.cbSolarBody.addItem("")
        self.cbSolarBody.addItem("")
        self.horizontalLayout_6.addWidget(self.cbSolarBody)
        spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_6.addItem(spacerItem2)
        self.verticalLayout.addLayout(self.horizontalLayout_6)
        # Row 4: colourmap legend label (text replaced at runtime).
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.lbSolarCmap = QtWidgets.QLabel(RemoteSensingDockWidget)
        self.lbSolarCmap.setObjectName("lbSolarCmap")
        self.horizontalLayout_2.addWidget(self.lbSolarCmap)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        spacerItem3 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem3)
        self.retranslateUi(RemoteSensingDockWidget)
        QtCore.QMetaObject.connectSlotsByName(RemoteSensingDockWidget)

    def retranslateUi(self, RemoteSensingDockWidget):
        # All user-visible strings are routed through Qt's translation system.
        _translate = QtCore.QCoreApplication.translate
        RemoteSensingDockWidget.setWindowTitle(_translate("RemoteSensingDockWidget", "Remote Sensing"))
        self.lbObsAngle.setToolTip(_translate("RemoteSensingDockWidget", "View direction of the remote sensing instrument.\n"
"0 degree is towards flight direction."))
        self.lbObsAngle.setText(_translate("RemoteSensingDockWidget", "Viewing direction: azimuth"))
        self.label.setText(_translate("RemoteSensingDockWidget", "elevation"))
        self.cbDrawTangents.setToolTip(_translate("RemoteSensingDockWidget", "Tangent points in viewing direction at the specified altitude.\n"
"Aircraft altitude is taken from the flight path."))
        self.cbDrawTangents.setText(_translate("RemoteSensingDockWidget", "draw tangent points"))
        self.btTangentsColour.setText(_translate("RemoteSensingDockWidget", "colour"))
        self.dsbTangentHeight.setSuffix(_translate("RemoteSensingDockWidget", " km"))
        self.cbShowSolarAngle.setToolTip(_translate("RemoteSensingDockWidget", "dark green if below horizon; otherwise reds: 0,5,10,15, purples: 15,25,35,45,60, greens: 60,90,135,180"))
        self.cbShowSolarAngle.setText(_translate("RemoteSensingDockWidget", "show angle (degree)"))
        self.cbSolarAngleType.setItemText(0, _translate("RemoteSensingDockWidget", "sun"))
        self.cbSolarAngleType.setItemText(1, _translate("RemoteSensingDockWidget", "moon"))
        self.cbSolarAngleType.setItemText(2, _translate("RemoteSensingDockWidget", "venus"))
        self.cbSolarBody.setItemText(0, _translate("RemoteSensingDockWidget", "total (horizon)"))
        self.cbSolarBody.setItemText(1, _translate("RemoteSensingDockWidget", "total"))
        self.cbSolarBody.setItemText(2, _translate("RemoteSensingDockWidget", "azimuth"))
        self.cbSolarBody.setItemText(3, _translate("RemoteSensingDockWidget", "elevation"))
        self.lbSolarCmap.setText(_translate("RemoteSensingDockWidget", "fill me"))
|
[
"PyQt5.QtWidgets.QComboBox",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QSizePolicy",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtWidgets.QCheckBox",
"PyQt5.QtCore.QSize",
"PyQt5.QtWidgets.QSpacerItem",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QDoubleSpinBox",
"PyQt5.QtCore.QMetaObject.connectSlotsByName"
] |
[((493, 585), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Preferred)\n', (514, 585), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((848, 894), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (869, 894), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((987, 1010), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (1008, 1010), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1101, 1142), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (1117, 1142), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1286, 1335), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (1310, 1335), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1690, 1731), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (1706, 1731), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1862, 1911), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (1886, 1911), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2175, 2273), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(40)', '(20)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (2196, 2273), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2414, 2437), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (2435, 2437), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2536, 2580), 'PyQt5.QtWidgets.QCheckBox', 
'QtWidgets.QCheckBox', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (2555, 2580), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2801, 2847), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (2822, 2847), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2869, 2948), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (2890, 2948), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3482, 3531), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (3506, 3531), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3792, 3890), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(40)', '(20)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (3813, 3890), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4036, 4059), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (4057, 4059), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4160, 4204), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (4179, 4204), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4433, 4477), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (4452, 4477), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4760, 4804), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (4779, 4804), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5151, 5249), 'PyQt5.QtWidgets.QSpacerItem', 
'QtWidgets.QSpacerItem', (['(40)', '(20)', 'QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Minimum'], {}), '(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Minimum)\n', (5172, 5249), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5395, 5418), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (5416, 5418), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5514, 5555), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (5530, 5555), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5755, 5853), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(20)', '(40)', 'QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Expanding'], {}), '(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.\n QSizePolicy.Expanding)\n', (5776, 5853), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5959, 6021), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['RemoteSensingDockWidget'], {}), '(RemoteSensingDockWidget)\n', (5996, 6021), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2624, 2644), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(145)', '(0)'], {}), '(145, 0)\n', (2636, 2644), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3227, 3247), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(135)', '(0)'], {}), '(135, 0)\n', (3239, 3247), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3577, 3595), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(0)'], {}), '(0, 0)\n', (3589, 3595), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4250, 4270), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(145)', '(0)'], {}), '(145, 0)\n', (4262, 4270), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4845, 4865), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(170)', '(0)'], {}), '(170, 0)\n', (4857, 4865), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')]
|
"""py.test testing fixtures"""
import pytest
# Websauna
from websauna.blog.models import Post
from websauna.utils.time import now
@pytest.fixture
def unpublished_post(dbsession):
post = Post()
post.title = "Hello world"
post.body = "All roads lead to Toholampi"
post.tags = "mytag,mytag2"
post.ensure_slug(dbsession)
dbsession.add(post)
dbsession.flush()
return post
@pytest.fixture
def published_post(unpublished_post):
unpublished_post.published_at = now()
return unpublished_post
|
[
"websauna.blog.models.Post",
"websauna.utils.time.now"
] |
[((194, 200), 'websauna.blog.models.Post', 'Post', ([], {}), '()\n', (198, 200), False, 'from websauna.blog.models import Post\n'), ((495, 500), 'websauna.utils.time.now', 'now', ([], {}), '()\n', (498, 500), False, 'from websauna.utils.time import now\n')]
|
from ament_index_python.packages import get_package_prefix
from launch import LaunchDescription
from launch_ros.actions import Node
from os import environ as env
def generate_launch_description():
pkg_name = "phidget_spatial"
pkg_share_path = get_package_prefix(pkg_name)
return LaunchDescription([
Node(
package='phidget_spatial',
executable='phidget_spatial_node',
name='phidget_spatial_node',
namespace=(env.get("DRONE_DEVICE_ID", env.get("USER"))),
parameters=[
pkg_share_path + "/config/phidget_spatial/phidget_spatial.yaml"
]
)
])
|
[
"os.environ.get",
"ament_index_python.packages.get_package_prefix"
] |
[((256, 284), 'ament_index_python.packages.get_package_prefix', 'get_package_prefix', (['pkg_name'], {}), '(pkg_name)\n', (274, 284), False, 'from ament_index_python.packages import get_package_prefix\n'), ((508, 523), 'os.environ.get', 'env.get', (['"""USER"""'], {}), "('USER')\n", (515, 523), True, 'from os import environ as env\n')]
|
from typing import TYPE_CHECKING, Any, Dict, List
from aiopoke.objects.utility import Effect, NamedResource, VerboseEffect
from aiopoke.objects.utility.common_models import Name
from aiopoke.utils.minimal_resources import MinimalResource
from aiopoke.utils.resource import Resource
if TYPE_CHECKING:
from aiopoke.objects.resources import Generation, VersionGroup
from aiopoke.objects.resources.pokemon import Pokemon
from aiopoke.objects.utility import Language
class Ability(NamedResource):
effect_changes: List["AbilityEffectChange"]
effect_entries: List["VerboseEffect"]
flavor_text_entries: List["AbilityFlavorText"]
generation: MinimalResource["Generation"]
is_main_series: bool
names: List["Name"]
pokemon: List["AbilityPokemon"]
def __init__(
self,
*,
id: int,
name: str,
effect_changes: List[Dict[str, Any]],
effect_entries: List[Dict[str, Any]],
flavor_text_entries: List[Dict[str, Any]],
generation: Dict[str, Any],
is_main_series: bool,
names: List[Dict[str, Any]],
pokemon: List[Dict[str, Any]],
) -> None:
super().__init__(id=id, name=name)
self.effect_changes = [
AbilityEffectChange(**effect_change) for effect_change in effect_changes
]
self.effect_entries = [
VerboseEffect(**effect_entry) for effect_entry in effect_entries
]
self.flavor_text_entries = [
AbilityFlavorText(**flavor_text_entry)
for flavor_text_entry in flavor_text_entries
]
self.generation = MinimalResource(**generation)
self.is_main_series = is_main_series
self.names = [Name(**name) for name in names]
self.pokemon = [AbilityPokemon(**pokemon) for pokemon in pokemon]
class AbilityPokemon(Resource):
is_hidden: bool
slot: int
pokemon: MinimalResource["Pokemon"]
def __init__(self, *, is_hidden: bool, slot: int, pokemon: Dict[str, Any]) -> None:
self.is_hidden = is_hidden
self.slot = slot
self.pokemon = MinimalResource(**pokemon)
class AbilityEffectChange(Resource):
effect_entries: List["Effect"]
version_group: MinimalResource["VersionGroup"]
def __init__(
self, *, effect_entries: List[Dict[str, Any]], version_group: Dict[str, Any]
) -> None:
self.effect_entries = [
Effect(**effect_entry) for effect_entry in effect_entries
]
self.version_group = MinimalResource(**version_group)
class AbilityFlavorText(Resource):
flavor_text: str
language: MinimalResource["Language"]
version_group: MinimalResource["VersionGroup"]
def __init__(
self,
*,
flavor_text: str,
language: Dict[str, Any],
version_group: Dict[str, Any],
) -> None:
self.flavor_text = flavor_text
self.language = MinimalResource(**language)
self.version_group = MinimalResource(**version_group)
|
[
"aiopoke.utils.minimal_resources.MinimalResource",
"aiopoke.objects.utility.common_models.Name",
"aiopoke.objects.utility.VerboseEffect",
"aiopoke.objects.utility.Effect"
] |
[((1630, 1659), 'aiopoke.utils.minimal_resources.MinimalResource', 'MinimalResource', ([], {}), '(**generation)\n', (1645, 1659), False, 'from aiopoke.utils.minimal_resources import MinimalResource\n'), ((2113, 2139), 'aiopoke.utils.minimal_resources.MinimalResource', 'MinimalResource', ([], {}), '(**pokemon)\n', (2128, 2139), False, 'from aiopoke.utils.minimal_resources import MinimalResource\n'), ((2525, 2557), 'aiopoke.utils.minimal_resources.MinimalResource', 'MinimalResource', ([], {}), '(**version_group)\n', (2540, 2557), False, 'from aiopoke.utils.minimal_resources import MinimalResource\n'), ((2930, 2957), 'aiopoke.utils.minimal_resources.MinimalResource', 'MinimalResource', ([], {}), '(**language)\n', (2945, 2957), False, 'from aiopoke.utils.minimal_resources import MinimalResource\n'), ((2987, 3019), 'aiopoke.utils.minimal_resources.MinimalResource', 'MinimalResource', ([], {}), '(**version_group)\n', (3002, 3019), False, 'from aiopoke.utils.minimal_resources import MinimalResource\n'), ((1374, 1403), 'aiopoke.objects.utility.VerboseEffect', 'VerboseEffect', ([], {}), '(**effect_entry)\n', (1387, 1403), False, 'from aiopoke.objects.utility import Effect, NamedResource, VerboseEffect\n'), ((1727, 1739), 'aiopoke.objects.utility.common_models.Name', 'Name', ([], {}), '(**name)\n', (1731, 1739), False, 'from aiopoke.objects.utility.common_models import Name\n'), ((2428, 2450), 'aiopoke.objects.utility.Effect', 'Effect', ([], {}), '(**effect_entry)\n', (2434, 2450), False, 'from aiopoke.objects.utility import Effect, NamedResource, VerboseEffect\n')]
|
import coloredlogs
import logging
import os
logging.basicConfig(
filename="plex_doctor.log",
level=logging.DEBUG,
format='%(levelname)s: "%(asctime)s - %(message)s',
)
log = logging.getLogger("PLEX-DOCTOR")
log.setLevel(logging.DEBUG)
LOGLEVEL = os.environ.get("LOGLEVEL", "INFO").upper()
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(
logging.Formatter('%(levelname)s: "%(asctime)s - %(message)s')
)
log.addHandler(stream_handler)
coloredlogs.install(LOGLEVEL, logger=log)
|
[
"logging.basicConfig",
"coloredlogs.install",
"logging.StreamHandler",
"logging.Formatter",
"os.environ.get",
"logging.getLogger"
] |
[((45, 170), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""plex_doctor.log"""', 'level': 'logging.DEBUG', 'format': '"""%(levelname)s: "%(asctime)s - %(message)s"""'}), '(filename=\'plex_doctor.log\', level=logging.DEBUG, format\n =\'%(levelname)s: "%(asctime)s - %(message)s\')\n', (64, 170), False, 'import logging\n'), ((188, 220), 'logging.getLogger', 'logging.getLogger', (['"""PLEX-DOCTOR"""'], {}), "('PLEX-DOCTOR')\n", (205, 220), False, 'import logging\n'), ((321, 344), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (342, 344), False, 'import logging\n'), ((476, 517), 'coloredlogs.install', 'coloredlogs.install', (['LOGLEVEL'], {'logger': 'log'}), '(LOGLEVEL, logger=log)\n', (495, 517), False, 'import coloredlogs\n'), ((378, 440), 'logging.Formatter', 'logging.Formatter', (['"""%(levelname)s: "%(asctime)s - %(message)s"""'], {}), '(\'%(levelname)s: "%(asctime)s - %(message)s\')\n', (395, 440), False, 'import logging\n'), ((261, 295), 'os.environ.get', 'os.environ.get', (['"""LOGLEVEL"""', '"""INFO"""'], {}), "('LOGLEVEL', 'INFO')\n", (275, 295), False, 'import os\n')]
|
import unittest
import tohil
class TestMethods(unittest.TestCase):
def test_convert1(self):
"""exercise tohil.convert with no to= and with to=str"""
self.assertEqual(tohil.convert(10), "10")
self.assertEqual(tohil.convert(10, to=str), "10")
self.assertEqual(tohil.convert("10"), "10")
self.assertEqual(tohil.convert("10", to=str), "10")
def test_convert2(self):
"""exercise tohil.convert and to=int and to=float"""
self.assertEqual(tohil.convert("10", to=int), 10)
self.assertEqual(tohil.convert("10", to=float), 10.0)
def test_convert3(self):
"""exercise tohil.convert to=bool"""
self.assertEqual(tohil.convert(True, to=bool), True)
self.assertEqual(tohil.convert("t", to=bool), True)
self.assertEqual(tohil.convert("1", to=bool), True)
self.assertEqual(tohil.convert(1, to=bool), True)
self.assertEqual(tohil.convert(False, to=bool), False)
self.assertEqual(tohil.convert("f", to=bool), False)
self.assertEqual(tohil.convert("0", to=bool), False)
self.assertEqual(tohil.convert(0, to=bool), False)
def test_convert4(self):
"""exercise tohil.convert to=list"""
self.assertEqual(tohil.convert("1 2 3 4 5", to=list), ["1", "2", "3", "4", "5"])
def test_convert5(self):
"""exercise tohil.convert and to=dict"""
self.assertEqual(
tohil.convert("a 1 b 2 c 3 d 4", to=dict),
{"a": "1", "b": "2", "c": "3", "d": "4"},
)
def test_convert6(self):
"""exercise tohil.convert and to=tuple"""
self.assertEqual(
tohil.convert("a 1 b 2 c 3 d 4", to=tuple),
("a", "1", "b", "2", "c", "3", "d", "4"),
)
def test_convert7(self):
"""exercise tohil.convert and to=set"""
self.assertEqual(
sorted(tohil.convert("1 2 3 4 5 6 6", to=set)),
["1", "2", "3", "4", "5", "6"],
)
def test_convert8(self):
"""exercise tohil.convert and to=tohil.tclobj"""
self.assertEqual(
repr(tohil.convert("1 2 3", to=tohil.tclobj)), "<tohil.tclobj: '1 2 3'>"
)
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"tohil.convert"
] |
[((2226, 2241), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2239, 2241), False, 'import unittest\n'), ((189, 206), 'tohil.convert', 'tohil.convert', (['(10)'], {}), '(10)\n', (202, 206), False, 'import tohil\n'), ((239, 264), 'tohil.convert', 'tohil.convert', (['(10)'], {'to': 'str'}), '(10, to=str)\n', (252, 264), False, 'import tohil\n'), ((297, 316), 'tohil.convert', 'tohil.convert', (['"""10"""'], {}), "('10')\n", (310, 316), False, 'import tohil\n'), ((349, 376), 'tohil.convert', 'tohil.convert', (['"""10"""'], {'to': 'str'}), "('10', to=str)\n", (362, 376), False, 'import tohil\n'), ((500, 527), 'tohil.convert', 'tohil.convert', (['"""10"""'], {'to': 'int'}), "('10', to=int)\n", (513, 527), False, 'import tohil\n'), ((558, 587), 'tohil.convert', 'tohil.convert', (['"""10"""'], {'to': 'float'}), "('10', to=float)\n", (571, 587), False, 'import tohil\n'), ((695, 723), 'tohil.convert', 'tohil.convert', (['(True)'], {'to': 'bool'}), '(True, to=bool)\n', (708, 723), False, 'import tohil\n'), ((756, 783), 'tohil.convert', 'tohil.convert', (['"""t"""'], {'to': 'bool'}), "('t', to=bool)\n", (769, 783), False, 'import tohil\n'), ((816, 843), 'tohil.convert', 'tohil.convert', (['"""1"""'], {'to': 'bool'}), "('1', to=bool)\n", (829, 843), False, 'import tohil\n'), ((876, 901), 'tohil.convert', 'tohil.convert', (['(1)'], {'to': 'bool'}), '(1, to=bool)\n', (889, 901), False, 'import tohil\n'), ((934, 963), 'tohil.convert', 'tohil.convert', (['(False)'], {'to': 'bool'}), '(False, to=bool)\n', (947, 963), False, 'import tohil\n'), ((997, 1024), 'tohil.convert', 'tohil.convert', (['"""f"""'], {'to': 'bool'}), "('f', to=bool)\n", (1010, 1024), False, 'import tohil\n'), ((1058, 1085), 'tohil.convert', 'tohil.convert', (['"""0"""'], {'to': 'bool'}), "('0', to=bool)\n", (1071, 1085), False, 'import tohil\n'), ((1119, 1144), 'tohil.convert', 'tohil.convert', (['(0)'], {'to': 'bool'}), '(0, to=bool)\n', (1132, 1144), False, 'import tohil\n'), ((1253, 1288), 
'tohil.convert', 'tohil.convert', (['"""1 2 3 4 5"""'], {'to': 'list'}), "('1 2 3 4 5', to=list)\n", (1266, 1288), False, 'import tohil\n'), ((1434, 1475), 'tohil.convert', 'tohil.convert', (['"""a 1 b 2 c 3 d 4"""'], {'to': 'dict'}), "('a 1 b 2 c 3 d 4', to=dict)\n", (1447, 1475), False, 'import tohil\n'), ((1659, 1701), 'tohil.convert', 'tohil.convert', (['"""a 1 b 2 c 3 d 4"""'], {'to': 'tuple'}), "('a 1 b 2 c 3 d 4', to=tuple)\n", (1672, 1701), False, 'import tohil\n'), ((1890, 1928), 'tohil.convert', 'tohil.convert', (['"""1 2 3 4 5 6 6"""'], {'to': 'set'}), "('1 2 3 4 5 6 6', to=set)\n", (1903, 1928), False, 'import tohil\n'), ((2115, 2154), 'tohil.convert', 'tohil.convert', (['"""1 2 3"""'], {'to': 'tohil.tclobj'}), "('1 2 3', to=tohil.tclobj)\n", (2128, 2154), False, 'import tohil\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2018 <EMAIL>
# Licensed under the MIT license (http://opensource.org/licenses/MIT)
from sexpr import sexp
import pprint
import copy
import hexdump
DEBUG = 0
def u8(x):
return x & 0xff
def i16(x):
return x & 0xffff
class LEDVMError(Exception):
pass
class OpCodeInfo(object):
def __init__(self, name, data_len, arg_type):
self.name = name
self.data_len = data_len
self.arg_type = arg_type
ARG_NONE = 0
ARG_REFRENCES = 1
class OpCode(object):
SHOW_HSV = 0x00
SHOW_RGB = 0x01
LOAD_PIXEL = 0x02
ADD_VEC3 = 0x03
SUB_VEC3 = 0x04
IF_EQ = 0x05
OP_CODE_TABLE = {
# CODE , MENOMIC , DATA_SIZE
SHOW_HSV : OpCodeInfo("SHOW_HSV" , 0 , OpCodeInfo.ARG_NONE) ,
SHOW_RGB : OpCodeInfo("SHOW_RGB" , 0 , OpCodeInfo.ARG_NONE) ,
LOAD_PIXEL : OpCodeInfo("LOAD_PIXEL" , 3 , OpCodeInfo.ARG_REFRENCES) ,
ADD_VEC3 : OpCodeInfo("ADD_VEC3" , 3 , OpCodeInfo.ARG_REFRENCES) ,
SUB_VEC3 : OpCodeInfo("SUB_VEC3" , 3 , OpCodeInfo.ARG_REFRENCES) ,
IF_EQ : OpCodeInfo("IF_EQ" , 3 , OpCodeInfo.ARG_REFRENCES) ,
}
@staticmethod
def to_string(code):
if code in OpCode.OP_CODE_TABLE:
name = OpCode.OP_CODE_TABLE[code].name
return "{}<{}>".format(name, code)
else:
return "{}<{}>".format("UnknownOpCode", code)
def __init__(self, name, data_len=0):
self.name = name
self.data_len = data_len
class Register(object):
# Register codes
PIXEL_NUM = 0
OUTPUT_TYPE = 1
KEY_STATE = 2
MOUSE_X = 3
MOUSE_Y = 4
OUTPUT_TYPE_RGB = 0
OUTPUT_TYPE_HSV = 1
def __init__(self, name, default_value=0):
self.name = name
self.value = default_value
self.default_value = default_value
class LEDEffectVM(object):
REGISTER_TABLE = {
Register.PIXEL_NUM : Register("PIXEL_NUM", 0),
Register.OUTPUT_TYPE : Register("OUTPUT_TYPE", 0),
Register.KEY_STATE : Register("KEY_STATE", 0),
Register.MOUSE_X : Register("MOUSE_X", 0),
Register.MOUSE_Y : Register("MOUSE_Y", 0),
}
def __init__(self, led_program_table={'main': []}, num_pixels=None):
self.pixels = [(0, 0, 0)] * num_pixels
self.led_program_table = led_program_table
self.set_active_progarm('main')
self.instr_ptr = 0
self.registers = {}
for reg in self.REGISTER_TABLE:
self.registers[reg] = self.REGISTER_TABLE[reg].default_value
def set_active_progarm(self, name):
self._current_program_name = name
self.current_program = self.led_program_table[name]
def goto_start(self):
self.instr_ptr = 0
def rel_jump(self, offset):
self.instr_ptr += (offset)
def get_next_word(self):
if self.instr_ptr >= len(self.current_program):
return None
result = self.current_program[self.instr_ptr]
self.instr_ptr += 1
return result
def read_op_code(self):
code = self.get_next_word()
if code == None:
return None, None
self.vm_assert(code in OpCode.OP_CODE_TABLE, "Invalid OpCode: {}".format(code))
op_code = OpCode.OP_CODE_TABLE[code]
data = []
for i in range(op_code.data_len):
data.append(self.get_next_word())
# if DEBUG >= 1
if DEBUG >= 5:
print("Instruction: {}".format(self.instr_ptr))
print("Current code: {}, data:{}".format(
OpCode.to_string(code), data
)
)
return code, data
REFERENCE_TYPE_IMMEDIATE = 0
REFERENCE_TYPE_REGISTER = 1
REFERENCE_TYPE_PIXEL = 2
def lookup_refrence(self, ref):
# Refrences either an immediate value or another register value
# Format of refrence values (in hex):
# * 00xx -> Single byte immediate value
# * 01xx -> Single byte immediate value
value = (ref >> 0) & 0xff
ref_type = (ref >> 8) & 0xff
if ref_type == self.REFERENCE_TYPE_IMMEDIATE:
return value
elif ref_type == self.REFERENCE_TYPE_PIXEL:
assert(value < 3)
return self.get_current_pixel()[value]
elif ref_type == self.REFERENCE_TYPE_REGISTER:
assert(value in self.REGISTER_TABLE)
return self.registers[value]
def get_pixel(self, pixel_num):
return self.pixels[pixel_num]
def get_pixel_type(self, pixel_num):
return self.registers[Register.OUTPUT_TYPE]
def get_current_pixel(self):
return self.pixels[self.registers[Register.PIXEL_NUM]]
def set_current_pixel(self, x, y, z):
self.pixels[self.registers[Register.PIXEL_NUM]] = (x, y, z)
def execute_op_code(self, code, data):
"""
Return True if the program has finished executing
"""
if code == OpCode.SHOW_HSV:
self.registers[Register.OUTPUT_TYPE] = Register.OUTPUT_TYPE_HSV
return True
elif code == OpCode.SHOW_RGB:
self.registers[Register.OUTPUT_TYPE] = Register.OUTPUT_TYPE_RGB
return True
elif code == OpCode.LOAD_PIXEL:
self.set_current_pixel(
self.lookup_refrence(data[0]),
self.lookup_refrence(data[1]),
self.lookup_refrence(data[2])
)
elif code == OpCode.ADD_VEC3:
old_value = self.get_current_pixel()
self.set_current_pixel(
u8(old_value[0] + self.lookup_refrence(data[0])),
u8(old_value[1] + self.lookup_refrence(data[1])),
u8(old_value[2] + self.lookup_refrence(data[2]))
)
elif code == OpCode.SUB_VEC3:
old_value = self.get_current_pixel()
self.set_current_pixel(
u8(old_value[0] - self.lookup_refrence(data[0])),
u8(old_value[1] - self.lookup_refrence(data[1])),
u8(old_value[2] - self.lookup_refrence(data[2]))
)
elif code == OpCode.IF_EQ:
lhs = self.lookup_refrence(data[0])
rhs = self.lookup_refrence(data[1])
jmp_pos = self.lookup_refrence(data[2])
if DEBUG >= 5:
print("lhs, rhs, == :", lhs, rhs, lhs == rhs)
if lhs != rhs:
self.rel_jump(jmp_pos)
else:
raise LEDVMError("Unknown opcode {}".format(code))
return False
def execute_program(self, program_name):
self.set_active_progarm(program_name)
for (pixel_i, _) in enumerate(self.pixels):
self.execute_program_pixel(pixel_i)
def execute_program_pixel(self, pixel_number):
self.goto_start()
self.registers[Register.PIXEL_NUM] = pixel_number
is_running = True
if DEBUG:
print("Starting program for pixel: {}".format(pixel_number))
while is_running:
(code, data) = self.read_op_code()
if code == None:
break;
if DEBUG:
print("(OpCode {}, Data {})".format(code, data))
is_running = not self.execute_op_code(code, data)
def vm_assert(self, exp, msg=""):
if exp != True:
self.print_core_dump(msg)
if msg == "":
LEDVMError("LEDVMError: unspecified error")
else:
LEDVMError("LEDVMError: {}".format(msg))
def print_core_dump(self, error_msg):
print(
"\n"
"Core dump while executing program '{}':\n"
"Error message: {}\n"
"instr_ptr: {}\n"
"program: {}\n"
.format(
self._current_program_name,
error_msg,
self.instr_ptr,
self.current_program
)
)
class LEDEffectVMParser(object):
def __init__(self):
# The Parser needs the inverse mappings of the op_code/register lookup
# tables, so generate them here
self.op_code_lookup_table = {}
for code in OpCode.OP_CODE_TABLE:
name = OpCode.OP_CODE_TABLE[code].name
self.op_code_lookup_table[name] = code
self.register_lookup_table = {}
for reg in LEDEffectVM.REGISTER_TABLE:
name = LEDEffectVM.REGISTER_TABLE[reg].name
self.register_lookup_table[name] = reg
# def exp_as_arrays(self, exp):
# print(exp)
# arr = exp[0]
# result = []
# for child in arr:
# result.append(self.exp_as_arrays(child))
# return result
def parse_asm(self, program_str):
sexpression = sexp.parseString(program_str, parseAll=True)
if DEBUG:
print(sexpression)
pprint.pprint(sexpression)
# sexpression = self.exp_as_arrays(sexpression)
byte_code = []
byte_code += self.parse_program(sexpression)
return byte_code
def generate_ref(self, ref):
if isinstance(ref, int):
assert(ref <= 255)
ref_type = LEDEffectVM.REFERENCE_TYPE_IMMEDIATE
value = ref
elif isinstance(ref, str):
if ref in self.register_lookup_table:
ref_type = LEDEffectVM.REFERENCE_TYPE_REGISTER
value = self.register_lookup_table[ref]
elif ref in ('r', 'g', 'b', 'h', 's', 'v'):
ref_type = LEDEffectVM.REFERENCE_TYPE_PIXEL
value = {
'r': 0,
'h': 0,
'g': 1,
's': 1,
'b': 2,
'v': 2,
}[ref]
else:
raise LEDVMError("Unknown reference: {}".format(ref))
else:
return None
lo_byte = (value << 0)
hi_byte = (ref_type << 8)
return [lo_byte | hi_byte]
def parse_instruction(self, exp):
if DEBUG:
print("Parse Instruction: ", exp)
name = exp[0]
result = []
if not name in self.op_code_lookup_table:
raise LEDVMError("Unknown opcode menomic: {}".format(name))
op_code = self.op_code_lookup_table[name]
op_info = OpCode.OP_CODE_TABLE[op_code]
# Add the op_code to the result
result += [op_code]
OP_CODE_POS = 1
data = exp[OP_CODE_POS:]
if len(data) != op_info.data_len:
raise LEDVMError("Expected {} arguments to opcode {}, got {}".format(
op_info.data_len,
name,
len(data)
)
)
if op_code == OpCode.IF_EQ:
print(data)
print(data[0], data[1], data[2])
LHS_POS = 0
RHS_POS = 1
JUMP_POS = 2
result += self.generate_ref(data[LHS_POS])
result += self.generate_ref(data[RHS_POS])
if_block_exp = data[JUMP_POS]
ref_data = self.generate_ref(if_block_exp)
if ref_data != None:
result += ref_data
else:
print('ifblock:', if_block_exp)
if_block = self.parse_instruction_list(if_block_exp)
jmp_offset = i16(len(if_block))
result += [jmp_offset]
result += if_block
print('ifBlockResult:', result)
elif op_info.arg_type == OpCodeInfo.ARG_NONE:
pass # Don't need to add data
elif op_info.arg_type == OpCodeInfo.ARG_REFRENCES:
for ref in data:
result += self.generate_ref(ref)
return result
def parse_instruction_list(self, instruction_list):
result = []
for instruction in instruction_list:
result += self.parse_instruction(instruction)
return result
def parse_program(self, exp):
if DEBUG:
print("Parse program: ", exp)
exp = exp[0]
# pprint.pprint(exp)
return self.parse_instruction_list(exp)
if __name__ == "__main__":
init_prog = """
(
(LOAD_PIXEL PIXEL_NUM 255 200)
)
"""
# main_prog = """
# (
# (LOAD_PIXEL r 255 200)
# (ADD_VEC3 1 0 0)
# (IF_EQ v 199
# (
# (ADD_VEC3 1 0 0)
# )
# )
# (IF_EQ v 200
# (
# (SUB_VEC3 1 0 0)
# )
# )
# (SHOW_HSV)
# )
# """
main_prog = """
(
(IF_EQ h 0
(
(LOAD_PIXEL h 255 199)
)
)
(IF_EQ h 255
(
(LOAD_PIXEL h 255 200)
)
)
(IF_EQ v 200
(
(SUB_VEC3 1 0 0)
)
)
(IF_EQ v 199
(
(ADD_VEC3 1 0 0)
)
)
(SHOW_HSV)
)
"""
vm_parser = LEDEffectVMParser()
led_programs = {
"init": vm_parser.parse_asm(init_prog),
"main": vm_parser.parse_asm(main_prog),
}
vm = LEDEffectVM(led_programs, num_pixels=64)
for prog in led_programs:
print(prog, led_programs[prog])
byte_code_as_bytes = bytes([])
for word in led_programs[prog]:
byte_code_as_bytes += bytes([word & 0xff, word>>8 & 0xff])
hexdump.hexdump(byte_code_as_bytes)
vm.execute_program('init')
for i in range(300):
vm.execute_program('main')
print(vm.pixels)
|
[
"sexpr.sexp.parseString",
"hexdump.hexdump",
"pprint.pprint"
] |
[((8866, 8910), 'sexpr.sexp.parseString', 'sexp.parseString', (['program_str'], {'parseAll': '(True)'}), '(program_str, parseAll=True)\n', (8882, 8910), False, 'from sexpr import sexp\n'), ((13579, 13614), 'hexdump.hexdump', 'hexdump.hexdump', (['byte_code_as_bytes'], {}), '(byte_code_as_bytes)\n', (13594, 13614), False, 'import hexdump\n'), ((8972, 8998), 'pprint.pprint', 'pprint.pprint', (['sexpression'], {}), '(sexpression)\n', (8985, 8998), False, 'import pprint\n')]
|
from report_vitals import report_battery_vitals
from filter_values import filterOut_safe_vitals
from process_battery_data import process_data
from controller_actions import get_actions
def is_battery_ok(bms_attributes):
data = process_data(bms_attributes)
report_battery_vitals(data)
get_actions(data)
value = list(filter(filterOut_safe_vitals,data))
return len(value) == 0
if __name__ == '__main__':
assert(is_battery_ok({'temperature': 25,'Soc': 70, 'Charge_rate': 0.7}) is True) #all values in limit
assert(is_battery_ok({'Temperature': 46,'soc': 23, 'Charge_rate': 0.77}) is False) #high temp warning,low soc warning,charge_rate high warnings
|
[
"controller_actions.get_actions",
"process_battery_data.process_data",
"report_vitals.report_battery_vitals"
] |
[((238, 266), 'process_battery_data.process_data', 'process_data', (['bms_attributes'], {}), '(bms_attributes)\n', (250, 266), False, 'from process_battery_data import process_data\n'), ((271, 298), 'report_vitals.report_battery_vitals', 'report_battery_vitals', (['data'], {}), '(data)\n', (292, 298), False, 'from report_vitals import report_battery_vitals\n'), ((303, 320), 'controller_actions.get_actions', 'get_actions', (['data'], {}), '(data)\n', (314, 320), False, 'from controller_actions import get_actions\n')]
|
# -*- coding: utf-8 -*-
"""
MIT License
Copyright (c) 2017 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import paddle.v2 as paddle
import gzip
import sys
import data_provider
import numpy as np
def param():
return paddle.attr.Param(
initial_std=0.01,
initial_mean=0
)
def encoder(x_):
x_ = paddle.layer.fc(
input=x_,
size=512,
act=paddle.activation.Sigmoid(),
param_attr=param(),
bias_attr=param()
)
x_ = paddle.layer.fc(
input=x_,
size=256,
act=paddle.activation.Relu(),
param_attr=param(),
bias_attr=param()
)
x_ = paddle.layer.fc(
input=x_,
size=128,
act=paddle.activation.Relu(),
param_attr=param(),
bias_attr=param()
)
return x_
def decoder(x_):
x_ = paddle.layer.fc(
input=x_,
size=128,
act=paddle.activation.Sigmoid(),
param_attr=param(),
bias_attr=param()
)
x_ = paddle.layer.fc(
input=x_,
size=256,
act=paddle.activation.Relu(),
param_attr=param(),
bias_attr=param()
)
x_ = paddle.layer.fc(
input=x_,
size=512,
act=paddle.activation.Relu(),
param_attr=param(),
bias_attr=param()
)
return x_
def output(x_):
return paddle.layer.fc(
input=x_,
size=784,
act=paddle.activation.Relu(),
param_attr=param(),
bias_attr=param()
)
paddle.init(use_gpu=False, trainer_count=1)
x = paddle.layer.data(
name='x',
type=paddle.data_type.dense_vector(784)
)
y = encoder(x)
y = decoder(y)
y = output(y)
def train():
optimizer = paddle.optimizer.RMSProp(
learning_rate=1e-3,
regularization=paddle.optimizer.L2Regularization(rate=8e-4)
)
loss = paddle.layer.mse_cost(label=x, input=y)
parameters = paddle.parameters.create(loss)
trainer = paddle.trainer.SGD(
cost=loss,
parameters=parameters,
update_equation=optimizer
)
feeding = {'x': 0}
def event_handler(event):
if isinstance(event, paddle.event.EndIteration):
if event.batch_id % 50 == 0:
print ("\n pass %d, Batch: %d cost: %f"
% (event.pass_id, event.batch_id, event.cost))
else:
sys.stdout.write('.')
sys.stdout.flush()
if isinstance(event, paddle.event.EndPass):
with gzip.open('output/params_pass_%d.tar.gz' % event.pass_id, 'w') as f:
parameters.to_tar(f)
reader = data_provider.create_reader('train', 60000)
trainer.train(
paddle.batch(
reader=reader,
batch_size=128
),
feeding=feeding,
num_passes=20,
event_handler=event_handler
)
def test(model_path):
with gzip.open(model_path, 'r') as openFile:
parameters = paddle.parameters.Parameters.from_tar(openFile)
testset = [[x] for x in data_provider.fetch_testingset()['images'][:10]]
# 使用infer进行预测
result = paddle.infer(
input=testset,
parameters=parameters,
output_layer=y,
feeding={'x': 0}
)
return result, np.array(testset)
if __name__ == '__main__':
origin, result = test('output/params_pass_19.tar.gz')
np.save('origin.dat', origin)
np.save('result.dat', result)
|
[
"sys.stdout.write",
"paddle.v2.layer.mse_cost",
"sys.stdout.flush",
"data_provider.create_reader",
"paddle.v2.activation.Sigmoid",
"data_provider.fetch_testingset",
"paddle.v2.activation.Relu",
"paddle.v2.parameters.create",
"paddle.v2.init",
"numpy.save",
"paddle.v2.attr.Param",
"paddle.v2.data_type.dense_vector",
"paddle.v2.infer",
"paddle.v2.batch",
"paddle.v2.optimizer.L2Regularization",
"gzip.open",
"paddle.v2.trainer.SGD",
"numpy.array",
"paddle.v2.parameters.Parameters.from_tar"
] |
[((2494, 2537), 'paddle.v2.init', 'paddle.init', ([], {'use_gpu': '(False)', 'trainer_count': '(1)'}), '(use_gpu=False, trainer_count=1)\n', (2505, 2537), True, 'import paddle.v2 as paddle\n'), ((1207, 1258), 'paddle.v2.attr.Param', 'paddle.attr.Param', ([], {'initial_std': '(0.01)', 'initial_mean': '(0)'}), '(initial_std=0.01, initial_mean=0)\n', (1224, 1258), True, 'import paddle.v2 as paddle\n'), ((2838, 2877), 'paddle.v2.layer.mse_cost', 'paddle.layer.mse_cost', ([], {'label': 'x', 'input': 'y'}), '(label=x, input=y)\n', (2859, 2877), True, 'import paddle.v2 as paddle\n'), ((2896, 2926), 'paddle.v2.parameters.create', 'paddle.parameters.create', (['loss'], {}), '(loss)\n', (2920, 2926), True, 'import paddle.v2 as paddle\n'), ((2942, 3021), 'paddle.v2.trainer.SGD', 'paddle.trainer.SGD', ([], {'cost': 'loss', 'parameters': 'parameters', 'update_equation': 'optimizer'}), '(cost=loss, parameters=parameters, update_equation=optimizer)\n', (2960, 3021), True, 'import paddle.v2 as paddle\n'), ((3611, 3654), 'data_provider.create_reader', 'data_provider.create_reader', (['"""train"""', '(60000)'], {}), "('train', 60000)\n", (3638, 3654), False, 'import data_provider\n'), ((4101, 4190), 'paddle.v2.infer', 'paddle.infer', ([], {'input': 'testset', 'parameters': 'parameters', 'output_layer': 'y', 'feeding': "{'x': 0}"}), "(input=testset, parameters=parameters, output_layer=y, feeding=\n {'x': 0})\n", (4113, 4190), True, 'import paddle.v2 as paddle\n'), ((4352, 4381), 'numpy.save', 'np.save', (['"""origin.dat"""', 'origin'], {}), "('origin.dat', origin)\n", (4359, 4381), True, 'import numpy as np\n'), ((4386, 4415), 'numpy.save', 'np.save', (['"""result.dat"""', 'result'], {}), "('result.dat', result)\n", (4393, 4415), True, 'import numpy as np\n'), ((2584, 2618), 'paddle.v2.data_type.dense_vector', 'paddle.data_type.dense_vector', (['(784)'], {}), '(784)\n', (2613, 2618), True, 'import paddle.v2 as paddle\n'), ((3682, 3725), 'paddle.v2.batch', 'paddle.batch', ([], 
{'reader': 'reader', 'batch_size': '(128)'}), '(reader=reader, batch_size=128)\n', (3694, 3725), True, 'import paddle.v2 as paddle\n'), ((3884, 3910), 'gzip.open', 'gzip.open', (['model_path', '"""r"""'], {}), "(model_path, 'r')\n", (3893, 3910), False, 'import gzip\n'), ((3945, 3992), 'paddle.v2.parameters.Parameters.from_tar', 'paddle.parameters.Parameters.from_tar', (['openFile'], {}), '(openFile)\n', (3982, 3992), True, 'import paddle.v2 as paddle\n'), ((4243, 4260), 'numpy.array', 'np.array', (['testset'], {}), '(testset)\n', (4251, 4260), True, 'import numpy as np\n'), ((1374, 1401), 'paddle.v2.activation.Sigmoid', 'paddle.activation.Sigmoid', ([], {}), '()\n', (1399, 1401), True, 'import paddle.v2 as paddle\n'), ((1537, 1561), 'paddle.v2.activation.Relu', 'paddle.activation.Relu', ([], {}), '()\n', (1559, 1561), True, 'import paddle.v2 as paddle\n'), ((1697, 1721), 'paddle.v2.activation.Relu', 'paddle.activation.Relu', ([], {}), '()\n', (1719, 1721), True, 'import paddle.v2 as paddle\n'), ((1890, 1917), 'paddle.v2.activation.Sigmoid', 'paddle.activation.Sigmoid', ([], {}), '()\n', (1915, 1917), True, 'import paddle.v2 as paddle\n'), ((2053, 2077), 'paddle.v2.activation.Relu', 'paddle.activation.Relu', ([], {}), '()\n', (2075, 2077), True, 'import paddle.v2 as paddle\n'), ((2213, 2237), 'paddle.v2.activation.Relu', 'paddle.activation.Relu', ([], {}), '()\n', (2235, 2237), True, 'import paddle.v2 as paddle\n'), ((2407, 2431), 'paddle.v2.activation.Relu', 'paddle.activation.Relu', ([], {}), '()\n', (2429, 2431), True, 'import paddle.v2 as paddle\n'), ((2775, 2821), 'paddle.v2.optimizer.L2Regularization', 'paddle.optimizer.L2Regularization', ([], {'rate': '(0.0008)'}), '(rate=0.0008)\n', (2808, 2821), True, 'import paddle.v2 as paddle\n'), ((3365, 3386), 'sys.stdout.write', 'sys.stdout.write', (['"""."""'], {}), "('.')\n", (3381, 3386), False, 'import sys\n'), ((3403, 3421), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3419, 3421), False, 'import 
sys\n'), ((3491, 3553), 'gzip.open', 'gzip.open', (["('output/params_pass_%d.tar.gz' % event.pass_id)", '"""w"""'], {}), "('output/params_pass_%d.tar.gz' % event.pass_id, 'w')\n", (3500, 3553), False, 'import gzip\n'), ((4021, 4053), 'data_provider.fetch_testingset', 'data_provider.fetch_testingset', ([], {}), '()\n', (4051, 4053), False, 'import data_provider\n')]
|
from mstrio.users_and_groups import list_users
from mstrio.api.projects import get_projects
from mstrio.distribution_services.subscription.subscription_manager import SubscriptionManager
from mstrio.connection import Connection
def delete_subscriptions_of_departed_users(connection: "Connection") -> None:
"""Delete all subscription in all projects which owners are departed users.
Args:
Args:
connection: MicroStrategy connection object returned by
`connection.Connection()`
"""
# get all projects that the authenticated user has access to
response = get_projects(connection, whitelist=[('ERR014', 403)])
prjcts = response.json() if response.ok else []
# get all disabled users
all_usrs = list_users(connection=connection)
dsbld_usrs = [u for u in all_usrs if not u.enabled]
for prjct in prjcts:
project_id = prjct['id']
sub_mngr = SubscriptionManager(connection=connection, project_id=project_id)
for usr in dsbld_usrs:
subs = sub_mngr.list_subscriptions(owner={'id': usr.id})
msg = f"subscriptions of user with ID: {usr.id}"
msg += f" in project {prjct.name} with ID: {prjct.id}"
# call of the function below returns True if all passed
# subscriptions were deleted
if sub_mngr.delete(subscriptions=subs, force=True):
print("All " + msg + " were deleted.")
else:
print("Not all " + msg + " were deleted or there was no subsscriptions.")
|
[
"mstrio.api.projects.get_projects",
"mstrio.users_and_groups.list_users",
"mstrio.distribution_services.subscription.subscription_manager.SubscriptionManager"
] |
[((604, 657), 'mstrio.api.projects.get_projects', 'get_projects', (['connection'], {'whitelist': "[('ERR014', 403)]"}), "(connection, whitelist=[('ERR014', 403)])\n", (616, 657), False, 'from mstrio.api.projects import get_projects\n'), ((754, 787), 'mstrio.users_and_groups.list_users', 'list_users', ([], {'connection': 'connection'}), '(connection=connection)\n', (764, 787), False, 'from mstrio.users_and_groups import list_users\n'), ((922, 987), 'mstrio.distribution_services.subscription.subscription_manager.SubscriptionManager', 'SubscriptionManager', ([], {'connection': 'connection', 'project_id': 'project_id'}), '(connection=connection, project_id=project_id)\n', (941, 987), False, 'from mstrio.distribution_services.subscription.subscription_manager import SubscriptionManager\n')]
|
from coverage import coverage
import unittest
cov = coverage(branch=True, include=['app/*'])
cov.set_option('report:show_missing', True)
cov.erase()
cov.start()
from .client_test import ClientTestCase
from .features_test import FeatureTestCase
from .product_area_test import ProductAreaTestCase
if __name__ == '__main__':
tests = unittest.TestLoader().discover('./tests', pattern='*test.py')
unittest.TextTestRunner(verbosity=1).run(tests)
cov.stop()
cov.save()
print("\n\nCoverage Report:\n")
cov.report()
|
[
"unittest.TextTestRunner",
"unittest.TestLoader",
"coverage.coverage"
] |
[((54, 94), 'coverage.coverage', 'coverage', ([], {'branch': '(True)', 'include': "['app/*']"}), "(branch=True, include=['app/*'])\n", (62, 94), False, 'from coverage import coverage\n'), ((338, 359), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (357, 359), False, 'import unittest\n'), ((404, 440), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(1)'}), '(verbosity=1)\n', (427, 440), False, 'import unittest\n')]
|
# -*- coding: utf-8 -*-
# @Time : 2020/9/26
# @Author : <NAME>
# @Email : 暂无
# @File : command.py
# @Project : Flask-Demo
import os
import logging
from logging.handlers import RotatingFileHandler
from flask import request
basedir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
project_name = os.path.split(os.path.dirname(__file__))[1]
def register_logging(app):
class RequestFormatter(logging.Formatter):
# 通过继承,修改打印信息: 报错的url 与 远程地址
def format(self, record):
record.url = request.url
record.remote_addr = request.remote_addr
return super(RequestFormatter, self).format(record)
request_formatter = RequestFormatter(
'[%(asctime)s] %(remote_addr)s requested %(url)s\n'
'%(levelname)s in %(module)s: %(message)s'
)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
log_path = os.path.join(basedir, f'logs/{project_name}')
if not os.path.exists(log_path):
os.mkdir(log_path)
file_handler = RotatingFileHandler("{}/career_plan.log".format(log_path),
maxBytes=10 * 1024 * 1024, backupCount=10)
file_handler.setFormatter(formatter)
file_handler.setLevel(logging.INFO)
# 需要设置整个日志的等级,开发调试模式下,默认为debug; 没有设置会导致无法输出日志
app.logger.setLevel(logging.INFO)
if not app.debug:
# 生产模式下,需要设置合适等级
# app.logger.setLevel(logging.ERROR)
app.logger.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
|
[
"os.mkdir",
"os.path.dirname",
"os.path.exists",
"logging.Formatter",
"os.path.join"
] |
[((830, 903), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (847, 903), False, 'import logging\n'), ((920, 965), 'os.path.join', 'os.path.join', (['basedir', 'f"""logs/{project_name}"""'], {}), "(basedir, f'logs/{project_name}')\n", (932, 965), False, 'import os\n'), ((266, 291), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (281, 291), False, 'import os\n'), ((323, 348), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (338, 348), False, 'import os\n'), ((977, 1001), 'os.path.exists', 'os.path.exists', (['log_path'], {}), '(log_path)\n', (991, 1001), False, 'import os\n'), ((1011, 1029), 'os.mkdir', 'os.mkdir', (['log_path'], {}), '(log_path)\n', (1019, 1029), False, 'import os\n')]
|
import csv
from django.http import HttpResponse
class ExportCsvMixin:
def export_as_csv(self, request, queryset):
meta = self.model._meta
field_names = [field.name for field in meta.fields]
response = HttpResponse(content_type="text/csv")
response["Content-Disposition"] = "attachment; filename={}.csv".format(meta)
writer = csv.writer(response)
writer.writerow(field_names)
# for obj in queryset:
# row = writer.writerow([getattr(obj, field) for field in field_names])
return response
export_as_csv.short_description = "Export to csv"
def all_complete(self, request, queryset):
self.model.objects.all().update(completed=True)
self.message_user(request, "All task are set as completed now")
def all_not_complete(self, request, queryset):
self.model.objects.all().update(completed=False)
self.message_user(request, "All task are set as uncompleted now")
|
[
"csv.writer",
"django.http.HttpResponse"
] |
[((234, 271), 'django.http.HttpResponse', 'HttpResponse', ([], {'content_type': '"""text/csv"""'}), "(content_type='text/csv')\n", (246, 271), False, 'from django.http import HttpResponse\n'), ((374, 394), 'csv.writer', 'csv.writer', (['response'], {}), '(response)\n', (384, 394), False, 'import csv\n')]
|
from datetime import datetime, timezone
from itertools import cycle
from .lame import LAME
from .mt import MT
def filetime_to_dt(timestamp: int) -> datetime:
return datetime.fromtimestamp(timestamp // 100000000, timezone.utc)
def bytes_to_bitstring(data: bytes) -> str:
return "".join(bin(x)[2:].zfill(8) for x in data)
class BitStream:
def __init__(self, data: bytes) -> None:
self.data = bytes_to_bitstring(data)
def get_bits(self, num: int) -> int:
out = int(self.data[:num], 2)
self.data = self.data[num:]
return out
def xor(data: bytes, key: bytes) -> bytes:
return bytes(a ^ b for a, b in zip(data, cycle(key)))
def decrypt_lame(data: bytes, seed: int) -> bytes:
lame = LAME()
lame.srand(seed)
return bytes([x ^ lame.get_next() for x in data])
def decrypt_mt(data: bytes, seed: int) -> bytes:
key = MT(seed).get_bytes(len(data))
return xor(data, key)
def crc_data(data: bytes) -> int:
if len(data) == 0:
return 0
dwKey_ECX = 0
dwKey_ESI = 1
for b in data:
dwKey_ESI = (b + dwKey_ESI) % 0xFFF1
dwKey_ECX = (dwKey_ECX + dwKey_ESI) % 0xFFF1
return (dwKey_ECX << 0x10) + dwKey_ESI
|
[
"itertools.cycle",
"datetime.datetime.fromtimestamp"
] |
[((172, 232), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['(timestamp // 100000000)', 'timezone.utc'], {}), '(timestamp // 100000000, timezone.utc)\n', (194, 232), False, 'from datetime import datetime, timezone\n'), ((667, 677), 'itertools.cycle', 'cycle', (['key'], {}), '(key)\n', (672, 677), False, 'from itertools import cycle\n')]
|
#!/usr/bin/env python3
import sys
import os
import argparse
def parseArguments():
parser = argparse.ArgumentParser(description='transform file and header')
parser.add_argument("--list_file", help="", type=str,required=True)
parser.add_argument('--use_rs',type=str,help="if need to be limited at some rs", default=0)
parser.add_argument("--out", help="output format ldsc, default none", type=str,required=True)
args = parser.parse_args()
return args
args=parseArguments()
splfile=args.list_file.split(',')
DicByRs={}
listRs=list([])
listChrBp={}
rsissue=''
listrsissue=list([])
listchrissue=list([])
for File in splfile :
print(File)
Fread=open(File)
FreadL=Fread.readline().split()
Fread.close()
Fread=open(File)
if len(FreadL)==3 :
for line in Fread :
splt=line.replace('\n', '').split()
if splt[0] not in listRs :
DicByRs[splt[0]]=[None,None,splt[1],splt[2],None]
else :
RsInfo=DirRes[splt[0]]
##
print(RsInfo)
balisegood= (splt[1]==RsInfo[2] and splt[2]==RsInfo[3]) or (splt[1]==RsInfo[3] and splt[2]==RsInfo[2])
if balisegood ==False:
listrsissue.add(splt[1])
elif len(FreadL)==6:
# writenew.write('rsID\tChro\tPos\tA1\tA2\tnewRs\n')
for line in Fread :
splt=line.replace('\n', '').split()
NewRs=splt[5]
if splt[0] not in listRs :
DicByRs[splt[0]]=[splt[1],splt[2],splt[3],splt[4], splt[5]]
else :
balisegood= (splt[1]==RsInfo[2] and splt[2]==RsInfo[3]) or (splt[1]==RsInfo[3] and splt[2]==RsInfo[2])
RsInfo=DirRes[splt[0]]
if balisegood ==False:
listrsissue.add(splt[1])
listchrissue.add()
# check pos and chr
if RsInfo[0] :
if RsInfo[0] != splt[1] and RsInfo[1] != splt[2] :
listrsissue.add(splt[0])
else :
RsInfo[0]=splt[1]
RsInfo[1]=splt[2]
RsInfo[4]=splt[5]
else :
print("colomn error number :"+str(len(FreadL)))
sys.exit(3)
writeRs=open(args.out, 'w')
writeRs2=open(args.out+'_allinfo', 'w')
for rs in DicByRs:
RsInfo=DicByRs[rs]
if rs not in listrsissue :
if args.use_rs==1 :
writeRs.write(rs+'\t'+RsInfo[3]+'\t'+RsInfo[4]+'\n')
else :
writeRs.write(rs+'\t'+'\t'.join(RsInfo)+'\n')
writeRs2.write(rs+'\t'+'\t'.join(RsInfo)+'\n')
writeRsError=open(args.out+'_issue', 'w')
for rs in listrsissue :
RsInfo=DicByRs[rs]
writeRs.write(rs+'\t'+'\t'.join(RsInfo)+'\n')
|
[
"argparse.ArgumentParser",
"sys.exit"
] |
[((97, 161), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""transform file and header"""'}), "(description='transform file and header')\n", (120, 161), False, 'import argparse\n'), ((2147, 2158), 'sys.exit', 'sys.exit', (['(3)'], {}), '(3)\n', (2155, 2158), False, 'import sys\n')]
|