max_stars_repo_path
stringlengths 4
286
| max_stars_repo_name
stringlengths 5
119
| max_stars_count
int64 0
191k
| id
stringlengths 1
7
| content
stringlengths 6
1.03M
| content_cleaned
stringlengths 6
1.03M
| language
stringclasses 111
values | language_score
float64 0.03
1
| comments
stringlengths 0
556k
| edu_score
float64 0.32
5.03
| edu_int_score
int64 0
5
|
|---|---|---|---|---|---|---|---|---|---|---|
homeassistant/components/brother/const.py
|
tbarbette/core
| 1
|
6626851
|
<reponame>tbarbette/core<filename>homeassistant/components/brother/const.py<gh_stars>1-10
"""Constants for Brother integration."""
from homeassistant.const import ATTR_ICON, PERCENTAGE
ATTR_BELT_UNIT_REMAINING_LIFE = "belt_unit_remaining_life"
ATTR_BLACK_DRUM_COUNTER = "black_drum_counter"
ATTR_BLACK_DRUM_REMAINING_LIFE = "black_drum_remaining_life"
ATTR_BLACK_DRUM_REMAINING_PAGES = "black_drum_remaining_pages"
ATTR_BLACK_INK_REMAINING = "black_ink_remaining"
ATTR_BLACK_TONER_REMAINING = "black_toner_remaining"
ATTR_BW_COUNTER = "b/w_counter"
ATTR_COLOR_COUNTER = "color_counter"
ATTR_CYAN_DRUM_COUNTER = "cyan_drum_counter"
ATTR_CYAN_DRUM_REMAINING_LIFE = "cyan_drum_remaining_life"
ATTR_CYAN_DRUM_REMAINING_PAGES = "cyan_drum_remaining_pages"
ATTR_CYAN_INK_REMAINING = "cyan_ink_remaining"
ATTR_CYAN_TONER_REMAINING = "cyan_toner_remaining"
ATTR_DRUM_COUNTER = "drum_counter"
ATTR_DRUM_REMAINING_LIFE = "drum_remaining_life"
ATTR_DRUM_REMAINING_PAGES = "drum_remaining_pages"
ATTR_DUPLEX_COUNTER = "duplex_unit_pages_counter"
ATTR_ENABLED = "enabled"
ATTR_FUSER_REMAINING_LIFE = "fuser_remaining_life"
ATTR_LABEL = "label"
ATTR_LASER_REMAINING_LIFE = "laser_remaining_life"
ATTR_MAGENTA_DRUM_COUNTER = "magenta_drum_counter"
ATTR_MAGENTA_DRUM_REMAINING_LIFE = "magenta_drum_remaining_life"
ATTR_MAGENTA_DRUM_REMAINING_PAGES = "magenta_drum_remaining_pages"
ATTR_MAGENTA_INK_REMAINING = "magenta_ink_remaining"
ATTR_MAGENTA_TONER_REMAINING = "magenta_toner_remaining"
ATTR_MANUFACTURER = "Brother"
ATTR_PAGE_COUNTER = "page_counter"
ATTR_PF_KIT_1_REMAINING_LIFE = "pf_kit_1_remaining_life"
ATTR_PF_KIT_MP_REMAINING_LIFE = "pf_kit_mp_remaining_life"
ATTR_STATUS = "status"
ATTR_UNIT = "unit"
ATTR_UPTIME = "uptime"
ATTR_YELLOW_DRUM_COUNTER = "yellow_drum_counter"
ATTR_YELLOW_DRUM_REMAINING_LIFE = "yellow_drum_remaining_life"
ATTR_YELLOW_DRUM_REMAINING_PAGES = "yellow_drum_remaining_pages"
ATTR_YELLOW_INK_REMAINING = "yellow_ink_remaining"
ATTR_YELLOW_TONER_REMAINING = "yellow_toner_remaining"
DATA_CONFIG_ENTRY = "config_entry"
DOMAIN = "brother"
UNIT_PAGES = "p"
PRINTER_TYPES = ["laser", "ink"]
SNMP = "snmp"
SENSOR_TYPES = {
ATTR_STATUS: {
ATTR_ICON: "mdi:printer",
ATTR_LABEL: ATTR_STATUS.title(),
ATTR_UNIT: None,
ATTR_ENABLED: True,
},
ATTR_PAGE_COUNTER: {
ATTR_ICON: "mdi:file-document-outline",
ATTR_LABEL: ATTR_PAGE_COUNTER.replace("_", " ").title(),
ATTR_UNIT: UNIT_PAGES,
ATTR_ENABLED: True,
},
ATTR_BW_COUNTER: {
ATTR_ICON: "mdi:file-document-outline",
ATTR_LABEL: ATTR_BW_COUNTER.replace("_", " ").title(),
ATTR_UNIT: UNIT_PAGES,
ATTR_ENABLED: True,
},
ATTR_COLOR_COUNTER: {
ATTR_ICON: "mdi:file-document-outline",
ATTR_LABEL: ATTR_COLOR_COUNTER.replace("_", " ").title(),
ATTR_UNIT: UNIT_PAGES,
ATTR_ENABLED: True,
},
ATTR_DUPLEX_COUNTER: {
ATTR_ICON: "mdi:file-document-outline",
ATTR_LABEL: ATTR_DUPLEX_COUNTER.replace("_", " ").title(),
ATTR_UNIT: UNIT_PAGES,
ATTR_ENABLED: True,
},
ATTR_DRUM_REMAINING_LIFE: {
ATTR_ICON: "mdi:chart-donut",
ATTR_LABEL: ATTR_DRUM_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_BLACK_DRUM_REMAINING_LIFE: {
ATTR_ICON: "mdi:chart-donut",
ATTR_LABEL: ATTR_BLACK_DRUM_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_CYAN_DRUM_REMAINING_LIFE: {
ATTR_ICON: "mdi:chart-donut",
ATTR_LABEL: ATTR_CYAN_DRUM_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_MAGENTA_DRUM_REMAINING_LIFE: {
ATTR_ICON: "mdi:chart-donut",
ATTR_LABEL: ATTR_MAGENTA_DRUM_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_YELLOW_DRUM_REMAINING_LIFE: {
ATTR_ICON: "mdi:chart-donut",
ATTR_LABEL: ATTR_YELLOW_DRUM_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_BELT_UNIT_REMAINING_LIFE: {
ATTR_ICON: "mdi:current-ac",
ATTR_LABEL: ATTR_BELT_UNIT_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_FUSER_REMAINING_LIFE: {
ATTR_ICON: "mdi:water-outline",
ATTR_LABEL: ATTR_FUSER_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_LASER_REMAINING_LIFE: {
ATTR_ICON: "mdi:spotlight-beam",
ATTR_LABEL: ATTR_LASER_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_PF_KIT_1_REMAINING_LIFE: {
ATTR_ICON: "mdi:printer-3d",
ATTR_LABEL: ATTR_PF_KIT_1_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_PF_KIT_MP_REMAINING_LIFE: {
ATTR_ICON: "mdi:printer-3d",
ATTR_LABEL: ATTR_PF_KIT_MP_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_BLACK_TONER_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_BLACK_TONER_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_CYAN_TONER_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_CYAN_TONER_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_MAGENTA_TONER_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_MAGENTA_TONER_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_YELLOW_TONER_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_YELLOW_TONER_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_BLACK_INK_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_BLACK_INK_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_CYAN_INK_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_CYAN_INK_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_MAGENTA_INK_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_MAGENTA_INK_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_YELLOW_INK_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_YELLOW_INK_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_UPTIME: {
ATTR_ICON: None,
ATTR_LABEL: ATTR_UPTIME.title(),
ATTR_UNIT: None,
ATTR_ENABLED: False,
},
}
|
"""Constants for Brother integration."""
from homeassistant.const import ATTR_ICON, PERCENTAGE
ATTR_BELT_UNIT_REMAINING_LIFE = "belt_unit_remaining_life"
ATTR_BLACK_DRUM_COUNTER = "black_drum_counter"
ATTR_BLACK_DRUM_REMAINING_LIFE = "black_drum_remaining_life"
ATTR_BLACK_DRUM_REMAINING_PAGES = "black_drum_remaining_pages"
ATTR_BLACK_INK_REMAINING = "black_ink_remaining"
ATTR_BLACK_TONER_REMAINING = "black_toner_remaining"
ATTR_BW_COUNTER = "b/w_counter"
ATTR_COLOR_COUNTER = "color_counter"
ATTR_CYAN_DRUM_COUNTER = "cyan_drum_counter"
ATTR_CYAN_DRUM_REMAINING_LIFE = "cyan_drum_remaining_life"
ATTR_CYAN_DRUM_REMAINING_PAGES = "cyan_drum_remaining_pages"
ATTR_CYAN_INK_REMAINING = "cyan_ink_remaining"
ATTR_CYAN_TONER_REMAINING = "cyan_toner_remaining"
ATTR_DRUM_COUNTER = "drum_counter"
ATTR_DRUM_REMAINING_LIFE = "drum_remaining_life"
ATTR_DRUM_REMAINING_PAGES = "drum_remaining_pages"
ATTR_DUPLEX_COUNTER = "duplex_unit_pages_counter"
ATTR_ENABLED = "enabled"
ATTR_FUSER_REMAINING_LIFE = "fuser_remaining_life"
ATTR_LABEL = "label"
ATTR_LASER_REMAINING_LIFE = "laser_remaining_life"
ATTR_MAGENTA_DRUM_COUNTER = "magenta_drum_counter"
ATTR_MAGENTA_DRUM_REMAINING_LIFE = "magenta_drum_remaining_life"
ATTR_MAGENTA_DRUM_REMAINING_PAGES = "magenta_drum_remaining_pages"
ATTR_MAGENTA_INK_REMAINING = "magenta_ink_remaining"
ATTR_MAGENTA_TONER_REMAINING = "magenta_toner_remaining"
ATTR_MANUFACTURER = "Brother"
ATTR_PAGE_COUNTER = "page_counter"
ATTR_PF_KIT_1_REMAINING_LIFE = "pf_kit_1_remaining_life"
ATTR_PF_KIT_MP_REMAINING_LIFE = "pf_kit_mp_remaining_life"
ATTR_STATUS = "status"
ATTR_UNIT = "unit"
ATTR_UPTIME = "uptime"
ATTR_YELLOW_DRUM_COUNTER = "yellow_drum_counter"
ATTR_YELLOW_DRUM_REMAINING_LIFE = "yellow_drum_remaining_life"
ATTR_YELLOW_DRUM_REMAINING_PAGES = "yellow_drum_remaining_pages"
ATTR_YELLOW_INK_REMAINING = "yellow_ink_remaining"
ATTR_YELLOW_TONER_REMAINING = "yellow_toner_remaining"
DATA_CONFIG_ENTRY = "config_entry"
DOMAIN = "brother"
UNIT_PAGES = "p"
PRINTER_TYPES = ["laser", "ink"]
SNMP = "snmp"
SENSOR_TYPES = {
ATTR_STATUS: {
ATTR_ICON: "mdi:printer",
ATTR_LABEL: ATTR_STATUS.title(),
ATTR_UNIT: None,
ATTR_ENABLED: True,
},
ATTR_PAGE_COUNTER: {
ATTR_ICON: "mdi:file-document-outline",
ATTR_LABEL: ATTR_PAGE_COUNTER.replace("_", " ").title(),
ATTR_UNIT: UNIT_PAGES,
ATTR_ENABLED: True,
},
ATTR_BW_COUNTER: {
ATTR_ICON: "mdi:file-document-outline",
ATTR_LABEL: ATTR_BW_COUNTER.replace("_", " ").title(),
ATTR_UNIT: UNIT_PAGES,
ATTR_ENABLED: True,
},
ATTR_COLOR_COUNTER: {
ATTR_ICON: "mdi:file-document-outline",
ATTR_LABEL: ATTR_COLOR_COUNTER.replace("_", " ").title(),
ATTR_UNIT: UNIT_PAGES,
ATTR_ENABLED: True,
},
ATTR_DUPLEX_COUNTER: {
ATTR_ICON: "mdi:file-document-outline",
ATTR_LABEL: ATTR_DUPLEX_COUNTER.replace("_", " ").title(),
ATTR_UNIT: UNIT_PAGES,
ATTR_ENABLED: True,
},
ATTR_DRUM_REMAINING_LIFE: {
ATTR_ICON: "mdi:chart-donut",
ATTR_LABEL: ATTR_DRUM_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_BLACK_DRUM_REMAINING_LIFE: {
ATTR_ICON: "mdi:chart-donut",
ATTR_LABEL: ATTR_BLACK_DRUM_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_CYAN_DRUM_REMAINING_LIFE: {
ATTR_ICON: "mdi:chart-donut",
ATTR_LABEL: ATTR_CYAN_DRUM_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_MAGENTA_DRUM_REMAINING_LIFE: {
ATTR_ICON: "mdi:chart-donut",
ATTR_LABEL: ATTR_MAGENTA_DRUM_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_YELLOW_DRUM_REMAINING_LIFE: {
ATTR_ICON: "mdi:chart-donut",
ATTR_LABEL: ATTR_YELLOW_DRUM_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_BELT_UNIT_REMAINING_LIFE: {
ATTR_ICON: "mdi:current-ac",
ATTR_LABEL: ATTR_BELT_UNIT_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_FUSER_REMAINING_LIFE: {
ATTR_ICON: "mdi:water-outline",
ATTR_LABEL: ATTR_FUSER_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_LASER_REMAINING_LIFE: {
ATTR_ICON: "mdi:spotlight-beam",
ATTR_LABEL: ATTR_LASER_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_PF_KIT_1_REMAINING_LIFE: {
ATTR_ICON: "mdi:printer-3d",
ATTR_LABEL: ATTR_PF_KIT_1_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_PF_KIT_MP_REMAINING_LIFE: {
ATTR_ICON: "mdi:printer-3d",
ATTR_LABEL: ATTR_PF_KIT_MP_REMAINING_LIFE.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_BLACK_TONER_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_BLACK_TONER_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_CYAN_TONER_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_CYAN_TONER_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_MAGENTA_TONER_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_MAGENTA_TONER_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_YELLOW_TONER_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_YELLOW_TONER_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_BLACK_INK_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_BLACK_INK_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_CYAN_INK_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_CYAN_INK_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_MAGENTA_INK_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_MAGENTA_INK_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_YELLOW_INK_REMAINING: {
ATTR_ICON: "mdi:printer-3d-nozzle",
ATTR_LABEL: ATTR_YELLOW_INK_REMAINING.replace("_", " ").title(),
ATTR_UNIT: PERCENTAGE,
ATTR_ENABLED: True,
},
ATTR_UPTIME: {
ATTR_ICON: None,
ATTR_LABEL: ATTR_UPTIME.title(),
ATTR_UNIT: None,
ATTR_ENABLED: False,
},
}
|
en
| 0.754871
|
Constants for Brother integration.
| 1.361719
| 1
|
test/integration/test_connectors.py
|
cool-RR/py2neo
| 0
|
6626852
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2011-2020, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from collections import deque
from pytest import mark, skip
from py2neo.data import Record
from py2neo.database import Cursor, CypherStats, CypherPlan
@mark.skip
def test_bolt_connection_pool_usage_for_autocommit(connector):
if "bolt" not in connector.scheme:
skip("Bolt tests are only valid for Bolt connectors")
pool = connector.pool
address = connector.connection_data["host"], connector.connection_data["port"]
n = len(pool.connections)
assert pool.in_use_connection_count(address) == 0
cursor = connector.auto_run(None, "RETURN 1")
assert 1 <= len(pool.connections) <= n + 1
assert pool.in_use_connection_count(address) == 1
n = len(pool.connections)
cursor.summary()
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 0
@mark.skip
def test_bolt_connection_reuse_for_autocommit(connector):
if "bolt" not in connector.scheme:
skip("Bolt tests are only valid for Bolt connectors")
pool = connector.pool
address = connector.connection_data["host"], connector.connection_data["port"]
n = len(pool.connections)
assert pool.in_use_connection_count(address) == 0
cursor = connector.auto_run(None, "RETURN 1")
assert 1 <= len(pool.connections) <= n + 1
assert pool.in_use_connection_count(address) == 1
n = len(pool.connections)
cursor.summary()
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 0
cursor = connector.auto_run(None, "RETURN 1")
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 1
cursor.summary()
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 0
@mark.skip
def test_bolt_connection_pool_usage_for_begin_commit(connector):
if "bolt" not in connector.scheme:
skip("Bolt tests are only valid for Bolt connectors")
pool = connector.pool
address = connector.connection_data["host"], connector.connection_data["port"]
n = len(pool.connections)
assert pool.in_use_connection_count(address) == 0
tx = connector.begin()
assert 1 <= len(pool.connections) <= n + 1
assert pool.in_use_connection_count(address) == 1
n = len(pool.connections)
connector.commit(tx)
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 0
@mark.skip
def test_bolt_connection_pool_usage_for_begin_rollback(connector):
if "bolt" not in connector.scheme:
skip("Bolt tests are only valid for Bolt connectors")
pool = connector.pool
address = connector.connection_data["host"], connector.connection_data["port"]
n = len(pool.connections)
assert pool.in_use_connection_count(address) == 0
tx = connector.begin()
assert 1 <= len(pool.connections) <= n + 1
assert pool.in_use_connection_count(address) == 1
n = len(pool.connections)
connector.rollback(tx)
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 0
def test_keys(connector):
cursor = Cursor(connector.auto_run(None, "RETURN 'Alice' AS name, 33 AS age"))
expected = ["name", "age"]
actual = cursor.keys()
assert expected == actual
def test_records(connector):
cursor = Cursor(connector.auto_run(None, "UNWIND range(1, $x) AS n RETURN n, n * n AS n_sq", {"x": 3}))
expected = deque([(1, 1), (2, 4), (3, 9)])
for actual_record in cursor:
expected_record = Record(zip(["n", "n_sq"], expected.popleft()))
assert expected_record == actual_record
def test_stats(connector):
cursor = Cursor(connector.auto_run(None, "CREATE ()", {}))
expected = CypherStats(nodes_created=1)
actual = cursor.stats()
assert expected == actual
# def test_explain_plan(connector, neo4j_minor_version):
# cursor = Cursor(connector.run("EXPLAIN RETURN $x", {"x": 1}))
# expected = CypherPlan(
# operator_type='ProduceResults',
# identifiers=['$x'],
# children=[
# CypherPlan(
# operator_type='Projection',
# identifiers=['$x'],
# children=[],
# args={
# 'estimated_rows': 1.0,
# 'expressions': '{$x : $x}',
# },
# ),
# ],
# args={
# 'estimated_rows': 1.0,
# 'planner': 'COST',
# 'planner_impl': 'IDP',
# 'planner_version': neo4j_minor_version,
# 'runtime': 'COMPILED',
# 'runtime_impl': 'COMPILED',
# 'runtime_version': neo4j_minor_version,
# 'version': 'CYPHER %s' % neo4j_minor_version,
# },
# )
# actual = cursor.plan()
# assert expected == actual
# def test_profile_plan(connector, neo4j_version):
# cursor = Cursor(connector.run("PROFILE RETURN $x", {"x": 1}))
# actual = cursor.plan()
# expected = CypherPlan(
# operator_type='ProduceResults',
# identifiers=['$x'],
# children=[
# CypherPlan(
# operator_type='Projection',
# identifiers=['$x'],
# children=[],
# args={
# 'db_hits': 0,
# 'estimated_rows': 1.0,
# 'expressions': '{$x : $x}',
# 'page_cache_hit_ratio': 0.0,
# 'page_cache_hits': 0,
# 'page_cache_misses': 0,
# 'rows': 1,
# 'time': actual.children[0].args["time"],
# },
# ),
# ],
# args={
# 'db_hits': 0,
# 'estimated_rows': 1.0,
# 'page_cache_hit_ratio': 0.0,
# 'page_cache_hits': 0,
# 'page_cache_misses': 0,
# 'planner': 'COST',
# 'planner_impl': 'IDP',
# 'planner_version': neo4j_version,
# 'rows': 1,
# 'runtime': 'COMPILED',
# 'runtime_impl': 'COMPILED',
# 'runtime_version': neo4j_version,
# 'time': actual.args["time"],
# 'version': 'CYPHER %s' % neo4j_version,
# },
# )
# assert expected == actual
# def skip_if_no_multidb_support(graph):
# if graph.service.kernel_version < (4, 0):
# skip("MultiDB tests are only valid for Neo4j 4.0+")
#
#
# def test_db_extra(graph, connector):
# skip_if_no_multidb_support(graph)
# cursor = Cursor(connector.run("RETURN 1", {}, db="system"))
# expected = CypherStats(nodes_created=1)
# actual = cursor.stats()
# assert expected == actual
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2011-2020, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from collections import deque
from pytest import mark, skip
from py2neo.data import Record
from py2neo.database import Cursor, CypherStats, CypherPlan
@mark.skip
def test_bolt_connection_pool_usage_for_autocommit(connector):
if "bolt" not in connector.scheme:
skip("Bolt tests are only valid for Bolt connectors")
pool = connector.pool
address = connector.connection_data["host"], connector.connection_data["port"]
n = len(pool.connections)
assert pool.in_use_connection_count(address) == 0
cursor = connector.auto_run(None, "RETURN 1")
assert 1 <= len(pool.connections) <= n + 1
assert pool.in_use_connection_count(address) == 1
n = len(pool.connections)
cursor.summary()
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 0
@mark.skip
def test_bolt_connection_reuse_for_autocommit(connector):
if "bolt" not in connector.scheme:
skip("Bolt tests are only valid for Bolt connectors")
pool = connector.pool
address = connector.connection_data["host"], connector.connection_data["port"]
n = len(pool.connections)
assert pool.in_use_connection_count(address) == 0
cursor = connector.auto_run(None, "RETURN 1")
assert 1 <= len(pool.connections) <= n + 1
assert pool.in_use_connection_count(address) == 1
n = len(pool.connections)
cursor.summary()
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 0
cursor = connector.auto_run(None, "RETURN 1")
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 1
cursor.summary()
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 0
@mark.skip
def test_bolt_connection_pool_usage_for_begin_commit(connector):
if "bolt" not in connector.scheme:
skip("Bolt tests are only valid for Bolt connectors")
pool = connector.pool
address = connector.connection_data["host"], connector.connection_data["port"]
n = len(pool.connections)
assert pool.in_use_connection_count(address) == 0
tx = connector.begin()
assert 1 <= len(pool.connections) <= n + 1
assert pool.in_use_connection_count(address) == 1
n = len(pool.connections)
connector.commit(tx)
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 0
@mark.skip
def test_bolt_connection_pool_usage_for_begin_rollback(connector):
if "bolt" not in connector.scheme:
skip("Bolt tests are only valid for Bolt connectors")
pool = connector.pool
address = connector.connection_data["host"], connector.connection_data["port"]
n = len(pool.connections)
assert pool.in_use_connection_count(address) == 0
tx = connector.begin()
assert 1 <= len(pool.connections) <= n + 1
assert pool.in_use_connection_count(address) == 1
n = len(pool.connections)
connector.rollback(tx)
assert len(pool.connections) == n
assert pool.in_use_connection_count(address) == 0
def test_keys(connector):
cursor = Cursor(connector.auto_run(None, "RETURN 'Alice' AS name, 33 AS age"))
expected = ["name", "age"]
actual = cursor.keys()
assert expected == actual
def test_records(connector):
cursor = Cursor(connector.auto_run(None, "UNWIND range(1, $x) AS n RETURN n, n * n AS n_sq", {"x": 3}))
expected = deque([(1, 1), (2, 4), (3, 9)])
for actual_record in cursor:
expected_record = Record(zip(["n", "n_sq"], expected.popleft()))
assert expected_record == actual_record
def test_stats(connector):
cursor = Cursor(connector.auto_run(None, "CREATE ()", {}))
expected = CypherStats(nodes_created=1)
actual = cursor.stats()
assert expected == actual
# def test_explain_plan(connector, neo4j_minor_version):
# cursor = Cursor(connector.run("EXPLAIN RETURN $x", {"x": 1}))
# expected = CypherPlan(
# operator_type='ProduceResults',
# identifiers=['$x'],
# children=[
# CypherPlan(
# operator_type='Projection',
# identifiers=['$x'],
# children=[],
# args={
# 'estimated_rows': 1.0,
# 'expressions': '{$x : $x}',
# },
# ),
# ],
# args={
# 'estimated_rows': 1.0,
# 'planner': 'COST',
# 'planner_impl': 'IDP',
# 'planner_version': neo4j_minor_version,
# 'runtime': 'COMPILED',
# 'runtime_impl': 'COMPILED',
# 'runtime_version': neo4j_minor_version,
# 'version': 'CYPHER %s' % neo4j_minor_version,
# },
# )
# actual = cursor.plan()
# assert expected == actual
# def test_profile_plan(connector, neo4j_version):
# cursor = Cursor(connector.run("PROFILE RETURN $x", {"x": 1}))
# actual = cursor.plan()
# expected = CypherPlan(
# operator_type='ProduceResults',
# identifiers=['$x'],
# children=[
# CypherPlan(
# operator_type='Projection',
# identifiers=['$x'],
# children=[],
# args={
# 'db_hits': 0,
# 'estimated_rows': 1.0,
# 'expressions': '{$x : $x}',
# 'page_cache_hit_ratio': 0.0,
# 'page_cache_hits': 0,
# 'page_cache_misses': 0,
# 'rows': 1,
# 'time': actual.children[0].args["time"],
# },
# ),
# ],
# args={
# 'db_hits': 0,
# 'estimated_rows': 1.0,
# 'page_cache_hit_ratio': 0.0,
# 'page_cache_hits': 0,
# 'page_cache_misses': 0,
# 'planner': 'COST',
# 'planner_impl': 'IDP',
# 'planner_version': neo4j_version,
# 'rows': 1,
# 'runtime': 'COMPILED',
# 'runtime_impl': 'COMPILED',
# 'runtime_version': neo4j_version,
# 'time': actual.args["time"],
# 'version': 'CYPHER %s' % neo4j_version,
# },
# )
# assert expected == actual
# def skip_if_no_multidb_support(graph):
# if graph.service.kernel_version < (4, 0):
# skip("MultiDB tests are only valid for Neo4j 4.0+")
#
#
# def test_db_extra(graph, connector):
# skip_if_no_multidb_support(graph)
# cursor = Cursor(connector.run("RETURN 1", {}, db="system"))
# expected = CypherStats(nodes_created=1)
# actual = cursor.stats()
# assert expected == actual
|
en
| 0.368014
|
#!/usr/bin/env python # -*- encoding: utf-8 -*- # Copyright 2011-2020, <NAME> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # def test_explain_plan(connector, neo4j_minor_version): # cursor = Cursor(connector.run("EXPLAIN RETURN $x", {"x": 1})) # expected = CypherPlan( # operator_type='ProduceResults', # identifiers=['$x'], # children=[ # CypherPlan( # operator_type='Projection', # identifiers=['$x'], # children=[], # args={ # 'estimated_rows': 1.0, # 'expressions': '{$x : $x}', # }, # ), # ], # args={ # 'estimated_rows': 1.0, # 'planner': 'COST', # 'planner_impl': 'IDP', # 'planner_version': neo4j_minor_version, # 'runtime': 'COMPILED', # 'runtime_impl': 'COMPILED', # 'runtime_version': neo4j_minor_version, # 'version': 'CYPHER %s' % neo4j_minor_version, # }, # ) # actual = cursor.plan() # assert expected == actual # def test_profile_plan(connector, neo4j_version): # cursor = Cursor(connector.run("PROFILE RETURN $x", {"x": 1})) # actual = cursor.plan() # expected = CypherPlan( # operator_type='ProduceResults', # identifiers=['$x'], # children=[ # CypherPlan( # operator_type='Projection', # identifiers=['$x'], # children=[], # args={ # 'db_hits': 0, # 'estimated_rows': 1.0, # 'expressions': '{$x : $x}', # 'page_cache_hit_ratio': 0.0, # 'page_cache_hits': 0, # 'page_cache_misses': 0, # 'rows': 1, # 'time': actual.children[0].args["time"], # }, # ), # ], # args={ # 'db_hits': 0, # 'estimated_rows': 1.0, # 'page_cache_hit_ratio': 0.0, # 'page_cache_hits': 0, # 'page_cache_misses': 
0, # 'planner': 'COST', # 'planner_impl': 'IDP', # 'planner_version': neo4j_version, # 'rows': 1, # 'runtime': 'COMPILED', # 'runtime_impl': 'COMPILED', # 'runtime_version': neo4j_version, # 'time': actual.args["time"], # 'version': 'CYPHER %s' % neo4j_version, # }, # ) # assert expected == actual # def skip_if_no_multidb_support(graph): # if graph.service.kernel_version < (4, 0): # skip("MultiDB tests are only valid for Neo4j 4.0+") # # # def test_db_extra(graph, connector): # skip_if_no_multidb_support(graph) # cursor = Cursor(connector.run("RETURN 1", {}, db="system")) # expected = CypherStats(nodes_created=1) # actual = cursor.stats() # assert expected == actual
| 2.223907
| 2
|
chapter17/0_starting_point/plot_imu_fusion.py
|
dannystaple/Learn-Robotics-Programming-Second-Edition
| 19
|
6626853
|
<reponame>dannystaple/Learn-Robotics-Programming-Second-Edition<filename>chapter17/0_starting_point/plot_imu_fusion.py<gh_stars>10-100
import vpython as vp
from robot_imu import RobotImu, ImuFusion
from delta_timer import DeltaTimer
import imu_settings
imu = RobotImu(gyro_offsets=imu_settings.gyro_offsets,
mag_offsets=imu_settings.mag_offsets)
fusion = ImuFusion(imu)
vp.graph(xmin=0, xmax=60, scroll=True)
graph_pitch = vp.gcurve(color=vp.color.red)
graph_roll = vp.gcurve(color=vp.color.green)
graph_yaw = vp.gcurve(color=vp.color.blue)
timer = DeltaTimer()
while True:
vp.rate(100)
dt, elapsed = timer.update()
fusion.update(dt)
graph_pitch.plot(elapsed, fusion.pitch)
graph_roll.plot(elapsed, fusion.roll)
graph_yaw.plot(elapsed, fusion.yaw)
|
import vpython as vp
from robot_imu import RobotImu, ImuFusion
from delta_timer import DeltaTimer
import imu_settings
imu = RobotImu(gyro_offsets=imu_settings.gyro_offsets,
mag_offsets=imu_settings.mag_offsets)
fusion = ImuFusion(imu)
vp.graph(xmin=0, xmax=60, scroll=True)
graph_pitch = vp.gcurve(color=vp.color.red)
graph_roll = vp.gcurve(color=vp.color.green)
graph_yaw = vp.gcurve(color=vp.color.blue)
timer = DeltaTimer()
while True:
vp.rate(100)
dt, elapsed = timer.update()
fusion.update(dt)
graph_pitch.plot(elapsed, fusion.pitch)
graph_roll.plot(elapsed, fusion.roll)
graph_yaw.plot(elapsed, fusion.yaw)
|
none
| 1
| 2.406602
| 2
|
|
src/image_loader.py
|
BastiHz/epicycles
| 1
|
6626854
|
<filename>src/image_loader.py
# TODO: Load an image file. Ideally the path should be a single pixel wide
# and must be of a single color. Construct the point list either by using
# nearest neighbor search or try finding the shortest path using
# a travelling salesman approach.
# Return the sorted list of coordinates.
|
<filename>src/image_loader.py
# TODO: Load an image file. Ideally the path should be a single pixel wide
# and must be of a single color. Construct the point list either by using
# nearest neighbor search or try finding the shortest path using
# a travelling salesman approach.
# Return the sorted list of coordinates.
|
en
| 0.813379
|
# TODO: Load an image file. Ideally the path should be a single pixel wide # and must be of a single color. Construct the point list either by using # nearest neighbor search or try finding the shortest path using # a travelling salesman approach. # Return the sorted list of coordinates.
| 3.156735
| 3
|
src/visual_encoders.py
|
lukoshkin/text2video
| 20
|
6626855
|
<filename>src/visual_encoders.py
import torch
from torch import nn
from torch.nn.utils import spectral_norm
from blocks import DBlock
from convgru import ConvGRU
from functools import partial
def SN(sn):
return spectral_norm if sn else lambda x: x
class VideoEncoder(nn.Module):
def __init__(self, in_colors=3, base_width=32, bn=True, sn=False):
super().__init__()
block2d = partial(DBlock, '2d', bn=bn, sn=sn)
block3d = partial(DBlock, '3d', bn=bn, sn=sn)
self.downsampler1 = nn.Sequential(
SN(sn)(nn.Conv3d(in_colors, base_width, 1)),
block3d(base_width, base_width*2, 2),
block3d(base_width*2, base_width*4, (1,2,2)))
self.cgru = ConvGRU(
base_width*4, base_width*4, 3, spectral_norm=sn)
self.downsampler2 = nn.Sequential(
block2d(base_width*4, base_width*8, 2),
block2d(base_width*8, base_width*16, 2),
block2d(base_width*16, base_width*32, 2))
def forward(self, video):
H = self.downsampler1(video)
_, last = self.cgru(H)
H = self.downsampler2(last)
return H.view(H.size(0), -1)
class ProjectionVideoDiscriminator(VideoEncoder):
    """Conditional video discriminator using an inner-product projection.

    Score = pooled encoder features + <proj(embedding), features>.
    NOTE(review): this matches the projection-discriminator formulation
    (Miyato & Koyama) -- confirm against the training objective.
    """
    def __init__(self, cond_size, in_colors=3, base_width=32, logits=True):
        # Discriminator convention: spectral norm on, batch norm off.
        super().__init__(in_colors, base_width, bn=False, sn=True)
        self.proj = nn.Sequential(
            SN(True)(nn.Linear(cond_size, base_width*32)),
            nn.LeakyReLU(.2, inplace=True))
        self.pool = SN(True)(nn.Linear(base_width*32, 1))
        # An empty nn.Sequential() is the identity, i.e. raw logits.
        if logits: self.activation = nn.Sequential()
        else: self.activation = torch.sigmoid
    def forward(self, video, embedding):
        E = self.proj(embedding)
        H = super().forward(video)
        out = self.pool(H).squeeze(1)
        # Per-sample dot product between projected condition and features.
        out += torch.einsum('ij,ij->i', E, H)
        return self.activation(out)
class ImageEncoder(nn.Module):
    """Encode groups of k images into k feature vectors per sample."""
    def __init__(self, in_colors=3, base_width=32, bn=True, sn=False):
        super().__init__()
        block2d = partial(DBlock, '2d', bn=bn, sn=sn)
        self.downsampler = nn.Sequential(
            SN(sn)(nn.Conv2d(in_colors, base_width, 1)),
            block2d(base_width, base_width*2, 2),
            block2d(base_width*2, base_width*4, 2),
            block2d(base_width*4, base_width*8, 2),
            block2d(base_width*8, base_width*16, 2),
            block2d(base_width*16, base_width*32, 2))
    def forward(self, images):
        """Encode a batch of image groups.

        images: (N, k, C, H, W); returns (N, k, features).
        """
        k = images.size(1)
        # Fold the group axis into the batch: (N*k, C, H, W).
        images = torch.flatten(images, 0, 1)
        # Presumably spatial dims collapse to 1x1 after the five stride-2
        # blocks (input-size dependent) -- TODO confirm expected input size.
        H = self.downsampler(images)
        # Restore the group axis: (N, k, features).
        return H.view(H.size(0)//k, k, -1)
class ProjectionImageDiscriminator(ImageEncoder):
    """Conditional discriminator over groups of images via projection.

    Scores each of the k images in a group and sums them; the condition
    embedding enters through an inner product with the image features.
    """
    def __init__(self, cond_size, in_colors=3, base_width=32, logits=True):
        # Discriminator convention: spectral norm on, batch norm off.
        super().__init__(in_colors, base_width, bn=False, sn=True)
        self.proj = nn.Sequential(
            SN(True)(nn.Linear(cond_size, base_width*32)),
            nn.LeakyReLU(.2, inplace=True))
        self.pool = SN(True)(nn.Linear(base_width*32, 1))
        # An empty nn.Sequential() is the identity, i.e. raw logits.
        if logits: self.activation = nn.Sequential()
        else: self.activation = torch.sigmoid
    def forward(self, video, embedding):
        E = self.proj(embedding)
        # H: (N, k, features) from the ImageEncoder.
        H = super().forward(video)
        # Sum the pooled scores over the k images of each group.
        out = self.pool(H).sum([1, 2])
        # Sum projection scores over the k images as well ('ikj' axis k).
        out += torch.einsum('ij,ikj->i', E, H)
        return self.activation(out)
|
<filename>src/visual_encoders.py
import torch
from torch import nn
from torch.nn.utils import spectral_norm
from blocks import DBlock
from convgru import ConvGRU
from functools import partial
def SN(sn):
return spectral_norm if sn else lambda x: x
class VideoEncoder(nn.Module):
def __init__(self, in_colors=3, base_width=32, bn=True, sn=False):
super().__init__()
block2d = partial(DBlock, '2d', bn=bn, sn=sn)
block3d = partial(DBlock, '3d', bn=bn, sn=sn)
self.downsampler1 = nn.Sequential(
SN(sn)(nn.Conv3d(in_colors, base_width, 1)),
block3d(base_width, base_width*2, 2),
block3d(base_width*2, base_width*4, (1,2,2)))
self.cgru = ConvGRU(
base_width*4, base_width*4, 3, spectral_norm=sn)
self.downsampler2 = nn.Sequential(
block2d(base_width*4, base_width*8, 2),
block2d(base_width*8, base_width*16, 2),
block2d(base_width*16, base_width*32, 2))
def forward(self, video):
H = self.downsampler1(video)
_, last = self.cgru(H)
H = self.downsampler2(last)
return H.view(H.size(0), -1)
class ProjectionVideoDiscriminator(VideoEncoder):
def __init__(self, cond_size, in_colors=3, base_width=32, logits=True):
super().__init__(in_colors, base_width, bn=False, sn=True)
self.proj = nn.Sequential(
SN(True)(nn.Linear(cond_size, base_width*32)),
nn.LeakyReLU(.2, inplace=True))
self.pool = SN(True)(nn.Linear(base_width*32, 1))
if logits: self.activation = nn.Sequential()
else: self.activation = torch.sigmoid
def forward(self, video, embedding):
E = self.proj(embedding)
H = super().forward(video)
out = self.pool(H).squeeze(1)
out += torch.einsum('ij,ij->i', E, H)
return self.activation(out)
class ImageEncoder(nn.Module):
def __init__(self, in_colors=3, base_width=32, bn=True, sn=False):
super().__init__()
block2d = partial(DBlock, '2d', bn=bn, sn=sn)
self.downsampler = nn.Sequential(
SN(sn)(nn.Conv2d(in_colors, base_width, 1)),
block2d(base_width, base_width*2, 2),
block2d(base_width*2, base_width*4, 2),
block2d(base_width*4, base_width*8, 2),
block2d(base_width*8, base_width*16, 2),
block2d(base_width*16, base_width*32, 2))
def forward(self, images):
"""
images
"""
k = images.size(1)
images = torch.flatten(images, 0, 1)
H = self.downsampler(images)
# images.shape (N, k, C, H, W)
# images.shape (N*k, C, H, W)
# H.shape (N*k, base_width*32, 1, 1)
# output.shape (N, k, base_width*32)
return H.view(H.size(0)//k, k, -1)
class ProjectionImageDiscriminator(ImageEncoder):
def __init__(self, cond_size, in_colors=3, base_width=32, logits=True):
super().__init__(in_colors, base_width, bn=False, sn=True)
self.proj = nn.Sequential(
SN(True)(nn.Linear(cond_size, base_width*32)),
nn.LeakyReLU(.2, inplace=True))
self.pool = SN(True)(nn.Linear(base_width*32, 1))
if logits: self.activation = nn.Sequential()
else: self.activation = torch.sigmoid
def forward(self, video, embedding):
E = self.proj(embedding)
H = super().forward(video)
out = self.pool(H).sum([1, 2])
out += torch.einsum('ij,ikj->i', E, H)
return self.activation(out)
|
en
| 0.286377
|
images # images.shape (N, k, C, H, W) # images.shape (N*k, C, H, W) # H.shape (N*k, base_width*32, 1, 1) # output.shape (N, k, base_width*32)
| 2.253807
| 2
|
Scripts/articletts.py
|
SainathPoojary/PyAutomate
| 14
|
6626856
|
from os import system
from time import sleep
import requests
from bs4 import BeautifulSoup
from gtts import gTTS
from playsound import playsound

# Repeatedly fetch an article URL, extract its paragraph text, and read it
# aloud via Google text-to-speech until the user declines another round.
another_article = ""
while another_article != "n":
    system("clear")
    url = input("Enter article url: ")
    # Timeout keeps the loop from hanging forever on a dead host.
    page = requests.get(url, timeout=30).text
    soup = BeautifulSoup(page, features="lxml")
    title = soup.find("h1").get_text()  # NOTE(review): currently unused
    p_tags = soup.find_all("p")
    p_text = [tag.get_text().strip() for tag in p_tags]
    # Drop multi-line fragments; keep only sentence-like paragraphs.
    sentence_list = [sentence for sentence in p_text if "\n" not in sentence]
    sentence_list_final = [
        sentence for sentence in sentence_list if "." in sentence
    ]
    article_text = " ".join(sentence_list_final)
    tts = gTTS(article_text, lang="en")
    tts.save("article.mp3")
    playsound("article.mp3")
    # Fix: "Wold" -> "Would"; normalise the answer so "N" / " n " also exit.
    another_article = input("Would you like to play another article? [y/n]: ").strip().lower()
|
from os import system
from time import sleep
import requests
from bs4 import BeautifulSoup
from gtts import gTTS
from playsound import playsound
another_article = ""
while another_article != "n":
system("clear")
url = input("Enter article url: ")
page = requests.get(url).text
soup = BeautifulSoup(page, features="lxml")
title = soup.find("h1").get_text()
p_tags = soup.find_all("p")
p_text = [tag.get_text().strip() for tag in p_tags]
sentence_list = [sentence for sentence in p_text if not "\n" in sentence]
sentence_list_final = [
sentence for sentence in sentence_list if "." in sentence
]
article_text = " ".join(sentence_list_final)
tts = gTTS(article_text, lang="en")
tts.save("article.mp3")
playsound("article.mp3")
another_article = input("Wold you like to play another article? [y/n]: ")
|
none
| 1
| 3.123392
| 3
|
|
setup.py
|
vubon/pyfi
| 2
|
6626857
|
<reponame>vubon/pyfi<filename>setup.py
import os
import re
from setuptools import setup, find_packages
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
def get_version(package: str) -> str:
    """Extract ``__version__`` from *package*'s ``__version__.py``.

    Fixes two defects in the original: the file handle was rebound to its
    own first line (confusing shadowing), and only the first line was read,
    so a ``__version__`` assignment anywhere else in the file was missed.
    """
    version_path = os.path.join(BASE_DIR, f'{package}/__version__.py')
    with open(version_path) as fh:
        contents = fh.read()
    # Raw string for the regex; accepts single or double quotes.
    match = re.search(r"__version__ = ['\"]([^'\"]+)['\"]", contents)
    assert match is not None, f'__version__ not found in {version_path}'
    return match.group(1)
def get_log_description():
    """Return README.md followed by CHANGELOG.md, blank-line separated."""
    with open('README.md') as readme, open('CHANGELOG.md') as changelog:
        return "\n\n".join([readme.read(), changelog.read()])
# Package metadata for PyFi, a small LAN file-sharing tool.
setup(
    name='PyFi',
    version=get_version('PyFi'),
    author='<NAME>',
    author_email='<EMAIL>',
    description='This package will help to send file into mobile or other computer',
    url='https://github.com/vubon/pyfi',
    project_urls={
        "Documentation": "https://github.com/vubon/pyfi/blob/master/docs/GUIDE.md"
    },
    packages=find_packages(),
    long_description=get_log_description(),
    long_description_content_type="text/markdown",
    license='MIT',
    platforms='Python',
    install_requires=[
        'qrcode',
        'psutil',
        'pyinstaller',
    ],
    classifiers=[
        # Fix: 'Development Status :: 1 - Alpha' is not a registered trove
        # classifier (1 is "Planning"); the status intended here -- per the
        # original inline note -- is '3 - Alpha'.
        'Development Status :: 3 - Alpha',
        "License :: OSI Approved :: MIT License",
        "Environment :: Console",
        "Intended Audience :: Customer Service",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Intended Audience :: End Users/Desktop",
        "Intended Audience :: Information Technology",
        "Intended Audience :: Science/Research",
        "Intended Audience :: System Administrators",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Operating System :: OS Independent",
        'Operating System :: Unix',
        'Operating System :: POSIX :: Linux',
        "Natural Language :: English",
        "Topic :: Software Development :: Libraries :: Python Modules",
        # Fix: 'Topic :: FileShare' is not a registered classifier; use the
        # official file-sharing topic instead.
        "Topic :: Communications :: File Sharing",
        "Topic :: System :: Systems Administration",
        "Topic :: Utilities"
    ]
)
|
import os
import re
from setuptools import setup, find_packages
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
def get_version(package: str) -> str:
with open(os.path.join(BASE_DIR, f'{package}/__version__.py')) as version:
version = version.readline()
match = re.search("__version__ = ['\"]([^'\"]+)['\"]", version)
assert match is not None
return match.group(1)
def get_log_description():
with open('README.md') as readme:
with open('CHANGELOG.md') as changelog:
return readme.read() + "\n\n" + changelog.read()
setup(
name='PyFi',
version=get_version('PyFi'),
author='<NAME>',
author_email='<EMAIL>',
description='This package will help to send file into mobile or other computer',
url='https://github.com/vubon/pyfi',
project_urls={
"Documentation": "https://github.com/vubon/pyfi/blob/master/docs/GUIDE.md"
},
packages=find_packages(),
long_description=get_log_description(),
long_description_content_type="text/markdown",
license='MIT',
platforms='Python',
install_requires=[
'qrcode',
'psutil',
'pyinstaller',
],
classifiers=[
'Development Status :: 1 - Alpha', # 2 - Pre-Alpha, 3 - Alpha, 4 - Beta, 5 - Production/Stable
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Customer Service",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Information Technology",
"Intended Audience :: Science/Research",
"Intended Audience :: System Administrators",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Operating System :: OS Independent",
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
"Natural Language :: English",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: FileShare",
"Topic :: System :: Systems Administration",
"Topic :: Utilities"
]
)
|
en
| 0.607496
|
# 2 - Pre-Alpha, 3 - Alpha, 4 - Beta, 5 - Production/Stable
| 1.934901
| 2
|
experiments/BBVI/planar_robot.py
|
OlegArenz/tensorflow_VI
| 0
|
6626858
|
<filename>experiments/BBVI/planar_robot.py
from experiments.target_lnpdfs.Planar_Robot import make_four_goal, make_single_goal
from experiments.BBVI.experiment_script import sample, construct_initial_mixture
import os
import numpy as np
# Run black-box variational inference on the planar-robot target density.
if __name__ == "__main__":
    # Force CPU execution; the experiment is configured without a GPU.
    os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
    num_dimensions = 10
    # Per-joint prior variance; the first joint is far less constrained.
    conf_likelihood_var = 4e-2 * np.ones(num_dimensions)
    conf_likelihood_var[0] = 1
    # Toggle between the four-goal and the single-goal target variant.
    four_goal = False
    if four_goal:
        target_fn = make_four_goal
        model = construct_initial_mixture(num_dimensions=num_dimensions, num_initial_components=10, prior_scale=np.sqrt(conf_likelihood_var))
    else:
        target_fn = make_single_goal
        model = construct_initial_mixture(num_dimensions=num_dimensions, num_initial_components=1, prior_scale=np.sqrt(conf_likelihood_var))
    # Results and plots are written below the hard-coded /tmp path.
    sample(target_dist_maker=target_fn, path="/tmp/bbvi_gvm/planar_1/samps1000/lr_8e-4/6",
           model=model, max_fevals=int(100000000), samples_per_batch=1000, learning_rate=8e-4, do_plots=True)
    print("done")
|
<filename>experiments/BBVI/planar_robot.py
from experiments.target_lnpdfs.Planar_Robot import make_four_goal, make_single_goal
from experiments.BBVI.experiment_script import sample, construct_initial_mixture
import os
import numpy as np
if __name__ == "__main__":
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
num_dimensions = 10
conf_likelihood_var = 4e-2 * np.ones(num_dimensions)
conf_likelihood_var[0] = 1
four_goal = False
if four_goal:
target_fn = make_four_goal
model = construct_initial_mixture(num_dimensions=num_dimensions, num_initial_components=10, prior_scale=np.sqrt(conf_likelihood_var))
else:
target_fn = make_single_goal
model = construct_initial_mixture(num_dimensions=num_dimensions, num_initial_components=1, prior_scale=np.sqrt(conf_likelihood_var))
sample(target_dist_maker=target_fn, path="/tmp/bbvi_gvm/planar_1/samps1000/lr_8e-4/6",
model=model, max_fevals=int(100000000), samples_per_batch=1000, learning_rate=8e-4, do_plots=True)
print("done")
|
none
| 1
| 2.066332
| 2
|
|
tests/test_compose_array.py
|
dgketchum/IrrMapper
| 6
|
6626859
|
<filename>tests/test_compose_array.py
# ===============================================================================
# Copyright 2018 dgketchum
#
# Licensed under the Apache License, Version 2.(the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import os
import shutil
import unittest
from fiona import open as fopen
from pixel_classification.compose_array import PixelTrainingArray
from tests.build_extract_test_data import make_test_dataset
from pixel_classification.runspec import MontanaTest
home = os.path.expanduser('~')
class TestPointExtract(unittest.TestCase):
    """Integration tests for PixelTrainingArray sample-point extraction."""
    def setUp(self):
        # Both paths point at the bundled pixel-extract fixture directory.
        self.origin = os.path.join(os.path.dirname(__file__), 'data', 'pixel_extract_test')
        self.satellite = 'LC8'
        self.directory = os.path.join(os.path.dirname(__file__), 'data', 'pixel_extract_test')
    def test_sample_points(self):
        """Extracting samples writes a shapefile with more than 40 points."""
        montana = MontanaTest()
        p = PixelTrainingArray(root=self.directory, geography=montana, instances=10,
                               overwrite_array=True)
        p.extract_sample(save_points=True)
        # The saved shapefile and the in-memory array must agree in size.
        with fopen(p.shapefile_path, 'r') as src:
            points = [x for x in src]
        self.assertGreater(len(points), 40)
        self.assertGreater(p.extracted_points.shape[0], 40)
    def test_instantiate_w_pkl(self):
        """A no-argument constructor must still yield a valid instance."""
        p = PixelTrainingArray()
        self.assertIsInstance(p, PixelTrainingArray)
    if __name__ == '__main__':
        unittest.main()
# ===============================================================================
|
<filename>tests/test_compose_array.py
# ===============================================================================
# Copyright 2018 dgketchum
#
# Licensed under the Apache License, Version 2.(the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import os
import shutil
import unittest
from fiona import open as fopen
from pixel_classification.compose_array import PixelTrainingArray
from tests.build_extract_test_data import make_test_dataset
from pixel_classification.runspec import MontanaTest
home = os.path.expanduser('~')
class TestPointExtract(unittest.TestCase):
def setUp(self):
self.origin = os.path.join(os.path.dirname(__file__), 'data', 'pixel_extract_test')
self.satellite = 'LC8'
self.directory = os.path.join(os.path.dirname(__file__), 'data', 'pixel_extract_test')
def test_sample_points(self):
montana = MontanaTest()
p = PixelTrainingArray(root=self.directory, geography=montana, instances=10,
overwrite_array=True)
p.extract_sample(save_points=True)
with fopen(p.shapefile_path, 'r') as src:
points = [x for x in src]
self.assertGreater(len(points), 40)
self.assertGreater(p.extracted_points.shape[0], 40)
def test_instantiate_w_pkl(self):
p = PixelTrainingArray()
self.assertIsInstance(p, PixelTrainingArray)
if __name__ == '__main__':
unittest.main()
# ===============================================================================
|
en
| 0.72253
|
# =============================================================================== # Copyright 2018 dgketchum # # Licensed under the Apache License, Version 2.(the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== # ===============================================================================
| 2.026507
| 2
|
jsk_2015_05_baxter_apc/node_scripts/sift_matcher_for_imgs.py
|
pazeshun/jsk_apc
| 0
|
6626860
|
#!/usr/bin/env python
#
"""
This script is to visualize how match sift features are matched between
an image and camera frame.
Usage
-----
$ roslaunch roseus_tutorials usb-camera.launch
$ roslaunch jsk_2015_05_baxter_apc sift_matcher_for_imgs.launch
$ rosrun image_view image_view image:=/sift_matcher_for_imgs/output
"""
import os
import cv2
import numpy as np
import rospy
import cv_bridge
from sensor_msgs.msg import Image
from posedetection_msgs.srv import Feature0DDetect
from sift_matcher import SiftMatcher, imgsift_client
from common import load_img
class ImageSubscriber(object):
    """Subscribe to an Image topic and cache the latest frame.

    Construction blocks until the first message arrives on the topic, so
    ``img`` and ``stamp`` are expected to be populated afterwards.
    """
    def __init__(self, image_topic):
        rospy.Subscriber(image_topic, Image, self._cb_img)
        rospy.loginfo('Waiting for: {topic}'.format(topic=image_topic))
        # Block until at least one frame has been published on the topic.
        rospy.wait_for_message(image_topic, Image)
        rospy.loginfo('Found: {topic}'.format(topic=image_topic))
    def _cb_img(self, msg):
        """Cache the newest frame (as an OpenCV image) and its timestamp."""
        bridge = cv_bridge.CvBridge()
        self.stamp = msg.header.stamp
        self.img = bridge.imgmsg_to_cv2(msg)
class SiftMatcherOneImg(SiftMatcher):
    """Compare two images: usually a camera frame (input) against a static
    reference image, publishing a side-by-side visualisation of the SIFT
    matches on ``~output``."""
    def __init__(self):
        super(SiftMatcherOneImg, self).__init__()
        self.img_sub = ImageSubscriber('~input')
        self.reference_sub = ImageSubscriber('~input/reference')
        self.pub = rospy.Publisher('~output', Image, queue_size=1)
    def match(self):
        """Match current input features against the reference image and
        publish the annotated comparison image."""
        input_stamp, input_img = self.img_sub.stamp, self.img_sub.img
        # query_features comes from the SiftMatcher base class -- presumably
        # the features of the latest input frame; verify in sift_matcher.py.
        input_features = self.query_features
        reference_img = self.reference_sub.img
        # Reference features are recomputed via the service on every call.
        reference_features = imgsift_client(reference_img)
        matches = self.find_match(input_features.descriptors,
                                  reference_features.descriptors)
        rospy.loginfo('matches: {}'.format(len(matches)))
        # Prepare the output visualisation with the match count overlaid.
        matched_img = drawMatches(input_img, input_features.positions,
                                  reference_img, reference_features.positions,
                                  matches)
        cv2.putText(matched_img, 'matches: {}'.format(len(matches)),
                    (5, 25), cv2.FONT_HERSHEY_SIMPLEX, 1.0, (255, 255, 255))
        self.publish_img(stamp=input_stamp, img=matched_img)
    def publish_img(self, stamp, img, encoding='bgr8'):
        """Publish *img* on ``~output`` carrying the given timestamp."""
        bridge = cv_bridge.CvBridge()
        img_msg = bridge.cv2_to_imgmsg(img, encoding=encoding)
        img_msg.header.stamp = stamp
        self.pub.publish(img_msg)
def drawMatches(query_img, query_pos, train_img, train_pos, matches):
    """Draw match points for two images.

    Converts both images to grayscale, places them side by side on one
    canvas, and draws each match as two circles joined by a line.
    query_pos/train_pos are flat keypoint coordinate sequences reshaped to
    (n_points, 2); matches carry cv2.DMatch-style queryIdx/trainIdx.
    """
    query_img = cv2.cvtColor(query_img, cv2.COLOR_RGB2GRAY)
    train_img = cv2.cvtColor(train_img, cv2.COLOR_RGB2GRAY)
    query_pos = np.array(query_pos).reshape((-1, 2))
    train_pos = np.array(train_pos).reshape((-1, 2))
    n_row1, n_col1 = query_img.shape[:2]
    n_row2, n_col2 = train_img.shape[:2]
    # Prepare the output canvas: both images side by side, 3 channels.
    img_out = np.zeros((max([n_row1,n_row2]), n_col1+n_col2, 3), dtype='uint8')
    img_out[:n_row1, :n_col1, :] = np.dstack(3*[query_img])
    img_out[:n_row2, n_col1:n_col1+n_col2, :] = np.dstack(3*[train_img])
    for mat in matches:
        # Draw and connect each matched point pair; the train-image x is
        # shifted right by the query image width.
        x1, y1 = query_pos[mat.queryIdx]
        x2, y2 = train_pos[mat.trainIdx]
        pt1 = (int(x1), int(y1))
        pt2 = (int(x2)+n_col1, int(y2))
        cv2.circle(img_out, pt1, 4, (255, 0, 0), 1)
        cv2.circle(img_out, pt2, 4, (255, 0, 0), 1)
        cv2.line(img_out, pt1, pt2, (255, 0, 0), 1)
    return img_out
def main():
    """Run the SIFT matcher node, publishing matches at roughly 10 Hz."""
    rospy.init_node('sift_matcher_oneimg')
    # Fix: construct the matcher once. The original re-created
    # SiftMatcherOneImg inside the loop, which re-subscribed and blocked on
    # wait_for_message for both topics on every single cycle.
    matcher = SiftMatcherOneImg()
    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        matcher.match()
        rate.sleep()
if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
#
"""
This script is to visualize how match sift features are matched between
an image and camera frame.
Usage
-----
$ roslaunch roseus_tutorials usb-camera.launch
$ roslaunch jsk_2015_05_baxter_apc sift_matcher_for_imgs.launch
$ rosrun image_view image_view image:=/sift_matcher_for_imgs/output
"""
import os
import cv2
import numpy as np
import rospy
import cv_bridge
from sensor_msgs.msg import Image
from posedetection_msgs.srv import Feature0DDetect
from sift_matcher import SiftMatcher, imgsift_client
from common import load_img
class ImageSubscriber(object):
def __init__(self, image_topic):
rospy.Subscriber(image_topic, Image, self._cb_img)
rospy.loginfo('Waiting for: {topic}'.format(topic=image_topic))
rospy.wait_for_message(image_topic, Image)
rospy.loginfo('Found: {topic}'.format(topic=image_topic))
def _cb_img(self, msg):
"""Callback function of Subscribers to listen Image"""
bridge = cv_bridge.CvBridge()
self.stamp = msg.header.stamp
self.img = bridge.imgmsg_to_cv2(msg)
class SiftMatcherOneImg(SiftMatcher):
"""Compare two images.
Usually camera image (input) with static image (reference)"""
def __init__(self):
super(SiftMatcherOneImg, self).__init__()
self.img_sub = ImageSubscriber('~input')
self.reference_sub = ImageSubscriber('~input/reference')
self.pub = rospy.Publisher('~output', Image, queue_size=1)
def match(self):
input_stamp, input_img = self.img_sub.stamp, self.img_sub.img
input_features = self.query_features
reference_img = self.reference_sub.img
reference_features = imgsift_client(reference_img)
matches = self.find_match(input_features.descriptors,
reference_features.descriptors)
rospy.loginfo('matches: {}'.format(len(matches)))
# prepare output img
matched_img = drawMatches(input_img, input_features.positions,
reference_img, reference_features.positions,
matches)
cv2.putText(matched_img, 'matches: {}'.format(len(matches)),
(5, 25), cv2.FONT_HERSHEY_SIMPLEX, 1.0, (255, 255, 255))
self.publish_img(stamp=input_stamp, img=matched_img)
def publish_img(self, stamp, img, encoding='bgr8'):
bridge = cv_bridge.CvBridge()
img_msg = bridge.cv2_to_imgmsg(img, encoding=encoding)
img_msg.header.stamp = stamp
self.pub.publish(img_msg)
def drawMatches(query_img, query_pos, train_img, train_pos, matches):
"""Draw match points for two images"""
query_img = cv2.cvtColor(query_img, cv2.COLOR_RGB2GRAY)
train_img = cv2.cvtColor(train_img, cv2.COLOR_RGB2GRAY)
query_pos = np.array(query_pos).reshape((-1, 2))
train_pos = np.array(train_pos).reshape((-1, 2))
n_row1, n_col1 = query_img.shape[:2]
n_row2, n_col2 = train_img.shape[:2]
# parepare output img
img_out = np.zeros((max([n_row1,n_row2]), n_col1+n_col2, 3), dtype='uint8')
img_out[:n_row1, :n_col1, :] = np.dstack(3*[query_img])
img_out[:n_row2, n_col1:n_col1+n_col2, :] = np.dstack(3*[train_img])
for mat in matches:
# draw and connect match points
x1, y1 = query_pos[mat.queryIdx]
x2, y2 = train_pos[mat.trainIdx]
pt1 = (int(x1), int(y1))
pt2 = (int(x2)+n_col1, int(y2))
cv2.circle(img_out, pt1, 4, (255, 0, 0), 1)
cv2.circle(img_out, pt2, 4, (255, 0, 0), 1)
cv2.line(img_out, pt1, pt2, (255, 0, 0), 1)
return img_out
def main():
rospy.init_node('sift_matcher_oneimg')
rate = rospy.Rate(10)
while not rospy.is_shutdown():
matcher = SiftMatcherOneImg()
matcher.match()
rate.sleep()
if __name__ == '__main__':
main()
|
en
| 0.667253
|
#!/usr/bin/env python # This script is to visualize how match sift features are matched between an image and camera frame. Usage ----- $ roslaunch roseus_tutorials usb-camera.launch $ roslaunch jsk_2015_05_baxter_apc sift_matcher_for_imgs.launch $ rosrun image_view image_view image:=/sift_matcher_for_imgs/output Callback function of Subscribers to listen Image Compare two images. Usually camera image (input) with static image (reference) # prepare output img Draw match points for two images # parepare output img # draw and connect match points
| 2.834453
| 3
|
drdown/users/tests/test_view_health_team.py
|
fga-gpp-mds/2018.1-Cris-Down
| 11
|
6626861
|
from test_plus.test import TestCase
from django.test import RequestFactory
from django.test.client import Client
from ..models.model_health_team import HealthTeam
class TestViewHealthTeam(TestCase):
    """
    Test if View Health_Team is working correctly
    """
    def setUp(self):
        """
        This method will run before any test: create a user with an
        associated HealthTeam record.
        """
        self.client = Client()
        self.user = self.make_user()
        self.health_team = HealthTeam.objects.create(
            cpf="057.641.271-65",
            user=self.user,
            speciality=HealthTeam.NEUROLOGY,
            council_acronym=HealthTeam.CRM,
            register_number="1234567",
            registration_state=HealthTeam.DF,
        )
    def test_health_team_get_context_data(self):
        """
        Test if the view health team is passing the data correctly
        """
        self.health_team.save()
        self.client.force_login(user=self.user)
        response = self.client.get(path='/users/testuser/', follow=True)
        # assertEqual instead of the deprecated assertEquals alias.
        self.assertEqual(response.status_code, 200)
        self.assertEqual(self.user.healthteam.cpf, self.health_team.cpf)
        # Fix: the original asserted the username twice; the duplicate
        # assertion is dropped.
        self.assertContains(response, text=self.user.username)
        self.assertContains(response, text=self.user.healthteam.cpf)
|
from test_plus.test import TestCase
from django.test import RequestFactory
from django.test.client import Client
from ..models.model_health_team import HealthTeam
class TestViewHealthTeam (TestCase):
"""
Test if View Health_Team is working correctly
"""
def setUp(self):
"""
This method will run before any test.
"""
self.client = Client()
self.user = self.make_user()
self.health_team = HealthTeam.objects.create(
cpf="057.641.271-65",
user=self.user,
speciality=HealthTeam.NEUROLOGY,
council_acronym=HealthTeam.CRM,
register_number="1234567",
registration_state=HealthTeam.DF,
)
def test_health_team_get_context_data(self):
"""
Test if the view health team is passing the data correctly
"""
self.health_team.save()
self.client.force_login(user=self.user)
response = self.client.get(path='/users/testuser/', follow=True)
self.assertEquals(response.status_code, 200)
self.assertEquals(self.user.healthteam.cpf, self.health_team.cpf)
self.assertContains(response, text=self.user.username)
self.assertContains(response, text=self.user.username)
self.assertContains(response, text=self.user.healthteam.cpf)
|
en
| 0.827258
|
Test if View Health_Team is working correctly This method will run before any test. Test if the view health team is passing the data correctly
| 2.376943
| 2
|
sets/set-symmetric-difference-operation.py
|
vcelis/hackerrank-python
| 0
|
6626862
|
<gh_stars>0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Title : Set .symmetric_difference() Operation
Subdomain : Sets
Author : <NAME>
Created : 20 July 2018
https://www.hackerrank.com/challenges/py-set-symmetric-difference-operation/problem
"""
if __name__ == '__main__':
    # The first line of each pair holds the set size, which is not needed.
    input()
    english_subscribers = set(map(int, input().split()))
    input()
    french_subscribers = set(map(int, input().split()))
    # ^ is the symmetric-difference operator, equivalent to calling
    # set.symmetric_difference().
    print(len(english_subscribers ^ french_subscribers))
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Title : Set .symmetric_difference() Operation
Subdomain : Sets
Author : <NAME>
Created : 20 July 2018
https://www.hackerrank.com/challenges/py-set-symmetric-difference-operation/problem
"""
if __name__ == '__main__':
_ = input()
set_one = set(map(int, input().split()))
_ = input()
set_two = set(map(int, input().split()))
print(len(set_one.symmetric_difference(set_two)))
|
en
| 0.632722
|
#!/usr/bin/env python3 # -*- coding: utf-8 -*- Title : Set .symmetric_difference() Operation Subdomain : Sets Author : <NAME> Created : 20 July 2018 https://www.hackerrank.com/challenges/py-set-symmetric-difference-operation/problem
| 3.398071
| 3
|
scrapper/migrations/0008_auto_20200912_1324.py
|
Dalakoti07/optimal-price-back-end
| 0
|
6626863
|
<reponame>Dalakoti07/optimal-price-back-end<filename>scrapper/migrations/0008_auto_20200912_1324.py
# Generated by Django 3.1 on 2020-09-12 07:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace the per-spec columns on ProductDetails with JSON fields.

    Every individual specification column is dropped; the full spec sheet
    and image list are stored as JSON instead, and the link to Product
    becomes a nullable OneToOneField.
    """

    dependencies = [
        ('scrapper', '0007_productdetails_review'),
    ]

    # Single-purpose spec columns superseded by the product_full_spec blob,
    # in the same order the generated migration removed them.
    _dropped_fields = (
        'OTG_compatible', 'audio_jack', 'bluetooth_support',
        'bluetooth_version', 'browser_type', 'color', 'depth',
        'display_size', 'dual_camera_lens', 'edge', 'flash', 'flash_rate',
        'gprs', 'height', 'hybrid_sim_slot', 'in_the_box',
        'internal_storage', 'internet_connectivity', 'memory_card_slot_type',
        'micro_usb_port', 'model_name', 'model_number', 'networking_type',
        'operating_frequency', 'operating_system', 'other_display_feature',
        'preinstalled_browser', 'primary_camera', 'primary_camera_available',
        'primary_camera_features', 'primary_clock_speed', 'processor_core',
        'processor_type', 'ram', 'resolution', 'resolution_type',
        'sar_value', 'secondary_camera', 'secondary_camera_available',
        'secondary_camera_features', 'secondary_clock_speed', 'sim_type',
        'sound_enhancement', 'supported_memory_card_type',
        'supported_network', 'three_g_speed', 'touch_screen',
        'usb_connectivity', 'warranty_summary', 'weight', 'width', 'wifi',
        'wifi_version',
    )

    operations = [
        migrations.RemoveField(model_name='productdetails', name=field)
        for field in _dropped_fields
    ] + [
        migrations.AddField(
            model_name='productdetails',
            name='product_full_spec',
            field=models.JSONField(default=''),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='productdetails',
            name='product_images',
            field=models.JSONField(default=''),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='productdetails',
            name='product',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='scrapper.product'),
        ),
    ]
|
# Generated by Django 3.1 on 2020-09-12 07:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace the wide per-attribute ``ProductDetails`` columns with JSON fields.

    Auto-generated by Django 3.1. Drops 53 hand-modelled spec columns and
    stores the full specification and the image list as JSON blobs instead.
    Do not reorder operations: Django replays them in sequence.
    """

    dependencies = [
        ('scrapper', '0007_productdetails_review'),
    ]

    operations = [
        # Drop every per-attribute specification column from ProductDetails.
        migrations.RemoveField(
            model_name='productdetails',
            name='OTG_compatible',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='audio_jack',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='bluetooth_support',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='bluetooth_version',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='browser_type',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='color',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='depth',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='display_size',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='dual_camera_lens',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='edge',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='flash',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='flash_rate',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='gprs',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='height',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='hybrid_sim_slot',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='in_the_box',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='internal_storage',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='internet_connectivity',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='memory_card_slot_type',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='micro_usb_port',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='model_name',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='model_number',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='networking_type',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='operating_frequency',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='operating_system',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='other_display_feature',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='preinstalled_browser',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='primary_camera',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='primary_camera_available',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='primary_camera_features',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='primary_clock_speed',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='processor_core',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='processor_type',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='ram',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='resolution',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='resolution_type',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='sar_value',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='secondary_camera',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='secondary_camera_available',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='secondary_camera_features',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='secondary_clock_speed',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='sim_type',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='sound_enhancement',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='supported_memory_card_type',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='supported_network',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='three_g_speed',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='touch_screen',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='usb_connectivity',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='warranty_summary',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='weight',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='width',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='wifi',
        ),
        migrations.RemoveField(
            model_name='productdetails',
            name='wifi_version',
        ),
        # Store the whole spec and the image list as JSON instead.
        # preserve_default=False: '' is only used to back-fill existing rows.
        migrations.AddField(
            model_name='productdetails',
            name='product_full_spec',
            field=models.JSONField(default=''),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='productdetails',
            name='product_images',
            field=models.JSONField(default=''),
            preserve_default=False,
        ),
        # Allow ProductDetails rows without a linked product.
        migrations.AlterField(
            model_name='productdetails',
            name='product',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='scrapper.product'),
        ),
    ]
|
en
| 0.802738
|
# Generated by Django 3.1 on 2020-09-12 07:54
| 1.584083
| 2
|
src/m3u_gen_acestream.py
|
SCP002/m3u-gen-ttv
| 1
|
6626864
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from os import chdir
from socket import gethostbyname, gethostname
from sys import path, stderr
from time import sleep
from traceback import format_exc, print_exc
from channel.channel_handler import ChannelHandler
from config.config import Config
from utils import Utils
class M3UGenAceStream:
    """Entry point that periodically regenerates M3U playlists for all configured data sets."""

    @staticmethod
    def main() -> None:
        """Run the endless update loop: process every data set, then sleep until the next cycle.

        Never returns; the loop is broken only by an exception (handled by the caller).
        """
        channel_handler: ChannelHandler = ChannelHandler()
        while True:
            print('Started at', datetime.now().strftime('%b %d %H:%M:%S'), end='\n\n')
            # Block until the network is reachable before hitting channel sources.
            Utils.wait_for_internet()
            data_sets_amount: int = len(Config.DATA_SETS)
            for data_set_index, data_set in enumerate(Config.DATA_SETS):
                print('Processing data set', data_set_index + 1, 'of', data_sets_amount)
                channel_handler.data_set = data_set
                channel_handler.write_playlist()
                # If remain at least one DataSet to process
                if data_set_index + 1 < data_sets_amount:
                    next_data_set_url: str = Config.DATA_SETS[data_set_index + 1].src_channels_url
                    # If do not have cached channels for the next DataSet
                    # (a cache hit means no fresh request is needed, so no throttling either).
                    if not channel_handler.get_cached_channels_for_url(next_data_set_url):
                        print('Sleeping for', timedelta(seconds=Config.CHANN_SRC_REQ_DELAY_UP),
                              'before processing next data set...')
                        sleep(Config.CHANN_SRC_REQ_DELAY_UP)
                print('')
            channel_handler.clear_cached_channels()
            print('Finished at', datetime.now().strftime('%b %d %H:%M:%S'))
            print('Sleeping for', timedelta(seconds=Config.UPDATE_DELAY), 'before the new update...')
            print('-' * 45, end='\n\n\n')
            sleep(Config.UPDATE_DELAY)
# Main start point.
if __name__ == '__main__':
    # noinspection PyBroadException
    try:
        # Run relative to the script's own directory so config/output paths resolve.
        chdir(path[0])
        M3UGenAceStream.main()
    except Exception:
        print_exc()
        if Config.MAIL_ON_CRASH:
            # Best-effort crash notification; the traceback goes into the mail body.
            print('Sending notification.', file=stderr)
            subject: str = 'm3u-gen-acestream has crashed on ' + gethostname() + '@' + gethostbyname(gethostname())
            Utils.send_email(subject, format_exc())
        if Config.PAUSE_ON_CRASH:
            # Keep the console window open so the operator can read the traceback.
            input('Press <Enter> to exit...\n')
        exit(1)
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from os import chdir
from socket import gethostbyname, gethostname
from sys import path, stderr
from time import sleep
from traceback import format_exc, print_exc
from channel.channel_handler import ChannelHandler
from config.config import Config
from utils import Utils
class M3UGenAceStream:
@staticmethod
def main() -> None:
channel_handler: ChannelHandler = ChannelHandler()
while True:
print('Started at', datetime.now().strftime('%b %d %H:%M:%S'), end='\n\n')
Utils.wait_for_internet()
data_sets_amount: int = len(Config.DATA_SETS)
for data_set_index, data_set in enumerate(Config.DATA_SETS):
print('Processing data set', data_set_index + 1, 'of', data_sets_amount)
channel_handler.data_set = data_set
channel_handler.write_playlist()
# If remain at least one DataSet to process
if data_set_index + 1 < data_sets_amount:
next_data_set_url: str = Config.DATA_SETS[data_set_index + 1].src_channels_url
# If do not have cached channels for the next DataSet
if not channel_handler.get_cached_channels_for_url(next_data_set_url):
print('Sleeping for', timedelta(seconds=Config.CHANN_SRC_REQ_DELAY_UP),
'before processing next data set...')
sleep(Config.CHANN_SRC_REQ_DELAY_UP)
print('')
channel_handler.clear_cached_channels()
print('Finished at', datetime.now().strftime('%b %d %H:%M:%S'))
print('Sleeping for', timedelta(seconds=Config.UPDATE_DELAY), 'before the new update...')
print('-' * 45, end='\n\n\n')
sleep(Config.UPDATE_DELAY)
# Main start point.
if __name__ == '__main__':
# noinspection PyBroadException
try:
chdir(path[0])
M3UGenAceStream.main()
except Exception:
print_exc()
if Config.MAIL_ON_CRASH:
print('Sending notification.', file=stderr)
subject: str = 'm3u-gen-acestream has crashed on ' + gethostname() + '@' + gethostbyname(gethostname())
Utils.send_email(subject, format_exc())
if Config.PAUSE_ON_CRASH:
input('Press <Enter> to exit...\n')
exit(1)
|
en
| 0.734661
|
#!/usr/bin/python3 # -*- coding: utf-8 -*- # If remain at least one DataSet to process # If do not have cached channels for the next DataSet # Main start point. # noinspection PyBroadException
| 2.36553
| 2
|
src/pansim/cli.py
|
parantapa/pansim
| 0
|
6626865
|
"""PanSim Command Line Interface."""
import click
import click_completion
from .simplesim import simplesim
from .distsim import distsim
@click.group()
def cli():
    """PanSim: The Pandemic Simulator."""


# Register the sub-commands on the top-level group and enable shell completion.
cli.add_command(simplesim)
cli.add_command(distsim)
click_completion.init()
|
"""PanSim Command Line Interface."""
import click
import click_completion
from .simplesim import simplesim
from .distsim import distsim
@click.group()
def cli():
"""PanSim: The Pandemic Simulator."""
cli.add_command(simplesim)
cli.add_command(distsim)
click_completion.init()
|
en
| 0.564407
|
PanSim Command Line Interface. PanSim: The Pandemic Simulator.
| 1.842339
| 2
|
core/network/ViTAE_S/NormalCell.py
|
ViTAE-Transformer/ViTAE-Transformer-Matting
| 8
|
6626866
|
<reponame>ViTAE-Transformer/ViTAE-Transformer-Matting
# Copyright (c) [2012]-[2021] Shanghai Yitu Technology Co., Ltd.
#
# This source code is licensed under the Clear BSD License
# LICENSE file in the root directory of this file
# All rights reserved.
"""
Borrow from timm(https://github.com/rwightman/pytorch-image-models)
"""
import torch
import torch.nn as nn
import numpy as np
from timm.models.layers import DropPath
from .SELayer import SELayer
import math
class Mlp(nn.Module):
    """Two-layer feed-forward block: Linear -> activation -> dropout -> Linear -> dropout.

    Hidden and output widths default to the input width when not given.
    """

    def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):
        super().__init__()
        hidden_features = hidden_features or in_features
        out_features = out_features or in_features
        self.hidden_features = hidden_features
        self.fc1 = nn.Linear(in_features, hidden_features)
        self.act = act_layer()
        self.fc2 = nn.Linear(hidden_features, out_features)
        self.drop = nn.Dropout(drop)

    def forward(self, x):
        """Apply the MLP; all dims are preserved except the last (feature) dim."""
        hidden = self.drop(self.act(self.fc1(x)))
        return self.drop(self.fc2(hidden))
class Attention(nn.Module):
    """Standard multi-head self-attention: fused qkv projection, scaled dot-product, output projection."""

    def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.):
        super().__init__()
        self.num_heads = num_heads
        head_dim = dim // num_heads
        # Default scale is 1/sqrt(head_dim) unless a truthy override is given.
        self.scale = qk_scale or head_dim ** -0.5
        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim, dim)
        self.proj_drop = nn.Dropout(proj_drop)

    def forward(self, x):
        """Attend over the token axis of ``x`` with shape (batch, tokens, channels)."""
        batch, tokens, channels = x.shape
        head_dim = channels // self.num_heads
        qkv = (
            self.qkv(x)
            .reshape(batch, tokens, 3, self.num_heads, head_dim)
            .permute(2, 0, 3, 1, 4)
        )
        queries, keys, values = qkv.unbind(0)
        scores = (queries @ keys.transpose(-2, -1)) * self.scale
        weights = self.attn_drop(scores.softmax(dim=-1))
        merged = (weights @ values).transpose(1, 2).reshape(batch, tokens, channels)
        return self.proj_drop(self.proj(merged))
class AttentionPerformer(nn.Module):
    """Performer-style linear attention using positive orthogonal random features (FAVOR+).

    NOTE(review): ``qkv_bias``, ``qk_scale`` and ``attn_drop`` are accepted for
    signature parity with ``Attention`` but are not used anywhere in this class.
    """

    def __init__(self, dim, num_heads=1, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0., kernel_ratio=0.5):
        super().__init__()
        self.head_dim = dim // num_heads
        self.emb = dim
        self.kqv = nn.Linear(dim, 3 * self.emb)
        self.dp = nn.Dropout(proj_drop)
        self.proj = nn.Linear(self.emb, self.emb)
        self.head_cnt = num_heads
        # NOTE(review): norm1 and drop_path are created but not used in this class's
        # own forward path — presumably kept for interface parity; confirm.
        self.norm1 = nn.LayerNorm(dim, eps=1e-6)
        self.epsilon = 1e-8  # for stable in division
        self.drop_path = nn.Identity()
        # m = number of random projection directions per head (kernel feature dim).
        self.m = int(self.head_dim * kernel_ratio)
        self.w = torch.randn(self.head_cnt, self.m, self.head_dim)
        # Orthogonal random features, scaled by sqrt(m) and frozen.
        # NOTE(review): ``self.w`` is a plain tensor, not a registered buffer, so it
        # does not appear in state_dict and is moved to the input device on the fly
        # in prm_exp (``self.w.to(x.device)``) — confirm this is intended.
        for i in range(self.head_cnt):
            self.w[i] = nn.Parameter(nn.init.orthogonal_(self.w[i]) * math.sqrt(self.m), requires_grad=False)
        self.w.requires_grad_(False)

    def prm_exp(self, x):
        """Map ``x`` (B, H, T, head_dim) to positive random features of shape (B, H, T, m)."""
        # part of the function is borrow from https://github.com/lucidrains/performer-pytorch
        # and <NAME> (https://github.com/cloneofsimo)
        # ==== positive random features for gaussian kernels ====
        # x = (B, T, hs)
        # w = (m, hs)
        # return : x : B, T, m
        # SM(x, y) = E_w[exp(w^T x - |x|/2) exp(w^T y - |y|/2)]
        # therefore return exp(w^Tx - |x|/2)/sqrt(m)
        xd = ((x * x).sum(dim=-1, keepdim=True)).repeat(1, 1, 1, self.m) / 2
        wtx = torch.einsum('bhti,hmi->bhtm', x.float(), self.w.to(x.device))
        return torch.exp(wtx - xd) / math.sqrt(self.m)

    def attn(self, x):
        """Linear-attention core: O(T) in sequence length instead of O(T^2)."""
        B, N, C = x.shape
        kqv = self.kqv(x).reshape(B, N, 3, self.head_cnt, self.head_dim).permute(2, 0, 3, 1, 4)
        k, q, v = kqv[0], kqv[1], kqv[2]  # (B, H, T, hs)
        kp, qp = self.prm_exp(k), self.prm_exp(q)  # (B, H, T, m), (B, H, T, m)
        # Normalizer: denominator of the kernelized softmax.
        D = torch.einsum('bhti,bhi->bht', qp, kp.sum(dim=2)).unsqueeze(dim=-1)  # (B, H, T, m) * (B, H, m) -> (B, H, T, 1)
        kptv = torch.einsum('bhin,bhim->bhnm', v.float(), kp)  # (B, H, emb, m)
        y = torch.einsum('bhti,bhni->bhtn', qp, kptv) / (D.repeat(1, 1, 1, self.head_dim) + self.epsilon)  # (B, H, T, emb)/Diag
        # skip connection
        y = y.permute(0, 2, 1, 3).reshape(B, N, self.emb)
        y = self.dp(self.proj(y))  # same as token_transformer in T2T layer, use v as skip connection
        return y

    def forward(self, x):
        """Apply Performer attention to ``x`` of shape (B, N, C)."""
        x = self.attn(x)
        return x
class NormalCell(nn.Module):
    """ViTAE normal cell: attention branch + Parallel Convolution Module (PCM) + MLP.

    The PCM runs grouped 3x3 convolutions over the tokens laid out as a 2-D grid,
    and its output is added to the attention residual; the MLP follows as usual.
    """

    def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0.,
                 drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, class_token=False, group=64, tokens_type='transformer', gamma=False, init_values=1e-4, SE=False):
        super().__init__()
        self.norm1 = norm_layer(dim)
        self.class_token = class_token
        # Token mixer: softmax attention or the linear Performer variant.
        # NOTE(review): any other tokens_type leaves self.attn unset and
        # forward() will raise AttributeError — confirm callers only pass these two.
        if tokens_type == 'transformer':
            self.attn = Attention(
                dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop)
        elif tokens_type == 'performer':
            self.attn = AttentionPerformer(
                dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop)
        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
        self.norm2 = norm_layer(dim)
        mlp_hidden_dim = int(dim * mlp_ratio)
        self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop)
        # Parallel Convolution Module: three grouped 3x3 convs over the token grid.
        self.PCM = nn.Sequential(
            nn.Conv2d(dim, mlp_hidden_dim, 3, 1, 1, 1, group),
            nn.BatchNorm2d(mlp_hidden_dim),
            nn.SiLU(inplace=True),
            nn.Conv2d(mlp_hidden_dim, dim, 3, 1, 1, 1, group),
            nn.BatchNorm2d(dim),
            nn.SiLU(inplace=True),
            nn.Conv2d(dim, dim, 3, 1, 1, 1, group),
            nn.SiLU(inplace=True),
        )
        # Optional learnable per-channel residual scales (LayerScale-style);
        # plain scalar 1 (no-op multiplication) otherwise.
        if gamma:
            self.gamma1 = nn.Parameter(init_values * torch.ones((dim)),requires_grad=True)
            self.gamma2 = nn.Parameter(init_values * torch.ones((dim)),requires_grad=True)
            self.gamma3 = nn.Parameter(init_values * torch.ones((dim)),requires_grad=True)
        else:
            self.gamma1 = 1
            self.gamma2 = 1
            self.gamma3 = 1
        if SE:
            self.SE = SELayer(dim)
        else:
            self.SE = nn.Identity()

    def forward(self, x, input_resolution=None):
        """Run the cell on tokens ``x`` of shape (B, N, C).

        input_resolution: optional (H, W) of the token grid; when absent the
        grid is assumed square (sqrt of the patch-token count).
        """
        b, n, c = x.shape
        if self.class_token:
            # First token is the class token; PCM sees only the n-1 patch tokens.
            n = n - 1
            wh = input_resolution[0] if input_resolution is not None else int(math.sqrt(n))
            ww = input_resolution[1] if input_resolution is not None else int(math.sqrt(n))
            # Tokens -> (B, C, H, W) for the convs, then back to (B, n, C).
            convX = self.drop_path(self.gamma2 * self.PCM(x[:, 1:, :].view(b, wh, ww, c).permute(0, 3, 1, 2).contiguous()).permute(0, 2, 3, 1).contiguous().view(b, n, c))
            x = x + self.drop_path(self.gamma1 * self.attn(self.norm1(x)))
            # Add the conv branch to patch tokens only (class token untouched).
            x[:, 1:] = x[:, 1:] + convX
        else:
            wh = input_resolution[0] if input_resolution is not None else int(math.sqrt(n))
            ww = input_resolution[1] if input_resolution is not None else int(math.sqrt(n))
            convX = self.drop_path(self.gamma2 * self.PCM(x.view(b, wh, ww, c).permute(0, 3, 1, 2).contiguous()).permute(0, 2, 3, 1).contiguous().view(b, n, c))
            x = x + self.drop_path(self.gamma1 * self.attn(self.norm1(x)))
            x = x + convX
        x = x + self.drop_path(self.gamma3 * self.mlp(self.norm2(x)))
        x = self.SE(x)
        return x
def get_sinusoid_encoding(n_position, d_hid):
    """Build a (1, n_position, d_hid) sinusoidal position-encoding table.

    Even feature indices carry sin components, odd indices carry cos,
    following "Attention Is All You Need".
    """
    positions = np.arange(n_position)[:, None]   # (n_position, 1)
    dim_idx = np.arange(d_hid)[None, :]          # (1, d_hid)
    # angle[p, j] = p / 10000^(2*(j//2)/d_hid), computed in one vectorized step.
    table = positions / np.power(10000, 2 * (dim_idx // 2) / d_hid)
    table[:, 0::2] = np.sin(table[:, 0::2])  # dim 2i
    table[:, 1::2] = np.cos(table[:, 1::2])  # dim 2i+1
    return torch.FloatTensor(table).unsqueeze(0)
|
# Copyright (c) [2012]-[2021] Shanghai Yitu Technology Co., Ltd.
#
# This source code is licensed under the Clear BSD License
# LICENSE file in the root directory of this file
# All rights reserved.
"""
Borrow from timm(https://github.com/rwightman/pytorch-image-models)
"""
import torch
import torch.nn as nn
import numpy as np
from timm.models.layers import DropPath
from .SELayer import SELayer
import math
class Mlp(nn.Module):
def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):
super().__init__()
out_features = out_features or in_features
hidden_features = hidden_features or in_features
self.hidden_features = hidden_features
self.fc1 = nn.Linear(in_features, hidden_features)
self.act = act_layer()
self.fc2 = nn.Linear(hidden_features, out_features)
self.drop = nn.Dropout(drop)
def forward(self, x):
x = self.fc1(x)
x = self.act(x)
x = self.drop(x)
x = self.fc2(x)
x = self.drop(x)
return x
class Attention(nn.Module):
def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.):
super().__init__()
self.num_heads = num_heads
head_dim = dim // num_heads
self.scale = qk_scale or head_dim ** -0.5
self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
self.attn_drop = nn.Dropout(attn_drop)
self.proj = nn.Linear(dim, dim)
self.proj_drop = nn.Dropout(proj_drop)
def forward(self, x):
B, N, C = x.shape
qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)
q, k, v = qkv[0], qkv[1], qkv[2]
attn = (q @ k.transpose(-2, -1)) * self.scale
attn = attn.softmax(dim=-1)
attn = self.attn_drop(attn)
x = (attn @ v).transpose(1, 2).reshape(B, N, C)
x = self.proj(x)
x = self.proj_drop(x)
return x
class AttentionPerformer(nn.Module):
def __init__(self, dim, num_heads=1, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0., kernel_ratio=0.5):
super().__init__()
self.head_dim = dim // num_heads
self.emb = dim
self.kqv = nn.Linear(dim, 3 * self.emb)
self.dp = nn.Dropout(proj_drop)
self.proj = nn.Linear(self.emb, self.emb)
self.head_cnt = num_heads
self.norm1 = nn.LayerNorm(dim, eps=1e-6)
self.epsilon = 1e-8 # for stable in division
self.drop_path = nn.Identity()
self.m = int(self.head_dim * kernel_ratio)
self.w = torch.randn(self.head_cnt, self.m, self.head_dim)
for i in range(self.head_cnt):
self.w[i] = nn.Parameter(nn.init.orthogonal_(self.w[i]) * math.sqrt(self.m), requires_grad=False)
self.w.requires_grad_(False)
def prm_exp(self, x):
# part of the function is borrow from https://github.com/lucidrains/performer-pytorch
# and <NAME> (https://github.com/cloneofsimo)
# ==== positive random features for gaussian kernels ====
# x = (B, T, hs)
# w = (m, hs)
# return : x : B, T, m
# SM(x, y) = E_w[exp(w^T x - |x|/2) exp(w^T y - |y|/2)]
# therefore return exp(w^Tx - |x|/2)/sqrt(m)
xd = ((x * x).sum(dim=-1, keepdim=True)).repeat(1, 1, 1, self.m) / 2
wtx = torch.einsum('bhti,hmi->bhtm', x.float(), self.w.to(x.device))
return torch.exp(wtx - xd) / math.sqrt(self.m)
def attn(self, x):
B, N, C = x.shape
kqv = self.kqv(x).reshape(B, N, 3, self.head_cnt, self.head_dim).permute(2, 0, 3, 1, 4)
k, q, v = kqv[0], kqv[1], kqv[2] # (B, H, T, hs)
kp, qp = self.prm_exp(k), self.prm_exp(q) # (B, H, T, m), (B, H, T, m)
D = torch.einsum('bhti,bhi->bht', qp, kp.sum(dim=2)).unsqueeze(dim=-1) # (B, H, T, m) * (B, H, m) -> (B, H, T, 1)
kptv = torch.einsum('bhin,bhim->bhnm', v.float(), kp) # (B, H, emb, m)
y = torch.einsum('bhti,bhni->bhtn', qp, kptv) / (D.repeat(1, 1, 1, self.head_dim) + self.epsilon) # (B, H, T, emb)/Diag
# skip connection
y = y.permute(0, 2, 1, 3).reshape(B, N, self.emb)
y = self.dp(self.proj(y)) # same as token_transformer in T2T layer, use v as skip connection
return y
def forward(self, x):
x = self.attn(x)
return x
class NormalCell(nn.Module):
def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0.,
drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, class_token=False, group=64, tokens_type='transformer', gamma=False, init_values=1e-4, SE=False):
super().__init__()
self.norm1 = norm_layer(dim)
self.class_token = class_token
if tokens_type == 'transformer':
self.attn = Attention(
dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop)
elif tokens_type == 'performer':
self.attn = AttentionPerformer(
dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop)
self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
self.norm2 = norm_layer(dim)
mlp_hidden_dim = int(dim * mlp_ratio)
self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop)
self.PCM = nn.Sequential(
nn.Conv2d(dim, mlp_hidden_dim, 3, 1, 1, 1, group),
nn.BatchNorm2d(mlp_hidden_dim),
nn.SiLU(inplace=True),
nn.Conv2d(mlp_hidden_dim, dim, 3, 1, 1, 1, group),
nn.BatchNorm2d(dim),
nn.SiLU(inplace=True),
nn.Conv2d(dim, dim, 3, 1, 1, 1, group),
nn.SiLU(inplace=True),
)
if gamma:
self.gamma1 = nn.Parameter(init_values * torch.ones((dim)),requires_grad=True)
self.gamma2 = nn.Parameter(init_values * torch.ones((dim)),requires_grad=True)
self.gamma3 = nn.Parameter(init_values * torch.ones((dim)),requires_grad=True)
else:
self.gamma1 = 1
self.gamma2 = 1
self.gamma3 = 1
if SE:
self.SE = SELayer(dim)
else:
self.SE = nn.Identity()
def forward(self, x, input_resolution=None):
b, n, c = x.shape
if self.class_token:
n = n - 1
wh = input_resolution[0] if input_resolution is not None else int(math.sqrt(n))
ww = input_resolution[1] if input_resolution is not None else int(math.sqrt(n))
convX = self.drop_path(self.gamma2 * self.PCM(x[:, 1:, :].view(b, wh, ww, c).permute(0, 3, 1, 2).contiguous()).permute(0, 2, 3, 1).contiguous().view(b, n, c))
x = x + self.drop_path(self.gamma1 * self.attn(self.norm1(x)))
x[:, 1:] = x[:, 1:] + convX
else:
wh = input_resolution[0] if input_resolution is not None else int(math.sqrt(n))
ww = input_resolution[1] if input_resolution is not None else int(math.sqrt(n))
convX = self.drop_path(self.gamma2 * self.PCM(x.view(b, wh, ww, c).permute(0, 3, 1, 2).contiguous()).permute(0, 2, 3, 1).contiguous().view(b, n, c))
x = x + self.drop_path(self.gamma1 * self.attn(self.norm1(x)))
x = x + convX
x = x + self.drop_path(self.gamma3 * self.mlp(self.norm2(x)))
x = self.SE(x)
return x
def get_sinusoid_encoding(n_position, d_hid):
''' Sinusoid position encoding table '''
def get_position_angle_vec(position):
return [position / np.power(10000, 2 * (hid_j // 2) / d_hid) for hid_j in range(d_hid)]
sinusoid_table = np.array([get_position_angle_vec(pos_i) for pos_i in range(n_position)])
sinusoid_table[:, 0::2] = np.sin(sinusoid_table[:, 0::2]) # dim 2i
sinusoid_table[:, 1::2] = np.cos(sinusoid_table[:, 1::2]) # dim 2i+1
return torch.FloatTensor(sinusoid_table).unsqueeze(0)
|
en
| 0.710244
|
# Copyright (c) [2012]-[2021] Shanghai Yitu Technology Co., Ltd. # # This source code is licensed under the Clear BSD License # LICENSE file in the root directory of this file # All rights reserved. Borrow from timm(https://github.com/rwightman/pytorch-image-models) # for stable in division # part of the function is borrow from https://github.com/lucidrains/performer-pytorch # and <NAME> (https://github.com/cloneofsimo) # ==== positive random features for gaussian kernels ==== # x = (B, T, hs) # w = (m, hs) # return : x : B, T, m # SM(x, y) = E_w[exp(w^T x - |x|/2) exp(w^T y - |y|/2)] # therefore return exp(w^Tx - |x|/2)/sqrt(m) # (B, H, T, hs) # (B, H, T, m), (B, H, T, m) # (B, H, T, m) * (B, H, m) -> (B, H, T, 1) # (B, H, emb, m) # (B, H, T, emb)/Diag # skip connection # same as token_transformer in T2T layer, use v as skip connection Sinusoid position encoding table # dim 2i # dim 2i+1
| 2.685949
| 3
|
beta_rec/datasets/movielens.py
|
ChaosCodes/beta-recsys
| 0
|
6626867
|
<gh_stars>0
import os
import numpy as np
import pandas as pd
from beta_rec.datasets.dataset_base import DatasetBase
from beta_rec.utils.constants import (
DEFAULT_ITEM_COL,
DEFAULT_RATING_COL,
DEFAULT_TIMESTAMP_COL,
DEFAULT_USER_COL,
)
# Download URLs for the raw MovieLens archives.
ML_100K_URL = r"http://files.grouplens.org/datasets/movielens/ml-100k.zip"
ML_1M_URL = r"http://files.grouplens.org/datasets/movielens/ml-1m.zip"
ML_25M_URL = r"http://files.grouplens.org/datasets/movielens/ml-25m.zip"
# URLs of pre-processed (already split) data hosted on OneDrive.
ML_100K_LEAVE_ONE_OUT_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugU-siALoN5y9eaCq?e=jsgoOB"
ML_100K_RANDOM_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugVD4bv1iR6KgZn63?e=89eToa"
ML_100K_TEMPORAL_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugVG_vS_DggoFaySY?e=HpcD9b"
ML_1M_LEAVE_ONE_OUT_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugVMZ5TK2sTGBUSr0?e=32CmFJ"
ML_1M_RANDOM_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugVW2Bl1A1kORNuTY?e=iEabat"
ML_1M_TEMPORAL_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugVf8PRlo82hSnblP?e=VpZa0L"
# Absolute path of the parent of the current working directory.
par_abs_dir = os.path.abspath(os.path.join(os.path.abspath("."), os.pardir))
# Raw ml-1m ratings file (path relative to the working directory).
ml_1m_raw_dir = "datasets/ml-1m/raw/ratings.dat"
# Dataset dir under temporal split.
ml_1m_temporal_dir = "datasets/ml-1m/temporal"
# Dataset dir under leave-one-out split (absolute path).
ml_1m_l1o_dir = os.path.join(par_abs_dir, "datasets/ml-1m/leave_one_out")
class Movielens_100k(DatasetBase):
    """Movielens 100k Dataset."""

    def __init__(self, root_dir=None):
        """Init Movielens_100k Class."""
        super().__init__(
            "ml_100k",
            root_dir=root_dir,
            url=ML_100K_URL,
            processed_leave_one_out_url=ML_100K_LEAVE_ONE_OUT_URL,
            processed_random_split_url=ML_100K_RANDOM_URL,
            processed_temporal_split_url=ML_100K_TEMPORAL_URL,
        )

    def preprocess(self):
        """Preprocess the raw file.

        Preprocess the file downloaded via the url, convert it to a dataframe consisting of the user-item
        interactions and save it in the processed directory.
        """
        file_name = os.path.join(self.raw_path, self.dataset_name, "u.data")
        if not os.path.exists(file_name):
            self.download()
        # u.data is tab-separated: user id, item id, rating, timestamp.
        data = pd.read_table(
            file_name,
            header=None,
            sep="\t",
            engine="python",
            names=[
                DEFAULT_USER_COL,
                DEFAULT_ITEM_COL,
                DEFAULT_RATING_COL,
                DEFAULT_TIMESTAMP_COL,
            ],
        )
        self.save_dataframe_as_npz(
            data,
            os.path.join(self.processed_path, f"{self.dataset_name}_interaction.npz"),
        )

    def make_fea_vec(self):
        """Make feature vectors for users and items.

        1. For items (movies), we use the last 19 fields as feature, which are the genres,
        with 1 indicating the movie is of that genre, and 0 indicating it is not;
        movies can be in several genres at once.
        2. For users, we construct one_hot encoding for age, gender and occupation as their
        feature, where ages are categorized into 8 groups.

        Returns:
            user_feat (numpy.ndarray): The first column is the user id, rest column are feat vectors.
            item_feat (numpy.ndarray): The first column is the item id, rest column are feat vectors.
        """
        print(f"Making user and item feature vectors for dataset {self.dataset_name}")
        data = pd.read_table(
            f"{self.dataset_dir}/raw/ml_100k/u.item",
            header=None,
            sep="|",
            engine="python",
        )
        # First column is the item id, columns 5..23 are the 19 genre flags.
        item_feat = data[[0] + [i for i in range(5, 24)]].to_numpy()
        data = pd.read_table(
            f"{self.dataset_dir}/raw/ml_100k/u.user",
            header=None,
            sep="|",
            engine="python",
        )
        # FIX: ``np.int`` was removed in NumPy 1.24 -- use the builtin ``int``.
        age_one_hot = np.eye(8).astype(int)
        # Categorize age into 8 ten-year groups (1-8).
        age_mapping = {
            1: age_one_hot[0],
            2: age_one_hot[1],
            3: age_one_hot[2],
            4: age_one_hot[3],
            5: age_one_hot[4],
            6: age_one_hot[5],
            7: age_one_hot[6],
            8: age_one_hot[7],
        }
        # Clamp the group index to 8 so ages >= 80 map to the last bucket
        # instead of raising KeyError.
        data["age_one_hot"] = data[1].apply(
            lambda x: age_mapping[min(int(x / 10) + 1, 8)]
        )
        # One-hot encode gender (column 2) from the values actually present.
        col_2 = data[2].unique()
        col_2_one_hot = np.eye(len(col_2)).astype(int)
        col_2_mapping = {}
        for idx, col in enumerate(col_2):
            col_2_mapping[col] = col_2_one_hot[idx]
        data["col_2_one_hot"] = data[2].apply(lambda x: col_2_mapping[x])
        # One-hot encode occupation (column 3) the same way.
        col_3 = data[3].unique()
        col_3_one_hot = np.eye(len(col_3)).astype(int)
        col_3_mapping = {}
        for idx, col in enumerate(col_3):
            col_3_mapping[col] = col_3_one_hot[idx]
        data["col_3_one_hot"] = data[3].apply(lambda x: col_3_mapping[x])
        # Concatenate [user_id, age_one_hot, gender_one_hot, occupation_one_hot] per row.
        A = []
        for i in data.index:
            A.append(
                [data.loc[i][0]]
                + list(data.loc[i]["age_one_hot"])
                + list(data.loc[i]["col_2_one_hot"])
                + list(data.loc[i]["col_3_one_hot"])
            )
        user_feat = np.stack(A)
        np.savez_compressed(
            f"{self.dataset_dir}/processed/feature_vec.npz",
            user_feat=user_feat,
            item_feat=item_feat,
        )
        return user_feat, item_feat

    def load_fea_vec(self):
        """Load feature vectors for users and items.

        1. For items (movies), we use the last 19 fields as feature, which are the genres,
        with 1 indicating the movie is of that genre, and 0 indicating it is not;
        movies can be in several genres at once.
        2. For users, we construct one_hot encoding for age, gender and occupation as their
        feature, where ages are categorized into 8 groups.

        Returns:
            user_feat (numpy.ndarray): The first column is the user id, rest column are feat vectors.
            item_feat (numpy.ndarray): The first column is the item id, rest column are feat vectors.
        """
        if not os.path.exists(self.dataset_dir):
            self.preprocess()
        # Build the feature file on first use, then load from cache afterwards.
        if not os.path.exists(f"{self.dataset_dir}/processed/feature_vec.npz"):
            self.make_fea_vec()
        print(f"Loading user and item feature vectors for dataset {self.dataset_name}")
        loaded = np.load(f"{self.dataset_dir}/processed/feature_vec.npz")
        return loaded["user_feat"], loaded["item_feat"]
class Movielens_1m(DatasetBase):
    """Movielens 1m Dataset."""

    def __init__(self, root_dir=None):
        """Init Movielens_1m Class."""
        super().__init__("ml_1m", root_dir=root_dir, url=ML_1M_URL)

    def preprocess(self):
        """Preprocess the raw file.

        Preprocess the file downloaded via the url, convert it to a DataFrame consisting of the user-item
        interactions and save it in the processed directory.
        """
        raw_file = os.path.join(self.raw_path, self.dataset_name, "ratings.dat")
        if not os.path.exists(raw_file):
            self.download()
        # ratings.dat lines look like "user::item::rating::timestamp".
        column_names = [
            DEFAULT_USER_COL,
            DEFAULT_ITEM_COL,
            DEFAULT_RATING_COL,
            DEFAULT_TIMESTAMP_COL,
        ]
        interactions = pd.read_table(
            raw_file, header=None, sep="::", engine="python", names=column_names
        )
        out_file = os.path.join(
            self.processed_path, f"{self.dataset_name}_interaction.npz"
        )
        self.save_dataframe_as_npz(interactions, out_file)
class Movielens_25m(DatasetBase):
    """Movielens 25m Dataset."""

    def __init__(self, root_dir=None):
        """Init Movielens_25m Class."""
        super().__init__("ml_25m", root_dir=root_dir, url=ML_25M_URL)

    def preprocess(self):
        """Preprocess the raw file.

        Preprocess the file downloaded via the url, convert it to a DataFrame consisting of the user-item
        interactions and save it in the processed directory.
        """
        file_name = os.path.join(self.raw_path, self.dataset_name, "ratings.csv")
        if not os.path.exists(file_name):
            self.download()
        # FIX: ml-25m ships ratings.csv as comma-separated data WITH a header row
        # (userId,movieId,rating,timestamp). The previous sep="::" parser was
        # copied from the ml-1m ``.dat`` format and collapsed every CSV row into
        # a single unparsed column. header=0 consumes the header row and the
        # ``names`` list renames the columns to the project defaults.
        data = pd.read_csv(
            file_name,
            header=0,
            names=[
                DEFAULT_USER_COL,
                DEFAULT_ITEM_COL,
                DEFAULT_RATING_COL,
                DEFAULT_TIMESTAMP_COL,
            ],
        )
        self.save_dataframe_as_npz(
            data,
            os.path.join(self.processed_path, f"{self.dataset_name}_interaction.npz"),
        )
|
import os
import numpy as np
import pandas as pd
from beta_rec.datasets.dataset_base import DatasetBase
from beta_rec.utils.constants import (
DEFAULT_ITEM_COL,
DEFAULT_RATING_COL,
DEFAULT_TIMESTAMP_COL,
DEFAULT_USER_COL,
)
# Download URLs for the raw MovieLens archives (grouplens.org).
ML_100K_URL = r"http://files.grouplens.org/datasets/movielens/ml-100k.zip"
ML_1M_URL = r"http://files.grouplens.org/datasets/movielens/ml-1m.zip"
ML_25M_URL = r"http://files.grouplens.org/datasets/movielens/ml-25m.zip"
# URLs of pre-processed / pre-split versions of the datasets (OneDrive shares).
ML_100K_LEAVE_ONE_OUT_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugU-siALoN5y9eaCq?e=jsgoOB"
ML_100K_RANDOM_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugVD4bv1iR6KgZn63?e=89eToa"
ML_100K_TEMPORAL_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugVG_vS_DggoFaySY?e=HpcD9b"
ML_1M_LEAVE_ONE_OUT_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugVMZ5TK2sTGBUSr0?e=32CmFJ"
ML_1M_RANDOM_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugVW2Bl1A1kORNuTY?e=iEabat"
ML_1M_TEMPORAL_URL = r"https://1drv.ms/u/s!AjMahLyQeZqugVf8PRlo82hSnblP?e=VpZa0L"
# Absolute path of the parent of the current working directory; used as the
# root for the split-dataset directories below.
par_abs_dir = os.path.abspath(os.path.join(os.path.abspath("."), os.pardir))
# Location of the raw ml-1m ratings file (relative path).
ml_1m_raw_dir = "datasets/ml-1m/raw/ratings.dat"
# Dataset directory for the temporal split (relative path).
ml_1m_temporal_dir = "datasets/ml-1m/temporal"
# Dataset directory for the leave-one-out split (absolute path).
ml_1m_l1o_dir = os.path.join(par_abs_dir, "datasets/ml-1m/leave_one_out")
class Movielens_100k(DatasetBase):
    """MovieLens-100K dataset, including user and item feature vectors."""

    def __init__(self, root_dir=None):
        """Register the dataset name, download URL and pre-split data URLs."""
        super().__init__(
            "ml_100k",
            root_dir=root_dir,
            url=ML_100K_URL,
            processed_leave_one_out_url=ML_100K_LEAVE_ONE_OUT_URL,
            processed_random_split_url=ML_100K_RANDOM_URL,
            processed_temporal_split_url=ML_100K_TEMPORAL_URL,
        )

    def preprocess(self):
        """Convert the raw tab-separated ``u.data`` file into the processed
        npz interaction file, downloading the archive first if needed.
        """
        file_name = os.path.join(self.raw_path, self.dataset_name, "u.data")
        if not os.path.exists(file_name):
            self.download()
        data = pd.read_table(
            file_name,
            header=None,
            sep="\t",
            engine="python",
            names=[
                DEFAULT_USER_COL,
                DEFAULT_ITEM_COL,
                DEFAULT_RATING_COL,
                DEFAULT_TIMESTAMP_COL,
            ],
        )
        self.save_dataframe_as_npz(
            data,
            os.path.join(self.processed_path, f"{self.dataset_name}_interaction.npz"),
        )

    def make_fea_vec(self):
        """Make feature vectors for users and items.

        1. For items (movies), the last 19 fields of ``u.item`` are used as
           features: genre indicators (1 = the movie is of that genre, 0 =
           it is not); movies can be in several genres at once.
        2. For users, a one-hot encoding of age, gender and occupation is
           built from ``u.user``, with ages bucketed into 8 decade groups.

        Returns:
            user_feat (numpy.ndarray): first column is the user id, the
                remaining columns are the feature vector.
            item_feat (numpy.ndarray): first column is the item id, the
                remaining columns are the feature vector.
        """
        print(f"Making user and item feature vectors for dataset {self.dataset_name}")
        data = pd.read_table(
            f"{self.dataset_dir}/raw/ml_100k/u.item",
            header=None,
            sep="|",
            engine="python",
        )
        # First column is the item id; columns 5..23 are the 19 genre flags.
        item_feat = data[[0] + [i for i in range(5, 24)]].to_numpy()
        data = pd.read_table(
            f"{self.dataset_dir}/raw/ml_100k/u.user",
            header=None,
            sep="|",
            engine="python",
        )
        # FIX: ``np.int`` was removed in NumPy 1.24 (deprecated since 1.20);
        # the builtin ``int`` is the documented replacement.
        age_one_hot = np.eye(8).astype(int)
        # Bucket ages by decade: 0-9 -> group 1, ..., 70-79 -> group 8.
        # NOTE(review): an age of 80+ would raise KeyError here — presumably
        # absent from ml-100k; confirm before reusing on other data.
        age_mapping = {group: age_one_hot[group - 1] for group in range(1, 9)}
        data["age_one_hot"] = data[1].apply(lambda x: age_mapping[int(x / 10) + 1])
        # One-hot encode column 2 over its observed values.
        col_2 = data[2].unique()
        col_2_one_hot = np.eye(len(col_2)).astype(int)
        col_2_mapping = {col: col_2_one_hot[idx] for idx, col in enumerate(col_2)}
        data["col_2_one_hot"] = data[2].apply(lambda x: col_2_mapping[x])
        # One-hot encode column 3 over its observed values.
        col_3 = data[3].unique()
        col_3_one_hot = np.eye(len(col_3)).astype(int)
        col_3_mapping = {col: col_3_one_hot[idx] for idx, col in enumerate(col_3)}
        data["col_3_one_hot"] = data[3].apply(lambda x: col_3_mapping[x])
        # Concatenate id + the three one-hot groups into one row per user.
        rows = []
        for i in data.index:
            rows.append(
                [data.loc[i][0]]
                + list(data.loc[i]["age_one_hot"])
                + list(data.loc[i]["col_2_one_hot"])
                + list(data.loc[i]["col_3_one_hot"])
            )
        user_feat = np.stack(rows)
        np.savez_compressed(
            f"{self.dataset_dir}/processed/feature_vec.npz",
            user_feat=user_feat,
            item_feat=item_feat,
        )
        return user_feat, item_feat

    def load_fea_vec(self):
        """Load (building first if necessary) the user/item feature vectors.

        Runs ``preprocess``/``make_fea_vec`` when the dataset directory or
        the cached ``feature_vec.npz`` file is missing.

        Returns:
            user_feat (numpy.ndarray): first column is the user id, the
                remaining columns are the feature vector.
            item_feat (numpy.ndarray): first column is the item id, the
                remaining columns are the feature vector.
        """
        if not os.path.exists(self.dataset_dir):
            self.preprocess()
        if not os.path.exists(f"{self.dataset_dir}/processed/feature_vec.npz"):
            self.make_fea_vec()
        print(f"Loading user and item feature vectors for dataset {self.dataset_name}")
        loaded = np.load(f"{self.dataset_dir}/processed/feature_vec.npz")
        return loaded["user_feat"], loaded["item_feat"]
class Movielens_1m(DatasetBase):
    """Movielens 1m Dataset.

    Thin wrapper around the MovieLens-1M ratings data; ``preprocess``
    converts the raw file into the processed interaction format.
    """

    def __init__(self, root_dir=None):
        """Init Movielens_1m Class.

        Args:
            root_dir (str, optional): root directory holding the dataset
                files; passed through to ``DatasetBase``.
        """
        super().__init__("ml_1m", root_dir=root_dir, url=ML_1M_URL)

    def preprocess(self):
        """Preprocess the raw file.

        Downloads the archive first when ``ratings.dat`` is absent, parses
        the ``::``-delimited records (user, item, rating, timestamp) into a
        DataFrame and saves it in the processed directory.
        """
        file_name = os.path.join(self.raw_path, self.dataset_name, "ratings.dat")
        if not os.path.exists(file_name):
            # Raw archive not yet fetched; download before parsing.
            self.download()
        data = pd.read_table(
            file_name,
            header=None,
            sep="::",  # ml-1m uses a multi-char separator ...
            engine="python",  # ... which requires the python engine
            names=[
                DEFAULT_USER_COL,
                DEFAULT_ITEM_COL,
                DEFAULT_RATING_COL,
                DEFAULT_TIMESTAMP_COL,
            ],
        )
        self.save_dataframe_as_npz(
            data,
            os.path.join(self.processed_path, f"{self.dataset_name}_interaction.npz"),
        )
class Movielens_25m(DatasetBase):
    """MovieLens-25M dataset (user-item ratings with timestamps)."""

    def __init__(self, root_dir=None):
        """Register the dataset name, storage root and download URL."""
        super().__init__("ml_25m", root_dir=root_dir, url=ML_25M_URL)

    def preprocess(self):
        """Convert the raw ``ratings.csv`` file into the processed npz format.

        Downloads the archive first if the raw file is not present.

        FIX: ml-25m ships ``ratings.csv`` as a comma-separated file with a
        header row (``userId,movieId,rating,timestamp``), unlike the
        ``::``-delimited ``ratings.dat`` of ml-1m.  Parsing it with
        ``sep="::"`` and ``header=None`` yielded a single mangled column, so
        the separator and header handling are corrected here.
        """
        file_name = os.path.join(self.raw_path, self.dataset_name, "ratings.csv")
        if not os.path.exists(file_name):
            self.download()
        data = pd.read_csv(
            file_name,
            sep=",",
            header=0,  # discard the "userId,movieId,rating,timestamp" header row
            names=[
                DEFAULT_USER_COL,
                DEFAULT_ITEM_COL,
                DEFAULT_RATING_COL,
                DEFAULT_TIMESTAMP_COL,
            ],
        )
        self.save_dataframe_as_npz(
            data,
            os.path.join(self.processed_path, f"{self.dataset_name}_interaction.npz"),
        )
|
en
| 0.858973
|
# download_url # processed data url # indicators of the colunmn name # raw dataset # dataset dir under temporal split # dataset dir under leave-one-out split Movielens 100k Dataset. Init Movielens_100k Class. Preprocess the raw file. Preprocess the file downloaded via the url, convert it to a dataframe consisting of the user-item interactions and save it in the processed directory. Make feature vectors for users and items. 1. For items (movies), we use the last 19 fields as feature, which are the genres, with 1 indicating the movie is of that genre, and 0 indicating it is not; movies can be in several genres at once. 2. For users, we construct one_hot encoding for age, gender and occupation as their feature, where ages are categorized into 8 groups. Returns: user_feat (numpy.ndarray): The first column is the user id, rest column are feat vectors. item_feat (numpy.ndarray): The first column is the item id, rest column are feat vectors. # first column is the item id, other 19 columns are feature # categorize age into 8 groups Load feature vectors for users and items. 1. For items (movies), we use the last 19 fields as feature, which are the genres, with 1 indicating the movie is of that genre, and 0 indicating it is not; movies can be in several genres at once. 2. For users, we construct one_hot encoding for age, gender and occupation as their feature, where ages are categorized into 8 groups. Returns: user_feat (numpy.ndarray): The first column is the user id, rest column are feat vectors. item_feat (numpy.ndarray): The first column is the itm id, rest column are feat vectors. Movielens 1m Dataset. Init Movielens_1m Class. Preprocess the raw file. Preprocess the file downloaded via the url, convert it to a DataFrame consisting of the user-item interactions and save it in the processed directory. Movielens 25m Dataset. Init Movielens_25m Class. Preprocess the raw file. 
Preprocess the file downloaded via the url, convert it to a DataFrame consisting of the user-item interactions and save it in the processed directory.
| 2.227994
| 2
|
definitions/srcwriter.py
|
Atlamillias/pixl-engine
| 6
|
6626868
|
from __future__ import annotations
from inspect import Parameter, formatannotation, _empty
from typing import Union
from io import StringIO
from pathlib import Path
import functools
class _PyTextList(list):
    """List that coerces every appended element to a ``PyTextObject``."""

    def append(self, value):
        """Append *value*, wrapping it in ``PyTextObject`` when needed."""
        item = value if isinstance(value, PyTextObject) else PyTextObject(value)
        super().append(item)
class PyTextObject:
    """A file-writable textual representation of an arbitrary Python object.

    Args:
        * obj (object): The object, an object reference, or a plain string.
        * namespace (PyTextNamespace): Optional namespace qualifying <obj>.
    """

    def __init__(self, obj: object, namespace: PyTextNamespace = None):
        self.obj = obj
        self.namespace = namespace

    @property
    def name(self):
        """Bare (unqualified) name of the wrapped object."""
        if isinstance(self.obj, str):
            return self.obj
        _missing = object()
        attr = getattr(self.obj, "__name__", _missing)
        # Objects without ``__name__`` (typing constructs, plain values)
        # fall back to inspect's annotation formatting.
        return formatannotation(self.obj) if attr is _missing else attr

    def __str__(self):
        if not self.namespace:
            return self.name
        qualifier = self.namespace.alias or self.namespace.name
        return f"{qualifier}.{self.name}"

    def __repr__(self):
        return self.__str__()
class PyTextNamespace(PyTextObject):
    """An object representing a Python namespace (import, etc). It
    can be represented in a file-friendly text format.

    Rendering via ``str()`` yields either ``import <name>`` or a
    ``from <name> import ...`` statement, depending on whether any member
    imports were registered.
    """

    def __init__(self, namespace: object, alias: str = None, imports: list = None):
        # The module/object being imported; ``alias`` is the "as" name.
        self.obj = namespace
        self.alias = alias
        self._imports: list[PyTextObject] = []
        if imports:
            self.imports = imports  # routed through the validating setter

    def __str__(self) -> str:
        # No members registered -> a plain "import X" statement.
        if not self.imports:
            return f"import {self.name}\n"
        else:
            length = len(self._imports)
            text = f"from {self.name} import "
            if length == 1:
                text += f"{self._imports[0].name}"
            elif length < 4:
                # Few names: keep them on one line.  (``rstrip(",")`` is
                # defensive; ``join`` never leaves a trailing comma.)
                text += ", ".join([i.name for i in self._imports]).rstrip(",")
            else:
                # Many names: render a parenthesised multi-line import.
                text += "("
                text += f"".join((f'\n {i.name},' for i in self._imports))
                text += "\n)"
            text += "\n"
            return text

    @property
    def name(self):
        """Bare name of the namespace (string, ``__name__``, or formatted)."""
        if isinstance(self.obj, str):
            return self.obj
        try:
            return self.obj.__name__
        except AttributeError:
            # Typing constructs and plain values lack ``__name__``.
            return formatannotation(self.obj)

    @property
    def imports(self):
        """Registered member imports (list of ``PyTextObject``)."""
        return self._imports

    @imports.setter
    def imports(self, value):
        if isinstance(value, list):
            # Wrap each entry; _PyTextList keeps later appends coerced too.
            value = _PyTextList([PyTextObject(nspace) for nspace in value])
            self._imports = value
        else:
            raise ValueError("Must be a list of `PyTextObject`'s.")
class PyTextClass(PyTextObject):
    """Renders a complete Python class definition (header, docstring,
    class attributes and ``__init__``) as source text via ``str()``.

    Each ``_write_*`` helper emits one section into an internal
    ``StringIO`` buffer that the ``_apply_buffer`` decorator creates,
    drains and closes around the outermost call.
    """

    # Render __init__ parameters on one line when fewer than this many.
    init_max_oneline_params = 4
    init_params_incl_args_var = False
    init_params_incl_kwargs_var = False
    # When True, __init__ parameters are forwarded even without baseclasses.
    init_params_as_self_attrs = False

    def __init__(
        self,
        name           : str,
        baseclasses    : list[Union[str, PyTextObject]] = None,
        metaclass      : Union[str, PyTextObject] = None,
        cls_attributes : list[Parameter] = None,
        self_attributes: list[Parameter] = None,
        init_parameters: list[Parameter] = None,
        docstring      : str = None,
    ) -> None:
        self._name = None
        # FIX: was ``self._PyTextList()`` — ``_PyTextList`` is a module-level
        # helper, not a class attribute, so every instantiation raised
        # AttributeError.
        self._baseclasses = _PyTextList()
        self._metaclass = None
        self._buffer = None
        self.name = name
        self.baseclasses = baseclasses
        self.metaclass = metaclass
        self.cls_attributes = cls_attributes or []
        self.self_attributes = self_attributes or []
        self.init_parameters = init_parameters or []
        self.docstring = docstring

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, value: str):
        """Sanitise *value* into a legal class name: strip underscores and,
        when the name starts with a digit, move the leading two characters
        to the end (upper-casing the letter)."""
        if value is None:
            raise ValueError("`name` property cannot be set to None.")
        name = value.replace("_", "")
        if name[0].isdigit():
            # NOTE(review): assumes a digit-prefixed name has at least two
            # characters and the second is a letter — confirm inputs.
            number, letter = name[:2]
            suffix = f"{number}{letter.upper()}"
            name = f"{name[2:]}{suffix}"
        self._name = name

    @property
    def baseclasses(self) -> list[PyTextObject]:
        return self._baseclasses

    @baseclasses.setter
    def baseclasses(self, value):
        if value is None:
            value = _PyTextList()
        elif not isinstance(value, PyTextObject):
            # _PyTextList coerces each element to PyTextObject.
            value = _PyTextList(value)
        self._baseclasses = value

    @property
    def metaclass(self):
        return self._metaclass

    @metaclass.setter
    def metaclass(self, value: object):
        if value is None:
            self._metaclass = None
        elif isinstance(value, PyTextObject):
            # FIX: the original branch order raised ValueError precisely
            # when a ready-made PyTextObject was supplied; accept it.
            self._metaclass = value
        else:
            self._metaclass = PyTextObject(value)

    def _apply_buffer(method):
        """Decorator: create/drain the StringIO buffer around the outermost
        decorated call; nested decorated calls reuse the open buffer."""
        @functools.wraps(method)
        def wrapper(self):
            if not self._buffer:
                self._buffer = StringIO()
                method(self)
                string = self._buffer.getvalue()
                self._buffer.close()
                self._buffer = None
                return string
            return method(self)
        return wrapper

    @_apply_buffer
    def __str__(self):
        self._write_cls_def()
        self._write_docstring()
        self._write_cls_attrs()
        self._write_init()

    @_apply_buffer
    def _write_cls_def(self):
        """Emit the ``class Name(Bases, metaclass=M):`` header line.

        FIX: the header is now assembled from explicit parts.  The previous
        chained ``str.replace`` approach joined PyTextObject instances
        directly (TypeError) and, in the metaclass-only case, produced a
        malformed header such as ``class X()(metaclass=M):``.
        """
        parts = [str(base) for base in self.baseclasses]
        if self.metaclass:
            parts.append(f"metaclass={self.metaclass}")
        if parts:
            text = f"class {self.name}({', '.join(parts)}):"
        else:
            text = f"class {self.name}:"
        self._buffer.write(f"{text}\n")

    @_apply_buffer
    def _write_docstring(self):
        """Emit the class docstring, re-indented one level."""
        if not self.docstring:
            return None
        text = self.docstring.replace('\n', '\n    ')
        self._buffer.write(f'    """{text.strip()}\n    """\n')

    @_apply_buffer
    def _write_cls_attrs(self):
        """Emit class attributes, public ones before private ones."""
        if not self.cls_attributes:
            return None
        attr_strings = self._format_parameters_as_strings(self.cls_attributes, 4)
        # Partition into public/private by leading underscore.
        public_attrs = []
        private_attrs = []
        for attr_str in attr_strings:
            if attr_str.lstrip().startswith("_"):
                private_attrs.append(attr_str)
                continue
            public_attrs.append(attr_str)
        text = ""
        if public_attrs:
            linebreak = False
            for line in public_attrs:
                # Messy formatting fix to add a newline between config and states.
                if not linebreak and 'ItemAttribute("state"' in line:
                    linebreak = True
                    text += "\n"
                text += f"\n{line}"
            text += "\n"
        if private_attrs:
            for line in private_attrs:
                text += f"\n{line}"
            text += "\n"
        self._buffer.write(f"{text}")

    @_apply_buffer
    def _write_init(self):
        """Emit the ``__init__`` definition and, when base classes exist,
        the forwarding ``super().__init__`` call."""
        text = "\n    def __init__("
        # __init__ definition: "self" is always the first parameter.
        init_parameters = [Parameter("self", Parameter.POSITIONAL_ONLY), *self.init_parameters]
        # Formatting depends on the number of parameters.
        if len(init_parameters) < self.init_max_oneline_params:
            text += ", ".join([str(param) for param in init_parameters]) + ") -> None:"
        else:
            formatted = self._format_parameters_as_strings(init_parameters, 8)
            text += "".join((f"\n{line}," for line in formatted))
            text += "\n    ) -> None:"
        # __init__ body
        # FIX: the original guard tested ``not self.init_max_oneline_params``
        # (a constant 4, always truthy), making this empty-body branch
        # unreachable; test the actual parameter list instead.
        if not self.baseclasses and not self.self_attributes and not self.init_parameters:
            text += "\n        ..."
            self._buffer.write(f"{text}\n")
            return None
        self_attributes = []
        var_keyword = None
        var_positional = None
        if self.baseclasses or self.init_params_as_self_attrs:
            for idx, param in enumerate(self.init_parameters):
                if param._name == "self":
                    continue
                elif param.kind == Parameter.VAR_KEYWORD:
                    var_keyword = self.init_parameters[idx]
                elif param.kind == Parameter.VAR_POSITIONAL:
                    var_positional = self.init_parameters[idx]
                else:
                    self_attributes.append(param)
        # super().__init__
        if self.baseclasses:
            text += "\n        super().__init__("
            for param in self_attributes:
                text += f"\n            {param.name}={param.name},"
            if var_positional:
                # FIX: trailing comma so a following **kwargs entry stays
                # syntactically valid in the generated call.
                text += f"\n            *{var_positional.name},"
            if var_keyword:
                text += f"\n            **{var_keyword.name},"
            text += "\n        )"
        self._buffer.write(f"{text}")

    def _format_parameters_as_strings(self, parameters: list[Parameter], indent: int = 0) -> list[str]:
        """Render *parameters* as column-aligned lines: names, annotations
        and defaults are each padded to the widest occurrence."""
        len_ceil_name = 0
        len_ceil_anno = 0
        len_ceil_defv = 0
        param_lines = []
        for param in parameters:
            spaces = " " * indent
            param_string = f"{spaces}{str(param)}"
            if param._annotation is not _empty and param._default is not _empty:
                name, param_string = param_string.split(":", maxsplit=1)
                anno, defv = param_string.split("=", maxsplit=1)
                if (length := len(name)) > len_ceil_name:
                    len_ceil_name = length
                if (length := len(anno)) > len_ceil_anno:
                    len_ceil_anno = length
                if (length := len(defv)) > len_ceil_defv:
                    len_ceil_defv = length
                param_lines.append((name, anno, defv))
            elif param._annotation is not _empty:
                name, anno = param_string.split(":", maxsplit=1)
                if (length := len(name)) > len_ceil_name:
                    len_ceil_name = length
                if (length := len(anno)) > len_ceil_anno:
                    len_ceil_anno = length
                param_lines.append((name, anno, None))
            elif param._default is not _empty:
                # FIX: a default-only parameter renders as ``name=default``
                # (no colon), so split on "=".  The original split on ":",
                # which raised ValueError, and appended a stale ``anno``
                # variable instead of the default value.
                name, defv = param_string.split("=", maxsplit=1)
                if (length := len(name)) > len_ceil_name:
                    len_ceil_name = length
                if (length := len(defv)) > len_ceil_defv:
                    len_ceil_defv = length
                param_lines.append((name, None, defv))
            else:
                name = param_string
                if (length := len(name)) > len_ceil_name:
                    len_ceil_name = length
                param_lines.append((name, None, None))
        name_temp = "{name:<{len_name}}"
        anno_temp = "{anno:<{len_anno}}"
        defv_temp = "{defv:<{len_defv}}"
        name_anno_defv_temp = name_temp + ":" + anno_temp + "=" + defv_temp
        name_anno_temp = name_temp + ":" + anno_temp
        name_defv_temp = name_temp + " = " + defv_temp
        formatted = []
        for param_parts in param_lines:
            name, anno, defv = param_parts
            if anno is not None and defv is not None:
                formatted_string = name_anno_defv_temp.format(
                    name=name,
                    len_name=len_ceil_name,
                    anno=anno,
                    len_anno=len_ceil_anno,
                    defv=defv,
                    len_defv=len_ceil_defv
                )
            elif anno is not None:
                formatted_string = name_anno_temp.format(
                    name=name,
                    len_name=len_ceil_name,
                    anno=anno,
                    # The "1" accounts for the padding that would be included
                    # with the default delimiter `=`.
                    len_anno=len_ceil_anno + len_ceil_defv + 1
                )
            elif defv is not None:
                formatted_string = name_defv_temp.format(
                    name=name,
                    len_name=len_ceil_name + len_ceil_anno,
                    defv=defv,
                    len_defv=len_ceil_defv
                )
            else:
                formatted_string = name_temp.format(
                    name=name,
                    # The "2" accounts for the padding that would be included
                    # with the annotation/default delimiters (= or :).
                    len_name=len_ceil_name + len_ceil_anno + len_ceil_defv + 2,
                )
            formatted.append(formatted_string)
        return formatted
class _PyTextList(list):
    # NOTE(review): duplicate of the ``_PyTextList`` defined earlier in this
    # module; this second definition rebinds the name to an identical class.
    # One of the two should eventually be removed.
    """List that wraps appended values in ``PyTextObject`` instances."""

    def append(self, value):
        """Append *value*, coercing it to ``PyTextObject`` first."""
        if not isinstance(value, PyTextObject):
            value = PyTextObject(value)
        super().append(value)
class PyFile:
    """Renders a whole Python module (imports, banner, ``__all__`` and
    class definitions) and writes it to disk."""

    # Default banner; each element is written verbatim by ``_write_banner``.
    # FIX: the original list relied on implicit string concatenation (one
    # three-line string); explicit commas make the intent clear and the
    # written bytes are identical.
    banner = [
        "##################################################\n",
        "####### NOTE: This file is auto-generated. #######\n",
        "##################################################\n",
    ]

    def __init__(
        self,
        filename: str,
        dirpath: str,
        banner: list[str] = None,
        imports: list[PyTextNamespace] = None
    ):
        self._buffer = None
        self.filename = filename
        self.dirpath = dirpath
        self.banner = banner or self.banner
        self.imports = imports or []
        self.objects: list[PyTextClass] = []

    @property
    def path(self) -> Path:
        """Full path of the target ``.py`` file."""
        return Path(self.dirpath, f"{self.filename}.py")

    # Reuse the buffer-management decorator defined on PyTextClass.
    _apply_buffer = PyTextClass._apply_buffer

    @_apply_buffer
    def __str__(self):
        self._write_imports()
        self._write_banner()
        self._write_all_dunder()
        self._write_objects()

    @_apply_buffer
    def _write_imports(self):
        """Emit one import statement per registered namespace."""
        for module in self.imports:
            self._buffer.write(str(module))
        self._buffer.write("\n")

    @_apply_buffer
    def _write_banner(self):
        self._buffer.writelines(self.banner)
        self._buffer.write("\n")

    @_apply_buffer
    def _write_all_dunder(self):
        """Emit ``__all__`` listing every rendered class by name."""
        self._buffer.write("__all__ = [")
        for cls in self.objects:
            self._buffer.write(f'\n    "{cls.name}",')
        self._buffer.write("\n]")

    @_apply_buffer
    def _write_objects(self):
        for obj in self.objects:
            self._buffer.write("\n\n\n")
            self._buffer.write(str(obj))
        self._buffer.write("\n")

    def write(self):
        """Render the module and write it to ``self.path``.

        FIX: ``mkdir(parents=True, exist_ok=True)`` — the plain ``mkdir()``
        failed when intermediate directories were missing and raced if the
        directory appeared between the ``exists`` check and the call.  The
        file is written with an explicit UTF-8 encoding instead of the
        platform default.
        """
        dirpath = Path(self.dirpath)
        if not dirpath.exists():
            dirpath.mkdir(parents=True, exist_ok=True)
        filetext = str(self)
        with open(self.path, "w", encoding="utf-8") as file:
            file.write(filetext)
|
from __future__ import annotations
from inspect import Parameter, formatannotation, _empty
from typing import Union
from io import StringIO
from pathlib import Path
import functools
class _PyTextList(list):
    """List that coerces every appended element to a ``PyTextObject``."""

    def append(self, value):
        """Append *value*, wrapping it in ``PyTextObject`` when needed."""
        item = value if isinstance(value, PyTextObject) else PyTextObject(value)
        super().append(item)
class PyTextObject:
    """A file-writable textual representation of an arbitrary Python object.

    Args:
        * obj (object): The object, an object reference, or a plain string.
        * namespace (PyTextNamespace): Optional namespace qualifying <obj>.
    """

    def __init__(self, obj: object, namespace: PyTextNamespace = None):
        self.obj = obj
        self.namespace = namespace

    @property
    def name(self):
        """Bare (unqualified) name of the wrapped object."""
        if isinstance(self.obj, str):
            return self.obj
        _missing = object()
        attr = getattr(self.obj, "__name__", _missing)
        # Objects without ``__name__`` (typing constructs, plain values)
        # fall back to inspect's annotation formatting.
        return formatannotation(self.obj) if attr is _missing else attr

    def __str__(self):
        if not self.namespace:
            return self.name
        qualifier = self.namespace.alias or self.namespace.name
        return f"{qualifier}.{self.name}"

    def __repr__(self):
        return self.__str__()
class PyTextNamespace(PyTextObject):
    """An object representing a Python namespace (import, etc). It
    can be represented in a file-friendly text format.

    Rendering via ``str()`` yields either ``import <name>`` or a
    ``from <name> import ...`` statement, depending on whether any member
    imports were registered.
    """

    def __init__(self, namespace: object, alias: str = None, imports: list = None):
        # The module/object being imported; ``alias`` is the "as" name.
        self.obj = namespace
        self.alias = alias
        self._imports: list[PyTextObject] = []
        if imports:
            self.imports = imports  # routed through the validating setter

    def __str__(self) -> str:
        # No members registered -> a plain "import X" statement.
        if not self.imports:
            return f"import {self.name}\n"
        else:
            length = len(self._imports)
            text = f"from {self.name} import "
            if length == 1:
                text += f"{self._imports[0].name}"
            elif length < 4:
                # Few names: keep them on one line.  (``rstrip(",")`` is
                # defensive; ``join`` never leaves a trailing comma.)
                text += ", ".join([i.name for i in self._imports]).rstrip(",")
            else:
                # Many names: render a parenthesised multi-line import.
                text += "("
                text += f"".join((f'\n {i.name},' for i in self._imports))
                text += "\n)"
            text += "\n"
            return text

    @property
    def name(self):
        """Bare name of the namespace (string, ``__name__``, or formatted)."""
        if isinstance(self.obj, str):
            return self.obj
        try:
            return self.obj.__name__
        except AttributeError:
            # Typing constructs and plain values lack ``__name__``.
            return formatannotation(self.obj)

    @property
    def imports(self):
        """Registered member imports (list of ``PyTextObject``)."""
        return self._imports

    @imports.setter
    def imports(self, value):
        if isinstance(value, list):
            # Wrap each entry; _PyTextList keeps later appends coerced too.
            value = _PyTextList([PyTextObject(nspace) for nspace in value])
            self._imports = value
        else:
            raise ValueError("Must be a list of `PyTextObject`'s.")
class PyTextClass(PyTextObject):
    """Renders a complete Python class definition (header, docstring,
    class attributes and ``__init__``) as source text via ``str()``.

    Each ``_write_*`` helper emits one section into an internal
    ``StringIO`` buffer that the ``_apply_buffer`` decorator creates,
    drains and closes around the outermost call.
    """

    # Render __init__ parameters on one line when fewer than this many.
    init_max_oneline_params = 4
    init_params_incl_args_var = False
    init_params_incl_kwargs_var = False
    # When True, __init__ parameters are forwarded even without baseclasses.
    init_params_as_self_attrs = False

    def __init__(
        self,
        name           : str,
        baseclasses    : list[Union[str, PyTextObject]] = None,
        metaclass      : Union[str, PyTextObject] = None,
        cls_attributes : list[Parameter] = None,
        self_attributes: list[Parameter] = None,
        init_parameters: list[Parameter] = None,
        docstring      : str = None,
    ) -> None:
        self._name = None
        # FIX: was ``self._PyTextList()`` — ``_PyTextList`` is a module-level
        # helper, not a class attribute, so every instantiation raised
        # AttributeError.
        self._baseclasses = _PyTextList()
        self._metaclass = None
        self._buffer = None
        self.name = name
        self.baseclasses = baseclasses
        self.metaclass = metaclass
        self.cls_attributes = cls_attributes or []
        self.self_attributes = self_attributes or []
        self.init_parameters = init_parameters or []
        self.docstring = docstring

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, value: str):
        """Sanitise *value* into a legal class name: strip underscores and,
        when the name starts with a digit, move the leading two characters
        to the end (upper-casing the letter)."""
        if value is None:
            raise ValueError("`name` property cannot be set to None.")
        name = value.replace("_", "")
        if name[0].isdigit():
            # NOTE(review): assumes a digit-prefixed name has at least two
            # characters and the second is a letter — confirm inputs.
            number, letter = name[:2]
            suffix = f"{number}{letter.upper()}"
            name = f"{name[2:]}{suffix}"
        self._name = name

    @property
    def baseclasses(self) -> list[PyTextObject]:
        return self._baseclasses

    @baseclasses.setter
    def baseclasses(self, value):
        if value is None:
            value = _PyTextList()
        elif not isinstance(value, PyTextObject):
            # _PyTextList coerces each element to PyTextObject.
            value = _PyTextList(value)
        self._baseclasses = value

    @property
    def metaclass(self):
        return self._metaclass

    @metaclass.setter
    def metaclass(self, value: object):
        if value is None:
            self._metaclass = None
        elif isinstance(value, PyTextObject):
            # FIX: the original branch order raised ValueError precisely
            # when a ready-made PyTextObject was supplied; accept it.
            self._metaclass = value
        else:
            self._metaclass = PyTextObject(value)

    def _apply_buffer(method):
        """Decorator: create/drain the StringIO buffer around the outermost
        decorated call; nested decorated calls reuse the open buffer."""
        @functools.wraps(method)
        def wrapper(self):
            if not self._buffer:
                self._buffer = StringIO()
                method(self)
                string = self._buffer.getvalue()
                self._buffer.close()
                self._buffer = None
                return string
            return method(self)
        return wrapper

    @_apply_buffer
    def __str__(self):
        self._write_cls_def()
        self._write_docstring()
        self._write_cls_attrs()
        self._write_init()

    @_apply_buffer
    def _write_cls_def(self):
        """Emit the ``class Name(Bases, metaclass=M):`` header line.

        FIX: the header is now assembled from explicit parts.  The previous
        chained ``str.replace`` approach joined PyTextObject instances
        directly (TypeError) and, in the metaclass-only case, produced a
        malformed header such as ``class X()(metaclass=M):``.
        """
        parts = [str(base) for base in self.baseclasses]
        if self.metaclass:
            parts.append(f"metaclass={self.metaclass}")
        if parts:
            text = f"class {self.name}({', '.join(parts)}):"
        else:
            text = f"class {self.name}:"
        self._buffer.write(f"{text}\n")

    @_apply_buffer
    def _write_docstring(self):
        """Emit the class docstring, re-indented one level."""
        if not self.docstring:
            return None
        text = self.docstring.replace('\n', '\n    ')
        self._buffer.write(f'    """{text.strip()}\n    """\n')

    @_apply_buffer
    def _write_cls_attrs(self):
        """Emit class attributes, public ones before private ones."""
        if not self.cls_attributes:
            return None
        attr_strings = self._format_parameters_as_strings(self.cls_attributes, 4)
        # Partition into public/private by leading underscore.
        public_attrs = []
        private_attrs = []
        for attr_str in attr_strings:
            if attr_str.lstrip().startswith("_"):
                private_attrs.append(attr_str)
                continue
            public_attrs.append(attr_str)
        text = ""
        if public_attrs:
            linebreak = False
            for line in public_attrs:
                # Messy formatting fix to add a newline between config and states.
                if not linebreak and 'ItemAttribute("state"' in line:
                    linebreak = True
                    text += "\n"
                text += f"\n{line}"
            text += "\n"
        if private_attrs:
            for line in private_attrs:
                text += f"\n{line}"
            text += "\n"
        self._buffer.write(f"{text}")

    @_apply_buffer
    def _write_init(self):
        """Emit the ``__init__`` definition and, when base classes exist,
        the forwarding ``super().__init__`` call."""
        text = "\n    def __init__("
        # __init__ definition: "self" is always the first parameter.
        init_parameters = [Parameter("self", Parameter.POSITIONAL_ONLY), *self.init_parameters]
        # Formatting depends on the number of parameters.
        if len(init_parameters) < self.init_max_oneline_params:
            text += ", ".join([str(param) for param in init_parameters]) + ") -> None:"
        else:
            formatted = self._format_parameters_as_strings(init_parameters, 8)
            text += "".join((f"\n{line}," for line in formatted))
            text += "\n    ) -> None:"
        # __init__ body
        # FIX: the original guard tested ``not self.init_max_oneline_params``
        # (a constant 4, always truthy), making this empty-body branch
        # unreachable; test the actual parameter list instead.
        if not self.baseclasses and not self.self_attributes and not self.init_parameters:
            text += "\n        ..."
            self._buffer.write(f"{text}\n")
            return None
        self_attributes = []
        var_keyword = None
        var_positional = None
        if self.baseclasses or self.init_params_as_self_attrs:
            for idx, param in enumerate(self.init_parameters):
                if param._name == "self":
                    continue
                elif param.kind == Parameter.VAR_KEYWORD:
                    var_keyword = self.init_parameters[idx]
                elif param.kind == Parameter.VAR_POSITIONAL:
                    var_positional = self.init_parameters[idx]
                else:
                    self_attributes.append(param)
        # super().__init__
        if self.baseclasses:
            text += "\n        super().__init__("
            for param in self_attributes:
                text += f"\n            {param.name}={param.name},"
            if var_positional:
                # FIX: trailing comma so a following **kwargs entry stays
                # syntactically valid in the generated call.
                text += f"\n            *{var_positional.name},"
            if var_keyword:
                text += f"\n            **{var_keyword.name},"
            text += "\n        )"
        self._buffer.write(f"{text}")

    def _format_parameters_as_strings(self, parameters: list[Parameter], indent: int = 0) -> list[str]:
        """Render *parameters* as column-aligned lines: names, annotations
        and defaults are each padded to the widest occurrence."""
        len_ceil_name = 0
        len_ceil_anno = 0
        len_ceil_defv = 0
        param_lines = []
        for param in parameters:
            spaces = " " * indent
            param_string = f"{spaces}{str(param)}"
            if param._annotation is not _empty and param._default is not _empty:
                name, param_string = param_string.split(":", maxsplit=1)
                anno, defv = param_string.split("=", maxsplit=1)
                if (length := len(name)) > len_ceil_name:
                    len_ceil_name = length
                if (length := len(anno)) > len_ceil_anno:
                    len_ceil_anno = length
                if (length := len(defv)) > len_ceil_defv:
                    len_ceil_defv = length
                param_lines.append((name, anno, defv))
            elif param._annotation is not _empty:
                name, anno = param_string.split(":", maxsplit=1)
                if (length := len(name)) > len_ceil_name:
                    len_ceil_name = length
                if (length := len(anno)) > len_ceil_anno:
                    len_ceil_anno = length
                param_lines.append((name, anno, None))
            elif param._default is not _empty:
                # FIX: a default-only parameter renders as ``name=default``
                # (no colon), so split on "=".  The original split on ":",
                # which raised ValueError, and appended a stale ``anno``
                # variable instead of the default value.
                name, defv = param_string.split("=", maxsplit=1)
                if (length := len(name)) > len_ceil_name:
                    len_ceil_name = length
                if (length := len(defv)) > len_ceil_defv:
                    len_ceil_defv = length
                param_lines.append((name, None, defv))
            else:
                name = param_string
                if (length := len(name)) > len_ceil_name:
                    len_ceil_name = length
                param_lines.append((name, None, None))
        name_temp = "{name:<{len_name}}"
        anno_temp = "{anno:<{len_anno}}"
        defv_temp = "{defv:<{len_defv}}"
        name_anno_defv_temp = name_temp + ":" + anno_temp + "=" + defv_temp
        name_anno_temp = name_temp + ":" + anno_temp
        name_defv_temp = name_temp + " = " + defv_temp
        formatted = []
        for param_parts in param_lines:
            name, anno, defv = param_parts
            if anno is not None and defv is not None:
                formatted_string = name_anno_defv_temp.format(
                    name=name,
                    len_name=len_ceil_name,
                    anno=anno,
                    len_anno=len_ceil_anno,
                    defv=defv,
                    len_defv=len_ceil_defv
                )
            elif anno is not None:
                formatted_string = name_anno_temp.format(
                    name=name,
                    len_name=len_ceil_name,
                    anno=anno,
                    # The "1" accounts for the padding that would be included
                    # with the default delimiter `=`.
                    len_anno=len_ceil_anno + len_ceil_defv + 1
                )
            elif defv is not None:
                formatted_string = name_defv_temp.format(
                    name=name,
                    len_name=len_ceil_name + len_ceil_anno,
                    defv=defv,
                    len_defv=len_ceil_defv
                )
            else:
                formatted_string = name_temp.format(
                    name=name,
                    # The "2" accounts for the padding that would be included
                    # with the annotation/default delimiters (= or :).
                    len_name=len_ceil_name + len_ceil_anno + len_ceil_defv + 2,
                )
            formatted.append(formatted_string)
        return formatted
class _PyTextList(list):
    # NOTE(review): duplicate of the ``_PyTextList`` defined earlier in this
    # module; this second definition rebinds the name to an identical class.
    # One of the two should eventually be removed.
    """List that wraps appended values in ``PyTextObject`` instances."""

    def append(self, value):
        """Append *value*, coercing it to ``PyTextObject`` first."""
        if not isinstance(value, PyTextObject):
            value = PyTextObject(value)
        super().append(value)
class PyFile:
    """Renders a whole Python module (imports, banner, ``__all__`` and
    class definitions) and writes it to disk."""

    # Default banner; each element is written verbatim by ``_write_banner``.
    # FIX: the original list relied on implicit string concatenation (one
    # three-line string); explicit commas make the intent clear and the
    # written bytes are identical.
    banner = [
        "##################################################\n",
        "####### NOTE: This file is auto-generated. #######\n",
        "##################################################\n",
    ]

    def __init__(
        self,
        filename: str,
        dirpath: str,
        banner: list[str] = None,
        imports: list[PyTextNamespace] = None
    ):
        self._buffer = None
        self.filename = filename
        self.dirpath = dirpath
        self.banner = banner or self.banner
        self.imports = imports or []
        self.objects: list[PyTextClass] = []

    @property
    def path(self) -> Path:
        """Full path of the target ``.py`` file."""
        return Path(self.dirpath, f"{self.filename}.py")

    # Reuse the buffer-management decorator defined on PyTextClass.
    _apply_buffer = PyTextClass._apply_buffer

    @_apply_buffer
    def __str__(self):
        self._write_imports()
        self._write_banner()
        self._write_all_dunder()
        self._write_objects()

    @_apply_buffer
    def _write_imports(self):
        """Emit one import statement per registered namespace."""
        for module in self.imports:
            self._buffer.write(str(module))
        self._buffer.write("\n")

    @_apply_buffer
    def _write_banner(self):
        self._buffer.writelines(self.banner)
        self._buffer.write("\n")

    @_apply_buffer
    def _write_all_dunder(self):
        """Emit ``__all__`` listing every rendered class by name."""
        self._buffer.write("__all__ = [")
        for cls in self.objects:
            self._buffer.write(f'\n    "{cls.name}",')
        self._buffer.write("\n]")

    @_apply_buffer
    def _write_objects(self):
        for obj in self.objects:
            self._buffer.write("\n\n\n")
            self._buffer.write(str(obj))
        self._buffer.write("\n")

    def write(self):
        """Render the module and write it to ``self.path``.

        FIX: ``mkdir(parents=True, exist_ok=True)`` — the plain ``mkdir()``
        failed when intermediate directories were missing and raced if the
        directory appeared between the ``exists`` check and the call.  The
        file is written with an explicit UTF-8 encoding instead of the
        platform default.
        """
        dirpath = Path(self.dirpath)
        if not dirpath.exists():
            dirpath.mkdir(parents=True, exist_ok=True)
        filetext = str(self)
        with open(self.path, "w", encoding="utf-8") as file:
            file.write(filetext)
|
en
| 0.607638
|
An object representing any Python object. It can be represented in a file-friendly text format. Args: * obj (object): The object or object reference. Can also be a string. * namespace (PyNamespace): The namespace for <obj>. An object representing a Python namespace (import, etc). It can be represented in a file-friendly text format. {text.strip()}\n # Sorting # Messy formatting fix to add a newline between config and states. # __init__ definition # Formatting depends on the number of parameters. # __init__ body # super().__init__ # The "1" accounts for the padding that would be included # with the default delimiter `=`. # The "2" accounts for the padding that would be included # with the annotation/default delimiters (= or :). #################################################\n" ###### NOTE: This file is auto-generated. #######\n" #################################################\n"
| 3.016671
| 3
|
samples/python/src/main/service/ZoweClient.py
|
BroadcomMFD/test4z
| 6
|
6626869
|
# ZOWE and Z/OSMF client for the job submission
import sys
sys.path.append("../../main")
from zowe.zos_console_for_zowe_sdk import Console
from zowe.zos_jobs_for_zowe_sdk import Jobs
from polling import TimeoutException, poll
from utility import get_zosmf_connection, get_polling_timeout, get_polling_interval
# Job submission through ZOSMF
# Waits until the job return code and returns it
# @dataset - JCL dataset with the job/s
def zowe_submit_job_notify(dataset):
    """Submit the JCL in *dataset* through z/OSMF and wait for the return code.

    Polls the job status until is_polling_successful() reports completion,
    then prints and returns the final status dict (includes 'retcode').

    Args:
        dataset: JCL dataset name containing the job(s).

    Returns:
        The final job-status dict returned by the polling call.

    Raises:
        Exception: if submission fails, the poll times out, or an
            unexpected error occurs (original cause chained).
    """
    connection = Jobs(get_zosmf_connection())
    try:
        job = connection.submit_from_mainframe(dataset)
        print("\nBatch application job submitted\nJobId: " + job['jobid'] + "\nWaiting for the return code...")
    except Exception as exc:
        # Narrowed from a bare `except:` (which also trapped KeyboardInterrupt
        # and SystemExit); chain the cause so the real error is not discarded.
        raise Exception("An error occurred during the job submission. Check the ZOSMF access, health status and verify the config.cfg values") from exc
    try:
        result = poll(
            lambda: connection.get_job_status(job['jobname'], job['jobid']),
            timeout=get_polling_timeout(),
            step=get_polling_interval(),
            check_success=is_polling_successful
        )
        print("Return code: " + result['retcode'])
        return result
    except TimeoutException as exc:
        raise Exception("Timeout value exceeded by the function. Job return code could not be retrieved") from exc
    except Exception as exc:
        raise Exception("Unexpected error") from exc
# Python polling check_success method,
# checks the job execution.
# @response - polling request response
# @returns - boolean status of the polling method
def is_polling_successful(response):
    """`polling` check_success callback: True once the job reached OUTPUT.

    Args:
        response: job-status dict returned by Jobs.get_job_status().

    Returns:
        bool: True when the job status is 'OUTPUT' (job finished).

    Raises:
        Exception: if the response is malformed (original cause chained
            instead of discarded, so diagnosis is possible).
    """
    try:
        return response['status'] == 'OUTPUT'
    except Exception as exc:
        raise Exception("Unexpected error") from exc
|
# ZOWE and Z/OSMF client for the job submission
import sys
sys.path.append("../../main")
from zowe.zos_console_for_zowe_sdk import Console
from zowe.zos_jobs_for_zowe_sdk import Jobs
from polling import TimeoutException, poll
from utility import get_zosmf_connection, get_polling_timeout, get_polling_interval
# Job submission through ZOSMF
# Waits until the job return code and returns it
# @dataset - JCL dataset with the job/s
def zowe_submit_job_notify(dataset):
    """Submit the JCL in *dataset* through z/OSMF and wait for the return code.

    Polls the job status until it reaches OUTPUT, then prints and returns the
    final status dict (which includes 'retcode').
    """
    connection = Jobs(get_zosmf_connection())
    try:
        job = connection.submit_from_mainframe(dataset)
        print("\nBatch application job submitted\nJobId: " + job['jobid'] + "\nWaiting for the return code...")
    # NOTE(review): bare `except` also traps KeyboardInterrupt/SystemExit and
    # discards the cause -- consider `except Exception as exc: ... from exc`.
    except:
        raise Exception("An error occurred during the job submission. Check the ZOSMF access, health status and verify the config.cfg values")
    try:
        # Poll get_job_status until is_polling_successful() returns True.
        result = poll(
            lambda: connection.get_job_status(job['jobname'], job['jobid']),
            timeout=get_polling_timeout(),
            step=get_polling_interval(),
            check_success=is_polling_successful
        )
        print ("Return code: " + result['retcode'])
        return result
    except TimeoutException:
        raise Exception("Timeout value exceeded by the function. Job return code could not be retrieved")
    except Exception:
        raise Exception("Unexpected error")
# Python polling check_success method,
# checks the job execution.
# @response - polling request response
# @returns - boolean status of the polling method
def is_polling_successful(response):
    """`polling` check_success callback: True once the job status is OUTPUT."""
    try:
        return response['status']=='OUTPUT'
    except Exception:
        # Malformed response (e.g. missing 'status' key).
        raise Exception("Unexpected error")
|
en
| 0.755556
|
# ZOWE and Z/OSMF client for the job submission # Job submission through ZOSMF # Waits until the job return code and returns it # @dataset - JCL dataset with the job/s # Python polling check_success method, # checks the job execution. # @response - polling request response # @returns - boolean status of the polling method
| 2.269495
| 2
|
cqi_cpp/src/wrapper/setup.py
|
AMR-/Conservative-Q-Improvement
| 0
|
6626870
|
<filename>cqi_cpp/src/wrapper/setup.py
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

# The Cython wrapper plus the C++ sources it binds against.
# NOTE(review): distutils was removed in Python 3.12 -- consider setuptools.
file_list = ["qtree_wrapper.pyx", "../discrete.cpp", "../box.cpp", "../leafsplit.cpp", "../qtreeleaf.cpp", "../qtreeinternal.cpp", "../qtree.cpp", "../state.cpp", "../action.cpp"]

# Build the qtree_wrapper extension, letting Cython compile the .pyx first.
setup(
    ext_modules=[Extension("qtree_wrapper", file_list, language="c++")],
    cmdclass = {'build_ext': build_ext}
)
|
<filename>cqi_cpp/src/wrapper/setup.py
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

# The Cython wrapper plus the C++ sources it binds against.
# NOTE(review): distutils was removed in Python 3.12 -- consider setuptools.
file_list = ["qtree_wrapper.pyx", "../discrete.cpp", "../box.cpp", "../leafsplit.cpp", "../qtreeleaf.cpp", "../qtreeinternal.cpp", "../qtree.cpp", "../state.cpp", "../action.cpp"]

# Build the qtree_wrapper extension, letting Cython compile the .pyx first.
setup(
    ext_modules=[Extension("qtree_wrapper", file_list, language="c++")],
    cmdclass = {'build_ext': build_ext}
)
|
none
| 1
| 1.479067
| 1
|
|
src/Compile.py
|
laubonghaudoi/Cantonese
| 0
|
6626871
|
<gh_stars>0
class Compile(object):
    """Translate a Cantonese AST into a target language (js, c or asm).

    Args:
        ast: list of AST nodes; each node is a tagged sequence, e.g.
            ``("node_print", (tag, expr))``.
        target: backend id -- "js", "c" or "asm".
        path: source file path ending in "cantonese"; ``ret()`` derives the
            output path from it.
    """

    # Output-file extension per backend (replaces the triplicated
    # path-suffix computation the original ret() carried).
    _EXTENSIONS = {"js": "js", "c": "c", "asm": "S"}

    def __init__(self, ast, target, path) -> None:
        self.ast = ast
        self.target = target
        self.path = path
        # Accumulated output text, one attribute per backend.
        self.TO_JS_CODE = ""
        self.TO_C_CODE = ""
        self.TO_ASM_CODE = ""
        # Run the backend selected by `target` immediately.
        if self.target == "js":
            self.run_js(self.ast)
        if self.target == "c":
            self.run_c(self.ast)
        if self.target == "asm":
            self.run_asm(self.ast)

    def ret(self):
        """Return ``(generated code, output path)`` for the chosen target.

        Returns None (implicitly) for an unknown target, matching the
        original behaviour.
        """
        if self.target in self._EXTENSIONS:
            code = {
                "js": self.TO_JS_CODE,
                "c": self.TO_C_CODE,
                "asm": self.TO_ASM_CODE,
            }[self.target]
            # Swap the trailing "cantonese" extension for the backend's.
            stem = self.path[: len(self.path) - len('cantonese')]
            return code, stem + self._EXTENSIONS[self.target]

    # TODO: real expression compilation; expressions pass through verbatim.
    def eval_expr(self, expr):
        return expr

    def run_asm(self, Nodes: list, label='', path='') -> None:
        """Assembly backend: not implemented yet."""
        for node in Nodes:
            pass

    def run_c(self, Nodes: list, label='', path='') -> None:
        """C backend: not implemented yet."""
        for node in Nodes:
            if node[0] == "node_print":
                pass

    def run_js(self, Nodes: list, label='', path='', in_web=False) -> None:
        """Emit JavaScript for each AST node into TO_JS_CODE.

        Recurses into block bodies (if/elif/else/fundef). Node tags are
        mutually exclusive, so the original's repeated ``if`` tests are an
        ``elif`` chain here -- same output, one comparison per node.
        """
        for node in Nodes:
            kind = node[0]
            if kind == "node_print":
                # Browsers get alert(), Node.js gets console.log().
                call = "alert(" if in_web else "console.log("
                self.TO_JS_CODE += call + self.eval_expr(node[1][1]) + ");\n"
            elif kind == "node_exit":
                self.TO_JS_CODE += "process.exit();\n"
            elif kind == "node_let":
                self.TO_JS_CODE += node[1][1] + " = " + self.eval_expr(node[2][1]) + ";\n"
            elif kind == "node_if":
                self.TO_JS_CODE += "if (" + self.eval_expr(node[1][1]) + ") {\n"
                self.run_js(node[2])
                self.TO_JS_CODE += "}"
            elif kind == "node_elif":
                self.TO_JS_CODE += "else if (" + self.eval_expr(node[1][1]) + ") {\n"
                self.run_js(node[2])
                self.TO_JS_CODE += "}"
            elif kind == "node_else":
                self.TO_JS_CODE += "else{"
                self.run_js(node[1])
                self.TO_JS_CODE += "}"
            elif kind == "node_call":
                self.TO_JS_CODE += node[1][1] + ";\n"
            elif kind == "node_fundef":
                # node[2] is the parameter list, or the string 'None' when
                # the function takes no parameters (merges the original's
                # duplicated emission branches).
                params = "" if node[2] == 'None' else node[2][1]
                self.TO_JS_CODE += "function " + node[1][1] + "(" + params + ") {\n"
                self.run_js(node[3])
                self.TO_JS_CODE += "}\n"
|
class Compile(object):
    """Translate a Cantonese AST into a target language (js, c or asm)."""

    def __init__(self, ast, target, path) -> None:
        # AST node list, backend id ("js"/"c"/"asm"), and source file path
        # (ends in "cantonese"; ret() derives the output path from it).
        self.ast = ast
        self.target = target
        self.path = path
        # Accumulated output text, one attribute per backend.
        self.TO_JS_CODE = ""
        self.TO_C_CODE = ""
        self.TO_ASM_CODE = ""
        # Run the backend selected by `target` immediately.
        if self.target == "js":
            self.run_js(self.ast)
        if self.target == "c":
            self.run_c(self.ast)
        if self.target == "asm":
            self.run_asm(self.ast)

    def ret(self):
        """Return (generated code, output path with the backend's extension)."""
        if self.target == "js":
            return self.TO_JS_CODE, self.path[ : len(self.path) - len('cantonese')] + 'js'
        if self.target == "c":
            return self.TO_C_CODE, self.path[ : len(self.path) - len('cantonese')] + 'c'
        if self.target == "asm":
            return self.TO_ASM_CODE, self.path[ : len(self.path) - len('cantonese')] + 'S'

    # TODO
    def eval_expr(self, expr):
        # Placeholder: expressions are emitted verbatim for now.
        return expr

    def run_asm(self, Nodes : list, label = '', path = '') -> None:
        # Assembly backend: not implemented yet.
        for node in Nodes:
            pass

    def run_c(self, Nodes : list, label = '', path = '') -> None:
        # C backend: not implemented yet.
        for node in Nodes:
            if node[0] == "node_print":
                pass

    def run_js(self, Nodes : list, label = '', path = '', in_web = False) -> None:
        """Emit JavaScript for each AST node; recurses into block bodies."""
        for node in Nodes:
            if node[0] == "node_print":
                # Browsers get alert(), Node.js gets console.log().
                if in_web:
                    self.TO_JS_CODE += "alert(" + self.eval_expr(node[1][1]) + ");\n"
                else:
                    self.TO_JS_CODE += "console.log(" + self.eval_expr(node[1][1]) + ");\n"
            if node[0] == "node_exit":
                self.TO_JS_CODE += "process.exit();\n"
            if node[0] == "node_let":
                self.TO_JS_CODE += node[1][1] + " = " + self.eval_expr(node[2][1]) + ";\n"
            if node[0] == "node_if":
                self.TO_JS_CODE += "if (" + self.eval_expr(node[1][1]) + ") {\n"
                self.run_js(node[2])
                self.TO_JS_CODE += "}"
            if node[0] == "node_elif":
                self.TO_JS_CODE += "else if (" + self.eval_expr(node[1][1]) + ") {\n"
                self.run_js(node[2])
                self.TO_JS_CODE += "}"
            if node[0] == "node_else":
                self.TO_JS_CODE += "else{"
                self.run_js(node[1])
                self.TO_JS_CODE += "}"
            if node[0] == "node_call":
                self.TO_JS_CODE += node[1][1] + ";\n"
            if node[0] == "node_fundef":
                # node[2] is the parameter list, or the string 'None' when absent.
                if node[2] == 'None':
                    self.TO_JS_CODE += "function " + node[1][1] + "() {\n"
                    self.run_js(node[3])
                    self.TO_JS_CODE += "}\n"
                else:
                    self.TO_JS_CODE += "function " + node[1][1] + "(" + node[2][1] + ") {\n"
                    self.run_js(node[3])
                    self.TO_JS_CODE += "}\n"
|
none
| 1
| 2.732922
| 3
|
|
configure.py
|
StarMKWii/mkw-sp
| 0
|
6626872
|
#!/usr/bin/env python3
from argparse import ArgumentParser
import io
import os, sys
from vendor.ninja_syntax import Writer
# Probe for json5 up front so the script fails fast with an install hint;
# the module itself is imported again where actually needed.
try:
    import json5
    del json5
except ModuleNotFoundError:
    raise SystemExit("Error: pyjson5 not found. Please install it with `python -m pip install json5`")
import subprocess

# --gdb_compatible switches the build to alternate load addresses
# (see the `base` variables further down).
parser = ArgumentParser()
parser.add_argument('--gdb_compatible', action='store_true')
args = parser.parse_args()
# https://stackoverflow.com/questions/14989858/get-the-current-git-hash-in-a-python-script/14989911#14989911
def get_git_revision_hash() -> str:
    """Full hash of the current git HEAD, as an ASCII string."""
    out = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
    return out.decode('ascii').strip()
def get_git_revision_short_hash() -> str:
    """Abbreviated hash of the current git HEAD, as an ASCII string."""
    out = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'])
    return out.decode('ascii').strip()
out_buf = io.StringIO()
n = Writer(out_buf)
n.variable('ninja_required_version', '1.3')
n.newline()
n.variable('builddir', 'build')
n.variable('outdir', 'out')
n.newline()
devkitppc = os.environ.get("DEVKITPPC")
n.variable('cc', os.path.join(devkitppc, 'bin', 'powerpc-eabi-gcc'))
n.variable('cpp', os.path.join(devkitppc, 'bin', 'powerpc-eabi-g++'))
n.variable('port', 'port.py')
n.newline()
asflags = [
'-isystem', 'include',
'-isystem', 'payload',
'-isystem', 'vendor',
]
cflags_loader = [
'-fms-extensions',
'-fno-asynchronous-unwind-tables',
'-fplan9-extensions',
'-fshort-wchar',
'-isystem', 'include',
'-isystem', 'payload',
'-isystem', 'vendor',
'-Wall',
'-Werror=implicit-function-declaration',
'-Werror=incompatible-pointer-types',
'-Wextra',
'-Wno-packed-bitfield-compat',
f'-DGIT_HASH={get_git_revision_short_hash()}',
]
if args.gdb_compatible:
cflags_loader += ['-DGDB_COMPATIBLE=1']
cflags_payload = [
*cflags_loader,
'-fstack-protector-strong',
]
cppflags = [
'-fms-extensions',
'-fno-asynchronous-unwind-tables',
# '-fplan9-extensions',
'-fshort-wchar',
'-fstack-protector-strong',
'-isystem', 'include',
'-isystem', 'payload',
'-isystem', 'vendor',
'-Wall',
# '-Werror=implicit-function-declaration',
# '-Werror=incompatible-pointer-types',
'-Wextra',
'-Wno-packed-bitfield-compat',
f'-DGIT_HASH="{get_git_revision_short_hash()}"',
'-fno-exceptions',
'-fno-unwind-tables',
]
ldflags = [
'-nostdlib',
'-Wl,-n',
]
n.variable('asflags', ' '.join(asflags))
n.variable('cppflags', ' '.join(cppflags))
n.variable('ldflags', ' '.join(ldflags))
n.newline()
n.rule(
'as',
command = '$cc -MD -MT $out -MF $out.d $asflags -c $in -o $out',
depfile = '$out.d',
deps = 'gcc',
description = 'AS $out',
)
n.newline()
n.rule(
'incbin',
command = '$cc -DNAME=$name -DPATH=$path -c Incbin.S -o $out',
description = 'INCBIN $out',
)
n.newline()
n.rule(
'cc',
command = '$cc -MD -MT $out -MF $out.d $cflags -c $in -o $out',
depfile = '$out.d',
deps = 'gcc',
description = 'CC $out',
)
n.newline()
n.rule(
'cpp',
command = '$cpp -MD -MT $out -MF $out.d $cppflags -c $in -o $out',
depfile = '$out.d',
deps = 'gcc',
description = 'CPP $out',
)
n.newline()
n.rule(
'port',
command = f'{sys.executable} $port $region $in $out' + (' --base' if args.gdb_compatible else ''),
description = 'PORT $out'
)
n.newline()
ldparams = [
'-Wl,--defsym,base=$base',
'-Wl,--entry=$entry',
'-Wl,--oformat,$format',
'-Wl,-T,$script',
]
n.rule(
'ld',
command = '$cc $ldflags ' + ' '.join(ldparams) + ' $in -o $out',
description = 'LD $out',
)
n.newline()
code_in_files = {
'payload': [
os.path.join('payload', 'egg', 'core', 'eggColorFader.c'),
os.path.join('payload', 'egg', 'core', 'eggDisplay.S'),
os.path.join('payload', 'egg', 'core', 'eggDvdFile.c'),
os.path.join('payload', 'egg', 'core', 'eggEffectCreator.S'),
os.path.join('payload', 'egg', 'core', 'eggG3dUtil.S'),
os.path.join('payload', 'egg', 'core', 'eggHeap.c'),
os.path.join('payload', 'egg', 'core', 'eggSystem.c'),
os.path.join('payload', 'game', 'effect', 'Effect.S'),
os.path.join('payload', 'game', 'gfx', 'Camera.S'),
os.path.join('payload', 'game', 'gfx', 'CameraManager.S'),
os.path.join('payload', 'game', 'host_system', 'BootStrapScene.c'),
os.path.join('payload', 'game', 'host_system', 'RkSystem.S'),
os.path.join('payload', 'game', 'host_system', 'RkSystem.c'),
os.path.join('payload', 'game', 'host_system', 'SceneManager.S'),
os.path.join('payload', 'game', 'host_system', 'SceneManager.c'),
os.path.join('payload', 'game', 'item', 'ItemObjKouraAka.S'),
os.path.join('payload', 'game', 'item', 'ItemObjKouraAo.S'),
os.path.join('payload', 'game', 'item', 'ItemObjKouraMidori.S'),
os.path.join('payload', 'game', 'item', 'KartItem.S'),
os.path.join('payload', 'game', 'kart', 'KartMove.S'),
os.path.join('payload', 'game', 'kart', 'KartObject.S'),
os.path.join('payload', 'game', 'kart', 'KartObjectManager.S'),
os.path.join('payload', 'game', 'kart', 'KartObjectManager.c'),
os.path.join('payload', 'game', 'kart', 'KartParam.S'),
os.path.join('payload', 'game', 'kart', 'KartState.S'),
os.path.join('payload', 'game', 'kart', 'KartSub.S'),
os.path.join('payload', 'game', 'net', 'NetManager.S'),
os.path.join('payload', 'game', 'obj', 'ObjEffect.S'),
os.path.join('payload', 'game', 'obj', 'ObjManager.c'),
os.path.join('payload', 'game', 'obj', 'ObjPylon01.S'),
os.path.join('payload', 'game', 'obj', 'ObjPylon01.c'),
os.path.join('payload', 'game', 'race', 'Driver.S'),
os.path.join('payload', 'game', 'race', 'DriverManager.S'),
os.path.join('payload', 'game', 'race', 'DriverManager.c'),
os.path.join('payload', 'game', 'race', 'JugemManager.S'),
os.path.join('payload', 'game', 'race', 'JugemManager.c'),
os.path.join('payload', 'game', 'rel', 'Rel.S'),
os.path.join('payload', 'game', 'snd', 'DriverSound.S'),
os.path.join('payload', 'game', 'snd', 'KartSound.S'),
os.path.join('payload', 'game', 'snd', 'Snd.S'),
os.path.join('payload', 'game', 'system', 'BugCheck.c'),
os.path.join('payload', 'game', 'system', 'Console.c'),
os.path.join('payload', 'game', 'system', 'CourseMap.S'),
os.path.join('payload', 'game', 'system', 'DvdArchive.S'),
os.path.join('payload', 'game', 'system', 'FatalScene.c'),
os.path.join('payload', 'game', 'system', 'GhostFile.c'),
os.path.join('payload', 'game', 'system', 'HomeButton.S'),
os.path.join('payload', 'game', 'system', 'InputManager.S'),
os.path.join('payload', 'game', 'system', 'InputManager.c'),
os.path.join('payload', 'game', 'system', 'Mii.S'),
os.path.join('payload', 'game', 'system', 'MultiDvdArchive.S'),
os.path.join('payload', 'game', 'system', 'MultiDvdArchive.c'),
os.path.join('payload', 'game', 'system', 'NandManager.S'),
os.path.join('payload', 'game', 'system', 'RaceConfig.S'),
os.path.join('payload', 'game', 'system', 'RaceConfig.c'),
os.path.join('payload', 'game', 'system', 'RaceManager.S'),
os.path.join('payload', 'game', 'system', 'RaceManager.c'),
os.path.join('payload', 'game', 'system', 'ResourceManager.S'),
os.path.join('payload', 'game', 'system', 'ResourceManager.c'),
os.path.join('payload', 'game', 'system', 'SaveManager.c'),
os.path.join('payload', 'game', 'system', 'SceneCreatorDynamic.S'),
os.path.join('payload', 'game', 'system', 'SceneCreatorDynamic.c'),
os.path.join('payload', 'game', 'ui', 'ControlLoader.S'),
os.path.join('payload', 'game', 'ui', 'Font.S'),
os.path.join('payload', 'game', 'ui', 'Font.c'),
os.path.join('payload', 'game', 'ui', 'FontManager.c'),
os.path.join('payload', 'game', 'ui', 'GhostManagerPage.S'),
os.path.join('payload', 'game', 'ui', 'GhostManagerPage.c'),
os.path.join('payload', 'game', 'ui', 'GhostSelectButton.c'),
os.path.join('payload', 'game', 'ui', 'GhostSelectControl.c'),
os.path.join('payload', 'game', 'ui', 'Layout.S'),
os.path.join('payload', 'game', 'ui', 'License.S'),
os.path.join('payload', 'game', 'ui', 'License.c'),
os.path.join('payload', 'game', 'ui', 'LicenseSelectButton.c'),
os.path.join('payload', 'game', 'ui', 'LicenseSelectPage.c'),
os.path.join('payload', 'game', 'ui', 'LicenseSettingsPage.c'),
os.path.join('payload', 'game', 'ui', 'Map2DRenderer.c'),
os.path.join('payload', 'game', 'ui', 'MiiGroup.c'),
os.path.join('payload', 'game', 'ui', 'Model.S'),
os.path.join('payload', 'game', 'ui', 'Page.c'),
os.path.join('payload', 'game', 'ui', 'Save.S'),
os.path.join('payload', 'game', 'ui', 'SaveManagerProxy.S'),
os.path.join('payload', 'game', 'ui', 'Section.c'),
os.path.join('payload', 'game', 'ui', 'SectionManager.S'),
os.path.join('payload', 'game', 'ui', 'SectionManager.c'),
os.path.join('payload', 'game', 'ui', 'TabControl.c'),
os.path.join('payload', 'game', 'ui', 'TimeAttackGhostListPage.c'),
os.path.join('payload', 'game', 'ui', 'TimeAttackRulesPage.c'),
os.path.join('payload', 'game', 'ui', 'TimeAttackTopPage.S'),
os.path.join('payload', 'game', 'ui', 'TitlePage.S'),
os.path.join('payload', 'game', 'ui', 'TitlePage.c'),
os.path.join('payload', 'game', 'ui', 'UIAnimator.c'),
os.path.join('payload', 'game', 'ui', 'UIControl.c'),
os.path.join('payload', 'game', 'ui', 'Wipe.S'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlMenuBackButton.c'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlMenuInstructionText.c'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlMenuPageTitleText.c'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRace2DMap.S'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceBase.S'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceBase.c'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceInputDisplay.c'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceLap.c'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceNameBalloon.S'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceNameBalloon.c'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceSpeed.c'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceTime.S'),
os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceTime.c'),
os.path.join('payload', 'game', 'ui', 'page', 'CharacterSelectPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'CourseSelectPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'CupSelectPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'DemoPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'DriftSelectPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'MachineSelectPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'RaceMenuPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'RaceMenuPage.c'),
os.path.join('payload', 'game', 'ui', 'page', 'RacePage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'RacePage.c'),
os.path.join('payload', 'game', 'ui', 'page', 'SingleTopMenuPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'SingleTopMenuPage.c'),
os.path.join('payload', 'game', 'ui', 'page', 'TimeAttackSplitsPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'TimeAttackSplitsPage.c'),
os.path.join('payload', 'game', 'ui', 'page', 'TopMenuPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'TopMenuPage.c'),
os.path.join('payload', 'game', 'ui', 'page', 'VsMenuPage.S'),
os.path.join('payload', 'game', 'ui', 'page', 'VsMenuPage.c'),
os.path.join('payload', 'game', 'ui', 'page', 'VsRulesPage.c'),
os.path.join('payload', 'game', 'util', 'Input.S'),
os.path.join('payload', 'nw4r', 'db', 'dbAssert.S'),
os.path.join('payload', 'nw4r', 'db', 'dbException.S'),
os.path.join('payload', 'nw4r', 'db', 'dbException.c'),
os.path.join('payload', 'nw4r', 'g3d', 'g3d_resmat.c'),
os.path.join('payload', 'nw4r', 'g3d', 'g3dResFile.S'),
os.path.join('payload', 'nw4r', 'g3d', 'MSan.c'),
os.path.join('payload', 'nw4r', 'lyt', 'lyt_arcResourceAccessor.S'),
os.path.join('payload', 'nw4r', 'lyt', 'lyt_layout.S'),
os.path.join('payload', 'nw4r', 'snd', 'snd_DvdSoundArchive.S'),
os.path.join('payload', 'nw4r', 'ut', 'ut_DvdFileStream.S'),
os.path.join('payload', 'platform', 'string.c'),
os.path.join('payload', 'platform', 'wchar.c'),
os.path.join('payload', 'revolution', 'dvd.S'),
os.path.join('payload', 'revolution', 'dvd.c'),
os.path.join('payload', 'revolution', 'dvdex.c'),
os.path.join('payload', 'revolution', 'ios.S'),
os.path.join('payload', 'revolution', 'ios.c'),
os.path.join('payload', 'revolution', 'nand.c'),
os.path.join('payload', 'revolution', 'OS.S'),
os.path.join('payload', 'revolution', 'OS.c'),
os.path.join('payload', 'revolution', 'os', 'OSContext.S'),
os.path.join('payload', 'revolution', 'os', 'OSError.S'),
os.path.join('payload', 'revolution', 'os', 'OSError.c'),
os.path.join('payload', 'revolution', 'os', 'OSMemory.S'),
os.path.join('payload', 'revolution', 'os', 'OSThread.S'),
os.path.join('payload', 'revolution', 'os', 'OSThread.c'),
os.path.join('payload', 'sp', 'Fatal.c'),
os.path.join('payload', 'sp', 'FormattingCodes.c'),
os.path.join('payload', 'sp', 'FlameGraph.c'),
os.path.join('payload', 'sp', 'Host.c'),
os.path.join('payload', 'sp', 'IOSDolphin.c'),
# Keyboard module
os.path.join('payload', 'sp', 'keyboard', 'Keyboard.c'),
os.path.join('payload', 'sp', 'keyboard', 'SIKeyboard.c'),
os.path.join('payload', 'sp', 'keyboard', 'IOSKeyboard.c'),
#
os.path.join('payload', 'sp', 'Net.c'),
os.path.join('payload', 'sp', 'Patcher.c'),
os.path.join('payload', 'sp', 'Payload.c'),
# Security module
os.path.join('payload', 'sp', 'security', 'Memory.c'),
os.path.join('payload', 'sp', 'security', 'Stack.S'),
os.path.join('payload', 'sp', 'security', 'Stack.c'),
os.path.join('payload', 'sp', 'security', 'StackTrace.S'),
# Settings module
os.path.join('payload', 'sp', 'settings', 'BaseSettings.c'),
os.path.join('payload', 'sp', 'settings', 'ClientSettings.c'),
os.path.join('payload', 'sp', 'settings', 'IniReader.c'),
#
os.path.join('payload', 'sp', 'Slab.c'),
os.path.join('payload', 'sp', 'StackTrace.c'),
# Storage module
os.path.join('payload', 'sp', 'storage', 'FatStorage.c'),
os.path.join('payload', 'sp', 'storage', 'LogFile.c'),
os.path.join('payload', 'sp', 'storage', 'NetStorage.c'),
os.path.join('payload', 'sp', 'storage', 'NetStorageClient.c'),
os.path.join('payload', 'sp', 'storage', 'Sdi.c'),
os.path.join('payload', 'sp', 'storage', 'Storage.c'),
os.path.join('payload', 'sp', 'storage', 'Usb.c'),
os.path.join('payload', 'sp', 'storage', 'UsbStorage.c'),
#
os.path.join('payload', 'sp', 'Tcp.c'),
os.path.join('payload', 'sp', 'Yaz.c'),
os.path.join('vendor', 'arith64.c'),
os.path.join('vendor', 'ff', 'diskio.c'),
os.path.join('vendor', 'ff', 'ff.c'),
os.path.join('vendor', 'ff', 'fftime.c'),
os.path.join('vendor', 'ff', 'ffunicode.c'),
],
'loader': [
os.path.join('loader', 'Apploader.c'),
os.path.join('loader', 'Cache.S'),
os.path.join('loader', 'Console.c'),
os.path.join('loader', 'Delay.S'),
os.path.join('loader', 'Di.c'),
os.path.join('loader', 'Font.c'),
os.path.join('loader', 'Ios.c'),
os.path.join('loader', 'Loader.c'),
os.path.join('loader', 'Memcpy.c'),
os.path.join('loader', 'Memset.c'),
os.path.join('loader', 'Stack.c'),
os.path.join('loader', 'Start.S'),
os.path.join('loader', 'Strlen.c'),
os.path.join('loader', 'Vi.c'),
],
}
code_out_files = {}
for profile in ['DEBUG', 'RELEASE']:
code_out_files[profile] = {target: [] for target in code_in_files}
for target in code_in_files:
for in_file in code_in_files[target]:
_, ext = os.path.splitext(in_file)
for profile in ['DEBUG', 'RELEASE']:
out_file = os.path.join('$builddir', in_file + ('.o' if profile == 'RELEASE' else 'D.o'))
code_out_files[profile][target] += [out_file]
rule = {
'.S': 'as',
'.c': 'cc',
'.cpp': 'cpp',
}[ext]
cur_cflags = (cflags_loader if target == 'loader' else cflags_payload) + (['-O0', '-g', '-DSP_DEBUG'] if profile == 'DEBUG' else ['-O2', '-DSP_RELEASE'])
n.build(
out_file,
rule,
in_file,
variables = {
'cflags': ' '.join(cur_cflags),
},
)
n.newline()
for region in ['P', 'E', 'J', 'K']:
n.build(
os.path.join('$builddir', 'scripts', f'RMC{region}.ld'),
'port',
os.path.join('.', 'symbols.txt'),
variables = {
'region': region,
},
implicit = '$port',
)
n.newline()
for region in ['P', 'E', 'J', 'K']:
for fmt in ['binary', 'elf32-powerpc']:
for profile in ['DEBUG', 'RELEASE']:
suffix = 'D' if profile == 'DEBUG' else ''
extension = 'bin' if fmt == 'binary' else 'elf'
n.build(
os.path.join('$builddir', 'bin', f'payload{region}{suffix}.{extension}'),
'ld',
code_out_files[profile]['payload'],
variables = {
'base': {
'P': '0x8076db60' if not args.gdb_compatible else '0x809C4FA0',
'E': '0x80769400',
'J': '0x8076cca0',
'K': '0x8075bfe0',
}[region],
'entry': 'Payload_run',
'format': fmt,
'script': os.path.join('$builddir', 'scripts', f'RMC{region}.ld'),
},
implicit = os.path.join('$builddir', 'scripts', f'RMC{region}.ld'),
)
n.newline()
for region in ['P', 'E', 'J', 'K']:
for profile in ['DEBUG', 'RELEASE']:
suffix = 'D' if profile == 'DEBUG' else ''
out_file = os.path.join('$builddir', 'loader', f'payload{region}{suffix}.o')
n.build(
out_file,
'incbin',
os.path.join('$builddir', 'bin', f'payload{region}{suffix}.bin'),
variables = {
'name': f'payload{region}{suffix}',
'path': '/'.join(['$builddir', 'bin', f'payload{region}{suffix}.bin']),
},
implicit = 'Incbin.S',
)
code_out_files[profile]['loader'] += [out_file]
for profile in ['DEBUG', 'RELEASE']:
suffix = 'D' if profile == 'DEBUG' else ''
n.build(
os.path.join('$outdir', f'boot{suffix}.elf'),
'ld',
code_out_files[profile]['loader'],
variables = {
'base': '0x80910000' if not args.gdb_compatible else '0x80E50F90',
'entry': 'start',
'format': 'elf32-powerpc',
'script': os.path.join('loader', 'RMC.ld'),
},
implicit = os.path.join('loader', 'RMC.ld'),
)
n.newline()
n.variable('merge', os.path.join('.', 'merge.py'))
n.variable('wuj5', os.path.join('vendor', 'wuj5', 'wuj5.py'))
n.newline()
n.rule(
'merge',
command = f'{sys.executable} $merge $in -o $out',
description = 'MERGE $out',
)
n.newline()
n.rule(
'wuj5',
command = f'{sys.executable} $wuj5 encode $in -o $out',
description = 'WUJ5 $out',
)
n.newline()
n.rule(
'cp',
command = 'cp $in $out',
description = 'CP $out',
)
n.newline()
n.rule(
'szs',
command = f'{sys.executable} $wuj5 encode $szsin -o $out --retained $in $args',
description = 'SZS $out',
)
n.newline()
LANGUAGES = [
'E', # English (PAL)
'F', # French (PAL)
'G', # German
'I', # Italian
'J', # Japanese
'K', # Korean
'M', # Spanish (NTSC)
'Q', # French (NTSC)
'S', # Spanish (PAL)
'U', # English (NTSC)
'N', # Dutch
]
HUD_LANGUAGES = {
'E': 'E',
'F': 'F',
'G': 'G',
'I': 'I',
'J': 'E',
'K': 'E',
'M': 'S',
'Q': 'F',
'S': 'S',
'U': 'E',
'N': 'N',
}
asset_in_files = {
os.path.join('Scene', 'UI', 'CrashSP.szs'): [
os.path.join('fatal', 'blyt', 'Fatal.brlyt'),
os.path.join('fatal', 'font', 'sf_light_i8_utf16.brfnt'),
os.path.join('fatal', 'font', 'sf_medium_basic.brfnt'),
],
os.path.join('Scene', 'UI', 'FontSP_K.szs'): [
os.path.join('kart_font_korea.brfnt'),
os.path.join('tt_kart_font_rodan_ntlg_pro_b_K.brfnt'),
],
os.path.join('Scene', 'UI', 'FontSP_R.szs'): [
os.path.join('kart_kanji_font.brfnt'),
os.path.join('tt_kart_font_rodan_ntlg_pro_b_R.brfnt'),
],
os.path.join('Scene', 'UI', 'MenuOtherSP.szs'): [
os.path.join('button', 'ctrl', 'LicenseManagementButton.brctr.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in_after.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in_before.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_out.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_free_to_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fuchi_check_loop.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_select_to_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_01_ok.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_01_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_02_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_02_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_off.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_off_to_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in_after.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in_before.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_out.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_free_to_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fuchi_check_loop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_select_to_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_01_ok.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_01_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_02_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_02_stop.brlan.json5'),
os.path.join('control', 'blyt', 'common_w024_rule_icon.brlyt.json5'),
os.path.join('control', 'blyt', 'common_w076_license_icon_center.brlyt.json5'),
os.path.join('control', 'blyt', 'common_w201_setting_menu.brlyt.json5'),
os.path.join('control', 'ctrl', 'LicenseDisplay.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseManagement.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseSettingRadioBase.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseSettingRadioOption.brctr.json5'),
os.path.join('control', 'timg', 'tt_license_icon_004.tpl'),
],
os.path.join('Scene', 'UI', 'MenuSingleSP.szs'): [
os.path.join('button', 'blyt', 'common_w129_movie_button_single_top.brlyt.json5'),
os.path.join('button', 'ctrl', 'SingleTop.brctr.json5'),
os.path.join('button', 'ctrl', 'TimeAttackGhostListArrowLeft.brctr.json5'),
os.path.join('button', 'ctrl', 'TimeAttackGhostListArrowRight.brctr.json5'),
os.path.join('button', 'ctrl', 'TimeAttackGhostList.brctr.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_active_off.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_active_off_to_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_active_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_free_to_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_light_01_ok.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_light_01_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_light_02_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_light_02_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_select_to_free.brlan.json5'),
os.path.join('control', 'anim', 'friend_room_comment_container_center_to_right.brlan.json5'),
os.path.join('control', 'anim', 'friend_room_comment_container_hide.brlan.json5'),
os.path.join('control', 'anim', 'friend_room_comment_container_left_to_center.brlan.json5'),
os.path.join('control', 'anim', 'friend_room_comment_container_show.brlan.json5'),
os.path.join('control', 'blyt', 'common_w200_ghost_button.brlyt.json5'),
os.path.join('control', 'blyt', 'ghost_container.brlyt.json5'),
os.path.join('control', 'ctrl', 'GhostSelectBase.brctr.json5'),
os.path.join('control', 'ctrl', 'GhostSelectOption.brctr.json5'),
os.path.join('control', 'ctrl', 'TASettingRadioBase.brctr.json5'),
os.path.join('control', 'ctrl', 'TASettingRadioOption.brctr.json5'),
os.path.join('control', 'ctrl', 'TimeAttackGhostListPageNum.brctr.json5'),
os.path.join('control', 'ctrl', 'VSSettingRadioOption.brctr.json5'),
],
os.path.join('Scene', 'UI', 'RaceSP.szs'): [
os.path.join('button', 'blyt', 'common_w202_menu_compact.brlyt.json5'),
os.path.join('button', 'ctrl', 'AfterMenuBT.brctr.json5'),
os.path.join('button', 'ctrl', 'AfterMenuBTLast.brctr.json5'),
os.path.join('button', 'ctrl', 'AfterMenuEndConfirm.brctr.json5'),
os.path.join('button', 'ctrl', 'AfterMenuTimeAttack.brctr.json5'),
os.path.join('button', 'ctrl', 'AfterMenuVS.brctr.json5'),
os.path.join('button', 'ctrl', 'AfterMenuVSLast.brctr.json5'),
os.path.join('button', 'ctrl', 'PauseMenuGhostWatch.brctr.json5'),
os.path.join('button', 'ctrl', 'PauseMenuReplayTA.brctr.json5'),
os.path.join('button', 'ctrl', 'PauseMenuTimeAttack.brctr.json5'),
os.path.join('button', 'ctrl', 'PauseMenuVS.brctr.json5'),
# For in-race licence settings editor
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in_after.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in_before.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_out.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_free_to_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fuchi_check_loop.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_select_to_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_01_ok.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_01_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_02_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_02_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_off.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_off_to_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in_after.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in_before.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_out.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_free_to_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fuchi_check_loop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_select_to_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_01_ok.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_01_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_02_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_02_stop.brlan.json5'),
os.path.join('control', 'blyt', 'common_w024_rule_icon.brlyt.json5'),
os.path.join('control', 'blyt', 'common_w076_license_icon_center.brlyt.json5'),
os.path.join('control', 'blyt', 'common_w201_setting_menu.brlyt.json5'),
os.path.join('control', 'ctrl', 'LicenseDisplay.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseManagement.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseSettingRadioBase.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseSettingRadioOption.brctr.json5'),
os.path.join('control', 'timg', 'tt_license_icon_004.tpl'),
os.path.join('game_image', 'anim', 'game_image_speed_texture_pattern_0_9.brlan.json5'),
os.path.join('game_image', 'blyt', 'game_image_speed.brlyt.json5'),
os.path.join('game_image', 'blyt', 'InputDisplay.brlyt.json5'),
os.path.join('game_image', 'blyt', 'race_message_half.brlyt'),
os.path.join('game_image', 'ctrl', 'battle_total_point.brctr.json5'),
os.path.join('game_image', 'ctrl', 'InputDisplay.brctr.json5'),
os.path.join('game_image', 'ctrl', 'lap_number.brctr.json5'),
os.path.join('game_image', 'ctrl', 'position_multi.brctr.json5'),
os.path.join('game_image', 'ctrl', 'speed_number.brctr.json5'),
os.path.join('game_image', 'ctrl', 'time_number.brctr.json5'),
os.path.join('game_image', 'timg', 'basic_accel_off.tpl'),
os.path.join('game_image', 'timg', 'basic_accel_on.tpl'),
os.path.join('game_image', 'timg', 'basic_cstick_bg.tpl'),
os.path.join('game_image', 'timg', 'basic_cstick_center.tpl'),
os.path.join('game_image', 'timg', 'basic_dpad_down.tpl'),
os.path.join('game_image', 'timg', 'basic_dpad_left.tpl'),
os.path.join('game_image', 'timg', 'basic_dpad_off.tpl'),
os.path.join('game_image', 'timg', 'basic_dpad_right.tpl'),
os.path.join('game_image', 'timg', 'basic_dpad_up.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_bd_off.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_bd_on.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_l_off.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_l_on.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_r_off.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_r_on.tpl'),
os.path.join('game_image', 'timg', 'tt_d_number_3d_minus.tpl'),
os.path.join('game_image', 'timg', 'tt_d_number_3d_none.tpl'),
],
os.path.join('Scene', 'UI', 'TitleSP.szs'): [
os.path.join('button', 'blyt', 'common_w076_license_icon_center.brlyt.json5'),
os.path.join('button', 'ctrl', 'LicenseSelect.brctr.json5'),
os.path.join('button', 'ctrl', 'TopMenuMultiWaku.brctr.json5'),
os.path.join('button', 'ctrl', 'TopMenuSingleWaku.brctr.json5'),
os.path.join('button', 'timg', 'tt_license_icon_004.tpl'),
],
}
# Per-language asset archives: localized message files plus the HUD texture
# set selected by HUD_LANGUAGES (non-English HUD languages also carry their
# own lap/time art; position digits are always language-specific).
for language in LANGUAGES:
    hud_language = HUD_LANGUAGES[language]
    common_msg = os.path.join('message', f'Common_{language}.bmg.json5')
    menu_msg = os.path.join('message', f'Menu_{language}.bmg.json5')
    race_msg = os.path.join('message', f'Race_{language}.bmg.json5')
    if language != 'K':
        asset_in_files[os.path.join('Race', f'Common_{language}.szs')] = [
            f'jugemu_lap_{language}.brres',
            f'jugemu_lapf_{language}.brres',
        ]
    asset_in_files[os.path.join('Scene', 'UI', f'AwardSP_{language}.szs')] = [
        common_msg,
        menu_msg,
        race_msg,
    ]
    for menu in ('MenuMultiSP', 'MenuOtherSP', 'MenuSingleSP'):
        asset_in_files[os.path.join('Scene', 'UI', f'{menu}_{language}.szs')] = [
            common_msg,
            menu_msg,
        ]
    race_target = os.path.join('Scene', 'UI', f'RaceSP_{language}.szs')
    asset_in_files[race_target] = [
        os.path.join('game_image', 'timg', f'tt_speed_{hud_language}.tpl'),
        common_msg,
        menu_msg,
        race_msg,
    ]
    if hud_language != 'E':
        asset_in_files[race_target] += [
            os.path.join('game_image', 'timg', f'tt_lap_{hud_language}.tpl'),
            os.path.join('game_image', 'timg', f'tt_lap_{hud_language}_lap1.tpl'),
            os.path.join('game_image', 'timg', f'tt_lap_{hud_language}_lap2.tpl'),
            os.path.join('game_image', 'timg', f'tt_lap_{hud_language}_lap3.tpl'),
            os.path.join('game_image', 'timg', f'tt_time_{hud_language}.tpl'),
        ]
    # Position numbers 01..12, single-player and multiplayer variants
    # (outer loop over the number, inner over the base, as before).
    asset_in_files[race_target] += [
        os.path.join('game_image', 'timg', f'{base}_{hud_language}_{i:02d}.tpl')
        for i in range(1, 13)
        for base in ('tt_position_no_st_64x64', 'tt_multi_position_no_st_64x64')
    ]
    asset_in_files[os.path.join('Scene', 'UI', f'TitleSP_{language}.szs')] = [
        common_msg,
        menu_msg,
    ]
# Emit one conversion build edge per distinct asset, staging everything into
# $builddir/Shared.szs.d.  Several .szs targets can pull in the same asset;
# 'emitted' remembers which outputs already have a build edge so each
# conversion is generated exactly once.  (The previous version re-flattened
# every list in asset_out_files on each inner iteration to test membership,
# which was accidentally quadratic in the number of assets.)
asset_out_files = {target: [] for target in asset_in_files}
emitted = set()
for target in asset_in_files:
    for in_file in asset_in_files[target]:
        base, ext = os.path.splitext(in_file)
        # json5 descriptions are encoded to the binary format and drop the
        # .json5 suffix; every other asset keeps its extension.
        outext = {
            '.brfna': '.brfna',
            '.brfnt': '.brfnt',
            '.brlyt': '.brlyt',
            '.brres': '.brres',
            '.json5': '',
            '.tpl': '.tpl',
        }[ext]
        out_file = os.path.join('$builddir', 'Shared.szs.d', base + outext)
        basebase, baseext = os.path.splitext(base)
        is_new = out_file not in emitted
        if baseext == '.bmg':
            # BMG message files are first merged with their SP counterparts,
            # then the merged json5 is encoded below.
            merged_file = os.path.join('$builddir', 'merged', in_file)
            if is_new:
                n.build(
                    merged_file,
                    'merge',
                    [
                        os.path.join('assets', in_file),
                        os.path.join('assets', basebase.rsplit('_', 1)[0] + 'SP_U.bmg.json5'),
                        os.path.join('assets', basebase.replace('_', 'SP_') + '.bmg.json5'),
                    ],
                    implicit = '$merge',
                )
            in_file = merged_file
        else:
            in_file = os.path.join('assets', in_file)
        if is_new:
            rule = {
                '.brfna': 'cp',
                '.brfnt': 'cp',
                '.brlyt': 'cp',
                '.brres': 'cp',
                '.json5': 'wuj5',
                '.tpl': 'cp',
            }[ext]
            n.build(
                out_file,
                rule,
                in_file,
            )
            emitted.add(out_file)
        asset_out_files[target] += [out_file]
n.newline()
# Inside the packed archives every language-specific file must carry the
# stock (language-neutral or English) name; build basename -> stock-name
# mappings for the szs packer to apply.
renamed = {}
for language in LANGUAGES:
    renamed.update({
        f'jugemu_lap_{language}.brres': 'jugemu_lap.brres',
        f'jugemu_lapf_{language}.brres': 'jugemu_lapf.brres',
        f'Common_{language}.bmg': 'Common.bmg',
        f'Menu_{language}.bmg': 'Menu.bmg',
        f'Race_{language}.bmg': 'Race.bmg',
    })
for hud_language in HUD_LANGUAGES.values():
    renamed[f'tt_lap_{hud_language}.tpl'] = 'tt_lap_E.tpl'
    # NOTE(review): the mixed case ('Lap1' vs 'lap2'/'lap3') is deliberate
    # here — presumably it mirrors the stock archive's file names; confirm.
    renamed[f'tt_lap_{hud_language}_lap1.tpl'] = 'tt_lap_E_Lap1.tpl'
    renamed[f'tt_lap_{hud_language}_lap2.tpl'] = 'tt_lap_E_lap2.tpl'
    renamed[f'tt_lap_{hud_language}_lap3.tpl'] = 'tt_lap_E_lap3.tpl'
    renamed[f'tt_speed_{hud_language}.tpl'] = 'tt_speed.tpl'
    renamed[f'tt_time_{hud_language}.tpl'] = 'tt_time_E.tpl'
    for number in range(1, 13):
        for base in ('tt_position_no_st_64x64', 'tt_multi_position_no_st_64x64'):
            renamed[f'{base}_{hud_language}_{number:02d}.tpl'] = f'{base}_{number:02d}.tpl'
# Pack one .szs archive per target from the shared staging directory; $args
# forwards only the rename mappings that actually apply to this archive.
for target in asset_out_files:
    renames = {}
    for out_file in asset_out_files[target]:
        name = os.path.basename(out_file)
        if name in renamed:
            renames[name] = renamed[name]
    rename_args = ' '.join(f'--renamed {src} {dst}' for src, dst in renames.items())
    n.build(
        os.path.join('$outdir', 'disc', target),
        'szs',
        asset_out_files[target],
        variables = {
            'szsin': os.path.join('$builddir', 'Shared.szs.d'),
            'args': rename_args,
        },
    )
n.newline()
n.variable('configure', 'configure.py')
n.newline()
# Regenerate build.ninja whenever this script (or the vendored ninja_syntax
# helper) changes.  'generator = True' marks the rule as a build-file
# generator, which ninja treats specially (e.g. its output survives cleans).
n.rule(
    'configure',
    command = f'{sys.executable} $configure' + (' --gdb_compatible' if args.gdb_compatible else ''),
    generator = True,
)
n.build(
    'build.ninja',
    'configure',
    implicit = [
        '$configure',
        os.path.join('vendor', 'ninja_syntax.py'),
    ],
)
# All writer output went into the in-memory buffer; flush it to disk in one
# go at the very end so a crash mid-generation cannot leave a truncated
# build.ninja behind.
with open('build.ninja', 'w') as out_file:
    out_file.write(out_buf.getvalue())
n.close()
|
#!/usr/bin/env python3
# Build-system generator: run directly to (re)generate build.ninja.
from argparse import ArgumentParser
import io
import os, sys
from vendor.ninja_syntax import Writer
# Probe for the json5 parser up front so the failure is a clear message
# instead of an obscure error later when the generated rules run wuj5.
try:
    import json5
    del json5  # imported only to check availability; not used directly here
except ModuleNotFoundError:
    # NOTE(review): the message says "pyjson5" but installs the "json5"
    # package (which provides the `json5` module) — confirm which is meant.
    raise SystemExit("Error: pyjson5 not found. Please install it with `python -m pip install json5`")
import subprocess
parser = ArgumentParser()
# --gdb_compatible shifts the payload/loader link base addresses and passes
# --base to port.py (see the 'port' rule and the ld builds below).
parser.add_argument('--gdb_compatible', action='store_true')
args = parser.parse_args()
# https://stackoverflow.com/questions/14989858/get-the-current-git-hash-in-a-python-script/14989911#14989911
def get_git_revision_hash() -> str:
    """Return the full hash of the currently checked-out git commit."""
    raw = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
    return raw.decode('ascii').strip()
def get_git_revision_short_hash() -> str:
    """Return the abbreviated hash of the currently checked-out git commit."""
    raw = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'])
    return raw.decode('ascii').strip()
# Accumulate all ninja output in memory; it is written to disk at the end.
out_buf = io.StringIO()
n = Writer(out_buf)
n.variable('ninja_required_version', '1.3')
n.newline()
for var_name, var_value in (('builddir', 'build'), ('outdir', 'out')):
    n.variable(var_name, var_value)
n.newline()
# Locate the devkitPPC cross-toolchain.  Fail with a clear message when the
# environment variable is unset — otherwise os.path.join(None, ...) raises a
# confusing TypeError.
devkitppc = os.environ.get("DEVKITPPC")
if devkitppc is None:
    raise SystemExit("Error: DEVKITPPC is not set. Please install devkitPPC and export DEVKITPPC.")
n.variable('cc', os.path.join(devkitppc, 'bin', 'powerpc-eabi-gcc'))
n.variable('cpp', os.path.join(devkitppc, 'bin', 'powerpc-eabi-g++'))
n.variable('port', 'port.py')
n.newline()
# Flags for assembling .S sources (the 'as' rule).
asflags = [
    '-isystem', 'include',
    '-isystem', 'payload',
    '-isystem', 'vendor',
]
# C flags shared by the loader (and, via cflags_payload, by the payload).
cflags_loader = [
    '-fms-extensions',
    '-fno-asynchronous-unwind-tables',
    '-fplan9-extensions',
    '-fshort-wchar',
    '-isystem', 'include',
    '-isystem', 'payload',
    '-isystem', 'vendor',
    '-Wall',
    '-Werror=implicit-function-declaration',
    '-Werror=incompatible-pointer-types',
    '-Wextra',
    '-Wno-packed-bitfield-compat',
    # NOTE(review): unlike cppflags below, GIT_HASH is defined without
    # quotes here — confirm the C code stringizes the macro itself.
    f'-DGIT_HASH={get_git_revision_short_hash()}',
]
if args.gdb_compatible:
    cflags_loader += ['-DGDB_COMPATIBLE=1']
# The payload additionally enables stack-smashing protection.
cflags_payload = [
    *cflags_loader,
    '-fstack-protector-strong',
]
# C++ flags (the 'cpp' rule); roughly cflags_payload minus the C-only
# extensions/warnings, plus no-exceptions/no-unwind-tables.
cppflags = [
    '-fms-extensions',
    '-fno-asynchronous-unwind-tables',
    # '-fplan9-extensions',
    '-fshort-wchar',
    '-fstack-protector-strong',
    '-isystem', 'include',
    '-isystem', 'payload',
    '-isystem', 'vendor',
    '-Wall',
    # '-Werror=implicit-function-declaration',
    # '-Werror=incompatible-pointer-types',
    '-Wextra',
    '-Wno-packed-bitfield-compat',
    f'-DGIT_HASH="{get_git_revision_short_hash()}"',
    '-fno-exceptions',
    '-fno-unwind-tables',
]
# Freestanding link: no standard libraries, and -n (no page alignment).
ldflags = [
    '-nostdlib',
    '-Wl,-n',
]
n.variable('asflags', ' '.join(asflags))
n.variable('cppflags', ' '.join(cppflags))
n.variable('ldflags', ' '.join(ldflags))
n.newline()
# Assemble .S files; -MD/-MT/-MF emit a gcc-style depfile for ninja's
# header-dependency tracking (deps = 'gcc').
n.rule(
    'as',
    command = '$cc -MD -MT $out -MF $out.d $asflags -c $in -o $out',
    depfile = '$out.d',
    deps = 'gcc',
    description = 'AS $out',
)
n.newline()
# Embed an arbitrary binary file into an object file via the Incbin.S stub;
# $name/$path are provided per build edge.
n.rule(
    'incbin',
    command = '$cc -DNAME=$name -DPATH=$path -c Incbin.S -o $out',
    description = 'INCBIN $out',
)
n.newline()
# Compile C; $cflags is set per build edge (loader vs payload, debug vs
# release — see the compile loop below).
n.rule(
    'cc',
    command = '$cc -MD -MT $out -MF $out.d $cflags -c $in -o $out',
    depfile = '$out.d',
    deps = 'gcc',
    description = 'CC $out',
)
n.newline()
# Compile C++.
n.rule(
    'cpp',
    command = '$cpp -MD -MT $out -MF $out.d $cppflags -c $in -o $out',
    depfile = '$out.d',
    deps = 'gcc',
    description = 'CPP $out',
)
n.newline()
# Generate a region-specific linker script from symbols.txt via port.py.
n.rule(
    'port',
    command = f'{sys.executable} $port $region $in $out' + (' --base' if args.gdb_compatible else ''),
    description = 'PORT $out'
)
n.newline()
# Per-edge link parameters: base address, entry point, output format and
# linker script are all supplied as ninja variables.
ldparams = [
    '-Wl,--defsym,base=$base',
    '-Wl,--entry=$entry',
    '-Wl,--oformat,$format',
    '-Wl,-T,$script',
]
n.rule(
    'ld',
    command = '$cc $ldflags ' + ' '.join(ldparams) + ' $in -o $out',
    description = 'LD $out',
)
n.newline()
# Source files for each linked component:
#   'payload' - the in-game code patch, linked per region (see the payload
#               ld builds below); .S files are assembly hooks/thunks, .c
#               files are C implementations.
#   'loader'  - the boot stub that embeds the payload binaries (via the
#               incbin builds) and is linked into boot.elf.
code_in_files = {
    'payload': [
        os.path.join('payload', 'egg', 'core', 'eggColorFader.c'),
        os.path.join('payload', 'egg', 'core', 'eggDisplay.S'),
        os.path.join('payload', 'egg', 'core', 'eggDvdFile.c'),
        os.path.join('payload', 'egg', 'core', 'eggEffectCreator.S'),
        os.path.join('payload', 'egg', 'core', 'eggG3dUtil.S'),
        os.path.join('payload', 'egg', 'core', 'eggHeap.c'),
        os.path.join('payload', 'egg', 'core', 'eggSystem.c'),
        os.path.join('payload', 'game', 'effect', 'Effect.S'),
        os.path.join('payload', 'game', 'gfx', 'Camera.S'),
        os.path.join('payload', 'game', 'gfx', 'CameraManager.S'),
        os.path.join('payload', 'game', 'host_system', 'BootStrapScene.c'),
        os.path.join('payload', 'game', 'host_system', 'RkSystem.S'),
        os.path.join('payload', 'game', 'host_system', 'RkSystem.c'),
        os.path.join('payload', 'game', 'host_system', 'SceneManager.S'),
        os.path.join('payload', 'game', 'host_system', 'SceneManager.c'),
        os.path.join('payload', 'game', 'item', 'ItemObjKouraAka.S'),
        os.path.join('payload', 'game', 'item', 'ItemObjKouraAo.S'),
        os.path.join('payload', 'game', 'item', 'ItemObjKouraMidori.S'),
        os.path.join('payload', 'game', 'item', 'KartItem.S'),
        os.path.join('payload', 'game', 'kart', 'KartMove.S'),
        os.path.join('payload', 'game', 'kart', 'KartObject.S'),
        os.path.join('payload', 'game', 'kart', 'KartObjectManager.S'),
        os.path.join('payload', 'game', 'kart', 'KartObjectManager.c'),
        os.path.join('payload', 'game', 'kart', 'KartParam.S'),
        os.path.join('payload', 'game', 'kart', 'KartState.S'),
        os.path.join('payload', 'game', 'kart', 'KartSub.S'),
        os.path.join('payload', 'game', 'net', 'NetManager.S'),
        os.path.join('payload', 'game', 'obj', 'ObjEffect.S'),
        os.path.join('payload', 'game', 'obj', 'ObjManager.c'),
        os.path.join('payload', 'game', 'obj', 'ObjPylon01.S'),
        os.path.join('payload', 'game', 'obj', 'ObjPylon01.c'),
        os.path.join('payload', 'game', 'race', 'Driver.S'),
        os.path.join('payload', 'game', 'race', 'DriverManager.S'),
        os.path.join('payload', 'game', 'race', 'DriverManager.c'),
        os.path.join('payload', 'game', 'race', 'JugemManager.S'),
        os.path.join('payload', 'game', 'race', 'JugemManager.c'),
        os.path.join('payload', 'game', 'rel', 'Rel.S'),
        os.path.join('payload', 'game', 'snd', 'DriverSound.S'),
        os.path.join('payload', 'game', 'snd', 'KartSound.S'),
        os.path.join('payload', 'game', 'snd', 'Snd.S'),
        os.path.join('payload', 'game', 'system', 'BugCheck.c'),
        os.path.join('payload', 'game', 'system', 'Console.c'),
        os.path.join('payload', 'game', 'system', 'CourseMap.S'),
        os.path.join('payload', 'game', 'system', 'DvdArchive.S'),
        os.path.join('payload', 'game', 'system', 'FatalScene.c'),
        os.path.join('payload', 'game', 'system', 'GhostFile.c'),
        os.path.join('payload', 'game', 'system', 'HomeButton.S'),
        os.path.join('payload', 'game', 'system', 'InputManager.S'),
        os.path.join('payload', 'game', 'system', 'InputManager.c'),
        os.path.join('payload', 'game', 'system', 'Mii.S'),
        os.path.join('payload', 'game', 'system', 'MultiDvdArchive.S'),
        os.path.join('payload', 'game', 'system', 'MultiDvdArchive.c'),
        os.path.join('payload', 'game', 'system', 'NandManager.S'),
        os.path.join('payload', 'game', 'system', 'RaceConfig.S'),
        os.path.join('payload', 'game', 'system', 'RaceConfig.c'),
        os.path.join('payload', 'game', 'system', 'RaceManager.S'),
        os.path.join('payload', 'game', 'system', 'RaceManager.c'),
        os.path.join('payload', 'game', 'system', 'ResourceManager.S'),
        os.path.join('payload', 'game', 'system', 'ResourceManager.c'),
        os.path.join('payload', 'game', 'system', 'SaveManager.c'),
        os.path.join('payload', 'game', 'system', 'SceneCreatorDynamic.S'),
        os.path.join('payload', 'game', 'system', 'SceneCreatorDynamic.c'),
        os.path.join('payload', 'game', 'ui', 'ControlLoader.S'),
        os.path.join('payload', 'game', 'ui', 'Font.S'),
        os.path.join('payload', 'game', 'ui', 'Font.c'),
        os.path.join('payload', 'game', 'ui', 'FontManager.c'),
        os.path.join('payload', 'game', 'ui', 'GhostManagerPage.S'),
        os.path.join('payload', 'game', 'ui', 'GhostManagerPage.c'),
        os.path.join('payload', 'game', 'ui', 'GhostSelectButton.c'),
        os.path.join('payload', 'game', 'ui', 'GhostSelectControl.c'),
        os.path.join('payload', 'game', 'ui', 'Layout.S'),
        os.path.join('payload', 'game', 'ui', 'License.S'),
        os.path.join('payload', 'game', 'ui', 'License.c'),
        os.path.join('payload', 'game', 'ui', 'LicenseSelectButton.c'),
        os.path.join('payload', 'game', 'ui', 'LicenseSelectPage.c'),
        os.path.join('payload', 'game', 'ui', 'LicenseSettingsPage.c'),
        os.path.join('payload', 'game', 'ui', 'Map2DRenderer.c'),
        os.path.join('payload', 'game', 'ui', 'MiiGroup.c'),
        os.path.join('payload', 'game', 'ui', 'Model.S'),
        os.path.join('payload', 'game', 'ui', 'Page.c'),
        os.path.join('payload', 'game', 'ui', 'Save.S'),
        os.path.join('payload', 'game', 'ui', 'SaveManagerProxy.S'),
        os.path.join('payload', 'game', 'ui', 'Section.c'),
        os.path.join('payload', 'game', 'ui', 'SectionManager.S'),
        os.path.join('payload', 'game', 'ui', 'SectionManager.c'),
        os.path.join('payload', 'game', 'ui', 'TabControl.c'),
        os.path.join('payload', 'game', 'ui', 'TimeAttackGhostListPage.c'),
        os.path.join('payload', 'game', 'ui', 'TimeAttackRulesPage.c'),
        os.path.join('payload', 'game', 'ui', 'TimeAttackTopPage.S'),
        os.path.join('payload', 'game', 'ui', 'TitlePage.S'),
        os.path.join('payload', 'game', 'ui', 'TitlePage.c'),
        os.path.join('payload', 'game', 'ui', 'UIAnimator.c'),
        os.path.join('payload', 'game', 'ui', 'UIControl.c'),
        os.path.join('payload', 'game', 'ui', 'Wipe.S'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlMenuBackButton.c'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlMenuInstructionText.c'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlMenuPageTitleText.c'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRace2DMap.S'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceBase.S'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceBase.c'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceInputDisplay.c'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceLap.c'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceNameBalloon.S'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceNameBalloon.c'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceSpeed.c'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceTime.S'),
        os.path.join('payload', 'game', 'ui', 'ctrl', 'CtrlRaceTime.c'),
        os.path.join('payload', 'game', 'ui', 'page', 'CharacterSelectPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'CourseSelectPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'CupSelectPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'DemoPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'DriftSelectPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'MachineSelectPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'RaceMenuPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'RaceMenuPage.c'),
        os.path.join('payload', 'game', 'ui', 'page', 'RacePage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'RacePage.c'),
        os.path.join('payload', 'game', 'ui', 'page', 'SingleTopMenuPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'SingleTopMenuPage.c'),
        os.path.join('payload', 'game', 'ui', 'page', 'TimeAttackSplitsPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'TimeAttackSplitsPage.c'),
        os.path.join('payload', 'game', 'ui', 'page', 'TopMenuPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'TopMenuPage.c'),
        os.path.join('payload', 'game', 'ui', 'page', 'VsMenuPage.S'),
        os.path.join('payload', 'game', 'ui', 'page', 'VsMenuPage.c'),
        os.path.join('payload', 'game', 'ui', 'page', 'VsRulesPage.c'),
        os.path.join('payload', 'game', 'util', 'Input.S'),
        os.path.join('payload', 'nw4r', 'db', 'dbAssert.S'),
        os.path.join('payload', 'nw4r', 'db', 'dbException.S'),
        os.path.join('payload', 'nw4r', 'db', 'dbException.c'),
        os.path.join('payload', 'nw4r', 'g3d', 'g3d_resmat.c'),
        os.path.join('payload', 'nw4r', 'g3d', 'g3dResFile.S'),
        os.path.join('payload', 'nw4r', 'g3d', 'MSan.c'),
        os.path.join('payload', 'nw4r', 'lyt', 'lyt_arcResourceAccessor.S'),
        os.path.join('payload', 'nw4r', 'lyt', 'lyt_layout.S'),
        os.path.join('payload', 'nw4r', 'snd', 'snd_DvdSoundArchive.S'),
        os.path.join('payload', 'nw4r', 'ut', 'ut_DvdFileStream.S'),
        os.path.join('payload', 'platform', 'string.c'),
        os.path.join('payload', 'platform', 'wchar.c'),
        os.path.join('payload', 'revolution', 'dvd.S'),
        os.path.join('payload', 'revolution', 'dvd.c'),
        os.path.join('payload', 'revolution', 'dvdex.c'),
        os.path.join('payload', 'revolution', 'ios.S'),
        os.path.join('payload', 'revolution', 'ios.c'),
        os.path.join('payload', 'revolution', 'nand.c'),
        os.path.join('payload', 'revolution', 'OS.S'),
        os.path.join('payload', 'revolution', 'OS.c'),
        os.path.join('payload', 'revolution', 'os', 'OSContext.S'),
        os.path.join('payload', 'revolution', 'os', 'OSError.S'),
        os.path.join('payload', 'revolution', 'os', 'OSError.c'),
        os.path.join('payload', 'revolution', 'os', 'OSMemory.S'),
        os.path.join('payload', 'revolution', 'os', 'OSThread.S'),
        os.path.join('payload', 'revolution', 'os', 'OSThread.c'),
        os.path.join('payload', 'sp', 'Fatal.c'),
        os.path.join('payload', 'sp', 'FormattingCodes.c'),
        os.path.join('payload', 'sp', 'FlameGraph.c'),
        os.path.join('payload', 'sp', 'Host.c'),
        os.path.join('payload', 'sp', 'IOSDolphin.c'),
        # Keyboard module
        os.path.join('payload', 'sp', 'keyboard', 'Keyboard.c'),
        os.path.join('payload', 'sp', 'keyboard', 'SIKeyboard.c'),
        os.path.join('payload', 'sp', 'keyboard', 'IOSKeyboard.c'),
        #
        os.path.join('payload', 'sp', 'Net.c'),
        os.path.join('payload', 'sp', 'Patcher.c'),
        os.path.join('payload', 'sp', 'Payload.c'),
        # Security module
        os.path.join('payload', 'sp', 'security', 'Memory.c'),
        os.path.join('payload', 'sp', 'security', 'Stack.S'),
        os.path.join('payload', 'sp', 'security', 'Stack.c'),
        os.path.join('payload', 'sp', 'security', 'StackTrace.S'),
        # Settings module
        os.path.join('payload', 'sp', 'settings', 'BaseSettings.c'),
        os.path.join('payload', 'sp', 'settings', 'ClientSettings.c'),
        os.path.join('payload', 'sp', 'settings', 'IniReader.c'),
        #
        os.path.join('payload', 'sp', 'Slab.c'),
        os.path.join('payload', 'sp', 'StackTrace.c'),
        # Storage module
        os.path.join('payload', 'sp', 'storage', 'FatStorage.c'),
        os.path.join('payload', 'sp', 'storage', 'LogFile.c'),
        os.path.join('payload', 'sp', 'storage', 'NetStorage.c'),
        os.path.join('payload', 'sp', 'storage', 'NetStorageClient.c'),
        os.path.join('payload', 'sp', 'storage', 'Sdi.c'),
        os.path.join('payload', 'sp', 'storage', 'Storage.c'),
        os.path.join('payload', 'sp', 'storage', 'Usb.c'),
        os.path.join('payload', 'sp', 'storage', 'UsbStorage.c'),
        #
        os.path.join('payload', 'sp', 'Tcp.c'),
        os.path.join('payload', 'sp', 'Yaz.c'),
        os.path.join('vendor', 'arith64.c'),
        os.path.join('vendor', 'ff', 'diskio.c'),
        os.path.join('vendor', 'ff', 'ff.c'),
        os.path.join('vendor', 'ff', 'fftime.c'),
        os.path.join('vendor', 'ff', 'ffunicode.c'),
    ],
    'loader': [
        os.path.join('loader', 'Apploader.c'),
        os.path.join('loader', 'Cache.S'),
        os.path.join('loader', 'Console.c'),
        os.path.join('loader', 'Delay.S'),
        os.path.join('loader', 'Di.c'),
        os.path.join('loader', 'Font.c'),
        os.path.join('loader', 'Ios.c'),
        os.path.join('loader', 'Loader.c'),
        os.path.join('loader', 'Memcpy.c'),
        os.path.join('loader', 'Memset.c'),
        os.path.join('loader', 'Stack.c'),
        os.path.join('loader', 'Start.S'),
        os.path.join('loader', 'Strlen.c'),
        os.path.join('loader', 'Vi.c'),
    ],
}
# Compile every source twice: DEBUG objects get a 'D.o' suffix, -O0 -g and
# SP_DEBUG; RELEASE objects get '.o', -O2 and SP_RELEASE.  The loader uses
# cflags_loader, everything else cflags_payload.
COMPILE_RULES = {
    '.S': 'as',
    '.c': 'cc',
    '.cpp': 'cpp',
}
PROFILE_SUFFIX = {'DEBUG': 'D.o', 'RELEASE': '.o'}
PROFILE_FLAGS = {
    'DEBUG': ['-O0', '-g', '-DSP_DEBUG'],
    'RELEASE': ['-O2', '-DSP_RELEASE'],
}
code_out_files = {
    profile: {target: [] for target in code_in_files}
    for profile in ('DEBUG', 'RELEASE')
}
for target, in_files in code_in_files.items():
    base_cflags = cflags_loader if target == 'loader' else cflags_payload
    for in_file in in_files:
        ext = os.path.splitext(in_file)[1]
        for profile in ('DEBUG', 'RELEASE'):
            out_file = os.path.join('$builddir', in_file + PROFILE_SUFFIX[profile])
            code_out_files[profile][target].append(out_file)
            n.build(
                out_file,
                COMPILE_RULES[ext],
                in_file,
                variables = {
                    'cflags': ' '.join(base_cflags + PROFILE_FLAGS[profile]),
                },
            )
n.newline()
# Generate one linker script per game region from the shared symbol list.
symbols_file = os.path.join('.', 'symbols.txt')
for region in ('P', 'E', 'J', 'K'):
    region_script = os.path.join('$builddir', 'scripts', f'RMC{region}.ld')
    n.build(
        region_script,
        'port',
        symbols_file,
        variables = {'region': region},
        implicit = '$port',
    )
n.newline()
# Link the payload per region, in both raw-binary and ELF form, for both
# profiles.  Base addresses are region-specific (PAL moves when building
# for GDB compatibility).
PAYLOAD_BASES = {
    'P': '0x8076db60' if not args.gdb_compatible else '0x809C4FA0',
    'E': '0x80769400',
    'J': '0x8076cca0',
    'K': '0x8075bfe0',
}
for region in ('P', 'E', 'J', 'K'):
    region_script = os.path.join('$builddir', 'scripts', f'RMC{region}.ld')
    for fmt in ('binary', 'elf32-powerpc'):
        extension = 'bin' if fmt == 'binary' else 'elf'
        for profile in ('DEBUG', 'RELEASE'):
            suffix = 'D' if profile == 'DEBUG' else ''
            n.build(
                os.path.join('$builddir', 'bin', f'payload{region}{suffix}.{extension}'),
                'ld',
                code_out_files[profile]['payload'],
                variables = {
                    'base': PAYLOAD_BASES[region],
                    'entry': 'Payload_run',
                    'format': fmt,
                    'script': region_script,
                },
                implicit = region_script,
            )
n.newline()
# Embed each payload binary into an object file that gets linked into the
# loader, so the loader carries every region/profile variant.
for region in ('P', 'E', 'J', 'K'):
    for profile in ('DEBUG', 'RELEASE'):
        stem = f'payload{region}D' if profile == 'DEBUG' else f'payload{region}'
        obj = os.path.join('$builddir', 'loader', f'{stem}.o')
        n.build(
            obj,
            'incbin',
            os.path.join('$builddir', 'bin', f'{stem}.bin'),
            variables = {
                'name': stem,
                # The path is spliced into Incbin.S, so it always uses
                # forward slashes regardless of the host OS.
                'path': f'$builddir/bin/{stem}.bin',
            },
            implicit = 'Incbin.S',
        )
        code_out_files[profile]['loader'].append(obj)
# Link the loader into boot.elf (bootD.elf for the debug profile).
loader_script = os.path.join('loader', 'RMC.ld')
for profile in ('DEBUG', 'RELEASE'):
    suffix = '' if profile == 'RELEASE' else 'D'
    n.build(
        os.path.join('$outdir', f'boot{suffix}.elf'),
        'ld',
        code_out_files[profile]['loader'],
        variables = {
            'base': '0x80E50F90' if args.gdb_compatible else '0x80910000',
            'entry': 'start',
            'format': 'elf32-powerpc',
            'script': loader_script,
        },
        implicit = loader_script,
    )
n.newline()
n.variable('merge', os.path.join('.', 'merge.py'))
n.variable('wuj5', os.path.join('vendor', 'wuj5', 'wuj5.py'))
n.newline()
# Merge several BMG message json5 inputs into one file (used for the
# per-language message builds above).
n.rule(
    'merge',
    command = f'{sys.executable} $merge $in -o $out',
    description = 'MERGE $out',
)
n.newline()
# Encode a json5 asset description into its binary game format.
n.rule(
    'wuj5',
    command = f'{sys.executable} $wuj5 encode $in -o $out',
    description = 'WUJ5 $out',
)
n.newline()
# Plain copy for assets that are already in their final binary format.
n.rule(
    'cp',
    command = 'cp $in $out',
    description = 'CP $out',
)
n.newline()
# Pack the staged directory $szsin into a .szs archive, retaining only the
# files listed in $in; $args carries optional --renamed mappings.
n.rule(
    'szs',
    command = f'{sys.executable} $wuj5 encode $szsin -o $out --retained $in $args',
    description = 'SZS $out',
)
n.newline()
# Language codes used by the game's localized message/asset files.
LANGUAGES = [
    'E', # English (PAL)
    'F', # French (PAL)
    'G', # German
    'I', # Italian
    'J', # Japanese
    'K', # Korean
    'M', # Spanish (NTSC)
    'Q', # French (NTSC)
    'S', # Spanish (PAL)
    'U', # English (NTSC)
    'N', # Dutch
]
# Maps each language code to the language whose HUD textures it uses;
# languages without dedicated HUD art use the English ('E') set (see the
# hud_language handling in the per-language asset loop).
HUD_LANGUAGES = {
    'E': 'E',
    'F': 'F',
    'G': 'G',
    'I': 'I',
    'J': 'E',
    'K': 'E',
    'M': 'S',
    'Q': 'F',
    'S': 'S',
    'U': 'E',
    'N': 'N',
}
asset_in_files = {
os.path.join('Scene', 'UI', 'CrashSP.szs'): [
os.path.join('fatal', 'blyt', 'Fatal.brlyt'),
os.path.join('fatal', 'font', 'sf_light_i8_utf16.brfnt'),
os.path.join('fatal', 'font', 'sf_medium_basic.brfnt'),
],
os.path.join('Scene', 'UI', 'FontSP_K.szs'): [
os.path.join('kart_font_korea.brfnt'),
os.path.join('tt_kart_font_rodan_ntlg_pro_b_K.brfnt'),
],
os.path.join('Scene', 'UI', 'FontSP_R.szs'): [
os.path.join('kart_kanji_font.brfnt'),
os.path.join('tt_kart_font_rodan_ntlg_pro_b_R.brfnt'),
],
os.path.join('Scene', 'UI', 'MenuOtherSP.szs'): [
os.path.join('button', 'ctrl', 'LicenseManagementButton.brctr.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in_after.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in_before.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_out.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_free_to_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fuchi_check_loop.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_select_to_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_01_ok.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_01_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_02_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_02_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_off.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_off_to_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in_after.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in_before.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_out.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_free_to_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fuchi_check_loop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_select_to_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_01_ok.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_01_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_02_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_02_stop.brlan.json5'),
os.path.join('control', 'blyt', 'common_w024_rule_icon.brlyt.json5'),
os.path.join('control', 'blyt', 'common_w076_license_icon_center.brlyt.json5'),
os.path.join('control', 'blyt', 'common_w201_setting_menu.brlyt.json5'),
os.path.join('control', 'ctrl', 'LicenseDisplay.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseManagement.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseSettingRadioBase.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseSettingRadioOption.brctr.json5'),
os.path.join('control', 'timg', 'tt_license_icon_004.tpl'),
],
os.path.join('Scene', 'UI', 'MenuSingleSP.szs'): [
os.path.join('button', 'blyt', 'common_w129_movie_button_single_top.brlyt.json5'),
os.path.join('button', 'ctrl', 'SingleTop.brctr.json5'),
os.path.join('button', 'ctrl', 'TimeAttackGhostListArrowLeft.brctr.json5'),
os.path.join('button', 'ctrl', 'TimeAttackGhostListArrowRight.brctr.json5'),
os.path.join('button', 'ctrl', 'TimeAttackGhostList.brctr.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_active_off.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_active_off_to_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_active_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_free_to_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_light_01_ok.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_light_01_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_light_02_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_light_02_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w200_ghost_button_select_to_free.brlan.json5'),
os.path.join('control', 'anim', 'friend_room_comment_container_center_to_right.brlan.json5'),
os.path.join('control', 'anim', 'friend_room_comment_container_hide.brlan.json5'),
os.path.join('control', 'anim', 'friend_room_comment_container_left_to_center.brlan.json5'),
os.path.join('control', 'anim', 'friend_room_comment_container_show.brlan.json5'),
os.path.join('control', 'blyt', 'common_w200_ghost_button.brlyt.json5'),
os.path.join('control', 'blyt', 'ghost_container.brlyt.json5'),
os.path.join('control', 'ctrl', 'GhostSelectBase.brctr.json5'),
os.path.join('control', 'ctrl', 'GhostSelectOption.brctr.json5'),
os.path.join('control', 'ctrl', 'TASettingRadioBase.brctr.json5'),
os.path.join('control', 'ctrl', 'TASettingRadioOption.brctr.json5'),
os.path.join('control', 'ctrl', 'TimeAttackGhostListPageNum.brctr.json5'),
os.path.join('control', 'ctrl', 'VSSettingRadioOption.brctr.json5'),
],
os.path.join('Scene', 'UI', 'RaceSP.szs'): [
os.path.join('button', 'blyt', 'common_w202_menu_compact.brlyt.json5'),
os.path.join('button', 'ctrl', 'AfterMenuBT.brctr.json5'),
os.path.join('button', 'ctrl', 'AfterMenuBTLast.brctr.json5'),
os.path.join('button', 'ctrl', 'AfterMenuEndConfirm.brctr.json5'),
os.path.join('button', 'ctrl', 'AfterMenuTimeAttack.brctr.json5'),
os.path.join('button', 'ctrl', 'AfterMenuVS.brctr.json5'),
os.path.join('button', 'ctrl', 'AfterMenuVSLast.brctr.json5'),
os.path.join('button', 'ctrl', 'PauseMenuGhostWatch.brctr.json5'),
os.path.join('button', 'ctrl', 'PauseMenuReplayTA.brctr.json5'),
os.path.join('button', 'ctrl', 'PauseMenuTimeAttack.brctr.json5'),
os.path.join('button', 'ctrl', 'PauseMenuVS.brctr.json5'),
# For in-race licence settings editor
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in_after.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in_before.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_in.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fade_out.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_free_to_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_fuchi_check_loop.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_select_to_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_01_ok.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_01_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_02_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w023_rule_menu_text_light_02_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_off.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_off_to_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_active_on.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in_after.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in_before.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_in.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fade_out.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_free_to_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_fuchi_check_loop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_select_to_free.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_01_ok.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_01_stop.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_02_select.brlan.json5'),
os.path.join('control', 'anim', 'common_w024_rule_icon_text_light_02_stop.brlan.json5'),
os.path.join('control', 'blyt', 'common_w024_rule_icon.brlyt.json5'),
os.path.join('control', 'blyt', 'common_w076_license_icon_center.brlyt.json5'),
os.path.join('control', 'blyt', 'common_w201_setting_menu.brlyt.json5'),
os.path.join('control', 'ctrl', 'LicenseDisplay.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseManagement.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseSettingRadioBase.brctr.json5'),
os.path.join('control', 'ctrl', 'LicenseSettingRadioOption.brctr.json5'),
os.path.join('control', 'timg', 'tt_license_icon_004.tpl'),
os.path.join('game_image', 'anim', 'game_image_speed_texture_pattern_0_9.brlan.json5'),
os.path.join('game_image', 'blyt', 'game_image_speed.brlyt.json5'),
os.path.join('game_image', 'blyt', 'InputDisplay.brlyt.json5'),
os.path.join('game_image', 'blyt', 'race_message_half.brlyt'),
os.path.join('game_image', 'ctrl', 'battle_total_point.brctr.json5'),
os.path.join('game_image', 'ctrl', 'InputDisplay.brctr.json5'),
os.path.join('game_image', 'ctrl', 'lap_number.brctr.json5'),
os.path.join('game_image', 'ctrl', 'position_multi.brctr.json5'),
os.path.join('game_image', 'ctrl', 'speed_number.brctr.json5'),
os.path.join('game_image', 'ctrl', 'time_number.brctr.json5'),
os.path.join('game_image', 'timg', 'basic_accel_off.tpl'),
os.path.join('game_image', 'timg', 'basic_accel_on.tpl'),
os.path.join('game_image', 'timg', 'basic_cstick_bg.tpl'),
os.path.join('game_image', 'timg', 'basic_cstick_center.tpl'),
os.path.join('game_image', 'timg', 'basic_dpad_down.tpl'),
os.path.join('game_image', 'timg', 'basic_dpad_left.tpl'),
os.path.join('game_image', 'timg', 'basic_dpad_off.tpl'),
os.path.join('game_image', 'timg', 'basic_dpad_right.tpl'),
os.path.join('game_image', 'timg', 'basic_dpad_up.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_bd_off.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_bd_on.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_l_off.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_l_on.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_r_off.tpl'),
os.path.join('game_image', 'timg', 'basic_trigger_r_on.tpl'),
os.path.join('game_image', 'timg', 'tt_d_number_3d_minus.tpl'),
os.path.join('game_image', 'timg', 'tt_d_number_3d_none.tpl'),
],
os.path.join('Scene', 'UI', 'TitleSP.szs'): [
os.path.join('button', 'blyt', 'common_w076_license_icon_center.brlyt.json5'),
os.path.join('button', 'ctrl', 'LicenseSelect.brctr.json5'),
os.path.join('button', 'ctrl', 'TopMenuMultiWaku.brctr.json5'),
os.path.join('button', 'ctrl', 'TopMenuSingleWaku.brctr.json5'),
os.path.join('button', 'timg', 'tt_license_icon_004.tpl'),
],
}
# Per-language localized archives: message BMGs plus language-specific
# HUD textures are appended to the asset map built above.
for language in LANGUAGES:
    hud_language = HUD_LANGUAGES[language]
    # NOTE(review): 'K' (Korean) is excluded from the Race/Common lap
    # models — presumably it ships its own; confirm against game assets.
    if language != 'K':
        asset_in_files[os.path.join('Race', f'Common_{language}.szs')] = [
            os.path.join(f'jugemu_lap_{language}.brres'),
            os.path.join(f'jugemu_lapf_{language}.brres'),
        ]
    asset_in_files[os.path.join('Scene', 'UI', f'AwardSP_{language}.szs')] = [
        os.path.join('message', f'Common_{language}.bmg.json5'),
        os.path.join('message', f'Menu_{language}.bmg.json5'),
        os.path.join('message', f'Race_{language}.bmg.json5'),
    ]
    asset_in_files[os.path.join('Scene', 'UI', f'MenuMultiSP_{language}.szs')] = [
        os.path.join('message', f'Common_{language}.bmg.json5'),
        os.path.join('message', f'Menu_{language}.bmg.json5'),
    ]
    asset_in_files[os.path.join('Scene', 'UI', f'MenuOtherSP_{language}.szs')] = [
        os.path.join('message', f'Common_{language}.bmg.json5'),
        os.path.join('message', f'Menu_{language}.bmg.json5'),
    ]
    asset_in_files[os.path.join('Scene', 'UI', f'MenuSingleSP_{language}.szs')] = [
        os.path.join('message', f'Common_{language}.bmg.json5'),
        os.path.join('message', f'Menu_{language}.bmg.json5'),
    ]
    asset_in_files[os.path.join('Scene', 'UI', f'RaceSP_{language}.szs')] = [
        os.path.join('game_image', 'timg', f'tt_speed_{hud_language}.tpl'),
        os.path.join('message', f'Common_{language}.bmg.json5'),
        os.path.join('message', f'Menu_{language}.bmg.json5'),
        os.path.join('message', f'Race_{language}.bmg.json5'),
    ]
    # 'E' HUD textures are the in-archive defaults (the rename table below
    # maps every HUD language back onto the *_E names), so only non-English
    # HUD languages carry their own lap/time textures.
    if hud_language != 'E':
        asset_in_files[os.path.join('Scene', 'UI', f'RaceSP_{language}.szs')] += [
            os.path.join('game_image', 'timg', f'tt_lap_{hud_language}.tpl'),
            os.path.join('game_image', 'timg', f'tt_lap_{hud_language}_lap1.tpl'),
            os.path.join('game_image', 'timg', f'tt_lap_{hud_language}_lap2.tpl'),
            os.path.join('game_image', 'timg', f'tt_lap_{hud_language}_lap3.tpl'),
            os.path.join('game_image', 'timg', f'tt_time_{hud_language}.tpl'),
        ]
    # One position-number texture per racer slot (12 slots, 01..12),
    # for both single- and multi-player variants.
    for i in range(12):
        for base in ['tt_position_no_st_64x64', 'tt_multi_position_no_st_64x64']:
            asset_in_files[os.path.join('Scene', 'UI', f'RaceSP_{language}.szs')] += [
                os.path.join('game_image', 'timg', f'{base}_{hud_language}_{i + 1:02d}.tpl')
            ]
    asset_in_files[os.path.join('Scene', 'UI', f'TitleSP_{language}.szs')] = [
        os.path.join('message', f'Common_{language}.bmg.json5'),
        os.path.join('message', f'Menu_{language}.bmg.json5'),
    ]
# Map each input asset to its build artifact under $builddir/Shared.szs.d
# and emit one ninja build edge per distinct output file.
asset_out_files = {target: [] for target in asset_in_files}
for target in asset_in_files:
    for in_file in asset_in_files[target]:
        base, ext = os.path.splitext(in_file)
        # json5 sources compile down to the inner extension (suffix is
        # dropped); binary assets keep their extension and are copied.
        outext = {
            '.brfna': '.brfna',
            '.brfnt': '.brfnt',
            '.brlyt': '.brlyt',
            '.brres': '.brres',
            '.json5': '',
            '.tpl': '.tpl',
        }[ext]
        out_file = os.path.join('$builddir', 'Shared.szs.d', base + outext)
        basebase, baseext = os.path.splitext(base)
        # Flatten all outputs recorded so far, used for de-duplication below.
        # NOTE(review): this rebuilds the full list on every iteration
        # (O(n^2) overall); a persistent set would be linear.
        out_files = [out_file for out_files in asset_out_files.values() for out_file in out_files]
        if baseext == '.bmg':
            # BMG messages are first merged with the shared SP overrides.
            merged_file = os.path.join('$builddir', 'merged', in_file)
            if out_file not in out_files:
                n.build(
                    merged_file,
                    'merge',
                    [
                        os.path.join('assets', in_file),
                        os.path.join('assets', basebase.rsplit('_', 1)[0] + 'SP_U.bmg.json5'),
                        os.path.join('assets', basebase.replace('_', 'SP_') + '.bmg.json5'),
                    ],
                    implicit = '$merge',
                )
            in_file = merged_file
        else:
            in_file = os.path.join('assets', in_file)
        # Only emit the conversion/copy edge once per distinct output file;
        # the same source can be packed into several target archives.
        if out_file not in out_files:
            rule = {
                '.brfna': 'cp',
                '.brfnt': 'cp',
                '.brlyt': 'cp',
                '.brres': 'cp',
                '.json5': 'wuj5',
                '.tpl': 'cp',
            }[ext]
            n.build(
                out_file,
                rule,
                in_file,
            )
        asset_out_files[target] += [out_file]
n.newline()
# Outputs carry language suffixes on disk but must use the game's generic
# (language-less or English-default) names inside the SZS archives;
# build the basename rename table once.
renamed = {}
for language in LANGUAGES:
    renamed[f'jugemu_lap_{language}.brres'] = 'jugemu_lap.brres'
    renamed[f'jugemu_lapf_{language}.brres'] = 'jugemu_lapf.brres'
    renamed[f'Common_{language}.bmg'] = 'Common.bmg'
    renamed[f'Menu_{language}.bmg'] = 'Menu.bmg'
    renamed[f'Race_{language}.bmg'] = 'Race.bmg'
for hud_language in HUD_LANGUAGES.values():
    renamed[f'tt_lap_{hud_language}.tpl'] = 'tt_lap_E.tpl'
    # NOTE(review): 'Lap1' vs 'lap2'/'lap3' casing differs — presumably
    # matching the game's actual asset names; confirm before "fixing".
    renamed[f'tt_lap_{hud_language}_lap1.tpl'] = 'tt_lap_E_Lap1.tpl'
    renamed[f'tt_lap_{hud_language}_lap2.tpl'] = 'tt_lap_E_lap2.tpl'
    renamed[f'tt_lap_{hud_language}_lap3.tpl'] = 'tt_lap_E_lap3.tpl'
    renamed[f'tt_speed_{hud_language}.tpl'] = 'tt_speed.tpl'
    renamed[f'tt_time_{hud_language}.tpl'] = 'tt_time_E.tpl'
    for i in range(12):
        for base in ['tt_position_no_st_64x64', 'tt_multi_position_no_st_64x64']:
            renamed[f'{base}_{hud_language}_{i + 1:02d}.tpl'] = f'{base}_{i + 1:02d}.tpl'
# Emit one 'szs' archive build edge per target, passing only the renames
# that actually apply to files packed into that archive.
for target in asset_out_files:
    target_renamed = {}
    for out_file in asset_out_files[target]:
        out_file = os.path.basename(out_file)
        if out_file in renamed:
            target_renamed[out_file] = renamed[out_file]
    # Serialize the applicable renames as '--renamed SRC DST' CLI arguments.
    target_renamed = ' '.join([f'--renamed {src} {dst}' for src, dst in target_renamed.items()])
    n.build(
        os.path.join('$outdir', 'disc', target),
        'szs',
        asset_out_files[target],
        variables = {
            'szsin': os.path.join('$builddir', 'Shared.szs.d'),
            'args': target_renamed,
        },
    )
n.newline()
# Self-regeneration: re-run configure.py (preserving --gdb_compatible)
# whenever the script or the vendored ninja_syntax module changes.
n.variable('configure', 'configure.py')
n.newline()
n.rule(
    'configure',
    command = f'{sys.executable} $configure' + (' --gdb_compatible' if args.gdb_compatible else ''),
    generator = True,
)
n.build(
    'build.ninja',
    'configure',
    implicit = [
        '$configure',
        os.path.join('vendor', 'ninja_syntax.py'),
    ],
)

# Flush the in-memory ninja manifest to disk in one shot.
with open('build.ninja', 'w') as out_file:
    out_file.write(out_buf.getvalue())
n.close()
|
en
| 0.639917
|
#!/usr/bin/env python3 # https://stackoverflow.com/questions/14989858/get-the-current-git-hash-in-a-python-script/14989911#14989911 # '-fplan9-extensions', # '-Werror=implicit-function-declaration', # '-Werror=incompatible-pointer-types', # Keyboard module # # Security module # Settings module # # Storage module # # English (PAL) # French (PAL) # German # Italian # Japanese # Korean # Spanish (NTSC) # French (NTSC) # Spanish (PAL) # English (NTSC) # Dutch # For in-race licence settings editor
| 2.283672
| 2
|
alveo/examples/deployment_modes/test_classify.py
|
dendisuhubdy/Vitis-AI
| 3
|
6626873
|
<reponame>dendisuhubdy/Vitis-AI
#!/usr/bin/env python
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from six import itervalues, iteritems
from ctypes import *
import numpy as np
import os, sys
from vai.dpuv1.rt import xdnn, xdnn_io
from vai.dpuv1.rt.vitis.python.dpu.runner import Runner
def main():
  """Run batched image classification on an FPGA via the Vitis DPU runner.

  Reads options from the command line (xdnn_io.processCommandLine), streams
  images through the accelerator batch by batch, applies the final FC and
  softmax stages on the CPU, and either prints per-image classifications or,
  when a 'golden' label map is supplied, reports top-1/top-5 accuracy.
  """
  args = xdnn_io.processCommandLine()
  # Runner is bound to a compiled-model directory produced by the Vitis flow.
  runner = Runner(args['vitis_rundir'])
  inTensors = runner.get_input_tensors()
  outTensors = runner.get_output_tensors()
  batch_sz = args['batch_sz']
  if batch_sz == -1:
    # use Runner's suggested batch size
    batch_sz = inTensors[0].dims[0]
  if args['golden']:
    goldenMap = xdnn_io.getGoldenMap(args['golden'])
    top5Count = 0
    top1Count = 0
  # Pre-allocate host-side buffers: fpgaBlobs[0] = input blobs,
  # fpgaBlobs[1] = output blobs, each with the batch dim prepended.
  fpgaBlobs = []
  for io in [inTensors, outTensors]:
    blobs = []
    for t in io:
      shape = (batch_sz,) + tuple([t.dims[i] for i in range(t.ndims)][1:])
      blobs.append(np.empty((shape), dtype=np.float32, order='C'))
    fpgaBlobs.append(blobs)
  img_paths = xdnn_io.getFilePaths(args['images'])
  labels = xdnn_io.get_labels(args['labels'])
  # CPU-side ops for the final fully-connected + softmax stages.
  xdnnCPUOp = xdnn.XDNNCPUOp("%s/weights.h5" % args['vitis_rundir'])
  fcOutput = np.empty((batch_sz, args['outsz'],), dtype=np.float32, order='C')
  fpgaInput = fpgaBlobs[0][0]
  for i in range(0, len(img_paths), batch_sz):
    pl = []
    # fill tensor input data from image file
    for j, p in enumerate(img_paths[i:i + batch_sz]):
      img, _ = xdnn_io.loadImageBlobFromFile(p,
        args['img_raw_scale'], args['img_mean'], args['img_input_scale'],
        fpgaInput.shape[2], fpgaInput.shape[3])
      pl.append(p)
      np.copyto(fpgaInput[j], img)
    # NOTE(review): a final partial batch leaves stale data in the unfilled
    # input slots — presumably harmless because only the first len(pl)
    # results are consumed below; confirm.
    jid = runner.execute_async(fpgaBlobs[0], fpgaBlobs[1])
    runner.wait(jid)
    xdnnCPUOp.computeFC(fpgaBlobs[1][0], fcOutput)
    softmaxOut = xdnnCPUOp.computeSoftmax(fcOutput)
    if args['golden']:
      # isTopK returns 0/1, so these sums count correct classifications.
      for j,p in enumerate(img_paths[i:i + batch_sz]):
        top1Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 1)
        top5Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 5)
    else:
      xdnn_io.printClassification(softmaxOut, pl, labels)
  if args['golden']:
    print ( ("\nAverage accuracy (n=%d) Top-1: %.1f%%, Top-5: %.1f%%\n") % (len(img_paths), float(top1Count)/float(len(img_paths))*100., float(top5Count)/float(len(img_paths))*100.) )

if __name__ == '__main__':
  main()
|
#!/usr/bin/env python
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from six import itervalues, iteritems
from ctypes import *
import numpy as np
import os, sys
from vai.dpuv1.rt import xdnn, xdnn_io
from vai.dpuv1.rt.vitis.python.dpu.runner import Runner
def main():
  """Run batched image classification on an FPGA via the Vitis DPU runner.

  Reads options from the command line (xdnn_io.processCommandLine), streams
  images through the accelerator batch by batch, applies the final FC and
  softmax stages on the CPU, and either prints per-image classifications or,
  when a 'golden' label map is supplied, reports top-1/top-5 accuracy.
  """
  args = xdnn_io.processCommandLine()
  # Runner is bound to a compiled-model directory produced by the Vitis flow.
  runner = Runner(args['vitis_rundir'])
  inTensors = runner.get_input_tensors()
  outTensors = runner.get_output_tensors()
  batch_sz = args['batch_sz']
  if batch_sz == -1:
    # use Runner's suggested batch size
    batch_sz = inTensors[0].dims[0]
  if args['golden']:
    goldenMap = xdnn_io.getGoldenMap(args['golden'])
    top5Count = 0
    top1Count = 0
  # Pre-allocate host-side buffers: fpgaBlobs[0] = input blobs,
  # fpgaBlobs[1] = output blobs, each with the batch dim prepended.
  fpgaBlobs = []
  for io in [inTensors, outTensors]:
    blobs = []
    for t in io:
      shape = (batch_sz,) + tuple([t.dims[i] for i in range(t.ndims)][1:])
      blobs.append(np.empty((shape), dtype=np.float32, order='C'))
    fpgaBlobs.append(blobs)
  img_paths = xdnn_io.getFilePaths(args['images'])
  labels = xdnn_io.get_labels(args['labels'])
  # CPU-side ops for the final fully-connected + softmax stages.
  xdnnCPUOp = xdnn.XDNNCPUOp("%s/weights.h5" % args['vitis_rundir'])
  fcOutput = np.empty((batch_sz, args['outsz'],), dtype=np.float32, order='C')
  fpgaInput = fpgaBlobs[0][0]
  for i in range(0, len(img_paths), batch_sz):
    pl = []
    # fill tensor input data from image file
    for j, p in enumerate(img_paths[i:i + batch_sz]):
      img, _ = xdnn_io.loadImageBlobFromFile(p,
        args['img_raw_scale'], args['img_mean'], args['img_input_scale'],
        fpgaInput.shape[2], fpgaInput.shape[3])
      pl.append(p)
      np.copyto(fpgaInput[j], img)
    # NOTE(review): a final partial batch leaves stale data in the unfilled
    # input slots — presumably harmless because only the first len(pl)
    # results are consumed below; confirm.
    jid = runner.execute_async(fpgaBlobs[0], fpgaBlobs[1])
    runner.wait(jid)
    xdnnCPUOp.computeFC(fpgaBlobs[1][0], fcOutput)
    softmaxOut = xdnnCPUOp.computeSoftmax(fcOutput)
    if args['golden']:
      # isTopK returns 0/1, so these sums count correct classifications.
      for j,p in enumerate(img_paths[i:i + batch_sz]):
        top1Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 1)
        top5Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 5)
    else:
      xdnn_io.printClassification(softmaxOut, pl, labels)
  if args['golden']:
    print ( ("\nAverage accuracy (n=%d) Top-1: %.1f%%, Top-5: %.1f%%\n") % (len(img_paths), float(top1Count)/float(len(img_paths))*100., float(top5Count)/float(len(img_paths))*100.) )

if __name__ == '__main__':
  main()
|
en
| 0.824791
|
#!/usr/bin/env python # Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # use Runner's suggested batch size # fill tensor input data from image file
| 1.85907
| 2
|
PnP/DenoiserScaling.py
|
sebemery/Lipschitz-constrained-neural-networks
| 0
|
6626874
|
import os
import numpy as np
import argparse
import json
import torch
import cv2
import scipy.io as sio
import matplotlib.pyplot as plt
import sys
sys.path.append('..')
import PnP
import models
def parse_arguments():
    """Build and parse the command-line options for the PnP mu-scaling sweep.

    Returns:
        argparse.Namespace: parsed options (file paths, device, algorithm
        choice, the mu sweep range, and PnP solver hyper-parameters).
    """

    def _str2bool(value):
        # BUGFIX: argparse's `type=bool` is a classic pitfall — any non-empty
        # string is truthy, so `--jpg False` used to yield True.  Parse the
        # common spellings explicitly instead.
        if isinstance(value, bool):
            return value
        if value.lower() in ('yes', 'true', 't', 'y', '1'):
            return True
        if value.lower() in ('no', 'false', 'f', 'n', '0'):
            return False
        raise argparse.ArgumentTypeError(f'Boolean value expected, got {value!r}')

    parser = argparse.ArgumentParser(description='PyTorch Training')
    parser.add_argument('--config', default='configs/config.json', type=str, help='Path to the config file')
    parser.add_argument('--model', default=None, type=str, help='Path to the trained .pth model')
    parser.add_argument('--img', default='CS_MRI/file1002252_2_bottomright.pt', type=str, help='Path to the original image')
    parser.add_argument('--mask', default='CS_MRI/Q_Random30.pt', type=str, help='Path to the k-space mask file')
    parser.add_argument('--jpg', default=True, type=_str2bool, help='file type either jpg or pt')
    parser.add_argument('--noise', default='CS_MRI/noises.mat', type=str, help='Path to the k-space noise file')
    parser.add_argument('--device', default="cpu", type=str, help='device location')
    parser.add_argument('--experiment', default=None, type=str, help='name of the experiment')
    parser.add_argument('--algo', default="admm", type=str, help='admm/fbs')
    parser.add_argument('--mu_upper', default=3.0, type=float, help='highest value of mu')
    parser.add_argument('--mu_lower', default=0.1, type=float, help='lowest value of mu')
    parser.add_argument('--mu_step', default=30, type=int, help='step')
    parser.add_argument("--sigma", type=float, default=0.05, help="Noise level for the denoising model")
    parser.add_argument("--alpha", type=float, default=2.0, help="Step size in Plug-and Play")
    parser.add_argument("--maxitr", type=int, default=100, help="Number of iterations")
    parser.add_argument("--verbose", type=int, default=1, help="Whether printing the info out")
    args = parser.parse_args()
    return args
def check_directory(experiment, algo):
    """Create (if needed) and return the output directory Experiments/<algo>/<experiment>.

    Args:
        experiment: experiment name (leaf sub-directory).
        algo: algorithm name ('admm' or 'fbs'), intermediate sub-directory.

    Returns:
        str: path to the (now existing) directory.
    """
    # exist_ok=True builds the whole chain in one call and is race-free,
    # replacing the original three exists()/makedirs() pairs, which could
    # raise if another process created a directory in between.
    path = os.path.join("Experiments", algo, experiment)
    os.makedirs(path, exist_ok=True)
    return path
def scale(img):
    """Linearly rescale *img* so its values span [0, 255].

    The minimum maps to 0 and the maximum to 255; returns a float array.
    """
    lo = np.amin(img)
    hi = np.amax(img)
    normalized = (img - lo) / (hi - lo)
    return 255 * normalized
def psnr(x, im_orig):
xout = (x - np.min(x)) / (np.max(x) - np.min(x))
norm1 = np.sum((np.absolute(im_orig)) ** 2)
norm2 = np.sum((np.absolute(x - im_orig)) ** 2)
psnr = 10 * np.log10(norm1 / norm2)
return psnr
if __name__ == '__main__':
    # ---- input arguments ----
    args = parse_arguments()
    # Guard against an explicitly empty --config value (a default is set,
    # so this is normally always truthy).
    assert args.config
    # BUGFIX: use a context manager so the config file handle is closed
    # promptly instead of json.load(open(...)).
    with open(args.config) as config_file:
        config = json.load(config_file)
    # ---- load the model ----
    model = models.DnCNN(config, depth=config["model"]["depth"], n_channels=config["model"]["n_channels"],
                         image_channels=config["model"]["image_channels"], kernel_size=config["model"]["kernel_size"],
                         padding=config["model"]["padding"], architecture=config["model"]["architecture"],
                         spectral_norm=config["model"]["spectral_norm"],
                         shared_activation=config["model"]["shared_activation"],
                         shared_channels=config["model"]["shared_channels"], device=args.device)
    device = args.device
    checkpoint = torch.load(args.model, device)
    if device == 'cpu':
        # Strip the DataParallel 'module.' prefix so the state dict keys
        # match a bare (non-wrapped) model.
        for key in list(checkpoint['state_dict'].keys()):
            if 'module.' in key:
                checkpoint['state_dict'][key.replace('module.', '')] = checkpoint['state_dict'][key]
                del checkpoint['state_dict'][key]
    try:
        model.load_state_dict(checkpoint['state_dict'], strict=True)
    except Exception as e:
        # Fall back to a partial load when some modules are absent.
        print(f'Some modules are missing: {e}')
        model.load_state_dict(checkpoint['state_dict'], strict=False)
    model.float()
    model.eval()
    if args.device != 'cpu':
        model.to(device)
    # create the output directory and return the path to it
    path = check_directory(args.experiment, args.algo)
    with torch.no_grad():
        # ---- load the ground truth ----
        if args.jpg is True:
            im_orig = cv2.imread(f'{args.img}', 0) / 255.0
            cv2.imwrite(f'{path}/GroundTruth.png', 255 * im_orig)
        else:
            im_orig = torch.load(f'{args.img}').numpy()
            cv2.imwrite(f'{path}/GroundTruth.png', 255 * im_orig)
        # ---- load mask matrix ----
        if args.jpg is True:
            mat = sio.loadmat(f'{args.mask}')
            mask = mat.get('Q1').astype(np.float64)
        else:
            mask = torch.load(f'{args.mask}').numpy()
        # ---- load noises -----
        if args.jpg is True:
            noises = sio.loadmat(f'{args.noise}')
            noises = noises.get('noises').astype(np.complex128) * 3.0
        else:
            noises = None
        # ---- set options -----
        opts = dict(sigma=args.sigma, alpha=args.alpha, maxitr=args.maxitr, verbose=args.verbose)
        mu_snr = []
        mu_vec = np.linspace(args.mu_lower, args.mu_upper, args.mu_step)
        for mu in mu_vec:
            # ---- plug and play !!! -----
            # Verbose runs additionally return the per-iteration PSNR curve.
            if args.algo == "admm":
                if args.verbose:
                    x_out, inc, x_init, zero_fill_snr, snr = PnP.pnp_admm_csmri.pnp_admm_csmri_(model, im_orig, mask, noises, mu, device, **opts)
                else:
                    x_out, inc, x_init, zero_fill_snr = PnP.pnp_admm_csmri.pnp_admm_csmri_(model, im_orig, mask, noises, mu, device, **opts)
            elif args.algo == "fbs":
                if args.verbose:
                    x_out, inc, x_init, zero_fill_snr, snr = PnP.pnp_fbs_csmri.pnp_fbs_csmri_(model, im_orig, mask, noises, mu, device, **opts)
                else:
                    x_out, inc, x_init, zero_fill_snr = PnP.pnp_fbs_csmri.pnp_fbs_csmri_(model, im_orig, mask, noises, mu, device, **opts)
            # per-mu output directory (race-free, idempotent)
            path_mu = os.path.join(path, f"{mu}")
            os.makedirs(path_mu, exist_ok=True)
            # ---- print result -----
            out_snr = psnr(x_out, im_orig)
            mu_snr.append(out_snr)
            print('Plug-and-Play PSNR: ', out_snr)  # BUGFIX: was misspelled 'PNSR'
            # BUGFIX: 'snr' is only bound in verbose mode; non-verbose runs
            # used to raise NameError here.  Fall back to the final PSNR.
            if args.verbose:
                metrics = {"PSNR": np.round(snr, 8), "Zero fill PSNR": np.round(zero_fill_snr, 8), }
            else:
                metrics = {"PSNR": np.round(out_snr, 8), "Zero fill PSNR": np.round(zero_fill_snr, 8), }
            with open(f'{path_mu}/snr.txt', 'w') as f:
                for k, v in list(metrics.items()):
                    f.write("%s\n" % (k + ':' + f'{v}'))
            # ---- save result -----
            fig, ax1 = plt.subplots()
            ax1.plot(inc, 'b-', linewidth=1)
            ax1.set_xlabel('iteration')
            ax1.set_ylabel('Increment', color='b')
            ax1.set_title("Increment curve")
            fig.savefig(f'{path_mu}/inc.png')
            plt.show()
            if args.verbose:
                fig, ax1 = plt.subplots()
                ax1.plot(snr, 'b-', linewidth=1)
                ax1.set_xlabel('iteration')
                ax1.set_ylabel('PSNR', color='b')
                ax1.set_title("PSNR curve")
                fig.savefig(f'{path_mu}/snr.png')
                plt.show()
            torch.save(torch.from_numpy(x_out), f'{path_mu}/{args.algo}.pt')
            torch.save(torch.from_numpy(x_init), f'{path_mu}/ifft.pt')
            x_out = scale(x_out)
            x_init = scale(x_init)
            cv2.imwrite(f'{path_mu}/{args.algo}.png', x_out)
            cv2.imwrite(f'{path_mu}/ifft.png', x_init)
        # ---- summarize the mu sweep -----
        fig, ax1 = plt.subplots()
        ax1.plot(mu_vec, np.asarray(mu_snr), 'b-', linewidth=1)
        ax1.set_xlabel('mu')
        ax1.set_ylabel('SNR', color='b')
        ax1.set_title("SNR for different scaling mu")
        fig.savefig(f'{path}/mu.png')
        plt.show()
        # Record the mu value that maximized the output PSNR.
        idx_max = np.argmax(np.asarray(mu_snr))
        mu_max = mu_vec[idx_max]
        param = {"mu": mu_max}
        with open(f'{path}/mu.txt', 'w') as f:
            for k, v in list(param.items()):
                f.write("%s\n" % (k + ':' + f'{v}'))
|
import os
import numpy as np
import argparse
import json
import torch
import cv2
import scipy.io as sio
import matplotlib.pyplot as plt
import sys
sys.path.append('..')
import PnP
import models
def parse_arguments():
    """Build and parse the command-line options for the PnP mu-scaling sweep.

    Returns:
        argparse.Namespace: parsed options (file paths, device, algorithm
        choice, the mu sweep range, and PnP solver hyper-parameters).
    """

    def _str2bool(value):
        # BUGFIX: argparse's `type=bool` is a classic pitfall — any non-empty
        # string is truthy, so `--jpg False` used to yield True.  Parse the
        # common spellings explicitly instead.
        if isinstance(value, bool):
            return value
        if value.lower() in ('yes', 'true', 't', 'y', '1'):
            return True
        if value.lower() in ('no', 'false', 'f', 'n', '0'):
            return False
        raise argparse.ArgumentTypeError(f'Boolean value expected, got {value!r}')

    parser = argparse.ArgumentParser(description='PyTorch Training')
    parser.add_argument('--config', default='configs/config.json', type=str, help='Path to the config file')
    parser.add_argument('--model', default=None, type=str, help='Path to the trained .pth model')
    parser.add_argument('--img', default='CS_MRI/file1002252_2_bottomright.pt', type=str, help='Path to the original image')
    parser.add_argument('--mask', default='CS_MRI/Q_Random30.pt', type=str, help='Path to the k-space mask file')
    parser.add_argument('--jpg', default=True, type=_str2bool, help='file type either jpg or pt')
    parser.add_argument('--noise', default='CS_MRI/noises.mat', type=str, help='Path to the k-space noise file')
    parser.add_argument('--device', default="cpu", type=str, help='device location')
    parser.add_argument('--experiment', default=None, type=str, help='name of the experiment')
    parser.add_argument('--algo', default="admm", type=str, help='admm/fbs')
    parser.add_argument('--mu_upper', default=3.0, type=float, help='highest value of mu')
    parser.add_argument('--mu_lower', default=0.1, type=float, help='lowest value of mu')
    parser.add_argument('--mu_step', default=30, type=int, help='step')
    parser.add_argument("--sigma", type=float, default=0.05, help="Noise level for the denoising model")
    parser.add_argument("--alpha", type=float, default=2.0, help="Step size in Plug-and Play")
    parser.add_argument("--maxitr", type=int, default=100, help="Number of iterations")
    parser.add_argument("--verbose", type=int, default=1, help="Whether printing the info out")
    args = parser.parse_args()
    return args
def check_directory(experiment, algo):
    """Create (if needed) and return the output directory Experiments/<algo>/<experiment>.

    Args:
        experiment: experiment name (leaf sub-directory).
        algo: algorithm name ('admm' or 'fbs'), intermediate sub-directory.

    Returns:
        str: path to the (now existing) directory.
    """
    # exist_ok=True builds the whole chain in one call and is race-free,
    # replacing the original three exists()/makedirs() pairs, which could
    # raise if another process created a directory in between.
    path = os.path.join("Experiments", algo, experiment)
    os.makedirs(path, exist_ok=True)
    return path
def scale(img):
    """Linearly rescale *img* so its values span [0, 255].

    The minimum maps to 0 and the maximum to 255; returns a float array.
    """
    lo = np.amin(img)
    hi = np.amax(img)
    normalized = (img - lo) / (hi - lo)
    return 255 * normalized
def psnr(x, im_orig):
xout = (x - np.min(x)) / (np.max(x) - np.min(x))
norm1 = np.sum((np.absolute(im_orig)) ** 2)
norm2 = np.sum((np.absolute(x - im_orig)) ** 2)
psnr = 10 * np.log10(norm1 / norm2)
return psnr
if __name__ == '__main__':
# ---- input arguments ----
args = parse_arguments()
# CONFIG -> assert if config is here
assert args.config
config = json.load(open(args.config))
# ---- load the model ----
model = models.DnCNN(config, depth=config["model"]["depth"], n_channels=config["model"]["n_channels"],
image_channels=config["model"]["image_channels"], kernel_size=config["model"]["kernel_size"],
padding=config["model"]["padding"], architecture=config["model"]["architecture"],
spectral_norm=config["model"]["spectral_norm"],
shared_activation=config["model"]["shared_activation"],
shared_channels=config["model"]["shared_channels"], device=args.device)
device = args.device
checkpoint = torch.load(args.model, device)
if device == 'cpu':
for key in list(checkpoint['state_dict'].keys()):
if 'module.' in key:
checkpoint['state_dict'][key.replace('module.', '')] = checkpoint['state_dict'][key]
del checkpoint['state_dict'][key]
try:
model.load_state_dict(checkpoint['state_dict'], strict=True)
except Exception as e:
print(f'Some modules are missing: {e}')
model.load_state_dict(checkpoint['state_dict'], strict=False)
model.float()
model.eval()
if args.device != 'cpu':
model.to(device)
# create the output directory and return the path to it
path = check_directory(args.experiment, args.algo)
with torch.no_grad():
# ---- load the ground truth ----
if args.jpg is True:
im_orig = cv2.imread(f'{args.img}', 0) / 255.0
cv2.imwrite(f'{path}/GroundTruth.png', 255 * im_orig)
else:
im_orig = torch.load(f'{args.img}').numpy()
cv2.imwrite(f'{path}/GroundTruth.png', 255*im_orig)
# ---- load mask matrix ----
if args.jpg is True:
mat = sio.loadmat(f'{args.mask}')
mask = mat.get('Q1').astype(np.float64)
else:
mask = torch.load(f'{args.mask}').numpy()
# ---- load noises -----
if args.jpg is True:
noises = sio.loadmat(f'{args.noise}')
noises = noises.get('noises').astype(np.complex128) * 3.0
else:
noises = None
# ---- set options -----
opts = dict(sigma=args.sigma, alpha=args.alpha, maxitr=args.maxitr, verbose=args.verbose)
mu_snr = []
mu_vec = np.linspace(args.mu_lower, args.mu_upper, args.mu_step)
for mu in mu_vec:
# ---- plug and play !!! -----
if args.algo == "admm":
if args.verbose:
x_out, inc, x_init, zero_fill_snr, snr = PnP.pnp_admm_csmri.pnp_admm_csmri_(model, im_orig, mask, noises, mu, device, **opts)
else:
x_out, inc, x_init, zero_fill_snr = PnP.pnp_admm_csmri.pnp_admm_csmri_(model, im_orig, mask, noises, mu, device, **opts)
elif args.algo == "fbs":
if args.verbose:
x_out, inc, x_init, zero_fill_snr, snr = PnP.pnp_fbs_csmri.pnp_fbs_csmri_(model, im_orig, mask, noises, mu, device, **opts)
else:
x_out, inc, x_init, zero_fill_snr = PnP.pnp_fbs_csmri.pnp_fbs_csmri_(model, im_orig, mask, noises, mu, device, **opts)
# directory
path_mu = os.path.join(path, f"{mu}")
if not os.path.exists(path_mu):
os.makedirs(path_mu)
# ---- print result -----
out_snr = psnr(x_out, im_orig)
mu_snr.append(out_snr)
print('Plug-and-Play PNSR: ', out_snr)
metrics = {"PSNR": np.round(snr, 8), "Zero fill PSNR": np.round(zero_fill_snr, 8), }
with open(f'{path_mu}/snr.txt', 'w') as f:
for k, v in list(metrics.items()):
f.write("%s\n" % (k + ':' + f'{v}'))
# ---- save result -----
fig, ax1 = plt.subplots()
ax1.plot(inc, 'b-', linewidth=1)
ax1.set_xlabel('iteration')
ax1.set_ylabel('Increment', color='b')
ax1.set_title("Increment curve")
fig.savefig(f'{path_mu}/inc.png')
plt.show()
if args.verbose:
fig, ax1 = plt.subplots()
ax1.plot(snr, 'b-', linewidth=1)
ax1.set_xlabel('iteration')
ax1.set_ylabel('PSNR', color='b')
ax1.set_title("PSNR curve")
fig.savefig(f'{path_mu}/snr.png')
plt.show()
torch.save(torch.from_numpy(x_out), f'{path_mu}/{args.algo}.pt')
torch.save(torch.from_numpy(x_init), f'{path_mu}/ifft.pt')
x_out = scale(x_out)
x_init = scale(x_init)
cv2.imwrite(f'{path_mu}/{args.algo}.png', x_out)
cv2.imwrite(f'{path_mu}/ifft.png', x_init)
fig, ax1 = plt.subplots()
ax1.plot(mu_vec, np.asarray(mu_snr), 'b-', linewidth=1)
ax1.set_xlabel('mu')
ax1.set_ylabel('SNR', color='b')
ax1.set_title("SNR for different scaling mu")
fig.savefig(f'{path}/mu.png')
plt.show()
idx_max = np.argmax(np.asarray(mu_snr))
mu_max = mu_vec[idx_max]
param = {"mu": mu_max}
with open(f'{path}/mu.txt', 'w') as f:
for k, v in list(param.items()):
f.write("%s\n" % (k + ':' + f'{v}'))
|
en
| 0.303107
|
# ---- input arguments ---- # CONFIG -> assert if config is here # ---- load the model ---- # create the output directory and return the path to it # ---- load the ground truth ---- # ---- load mask matrix ---- # ---- load noises ----- # ---- set options ----- # ---- plug and play !!! ----- # directory # ---- print result ----- # ---- save result -----
| 2.340425
| 2
|
20160305_2.py
|
JaeGyu/PythonEx_1
| 0
|
6626875
|
<filename>20160305_2.py<gh_stars>0
#_*_ coding: utf-8 _*_
class MyClass:
def set(self, v): #self라는 인자가 있으면 이 메서드는 인스턴스 메서드임을 나타낸다.
self.value = v
def get(self):
return self.value
class Simple:
pass
t = MyClass()
print t
t.set("hello")
print t.get()
print t.value
c = MyClass()
c.set("egg")
print c.get()
print c.value
s = Simple()
c = MyClass()
MyClass.set(c, "foo")
print MyClass.get(c)
print c.value
def set(i):
print "set function outside function - ",i
class MyClass:
def set(self, v):
self.value = v
def incr(self):
set(self.value+1)
def get(self):
return self.value
c = MyClass()
c.set(1)
print c.get()
c.incr()
print c.get()
class D:
@staticmethod
def spam(x,y):
print "static method:",x,y
D.spam(1,2) #인스턴스 객체 없이 클래스에서 직접 호출
print
d = D()
d.spam(1,2) #인스턴스 객체를 통해서도 호출 가능
class C:
@classmethod
def spam(cls, y):
print cls, "->", y
print C
print
C.spam(5)
print
c = C()
c.spam(5)
print "-"*80
class D(C): #클래스 D는 클래스 C를 상속한다.
pass
D.spam(3)
d = D()
d.spam(3)
|
<filename>20160305_2.py<gh_stars>0
#_*_ coding: utf-8 _*_
class MyClass:
def set(self, v): #self라는 인자가 있으면 이 메서드는 인스턴스 메서드임을 나타낸다.
self.value = v
def get(self):
return self.value
class Simple:
pass
t = MyClass()
print t
t.set("hello")
print t.get()
print t.value
c = MyClass()
c.set("egg")
print c.get()
print c.value
s = Simple()
c = MyClass()
MyClass.set(c, "foo")
print MyClass.get(c)
print c.value
def set(i):
print "set function outside function - ",i
class MyClass:
def set(self, v):
self.value = v
def incr(self):
set(self.value+1)
def get(self):
return self.value
c = MyClass()
c.set(1)
print c.get()
c.incr()
print c.get()
class D:
@staticmethod
def spam(x,y):
print "static method:",x,y
D.spam(1,2) #인스턴스 객체 없이 클래스에서 직접 호출
print
d = D()
d.spam(1,2) #인스턴스 객체를 통해서도 호출 가능
class C:
@classmethod
def spam(cls, y):
print cls, "->", y
print C
print
C.spam(5)
print
c = C()
c.spam(5)
print "-"*80
class D(C): #클래스 D는 클래스 C를 상속한다.
pass
D.spam(3)
d = D()
d.spam(3)
|
ko
| 1.000049
|
#_*_ coding: utf-8 _*_ #self라는 인자가 있으면 이 메서드는 인스턴스 메서드임을 나타낸다. #인스턴스 객체 없이 클래스에서 직접 호출 #인스턴스 객체를 통해서도 호출 가능 #클래스 D는 클래스 C를 상속한다.
| 3.766464
| 4
|
VirtulizeOS/module.py
|
wuzirui/SchedulingLab
| 0
|
6626876
|
<filename>VirtulizeOS/module.py
import copy
from Schedulers import module
from .process import Process
import heapq
class Processor:
history = None
def __init__(self):
pass
def boot(self):
assert self.history is None, "already booted, shutdown() first"
self.history = []
pass
def shutdown(self):
assert self.history is not None, "processor not running, use boot() first"
ret = self.history
self.history = None
return ret
def is_running(self):
return self.history is not None
def run(self, process: Process = None):
if process is None:
self.history.append("free")
return 0
assert self.get_time() >= process.arrival_time, f"current time = {self.get_time()}, but process{process.pid} arrives at {process.arrival_time}"
self.history.append(process.pid)
return 1
def get_time(self):
assert self.history is not None, "processor not running, use boot() first"
return len(self.history)
def wait(self, time: int):
assert self.history is not None, "processor not running, use boot() first"
assert time > 0, "input invalid"
self.history.extend(["free"] * time)
class VirtualOS:
cpu = Processor()
scheduler = None
process_pool = None
process_dict = None
total_process_time = 0
cpu_busy = 0
clock = 0
def __init__(self, scheduler: module):
self.scheduler = scheduler
self.process_pool = []
self.process_dict = {}
pass
def load_processes(self, processes):
for process in processes:
self.load_process(process)
def load_process(self, process): # 所有process加入待处理字典
if not self.process_dict.get(process.pid) is None:
raise KeyError("Duplicate PID")
heapq.heappush(self.process_pool, [process.arrival_time + 0.0000001 * self.get_process_num(), process])
self.process_dict[process.pid] = process
def size_of_pool(self):
return len(self.process_pool)
def next_coming_process(self):
return self.process_pool[0][1]
def _init_processes(self):
for _, process in self.process_pool:
process.remain_time = process.process_time
self.total_process_time += process.process_time
process.status = "undefined"
process.history = []
def _time_pulse(self, clock):
for _, process in self.process_pool:
process.history.append("ready" if process.arrival_time <= clock else "undefined")
if process.remain_time == 0:
process.history[-1] = "Done"
if process.arrival_time == clock: # 判断是否有已经就绪者,加入就绪
self.scheduler.new_process(copy.copy(process))
next_pid = self.scheduler.next_to_run()
if next_pid >= 0:
next_process = self.process_dict[next_pid]
assert next_process.remain_time > 0
self.cpu_busy += self.cpu.run(next_process)
next_process.remain_time -= 1
next_process.history[-1] = "run"
if next_process.remain_time == 0:
self.scheduler.process_done(next_pid)
next_process.finish_time = clock
next_process.total_wait = clock - next_process.arrival_time - next_process.process_time + 1
next_process.turn_around = next_process.total_wait + next_process.process_time
else:
self.cpu.wait(1)
def run_from_start(self, print_history=True):
if self.cpu.is_running():
self.cpu.shutdown()
self.cpu.boot()
self._init_processes()
self.clock = 0
while self.cpu_busy < self.total_process_time:
self._time_pulse(self.clock)
self.clock += 1 # 每一次都要加一然后进入time_pulse
if print_history:
self.print_history(self.cpu.history)
self.print_statistic()
return self.cpu.history
def print_history(self, cpu_his):
print("Process History in Detail")
print("-" * (20 + len(self.process_pool) * 10))
print("%6s%10s" % ("clock", "CPU"), end="")
for _, process in self.process_pool:
print("%10s" % process.pid, end="")
else:
print("")
for clock in range(self.clock):
print("%4d" % clock, end=' ')
print("%10s" % cpu_his[clock], end="")
for _, process in self.process_pool:
print("%10s" % process.history[clock], end="")
else:
print("")
else:
print("")
def _print_attr_foreach_process(self, title: str, attr: str):
print("%15s" % title, end="")
for _, process in self.process_pool:
print("%10s" % process.__getattribute__(attr), end="")
else:
print("")
def _print_single_stat(self, title: str, data: float):
print("%15s%10f" % (title, data))
def print_statistic(self):
print("Process Statistics in Detail")
print("-" * (20 + len(self.process_pool) * 10))
self._print_attr_foreach_process("stat", "pid")
self._print_attr_foreach_process("arrival time", "arrival_time")
self._print_attr_foreach_process("process time", "process_time")
self._print_attr_foreach_process("total wait", "total_wait")
self._print_attr_foreach_process("turn around", "turn_around")
self._print_single_stat("avg wait", self.get_avg_wait())
self._print_single_stat("avg turn around", self.get_avg_turn_around())
def get_total_wait(self):
total_wait = 0
for _, process in self.process_pool:
total_wait += process.total_wait
return total_wait
def get_process_num(self):
return len(self.process_pool)
def get_avg_wait(self):
return self.get_total_wait() / self.get_process_num()
def get_avg_turn_around(self):
total_turn_around= 0
for _, process in self.process_pool:
total_turn_around += process.total_wait + process.process_time
return total_turn_around / self.get_process_num()
|
<filename>VirtulizeOS/module.py
import copy
from Schedulers import module
from .process import Process
import heapq
class Processor:
history = None
def __init__(self):
pass
def boot(self):
assert self.history is None, "already booted, shutdown() first"
self.history = []
pass
def shutdown(self):
assert self.history is not None, "processor not running, use boot() first"
ret = self.history
self.history = None
return ret
def is_running(self):
return self.history is not None
def run(self, process: Process = None):
if process is None:
self.history.append("free")
return 0
assert self.get_time() >= process.arrival_time, f"current time = {self.get_time()}, but process{process.pid} arrives at {process.arrival_time}"
self.history.append(process.pid)
return 1
def get_time(self):
assert self.history is not None, "processor not running, use boot() first"
return len(self.history)
def wait(self, time: int):
assert self.history is not None, "processor not running, use boot() first"
assert time > 0, "input invalid"
self.history.extend(["free"] * time)
class VirtualOS:
cpu = Processor()
scheduler = None
process_pool = None
process_dict = None
total_process_time = 0
cpu_busy = 0
clock = 0
def __init__(self, scheduler: module):
self.scheduler = scheduler
self.process_pool = []
self.process_dict = {}
pass
def load_processes(self, processes):
for process in processes:
self.load_process(process)
def load_process(self, process): # 所有process加入待处理字典
if not self.process_dict.get(process.pid) is None:
raise KeyError("Duplicate PID")
heapq.heappush(self.process_pool, [process.arrival_time + 0.0000001 * self.get_process_num(), process])
self.process_dict[process.pid] = process
def size_of_pool(self):
return len(self.process_pool)
def next_coming_process(self):
return self.process_pool[0][1]
def _init_processes(self):
for _, process in self.process_pool:
process.remain_time = process.process_time
self.total_process_time += process.process_time
process.status = "undefined"
process.history = []
def _time_pulse(self, clock):
for _, process in self.process_pool:
process.history.append("ready" if process.arrival_time <= clock else "undefined")
if process.remain_time == 0:
process.history[-1] = "Done"
if process.arrival_time == clock: # 判断是否有已经就绪者,加入就绪
self.scheduler.new_process(copy.copy(process))
next_pid = self.scheduler.next_to_run()
if next_pid >= 0:
next_process = self.process_dict[next_pid]
assert next_process.remain_time > 0
self.cpu_busy += self.cpu.run(next_process)
next_process.remain_time -= 1
next_process.history[-1] = "run"
if next_process.remain_time == 0:
self.scheduler.process_done(next_pid)
next_process.finish_time = clock
next_process.total_wait = clock - next_process.arrival_time - next_process.process_time + 1
next_process.turn_around = next_process.total_wait + next_process.process_time
else:
self.cpu.wait(1)
def run_from_start(self, print_history=True):
if self.cpu.is_running():
self.cpu.shutdown()
self.cpu.boot()
self._init_processes()
self.clock = 0
while self.cpu_busy < self.total_process_time:
self._time_pulse(self.clock)
self.clock += 1 # 每一次都要加一然后进入time_pulse
if print_history:
self.print_history(self.cpu.history)
self.print_statistic()
return self.cpu.history
def print_history(self, cpu_his):
print("Process History in Detail")
print("-" * (20 + len(self.process_pool) * 10))
print("%6s%10s" % ("clock", "CPU"), end="")
for _, process in self.process_pool:
print("%10s" % process.pid, end="")
else:
print("")
for clock in range(self.clock):
print("%4d" % clock, end=' ')
print("%10s" % cpu_his[clock], end="")
for _, process in self.process_pool:
print("%10s" % process.history[clock], end="")
else:
print("")
else:
print("")
def _print_attr_foreach_process(self, title: str, attr: str):
print("%15s" % title, end="")
for _, process in self.process_pool:
print("%10s" % process.__getattribute__(attr), end="")
else:
print("")
def _print_single_stat(self, title: str, data: float):
print("%15s%10f" % (title, data))
def print_statistic(self):
print("Process Statistics in Detail")
print("-" * (20 + len(self.process_pool) * 10))
self._print_attr_foreach_process("stat", "pid")
self._print_attr_foreach_process("arrival time", "arrival_time")
self._print_attr_foreach_process("process time", "process_time")
self._print_attr_foreach_process("total wait", "total_wait")
self._print_attr_foreach_process("turn around", "turn_around")
self._print_single_stat("avg wait", self.get_avg_wait())
self._print_single_stat("avg turn around", self.get_avg_turn_around())
def get_total_wait(self):
total_wait = 0
for _, process in self.process_pool:
total_wait += process.total_wait
return total_wait
def get_process_num(self):
return len(self.process_pool)
def get_avg_wait(self):
return self.get_total_wait() / self.get_process_num()
def get_avg_turn_around(self):
total_turn_around= 0
for _, process in self.process_pool:
total_turn_around += process.total_wait + process.process_time
return total_turn_around / self.get_process_num()
|
zh
| 0.827203
|
# 所有process加入待处理字典 # 判断是否有已经就绪者,加入就绪 # 每一次都要加一然后进入time_pulse
| 2.592783
| 3
|
main.py
|
VSBoiko/python-basics
| 0
|
6626877
|
<reponame>VSBoiko/python-basics<gh_stars>0
from math import inf
from itertools import pairwise
class Point:
"""
Класс для представление точки на координатной плоскости
Атрибуты
x (int): координата по оси X
y (int): координата по оси Y
key (str): ключ (короткое название на латинице)
name (str): название
"""
def __init__(self, x: int, y: int, key: str, name: str):
"""
Устанавливает все необходимые атрибуты для объекта Point
Параметры:
x (int): координата по оси X
y (int): координата по оси Y
key (str): ключ (короткое название на латинице)
name (str): название
"""
self.x: float = x
self.y: float = y
self.key: str = key
self.name: str = name
def __str__(self):
"""
Устанавливает формат вывода объекта Point
"""
return f'({self.x}, {self.y}) - {self.name}'
class Route:
"""
Класс для представление маршрута на координатной плоскости
Атрибуты
start (Point): точка начала маршрута
points (list of Point): список точек маршрута
finish (Point): точка конца маршрута
paths (list of tuple): список всех возможных маршрутов
Методы
calc_min_route():
Рассчитывает минимально возможный маршрут (path) и его длину (dist)
get_full_route():
Возвращает список точек маршрута
get_many_to_many_dist(start_list: list, finish_list: list) -> dict:
Возвращает таблицу с расстоянием из каждой точки списка start_list
в каждую точку списка finish_list
get_one_to_many_dist(start: Point, finish_list: list) -> dict:
Возвращает таблицу с расстоянием из точки start в каждую точку
списка finish_list
static method
get_one_to_one_dist(start: Point, finish: Point) -> float:
Возвращает расстояние из точки start в точку finish
"""
def __init__(self, start: Point, points: list, finish: Point):
"""
Устанавливает все необходимые атрибуты для объекта Route
Параметры:
start (Point): точка начала маршрута
points (list of Point): список точек маршрута
finish (Point): точка конца маршрута
"""
self.start: Point = start
self.finish: Point = finish
self.points: list = []
self.points.extend(points)
self.paths: list = []
def __str__(self):
"""
Устанавливает формат вывода объекта Route
Возвращаемое значение
result (str): строка вывода объекта Route
"""
result = ""
for point in self.get_full_route():
result += f'{point.__str__()}\n'
return result
def calc_min_route(self) -> dict:
"""
Рассчитывает минимально возможный маршрут (path) и его длину (dist)
Возвращаемое значение:
result (dict): словарь с ключами:
- path (tuple): (минимальный маршрут)
- dist (float): (длина маршрут)
"""
points = self.get_full_route()
table_dists = self.get_many_to_many_dist(points, points)
self._create_paths()
min_route_dist: float = inf
min_route_path: tuple = ()
for path in self.paths:
route_dist = 0
prev_point_key = path[0]
for point_key in path:
route_dist += table_dists[prev_point_key][point_key]
prev_point_key = point_key
if min_route_dist > route_dist:
min_route_dist = route_dist
min_route_path = path
min_route: list = []
for point_key in min_route_path:
min_route.append(self._get_point_by_key(point_key))
result = {
"route": min_route,
"route_dist": min_route_dist
}
return result
def get_full_route(self) -> list:
"""
Возвращает список точек маршрута, в котором на 0 индексе - старт маршрута,
а на последнем индексе - финиш маршрута. Остальные точки упорядочены в случайном
порядке
Возвращаемое значение:
full_route (list): список точек маршрута
"""
full_route = [self.start, *self.points, self.finish]
return full_route
def get_many_to_many_dist(self, start_list: list, finish_list: list) -> dict:
"""
Возвращает таблицу с расстоянием из каждой точки списка start_list
в каждую точку списка finish_list
Параметры:
start_list (list): список точек "откуда"
finish_list (list): список точек "куда"
Возвращаемое значение:
table_dists (dict): словарь словарей, содержащий расстояния из точек
списка start_list в точки списка finish_list
"""
table_dists = dict()
for start in start_list:
table_dists[start.key] = self.get_one_to_many_dist(start, finish_list)
return table_dists
def get_one_to_many_dist(self, start: Point, finish_list: list) -> dict:
"""
Возвращает таблицу с расстоянием из точки start в каждую точку
списка finish_list
Параметры:
start (Point): точка "откуда"
finish_list (list): список точек "куда"
Возвращаемое значение:
table_dists (dict): словарь, содержащий расстояния из точки
start в точки списка finish_list
"""
dict_dists = dict()
for finish in finish_list:
dict_dists[finish.key] = self.get_one_to_one_dist(start, finish)
return dict_dists
@staticmethod
def get_one_to_one_dist(start: Point, finish: Point) -> float:
"""
Возвращает расстояние из точки start в точку finish
Параметры:
start (Point): точка "откуда"
finish (Point): точка "куда"
Возвращаемое значение:
dist (float): расстояние из точки start в точку finish,
округленное до сотых
"""
diff_x = finish.x - start.x
diff_y = finish.y - start.y
dist = round((diff_x ** 2 + diff_y ** 2) ** 0.5, 2)
return dist
def _add_path(self, new_path: list):
"""
Добавляет новый маршрут в атрибут paths (список всех
возможных маршрутов)
Параметры:
new_path (list): список точек маршрута без точек
старта и финиша
"""
path = [self.start.key, *new_path, self.finish.key]
self.paths.append(path)
def _create_paths(self):
"""
Создает все возможные маршруты с помощью вызова
метода _heap_permutation()
"""
points_keys = list()
for point in self.points:
points_keys.append(point.key)
self._heap_permutation(points_keys, len(points_keys))
def _get_point_by_key(self, key):
points = self.get_full_route()
for point in points:
if point.key == key:
return point
def _heap_permutation(self, arr: list, arr_len: int):
"""
Создает все возможные комбинации элементов из списка arr
по методу перестановок Хипа
Параметры:
arr (list): список элементов
arr_len(int): количество элементов в списке элементов
"""
k = arr_len
if k == 1:
self._add_path(arr)
else:
for i in range(k):
self._heap_permutation(arr, k - 1)
if k % 2 == 0:
arr[i], arr[k - 1] = arr[k - 1], arr[i]
else:
arr[0], arr[k - 1] = arr[k - 1], arr[0]
points = [Point(0, 2, "po", "Почтовое отделение"),
Point(2, 5, "gr", "<NAME>, 104/25"),
Point(5, 2, "bk", "<NAME>, 221б"),
Point(6, 6, "sa", "<NAME>, 302-бис"),
Point(8, 3, "al", "Вечнозелёная Аллея, 742")]
postman_route = Route(points[0], points[1:], points[0])
min_route = postman_route.calc_min_route()
print(f'Минимальный маршрут (длина {min_route["route_dist"]})')
for point in min_route["route"]:
print(point)
|
from math import inf
from itertools import pairwise
class Point:
"""
Класс для представление точки на координатной плоскости
Атрибуты
x (int): координата по оси X
y (int): координата по оси Y
key (str): ключ (короткое название на латинице)
name (str): название
"""
def __init__(self, x: int, y: int, key: str, name: str):
"""
Устанавливает все необходимые атрибуты для объекта Point
Параметры:
x (int): координата по оси X
y (int): координата по оси Y
key (str): ключ (короткое название на латинице)
name (str): название
"""
self.x: float = x
self.y: float = y
self.key: str = key
self.name: str = name
def __str__(self):
"""
Устанавливает формат вывода объекта Point
"""
return f'({self.x}, {self.y}) - {self.name}'
class Route:
"""
Класс для представление маршрута на координатной плоскости
Атрибуты
start (Point): точка начала маршрута
points (list of Point): список точек маршрута
finish (Point): точка конца маршрута
paths (list of tuple): список всех возможных маршрутов
Методы
calc_min_route():
Рассчитывает минимально возможный маршрут (path) и его длину (dist)
get_full_route():
Возвращает список точек маршрута
get_many_to_many_dist(start_list: list, finish_list: list) -> dict:
Возвращает таблицу с расстоянием из каждой точки списка start_list
в каждую точку списка finish_list
get_one_to_many_dist(start: Point, finish_list: list) -> dict:
Возвращает таблицу с расстоянием из точки start в каждую точку
списка finish_list
static method
get_one_to_one_dist(start: Point, finish: Point) -> float:
Возвращает расстояние из точки start в точку finish
"""
def __init__(self, start: Point, points: list, finish: Point):
"""
Устанавливает все необходимые атрибуты для объекта Route
Параметры:
start (Point): точка начала маршрута
points (list of Point): список точек маршрута
finish (Point): точка конца маршрута
"""
self.start: Point = start
self.finish: Point = finish
self.points: list = []
self.points.extend(points)
self.paths: list = []
def __str__(self):
"""
Устанавливает формат вывода объекта Route
Возвращаемое значение
result (str): строка вывода объекта Route
"""
result = ""
for point in self.get_full_route():
result += f'{point.__str__()}\n'
return result
def calc_min_route(self) -> dict:
"""
Рассчитывает минимально возможный маршрут (path) и его длину (dist)
Возвращаемое значение:
result (dict): словарь с ключами:
- path (tuple): (минимальный маршрут)
- dist (float): (длина маршрут)
"""
points = self.get_full_route()
table_dists = self.get_many_to_many_dist(points, points)
self._create_paths()
min_route_dist: float = inf
min_route_path: tuple = ()
for path in self.paths:
route_dist = 0
prev_point_key = path[0]
for point_key in path:
route_dist += table_dists[prev_point_key][point_key]
prev_point_key = point_key
if min_route_dist > route_dist:
min_route_dist = route_dist
min_route_path = path
min_route: list = []
for point_key in min_route_path:
min_route.append(self._get_point_by_key(point_key))
result = {
"route": min_route,
"route_dist": min_route_dist
}
return result
def get_full_route(self) -> list:
"""
Возвращает список точек маршрута, в котором на 0 индексе - старт маршрута,
а на последнем индексе - финиш маршрута. Остальные точки упорядочены в случайном
порядке
Возвращаемое значение:
full_route (list): список точек маршрута
"""
full_route = [self.start, *self.points, self.finish]
return full_route
def get_many_to_many_dist(self, start_list: list, finish_list: list) -> dict:
"""
Возвращает таблицу с расстоянием из каждой точки списка start_list
в каждую точку списка finish_list
Параметры:
start_list (list): список точек "откуда"
finish_list (list): список точек "куда"
Возвращаемое значение:
table_dists (dict): словарь словарей, содержащий расстояния из точек
списка start_list в точки списка finish_list
"""
table_dists = dict()
for start in start_list:
table_dists[start.key] = self.get_one_to_many_dist(start, finish_list)
return table_dists
def get_one_to_many_dist(self, start: Point, finish_list: list) -> dict:
"""
Возвращает таблицу с расстоянием из точки start в каждую точку
списка finish_list
Параметры:
start (Point): точка "откуда"
finish_list (list): список точек "куда"
Возвращаемое значение:
table_dists (dict): словарь, содержащий расстояния из точки
start в точки списка finish_list
"""
dict_dists = dict()
for finish in finish_list:
dict_dists[finish.key] = self.get_one_to_one_dist(start, finish)
return dict_dists
@staticmethod
def get_one_to_one_dist(start: Point, finish: Point) -> float:
"""
Возвращает расстояние из точки start в точку finish
Параметры:
start (Point): точка "откуда"
finish (Point): точка "куда"
Возвращаемое значение:
dist (float): расстояние из точки start в точку finish,
округленное до сотых
"""
diff_x = finish.x - start.x
diff_y = finish.y - start.y
dist = round((diff_x ** 2 + diff_y ** 2) ** 0.5, 2)
return dist
def _add_path(self, new_path: list):
"""
Добавляет новый маршрут в атрибут paths (список всех
возможных маршрутов)
Параметры:
new_path (list): список точек маршрута без точек
старта и финиша
"""
path = [self.start.key, *new_path, self.finish.key]
self.paths.append(path)
def _create_paths(self):
"""
Создает все возможные маршруты с помощью вызова
метода _heap_permutation()
"""
points_keys = list()
for point in self.points:
points_keys.append(point.key)
self._heap_permutation(points_keys, len(points_keys))
def _get_point_by_key(self, key):
points = self.get_full_route()
for point in points:
if point.key == key:
return point
def _heap_permutation(self, arr: list, arr_len: int):
"""
Создает все возможные комбинации элементов из списка arr
по методу перестановок Хипа
Параметры:
arr (list): список элементов
arr_len(int): количество элементов в списке элементов
"""
k = arr_len
if k == 1:
self._add_path(arr)
else:
for i in range(k):
self._heap_permutation(arr, k - 1)
if k % 2 == 0:
arr[i], arr[k - 1] = arr[k - 1], arr[i]
else:
arr[0], arr[k - 1] = arr[k - 1], arr[0]
points = [Point(0, 2, "po", "Почтовое отделение"),
Point(2, 5, "gr", "<NAME>, 104/25"),
Point(5, 2, "bk", "<NAME>, 221б"),
Point(6, 6, "sa", "<NAME>, 302-бис"),
Point(8, 3, "al", "Вечнозелёная Аллея, 742")]
postman_route = Route(points[0], points[1:], points[0])
min_route = postman_route.calc_min_route()
print(f'Минимальный маршрут (длина {min_route["route_dist"]})')
for point in min_route["route"]:
print(point)
|
ru
| 0.966773
|
Класс для представление точки на координатной плоскости Атрибуты x (int): координата по оси X y (int): координата по оси Y key (str): ключ (короткое название на латинице) name (str): название Устанавливает все необходимые атрибуты для объекта Point Параметры: x (int): координата по оси X y (int): координата по оси Y key (str): ключ (короткое название на латинице) name (str): название Устанавливает формат вывода объекта Point Класс для представление маршрута на координатной плоскости Атрибуты start (Point): точка начала маршрута points (list of Point): список точек маршрута finish (Point): точка конца маршрута paths (list of tuple): список всех возможных маршрутов Методы calc_min_route(): Рассчитывает минимально возможный маршрут (path) и его длину (dist) get_full_route(): Возвращает список точек маршрута get_many_to_many_dist(start_list: list, finish_list: list) -> dict: Возвращает таблицу с расстоянием из каждой точки списка start_list в каждую точку списка finish_list get_one_to_many_dist(start: Point, finish_list: list) -> dict: Возвращает таблицу с расстоянием из точки start в каждую точку списка finish_list static method get_one_to_one_dist(start: Point, finish: Point) -> float: Возвращает расстояние из точки start в точку finish Устанавливает все необходимые атрибуты для объекта Route Параметры: start (Point): точка начала маршрута points (list of Point): список точек маршрута finish (Point): точка конца маршрута Устанавливает формат вывода объекта Route Возвращаемое значение result (str): строка вывода объекта Route Рассчитывает минимально возможный маршрут (path) и его длину (dist) Возвращаемое значение: result (dict): словарь с ключами: - path (tuple): (минимальный маршрут) - dist (float): (длина маршрут) Возвращает список точек маршрута, в котором на 0 индексе - старт маршрута, а на последнем индексе - финиш маршрута. 
Остальные точки упорядочены в случайном порядке Возвращаемое значение: full_route (list): список точек маршрута Возвращает таблицу с расстоянием из каждой точки списка start_list в каждую точку списка finish_list Параметры: start_list (list): список точек "откуда" finish_list (list): список точек "куда" Возвращаемое значение: table_dists (dict): словарь словарей, содержащий расстояния из точек списка start_list в точки списка finish_list Возвращает таблицу с расстоянием из точки start в каждую точку списка finish_list Параметры: start (Point): точка "откуда" finish_list (list): список точек "куда" Возвращаемое значение: table_dists (dict): словарь, содержащий расстояния из точки start в точки списка finish_list Возвращает расстояние из точки start в точку finish Параметры: start (Point): точка "откуда" finish (Point): точка "куда" Возвращаемое значение: dist (float): расстояние из точки start в точку finish, округленное до сотых Добавляет новый маршрут в атрибут paths (список всех возможных маршрутов) Параметры: new_path (list): список точек маршрута без точек старта и финиша Создает все возможные маршруты с помощью вызова метода _heap_permutation() Создает все возможные комбинации элементов из списка arr по методу перестановок Хипа Параметры: arr (list): список элементов arr_len(int): количество элементов в списке элементов
| 3.623612
| 4
|
app/api/domains/__init__.py
|
tsuuki/gulag
| 4
|
6626878
|
<reponame>tsuuki/gulag
from . import api
from . import ava
from . import cho
from . import map
from . import osu
|
from . import api
from . import ava
from . import cho
from . import map
from . import osu
|
none
| 1
| 1.087008
| 1
|
|
config.py
|
SmartNetConf/SmartNetConf
| 1
|
6626879
|
# Builtin config values: http://flask.pocoo.org/docs/0.10/config/
DEBUG = True  # Flask debug mode; should be disabled in production
HOST = '0.0.0.0'  # listen on all network interfaces
PORT = 5000  # HTTP port for the development server
LOGGING_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
LOGGING_LOCATION = 'smartnetconf_flask.log'  # log file path, relative to the working directory
LOGGING_LEVEL = 'DEBUG'  # minimum level emitted to the log
|
# Builtin config values: http://flask.pocoo.org/docs/0.10/config/
DEBUG = True  # Flask debug mode; should be disabled in production
HOST = '0.0.0.0'  # listen on all network interfaces
PORT = 5000  # HTTP port for the development server
LOGGING_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
LOGGING_LOCATION = 'smartnetconf_flask.log'  # log file path, relative to the working directory
LOGGING_LEVEL = 'DEBUG'  # minimum level emitted to the log
|
en
| 0.252248
|
# Builtin config values: http://flask.pocoo.org/docs/0.10/config/
| 1.497754
| 1
|
pandas/io/formats/csvs.py
|
meeseeksmachine/pandas
| 2
|
6626880
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
Module for formatting output data into CSV files.
"""
from __future__ import print_function
import warnings
import csv as csvlib
from zipfile import ZipFile
import numpy as np
from pandas._libs import writers as libwriters
from pandas import compat
from pandas.compat import StringIO, range, zip
from pandas.core.dtypes.missing import notna
from pandas.core.dtypes.generic import (
ABCMultiIndex, ABCPeriodIndex, ABCDatetimeIndex, ABCIndexClass)
from pandas.io.common import (_get_handle, UnicodeWriter, _expand_user,
_stringify_path)
class CSVFormatter(object):
    """Render a pandas object (expects ``.columns``, ``.index``, ``._data``)
    to CSV.

    Construction normalizes all formatting options and pre-slices the data;
    ``save`` opens/creates the target and streams the rows out in chunks.
    """

    def __init__(self, obj, path_or_buf=None, sep=",", na_rep='',
                 float_format=None, cols=None, header=True, index=True,
                 index_label=None, mode='w', nanRep=None, encoding=None,
                 compression=None, quoting=None, line_terminator='\n',
                 chunksize=None, tupleize_cols=False, quotechar='"',
                 date_format=None, doublequote=True, escapechar=None,
                 decimal='.'):
        """Normalize and store all CSV formatting options.

        Rejects ``cols`` together with MultiIndex columns, narrows ``obj``
        to the requested columns, preallocates the per-block row buffer and
        derives a chunk size when none is given.  ``nanRep`` is accepted for
        backward compatibility but unused.
        """
        self.obj = obj
        if path_or_buf is None:
            # No target given: render into an in-memory buffer.
            path_or_buf = StringIO()
        self.path_or_buf = _expand_user(_stringify_path(path_or_buf))
        self.sep = sep
        self.na_rep = na_rep
        self.float_format = float_format
        self.decimal = decimal
        self.header = header
        self.index = index
        self.index_label = index_label
        self.mode = mode
        self.encoding = encoding
        self.compression = compression
        if quoting is None:
            quoting = csvlib.QUOTE_MINIMAL
        self.quoting = quoting
        if quoting == csvlib.QUOTE_NONE:
            # prevents crash in _csv
            quotechar = None
        self.quotechar = quotechar
        self.doublequote = doublequote
        self.escapechar = escapechar
        self.line_terminator = line_terminator
        self.date_format = date_format
        self.tupleize_cols = tupleize_cols
        self.has_mi_columns = (isinstance(obj.columns, ABCMultiIndex) and
                               not self.tupleize_cols)
        # validate mi options
        if self.has_mi_columns:
            if cols is not None:
                raise TypeError("cannot specify cols with a MultiIndex on the "
                                "columns")
        if cols is not None:
            if isinstance(cols, ABCIndexClass):
                cols = cols.to_native_types(na_rep=na_rep,
                                            float_format=float_format,
                                            date_format=date_format,
                                            quoting=self.quoting)
            else:
                cols = list(cols)
            self.obj = self.obj.loc[:, cols]
        # update columns to include possible multiplicity of dupes
        # and make sure sure cols is just a list of labels
        cols = self.obj.columns
        if isinstance(cols, ABCIndexClass):
            cols = cols.to_native_types(na_rep=na_rep,
                                        float_format=float_format,
                                        date_format=date_format,
                                        quoting=self.quoting)
        else:
            cols = list(cols)
        # save it
        self.cols = cols
        # preallocate data 2d list
        self.blocks = self.obj._data.blocks
        ncols = sum(b.shape[0] for b in self.blocks)
        self.data = [None] * ncols
        if chunksize is None:
            # Default: roughly 100k cells per chunk, at least one row.
            chunksize = (100000 // (len(self.cols) or 1)) or 1
        self.chunksize = int(chunksize)
        self.data_index = obj.index
        if (isinstance(self.data_index, (ABCDatetimeIndex, ABCPeriodIndex)) and
                date_format is not None):
            # Pre-render datetime-like index values once, up front.
            from pandas import Index
            self.data_index = Index([x.strftime(date_format) if notna(x) else
                                     '' for x in self.data_index])
        self.nlevels = getattr(self.data_index, 'nlevels', 1)
        if not index:
            self.nlevels = 0

    def save(self):
        """Open/resolve the output target (path, file-like object, or an
        in-memory buffer for zip archives), build the csv writer and write
        everything, closing any handles this method itself opened.
        """
        # create the writer & save
        if self.encoding is None:
            if compat.PY2:
                encoding = 'ascii'
            else:
                encoding = 'utf-8'
        else:
            encoding = self.encoding
        # GH 21227 internal compression is not used when file-like passed.
        if self.compression and hasattr(self.path_or_buf, 'write'):
            msg = ("compression has no effect when passing file-like "
                   "object as input.")
            warnings.warn(msg, RuntimeWarning, stacklevel=2)
        # when zip compression is called.
        is_zip = isinstance(self.path_or_buf, ZipFile) or (
            not hasattr(self.path_or_buf, 'write')
            and self.compression == 'zip')
        if is_zip:
            # zipfile doesn't support writing string to archive. uses string
            # buffer to receive csv writing and dump into zip compression
            # file handle. GH 21241, 21118
            f = StringIO()
            close = False
        elif hasattr(self.path_or_buf, 'write'):
            f = self.path_or_buf
            close = False
        else:
            f, handles = _get_handle(self.path_or_buf, self.mode,
                                     encoding=encoding,
                                     compression=self.compression)
            close = True
        try:
            writer_kwargs = dict(lineterminator=self.line_terminator,
                                 delimiter=self.sep, quoting=self.quoting,
                                 doublequote=self.doublequote,
                                 escapechar=self.escapechar,
                                 quotechar=self.quotechar)
            if encoding == 'ascii':
                self.writer = csvlib.writer(f, **writer_kwargs)
            else:
                writer_kwargs['encoding'] = encoding
                self.writer = UnicodeWriter(f, **writer_kwargs)
            self._save()
        finally:
            if is_zip:
                # GH 17778 handles zip compression separately.
                buf = f.getvalue()
                if hasattr(self.path_or_buf, 'write'):
                    self.path_or_buf.write(buf)
                else:
                    f, handles = _get_handle(self.path_or_buf, self.mode,
                                             encoding=encoding,
                                             compression=self.compression)
                    f.write(buf)
                    close = True
            if close:
                f.close()
                for _fh in handles:
                    _fh.close()

    def _save_header(self):
        """Write the header row(s): aliases or column labels for a flat
        index, or one row per level for MultiIndex columns (plus a trailing
        index-label row when it carries information).
        """
        writer = self.writer
        obj = self.obj
        index_label = self.index_label
        cols = self.cols
        has_mi_columns = self.has_mi_columns
        header = self.header
        encoded_labels = []
        has_aliases = isinstance(header, (tuple, list, np.ndarray,
                                          ABCIndexClass))
        if not (has_aliases or self.header):
            return
        if has_aliases:
            if len(header) != len(cols):
                raise ValueError(('Writing {ncols} cols but got {nalias} '
                                  'aliases'.format(ncols=len(cols),
                                                   nalias=len(header))))
            else:
                write_cols = header
        else:
            write_cols = cols
        if self.index:
            # should write something for index label
            if index_label is not False:
                if index_label is None:
                    if isinstance(obj.index, ABCMultiIndex):
                        index_label = []
                        for i, name in enumerate(obj.index.names):
                            if name is None:
                                name = ''
                            index_label.append(name)
                    else:
                        index_label = obj.index.name
                        if index_label is None:
                            index_label = ['']
                        else:
                            index_label = [index_label]
                elif not isinstance(index_label,
                                    (list, tuple, np.ndarray, ABCIndexClass)):
                    # given a string for a DF with Index
                    index_label = [index_label]
                encoded_labels = list(index_label)
            else:
                encoded_labels = []
        if not has_mi_columns or has_aliases:
            encoded_labels += list(write_cols)
            writer.writerow(encoded_labels)
        else:
            # write out the mi
            columns = obj.columns
            # write out the names for each level, then ALL of the values for
            # each level
            for i in range(columns.nlevels):
                # we need at least 1 index column to write our col names
                col_line = []
                if self.index:
                    # name is the first column
                    col_line.append(columns.names[i])
                    if isinstance(index_label, list) and len(index_label) > 1:
                        col_line.extend([''] * (len(index_label) - 1))
                col_line.extend(columns._get_level_values(i))
                writer.writerow(col_line)
            # Write out the index line if it's not empty.
            # Otherwise, we will print out an extraneous
            # blank line between the mi and the data rows.
            if encoded_labels and set(encoded_labels) != set(['']):
                encoded_labels.extend([''] * len(columns))
                writer.writerow(encoded_labels)

    def _save(self):
        """Write the header, then all rows in chunks of ``self.chunksize``."""
        self._save_header()
        nrows = len(self.data_index)
        # write in chunksize bites
        chunksize = self.chunksize
        chunks = int(nrows / chunksize) + 1
        for i in range(chunks):
            start_i = i * chunksize
            end_i = min((i + 1) * chunksize, nrows)
            if start_i >= end_i:
                break
            self._save_chunk(start_i, end_i)

    def _save_chunk(self, start_i, end_i):
        """Convert rows ``[start_i, end_i)`` of every block to native string
        values and emit them through the C-level csv row writer.
        """
        data_index = self.data_index
        # create the data for a chunk
        slicer = slice(start_i, end_i)
        for i in range(len(self.blocks)):
            b = self.blocks[i]
            d = b.to_native_types(slicer=slicer, na_rep=self.na_rep,
                                  float_format=self.float_format,
                                  decimal=self.decimal,
                                  date_format=self.date_format,
                                  quoting=self.quoting)
            for col_loc, col in zip(b.mgr_locs, d):
                # self.data is a preallocated list
                self.data[col_loc] = col
        ix = data_index.to_native_types(slicer=slicer, na_rep=self.na_rep,
                                        float_format=self.float_format,
                                        decimal=self.decimal,
                                        date_format=self.date_format,
                                        quoting=self.quoting)
        libwriters.write_csv_rows(self.data, ix, self.nlevels,
                                  self.cols, self.writer)
|
# -*- coding: utf-8 -*-
"""
Module for formatting output data into CSV files.
"""
from __future__ import print_function
import warnings
import csv as csvlib
from zipfile import ZipFile
import numpy as np
from pandas._libs import writers as libwriters
from pandas import compat
from pandas.compat import StringIO, range, zip
from pandas.core.dtypes.missing import notna
from pandas.core.dtypes.generic import (
ABCMultiIndex, ABCPeriodIndex, ABCDatetimeIndex, ABCIndexClass)
from pandas.io.common import (_get_handle, UnicodeWriter, _expand_user,
_stringify_path)
class CSVFormatter(object):
    """Render a pandas object (expects ``.columns``, ``.index``, ``._data``)
    to CSV.

    Construction normalizes all formatting options and pre-slices the data;
    ``save`` opens/creates the target and streams the rows out in chunks.
    """

    def __init__(self, obj, path_or_buf=None, sep=",", na_rep='',
                 float_format=None, cols=None, header=True, index=True,
                 index_label=None, mode='w', nanRep=None, encoding=None,
                 compression=None, quoting=None, line_terminator='\n',
                 chunksize=None, tupleize_cols=False, quotechar='"',
                 date_format=None, doublequote=True, escapechar=None,
                 decimal='.'):
        """Normalize and store all CSV formatting options.

        Rejects ``cols`` together with MultiIndex columns, narrows ``obj``
        to the requested columns, preallocates the per-block row buffer and
        derives a chunk size when none is given.  ``nanRep`` is accepted for
        backward compatibility but unused.
        """
        self.obj = obj
        if path_or_buf is None:
            # No target given: render into an in-memory buffer.
            path_or_buf = StringIO()
        self.path_or_buf = _expand_user(_stringify_path(path_or_buf))
        self.sep = sep
        self.na_rep = na_rep
        self.float_format = float_format
        self.decimal = decimal
        self.header = header
        self.index = index
        self.index_label = index_label
        self.mode = mode
        self.encoding = encoding
        self.compression = compression
        if quoting is None:
            quoting = csvlib.QUOTE_MINIMAL
        self.quoting = quoting
        if quoting == csvlib.QUOTE_NONE:
            # prevents crash in _csv
            quotechar = None
        self.quotechar = quotechar
        self.doublequote = doublequote
        self.escapechar = escapechar
        self.line_terminator = line_terminator
        self.date_format = date_format
        self.tupleize_cols = tupleize_cols
        self.has_mi_columns = (isinstance(obj.columns, ABCMultiIndex) and
                               not self.tupleize_cols)
        # validate mi options
        if self.has_mi_columns:
            if cols is not None:
                raise TypeError("cannot specify cols with a MultiIndex on the "
                                "columns")
        if cols is not None:
            if isinstance(cols, ABCIndexClass):
                cols = cols.to_native_types(na_rep=na_rep,
                                            float_format=float_format,
                                            date_format=date_format,
                                            quoting=self.quoting)
            else:
                cols = list(cols)
            self.obj = self.obj.loc[:, cols]
        # update columns to include possible multiplicity of dupes
        # and make sure sure cols is just a list of labels
        cols = self.obj.columns
        if isinstance(cols, ABCIndexClass):
            cols = cols.to_native_types(na_rep=na_rep,
                                        float_format=float_format,
                                        date_format=date_format,
                                        quoting=self.quoting)
        else:
            cols = list(cols)
        # save it
        self.cols = cols
        # preallocate data 2d list
        self.blocks = self.obj._data.blocks
        ncols = sum(b.shape[0] for b in self.blocks)
        self.data = [None] * ncols
        if chunksize is None:
            # Default: roughly 100k cells per chunk, at least one row.
            chunksize = (100000 // (len(self.cols) or 1)) or 1
        self.chunksize = int(chunksize)
        self.data_index = obj.index
        if (isinstance(self.data_index, (ABCDatetimeIndex, ABCPeriodIndex)) and
                date_format is not None):
            # Pre-render datetime-like index values once, up front.
            from pandas import Index
            self.data_index = Index([x.strftime(date_format) if notna(x) else
                                     '' for x in self.data_index])
        self.nlevels = getattr(self.data_index, 'nlevels', 1)
        if not index:
            self.nlevels = 0

    def save(self):
        """Open/resolve the output target (path, file-like object, or an
        in-memory buffer for zip archives), build the csv writer and write
        everything, closing any handles this method itself opened.
        """
        # create the writer & save
        if self.encoding is None:
            if compat.PY2:
                encoding = 'ascii'
            else:
                encoding = 'utf-8'
        else:
            encoding = self.encoding
        # GH 21227 internal compression is not used when file-like passed.
        if self.compression and hasattr(self.path_or_buf, 'write'):
            msg = ("compression has no effect when passing file-like "
                   "object as input.")
            warnings.warn(msg, RuntimeWarning, stacklevel=2)
        # when zip compression is called.
        is_zip = isinstance(self.path_or_buf, ZipFile) or (
            not hasattr(self.path_or_buf, 'write')
            and self.compression == 'zip')
        if is_zip:
            # zipfile doesn't support writing string to archive. uses string
            # buffer to receive csv writing and dump into zip compression
            # file handle. GH 21241, 21118
            f = StringIO()
            close = False
        elif hasattr(self.path_or_buf, 'write'):
            f = self.path_or_buf
            close = False
        else:
            f, handles = _get_handle(self.path_or_buf, self.mode,
                                     encoding=encoding,
                                     compression=self.compression)
            close = True
        try:
            writer_kwargs = dict(lineterminator=self.line_terminator,
                                 delimiter=self.sep, quoting=self.quoting,
                                 doublequote=self.doublequote,
                                 escapechar=self.escapechar,
                                 quotechar=self.quotechar)
            if encoding == 'ascii':
                self.writer = csvlib.writer(f, **writer_kwargs)
            else:
                writer_kwargs['encoding'] = encoding
                self.writer = UnicodeWriter(f, **writer_kwargs)
            self._save()
        finally:
            if is_zip:
                # GH 17778 handles zip compression separately.
                buf = f.getvalue()
                if hasattr(self.path_or_buf, 'write'):
                    self.path_or_buf.write(buf)
                else:
                    f, handles = _get_handle(self.path_or_buf, self.mode,
                                             encoding=encoding,
                                             compression=self.compression)
                    f.write(buf)
                    close = True
            if close:
                f.close()
                for _fh in handles:
                    _fh.close()

    def _save_header(self):
        """Write the header row(s): aliases or column labels for a flat
        index, or one row per level for MultiIndex columns (plus a trailing
        index-label row when it carries information).
        """
        writer = self.writer
        obj = self.obj
        index_label = self.index_label
        cols = self.cols
        has_mi_columns = self.has_mi_columns
        header = self.header
        encoded_labels = []
        has_aliases = isinstance(header, (tuple, list, np.ndarray,
                                          ABCIndexClass))
        if not (has_aliases or self.header):
            return
        if has_aliases:
            if len(header) != len(cols):
                raise ValueError(('Writing {ncols} cols but got {nalias} '
                                  'aliases'.format(ncols=len(cols),
                                                   nalias=len(header))))
            else:
                write_cols = header
        else:
            write_cols = cols
        if self.index:
            # should write something for index label
            if index_label is not False:
                if index_label is None:
                    if isinstance(obj.index, ABCMultiIndex):
                        index_label = []
                        for i, name in enumerate(obj.index.names):
                            if name is None:
                                name = ''
                            index_label.append(name)
                    else:
                        index_label = obj.index.name
                        if index_label is None:
                            index_label = ['']
                        else:
                            index_label = [index_label]
                elif not isinstance(index_label,
                                    (list, tuple, np.ndarray, ABCIndexClass)):
                    # given a string for a DF with Index
                    index_label = [index_label]
                encoded_labels = list(index_label)
            else:
                encoded_labels = []
        if not has_mi_columns or has_aliases:
            encoded_labels += list(write_cols)
            writer.writerow(encoded_labels)
        else:
            # write out the mi
            columns = obj.columns
            # write out the names for each level, then ALL of the values for
            # each level
            for i in range(columns.nlevels):
                # we need at least 1 index column to write our col names
                col_line = []
                if self.index:
                    # name is the first column
                    col_line.append(columns.names[i])
                    if isinstance(index_label, list) and len(index_label) > 1:
                        col_line.extend([''] * (len(index_label) - 1))
                col_line.extend(columns._get_level_values(i))
                writer.writerow(col_line)
            # Write out the index line if it's not empty.
            # Otherwise, we will print out an extraneous
            # blank line between the mi and the data rows.
            if encoded_labels and set(encoded_labels) != set(['']):
                encoded_labels.extend([''] * len(columns))
                writer.writerow(encoded_labels)

    def _save(self):
        """Write the header, then all rows in chunks of ``self.chunksize``."""
        self._save_header()
        nrows = len(self.data_index)
        # write in chunksize bites
        chunksize = self.chunksize
        chunks = int(nrows / chunksize) + 1
        for i in range(chunks):
            start_i = i * chunksize
            end_i = min((i + 1) * chunksize, nrows)
            if start_i >= end_i:
                break
            self._save_chunk(start_i, end_i)

    def _save_chunk(self, start_i, end_i):
        """Convert rows ``[start_i, end_i)`` of every block to native string
        values and emit them through the C-level csv row writer.
        """
        data_index = self.data_index
        # create the data for a chunk
        slicer = slice(start_i, end_i)
        for i in range(len(self.blocks)):
            b = self.blocks[i]
            d = b.to_native_types(slicer=slicer, na_rep=self.na_rep,
                                  float_format=self.float_format,
                                  decimal=self.decimal,
                                  date_format=self.date_format,
                                  quoting=self.quoting)
            for col_loc, col in zip(b.mgr_locs, d):
                # self.data is a preallocated list
                self.data[col_loc] = col
        ix = data_index.to_native_types(slicer=slicer, na_rep=self.na_rep,
                                        float_format=self.float_format,
                                        decimal=self.decimal,
                                        date_format=self.date_format,
                                        quoting=self.quoting)
        libwriters.write_csv_rows(self.data, ix, self.nlevels,
                                  self.cols, self.writer)
|
en
| 0.815127
|
# -*- coding: utf-8 -*- Module for formatting output data into CSV files. # prevents crash in _csv # validate mi options # update columns to include possible multiplicity of dupes # and make sure sure cols is just a list of labels # save it # preallocate data 2d list # create the writer & save # GH 21227 internal compression is not used when file-like passed. # when zip compression is called. # zipfile doesn't support writing string to archive. uses string # buffer to receive csv writing and dump into zip compression # file handle. GH 21241, 21118 # GH 17778 handles zip compression separately. # should write something for index label # given a string for a DF with Index # write out the mi # write out the names for each level, then ALL of the values for # each level # we need at least 1 index column to write our col names # name is the first column # Write out the index line if it's not empty. # Otherwise, we will print out an extraneous # blank line between the mi and the data rows. # write in chunksize bites # create the data for a chunk # self.data is a preallocated list
| 2.706453
| 3
|
cycli/buffer.py
|
erayon/cycli
| 290
|
6626881
|
from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class UserWantsOut(Exception):
    """Marker exception signalling that the user asked to leave the prompt."""
    pass
class CypherBuffer(Buffer):
    """A prompt_toolkit Buffer that keeps accepting lines until the input
    looks finished (ends with ';' or a newline, or is a known command)."""

    def __init__(self, *args, **kwargs):
        @Condition
        def is_multiline():
            # Stay in multiline mode while the user hasn't finished typing.
            return not self.user_wants_out(self.document.text)
        super(self.__class__, self).__init__(*args, is_multiline=is_multiline, **kwargs)

    def user_wants_out(self, text):
        """Return True when *text* should be submitted as-is."""
        single_line_commands = {
            "quit",
            "exit",
            "help",
            "refresh",
            "schema",
            "schema-constraints",
            "schema-indexes",
            "schema-labels",
            "schema-rels",
        }
        if text.endswith((";", "\n")):
            return True
        if text in single_line_commands:
            return True
        return text.startswith(("env", "export "))
|
from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class UserWantsOut(Exception):
    """Marker exception signalling that the user asked to leave the prompt."""
    pass
class CypherBuffer(Buffer):
    """A prompt_toolkit Buffer that keeps accepting lines until the input
    looks finished (ends with ';' or a newline, or is a known command)."""

    def __init__(self, *args, **kwargs):
        @Condition
        def is_multiline():
            # Stay in multiline mode while the user hasn't finished typing.
            return not self.user_wants_out(self.document.text)
        super(self.__class__, self).__init__(*args, is_multiline=is_multiline, **kwargs)

    def user_wants_out(self, text):
        """Return True when *text* should be submitted as-is."""
        single_line_commands = {
            "quit",
            "exit",
            "help",
            "refresh",
            "schema",
            "schema-constraints",
            "schema-indexes",
            "schema-labels",
            "schema-rels",
        }
        if text.endswith((";", "\n")):
            return True
        if text in single_line_commands:
            return True
        return text.startswith(("env", "export "))
|
none
| 1
| 2.426115
| 2
|
|
src/attributes/osm.py
|
Jugendhackt/SMArt
| 4
|
6626882
|
<reponame>Jugendhackt/SMArt
import datetime
from math import radians, cos, sin, asin, sqrt
import requests
class OSM:
    """Query OpenStreetMap's Overpass API for shops near a coordinate."""

    def __init__(self):
        # Public Overpass endpoint used for all queries.
        self.OVERPASS_URL = "https://lz4.overpass-api.de/api/interpreter"
        self.OSM_ID = 0

    def get_data(self, attribute: str, req_time: int, lat: float, lon: float, distance: int) -> list:
        """Return shop nodes matching *attribute* within *distance* km of
        (lat, lon), sorted by ascending distance.

        ``req_time`` is currently unused (the opening-hours check below is
        disabled).  Note the Overpass query still uses a hardcoded bounding
        box; the computed box is only printed for debugging.
        """
        # self.check_date('timestring', req_time//1000)
        lat = float(lat)
        lon = float(lon)
        d = 2  # km half-width of the (informational) bounding box
        # NOTE(review): cos() expects radians but receives degrees here; the
        # box is only printed, never used in the query — confirm before use.
        lat1 = lat + d * (1 / 110.574)
        lon1 = lon + d * (1 / (111.320 * cos(lat)))
        lat0 = lat - d * (1 / 110.574)
        lon0 = lon - d * (1 / (111.320 * cos(lat)))
        print(lat0, lon0, lat1, lon1)
        overpass_query = f'''
        [out:json][timeout:25];
        (
        node["shop"="{OSM.get_parent_class(attribute)}"](48.289,9.803,48.474,10.26);
        );
        out body;
        '''
        response = requests.post(self.OVERPASS_URL, {'data': overpass_query})
        results = response.json()["elements"]
        # BUG FIX: the original `del result` only unbound the loop variable
        # and never removed anything from `results`, and the distance test
        # was inverted (in-range hits were the ones "deleted").  Build an
        # explicit list of valid, in-range results instead.
        nearby = []
        for result in results:
            if isinstance(result.get("lon"), float) and isinstance(result.get("lat"), float):
                result["lng"] = result["lon"]
                result["distance"] = self.haversine(lon, lat, result["lon"], result["lat"])
                if result["distance"] <= distance:
                    nearby.append(result)
        nearby.sort(key=lambda x: x["distance"])
        return nearby

    @staticmethod
    def check_date(date_string: str, req_time: int) -> bool:
        """Partial opening-hours check (OSM ``opening_hours`` syntax).

        NOTE(review): incomplete — only the first entry is inspected, the
        parsed weekday/hours are ignored, and the result is simply whether
        that entry names a day *range* (e.g. "Mo-Fr").
        """
        date = datetime.datetime.fromtimestamp(req_time)  # currently unused
        days = ['Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa', 'Su']  # currently unused
        opening_entries = date_string.split(';')
        for entry in opening_entries:
            split_days, split_hours = entry.split(' ')
            if len(split_days) > 2:
                d1, d2 = split_days.split('-')
                return True
            else:
                return False

    @staticmethod
    def haversine(lon0, lat0, lon1, lat1):
        """Great-circle distance in km between two (lon, lat) points."""
        lon0, lat0, lon1, lat1 = map(radians, [lon0, lat0, lon1, lat1])
        dlon = lon1 - lon0
        dlat = lat1 - lat0
        a = sin(dlat / 2) ** 2 + cos(lat0) * cos(lat1) * sin(dlon / 2) ** 2
        c = 2 * asin(sqrt(a))
        r = 6371  # mean Earth radius in km
        return c * r

    @staticmethod
    def get_parent_class(attribute):
        """Map a product keyword (case-insensitive) to its OSM shop class.

        Raises KeyError for unknown attributes.
        """
        return {
            "bread": "bakery",
            "banana": "supermarket",
            "charging_cable": "mobile_phone",
        }[attribute.lower()]
|
import datetime
from math import radians, cos, sin, asin, sqrt
import requests
class OSM:
    """Query OpenStreetMap's Overpass API for shops near a coordinate."""

    def __init__(self):
        # Public Overpass endpoint used for all queries.
        self.OVERPASS_URL = "https://lz4.overpass-api.de/api/interpreter"
        self.OSM_ID = 0

    def get_data(self, attribute: str, req_time: int, lat: float, lon: float, distance: int) -> list:
        """Return shop nodes matching *attribute* within *distance* km of
        (lat, lon), sorted by ascending distance.

        ``req_time`` is currently unused (the opening-hours check below is
        disabled).  Note the Overpass query still uses a hardcoded bounding
        box; the computed box is only printed for debugging.
        """
        # self.check_date('timestring', req_time//1000)
        lat = float(lat)
        lon = float(lon)
        d = 2  # km half-width of the (informational) bounding box
        # NOTE(review): cos() expects radians but receives degrees here; the
        # box is only printed, never used in the query — confirm before use.
        lat1 = lat + d * (1 / 110.574)
        lon1 = lon + d * (1 / (111.320 * cos(lat)))
        lat0 = lat - d * (1 / 110.574)
        lon0 = lon - d * (1 / (111.320 * cos(lat)))
        print(lat0, lon0, lat1, lon1)
        overpass_query = f'''
        [out:json][timeout:25];
        (
        node["shop"="{OSM.get_parent_class(attribute)}"](48.289,9.803,48.474,10.26);
        );
        out body;
        '''
        response = requests.post(self.OVERPASS_URL, {'data': overpass_query})
        results = response.json()["elements"]
        # BUG FIX: the original `del result` only unbound the loop variable
        # and never removed anything from `results`, and the distance test
        # was inverted (in-range hits were the ones "deleted").  Build an
        # explicit list of valid, in-range results instead.
        nearby = []
        for result in results:
            if isinstance(result.get("lon"), float) and isinstance(result.get("lat"), float):
                result["lng"] = result["lon"]
                result["distance"] = self.haversine(lon, lat, result["lon"], result["lat"])
                if result["distance"] <= distance:
                    nearby.append(result)
        nearby.sort(key=lambda x: x["distance"])
        return nearby

    @staticmethod
    def check_date(date_string: str, req_time: int) -> bool:
        """Partial opening-hours check (OSM ``opening_hours`` syntax).

        NOTE(review): incomplete — only the first entry is inspected, the
        parsed weekday/hours are ignored, and the result is simply whether
        that entry names a day *range* (e.g. "Mo-Fr").
        """
        date = datetime.datetime.fromtimestamp(req_time)  # currently unused
        days = ['Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa', 'Su']  # currently unused
        opening_entries = date_string.split(';')
        for entry in opening_entries:
            split_days, split_hours = entry.split(' ')
            if len(split_days) > 2:
                d1, d2 = split_days.split('-')
                return True
            else:
                return False

    @staticmethod
    def haversine(lon0, lat0, lon1, lat1):
        """Great-circle distance in km between two (lon, lat) points."""
        lon0, lat0, lon1, lat1 = map(radians, [lon0, lat0, lon1, lat1])
        dlon = lon1 - lon0
        dlat = lat1 - lat0
        a = sin(dlat / 2) ** 2 + cos(lat0) * cos(lat1) * sin(dlon / 2) ** 2
        c = 2 * asin(sqrt(a))
        r = 6371  # mean Earth radius in km
        return c * r

    @staticmethod
    def get_parent_class(attribute):
        """Map a product keyword (case-insensitive) to its OSM shop class.

        Raises KeyError for unknown attributes.
        """
        return {
            "bread": "bakery",
            "banana": "supermarket",
            "charging_cable": "mobile_phone",
        }[attribute.lower()]
|
en
| 0.282085
|
# self.check_date('timestring', req_time//1000) # km [out:json][timeout:25]; ( node["shop"="{OSM.get_parent_class(attribute)}"](48.289,9.803,48.474,10.26); ); out body;
| 3.157907
| 3
|
interp_Pwhile.py
|
IUCompilerCourse/python-student-support-code
| 3
|
6626883
|
<reponame>IUCompilerCourse/python-student-support-code
from ast import *
from interp_Pif import InterpPif
from utils import *
class InterpPwhile(InterpPif):
    """Interpreter for the Pwhile language: Pif extended with `while`."""

    def interp_stmts(self, ss, env):
        """Execute the statement list *ss* in *env*; handle `while` here
        and delegate every other statement form to InterpPif."""
        if not ss:
            return
        head, rest = ss[0], ss[1:]
        if isinstance(head, While) and not head.orelse:
            # Re-evaluate the loop test before each iteration.
            while self.interp_exp(head.test, env):
                self.interp_stmts(head.body, env)
            return self.interp_stmts(rest, env)
        return super().interp_stmts(ss, env)
|
from ast import *
from interp_Pif import InterpPif
from utils import *
class InterpPwhile(InterpPif):
    """Interpreter for the Pwhile language: Pif extended with `while`."""

    def interp_stmts(self, ss, env):
        """Execute the statement list *ss* in *env*; handle `while` here
        and delegate every other statement form to InterpPif."""
        if not ss:
            return
        head, rest = ss[0], ss[1:]
        if isinstance(head, While) and not head.orelse:
            # Re-evaluate the loop test before each iteration.
            while self.interp_exp(head.test, env):
                self.interp_stmts(head.body, env)
            return self.interp_stmts(rest, env)
        return super().interp_stmts(ss, env)
|
none
| 1
| 2.365899
| 2
|
|
src/__init__.py
|
JordanRex/YAAML
| 1
|
6626884
|
# yaaml __init__.py
__version__ = '0.0.5'
__author__ = 'varunrajan'
__name__ = 'yaaml'
__org__ = '...'
# importing packages
import os as os
# the base packages
import collections # for the Counter function
import csv # for reading/writing csv files
import pandas as pd, numpy as np, time as time, gc as gc, bisect as bisect, re as re
import datetime as dt
# Evaluation of the model
from sklearn import model_selection
from sklearn.model_selection import KFold, StratifiedKFold
from sklearn.metrics import roc_auc_score, recall_score, precision_score, accuracy_score, confusion_matrix, f1_score
from datetime import timedelta
from sklearn.model_selection import train_test_split, cross_val_score, StratifiedKFold, StratifiedShuffleSplit
from sklearn import metrics, preprocessing
from sklearn.base import TransformerMixin
from sklearn.utils import shuffle
from sklearn.preprocessing import LabelEncoder
# hyperopt modules
from bayes_opt import BayesianOptimization
from tqdm import tqdm
from hyperopt import hp, tpe, STATUS_OK, fmin, Trials, space_eval
from hyperopt.fmin import fmin
from hyperopt.pyll.stochastic import sample
# modelling/clustering algorithms
# import xgboost as xgb
# import lightgbm as lgb
# from sklearn.covariance import EllipticEnvelope
# from sklearn.ensemble import IsolationForest
# from sklearn.svm import OneClassSVM
# main modules from root same directory
import helper_funcs as helpers
import miss_imputation as missimp
import encoding as encoders
import feature_engineering as feateng
import feature_selection as featsel
from sampling import sampler
# call the main script
import main
|
# yaaml __init__.py
__version__ = '0.0.5'
__author__ = 'varunrajan'
__name__ = 'yaaml'
__org__ = '...'
# importing packages
import os as os
# the base packages
import collections # for the Counter function
import csv # for reading/writing csv files
import pandas as pd, numpy as np, time as time, gc as gc, bisect as bisect, re as re
import datetime as dt
# Evaluation of the model
from sklearn import model_selection
from sklearn.model_selection import KFold, StratifiedKFold
from sklearn.metrics import roc_auc_score, recall_score, precision_score, accuracy_score, confusion_matrix, f1_score
from datetime import timedelta
from sklearn.model_selection import train_test_split, cross_val_score, StratifiedKFold, StratifiedShuffleSplit
from sklearn import metrics, preprocessing
from sklearn.base import TransformerMixin
from sklearn.utils import shuffle
from sklearn.preprocessing import LabelEncoder
# hyperopt modules
from bayes_opt import BayesianOptimization
from tqdm import tqdm
from hyperopt import hp, tpe, STATUS_OK, fmin, Trials, space_eval
from hyperopt.fmin import fmin
from hyperopt.pyll.stochastic import sample
# modelling/clustering algorithms
# import xgboost as xgb
# import lightgbm as lgb
# from sklearn.covariance import EllipticEnvelope
# from sklearn.ensemble import IsolationForest
# from sklearn.svm import OneClassSVM
# main modules from root same directory
import helper_funcs as helpers
import miss_imputation as missimp
import encoding as encoders
import feature_engineering as feateng
import feature_selection as featsel
from sampling import sampler
# call the main script
import main
|
en
| 0.628452
|
# yaaml __init__.py # importing packages # the base packages # for the Counter function # for reading/writing csv files # Evaluation of the model # hyperopt modules # modelling/clustering algorithms # import xgboost as xgb # import lightgbm as lgb # from sklearn.covariance import EllipticEnvelope # from sklearn.ensemble import IsolationForest # from sklearn.svm import OneClassSVM # main modules from root same directory # call the main script
| 1.906024
| 2
|
gladier_kanzus/tools/dials_stills.py
|
globus-gladier/kanzus_client
| 0
|
6626885
|
from gladier import GladierBaseTool, generate_flow_definition
def stills_process(**data):
    """Run ``dials.stills_process`` over a batch of images and return the
    shell command line used together with its captured stdout (as a string).

    Expected keys in *data*:
      proc_dir     -- working directory that holds the .phil files
      data_dir     -- directory containing the input image files
      input_files  -- input file pattern; the run number is its
                      second-to-last '_'-separated token
      input_range  -- "start..end" range; the end tag names the log file
      suffix       -- optional extra tag in the .phil file name
      dials_path   -- optional root of the DIALS installation
      timeout      -- optional wall-clock limit in seconds (default 1200)
    """
    import os
    import subprocess
    from subprocess import PIPE
    proc_dir = data['proc_dir']
    data_dir = data['data_dir']
    input_files = data['input_files']
    run_num = input_files.split("_")[-2]
    if 'suffix' in data:
        phil_name = f"{proc_dir}/process_{run_num}_{data['suffix']}.phil"
    else:
        phil_name = f"{proc_dir}/process_{run_num}.phil"
    file_end = data['input_range'].split("..")[-1]
    # Default the timeout instead of mutating the input dict.
    timeout = data.get("timeout", 1200)
    dials_path = data.get('dials_path', '')
    # NOTE(review): values are interpolated into a shell string
    # (shell=True); inputs are assumed to come from a trusted flow.
    cmd = (f'source {dials_path}/dials_env.sh && timeout {timeout} '
           f'dials.stills_process {phil_name} {data_dir}/{input_files} '
           f'> log-{file_end}.txt')
    # The worker must execute from the processing directory.
    os.chdir(proc_dir)
    res = subprocess.run(cmd, stdout=PIPE, stderr=PIPE,
                         shell=True, executable='/bin/bash')
    return cmd, str(res.stdout)
@generate_flow_definition()
class DialsStills(GladierBaseTool):
    """Gladier tool exposing `stills_process` as a funcX-executed flow step."""
    funcx_functions = [stills_process]
|
from gladier import GladierBaseTool, generate_flow_definition
def stills_process(**data):
    """Run ``dials.stills_process`` over a batch of images and return the
    shell command line used together with its captured stdout (as a string).

    Expected keys in *data*:
      proc_dir     -- working directory that holds the .phil files
      data_dir     -- directory containing the input image files
      input_files  -- input file pattern; the run number is its
                      second-to-last '_'-separated token
      input_range  -- "start..end" range; the end tag names the log file
      suffix       -- optional extra tag in the .phil file name
      dials_path   -- optional root of the DIALS installation
      timeout      -- optional wall-clock limit in seconds (default 1200)
    """
    import os
    import subprocess
    from subprocess import PIPE
    proc_dir = data['proc_dir']
    data_dir = data['data_dir']
    input_files = data['input_files']
    run_num = input_files.split("_")[-2]
    if 'suffix' in data:
        phil_name = f"{proc_dir}/process_{run_num}_{data['suffix']}.phil"
    else:
        phil_name = f"{proc_dir}/process_{run_num}.phil"
    file_end = data['input_range'].split("..")[-1]
    # Default the timeout instead of mutating the input dict.
    timeout = data.get("timeout", 1200)
    dials_path = data.get('dials_path', '')
    # NOTE(review): values are interpolated into a shell string
    # (shell=True); inputs are assumed to come from a trusted flow.
    cmd = (f'source {dials_path}/dials_env.sh && timeout {timeout} '
           f'dials.stills_process {phil_name} {data_dir}/{input_files} '
           f'> log-{file_end}.txt')
    # The worker must execute from the processing directory.
    os.chdir(proc_dir)
    res = subprocess.run(cmd, stdout=PIPE, stderr=PIPE,
                         shell=True, executable='/bin/bash')
    return cmd, str(res.stdout)
@generate_flow_definition()
class DialsStills(GladierBaseTool):
    """Gladier tool exposing `stills_process` as a funcX-executed flow step."""
    funcx_functions = [stills_process]
|
en
| 0.958943
|
##Need to guarantee the worker is at the correct location..
| 2.250767
| 2
|
scripts/sell_unused_slots.py
|
DataBiosphere/azul
| 17
|
6626886
|
<gh_stars>10-100
"""
Delete BigQuery reservation resources if no ongoing reindex is detected.
"""
import argparse
from datetime import (
datetime,
timedelta,
)
import sys
import time
from typing import (
Dict,
FrozenSet,
Iterable,
List,
)
from azul import (
RequirementError,
cached_property,
config,
logging,
)
from azul.args import (
AzulArgumentHelpFormatter,
)
from azul.bigquery_reservation import (
BigQueryReservation,
)
from azul.deployment import (
aws,
)
from azul.logging import (
configure_script_logging,
)
from azul.modules import (
load_app_module,
)
log = logging.getLogger(__name__)
class ReindexDetector:
    """Detect whether an Azul reindex is in progress by checking the recent
    CloudWatch invocation counts of the indexer's contribution Lambdas.
    """
    # Minutes
    interval = 5
    # Maximum number of contribution Lambda invocations per interval for a
    # reindexing to be considered inactive
    threshold = 0
    @cached_property
    def _cloudwatch(self):
        # Lazily created CloudWatch client (one per instance).
        return aws.client('cloudwatch')
    @cached_property
    def _lambda(self):
        # Lazily created Lambda client (one per instance).
        return aws.client('lambda')
    def is_reindex_active(self) -> bool:
        """Return True if any contribution Lambda was invoked more than
        ``threshold`` times in the last ``interval`` minutes.
        """
        functions = self._list_contribution_lambda_functions()
        reindex_active = False
        for (function,
             num_invocations) in self._lambda_invocation_counts(functions).items():
            description = (f'{function}: {num_invocations} invocations in '
                           f'the last {self.interval} minutes')
            if num_invocations > self.threshold:
                log.info(f'Active reindex for {description}')
                reindex_active = True
                # Keep looping to log status of remaining lambdas
            else:
                log.debug(f'No active reindex for {description}')
        return reindex_active
    def _list_contribution_lambda_functions(self) -> List[str]:
        """
        Search Lambda functions for the names of contribution Lambdas.
        """
        contribution_lambdas = []
        paginator = self._lambda.get_paginator('list_functions')
        for response in paginator.paginate():
            for function in response['Functions']:
                function_name = function['FunctionName']
                if self._is_contribution_lambda(function_name):
                    contribution_lambdas.append(function_name)
        return contribution_lambdas
    @cached_property
    def _contribution_lambda_names(self) -> FrozenSet[str]:
        # Names of the indexer app's contribution handlers, loaded once.
        indexer = load_app_module('indexer')
        return frozenset((
            indexer.contribute.name,
            indexer.contribute_retry.name
        ))
    def _is_contribution_lambda(self, function_name: str) -> bool:
        """Return True if *function_name* is one of this deployment's
        indexer contribution Lambdas.
        """
        for lambda_name in self._contribution_lambda_names:
            try:
                # FIXME: Eliminate hardcoded separator
                # https://github.com/databiosphere/azul/issues/2964
                resource_name, _ = config.unqualified_resource_name(function_name,
                                                                    suffix='-' + lambda_name)
            except RequirementError:
                # Name doesn't match the expected pattern; not ours.
                pass
            else:
                if resource_name == 'indexer':
                    return True
        return False
    def _lambda_invocation_counts(self, function_names: Iterable[str]) -> Dict[str, int]:
        """Map each function name to the sum of its CloudWatch 'Invocations'
        metric over the last ``interval`` minutes.
        """
        end = datetime.utcnow()
        start = end - timedelta(minutes=self.interval)
        response = self._cloudwatch.get_metric_data(
            MetricDataQueries=[
                {
                    'Id': f'invocation_count_{i}',
                    'Label': function_name,
                    'MetricStat': {
                        'Metric': {
                            'Namespace': 'AWS/Lambda',
                            'MetricName': 'Invocations',
                            'Dimensions': [{
                                'Name': 'FunctionName',
                                'Value': function_name
                            }]
                        },
                        'Period': 60 * self.interval,
                        'Stat': 'Sum'
                    }
                }
                for i, function_name in enumerate(function_names)
            ],
            StartTime=start,
            EndTime=end,
        )
        return {m['Label']: sum(m['Values']) for m in response['MetricDataResults']}
def main(argv):
    """
    Parse command line arguments, then deactivate the BigQuery reservation
    unless an active reindex (or a too-recent reservation update) forbids it.
    """
    argument_parser = argparse.ArgumentParser(description=__doc__,
                                              formatter_class=AzulArgumentHelpFormatter,
                                              add_help=True)
    argument_parser.add_argument('--dry-run',
                                 action='store_true',
                                 help='Report status without altering resources')
    options = argument_parser.parse_args(argv)
    # Listing BigQuery reservations is quicker than checking for an active
    # reindex, hence the order of checks
    reservation = BigQueryReservation(dry_run=options.dry_run)
    state = reservation.is_active
    if state is None:
        log.warning('BigQuery slot commitment state is inconsistent. '
                    'Dangling resources will be deleted.')
        reservation.deactivate()
    elif state is False:
        log.info('No slots are currently reserved.')
    elif state is True:
        age_floor = 30 * 60
        age = time.time() - reservation.update_time
        assert age > 0, age
        if age < age_floor:
            # Avoid race with recently started reindexing
            log.info('Reservation was updated %r < %r seconds ago; '
                     'taking no action.', age, age_floor)
        else:
            # FIXME: BigQuery slot management assumes all slots are in the same region
            #        https://github.com/DataBiosphere/azul/issues/3454
            if not ReindexDetector().is_reindex_active():
                reservation.deactivate()
    else:
        assert False
if __name__ == '__main__':
    # Script entry point: wire up logging before delegating to main()
    configure_script_logging(log)
    main(sys.argv[1:])
|
"""
Delete BigQuery reservation resources if no ongoing reindex is detected.
"""
import argparse
from datetime import (
datetime,
timedelta,
)
import sys
import time
from typing import (
Dict,
FrozenSet,
Iterable,
List,
)
from azul import (
RequirementError,
cached_property,
config,
logging,
)
from azul.args import (
AzulArgumentHelpFormatter,
)
from azul.bigquery_reservation import (
BigQueryReservation,
)
from azul.deployment import (
aws,
)
from azul.logging import (
configure_script_logging,
)
from azul.modules import (
load_app_module,
)
log = logging.getLogger(__name__)
class ReindexDetector:
# Minutes
interval = 5
# Maximum number of contribution Lambda invocations per interval for a
# reindexing to be considered inactive
threshold = 0
@cached_property
def _cloudwatch(self):
return aws.client('cloudwatch')
@cached_property
def _lambda(self):
return aws.client('lambda')
def is_reindex_active(self) -> bool:
functions = self._list_contribution_lambda_functions()
reindex_active = False
for (function,
num_invocations) in self._lambda_invocation_counts(functions).items():
description = (f'{function}: {num_invocations} invocations in '
f'the last {self.interval} minutes')
if num_invocations > self.threshold:
log.info(f'Active reindex for {description}')
reindex_active = True
# Keep looping to log status of remaining lambdas
else:
log.debug(f'No active reindex for {description}')
return reindex_active
def _list_contribution_lambda_functions(self) -> List[str]:
"""
Search Lambda functions for the names of contribution Lambdas.
"""
contribution_lambdas = []
paginator = self._lambda.get_paginator('list_functions')
for response in paginator.paginate():
for function in response['Functions']:
function_name = function['FunctionName']
if self._is_contribution_lambda(function_name):
contribution_lambdas.append(function_name)
return contribution_lambdas
@cached_property
def _contribution_lambda_names(self) -> FrozenSet[str]:
indexer = load_app_module('indexer')
return frozenset((
indexer.contribute.name,
indexer.contribute_retry.name
))
def _is_contribution_lambda(self, function_name: str) -> bool:
for lambda_name in self._contribution_lambda_names:
try:
# FIXME: Eliminate hardcoded separator
# https://github.com/databiosphere/azul/issues/2964
resource_name, _ = config.unqualified_resource_name(function_name,
suffix='-' + lambda_name)
except RequirementError:
pass
else:
if resource_name == 'indexer':
return True
return False
def _lambda_invocation_counts(self, function_names: Iterable[str]) -> Dict[str, int]:
end = datetime.utcnow()
start = end - timedelta(minutes=self.interval)
response = self._cloudwatch.get_metric_data(
MetricDataQueries=[
{
'Id': f'invocation_count_{i}',
'Label': function_name,
'MetricStat': {
'Metric': {
'Namespace': 'AWS/Lambda',
'MetricName': 'Invocations',
'Dimensions': [{
'Name': 'FunctionName',
'Value': function_name
}]
},
'Period': 60 * self.interval,
'Stat': 'Sum'
}
}
for i, function_name in enumerate(function_names)
],
StartTime=start,
EndTime=end,
)
return {m['Label']: sum(m['Values']) for m in response['MetricDataResults']}
def main(argv):
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=AzulArgumentHelpFormatter,
add_help=True)
parser.add_argument('--dry-run',
action='store_true',
help='Report status without altering resources')
args = parser.parse_args(argv)
# Listing BigQuery reservations is quicker than checking for an active
# reindex, hence the order of checks
reservation = BigQueryReservation(dry_run=args.dry_run)
is_active = reservation.is_active
if is_active is False:
log.info('No slots are currently reserved.')
elif is_active is True:
min_reservation_age = 30 * 60
reservation_age = time.time() - reservation.update_time
assert reservation_age > 0, reservation_age
if reservation_age < min_reservation_age:
# Avoid race with recently started reindexing
log.info('Reservation was updated %r < %r seconds ago; '
'taking no action.', reservation_age, min_reservation_age)
else:
monitor = ReindexDetector()
# FIXME: BigQuery slot management assumes all slots are in the same region
# https://github.com/DataBiosphere/azul/issues/3454
if not monitor.is_reindex_active():
reservation.deactivate()
elif is_active is None:
log.warning('BigQuery slot commitment state is inconsistent. '
'Dangling resources will be deleted.')
reservation.deactivate()
else:
assert False
if __name__ == '__main__':
configure_script_logging(log)
main(sys.argv[1:])
|
en
| 0.77516
|
Delete BigQuery reservation resources if no ongoing reindex is detected. # Minutes # Maximum number of contribution Lambda invocations per interval for a # reindexing to be considered inactive # Keep looping to log status of remaining lambdas Search Lambda functions for the names of contribution Lambdas. # FIXME: Eliminate hardcoded separator # https://github.com/databiosphere/azul/issues/2964 # Listing BigQuery reservations is quicker than checking for an active # reindex, hence the order of checks # Avoid race with recently started reindexing # FIXME: BigQuery slot management assumes all slots are in the same region # https://github.com/DataBiosphere/azul/issues/3454
| 2.157458
| 2
|
gtfspy/routing/test/test_forward_journey.py
|
Leo-Ryu/gtfspy
| 118
|
6626887
|
<gh_stars>100-1000
import unittest
from gtfspy.routing.forwardjourney import ForwardJourney
from gtfspy.routing.connection import Connection
class ForwardJourneyTest(unittest.TestCase):
    """Unit tests for ForwardJourney journeys assembled from Connection legs."""

    def test_add_leg(self):
        """Legs appended one by one update times and the boarding count."""
        journey = ForwardJourney()
        leg1 = Connection(departure_stop=0, arrival_stop=1, departure_time=0, arrival_time=1,
                          trip_id="tripI", seq=1, is_walk=False)
        journey.add_leg(leg1)
        self.assertEqual(len(journey.legs), 1)
        self.assertEqual(journey.departure_time, leg1.departure_time)
        self.assertEqual(journey.arrival_time, leg1.arrival_time)
        self.assertEqual(journey.n_boardings, 1)
        # A consecutive leg of the same trip must not count as a new boarding.
        leg2 = Connection(departure_stop=1, arrival_stop=2, departure_time=1, arrival_time=2,
                          trip_id="tripI", seq=1, is_walk=False)
        journey.add_leg(leg2)
        self.assertEqual(len(journey.legs), 2)
        self.assertEqual(journey.departure_time, leg1.departure_time)
        self.assertEqual(journey.arrival_time, leg2.arrival_time)
        self.assertEqual(journey.n_boardings, 1)

    def test_dominates(self):
        """A journey dominates another that is no better in any respect."""
        leg1 = Connection(departure_stop=0, arrival_stop=1, departure_time=0, arrival_time=1,
                          trip_id="tripI", seq=1, is_walk=False)
        leg2 = Connection(departure_stop=1, arrival_stop=2, departure_time=1, arrival_time=2,
                          trip_id="tripI", seq=1, is_walk=False)
        # Same itinerary as leg2 but with a later arrival time.
        leg3 = Connection(departure_stop=1, arrival_stop=2, departure_time=1, arrival_time=3,
                          trip_id="tripI", seq=1, is_walk=False)
        journey1 = ForwardJourney(legs=[leg1])
        journey2 = ForwardJourney(legs=[leg2])
        journey12 = ForwardJourney(legs=[leg1, leg2])
        journey13 = ForwardJourney(legs=[leg1, leg3])
        self.assertTrue(journey12.dominates(journey13))
        self.assertFalse(journey1.dominates(journey2))
        # With time and boardings ignored, a journey dominates itself.
        self.assertTrue(journey1.dominates(journey1, consider_time=False, consider_boardings=False))

    def test_basics(self):
        """Aggregate measures of a multi-leg journey ending with a walk leg."""
        event_list_raw_data = [
            (0, 1, 0, 10, "trip_1", 1),
            (1, 100, 32, 36, "trip_5", 1),
            (100, 3, 36, 40, "trip_5", 2),
            (3, 4, 40, 41, "trip_4", 1),
            (4, 2, 44, 50, None, 1)
        ]
        legs = list(map(lambda el: Connection(*el), event_list_raw_data))
        test_journey = ForwardJourney(legs)
        self.assertIsInstance(test_journey, ForwardJourney)
        self.assertIsInstance(test_journey.get_legs(), list)
        self.assertEqual(test_journey.n_boardings, 3)
        self.assertEqual(test_journey.get_transfers(), 2)
        self.assertEqual(test_journey.get_travel_time(), 50)
        self.assertIsInstance(test_journey.get_waiting_times(), list)
        self.assertEqual(test_journey.get_total_waiting_time(), 22 + 0 + 3)
        self.assertEqual(len(test_journey.get_all_stops()), 6)

    def test_transfer_stop_pairs(self):
        """Transfer stop pairs are (arrival stop, next departure stop)."""
        event_list_raw_data = [
            (0, 1, 0, 10, "trip_1", 1),
            (1, 100, 32, 36, "trip_5", 1),
            (100, 3, 36, 40, "trip_5", 2),
            (3, 4, 40, 41, "trip_4", 1),
            (4, 2, 44, 50, None, 1),
            (10, 11, 52, 55, "trip_6", 1)
        ]
        legs = list(map(lambda el: Connection(*el), event_list_raw_data))
        test_journey = ForwardJourney(legs)
        # Removed a leftover debug print() of the transfer stop pairs.
        transfer_stop_pairs = test_journey.get_transfer_stop_pairs()
        self.assertEqual(len(transfer_stop_pairs), 3)
        self.assertEqual(transfer_stop_pairs[0][0], 1)
        self.assertEqual(transfer_stop_pairs[0][1], 1)
        self.assertEqual(transfer_stop_pairs[1][0], 3)
        self.assertEqual(transfer_stop_pairs[1][1], 3)
        self.assertEqual(transfer_stop_pairs[2][0], 2)
        self.assertEqual(transfer_stop_pairs[2][1], 10)
|
import unittest
from gtfspy.routing.forwardjourney import ForwardJourney
from gtfspy.routing.connection import Connection
class ForwardJourneyTest(unittest.TestCase):
def test_add_leg(self):
journey = ForwardJourney()
leg1 = Connection(departure_stop=0, arrival_stop=1, departure_time=0, arrival_time=1,
trip_id="tripI", seq=1, is_walk=False)
journey.add_leg(leg1)
self.assertEqual(len(journey.legs), 1)
self.assertEqual(journey.departure_time, leg1.departure_time)
self.assertEqual(journey.arrival_time, leg1.arrival_time)
self.assertEqual(journey.n_boardings, 1)
leg2 = Connection(departure_stop=1, arrival_stop=2, departure_time=1, arrival_time=2,
trip_id="tripI", seq=1, is_walk=False)
journey.add_leg(leg2)
self.assertEqual(len(journey.legs), 2)
self.assertEqual(journey.departure_time, leg1.departure_time)
self.assertEqual(journey.arrival_time, leg2.arrival_time)
self.assertEqual(journey.n_boardings, 1)
def test_dominates(self):
leg1 = Connection(departure_stop=0, arrival_stop=1, departure_time=0, arrival_time=1,
trip_id="tripI", seq=1, is_walk=False)
leg2 = Connection(departure_stop=1, arrival_stop=2, departure_time=1, arrival_time=2,
trip_id="tripI", seq=1, is_walk=False)
leg3 = Connection(departure_stop=1, arrival_stop=2, departure_time=1, arrival_time=3,
trip_id="tripI", seq=1, is_walk=False)
journey1 = ForwardJourney(legs=[leg1])
journey2 = ForwardJourney(legs=[leg2])
journey12 = ForwardJourney(legs=[leg1, leg2])
journey13 = ForwardJourney(legs=[leg1, leg3])
self.assertTrue(journey12.dominates(journey13))
self.assertFalse(journey1.dominates(journey2))
self.assertTrue(journey1.dominates(journey1, consider_time=False, consider_boardings=False))
def test_basics(self):
event_list_raw_data = [
(0, 1, 0, 10, "trip_1", 1),
(1, 100, 32, 36, "trip_5", 1),
(100, 3, 36, 40, "trip_5", 2),
(3, 4, 40, 41, "trip_4", 1),
(4, 2, 44, 50, None, 1)
]
legs = list(map(lambda el: Connection(*el), event_list_raw_data))
test_journey = ForwardJourney(legs)
self.assertIsInstance(test_journey, ForwardJourney)
self.assertIsInstance(test_journey.get_legs(), list)
self.assertEqual(test_journey.n_boardings, 3)
self.assertEqual(test_journey.get_transfers(), 2)
self.assertEqual(test_journey.get_travel_time(), 50)
self.assertIsInstance(test_journey.get_waiting_times(), list)
self.assertEqual(test_journey.get_total_waiting_time(), 22 + 0 + 3)
self.assertEqual(len(test_journey.get_all_stops()), 6)
def test_transfer_stop_pairs(self):
event_list_raw_data = [
(0, 1, 0, 10, "trip_1", 1),
(1, 100, 32, 36, "trip_5", 1),
(100, 3, 36, 40, "trip_5", 2),
(3, 4, 40, 41, "trip_4", 1),
(4, 2, 44, 50, None, 1),
(10, 11, 52, 55, "trip_6", 1)
]
legs = list(map(lambda el: Connection(*el), event_list_raw_data))
test_journey = ForwardJourney(legs)
transfer_stop_pairs = test_journey.get_transfer_stop_pairs()
print(transfer_stop_pairs)
self.assertEqual(len(transfer_stop_pairs), 3)
self.assertEqual(transfer_stop_pairs[0][0], 1)
self.assertEqual(transfer_stop_pairs[0][1], 1)
self.assertEqual(transfer_stop_pairs[1][0], 3)
self.assertEqual(transfer_stop_pairs[1][1], 3)
self.assertEqual(transfer_stop_pairs[2][0], 2)
self.assertEqual(transfer_stop_pairs[2][1], 10)
|
none
| 1
| 2.89887
| 3
|
|
Scripts/simulation/objects/gallery_tuning.py
|
velocist/TS4CheatsInfo
| 0
|
6626888
|
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\objects\gallery_tuning.py
# Compiled at: 2019-09-03 17:10:04
# Size of source mod 2**32: 1108 bytes
from sims4.tuning.tunable import TunableEnumEntry
import enum, tag
class GalleryGameplayTuning:
    """Tuning values for gameplay behavior related to the gallery (decompiled)."""
    # Tunable tag marking objects whose save data must travel with them when
    # uploaded to the gallery (e.g. craftables, books) — see the description.
    EXPORT_SAVE_DATA_TO_GALLERY_TAG = TunableEnumEntry(description='\n        Reference to the tag used for marking objects that require their \n        save data to be stored in the gallery.\n        i.e. Craftables, books, etc.\n        ',
      tunable_type=(tag.Tag),
      default=(tag.Tag.INVALID))
class ContentSource(enum.Int, export=False):
    """Origin of loaded content; values match the game's internal protocol."""
    DEFAULT = 0
    LIBRARY = 1
    GALLERY = 2
    HOUSEHOLD_INVENTORY_PROXY = 3
|
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\objects\gallery_tuning.py
# Compiled at: 2019-09-03 17:10:04
# Size of source mod 2**32: 1108 bytes
from sims4.tuning.tunable import TunableEnumEntry
import enum, tag
class GalleryGameplayTuning:
EXPORT_SAVE_DATA_TO_GALLERY_TAG = TunableEnumEntry(description='\n Reference to the tag used for marking objects that require their \n save data to be stored in the gallery.\n i.e. Craftables, books, etc.\n ',
tunable_type=(tag.Tag),
default=(tag.Tag.INVALID))
class ContentSource(enum.Int, export=False):
DEFAULT = 0
LIBRARY = 1
GALLERY = 2
HOUSEHOLD_INVENTORY_PROXY = 3
|
en
| 0.51459
|
# uncompyle6 version 3.7.4 # Python bytecode 3.7 (3394) # Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)] # Embedded file name: T:\InGame\Gameplay\Scripts\Server\objects\gallery_tuning.py # Compiled at: 2019-09-03 17:10:04 # Size of source mod 2**32: 1108 bytes
| 1.90311
| 2
|
src/app/db/models.py
|
pyronear/pyro-api
| 8
|
6626889
|
# Copyright (C) 2021, Pyronear contributors.
# This program is licensed under the Apache License version 2.
# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.
import enum
from .session import Base
from sqlalchemy.sql import func
from sqlalchemy import Column, DateTime, Integer, Float, String, Enum, Boolean, ForeignKey, MetaData
from sqlalchemy.orm import relationship
class Users(Base):
    """ORM model for an end-user account.

    A user owns at most one access (credentials) and at most one device,
    both mapped as one-to-one relationships.
    """
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    login = Column(String(50), unique=True)
    # Each user points at exactly one access row; deleting the access cascades.
    access_id = Column(Integer, ForeignKey("accesses.id", ondelete="CASCADE"), unique=True)
    created_at = Column(DateTime, default=func.now())
    access = relationship("Accesses", uselist=False, back_populates="user")
    device = relationship("Devices", uselist=False, back_populates="owner")

    def __repr__(self):
        # Fixed: the closing parenthesis was missing before '>' in the repr.
        return f"<User(login='{self.login}', created_at='{self.created_at}')>"
class AccessType(str, enum.Enum):
    """Authorization scopes; members compare equal to their string values."""
    user = 'user'
    admin = 'admin'
    device = 'device'
class Accesses(Base):
    """Login credentials and authorization scope for a user or a device."""
    __tablename__ = "accesses"
    id = Column(Integer, primary_key=True)
    login = Column(String(50), unique=True, index=True)  # index for fast lookup
    hashed_password = Column(String(70), nullable=False)
    # Authorization level; defaults to the regular 'user' scope
    scope = Column(Enum(AccessType), default=AccessType.user, nullable=False)
    group_id = Column(Integer, ForeignKey("groups.id", ondelete="CASCADE"), nullable=False)
    # One-to-one back-references: an access backs either a user or a device
    user = relationship("Users", uselist=False, back_populates="access")
    device = relationship("Devices", uselist=False, back_populates="access")
    group = relationship("Groups", uselist=False, back_populates="accesses")
    def __repr__(self):
        return f"<Access(login='{self.login}', scope='{self.scope}', group_id='{self.group_id}')>"
class Groups(Base):
    """Named group that owns a set of accesses and a set of sites."""
    __tablename__ = "groups"
    id = Column(Integer, primary_key=True)
    name = Column(String(50), unique=True)
    accesses = relationship("Accesses", back_populates="group")
    sites = relationship("Sites", back_populates="group")
    def __repr__(self):
        return f"<Group(name='{self.name}')>"
class SiteType(str, enum.Enum):
    """Kinds of monitored sites; members compare equal to their string values."""
    tower = 'tower'
    station = 'station'
    no_alert = 'no_alert'
class Sites(Base):
    """Physical site (e.g. a watch tower) where devices can be installed."""
    __tablename__ = "sites"
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    group_id = Column(Integer, ForeignKey("groups.id", ondelete="CASCADE"), nullable=False)
    # Coordinates stored as Decimal (asdecimal=True) with 4-digit precision
    lat = Column(Float(4, asdecimal=True))
    lon = Column(Float(4, asdecimal=True))
    country = Column(String(5), nullable=False)
    geocode = Column(String(10), nullable=False)
    type = Column(Enum(SiteType), default=SiteType.tower)
    created_at = Column(DateTime, default=func.now())
    installations = relationship("Installations", back_populates="site")
    group = relationship("Groups", uselist=False, back_populates="sites")
    def __repr__(self):
        return (f"<Site(name='{self.name}', group_id='{self.group_id}', lat='{self.lat}', lon='{self.lon}', "
                f"country='{self.country}', geocode='{self.geocode}', type='{self.type}')>")
class EventType(str, enum.Enum):
    """Kinds of trackable events; only wildfires are modeled for now."""
    wildfire = 'wildfire'
class Events(Base):
    """A real-world event (e.g. a wildfire) that alerts can be grouped under."""
    __tablename__ = "events"
    id = Column(Integer, primary_key=True)
    lat = Column(Float(4, asdecimal=True))
    lon = Column(Float(4, asdecimal=True))
    type = Column(Enum(EventType), default=EventType.wildfire)
    # Open-ended events leave both timestamps at None until known
    start_ts = Column(DateTime, default=None, nullable=True)
    end_ts = Column(DateTime, default=None, nullable=True)
    is_acknowledged = Column(Boolean, default=False)
    created_at = Column(DateTime, default=func.now())
    alerts = relationship("Alerts", back_populates="event")
    def __repr__(self):
        return (f"<Event(lat='{self.lat}', lon='{self.lon}', type='{self.type}', "
                f"is_acknowledged='{self.is_acknowledged}')>")
# Linked tables
class Devices(Base):
    """A camera device, owned by a user and backed by its own access row."""
    __tablename__ = "devices"
    id = Column(Integer, primary_key=True)
    login = Column(String(50), unique=True)
    owner_id = Column(Integer, ForeignKey("users.id"))
    access_id = Column(Integer, ForeignKey("accesses.id", ondelete="CASCADE"), unique=True)
    specs = Column(String(50))
    software_hash = Column(String(16), default=None, nullable=True)
    angle_of_view = Column(Float(2, asdecimal=True))
    # Pose/position fields are optional until the device reports them
    elevation = Column(Float(1, asdecimal=True), default=None, nullable=True)
    lat = Column(Float(4, asdecimal=True), default=None, nullable=True)
    lon = Column(Float(4, asdecimal=True), default=None, nullable=True)
    yaw = Column(Float(1, asdecimal=True), default=None, nullable=True)
    pitch = Column(Float(1, asdecimal=True), default=None, nullable=True)
    # Timestamp of the last heartbeat received from the device, if any
    last_ping = Column(DateTime, default=None, nullable=True)
    created_at = Column(DateTime, default=func.now())
    access = relationship("Accesses", uselist=False, back_populates="device")
    owner = relationship("Users", uselist=False, back_populates="device")
    media = relationship("Media", back_populates="device")
    alerts = relationship("Alerts", back_populates="device")
    installation = relationship("Installations", back_populates="device")
    def __repr__(self):
        return (f"<Device(login='{self.login}', owner_id='{self.owner_id}', access_id='{self.access_id}', "
                f"specs='{self.specs}', software_hash='{self.software_hash}', last_ping='{self.last_ping}')>")
class MediaType(str, enum.Enum):
    """Supported media formats; members compare equal to their string values."""
    image = 'image'
    video = 'video'
class Media(Base):
    """A media item (image or video) captured by a device."""
    __tablename__ = "media"
    id = Column(Integer, primary_key=True)
    device_id = Column(Integer, ForeignKey("devices.id"))
    # Object-storage key of the underlying file; may be absent
    bucket_key = Column(String(100), nullable=True)
    type = Column(Enum(MediaType), default=MediaType.image)
    created_at = Column(DateTime, default=func.now())
    device = relationship("Devices", uselist=False, back_populates="media")
    alerts = relationship("Alerts", back_populates="media")

    def __repr__(self):
        # Fixed: the closing parenthesis was missing before '>' in the repr.
        return f"<Media(device_id='{self.device_id}', bucket_key='{self.bucket_key}', type='{self.type}')>"
class Installations(Base):
    """Association of a device with a site over a time interval."""
    __tablename__ = "installations"
    id = Column(Integer, primary_key=True)
    device_id = Column(Integer, ForeignKey("devices.id"))
    site_id = Column(Integer, ForeignKey("sites.id"))
    start_ts = Column(DateTime, nullable=False)
    # Open-ended installations keep end_ts at None
    end_ts = Column(DateTime, default=None, nullable=True)
    is_trustworthy = Column(Boolean, default=True)
    created_at = Column(DateTime, default=func.now())
    device = relationship("Devices", back_populates="installation")
    site = relationship("Sites", back_populates="installations")

    def __repr__(self):
        # Fixed: the closing parenthesis was missing before '>' in the repr.
        return (f"<Installation(device_id='{self.device_id}', site_id='{self.site_id}', "
                f"is_trustworthy='{self.is_trustworthy}')>")
class Alerts(Base):
    """An alert raised by a device, optionally tied to an event and a media item."""
    __tablename__ = "alerts"
    id = Column(Integer, primary_key=True)
    device_id = Column(Integer, ForeignKey("devices.id"))
    # Follows the event on updates/deletes; alerts may exist without an event
    event_id = Column(Integer, ForeignKey("events.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=True)
    media_id = Column(Integer, ForeignKey("media.id"), default=None)
    azimuth = Column(Float(4, asdecimal=True), default=None)
    lat = Column(Float(4, asdecimal=True))
    lon = Column(Float(4, asdecimal=True))
    created_at = Column(DateTime, default=func.now())
    device = relationship("Devices", back_populates="alerts")
    event = relationship("Events", back_populates="alerts")
    media = relationship("Media", back_populates="alerts")

    def __repr__(self):
        # Fixed: the closing parenthesis was missing before '>' in the repr.
        return f"<Alert(device_id='{self.device_id}', event_id='{self.event_id}', media_id='{self.media_id}')>"
class Webhooks(Base):
    """A registered callback URL to be notified when events occur."""
    __tablename__ = "webhooks"
    id = Column(Integer, primary_key=True)
    callback = Column(String(50), nullable=False)
    url = Column(String(100), nullable=False)

    def __repr__(self):
        # Fixed: the closing parenthesis was missing before '>' in the repr.
        return f"<Webhook(callback='{self.callback}', url='{self.url}')>"
|
# Copyright (C) 2021, Pyronear contributors.
# This program is licensed under the Apache License version 2.
# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.
import enum
from .session import Base
from sqlalchemy.sql import func
from sqlalchemy import Column, DateTime, Integer, Float, String, Enum, Boolean, ForeignKey, MetaData
from sqlalchemy.orm import relationship
class Users(Base):
__tablename__ = "users"
id = Column(Integer, primary_key=True)
login = Column(String(50), unique=True)
access_id = Column(Integer, ForeignKey("accesses.id", ondelete="CASCADE"), unique=True)
created_at = Column(DateTime, default=func.now())
access = relationship("Accesses", uselist=False, back_populates="user")
device = relationship("Devices", uselist=False, back_populates="owner")
def __repr__(self):
return f"<User(login='{self.login}', created_at='{self.created_at}'>"
class AccessType(str, enum.Enum):
user: str = 'user'
admin: str = 'admin'
device: str = 'device'
class Accesses(Base):
__tablename__ = "accesses"
id = Column(Integer, primary_key=True)
login = Column(String(50), unique=True, index=True) # index for fast lookup
hashed_password = Column(String(70), nullable=False)
scope = Column(Enum(AccessType), default=AccessType.user, nullable=False)
group_id = Column(Integer, ForeignKey("groups.id", ondelete="CASCADE"), nullable=False)
user = relationship("Users", uselist=False, back_populates="access")
device = relationship("Devices", uselist=False, back_populates="access")
group = relationship("Groups", uselist=False, back_populates="accesses")
def __repr__(self):
return f"<Access(login='{self.login}', scope='{self.scope}', group_id='{self.group_id}')>"
class Groups(Base):
__tablename__ = "groups"
id = Column(Integer, primary_key=True)
name = Column(String(50), unique=True)
accesses = relationship("Accesses", back_populates="group")
sites = relationship("Sites", back_populates="group")
def __repr__(self):
return f"<Group(name='{self.name}')>"
class SiteType(str, enum.Enum):
tower: str = 'tower'
station: str = 'station'
no_alert: str = 'no_alert'
class Sites(Base):
__tablename__ = "sites"
id = Column(Integer, primary_key=True)
name = Column(String(50))
group_id = Column(Integer, ForeignKey("groups.id", ondelete="CASCADE"), nullable=False)
lat = Column(Float(4, asdecimal=True))
lon = Column(Float(4, asdecimal=True))
country = Column(String(5), nullable=False)
geocode = Column(String(10), nullable=False)
type = Column(Enum(SiteType), default=SiteType.tower)
created_at = Column(DateTime, default=func.now())
installations = relationship("Installations", back_populates="site")
group = relationship("Groups", uselist=False, back_populates="sites")
def __repr__(self):
return (f"<Site(name='{self.name}', group_id='{self.group_id}', lat='{self.lat}', lon='{self.lon}', "
f"country='{self.country}', geocode='{self.geocode}', type='{self.type}')>")
class EventType(str, enum.Enum):
wildfire: str = 'wildfire'
class Events(Base):
__tablename__ = "events"
id = Column(Integer, primary_key=True)
lat = Column(Float(4, asdecimal=True))
lon = Column(Float(4, asdecimal=True))
type = Column(Enum(EventType), default=EventType.wildfire)
start_ts = Column(DateTime, default=None, nullable=True)
end_ts = Column(DateTime, default=None, nullable=True)
is_acknowledged = Column(Boolean, default=False)
created_at = Column(DateTime, default=func.now())
alerts = relationship("Alerts", back_populates="event")
def __repr__(self):
return (f"<Event(lat='{self.lat}', lon='{self.lon}', type='{self.type}', "
f"is_acknowledged='{self.is_acknowledged}')>")
# Linked tables
class Devices(Base):
__tablename__ = "devices"
id = Column(Integer, primary_key=True)
login = Column(String(50), unique=True)
owner_id = Column(Integer, ForeignKey("users.id"))
access_id = Column(Integer, ForeignKey("accesses.id", ondelete="CASCADE"), unique=True)
specs = Column(String(50))
software_hash = Column(String(16), default=None, nullable=True)
angle_of_view = Column(Float(2, asdecimal=True))
elevation = Column(Float(1, asdecimal=True), default=None, nullable=True)
lat = Column(Float(4, asdecimal=True), default=None, nullable=True)
lon = Column(Float(4, asdecimal=True), default=None, nullable=True)
yaw = Column(Float(1, asdecimal=True), default=None, nullable=True)
pitch = Column(Float(1, asdecimal=True), default=None, nullable=True)
last_ping = Column(DateTime, default=None, nullable=True)
created_at = Column(DateTime, default=func.now())
access = relationship("Accesses", uselist=False, back_populates="device")
owner = relationship("Users", uselist=False, back_populates="device")
media = relationship("Media", back_populates="device")
alerts = relationship("Alerts", back_populates="device")
installation = relationship("Installations", back_populates="device")
def __repr__(self):
return (f"<Device(login='{self.login}', owner_id='{self.owner_id}', access_id='{self.access_id}', "
f"specs='{self.specs}', software_hash='{self.software_hash}', last_ping='{self.last_ping}')>")
class MediaType(str, enum.Enum):
image: str = 'image'
video: str = 'video'
class Media(Base):
__tablename__ = "media"
id = Column(Integer, primary_key=True)
device_id = Column(Integer, ForeignKey("devices.id"))
bucket_key = Column(String(100), nullable=True)
type = Column(Enum(MediaType), default=MediaType.image)
created_at = Column(DateTime, default=func.now())
device = relationship("Devices", uselist=False, back_populates="media")
alerts = relationship("Alerts", back_populates="media")
def __repr__(self):
return f"<Media(device_id='{self.device_id}', bucket_key='{self.bucket_key}', type='{self.type}'>"
class Installations(Base):
__tablename__ = "installations"
id = Column(Integer, primary_key=True)
device_id = Column(Integer, ForeignKey("devices.id"))
site_id = Column(Integer, ForeignKey("sites.id"))
start_ts = Column(DateTime, nullable=False)
end_ts = Column(DateTime, default=None, nullable=True)
is_trustworthy = Column(Boolean, default=True)
created_at = Column(DateTime, default=func.now())
device = relationship("Devices", back_populates="installation")
site = relationship("Sites", back_populates="installations")
def __repr__(self):
return (f"<Installation(device_id='{self.device_id}', site_id='{self.site_id}', "
f"is_trustworthy='{self.is_trustworthy}'>")
class Alerts(Base):
__tablename__ = "alerts"
id = Column(Integer, primary_key=True)
device_id = Column(Integer, ForeignKey("devices.id"))
event_id = Column(Integer, ForeignKey("events.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=True)
media_id = Column(Integer, ForeignKey("media.id"), default=None)
azimuth = Column(Float(4, asdecimal=True), default=None)
lat = Column(Float(4, asdecimal=True))
lon = Column(Float(4, asdecimal=True))
created_at = Column(DateTime, default=func.now())
device = relationship("Devices", back_populates="alerts")
event = relationship("Events", back_populates="alerts")
media = relationship("Media", back_populates="alerts")
def __repr__(self):
return f"<Alert(device_id='{self.device_id}', event_id='{self.event_id}', media_id='{self.media_id}'>"
class Webhooks(Base):
__tablename__ = "webhooks"
id = Column(Integer, primary_key=True)
callback = Column(String(50), nullable=False)
url = Column(String(100), nullable=False)
def __repr__(self):
return f"<Webhook(callback='{self.callback}', url='{self.url}'>"
|
en
| 0.789458
|
# Copyright (C) 2021, Pyronear contributors. # This program is licensed under the Apache License version 2. # See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details. # index for fast lookup # Linked tables
| 2.293704
| 2
|
oneflow/python/test/ops/test_fused_bias_add_gelu.py
|
wanghongsheng01/framework_enflame
| 1
|
6626890
|
<filename>oneflow/python/test/ops/test_fused_bias_add_gelu.py
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import os
from collections import OrderedDict
import numpy as np
import oneflow as flow
import test_global_storage
from test_util import GenArgList, type_name_to_flow_type
import test_global_storage
from test_util import Args, GenArgDict
import oneflow.typing as oft
def compare_with_not_fused(test_case, device_type, x_shape, data_type, data_format):
    """Check that fused_bias_add_gelu matches bias_add followed by gelu.

    Builds a single training job that computes the same bias-add + GELU
    twice — once with the unfused ops (y1) and once with the fused kernel
    (y2) — then asserts forward outputs and backward gradients agree.
    """
    assert device_type in ["gpu", "cpu"]
    flow.clear_default_session()
    func_config = flow.FunctionConfig()
    # NOTE(review): `dtype` is computed but never used below — presumably a
    # leftover; the float16 path casts explicitly instead. TODO confirm.
    if data_type == "float16":
        dtype = flow.float
    else:
        dtype = type_name_to_flow_type[data_type]
    # Bias length follows the channel axis of the chosen memory layout.
    if data_format == "NCHW":
        bias_shape = (x_shape[1],)
    elif data_format == "NHWC":
        bias_shape = (x_shape[len(x_shape) - 1],)

    @flow.global_function(type="train", function_config=func_config)
    def FlowJob(
        value: oft.Numpy.Placeholder(x_shape), bias: oft.Numpy.Placeholder(bias_shape),
    ):
        with flow.scope.placement(device_type, "0:0"):
            # Add zero-initialized trainable variables so the inputs take
            # part in training and therefore receive gradients to watch.
            value += flow.get_variable(
                name="v1",
                shape=(1,),
                dtype=flow.float,
                initializer=flow.zeros_initializer(),
            )
            bias += flow.get_variable(
                name="v2",
                shape=(1,),
                dtype=flow.float,
                initializer=flow.zeros_initializer(),
            )
            # Two identical branches: x1/bias1 feed the unfused reference,
            # x2/bias2 feed the fused op under test.
            x1 = flow.identity(value)
            x2 = flow.identity(value)
            bias1 = flow.identity(bias)
            bias2 = flow.identity(bias)
            flow.watch_diff(x1, test_global_storage.Setter("x1_diff"))
            flow.watch_diff(x2, test_global_storage.Setter("x2_diff"))
            flow.watch_diff(bias1, test_global_storage.Setter("bias1_diff"))
            flow.watch_diff(bias2, test_global_storage.Setter("bias2_diff"))
            if data_type == "float16":
                # fp16 path: cast inputs down, compute, cast results back to
                # float so the numpy comparison below works on one dtype.
                y1 = flow.cast(
                    flow.math.gelu(
                        flow.nn.bias_add(
                            flow.cast(x1, dtype=flow.float16),
                            flow.cast(bias1, dtype=flow.float16),
                            data_format=data_format,
                        ),
                    ),
                    dtype=flow.float,
                )
                y2 = flow.cast(
                    flow.nn.fused_bias_add_gelu(
                        flow.cast(x2, dtype=flow.float16),
                        flow.cast(bias2, dtype=flow.float16),
                        data_format=data_format,
                    ),
                    dtype=flow.float,
                )
            else:
                y1 = flow.math.gelu(
                    flow.nn.bias_add(x1, bias1, data_format=data_format)
                )
                y2 = flow.nn.fused_bias_add_gelu(x2, bias2, data_format=data_format)
            flow.watch(y1, test_global_storage.Setter("y1"))
            flow.watch(y2, test_global_storage.Setter("y2"))
            flow.watch_diff(y1, test_global_storage.Setter("y1_diff"))
            flow.watch_diff(y2, test_global_storage.Setter("y2_diff"))
            # Sum both branches into one loss so both receive gradients.
            loss = y1 + y2
            flow.optimizer.SGD(
                flow.optimizer.PiecewiseConstantScheduler([], [0.001]), momentum=0
            ).minimize(flow.math.reduce_sum(loss))
            return loss

    x = np.random.uniform(low=0, high=10, size=x_shape).astype(np.float32)
    bias = np.random.uniform(low=0, high=10, size=bias_shape).astype(np.float32)
    of_out = FlowJob(x, bias).get()
    # Forward outputs must agree within tolerance.
    y1 = test_global_storage.Get("y1")
    y2 = test_global_storage.Get("y2")
    tol = 1e-5
    test_case.assertTrue(np.allclose(y1, y2, rtol=tol, atol=tol, equal_nan=True))
    # Gradients w.r.t. the input must agree.
    x1_diff = test_global_storage.Get("x1_diff")
    x2_diff = test_global_storage.Get("x2_diff")
    test_case.assertTrue(
        np.allclose(x1_diff, x2_diff, rtol=tol, atol=tol, equal_nan=True)
    )
    # Gradients w.r.t. the bias must agree.
    bias1_diff = test_global_storage.Get("bias1_diff")
    bias2_diff = test_global_storage.Get("bias2_diff")
    test_case.assertTrue(
        np.allclose(bias1_diff, bias2_diff, rtol=tol, atol=tol, equal_nan=True)
    )
@flow.unittest.skip_unless_1n1d()
class TestFusedBiasAdd(flow.unittest.TestCase):
    """Drives compare_with_not_fused over a grid of configurations."""

    @unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
    def test_fused_bias_add(test_case):
        # Cartesian product of configurations to exercise.
        params = OrderedDict(
            [
                ("device_type", ["gpu"]),
                (
                    "x_shape",
                    [(10, 10), (10, 5), (1, 10, 10, 10), (2, 10, 10, 10)],
                ),
                ("data_type", ["float16", "float32", "double"]),
                ("data_format", ["NCHW"]),
            ]
        )
        for combo in GenArgList(params):
            device, dtype_name = combo[0], combo[2]
            # fp16 has no CPU kernel; skip that pairing.
            if device == "cpu" and dtype_name == "float16":
                continue
            compare_with_not_fused(test_case, *combo)
# Allow running this test file directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
<filename>oneflow/python/test/ops/test_fused_bias_add_gelu.py
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import os
from collections import OrderedDict
import numpy as np
import oneflow as flow
import test_global_storage
from test_util import GenArgList, type_name_to_flow_type
import test_global_storage
from test_util import Args, GenArgDict
import oneflow.typing as oft
def compare_with_not_fused(test_case, device_type, x_shape, data_type, data_format):
    """Check that fused_bias_add_gelu matches bias_add followed by gelu.

    Builds a single training job that computes the same bias-add + GELU
    twice — once with the unfused ops (y1) and once with the fused kernel
    (y2) — then asserts forward outputs and backward gradients agree.
    """
    assert device_type in ["gpu", "cpu"]
    flow.clear_default_session()
    func_config = flow.FunctionConfig()
    # NOTE(review): `dtype` is computed but never used below — presumably a
    # leftover; the float16 path casts explicitly instead. TODO confirm.
    if data_type == "float16":
        dtype = flow.float
    else:
        dtype = type_name_to_flow_type[data_type]
    # Bias length follows the channel axis of the chosen memory layout.
    if data_format == "NCHW":
        bias_shape = (x_shape[1],)
    elif data_format == "NHWC":
        bias_shape = (x_shape[len(x_shape) - 1],)

    @flow.global_function(type="train", function_config=func_config)
    def FlowJob(
        value: oft.Numpy.Placeholder(x_shape), bias: oft.Numpy.Placeholder(bias_shape),
    ):
        with flow.scope.placement(device_type, "0:0"):
            # Add zero-initialized trainable variables so the inputs take
            # part in training and therefore receive gradients to watch.
            value += flow.get_variable(
                name="v1",
                shape=(1,),
                dtype=flow.float,
                initializer=flow.zeros_initializer(),
            )
            bias += flow.get_variable(
                name="v2",
                shape=(1,),
                dtype=flow.float,
                initializer=flow.zeros_initializer(),
            )
            # Two identical branches: x1/bias1 feed the unfused reference,
            # x2/bias2 feed the fused op under test.
            x1 = flow.identity(value)
            x2 = flow.identity(value)
            bias1 = flow.identity(bias)
            bias2 = flow.identity(bias)
            flow.watch_diff(x1, test_global_storage.Setter("x1_diff"))
            flow.watch_diff(x2, test_global_storage.Setter("x2_diff"))
            flow.watch_diff(bias1, test_global_storage.Setter("bias1_diff"))
            flow.watch_diff(bias2, test_global_storage.Setter("bias2_diff"))
            if data_type == "float16":
                # fp16 path: cast inputs down, compute, cast results back to
                # float so the numpy comparison below works on one dtype.
                y1 = flow.cast(
                    flow.math.gelu(
                        flow.nn.bias_add(
                            flow.cast(x1, dtype=flow.float16),
                            flow.cast(bias1, dtype=flow.float16),
                            data_format=data_format,
                        ),
                    ),
                    dtype=flow.float,
                )
                y2 = flow.cast(
                    flow.nn.fused_bias_add_gelu(
                        flow.cast(x2, dtype=flow.float16),
                        flow.cast(bias2, dtype=flow.float16),
                        data_format=data_format,
                    ),
                    dtype=flow.float,
                )
            else:
                y1 = flow.math.gelu(
                    flow.nn.bias_add(x1, bias1, data_format=data_format)
                )
                y2 = flow.nn.fused_bias_add_gelu(x2, bias2, data_format=data_format)
            flow.watch(y1, test_global_storage.Setter("y1"))
            flow.watch(y2, test_global_storage.Setter("y2"))
            flow.watch_diff(y1, test_global_storage.Setter("y1_diff"))
            flow.watch_diff(y2, test_global_storage.Setter("y2_diff"))
            # Sum both branches into one loss so both receive gradients.
            loss = y1 + y2
            flow.optimizer.SGD(
                flow.optimizer.PiecewiseConstantScheduler([], [0.001]), momentum=0
            ).minimize(flow.math.reduce_sum(loss))
            return loss

    x = np.random.uniform(low=0, high=10, size=x_shape).astype(np.float32)
    bias = np.random.uniform(low=0, high=10, size=bias_shape).astype(np.float32)
    of_out = FlowJob(x, bias).get()
    # Forward outputs must agree within tolerance.
    y1 = test_global_storage.Get("y1")
    y2 = test_global_storage.Get("y2")
    tol = 1e-5
    test_case.assertTrue(np.allclose(y1, y2, rtol=tol, atol=tol, equal_nan=True))
    # Gradients w.r.t. the input must agree.
    x1_diff = test_global_storage.Get("x1_diff")
    x2_diff = test_global_storage.Get("x2_diff")
    test_case.assertTrue(
        np.allclose(x1_diff, x2_diff, rtol=tol, atol=tol, equal_nan=True)
    )
    # Gradients w.r.t. the bias must agree.
    bias1_diff = test_global_storage.Get("bias1_diff")
    bias2_diff = test_global_storage.Get("bias2_diff")
    test_case.assertTrue(
        np.allclose(bias1_diff, bias2_diff, rtol=tol, atol=tol, equal_nan=True)
    )
@flow.unittest.skip_unless_1n1d()
class TestFusedBiasAdd(flow.unittest.TestCase):
    """Drives compare_with_not_fused over a grid of configurations."""

    @unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
    def test_fused_bias_add(test_case):
        # Cartesian product of configurations to exercise.
        params = OrderedDict(
            [
                ("device_type", ["gpu"]),
                (
                    "x_shape",
                    [(10, 10), (10, 5), (1, 10, 10, 10), (2, 10, 10, 10)],
                ),
                ("data_type", ["float16", "float32", "double"]),
                ("data_format", ["NCHW"]),
            ]
        )
        for combo in GenArgList(params):
            device, dtype_name = combo[0], combo[2]
            # fp16 has no CPU kernel; skip that pairing.
            if device == "cpu" and dtype_name == "float16":
                continue
            compare_with_not_fused(test_case, *combo)
# Allow running this test file directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
en
| 0.864155
|
Copyright 2020 The OneFlow Authors. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
| 2.059255
| 2
|
python/euclid_test.py
|
smenjas/programming-languages-compared
| 0
|
6626891
|
import unittest
from euclid import euclid
class TestEuclid(unittest.TestCase):
    """Unit tests for euclid(), a greatest-common-divisor implementation."""

    def test_types(self):
        # Any non-int second argument must raise TypeError.
        for bad in (None, 1.0, "", (), [], {}):
            self.assertRaises(TypeError, euclid, 1, bad)

    def test_values(self):
        # Both arguments must be strictly positive integers.
        for a, b in ((1, 0), (0, 1), (1, -1), (-1, 1)):
            self.assertRaises(ValueError, euclid, a, b)

    def test_success(self):
        # (a, b) -> expected gcd.
        expected = {
            (1, 1): 1,
            (1, 2): 1,
            (2, 2): 2,
            (2, 3): 1,
            (2, 4): 2,
            (48, 64): 16,
        }
        for (a, b), gcd in expected.items():
            self.assertEqual(euclid(a, b), gcd)
|
import unittest
from euclid import euclid
class TestEuclid(unittest.TestCase):
    """Unit tests for euclid(), a greatest-common-divisor implementation."""

    def test_types(self):
        # Any non-int second argument must raise TypeError.
        self.assertRaises(TypeError, euclid, 1, None)
        self.assertRaises(TypeError, euclid, 1, 1.0)
        self.assertRaises(TypeError, euclid, 1, "")
        self.assertRaises(TypeError, euclid, 1, ())
        self.assertRaises(TypeError, euclid, 1, [])
        self.assertRaises(TypeError, euclid, 1, {})

    def test_values(self):
        # Both arguments must be strictly positive integers.
        self.assertRaises(ValueError, euclid, 1, 0)
        self.assertRaises(ValueError, euclid, 0, 1)
        self.assertRaises(ValueError, euclid, 1, -1)
        self.assertRaises(ValueError, euclid, -1, 1)

    def test_success(self):
        # gcd identities plus one non-trivial pair.
        self.assertEqual(euclid(1, 1), 1)
        self.assertEqual(euclid(1, 2), 1)
        self.assertEqual(euclid(2, 2), 2)
        self.assertEqual(euclid(2, 3), 1)
        self.assertEqual(euclid(2, 4), 2)
        self.assertEqual(euclid(48, 64), 16)
|
none
| 1
| 3.245981
| 3
|
|
grapl_analyzerlib/nodes/process_outbound_network_connection.py
|
wittekm/grapl_analyzerlib
| 3
|
6626892
|
from typing import *
from pydgraph import DgraphClient
from grapl_analyzerlib.nodes.comparators import (
Cmp,
IntCmp,
_int_cmps,
StrCmp,
_str_cmps,
PropertyFilter,
)
from grapl_analyzerlib.nodes.queryable import NQ, Queryable
from grapl_analyzerlib.nodes.types import PropertyT, Property
from grapl_analyzerlib.nodes.viewable import EdgeViewT, ForwardEdgeView, Viewable
# Type variable bound to the query class; used so fluent APIs can be typed
# as returning "this query type".
IProcessOutboundConnectionQuery = TypeVar(
    "IProcessOutboundConnectionQuery", bound="ProcessOutboundConnectionQuery",
)
class ProcessOutboundConnectionQuery(Queryable):
    """Fluent query builder for ProcessOutboundConnection nodes.

    Property filters accumulate as a conjunction of disjunctions
    (List[List[Cmp]]); every ``with_*`` method mutates this query and
    returns it so calls can be chained.
    """

    def __init__(self):
        super(ProcessOutboundConnectionQuery, self).__init__(
            ProcessOutboundConnectionView
        )
        self._created_timestamp = []  # type: List[List[Cmp[int]]]
        self._terminated_timestamp = []  # type: List[List[Cmp[int]]]
        self._last_seen_timestamp = []  # type: List[List[Cmp[int]]]
        self._port = []  # type: List[List[Cmp[int]]]
        self._ip_address = []  # type: List[List[Cmp[str]]]
        self._protocol = []  # type: List[List[Cmp[str]]]
        self._connected_over = None  # type: Optional[IIpPortQuery]
        self._connected_to = None  # type: Optional[IIpPortQuery]
        # Reverse edge
        self._connecting_processes = None  # type: Optional[IProcessQuery]

    def with_ip_address(
        self,
        eq: Optional[StrCmp] = None,
        contains: Optional[StrCmp] = None,
        ends_with: Optional[StrCmp] = None,
        starts_with: Optional[StrCmp] = None,
        regexp: Optional[StrCmp] = None,
        distance: Optional[Tuple[StrCmp, int]] = None,
    ) -> "NQ":
        """Filter on the connection's ip_address property."""
        cast("ProcessOutboundConnectionQuery", self)._ip_address.extend(
            _str_cmps(
                "ip_address",
                eq=eq,
                contains=contains,
                ends_with=ends_with,
                starts_with=starts_with,
                regexp=regexp,
                distance=distance,
            ),
        )
        return self

    def with_protocol(
        self,
        eq: Optional[StrCmp] = None,
        contains: Optional[StrCmp] = None,
        ends_with: Optional[StrCmp] = None,
        starts_with: Optional[StrCmp] = None,
        regexp: Optional[StrCmp] = None,
        distance: Optional[Tuple[StrCmp, int]] = None,
    ) -> "NQ":
        """Filter on the connection's protocol property."""
        cast("ProcessOutboundConnectionQuery", self)._protocol.extend(
            _str_cmps(
                "protocol",
                eq=eq,
                contains=contains,
                ends_with=ends_with,
                starts_with=starts_with,
                regexp=regexp,
                distance=distance,
            ),
        )
        return self

    def with_created_timestamp(
        self: "NQ",
        eq: Optional["IntCmp"] = None,
        gt: Optional["IntCmp"] = None,
        lt: Optional["IntCmp"] = None,
    ) -> "NQ":
        """Filter on created_timestamp."""
        cast("ProcessOutboundConnectionQuery", self)._created_timestamp.extend(
            _int_cmps("created_timestamp", eq=eq, gt=gt, lt=lt)
        )
        return self

    def with_terminated_timestamp(
        self: "NQ",
        eq: Optional["IntCmp"] = None,
        gt: Optional["IntCmp"] = None,
        lt: Optional["IntCmp"] = None,
    ) -> "NQ":
        """Filter on terminated_timestamp."""
        cast("ProcessOutboundConnectionQuery", self)._terminated_timestamp.extend(
            _int_cmps("terminated_timestamp", eq=eq, gt=gt, lt=lt),
        )
        return self

    def with_last_seen_timestamp(
        self: "NQ",
        eq: Optional["IntCmp"] = None,
        gt: Optional["IntCmp"] = None,
        lt: Optional["IntCmp"] = None,
    ) -> "NQ":
        """Filter on last_seen_timestamp."""
        cast("ProcessOutboundConnectionQuery", self)._last_seen_timestamp.extend(
            _int_cmps("last_seen_timestamp", eq=eq, gt=gt, lt=lt)
        )
        return self

    def with_port(
        self: "NQ",
        eq: Optional["IntCmp"] = None,
        gt: Optional["IntCmp"] = None,
        lt: Optional["IntCmp"] = None,
    ) -> "NQ":
        """Filter on the outbound port."""
        cast("ProcessOutboundConnectionQuery", self)._port.extend(
            _int_cmps("port", eq=eq, gt=gt, lt=lt)
        )
        return self

    def with_connecting_processess(
        self: "NQ", connecting_processess_query: Optional["ProcessQuery"] = None
    ) -> "NQ":
        """Constrain the reverse created_connections edge to a process query.

        (Method name keeps its historical trailing-s typo for API
        compatibility.)
        """
        # Fix: the default must be a ProcessQuery — this edge points at
        # processes; the previous IpPortQuery() default produced a
        # wrongly-typed filter when no query was supplied.
        connecting_processess = connecting_processess_query or ProcessQuery()
        connecting_processess._created_connections = self
        cast(
            ProcessOutboundConnectionQuery, self
        )._connecting_processes = connecting_processess
        return self

    def with_connected_over(
        self: "NQ", connected_over_query: Optional["IpPortQuery"] = None
    ) -> "NQ":
        """Constrain the forward connected_over edge to an IP/port query."""
        connected_over = connected_over_query or IpPortQuery()
        self.set_forward_edge_filter("connected_over", connected_over)
        connected_over.set_reverse_edge_filter(
            "~connected_over", self, "connected_over"
        )
        return self

    def with_connected_to(
        self: "NQ", connected_to_query: Optional["IpPortQuery"] = None
    ) -> "NQ":
        """Constrain the forward connected_to edge to an IP/port query."""
        connected_to = connected_to_query or IpPortQuery()
        self.set_forward_edge_filter("connected_to", connected_to)
        connected_to.set_reverse_edge_filter("~connected_to", self, "connected_to")
        return self

    def _get_unique_predicate(self) -> Optional[Tuple[str, "PropertyT"]]:
        # This node type has no unique predicate.
        return None

    def _get_node_type_name(self) -> str:
        return "ProcessOutboundConnection"

    def _get_property_filters(self) -> Mapping[str, "PropertyFilter[Property]"]:
        """Return only the property filters that have been populated."""
        props = {
            "created_timestamp": self._created_timestamp,
            "terminated_timestamp": self._terminated_timestamp,
            "last_seen_timestamp": self._last_seen_timestamp,
            "port": self._port,
            "ip_address": self._ip_address,
            "protocol": self._protocol,
        }
        combined = {}
        for prop_name, prop_filter in props.items():
            if prop_filter:
                combined[prop_name] = cast("PropertyFilter[Property]", prop_filter)
        return combined

    def _get_forward_edges(self) -> Mapping[str, "Queryable"]:
        # Fix: include both forward edges. "connected_to" was previously
        # omitted, so filters set via with_connected_to() were dropped.
        forward_edges = {
            "connected_over": self._connected_over,
            "connected_to": self._connected_to,
        }
        return {fe[0]: fe[1] for fe in forward_edges.items() if fe[1] is not None}

    def _get_reverse_edges(self) -> Mapping[str, Tuple["Queryable", str]]:
        reverse_edges = {
            "~created_connections": (self._connecting_processes, "connecting_processes")
        }
        return {
            fe[0]: (fe[1][0], fe[1][1])
            for fe in reverse_edges.items()
            if fe[1][0] is not None
        }
# Type variable bound to the view class; used so APIs can be typed as
# returning "this view type".
IProcessOutboundConnectionView = TypeVar(
    "IProcessOutboundConnectionView", bound="ProcessOutboundConnectionView",
)
class ProcessOutboundConnectionView(Viewable):
    """Materialized view of a ProcessOutboundConnection node.

    Properties may arrive pre-populated from a query result; the ``get_*``
    accessors lazily fetch any that are missing from Dgraph.
    """

    def __init__(
        self,
        dgraph_client: DgraphClient,
        node_key: str,
        uid: str,
        node_type: str,
        created_timestamp: Optional[int] = None,
        terminated_timestamp: Optional[int] = None,
        last_seen_timestamp: Optional[int] = None,
        port: Optional[int] = None,
        ip_address: Optional[str] = None,
        protocol: Optional[str] = None,
        connecting_processes: "Optional[IProcessView]" = None,
        connected_over: "Optional[IpPortView]" = None,
        connected_to: "Optional[IpPortView]" = None,
    ):
        super(ProcessOutboundConnectionView, self).__init__(
            dgraph_client=dgraph_client, node_key=node_key, uid=uid, node_type=node_type
        )
        self.dgraph_client = dgraph_client
        self.node_key = node_key
        self.uid = uid
        self.node_type = node_type
        self.created_timestamp = created_timestamp
        self.terminated_timestamp = terminated_timestamp
        self.last_seen_timestamp = last_seen_timestamp
        self.port = port
        self.ip_address = ip_address
        self.protocol = protocol
        self.connecting_processes = connecting_processes
        self.connected_over = connected_over
        self.connected_to = connected_to

    def get_node_type(self) -> str:
        return "ProcessOutboundConnection"

    def get_created_timestamp(self) -> Optional[int]:
        """Return created_timestamp, fetching it lazily if unset."""
        if not self.created_timestamp:
            self.created_timestamp = cast(
                Optional[int], self.fetch_property("created_timestamp", int)
            )
        return self.created_timestamp

    def get_terminated_timestamp(self) -> Optional[int]:
        """Return terminated_timestamp, fetching it lazily if unset."""
        if not self.terminated_timestamp:
            self.terminated_timestamp = cast(
                Optional[int], self.fetch_property("terminated_timestamp", int)
            )
        return self.terminated_timestamp

    def get_last_seen_timestamp(self) -> Optional[int]:
        """Return last_seen_timestamp, fetching it lazily if unset."""
        if not self.last_seen_timestamp:
            self.last_seen_timestamp = cast(
                Optional[int], self.fetch_property("last_seen_timestamp", int)
            )
        return self.last_seen_timestamp

    def get_port(self) -> Optional[int]:
        """Return the outbound port, fetching it lazily if unset."""
        if not self.port:
            self.port = cast(Optional[int], self.fetch_property("port", int))
        return self.port

    def get_ip_address(self) -> Optional[str]:
        """Return the ip_address, fetching it lazily if unset."""
        if not self.ip_address:
            self.ip_address = cast(
                Optional[str], self.fetch_property("ip_address", str)
            )
        return self.ip_address

    def get_protocol(self) -> Optional[str]:
        """Return the protocol, fetching it lazily if unset."""
        if not self.protocol:
            self.protocol = cast(Optional[str], self.fetch_property("protocol", str))
        return self.protocol

    def get_connecting_processes(self) -> List["ProcessView"]:
        """Fetch processes on the reverse created_connections edge."""
        return cast(
            List[ProcessView], self.fetch_edges("~created_connections", ProcessView),
        )

    def get_connected_over(self) -> Optional["IpPortView"]:
        """Fetch the IP/port this connection went over."""
        return cast(
            Optional[IpPortView], self.fetch_edge("connected_over", IpPortView),
        )

    def get_connected_to(self) -> Optional["IpPortView"]:
        """Fetch the IP/port this connection went to."""
        return cast(Optional[IpPortView], self.fetch_edge("connected_to", IpPortView),)

    @staticmethod
    def _get_property_types() -> Mapping[str, "PropertyT"]:
        return {
            "created_timestamp": int,
            "terminated_timestamp": int,
            "last_seen_timestamp": int,
            "port": int,
            "ip_address": str,
            "protocol": str,
        }

    @staticmethod
    def _get_forward_edge_types() -> Mapping[str, "EdgeViewT"]:
        f_edges = {
            "connected_over": IpPortView,
            "connected_to": IpPortView,
        }  # type: Dict[str, Optional["EdgeViewT"]]
        return cast(
            Mapping[str, "EdgeViewT"], {fe[0]: fe[1] for fe in f_edges.items() if fe[1]}
        )

    def _get_forward_edges(self) -> "Mapping[str, ForwardEdgeView]":
        # Fix: "connected_to" previously mapped to self.connected_over
        # (copy-paste error), so the connected_to edge was never emitted.
        f_edges = {
            "connected_over": self.connected_over,
            "connected_to": self.connected_to,
        }  # type: Dict[str, Optional[ForwardEdgeView]]
        return cast(
            Mapping[str, ForwardEdgeView],
            {fe[0]: fe[1] for fe in f_edges.items() if fe[1]},
        )

    def _get_properties(self, fetch: bool = False) -> Mapping[str, Union[str, int]]:
        """Return only the properties that are currently populated."""
        props = {
            "created_timestamp": self.created_timestamp,
            "terminated_timestamp": self.terminated_timestamp,
            "last_seen_timestamp": self.last_seen_timestamp,
            "port": self.port,
            "ip_address": self.ip_address,
            "protocol": self.protocol,
        }
        return {p[0]: p[1] for p in props.items() if p[1] is not None}

    @staticmethod
    def _get_reverse_edge_types() -> Mapping[str, Tuple["EdgeViewT", str]]:
        return {"~created_connections": ([ProcessView], "connecting_processes")}

    def _get_reverse_edges(self) -> Mapping[str, Tuple["Queryable", str]]:
        reverse_edges = {
            "~created_connections": (self.connecting_processes, "connecting_processes")
        }
        return {
            fe[0]: (fe[1][0], fe[1][1])
            for fe in reverse_edges.items()
            if fe[1][0] is not None
        }
from grapl_analyzerlib.nodes.ip_port_node import IpPortQuery, IIpPortQuery, IpPortView
from grapl_analyzerlib.nodes.process_node import (
IProcessQuery,
ProcessQuery,
ProcessView,
IProcessView,
)
|
from typing import *
from pydgraph import DgraphClient
from grapl_analyzerlib.nodes.comparators import (
Cmp,
IntCmp,
_int_cmps,
StrCmp,
_str_cmps,
PropertyFilter,
)
from grapl_analyzerlib.nodes.queryable import NQ, Queryable
from grapl_analyzerlib.nodes.types import PropertyT, Property
from grapl_analyzerlib.nodes.viewable import EdgeViewT, ForwardEdgeView, Viewable
# Type variable bound to the query class; used so fluent APIs can be typed
# as returning "this query type".
IProcessOutboundConnectionQuery = TypeVar(
    "IProcessOutboundConnectionQuery", bound="ProcessOutboundConnectionQuery",
)
class ProcessOutboundConnectionQuery(Queryable):
    """Fluent query builder for ProcessOutboundConnection nodes.

    Property filters accumulate as a conjunction of disjunctions
    (List[List[Cmp]]); every ``with_*`` method mutates this query and
    returns it so calls can be chained.
    """

    def __init__(self):
        super(ProcessOutboundConnectionQuery, self).__init__(
            ProcessOutboundConnectionView
        )
        self._created_timestamp = []  # type: List[List[Cmp[int]]]
        self._terminated_timestamp = []  # type: List[List[Cmp[int]]]
        self._last_seen_timestamp = []  # type: List[List[Cmp[int]]]
        self._port = []  # type: List[List[Cmp[int]]]
        self._ip_address = []  # type: List[List[Cmp[str]]]
        self._protocol = []  # type: List[List[Cmp[str]]]
        self._connected_over = None  # type: Optional[IIpPortQuery]
        self._connected_to = None  # type: Optional[IIpPortQuery]
        # Reverse edge
        self._connecting_processes = None  # type: Optional[IProcessQuery]

    def with_ip_address(
        self,
        eq: Optional[StrCmp] = None,
        contains: Optional[StrCmp] = None,
        ends_with: Optional[StrCmp] = None,
        starts_with: Optional[StrCmp] = None,
        regexp: Optional[StrCmp] = None,
        distance: Optional[Tuple[StrCmp, int]] = None,
    ) -> "NQ":
        """Filter on the connection's ip_address property."""
        cast("ProcessOutboundConnectionQuery", self)._ip_address.extend(
            _str_cmps(
                "ip_address",
                eq=eq,
                contains=contains,
                ends_with=ends_with,
                starts_with=starts_with,
                regexp=regexp,
                distance=distance,
            ),
        )
        return self

    def with_protocol(
        self,
        eq: Optional[StrCmp] = None,
        contains: Optional[StrCmp] = None,
        ends_with: Optional[StrCmp] = None,
        starts_with: Optional[StrCmp] = None,
        regexp: Optional[StrCmp] = None,
        distance: Optional[Tuple[StrCmp, int]] = None,
    ) -> "NQ":
        """Filter on the connection's protocol property."""
        cast("ProcessOutboundConnectionQuery", self)._protocol.extend(
            _str_cmps(
                "protocol",
                eq=eq,
                contains=contains,
                ends_with=ends_with,
                starts_with=starts_with,
                regexp=regexp,
                distance=distance,
            ),
        )
        return self

    def with_created_timestamp(
        self: "NQ",
        eq: Optional["IntCmp"] = None,
        gt: Optional["IntCmp"] = None,
        lt: Optional["IntCmp"] = None,
    ) -> "NQ":
        """Filter on created_timestamp."""
        cast("ProcessOutboundConnectionQuery", self)._created_timestamp.extend(
            _int_cmps("created_timestamp", eq=eq, gt=gt, lt=lt)
        )
        return self

    def with_terminated_timestamp(
        self: "NQ",
        eq: Optional["IntCmp"] = None,
        gt: Optional["IntCmp"] = None,
        lt: Optional["IntCmp"] = None,
    ) -> "NQ":
        """Filter on terminated_timestamp."""
        cast("ProcessOutboundConnectionQuery", self)._terminated_timestamp.extend(
            _int_cmps("terminated_timestamp", eq=eq, gt=gt, lt=lt),
        )
        return self

    def with_last_seen_timestamp(
        self: "NQ",
        eq: Optional["IntCmp"] = None,
        gt: Optional["IntCmp"] = None,
        lt: Optional["IntCmp"] = None,
    ) -> "NQ":
        """Filter on last_seen_timestamp."""
        cast("ProcessOutboundConnectionQuery", self)._last_seen_timestamp.extend(
            _int_cmps("last_seen_timestamp", eq=eq, gt=gt, lt=lt)
        )
        return self

    def with_port(
        self: "NQ",
        eq: Optional["IntCmp"] = None,
        gt: Optional["IntCmp"] = None,
        lt: Optional["IntCmp"] = None,
    ) -> "NQ":
        """Filter on the outbound port."""
        cast("ProcessOutboundConnectionQuery", self)._port.extend(
            _int_cmps("port", eq=eq, gt=gt, lt=lt)
        )
        return self

    def with_connecting_processess(
        self: "NQ", connecting_processess_query: Optional["ProcessQuery"] = None
    ) -> "NQ":
        """Constrain the reverse created_connections edge to a process query.

        (Method name keeps its historical trailing-s typo for API
        compatibility.)
        """
        # Fix: the default must be a ProcessQuery — this edge points at
        # processes; the previous IpPortQuery() default produced a
        # wrongly-typed filter when no query was supplied.
        connecting_processess = connecting_processess_query or ProcessQuery()
        connecting_processess._created_connections = self
        cast(
            ProcessOutboundConnectionQuery, self
        )._connecting_processes = connecting_processess
        return self

    def with_connected_over(
        self: "NQ", connected_over_query: Optional["IpPortQuery"] = None
    ) -> "NQ":
        """Constrain the forward connected_over edge to an IP/port query."""
        connected_over = connected_over_query or IpPortQuery()
        self.set_forward_edge_filter("connected_over", connected_over)
        connected_over.set_reverse_edge_filter(
            "~connected_over", self, "connected_over"
        )
        return self

    def with_connected_to(
        self: "NQ", connected_to_query: Optional["IpPortQuery"] = None
    ) -> "NQ":
        """Constrain the forward connected_to edge to an IP/port query."""
        connected_to = connected_to_query or IpPortQuery()
        self.set_forward_edge_filter("connected_to", connected_to)
        connected_to.set_reverse_edge_filter("~connected_to", self, "connected_to")
        return self

    def _get_unique_predicate(self) -> Optional[Tuple[str, "PropertyT"]]:
        # This node type has no unique predicate.
        return None

    def _get_node_type_name(self) -> str:
        return "ProcessOutboundConnection"

    def _get_property_filters(self) -> Mapping[str, "PropertyFilter[Property]"]:
        """Return only the property filters that have been populated."""
        props = {
            "created_timestamp": self._created_timestamp,
            "terminated_timestamp": self._terminated_timestamp,
            "last_seen_timestamp": self._last_seen_timestamp,
            "port": self._port,
            "ip_address": self._ip_address,
            "protocol": self._protocol,
        }
        combined = {}
        for prop_name, prop_filter in props.items():
            if prop_filter:
                combined[prop_name] = cast("PropertyFilter[Property]", prop_filter)
        return combined

    def _get_forward_edges(self) -> Mapping[str, "Queryable"]:
        # Fix: include both forward edges. "connected_to" was previously
        # omitted, so filters set via with_connected_to() were dropped.
        forward_edges = {
            "connected_over": self._connected_over,
            "connected_to": self._connected_to,
        }
        return {fe[0]: fe[1] for fe in forward_edges.items() if fe[1] is not None}

    def _get_reverse_edges(self) -> Mapping[str, Tuple["Queryable", str]]:
        reverse_edges = {
            "~created_connections": (self._connecting_processes, "connecting_processes")
        }
        return {
            fe[0]: (fe[1][0], fe[1][1])
            for fe in reverse_edges.items()
            if fe[1][0] is not None
        }
# Type variable bound to the view class; used so APIs can be typed as
# returning "this view type".
IProcessOutboundConnectionView = TypeVar(
    "IProcessOutboundConnectionView", bound="ProcessOutboundConnectionView",
)
class ProcessOutboundConnectionView(Viewable):
    """Materialized view of a ProcessOutboundConnection node.

    Properties may arrive pre-populated from a query result; the ``get_*``
    accessors lazily fetch any that are missing from Dgraph.
    """

    def __init__(
        self,
        dgraph_client: DgraphClient,
        node_key: str,
        uid: str,
        node_type: str,
        created_timestamp: Optional[int] = None,
        terminated_timestamp: Optional[int] = None,
        last_seen_timestamp: Optional[int] = None,
        port: Optional[int] = None,
        ip_address: Optional[str] = None,
        protocol: Optional[str] = None,
        connecting_processes: "Optional[IProcessView]" = None,
        connected_over: "Optional[IpPortView]" = None,
        connected_to: "Optional[IpPortView]" = None,
    ):
        super(ProcessOutboundConnectionView, self).__init__(
            dgraph_client=dgraph_client, node_key=node_key, uid=uid, node_type=node_type
        )
        self.dgraph_client = dgraph_client
        self.node_key = node_key
        self.uid = uid
        self.node_type = node_type
        self.created_timestamp = created_timestamp
        self.terminated_timestamp = terminated_timestamp
        self.last_seen_timestamp = last_seen_timestamp
        self.port = port
        self.ip_address = ip_address
        self.protocol = protocol
        self.connecting_processes = connecting_processes
        self.connected_over = connected_over
        self.connected_to = connected_to

    def get_node_type(self) -> str:
        return "ProcessOutboundConnection"

    def get_created_timestamp(self) -> Optional[int]:
        """Return created_timestamp, fetching it lazily if unset."""
        if not self.created_timestamp:
            self.created_timestamp = cast(
                Optional[int], self.fetch_property("created_timestamp", int)
            )
        return self.created_timestamp

    def get_terminated_timestamp(self) -> Optional[int]:
        """Return terminated_timestamp, fetching it lazily if unset."""
        if not self.terminated_timestamp:
            self.terminated_timestamp = cast(
                Optional[int], self.fetch_property("terminated_timestamp", int)
            )
        return self.terminated_timestamp

    def get_last_seen_timestamp(self) -> Optional[int]:
        """Return last_seen_timestamp, fetching it lazily if unset."""
        if not self.last_seen_timestamp:
            self.last_seen_timestamp = cast(
                Optional[int], self.fetch_property("last_seen_timestamp", int)
            )
        return self.last_seen_timestamp

    def get_port(self) -> Optional[int]:
        """Return the outbound port, fetching it lazily if unset."""
        if not self.port:
            self.port = cast(Optional[int], self.fetch_property("port", int))
        return self.port

    def get_ip_address(self) -> Optional[str]:
        """Return the ip_address, fetching it lazily if unset."""
        if not self.ip_address:
            self.ip_address = cast(
                Optional[str], self.fetch_property("ip_address", str)
            )
        return self.ip_address

    def get_protocol(self) -> Optional[str]:
        """Return the protocol, fetching it lazily if unset."""
        if not self.protocol:
            self.protocol = cast(Optional[str], self.fetch_property("protocol", str))
        return self.protocol

    def get_connecting_processes(self) -> List["ProcessView"]:
        """Fetch processes on the reverse created_connections edge."""
        return cast(
            List[ProcessView], self.fetch_edges("~created_connections", ProcessView),
        )

    def get_connected_over(self) -> Optional["IpPortView"]:
        """Fetch the IP/port this connection went over."""
        return cast(
            Optional[IpPortView], self.fetch_edge("connected_over", IpPortView),
        )

    def get_connected_to(self) -> Optional["IpPortView"]:
        """Fetch the IP/port this connection went to."""
        return cast(Optional[IpPortView], self.fetch_edge("connected_to", IpPortView),)

    @staticmethod
    def _get_property_types() -> Mapping[str, "PropertyT"]:
        return {
            "created_timestamp": int,
            "terminated_timestamp": int,
            "last_seen_timestamp": int,
            "port": int,
            "ip_address": str,
            "protocol": str,
        }

    @staticmethod
    def _get_forward_edge_types() -> Mapping[str, "EdgeViewT"]:
        f_edges = {
            "connected_over": IpPortView,
            "connected_to": IpPortView,
        }  # type: Dict[str, Optional["EdgeViewT"]]
        return cast(
            Mapping[str, "EdgeViewT"], {fe[0]: fe[1] for fe in f_edges.items() if fe[1]}
        )

    def _get_forward_edges(self) -> "Mapping[str, ForwardEdgeView]":
        # Fix: "connected_to" previously mapped to self.connected_over
        # (copy-paste error), so the connected_to edge was never emitted.
        f_edges = {
            "connected_over": self.connected_over,
            "connected_to": self.connected_to,
        }  # type: Dict[str, Optional[ForwardEdgeView]]
        return cast(
            Mapping[str, ForwardEdgeView],
            {fe[0]: fe[1] for fe in f_edges.items() if fe[1]},
        )

    def _get_properties(self, fetch: bool = False) -> Mapping[str, Union[str, int]]:
        """Return only the properties that are currently populated."""
        props = {
            "created_timestamp": self.created_timestamp,
            "terminated_timestamp": self.terminated_timestamp,
            "last_seen_timestamp": self.last_seen_timestamp,
            "port": self.port,
            "ip_address": self.ip_address,
            "protocol": self.protocol,
        }
        return {p[0]: p[1] for p in props.items() if p[1] is not None}

    @staticmethod
    def _get_reverse_edge_types() -> Mapping[str, Tuple["EdgeViewT", str]]:
        return {"~created_connections": ([ProcessView], "connecting_processes")}

    def _get_reverse_edges(self) -> Mapping[str, Tuple["Queryable", str]]:
        reverse_edges = {
            "~created_connections": (self.connecting_processes, "connecting_processes")
        }
        return {
            fe[0]: (fe[1][0], fe[1][1])
            for fe in reverse_edges.items()
            if fe[1][0] is not None
        }
from grapl_analyzerlib.nodes.ip_port_node import IpPortQuery, IIpPortQuery, IpPortView
from grapl_analyzerlib.nodes.process_node import (
IProcessQuery,
ProcessQuery,
ProcessView,
IProcessView,
)
|
en
| 0.224699
|
# type: List[List[Cmp[int]]] # type: List[List[Cmp[int]]] # type: List[List[Cmp[int]]] # type: List[List[Cmp[int]]] # type: List[List[Cmp[str]]] # type: List[List[Cmp[str]]] # type: Optional[IIpPortQuery] # type: Optional[IIpPortQuery] # Reverse edge # type: Optional[IProcessQuery] # type: Dict[str, Optional["EdgeViewT"]] # type: Dict[str, Optional[ForwardEdgeView]]
| 2.206487
| 2
|
Allura/allura/ext/admin/admin_main.py
|
isabella232/allura
| 113
|
6626893
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import absolute_import
import logging
import re
import os
from random import randint
from collections import OrderedDict
from datetime import datetime
from six.moves.urllib.parse import urlparse
import json
from operator import itemgetter, attrgetter
import pkg_resources
from tg import tmpl_context as c, app_globals as g, response
from tg import request
from paste.deploy.converters import asbool, aslist
from tg import expose, redirect, flash, validate, config, jsonify
from tg.decorators import with_trailing_slash, without_trailing_slash
from webob import exc
from bson import ObjectId
from ming.orm.ormsession import ThreadLocalORMSession
from ming.odm import session
import PIL
from allura.app import Application, DefaultAdminController, SitemapEntry
from allura.lib import helpers as h
from allura import version
from allura import model as M
from allura.lib.security import has_access, require_access, is_site_admin
from allura.lib.widgets import form_fields as ffw
from allura.lib import exceptions as forge_exc
from allura.lib import plugin
from allura.controllers import BaseController
from allura.lib.decorators import require_post
from allura.tasks import export_tasks
from allura.lib.widgets.project_list import ProjectScreenshots
from . import widgets as aw
import six
from six.moves import map
# Module-level logger named after this module so log output can be routed/filtered.
log = logging.getLogger(__name__)
class W:
    """Shared, reusable widget instances used by the admin controllers
    and templates (conventionally named ``W`` in Allura controllers)."""
    label_edit = ffw.LabelEdit()
    group_card = aw.GroupCard()
    permission_card = aw.PermissionCard()
    new_group_settings = aw.NewGroupSettings()
    screenshot_admin = aw.ScreenshotAdmin()
    # draggable=True enables drag/drop re-ordering on the screenshots page
    screenshot_list = ProjectScreenshots(draggable=True)
    metadata_admin = aw.MetadataAdmin()
    audit = aw.AuditLog()
    page_list = ffw.PageList()
class AdminApp(Application):
    '''This is the admin app. It is pretty much required for
    a functioning allura project.
    '''
    __version__ = version.__version__
    _installable_tools = None
    max_instances = 0  # users can never install additional instances of the admin tool
    tool_label = 'admin'
    icons = {
        24: 'images/admin_24.png',
        32: 'images/admin_32.png',
        48: 'images/admin_48.png'
    }
    exportable = True
    has_notifications = False
    def __init__(self, project, config):
        Application.__init__(self, project, config)
        # web controllers: UI root, REST API root, and per-tool admin UI
        self.root = ProjectAdminController()
        self.api_root = ProjectAdminRestController()
        self.admin = AdminAppAdminController(self)
        self.templates = pkg_resources.resource_filename(
            'allura.ext.admin', 'templates')
        self.sitemap = [SitemapEntry('Admin', '.')]
    def is_visible_to(self, user):
        '''Whether the user can view the app.'''
        return has_access(c.project, 'create')(user=user)
    @staticmethod
    def installable_tools_for(project):
        # Return [{'name': ..., 'app': AppClass}, ...] for every tool entry
        # point that can still be installed in *project*, sorted by the
        # app's status and ordinal, filtered by the project's allowed statuses.
        tools = []
        for name, App in six.iteritems(g.entry_points['tool']):
            # temporary AppConfig only used by _installable() checks below
            cfg = M.AppConfig(project_id=project._id, tool_name=name)
            if App._installable(name, project.neighborhood, project.app_configs):
                tools.append(dict(name=name, app=App))
            # prevent from saving temporary config to db
            session(cfg).expunge(cfg)
        tools.sort(key=lambda t: (t['app'].status_int(), t['app'].ordinal or 0))
        return [t for t in tools
                if t['app'].status in project.allowed_tool_status]
    @staticmethod
    def exportable_tools_for(project):
        # AppConfigs of installed tools whose app supports bulk export,
        # ordered by mount point for stable display.
        tools = []
        for tool in project.app_configs:
            if project.app_instance(tool).exportable:
                tools.append(tool)
        return sorted(tools, key=lambda t: t.options.mount_point)
    def main_menu(self):
        '''Apps should provide their entries to be added to the main nav
        :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
        '''
        return [SitemapEntry('Admin', '.')]
    @h.exceptionless([], log)
    def sidebar_menu(self):
        # Build the admin sidebar.  Neighborhood projects get neighborhood
        # admin links; ordinary projects get the project-admin sections,
        # gated by the caller's permissions.
        links = []
        admin_url = c.project.url() + 'admin/'
        if c.project.is_nbhd_project:
            links.append(SitemapEntry('Add Project', c.project.url()
                                      + 'add_project', ui_icon=g.icons['add']))
            nbhd_admin_url = c.project.neighborhood.url() + '_admin/'
            links = links + [
                SitemapEntry('Neighborhood'),
                SitemapEntry('Overview', nbhd_admin_url + 'overview'),
                SitemapEntry('Awards', nbhd_admin_url + 'accolades')]
        else:
            links += [
                SitemapEntry('Welcome', admin_url),
                SitemapEntry('Metadata', admin_url + 'overview', className="admin-nav-metadata"),
            ]
            if c.project.neighborhood.name != "Users":
                links += [
                    SitemapEntry('Screenshots', admin_url + 'screenshots'),
                    SitemapEntry('Categorization', admin_url + 'trove')
                ]
        if plugin.ProjectRegistrationProvider.get().registration_date(c.project) < datetime(2016, 6, 1):
            # only show transitional Tools page to older projects that may be used to it
            # no point is showing it to new projects
            links.append(SitemapEntry('Tools', admin_url + 'tools_moved'))
        if asbool(config.get('bulk_export_enabled', True)):
            links.append(SitemapEntry('Export', admin_url + 'export'))
        if c.project.is_root and has_access(c.project, 'admin')():
            links.append(
                SitemapEntry('User Permissions', admin_url + 'groups/', className="admin-nav-user-perms"))
        if not c.project.is_root and has_access(c.project, 'admin')():
            links.append(
                SitemapEntry('Permissions', admin_url + 'permissions/'))
        if len(c.project.neighborhood_invitations):
            links.append(
                SitemapEntry('Invitation(s)', admin_url + 'invitations'))
        links.append(SitemapEntry('Audit Trail', admin_url + 'audit/'))
        if c.project.is_nbhd_project:
            links.append(SitemapEntry('Statistics', nbhd_admin_url + 'stats/'))
            links.append(None)
            links.append(SitemapEntry('Help', nbhd_admin_url + 'help/'))
        # let registered 'admin' extensions append/modify the sidebar
        for ep_name in sorted(g.entry_points['admin'].keys()):
            admin_extension = g.entry_points['admin'][ep_name]
            admin_extension().update_project_sidebar_menu(links)
        return links
    def admin_menu(self):
        # The admin app has no per-tool admin options of its own.
        return []
    def install(self, project):
        # Nothing to set up; the admin app has no per-project artifacts.
        pass
    def bulk_export(self, f, export_path='', with_attachments=False):
        # Export the project document itself as JSON.
        json.dump(self.project, f, cls=jsonify.JSONEncoder, indent=2)
class AdminExtensionLookup(object):
    """Dispatches ``/admin/ext/<name>`` URLs to controllers registered by
    'admin' entry-point extensions."""
    @expose()
    def _lookup(self, name, *remainder):
        # Ask each admin extension (in sorted entry-point order) whether it
        # provides a controller for *name*; first match wins, else 404.
        extensions = g.entry_points['admin']
        for key in sorted(extensions):
            controller_cls = extensions[key]().project_admin_controllers.get(name)
            if controller_cls:
                return controller_cls(), remainder
        raise exc.HTTPNotFound(name)
class ProjectAdminController(BaseController):
    """Controller for a project's ``/admin/`` pages.

    Covers project metadata, trove categorization, screenshots, tool
    (mount point) management, neighborhood invitations and bulk export.
    Sub-controllers handle permissions, groups, the audit log and
    admin extensions.
    """

    def _check_security(self):
        require_access(c.project, 'admin')

    def __init__(self):
        self.permissions = PermissionsController()
        self.groups = GroupsController()
        self.audit = AuditController()
        self.ext = AdminExtensionLookup()

    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_admin.html')
    def index(self, **kw):
        return dict()

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_invitations.html')
    def invitations(self):
        return dict()

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_overview.html')
    def overview(self, **kw):
        """Render the metadata-editing 'overview' page."""
        c.metadata_admin = W.metadata_admin
        # need this because features field expects data in specific format
        metadata_admin_value = h.fixed_attrs_proxy(
            c.project,
            features=[{'feature': f} for f in c.project.features])
        allow_project_delete = asbool(config.get('allow_project_delete', True))
        return dict(allow_project_delete=allow_project_delete,
                    metadata_admin_value=metadata_admin_value,
                    )

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_screenshots.html')
    def screenshots(self, **kw):
        c.screenshot_admin = W.screenshot_admin
        c.screenshot_list = W.screenshot_list
        return dict()

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_trove.html')
    def trove(self):
        """Render the trove categorization page."""
        c.label_edit = W.label_edit
        base_troves_by_name = {t.shortname: t
                               for t in M.TroveCategory.query.find(dict(trove_parent_id=0))}
        # configured categories are listed first, the rest alphabetically
        first_troves = aslist(config.get('trovecategories.admin.order', 'topic,license,os'), ',')
        base_troves = [
            base_troves_by_name.pop(t) for t in first_troves
        ] + sorted(list(base_troves_by_name.values()), key=attrgetter('fullname'))
        # per-category recommended troves come from "id=label,id=label" config
        trove_recommendations = {}
        for trove in base_troves:
            config_name = 'trovecategories.admin.recommended.{}'.format(trove.shortname)
            recommendation_pairs = aslist(config.get(config_name, []), ',')
            trove_recommendations[trove.shortname] = OrderedDict()
            for pair in recommendation_pairs:
                trove_id, label = pair.split('=')
                trove_recommendations[trove.shortname][trove_id] = label
        return dict(base_troves=base_troves,
                    trove_recommendations=trove_recommendations)

    @expose('jinja:allura.ext.admin:templates/project_tools_moved.html')
    def tools_moved(self, **kw):
        return {}

    @expose()
    @require_post()
    def update_labels(self, labels=None, **kw):
        require_access(c.project, 'admin')
        c.project.labels = labels.split(',')
        M.AuditLog.log('updated labels')
        redirect('trove')

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_install_tool.html')
    def install_tool(self, tool_name=None, **kw):
        """Render the install dialog for *tool_name*.

        'subproject' is a pseudo-tool with no install options.
        """
        if tool_name == 'subproject':
            tool = {
                'tool_label': 'Sub Project',
                'default_mount_label': 'SubProject',
                'default_mount_point': 'subproject'
            }
            options = []
        else:
            tool = g.entry_points['tool'][tool_name]
            options = tool.options_on_install()
        return dict(
            tool_name=tool_name,
            tool=tool,
            options=options,
            existing_mount_points=c.project.mount_points()
        )

    @expose()
    def _lookup(self, name, *remainder):
        # dispatch /admin/<mount_point>/... to that tool's admin controller
        app = c.project.app_instance(name)
        if app is None:
            raise exc.HTTPNotFound(name)
        return app.admin, remainder

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_permissions.html')
    def groups(self, **kw):
        return dict()

    @expose()
    @require_post()
    @validate(W.metadata_admin, error_handler=overview)
    @h.vardec
    def update(self, name=None,
               short_description=None,
               summary='',
               icon=None,
               category=None,
               external_homepage='',
               video_url='',
               support_page='',
               support_page_url='',
               twitter_handle='',
               facebook_page='',
               removal='',
               moved_to_url='',
               tracking_id='',
               features=None,
               **kw):
        """Persist the overview form.

        Each changed field is individually audit-logged.  ``delete_icon``,
        ``delete`` and ``undelete`` in ``kw`` short-circuit into their own
        actions and redirect immediately.
        """
        require_access(c.project, 'update')
        flash_status = 'success'
        flash_message = 'Form values saved'
        if removal != c.project.removal:
            M.AuditLog.log('change project removal status to %s', removal)
            c.project.removal = removal
            c.project.removal_changed_date = datetime.utcnow()
        if 'delete_icon' in kw:
            M.ProjectFile.query.remove(dict(project_id=c.project._id, category=re.compile(r'^icon')))
            c.project.set_tool_data('allura', icon_original_size=None, icon_sha256=None)
            M.AuditLog.log('remove project icon')
            g.post_event('project_updated')
            redirect('overview')
        elif 'delete' in kw:
            allow_project_delete = asbool(
                config.get('allow_project_delete', True))
            if allow_project_delete or not c.project.is_root:
                M.AuditLog.log('delete project')
                plugin.ProjectRegistrationProvider.get().delete_project(
                    c.project, c.user)
            redirect('overview')
        elif 'undelete' in kw:
            M.AuditLog.log('undelete project')
            plugin.ProjectRegistrationProvider.get().undelete_project(
                c.project, c.user)
            redirect('overview')
        if name and name != c.project.name:
            M.AuditLog.log('change project name to %s', name)
            c.project.name = name
        if short_description != c.project.short_description:
            M.AuditLog.log('change short description to %s', short_description)
            c.project.short_description = short_description
        if summary != c.project.summary:
            M.AuditLog.log('change summary to %s', summary)
            c.project.summary = summary
        category = category and ObjectId(category) or None
        if category != c.project.category_id:
            M.AuditLog.log('change category to %s', category)
            c.project.category_id = category
        if external_homepage != c.project.external_homepage:
            M.AuditLog.log('change external home page to %s',
                           external_homepage)
            c.project.external_homepage = external_homepage
        if video_url != c.project.video_url:
            M.AuditLog.log('change video url to %s', video_url)
            c.project.video_url = video_url
        if support_page != c.project.support_page:
            M.AuditLog.log('change project support page to %s', support_page)
            c.project.support_page = support_page
        old_twitter = c.project.social_account('Twitter')
        if not old_twitter or twitter_handle != old_twitter.accounturl:
            M.AuditLog.log('change project twitter handle to %s',
                           twitter_handle)
            c.project.set_social_account('Twitter', twitter_handle)
        old_facebook = c.project.social_account('Facebook')
        if not old_facebook or facebook_page != old_facebook.accounturl:
            # only accept facebook.com URLs (or clearing the value)
            if not facebook_page or 'facebook.com' in urlparse(facebook_page).netloc:
                M.AuditLog.log(
                    'change project facebook page to %s', facebook_page)
                c.project.set_social_account('Facebook', facebook_page)
        if support_page_url != c.project.support_page_url:
            M.AuditLog.log('change project support page url to %s',
                           support_page_url)
            c.project.support_page_url = support_page_url
        if moved_to_url != c.project.moved_to_url:
            M.AuditLog.log('change project moved to url to %s', moved_to_url)
            c.project.moved_to_url = moved_to_url
        if tracking_id != c.project.tracking_id:
            M.AuditLog.log('change project tracking ID to %s', tracking_id)
            c.project.tracking_id = tracking_id
        features = [f['feature'].strip() for f in features or []
                    if f.get('feature', '').strip()]
        if features != c.project.features:
            M.AuditLog.log('change project features to %s', features)
            c.project.features = features
        if icon is not None and icon != b'':
            if c.project.icon:
                M.ProjectFile.query.remove(dict(project_id=c.project._id, category=re.compile(r'^icon')))
            save_icon = c.project.save_icon(icon.filename, icon.file, content_type=icon.type)
            if not save_icon:
                M.AuditLog.log('could not update project icon')
                flash_message = f'{flash_message}, but image upload failed'
                flash_status = 'warning'
            else:
                M.AuditLog.log('update project icon')
        g.post_event('project_updated')
        flash(flash_message, flash_status)
        redirect('overview')

    def _add_trove(self, type, new_trove):
        """Attach trove ``new_trove`` (a trove_cat_id) to the project under
        trove field *type*.  Returns ``(trove_obj, error_msg)`` where
        ``trove_obj`` may be None for an unknown id."""
        current_troves = getattr(c.project, 'trove_%s' % type)
        trove_obj = M.TroveCategory.query.get(trove_cat_id=int(new_trove))
        error_msg = None
        if type in ['license', 'audience', 'developmentstatus', 'language'] and len(current_troves) >= 6:
            error_msg = 'You may not have more than 6 of this category.'
        elif type in ['topic'] and len(current_troves) >= 3:
            error_msg = 'You may not have more than 3 of this category.'
        elif trove_obj is not None:
            if trove_obj._id not in current_troves:
                current_troves.append(trove_obj._id)
                M.AuditLog.log('add trove %s: %s', type, trove_obj.fullpath)
                # just in case the event handling is super fast
                ThreadLocalORMSession.flush_all()
                c.project.last_updated = datetime.utcnow()
                g.post_event('project_updated')
            else:
                error_msg = 'This category has already been assigned to the project.'
        else:
            # BUGFIX: previously an unknown trove_cat_id returned (None, None),
            # which made add_trove_js crash with AttributeError.
            error_msg = 'Invalid category id.'
        return (trove_obj, error_msg)

    @expose('json:')
    @require_post()
    def add_trove_js(self, type, new_trove, **kw):
        require_access(c.project, 'update')
        trove_obj, error_msg = self._add_trove(type, new_trove)
        # trove_obj is None when the id did not match any category; return
        # nulls for its fields instead of raising AttributeError
        return dict(
            trove_full_path=trove_obj.fullpath_within_type if trove_obj else None,
            trove_cat_id=trove_obj.trove_cat_id if trove_obj else None,
            error_msg=error_msg)

    @expose()
    @require_post()
    def add_trove(self, type, new_trove, **kw):
        require_access(c.project, 'update')
        trove_obj, error_msg = self._add_trove(type, new_trove)
        if error_msg:
            flash(error_msg, 'error')
        redirect('trove')

    @expose()
    @require_post()
    def delete_trove(self, type, trove, **kw):
        """Remove a trove category from the project."""
        require_access(c.project, 'update')
        trove_obj = M.TroveCategory.query.get(trove_cat_id=int(trove))
        current_troves = getattr(c.project, 'trove_%s' % type)
        if trove_obj is not None and trove_obj._id in current_troves:
            M.AuditLog.log('remove trove %s: %s', type, trove_obj.fullpath)
            current_troves.remove(trove_obj._id)
            # just in case the event handling is super fast
            ThreadLocalORMSession.flush_all()
            c.project.last_updated = datetime.utcnow()
            g.post_event('project_updated')
        redirect('trove')

    @expose()
    @require_post()
    @validate(W.screenshot_admin)
    def add_screenshot(self, screenshot=None, caption=None, **kw):
        """Add a project screenshot (max 6), de-duplicating filenames."""
        require_access(c.project, 'update')
        screenshots = c.project.get_screenshots()
        if len(screenshots) >= 6:
            flash('You may not have more than 6 screenshots per project.',
                  'error')
        elif screenshot is not None and screenshot != '':
            future_bmp = False
            e_filename, e_fileext = os.path.splitext(screenshot.filename)
            for screen in screenshots:
                c_filename, c_fileext = os.path.splitext(screen.filename)
                if c_fileext == '.png' and e_fileext.lower() == '.bmp' and e_filename == c_filename:
                    future_bmp = True
                # If both filenames (without ext.) are equal, the existing file ext. is png
                # and the given file ext is bmp, there would be two identically-named png files.
                if screen.filename == screenshot.filename or future_bmp:
                    screenshot.filename = re.sub(r'(.*)\.(.*)', r'\1-' + str(randint(1000,9999)) + r'.\2', screenshot.filename)
                    # if filename already exists append a random number
                    break
            M.AuditLog.log('add screenshot')
            sort = 1 + max([ss.sort or 0 for ss in screenshots] or [0])
            M.ProjectFile.save_image(
                screenshot.filename, screenshot.file, content_type=screenshot.type,
                save_original=True,
                original_meta=dict(
                    project_id=c.project._id,
                    category='screenshot',
                    caption=caption,
                    sort=sort),
                square=True, thumbnail_size=(150, 150),
                thumbnail_meta=dict(project_id=c.project._id, category='screenshot_thumb'), convert_bmp=True)
        g.post_event('project_updated')
        redirect('screenshots')

    @expose()
    @require_post()
    def sort_screenshots(self, **kw):
        """Sort project screenshots.
        Called via ajax when screenshots are reordered via drag/drop on
        the Screenshots admin page.
        ``kw`` is a mapping of (screenshot._id, sort_order) pairs.
        """
        for s in c.project.get_screenshots():
            if str(s._id) in kw:
                s.sort = int(kw[str(s._id)])
        g.post_event('project_updated')

    @expose()
    @require_post()
    def delete_screenshot(self, id=None, **kw):
        require_access(c.project, 'update')
        if id is not None and id != '':
            M.AuditLog.log('remove screenshot')
            M.ProjectFile.query.remove(
                dict(project_id=c.project._id, _id=ObjectId(id)))
            g.post_event('project_updated')
        redirect('screenshots')

    @expose()
    @require_post()
    def edit_screenshot(self, id=None, caption=None, **kw):
        require_access(c.project, 'update')
        if id is not None and id != '':
            M.ProjectFile.query.get(
                project_id=c.project._id, _id=ObjectId(id)).caption = caption
            g.post_event('project_updated')
        redirect('screenshots')

    @expose()
    @require_post()
    def join_neighborhood(self, nid):
        """Accept a neighborhood invitation (or rejoin 'Projects' when nid
        is empty)."""
        require_access(c.project, 'admin')
        if not nid:
            n = M.Neighborhood.query.get(name='Projects')
            c.project.neighborhood_id = n._id
            flash('Joined %s' % n.name)
            redirect(c.project.url() + 'admin/')
        nid = ObjectId(str(nid))
        if nid not in c.project.neighborhood_invitations:
            flash('No invitation to that neighborhood', 'error')
            redirect('.')
        c.project.neighborhood_id = nid
        n = M.Neighborhood.query.get(_id=nid)
        flash('Joined %s' % n.name)
        redirect('invitations')

    def _update_mounts(self, subproject=None, tool=None, new=None, **kw):
        '''
        Returns the new App or Subproject, if one was installed.
        Returns None otherwise.
        '''
        if subproject is None:
            subproject = []
        if tool is None:
            tool = []
        new_app = None
        # update/delete existing subprojects
        for sp in subproject:
            p = M.Project.query.get(shortname=sp['shortname'],
                                    neighborhood_id=c.project.neighborhood_id)
            if sp.get('delete'):
                require_access(c.project, 'admin')
                M.AuditLog.log('delete subproject %s', sp['shortname'])
                p.removal = 'deleted'
                plugin.ProjectRegistrationProvider.get().delete_project(
                    p, c.user)
            elif not new:
                M.AuditLog.log('update subproject %s', sp['shortname'])
                p.name = sp['name']
                p.ordinal = int(sp['ordinal'])
        # update/delete existing tools
        for p in tool:
            if p.get('delete'):
                require_access(c.project, 'admin')
                M.AuditLog.log('uninstall tool %s', p['mount_point'])
                c.project.uninstall_app(p['mount_point'])
            elif not new:
                M.AuditLog.log('update tool %s', p['mount_point'])
                options = c.project.app_config(p['mount_point']).options
                options.mount_label = p['mount_label']
                options.ordinal = int(p['ordinal'])
        # install a new subproject (no ep_name) or tool
        if new and new.get('install'):
            ep_name = new.get('ep_name', None)
            if not ep_name:
                require_access(c.project, 'create')
                mount_point = new['mount_point'].lower() or h.nonce()
                M.AuditLog.log('create subproject %s', mount_point)
                sp = c.project.new_subproject(mount_point)
                sp.name = new['mount_label']
                if 'ordinal' in new:
                    sp.ordinal = int(new['ordinal'])
                else:
                    sp.ordinal = c.project.last_ordinal_value() + 1
                new_app = sp
            else:
                require_access(c.project, 'admin')
                installable_tools = AdminApp.installable_tools_for(c.project)
                if ep_name.lower() not in [t['name'].lower() for t in installable_tools]:
                    flash('Installation limit exceeded.', 'error')
                    return
                mount_point = new['mount_point'] or ep_name
                M.AuditLog.log('install tool %s', mount_point)
                App = g.entry_points['tool'][ep_name]
                # pass only options which app expects
                config_on_install = {
                    k: v for (k, v) in six.iteritems(kw)
                    if k in [o.name for o in App.options_on_install()]
                }
                new_app = c.project.install_app(
                    ep_name,
                    mount_point,
                    mount_label=new['mount_label'],
                    ordinal=int(new['ordinal']) if 'ordinal' in new else None,
                    **config_on_install)
        g.post_event('project_updated')
        g.post_event('project_menu_updated')
        return new_app

    @h.vardec
    @expose()
    @require_post()
    def update_mounts(self, subproject=None, tool=None, new=None, page=0, limit=200, **kw):
        """Handle the Tools admin form: delete/update/install subprojects
        and tools, then redirect to the most sensible page."""
        if new and new['ep_name'] == 'subproject':
            new['ep_name'] = ""
        try:
            new_app = self._update_mounts(subproject, tool, new, **kw)
            if new_app:
                if getattr(new_app, 'tool_label', '') == 'External Link':
                    flash('{} installed successfully.'.format(new_app.tool_label))
                else:
                    new_url = new_app.url
                    if callable(new_url):  # subprojects have a method instead of property
                        new_url = new_url()
                    redirect(new_url)
        except forge_exc.ForgeError as fe:
            # renamed from `exc` to avoid shadowing the webob `exc` module
            flash('%s: %s' % (fe.__class__.__name__, fe.args[0]),
                  'error')
        if request.referer is not None and tool is not None and 'delete' in tool[0] and \
                re.search(c.project.url() + r'(admin\/|)' + tool[0]['mount_point'] + r'\/*',
                          six.ensure_text(request.referer)):
            # Redirect to root when deleting current module
            redirect('../')
        redirect(six.ensure_text(request.referer or '/'))

    @expose('jinja:allura.ext.admin:templates/export.html')
    def export(self, tools=None, with_attachments=False):
        """Bulk-export admin page; a POST schedules the export task and
        a GET shows exportable tools plus total attachment size (MB)."""
        if not asbool(config.get('bulk_export_enabled', True)):
            raise exc.HTTPNotFound()
        if request.method == 'POST':
            try:
                ProjectAdminRestController().export(tools, send_email=True, with_attachments=with_attachments)
            except (exc.HTTPBadRequest, exc.HTTPServiceUnavailable) as e:
                flash(str(e), 'error')
                redirect('.')
            else:
                # BUGFIX: corrected "recieve" typo in user-facing message
                flash(
                    'Export scheduled. You will receive an email with download instructions when complete.', 'ok')
                redirect('export')
        exportable_tools = AdminApp.exportable_tools_for(c.project)
        apps_id = [tool._id for tool in exportable_tools]
        db = M.session.project_doc_session.db
        files_id = db.attachment.find({"app_config_id": {"$in": apps_id}}).distinct("file_id")
        try:
            # aggregate total attachment size, converted from bytes to MB
            total_size = list(db.attachment.files.aggregate([
                {
                    "$match": {"_id": {"$in": files_id}}
                },
                {
                    "$group": {"_id": "total", "total_size": {"$sum": "$length"}}
                },
                {
                    "$project": {"_id": 0, "total_size": {"$divide": ["$total_size", 1000000]}}
                }
            ], cursor={}))[0].get('total_size')
        except IndexError:
            # no attachments at all
            total_size = 0
        return {
            'tools': exportable_tools,
            'status': c.project.bulk_export_status(),
            'total_size': round(total_size, 3)
        }
class ProjectAdminRestController(BaseController):
    """
    Exposes RESTful API for project admin actions.
    """

    def _check_security(self):
        require_access(c.project, 'admin')

    @expose('json:')
    @require_post()
    def mount_order(self, **kw):
        """Reorder tools/subprojects; ``kw`` maps ordinal -> mount_point."""
        if not kw:
            raise exc.HTTPBadRequest('Expected kw params in the form of "ordinal: mount_point"')
        try:
            sorted_tools = sorted(list(kw.items()), key=lambda x: int(x[0]))
        except ValueError:
            raise exc.HTTPBadRequest('Invalid kw: expected "ordinal: mount_point"')
        for ordinal, mount_point in sorted_tools:
            try:
                c.project.app_config(mount_point).options.ordinal = int(ordinal)
            except AttributeError:
                # app_config() returned None: the mount point is a sub-project
                p = M.Project.query.get(shortname="{}/{}".format(c.project.shortname, mount_point),
                                        neighborhood_id=c.project.neighborhood_id)
                if p:
                    p.ordinal = int(ordinal)
        M.AuditLog.log('Updated tool order')
        g.post_event('project_menu_updated')
        return {'status': 'ok'}

    @expose('json:')
    @require_post()
    def configure_tool_grouping(self, grouping_threshold='1', **kw):
        """Set how many same-type tools trigger grouping in the nav (1-10)."""
        try:
            grouping_threshold = int(grouping_threshold)
            if grouping_threshold < 1 or grouping_threshold > 10:
                raise exc.HTTPBadRequest('Invalid threshold. Expected a value between 1 and 10')
            c.project.set_tool_data(
                'allura', grouping_threshold=grouping_threshold)
        except ValueError:
            raise exc.HTTPBadRequest('Invalid threshold. Expected a value between 1 and 10')
        M.AuditLog.log('Updated tool grouping threshold')
        g.post_event('project_menu_updated')
        return {'status': 'ok'}

    @expose('json:')
    def installable_tools(self, **kw):
        """ List of installable tools and their default options.
        """
        tools = []
        for tool in AdminApp.installable_tools_for(c.project):
            tools.append({
                'name': tool['name'],
                'description': " ".join(tool['app'].tool_description.split()),
                'icons': tool['app'].icons,
                'tool_label': tool['app'].tool_label,
                'defaults': {
                    'default_options': tool['app'].default_options(),
                    'default_mount_label': tool['app'].default_mount_label,
                    # BUGFIX: was tool['app'].admin_menu_delete_button (copy-paste
                    # error); the key is the tool's default mount point
                    'default_mount_point': tool['app'].default_mount_point,
                }
            })
        if c.project.is_root:
            # subprojects only allowed on top-level projects (no nesting)
            tools.append({
                'name': 'subproject',
                'description': "With a Sub Project you can add an entire project just like any other tool.",
                'tool_label': 'Sub Project',
                'defaults': {
                    'default_mount_label': 'Sub',
                    'default_mount_point': 'sub',
                }
            })
        return {'tools': tools}

    @expose('json:')
    @require_post()
    def export(self, tools=None, send_email=False, with_attachments=False, **kw):
        """
        Initiate a bulk export of the project data.
        Must be given a list of tool mount points to include in the export.
        The list can either be comma-separated or a repeated param, e.g.,
        `export?tools=tickets&tools=discussion`.
        If the tools are not provided, an invalid mount point is listed, or
        there is some other problems with the arguments, a `400 Bad Request`
        response will be returned.
        If an export is already currently running for this project, a
        `503 Unavailable` response will be returned.
        Otherwise, a JSON object of the form
        `{"status": "in progress", "filename": FILENAME}` will be returned,
        where `FILENAME` is the filename of the export artifact relative to
        the users shell account directory.
        """
        if not asbool(config.get('bulk_export_enabled', True)):
            raise exc.HTTPNotFound()
        if not tools:
            raise exc.HTTPBadRequest(
                'Must give at least one tool mount point to export')
        tools = aslist(tools, ',')
        exportable_tools = AdminApp.exportable_tools_for(c.project)
        allowed = set(t.options.mount_point for t in exportable_tools)
        if not set(tools).issubset(allowed):
            raise exc.HTTPBadRequest('Invalid tool')
        if c.project.bulk_export_status() == 'busy':
            raise exc.HTTPServiceUnavailable(
                'Export for project %s already running' % c.project.shortname)
        # filename (potentially) includes a timestamp, so we have
        # to pre-generate to be able to return it to the user
        filename = c.project.bulk_export_filename()
        export_tasks.bulk_export.post(tools, filename, send_email=send_email, with_attachments=with_attachments)
        return {
            'status': 'in progress',
            'filename': filename,
        }

    @expose('json:')
    def admin_options(self, mount_point=None, **kw):
        """
        Returns the admin options for a given mount_point
        :type mount_point: str|allura.model.project.AppConfig
        """
        if not mount_point:
            raise exc.HTTPBadRequest('Must provide a mount point')
        tool = c.project.app_instance(mount_point)
        if tool is None:
            raise exc.HTTPBadRequest('The mount point you provided was invalid')
        admin_menu = tool.admin_menu()
        if tool.admin_menu_delete_button:
            admin_menu.append(tool.admin_menu_delete_button)
        return {
            'options': [dict(text=m.label, href=m.url, className=m.className)
                        for m in admin_menu]
        }

    @expose('json:')
    def export_status(self, **kw):
        """
        Check the status of a bulk export.
        Returns an object containing only one key, `status`, whose value is
        either `'busy'` or `'ready'`.
        """
        status = c.project.bulk_export_status()
        return {'status': status or 'ready'}

    @expose('json:')
    @require_post()
    def install_tool(self, tool=None, mount_point=None, mount_label=None, order=None, **kw):
        """API for installing tools in current project.
        Requires a valid tool, mount point and mount label names.
        (All arguments are required.)
        Usage example::
            POST to:
            /rest/p/testproject/admin/install_tool/
            with params:
            {
                'tool': 'tickets',
                'mount_point': 'mountpoint',
                'mount_label': 'mountlabel',
                'order': 'first|last|alpha_tool'
            }
        Example output (in successful case)::
            {
                "info": "Tool tickets with mount_point mountpoint and mount_label mountlabel was created.",
                "success": true
            }
        """
        controller = ProjectAdminController()
        # --- validate arguments ---
        if not tool or not mount_point or not mount_label:
            return {
                'success': False,
                'info': 'All arguments required.'
            }
        installable_tools = AdminApp.installable_tools_for(c.project)
        tools_names = [t['name'] for t in installable_tools]
        if tool not in tools_names:
            return {
                'success': False,
                'info': 'Incorrect tool name, or limit is reached.'
            }
        if c.project.app_instance(mount_point) is not None:
            return {
                'success': False,
                'info': 'Mount point already exists.',
            }
        if order is None:
            order = 'last'
        # BUGFIX: an unrecognized `order` value previously fell through and
        # raised NameError (HTTP 500); reject it cleanly instead
        if order not in ('first', 'last', 'alpha_tool'):
            return {
                'success': False,
                'info': 'Invalid order value. Expected first, last, or alpha_tool.'
            }
        # --- compute the ordinal slot for the new tool ---
        mounts = [{'ordinal': ac.options.ordinal,
                   'label': ac.options.mount_label,
                   'mount': ac.options.mount_point,
                   'type': ac.tool_name.lower()}
                  for ac in c.project.app_configs]
        subs = {p.shortname: p for p in M.Project.query.find({'parent_id': c.project._id})}
        for sub in subs.values():
            mounts.append({'ordinal': sub.ordinal,
                           'mount': sub.shortname,
                           'type': 'sub-project'})
        mounts.sort(key=itemgetter('ordinal'))
        if order == 'first':
            ordinal = 0
        elif order == 'last':
            ordinal = len(mounts)
        elif order == 'alpha_tool':
            # insert before the first same-type tool whose label sorts later
            tool = tool.lower()
            for i, mount in enumerate(mounts):
                if mount['type'] == tool and mount['label'] > mount_label:
                    ordinal = i
                    break
            else:
                ordinal = len(mounts)
        # renumber all mounts with the new tool slotted in
        mounts.insert(ordinal, {'ordinal': ordinal, 'type': 'new'})
        for i, mount in enumerate(mounts):
            if mount['type'] == 'new':
                pass
            elif mount['type'] == 'sub-project':
                subs[mount['mount']].ordinal = i
            else:
                c.project.app_config(mount['mount']).options.ordinal = i
        # --- install via the regular admin controller ---
        data = {
            'install': 'install',
            'ep_name': tool,
            'ordinal': ordinal,
            'mount_point': mount_point,
            'mount_label': mount_label
        }
        params = {
            'new': data
        }
        if kw:
            params.update(**kw)
        try:
            controller._update_mounts(**params)
        except forge_exc.ForgeError as e:
            return {
                'success': False,
                'info': str(e),
            }
        return {
            'success': True,
            'info': 'Tool %s with mount_point %s and mount_label %s was created.'
                    % (tool, mount_point, mount_label)
        }

    @expose()
    def _lookup(self, *args):
        # dispatch /rest/.../admin/<mount_point>/... to the tool's admin API
        if len(args) == 0:
            raise exc.HTTPNotFound(args)
        name, remainder = args[0], args[1:]
        app = c.project.app_instance(name)
        if app is None or app.admin_api_root is None:
            raise exc.HTTPNotFound(name)
        return app.admin_api_root, remainder
class PermissionsController(BaseController):
    # Fine-grained permission editing.  Only reachable for subprojects; root
    # projects are redirected to the groups UI (see _check_security).
    def _check_security(self):
        # Do not allow access to 'permissions' page for root projects.
        # Users should use 'groups' instead. This is to prevent creating 'private' projects
        # - subprojects are still allowed.
        # - tools pages are also still allowed, but are in a different controller
        if c.project.is_root:
            redirect('../groups')
        require_access(c.project, 'admin')
    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_permissions.html')
    def index(self, **kw):
        # Render one permission card per project permission.
        c.card = W.permission_card
        return dict(permissions=self._index_permissions())
    @without_trailing_slash
    @expose()
    @h.vardec
    @require_post()
    def update(self, card=None, **kw):
        # Rebuild the project ACL from the submitted permission cards.
        # `card` is a list of {'id': perm, 'value': [role_id...], 'new': [...]}.
        permissions = self._index_permissions()
        old_permissions = dict(permissions)
        for args in card:
            perm = args['id']
            new_group_ids = args.get('new', [])
            group_ids = args.get('value', [])
            # single values arrive as bare strings; normalize to lists
            if isinstance(new_group_ids, six.string_types):
                new_group_ids = [new_group_ids]
            if isinstance(group_ids, six.string_types):
                group_ids = [group_ids]
            # make sure the admin group has the admin permission
            if perm == 'admin':
                if c.project.is_root:
                    pid = c.project._id
                else:
                    pid = c.project.parent_id
                admin_group_id = str(
                    M.ProjectRole.query.get(project_id=pid, name='Admin')._id)
                if admin_group_id not in group_ids + new_group_ids:
                    flash(
                        'You cannot remove the admin group from the admin permission.', 'warning')
                    group_ids.append(admin_group_id)
            permissions[perm] = []
            role_ids = list(map(ObjectId, group_ids + new_group_ids))
            permissions[perm] = role_ids
        c.project.acl = []
        for perm, role_ids in six.iteritems(permissions):
            # helper for audit-log readability: role names, sorted, comma-joined
            role_names = lambda ids: ','.join(sorted(
                pr.name for pr in M.ProjectRole.query.find(dict(_id={'$in': ids}))))
            old_role_ids = old_permissions.get(perm, [])
            if old_role_ids != role_ids:
                M.AuditLog.log('updated "%s" permissions: "%s" => "%s"',
                               perm, role_names(old_role_ids), role_names(role_ids))
            c.project.acl += [M.ACE.allow(rid, perm) for rid in role_ids]
        g.post_event('project_updated')
        redirect('.')
    def _index_permissions(self):
        # Map each permission name to the role ids currently ALLOWed for it.
        permissions = dict(
            (p, []) for p in c.project.permissions)
        for ace in c.project.acl:
            if ace.access == M.ACE.ALLOW:
                permissions[ace.permission].append(ace.role_id)
        return permissions
class GroupsController(BaseController):
    def _check_security(self):
        # Admin permission on the project is required for all group management.
        require_access(c.project, 'admin')
def _index_permissions(self):
permissions = dict(
(p, []) for p in c.project.permissions)
for ace in c.project.acl:
if ace.access == M.ACE.ALLOW:
permissions[ace.permission].append(ace.role_id)
return permissions
    def _map_group_permissions(self):
        """Build the permission matrix shown on the groups admin page.

        Returns a dict keyed by str(role _id); each value is a list of
        ``{'has': 'yes'|'no'|'inherit', 'text': ..., 'name': perm}`` dicts,
        one per project permission, reflecting direct or inherited grants.
        """
        roles = c.project.named_roles
        permissions = self._index_permissions()
        permissions_by_role = dict()
        auth_role = M.ProjectRole.authenticated()
        anon_role = M.ProjectRole.anonymous()
        for role in roles + [auth_role, anon_role]:
            permissions_by_role[str(role._id)] = []
            for perm in permissions:
                perm_info = dict(has="no", text="Does not have permission %s" %
                                 perm, name=perm)
                role_ids = permissions[perm]
                if role._id in role_ids:
                    # direct grant on this role
                    perm_info['text'] = "Has permission %s" % perm
                    perm_info['has'] = "yes"
                else:
                    # inherited from a child role (e.g. Admin inherits Developer)
                    for r in role.child_roles():
                        if r._id in role_ids:
                            perm_info['text'] = "Inherited permission %s from %s" % (
                                perm, r.name)
                            perm_info['has'] = "inherit"
                            break
                if perm_info['has'] == "no":
                    # everything granted to Anonymous/Authenticated applies broadly
                    if anon_role._id in role_ids:
                        perm_info[
                            'text'] = "Inherited permission %s from Anonymous" % perm
                        perm_info['has'] = "inherit"
                    elif auth_role._id in role_ids and role != anon_role:
                        perm_info[
                            'text'] = "Inherited permission %s from Authenticated" % perm
                        perm_info['has'] = "inherit"
                permissions_by_role[str(role._id)].append(perm_info)
        return permissions_by_role
@without_trailing_slash
@expose()
@require_post()
@h.vardec
def delete_group(self, group_name, **kw):
role = M.ProjectRole.by_name(group_name)
if not role:
flash('Group "%s" does not exist.' % group_name, 'error')
else:
role.delete()
M.AuditLog.log('delete group %s', group_name)
flash('Group "%s" deleted successfully.' % group_name)
g.post_event('project_updated')
redirect('.')
    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_groups.html')
    def index(self, **kw):
        """Render the user-permissions page with one card per group."""
        c.card = W.group_card
        permissions_by_role = self._map_group_permissions()
        auth_role = M.ProjectRole.authenticated()
        anon_role = M.ProjectRole.anonymous()
        roles = c.project.named_roles
        # trailing None renders the "create new group" card in the template
        roles.append(None)
        return dict(roles=roles, permissions_by_role=permissions_by_role,
                    auth_role=auth_role, anon_role=anon_role)
    @without_trailing_slash
    @expose('json:')
    @require_post()
    @h.vardec
    def change_perm(self, role_id, permission, allow="true", **kw):
        """Grant or revoke a single permission for a group (ajax endpoint).

        Returns the refreshed permission matrix, or ``{'error': ...}`` when
        attempting to strip 'admin' from the Admin group.
        """
        if allow == "true":
            M.AuditLog.log('granted permission %s to group %s', permission,
                           M.ProjectRole.query.get(_id=ObjectId(role_id)).name)
            c.project.acl.append(M.ACE.allow(ObjectId(role_id), permission))
        else:
            # safeguard: the Admin group must always keep the admin permission
            admin_group_id = str(M.ProjectRole.by_name('Admin')._id)
            if admin_group_id == role_id and permission == 'admin':
                return dict(error='You cannot remove the admin permission from the admin group.')
            M.AuditLog.log('revoked permission %s from group %s', permission,
                           M.ProjectRole.query.get(_id=ObjectId(role_id)).name)
            # NOTE(review): assumes a matching ACE exists; .remove would raise
            # ValueError otherwise — TODO confirm callers guarantee this
            c.project.acl.remove(M.ACE.allow(ObjectId(role_id), permission))
        g.post_event('project_updated')
        return self._map_group_permissions()
    @without_trailing_slash
    @expose('json:')
    @require_post()
    @h.vardec
    def add_user(self, role_id, username, **kw):
        """Add *username* to the group *role_id* (ajax endpoint).

        Returns ``{'username', 'displayname'}`` on success, or a dict with an
        ``error`` key describing why the user could not be added.
        """
        if not username or username == '*anonymous':
            return dict(error='You must choose a user to add.')
        group = M.ProjectRole.query.get(_id=ObjectId(role_id))
        user = M.User.query.get(username=username.strip(), pending=False)
        if not group:
            return dict(error='Could not find group with id %s' % role_id)
        if not user:
            return dict(error='User %s not found' % username)
        user_role = M.ProjectRole.by_user(user, upsert=True)
        if group._id in user_role.roles:
            return dict(error='%s (%s) is already in the group %s.' % (user.display_name, username, group.name))
        M.AuditLog.log('add user %s to %s', username, group.name)
        user_role.roles.append(group._id)
        if group.name == 'Admin':
            # new admins get subscribed to every tool in the project
            for ac in c.project.app_configs:
                c.project.app_instance(ac).subscribe(user)
        g.post_event('project_updated')
        return dict(username=username, displayname=user.display_name)
@without_trailing_slash
@expose('json:')
@require_post()
@h.vardec
def remove_user(self, role_id, username, **kw):
group = M.ProjectRole.query.get(_id=ObjectId(role_id))
user = M.User.by_username(username.strip())
if group.name == 'Admin' and len(group.users_with_role()) == 1:
return dict(error='You must have at least one user with the Admin role.')
if not group:
return dict(error='Could not find group with id %s' % role_id)
if not user:
return dict(error='User %s not found' % username)
user_role = M.ProjectRole.by_user(user)
if not user_role or group._id not in user_role.roles:
return dict(error='%s (%s) is not in the group %s.' % (user.display_name, username, group.name))
M.AuditLog.log('remove user %s from %s', username, group.name)
user_role.roles.remove(group._id)
if len(user_role.roles) == 0:
# user has no roles in this project any more, so don't leave a useless doc around
user_role.delete()
g.post_event('project_updated')
return dict()
    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_group.html')
    def new(self):
        """Render the create-group form."""
        c.form = W.new_group_settings
        return dict(
            group=None,
            action="create")

    @expose()
    @require_post()
    @validate(W.new_group_settings)
    @h.vardec
    def create(self, name=None, **kw):
        """Create a new project group (role) with the given name."""
        if M.ProjectRole.by_name(name):
            # group names must be unique within a project
            flash('%s already exists' % name, 'error')
        else:
            M.ProjectRole(project_id=c.project._id, name=name)
        M.AuditLog.log('create group %s', name)
        g.post_event('project_updated')
        redirect('.')
class AuditController(BaseController):
    """Read-only view of the project's audit trail."""

    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/audit.html')
    def index(self, limit=25, page=0, **kwargs):
        """Render one page of audit-log entries, newest first."""
        limit, page = int(limit), int(page)
        count = M.AuditLog.query.find(dict(project_id=c.project._id)).count()
        entries = M.AuditLog.query.find(dict(project_id=c.project._id))
        entries = entries.sort('timestamp', -1).skip(page * limit)
        if count > limit:
            entries = entries.limit(limit)
        else:
            # fewer entries than a full page: report the real page size
            limit = count
        c.widget = W.audit
        return dict(
            entries=entries.all(),
            limit=limit,
            page=page,
            count=count)
class AdminAppAdminController(DefaultAdminController):
    '''Administer the admin app'''
    # nothing beyond the DefaultAdminController behavior is needed
    pass
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import absolute_import
import logging
import re
import os
from random import randint
from collections import OrderedDict
from datetime import datetime
from six.moves.urllib.parse import urlparse
import json
from operator import itemgetter, attrgetter
import pkg_resources
from tg import tmpl_context as c, app_globals as g, response
from tg import request
from paste.deploy.converters import asbool, aslist
from tg import expose, redirect, flash, validate, config, jsonify
from tg.decorators import with_trailing_slash, without_trailing_slash
from webob import exc
from bson import ObjectId
from ming.orm.ormsession import ThreadLocalORMSession
from ming.odm import session
import PIL
from allura.app import Application, DefaultAdminController, SitemapEntry
from allura.lib import helpers as h
from allura import version
from allura import model as M
from allura.lib.security import has_access, require_access, is_site_admin
from allura.lib.widgets import form_fields as ffw
from allura.lib import exceptions as forge_exc
from allura.lib import plugin
from allura.controllers import BaseController
from allura.lib.decorators import require_post
from allura.tasks import export_tasks
from allura.lib.widgets.project_list import ProjectScreenshots
from . import widgets as aw
import six
from six.moves import map
log = logging.getLogger(__name__)
class W:
    """Namespace of shared widget instances used by the admin controllers."""
    label_edit = ffw.LabelEdit()
    group_card = aw.GroupCard()
    permission_card = aw.PermissionCard()
    new_group_settings = aw.NewGroupSettings()
    screenshot_admin = aw.ScreenshotAdmin()
    screenshot_list = ProjectScreenshots(draggable=True)
    metadata_admin = aw.MetadataAdmin()
    audit = aw.AuditLog()
    page_list = ffw.PageList()
class AdminApp(Application):
    '''This is the admin app. It is pretty much required for
    a functioning allura project.
    '''
    __version__ = version.__version__
    _installable_tools = None
    max_instances = 0
    tool_label = 'admin'
    icons = {
        24: 'images/admin_24.png',
        32: 'images/admin_32.png',
        48: 'images/admin_48.png'
    }
    exportable = True
    has_notifications = False

    def __init__(self, project, config):
        """Wire up the web, REST, and admin controllers for this app."""
        Application.__init__(self, project, config)
        self.root = ProjectAdminController()
        self.api_root = ProjectAdminRestController()
        self.admin = AdminAppAdminController(self)
        self.templates = pkg_resources.resource_filename(
            'allura.ext.admin', 'templates')
        self.sitemap = [SitemapEntry('Admin', '.')]

    def is_visible_to(self, user):
        '''Whether the user can view the app.'''
        return has_access(c.project, 'create')(user=user)

    @staticmethod
    def installable_tools_for(project):
        """Return [{'name': ..., 'app': AppClass}] for tools installable here,
        sorted by status and ordinal, filtered by the project's allowed statuses."""
        tools = []
        for name, App in six.iteritems(g.entry_points['tool']):
            cfg = M.AppConfig(project_id=project._id, tool_name=name)
            if App._installable(name, project.neighborhood, project.app_configs):
                tools.append(dict(name=name, app=App))
            # prevent from saving temporary config to db
            session(cfg).expunge(cfg)
        tools.sort(key=lambda t: (t['app'].status_int(), t['app'].ordinal or 0))
        return [t for t in tools
                if t['app'].status in project.allowed_tool_status]

    @staticmethod
    def exportable_tools_for(project):
        """Return the project's exportable AppConfigs, sorted by mount point."""
        tools = []
        for tool in project.app_configs:
            if project.app_instance(tool).exportable:
                tools.append(tool)
        return sorted(tools, key=lambda t: t.options.mount_point)

    def main_menu(self):
        '''Apps should provide their entries to be added to the main nav
        :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
        '''
        return [SitemapEntry('Admin', '.')]

    @h.exceptionless([], log)
    def sidebar_menu(self):
        """Build the admin sidebar; the link set depends on whether this is a
        neighborhood project, a root project, or a subproject."""
        links = []
        admin_url = c.project.url() + 'admin/'
        if c.project.is_nbhd_project:
            links.append(SitemapEntry('Add Project', c.project.url()
                                      + 'add_project', ui_icon=g.icons['add']))
            nbhd_admin_url = c.project.neighborhood.url() + '_admin/'
            links = links + [
                SitemapEntry('Neighborhood'),
                SitemapEntry('Overview', nbhd_admin_url + 'overview'),
                SitemapEntry('Awards', nbhd_admin_url + 'accolades')]
        else:
            links += [
                SitemapEntry('Welcome', admin_url),
                SitemapEntry('Metadata', admin_url + 'overview', className="admin-nav-metadata"),
            ]
            if c.project.neighborhood.name != "Users":
                links += [
                    SitemapEntry('Screenshots', admin_url + 'screenshots'),
                    SitemapEntry('Categorization', admin_url + 'trove')
                ]
        if plugin.ProjectRegistrationProvider.get().registration_date(c.project) < datetime(2016, 6, 1):
            # only show transitional Tools page to older projects that may be used to it
            # no point is showing it to new projects
            links.append(SitemapEntry('Tools', admin_url + 'tools_moved'))
        if asbool(config.get('bulk_export_enabled', True)):
            links.append(SitemapEntry('Export', admin_url + 'export'))
        if c.project.is_root and has_access(c.project, 'admin')():
            links.append(
                SitemapEntry('User Permissions', admin_url + 'groups/', className="admin-nav-user-perms"))
        if not c.project.is_root and has_access(c.project, 'admin')():
            links.append(
                SitemapEntry('Permissions', admin_url + 'permissions/'))
        if len(c.project.neighborhood_invitations):
            links.append(
                SitemapEntry('Invitation(s)', admin_url + 'invitations'))
        links.append(SitemapEntry('Audit Trail', admin_url + 'audit/'))
        if c.project.is_nbhd_project:
            links.append(SitemapEntry('Statistics', nbhd_admin_url + 'stats/'))
            links.append(None)
            links.append(SitemapEntry('Help', nbhd_admin_url + 'help/'))
        # let installed admin extensions add/alter sidebar entries
        for ep_name in sorted(g.entry_points['admin'].keys()):
            admin_extension = g.entry_points['admin'][ep_name]
            admin_extension().update_project_sidebar_menu(links)
        return links

    def admin_menu(self):
        # the admin app has no per-tool admin menu of its own
        return []

    def install(self, project):
        # nothing extra to set up when installing the admin tool
        pass

    def bulk_export(self, f, export_path='', with_attachments=False):
        """Write the project metadata itself as JSON to file object *f*."""
        json.dump(self.project, f, cls=jsonify.JSONEncoder, indent=2)
class AdminExtensionLookup(object):
    """Dispatch /admin/ext/<name> URLs to controllers registered by
    'admin' entry-point extensions."""

    @expose()
    def _lookup(self, name, *remainder):
        # first extension (in sorted entry-point order) claiming the name wins
        for ep_name in sorted(g.entry_points['admin'].keys()):
            admin_extension = g.entry_points['admin'][ep_name]
            controller = admin_extension().project_admin_controllers.get(name)
            if controller:
                return controller(), remainder
        raise exc.HTTPNotFound(name)
class ProjectAdminController(BaseController):
    """Root controller for a project's /admin/ pages."""

    def _check_security(self):
        # all admin pages require project admin rights
        require_access(c.project, 'admin')

    def __init__(self):
        self.permissions = PermissionsController()
        self.groups = GroupsController()
        self.audit = AuditController()
        self.ext = AdminExtensionLookup()

    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_admin.html')
    def index(self, **kw):
        """Render the admin landing page."""
        return dict()

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_invitations.html')
    def invitations(self):
        """Render pending neighborhood invitations."""
        return dict()

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_overview.html')
    def overview(self, **kw):
        """Render the project metadata form."""
        c.metadata_admin = W.metadata_admin
        # need this because features field expects data in specific format
        metadata_admin_value = h.fixed_attrs_proxy(
            c.project,
            features=[{'feature': f} for f in c.project.features])
        allow_project_delete = asbool(config.get('allow_project_delete', True))
        return dict(allow_project_delete=allow_project_delete,
                    metadata_admin_value=metadata_admin_value,
                    )

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_screenshots.html')
    def screenshots(self, **kw):
        """Render the screenshots admin page."""
        c.screenshot_admin = W.screenshot_admin
        c.screenshot_list = W.screenshot_list
        return dict()
    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_trove.html')
    def trove(self):
        """Render the categorization page: top-level trove categories plus
        per-category recommendations, both configurable via app config."""
        c.label_edit = W.label_edit
        base_troves_by_name = {t.shortname: t
                               for t in M.TroveCategory.query.find(dict(trove_parent_id=0))}
        # categories listed in config come first, the rest alphabetically
        first_troves = aslist(config.get('trovecategories.admin.order', 'topic,license,os'), ',')
        base_troves = [
            base_troves_by_name.pop(t) for t in first_troves
        ] + sorted(list(base_troves_by_name.values()), key=attrgetter('fullname'))

        trove_recommendations = {}
        for trove in base_troves:
            # config format: "trove_id=Label,trove_id=Label,..."
            config_name = 'trovecategories.admin.recommended.{}'.format(trove.shortname)
            recommendation_pairs = aslist(config.get(config_name, []), ',')
            trove_recommendations[trove.shortname] = OrderedDict()
            for pair in recommendation_pairs:
                trove_id, label = pair.split('=')
                trove_recommendations[trove.shortname][trove_id] = label

        return dict(base_troves=base_troves,
                    trove_recommendations=trove_recommendations)
@expose('jinja:allura.ext.admin:templates/project_tools_moved.html')
def tools_moved(self, **kw):
return {}
@expose()
@require_post()
def update_labels(self, labels=None, **kw):
require_access(c.project, 'admin')
c.project.labels = labels.split(',')
M.AuditLog.log('updated labels')
redirect('trove')
    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_install_tool.html')
    def install_tool(self, tool_name=None, **kw):
        """Render the install form for a tool, or for a subproject."""
        if tool_name == 'subproject':
            # subprojects are not entry-point tools; fake the metadata
            tool = {
                'tool_label': 'Sub Project',
                'default_mount_label': 'SubProject',
                'default_mount_point': 'subproject'
            }
            options = []
        else:
            tool = g.entry_points['tool'][tool_name]
            options = tool.options_on_install()

        return dict(
            tool_name=tool_name,
            tool=tool,
            options=options,
            existing_mount_points=c.project.mount_points()
        )

    @expose()
    def _lookup(self, name, *remainder):
        """Dispatch /admin/<mount_point>/ to that tool's admin controller."""
        app = c.project.app_instance(name)
        if app is None:
            raise exc.HTTPNotFound(name)
        return app.admin, remainder

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_permissions.html')
    def groups(self, **kw):
        # NOTE(review): __init__ assigns self.groups = GroupsController(),
        # which shadows this method on instances — this looks unreachable
        # via normal dispatch; confirm before removing
        return dict()
    @expose()
    @require_post()
    @validate(W.metadata_admin, error_handler=overview)
    @h.vardec
    def update(self, name=None,
               short_description=None,
               summary='',
               icon=None,
               category=None,
               external_homepage='',
               video_url='',
               support_page='',
               support_page_url='',
               twitter_handle='',
               facebook_page='',
               removal='',
               moved_to_url='',
               tracking_id='',
               features=None,
               **kw):
        """Save the project-overview metadata form.

        Handles special one-shot actions first (icon delete, project
        delete/undelete — each of which redirects immediately), then
        applies and audit-logs every changed field.
        """
        require_access(c.project, 'update')
        flash_status = 'success'
        flash_message = 'Form values saved'

        if removal != c.project.removal:
            M.AuditLog.log('change project removal status to %s', removal)
            c.project.removal = removal
            c.project.removal_changed_date = datetime.utcnow()

        # one-shot actions: each redirects without saving the other fields
        if 'delete_icon' in kw:
            M.ProjectFile.query.remove(dict(project_id=c.project._id, category=re.compile(r'^icon')))
            c.project.set_tool_data('allura', icon_original_size=None, icon_sha256=None)
            M.AuditLog.log('remove project icon')
            g.post_event('project_updated')
            redirect('overview')
        elif 'delete' in kw:
            allow_project_delete = asbool(
                config.get('allow_project_delete', True))
            if allow_project_delete or not c.project.is_root:
                M.AuditLog.log('delete project')
                plugin.ProjectRegistrationProvider.get().delete_project(
                    c.project, c.user)
            redirect('overview')
        elif 'undelete' in kw:
            M.AuditLog.log('undelete project')
            plugin.ProjectRegistrationProvider.get().undelete_project(
                c.project, c.user)
            redirect('overview')

        # field-by-field diff; only changed values are written and logged
        if name and name != c.project.name:
            M.AuditLog.log('change project name to %s', name)
            c.project.name = name
        if short_description != c.project.short_description:
            M.AuditLog.log('change short description to %s', short_description)
            c.project.short_description = short_description
        if summary != c.project.summary:
            M.AuditLog.log('change summary to %s', summary)
            c.project.summary = summary
        category = category and ObjectId(category) or None
        if category != c.project.category_id:
            M.AuditLog.log('change category to %s', category)
            c.project.category_id = category
        if external_homepage != c.project.external_homepage:
            M.AuditLog.log('change external home page to %s',
                           external_homepage)
            c.project.external_homepage = external_homepage
        if video_url != c.project.video_url:
            M.AuditLog.log('change video url to %s', video_url)
            c.project.video_url = video_url
        if support_page != c.project.support_page:
            M.AuditLog.log('change project support page to %s', support_page)
            c.project.support_page = support_page
        old_twitter = c.project.social_account('Twitter')
        if not old_twitter or twitter_handle != old_twitter.accounturl:
            M.AuditLog.log('change project twitter handle to %s',
                           twitter_handle)
            c.project.set_social_account('Twitter', twitter_handle)
        old_facebook = c.project.social_account('Facebook')
        if not old_facebook or facebook_page != old_facebook.accounturl:
            # only accept facebook.com URLs (or clearing the value)
            if not facebook_page or 'facebook.com' in urlparse(facebook_page).netloc:
                M.AuditLog.log(
                    'change project facebook page to %s', facebook_page)
                c.project.set_social_account('Facebook', facebook_page)
        if support_page_url != c.project.support_page_url:
            M.AuditLog.log('change project support page url to %s',
                           support_page_url)
            c.project.support_page_url = support_page_url
        if moved_to_url != c.project.moved_to_url:
            M.AuditLog.log('change project moved to url to %s', moved_to_url)
            c.project.moved_to_url = moved_to_url
        if tracking_id != c.project.tracking_id:
            M.AuditLog.log('change project tracking ID to %s', tracking_id)
            c.project.tracking_id = tracking_id
        features = [f['feature'].strip() for f in features or []
                    if f.get('feature', '').strip()]
        if features != c.project.features:
            M.AuditLog.log('change project features to %s', features)
            c.project.features = features

        if icon is not None and icon != b'':
            # replace any existing icon files before saving the new upload
            if c.project.icon:
                M.ProjectFile.query.remove(dict(project_id=c.project._id, category=re.compile(r'^icon')))
            save_icon = c.project.save_icon(icon.filename, icon.file, content_type=icon.type)
            if not save_icon:
                M.AuditLog.log('could not update project icon')
                flash_message = f'{flash_message}, but image upload failed'
                flash_status = 'warning'
            else:
                M.AuditLog.log('update project icon')

        g.post_event('project_updated')
        flash(flash_message, flash_status)
        redirect('overview')
    def _add_trove(self, type, new_trove):
        """Attach trove category *new_trove* (an id) of kind *type* to the project.

        Returns ``(trove_obj, error_msg)``; error_msg is None on success.
        NOTE(review): an unknown trove id yields ``(None, None)`` — callers
        that dereference trove_obj should handle that; confirm intent.
        """
        current_troves = getattr(c.project, 'trove_%s' % type)
        trove_obj = M.TroveCategory.query.get(trove_cat_id=int(new_trove))
        error_msg = None
        # per-kind caps on how many categories a project may carry
        if type in ['license', 'audience', 'developmentstatus', 'language'] and len(current_troves) >= 6:
            error_msg = 'You may not have more than 6 of this category.'
        elif type in ['topic'] and len(current_troves) >= 3:
            error_msg = 'You may not have more than 3 of this category.'
        elif trove_obj is not None:
            if trove_obj._id not in current_troves:
                current_troves.append(trove_obj._id)
                M.AuditLog.log('add trove %s: %s', type, trove_obj.fullpath)
                # just in case the event handling is super fast
                ThreadLocalORMSession.flush_all()
                c.project.last_updated = datetime.utcnow()
                g.post_event('project_updated')
            else:
                error_msg = 'This category has already been assigned to the project.'
        return (trove_obj, error_msg)

    @expose('json:')
    @require_post()
    def add_trove_js(self, type, new_trove, **kw):
        """Ajax variant of add_trove; returns the added category's info."""
        require_access(c.project, 'update')
        trove_obj, error_msg = self._add_trove(type, new_trove)
        return dict(trove_full_path=trove_obj.fullpath_within_type, trove_cat_id=trove_obj.trove_cat_id, error_msg=error_msg)

    @expose()
    @require_post()
    def add_trove(self, type, new_trove, **kw):
        """Form-post variant of add_trove; flashes errors and redirects."""
        require_access(c.project, 'update')
        trove_obj, error_msg = self._add_trove(type, new_trove)
        if error_msg:
            flash(error_msg, 'error')
        redirect('trove')
@expose()
@require_post()
def delete_trove(self, type, trove, **kw):
require_access(c.project, 'update')
trove_obj = M.TroveCategory.query.get(trove_cat_id=int(trove))
current_troves = getattr(c.project, 'trove_%s' % type)
if trove_obj is not None and trove_obj._id in current_troves:
M.AuditLog.log('remove trove %s: %s', type, trove_obj.fullpath)
current_troves.remove(trove_obj._id)
# just in case the event handling is super fast
ThreadLocalORMSession.flush_all()
c.project.last_updated = datetime.utcnow()
g.post_event('project_updated')
redirect('trove')
    @expose()
    @require_post()
    @validate(W.screenshot_admin)
    def add_screenshot(self, screenshot=None, caption=None, **kw):
        """Upload a project screenshot (max 6), de-duplicating filenames."""
        require_access(c.project, 'update')
        screenshots = c.project.get_screenshots()
        if len(screenshots) >= 6:
            flash('You may not have more than 6 screenshots per project.',
                  'error')
        elif screenshot is not None and screenshot != '':
            future_bmp = False
            e_filename, e_fileext = os.path.splitext(screenshot.filename)
            for screen in screenshots:
                c_filename, c_fileext = os.path.splitext(screen.filename)
                if c_fileext == '.png' and e_fileext.lower() == '.bmp' and e_filename == c_filename:
                    future_bmp = True
                    # If both filename(without ext.) equals and exiting file ext. is png and given file ext is bmp, there will be two similar png files.
                if screen.filename == screenshot.filename or future_bmp:
                    screenshot.filename = re.sub(r'(.*)\.(.*)', r'\1-' + str(randint(1000,9999)) + r'.\2', screenshot.filename)
                    # if filename already exists append a random number
                    break
            M.AuditLog.log('add screenshot')
            # new screenshot sorts after all existing ones
            sort = 1 + max([ss.sort or 0 for ss in screenshots] or [0])
            M.ProjectFile.save_image(
                screenshot.filename, screenshot.file, content_type=screenshot.type,
                save_original=True,
                original_meta=dict(
                    project_id=c.project._id,
                    category='screenshot',
                    caption=caption,
                    sort=sort),
                square=True, thumbnail_size=(150, 150),
                thumbnail_meta=dict(project_id=c.project._id, category='screenshot_thumb'), convert_bmp=True)
            g.post_event('project_updated')
        redirect('screenshots')
@expose()
@require_post()
def sort_screenshots(self, **kw):
"""Sort project screenshots.
Called via ajax when screenshots are reordered via drag/drop on
the Screenshots admin page.
``kw`` is a mapping of (screenshot._id, sort_order) pairs.
"""
for s in c.project.get_screenshots():
if str(s._id) in kw:
s.sort = int(kw[str(s._id)])
g.post_event('project_updated')
    @expose()
    @require_post()
    def delete_screenshot(self, id=None, **kw):
        """Delete the screenshot with the given ObjectId string."""
        require_access(c.project, 'update')
        if id is not None and id != '':
            M.AuditLog.log('remove screenshot')
            M.ProjectFile.query.remove(
                dict(project_id=c.project._id, _id=ObjectId(id)))
            g.post_event('project_updated')
        redirect('screenshots')

    @expose()
    @require_post()
    def edit_screenshot(self, id=None, caption=None, **kw):
        """Update the caption of the screenshot with the given id."""
        require_access(c.project, 'update')
        if id is not None and id != '':
            M.ProjectFile.query.get(
                project_id=c.project._id, _id=ObjectId(id)).caption = caption
            g.post_event('project_updated')
        redirect('screenshots')
    @expose()
    @require_post()
    def join_neighborhood(self, nid):
        """Move the project into neighborhood *nid* if it has an invitation;
        an empty nid falls back to the default 'Projects' neighborhood."""
        require_access(c.project, 'admin')
        if not nid:
            n = M.Neighborhood.query.get(name='Projects')
            c.project.neighborhood_id = n._id
            flash('Joined %s' % n.name)
            redirect(c.project.url() + 'admin/')
        nid = ObjectId(str(nid))
        if nid not in c.project.neighborhood_invitations:
            flash('No invitation to that neighborhood', 'error')
            redirect('.')
        c.project.neighborhood_id = nid
        n = M.Neighborhood.query.get(_id=nid)
        flash('Joined %s' % n.name)
        redirect('invitations')
    def _update_mounts(self, subproject=None, tool=None, new=None, **kw):
        '''
        Returns the new App or Subproject, if one was installed.
        Returns None otherwise.

        ``subproject`` and ``tool`` are lists of form dicts describing
        existing mounts to update or delete; ``new`` describes a mount to
        install (a subproject when 'ep_name' is empty, a tool otherwise).
        '''
        if subproject is None:
            subproject = []
        if tool is None:
            tool = []
        new_app = None
        # update/delete existing subprojects
        for sp in subproject:
            p = M.Project.query.get(shortname=sp['shortname'],
                                    neighborhood_id=c.project.neighborhood_id)
            if sp.get('delete'):
                require_access(c.project, 'admin')
                M.AuditLog.log('delete subproject %s', sp['shortname'])
                p.removal = 'deleted'
                plugin.ProjectRegistrationProvider.get().delete_project(
                    p, c.user)
            elif not new:
                M.AuditLog.log('update subproject %s', sp['shortname'])
                p.name = sp['name']
                p.ordinal = int(sp['ordinal'])
        # update/delete existing tool mounts
        for p in tool:
            if p.get('delete'):
                require_access(c.project, 'admin')
                M.AuditLog.log('uninstall tool %s', p['mount_point'])
                c.project.uninstall_app(p['mount_point'])
            elif not new:
                M.AuditLog.log('update tool %s', p['mount_point'])
                options = c.project.app_config(p['mount_point']).options
                options.mount_label = p['mount_label']
                options.ordinal = int(p['ordinal'])
        # install a new mount, if requested
        if new and new.get('install'):
            ep_name = new.get('ep_name', None)
            if not ep_name:
                # empty ep_name means a subproject rather than a tool
                require_access(c.project, 'create')
                mount_point = new['mount_point'].lower() or h.nonce()
                M.AuditLog.log('create subproject %s', mount_point)
                sp = c.project.new_subproject(mount_point)
                sp.name = new['mount_label']
                if 'ordinal' in new:
                    sp.ordinal = int(new['ordinal'])
                else:
                    sp.ordinal = c.project.last_ordinal_value() + 1
                new_app = sp
            else:
                require_access(c.project, 'admin')
                installable_tools = AdminApp.installable_tools_for(c.project)
                if not ep_name.lower() in [t['name'].lower() for t in installable_tools]:
                    flash('Installation limit exceeded.', 'error')
                    return
                mount_point = new['mount_point'] or ep_name
                M.AuditLog.log('install tool %s', mount_point)
                App = g.entry_points['tool'][ep_name]
                # pass only options which app expects
                config_on_install = {
                    k: v for (k, v) in six.iteritems(kw)
                    if k in [o.name for o in App.options_on_install()]
                }
                new_app = c.project.install_app(
                    ep_name,
                    mount_point,
                    mount_label=new['mount_label'],
                    ordinal=int(new['ordinal']) if 'ordinal' in new else None,
                    **config_on_install)
        g.post_event('project_updated')
        g.post_event('project_menu_updated')
        return new_app
@h.vardec
@expose()
@require_post()
def update_mounts(self, subproject=None, tool=None, new=None, page=0, limit=200, **kw):
if new and new['ep_name'] == 'subproject':
new['ep_name'] = ""
try:
new_app = self._update_mounts(subproject, tool, new, **kw)
if new_app:
if getattr(new_app, 'tool_label', '') == 'External Link':
flash('{} installed successfully.'.format(new_app.tool_label))
else:
new_url = new_app.url
if callable(new_url): # subprojects have a method instead of property
new_url = new_url()
redirect(new_url)
except forge_exc.ForgeError as exc:
flash('%s: %s' % (exc.__class__.__name__, exc.args[0]),
'error')
if request.referer is not None and tool is not None and 'delete' in tool[0] and \
re.search(c.project.url() + r'(admin\/|)' + tool[0]['mount_point']+ r'\/*',
six.ensure_text(request.referer)):
# Redirect to root when deleting currect module
redirect('../')
redirect(six.ensure_text(request.referer or '/'))
    @expose('jinja:allura.ext.admin:templates/export.html')
    def export(self, tools=None, with_attachments=False):
        """Bulk-export page: POST schedules an export via the REST
        controller; GET renders the form plus total attachment size."""
        if not asbool(config.get('bulk_export_enabled', True)):
            raise exc.HTTPNotFound()
        if request.method == 'POST':
            try:
                ProjectAdminRestController().export(tools, send_email=True, with_attachments=with_attachments)
            except (exc.HTTPBadRequest, exc.HTTPServiceUnavailable) as e:
                flash(str(e), 'error')
                redirect('.')
            else:
                flash(
                    'Export scheduled. You will recieve an email with download instructions when complete.', 'ok')
                redirect('export')

        exportable_tools = AdminApp.exportable_tools_for(c.project)
        apps_id = [tool._id for tool in exportable_tools]
        db = M.session.project_doc_session.db
        files_id = db.attachment.find({"app_config_id": {"$in": apps_id}}).distinct("file_id")
        # sum attachment sizes (bytes) and convert to megabytes in mongo
        try:
            total_size = list(db.attachment.files.aggregate([
                {
                    "$match": {"_id": {"$in": files_id}}
                },
                {
                    "$group": {"_id": "total", "total_size": {"$sum": "$length"}}
                },
                {
                    "$project": {"_id": 0, "total_size": {"$divide": ["$total_size", 1000000]}}
                }
            ], cursor={}))[0].get('total_size')
        except IndexError:
            # no attachments at all -> empty aggregation result
            total_size = 0

        return {
            'tools': exportable_tools,
            'status': c.project.bulk_export_status(),
            'total_size': round(total_size, 3)
        }
class ProjectAdminRestController(BaseController):
    """
    Exposes RESTful API for project admin actions.
    """

    def _check_security(self):
        # every REST admin action requires project admin rights
        require_access(c.project, 'admin')
@expose('json:')
@require_post()
def mount_order(self, **kw):
if not kw:
raise exc.HTTPBadRequest('Expected kw params in the form of "ordinal: mount_point"')
try:
sorted_tools = sorted(list(kw.items()), key=lambda x: int(x[0]))
except ValueError:
raise exc.HTTPBadRequest('Invalid kw: expected "ordinal: mount_point"')
for ordinal, mount_point in sorted_tools:
try:
c.project.app_config(mount_point).options.ordinal = int(ordinal)
except AttributeError as e:
# Handle sub project
p = M.Project.query.get(shortname="{}/{}".format(c.project.shortname, mount_point),
neighborhood_id=c.project.neighborhood_id)
if p:
p.ordinal = int(ordinal)
M.AuditLog.log('Updated tool order')
g.post_event('project_menu_updated')
return {'status': 'ok'}
@expose('json:')
@require_post()
def configure_tool_grouping(self, grouping_threshold='1', **kw):
try:
grouping_threshold = int(grouping_threshold)
if grouping_threshold < 1 or grouping_threshold > 10:
raise exc.HTTPBadRequest('Invalid threshold. Expected a value between 1 and 10')
c.project.set_tool_data(
'allura', grouping_threshold=grouping_threshold)
except ValueError:
raise exc.HTTPBadRequest('Invalid threshold. Expected a value between 1 and 10')
M.AuditLog.log('Updated tool grouping threshold')
g.post_event('project_menu_updated')
return {'status': 'ok'}
@expose('json:')
def installable_tools(self, **kw):
""" List of installable tools and their default options.
"""
tools = []
for tool in AdminApp.installable_tools_for(c.project):
tools.append({
'name': tool['name'],
'description': " ".join(tool['app'].tool_description.split()),
'icons': tool['app'].icons,
'tool_label': tool['app'].tool_label,
'defaults': {
'default_options': tool['app'].default_options(),
'default_mount_label': tool['app'].default_mount_label,
'default_mount_point': tool['app'].admin_menu_delete_button,
}
})
if c.project.is_root:
# subprojects only allowed on top-level projects (no nesting)
tools.append({
'name': 'subproject',
'description': "With a Sub Project you can add an entire project just like any other tool.",
'tool_label': 'Sub Project',
'defaults': {
'default_mount_label': 'Sub',
'default_mount_point': 'sub',
}
})
return {'tools': tools}
    @expose('json:')
    @require_post()
    def export(self, tools=None, send_email=False, with_attachments=False, **kw):
        """
        Initiate a bulk export of the project data.

        Must be given a list of tool mount points to include in the export.
        The list can either be comma-separated or a repeated param, e.g.,
        `export?tools=tickets&tools=discussion`.

        If the tools are not provided, an invalid mount point is listed, or
        there is some other problems with the arguments, a `400 Bad Request`
        response will be returned.

        If an export is already currently running for this project, a
        `503 Unavailable` response will be returned.

        Otherwise, a JSON object of the form
        `{"status": "in progress", "filename": FILENAME}` will be returned,
        where `FILENAME` is the filename of the export artifact relative to
        the users shell account directory.
        """
        if not asbool(config.get('bulk_export_enabled', True)):
            raise exc.HTTPNotFound()
        if not tools:
            raise exc.HTTPBadRequest(
                'Must give at least one tool mount point to export')
        tools = aslist(tools, ',')
        exportable_tools = AdminApp.exportable_tools_for(c.project)
        # reject any mount point that isn't exportable for this project
        allowed = set(t.options.mount_point for t in exportable_tools)
        if not set(tools).issubset(allowed):
            raise exc.HTTPBadRequest('Invalid tool')
        if c.project.bulk_export_status() == 'busy':
            raise exc.HTTPServiceUnavailable(
                'Export for project %s already running' % c.project.shortname)
        # filename (potentially) includes a timestamp, so we have
        # to pre-generate to be able to return it to the user
        filename = c.project.bulk_export_filename()
        export_tasks.bulk_export.post(tools, filename, send_email=send_email, with_attachments=with_attachments)
        return {
            'status': 'in progress',
            'filename': filename,
        }
@expose('json:')
def admin_options(self, mount_point=None, **kw):
"""
Returns the admin options for a given mount_point
:type mount_point: str|allura.model.project.AppConfig
"""
if not mount_point:
raise exc.HTTPBadRequest('Must provide a mount point')
tool = c.project.app_instance(mount_point)
if tool is None:
raise exc.HTTPBadRequest('The mount point you provided was invalid')
admin_menu = tool.admin_menu()
if tool.admin_menu_delete_button:
admin_menu.append(tool.admin_menu_delete_button)
return {
'options': [dict(text=m.label, href=m.url, className=m.className)
for m in admin_menu]
}
@expose('json:')
def export_status(self, **kw):
"""
Check the status of a bulk export.
Returns an object containing only one key, `status`, whose value is
either `'busy'` or `'ready'`.
"""
status = c.project.bulk_export_status()
return {'status': status or 'ready'}
    @expose('json:')
    @require_post()
    def install_tool(self, tool=None, mount_point=None, mount_label=None, order=None, **kw):
        """API for installing tools in current project.

        Requires a valid tool, mount point and mount label names.
        (All arguments are required.)

        Usage example::

            POST to:
            /rest/p/testproject/admin/install_tool/

            with params:
            {
                'tool': 'tickets',
                'mount_point': 'mountpoint',
                'mount_label': 'mountlabel',
                'order': 'first|last|alpha_tool'
            }

        Example output (in successful case)::

            {
                "info": "Tool tickets with mount_point mountpoint and mount_label mountlabel was created.",
                "success": true
            }
        """
        controller = ProjectAdminController()

        # All three names are mandatory.
        if not tool or not mount_point or not mount_label:
            return {
                'success': False,
                'info': 'All arguments required.'
            }
        installable_tools = AdminApp.installable_tools_for(c.project)
        tools_names = [t['name'] for t in installable_tools]
        # The tool must be currently installable (this also enforces
        # any per-project tool limits).
        if not (tool in tools_names):
            return {
                'success': False,
                'info': 'Incorrect tool name, or limit is reached.'
            }
        # Mount points must be unique within a project.
        if c.project.app_instance(mount_point) is not None:
            return {
                'success': False,
                'info': 'Mount point already exists.',
            }

        if order is None:
            order = 'last'
        # Build a combined, ordinal-sorted view of everything mounted on the
        # project: tool app configs and sub-projects alike.
        mounts = [{'ordinal': ac.options.ordinal,
                   'label': ac.options.mount_label,
                   'mount': ac.options.mount_point,
                   'type': ac.tool_name.lower()}
                  for ac in c.project.app_configs]
        subs = {p.shortname: p for p in M.Project.query.find({'parent_id': c.project._id})}
        for sub in subs.values():
            mounts.append({'ordinal': sub.ordinal,
                           'mount': sub.shortname,
                           'type': 'sub-project'})
        mounts.sort(key=itemgetter('ordinal'))
        if order == 'first':
            ordinal = 0
        elif order == 'last':
            ordinal = len(mounts)
        elif order == 'alpha_tool':
            # Insert alphabetically (by mount label) among mounts of the
            # same tool type; fall through to the end if none compare later.
            tool = tool.lower()
            for i, mount in enumerate(mounts):
                if mount['type'] == tool and mount['label'] > mount_label:
                    ordinal = i
                    break
            else:
                ordinal = len(mounts)
        # NOTE(review): an unrecognized `order` value would leave `ordinal`
        # unset and raise NameError below — confirm callers only ever pass
        # first/last/alpha_tool.
        # Reserve the chosen slot, then renumber every existing mount so the
        # ordinals stay contiguous around the new tool.
        mounts.insert(ordinal, {'ordinal': ordinal, 'type': 'new'})
        for i, mount in enumerate(mounts):
            if mount['type'] == 'new':
                pass
            elif mount['type'] == 'sub-project':
                subs[mount['mount']].ordinal = i
            else:
                c.project.app_config(mount['mount']).options.ordinal = i

        data = {
            'install': 'install',
            'ep_name': tool,
            'ordinal': ordinal,
            'mount_point': mount_point,
            'mount_label': mount_label
        }
        params = {
            'new': data
        }
        if kw:
            params.update(**kw)
        try:
            # Delegate the actual installation to the HTML admin controller.
            controller._update_mounts(**params)
        except forge_exc.ForgeError as e:
            return {
                'success': False,
                'info': str(e),
            }

        return {
            'success': True,
            'info': 'Tool %s with mount_point %s and mount_label %s was created.'
                    % (tool, mount_point, mount_label)
        }
@expose()
def _lookup(self, *args):
if len(args) == 0:
raise exc.HTTPNotFound(args)
name, remainder = args[0], args[1:]
app = c.project.app_instance(name)
if app is None or app.admin_api_root is None:
raise exc.HTTPNotFound(name)
return app.admin_api_root, remainder
class PermissionsController(BaseController):
    """Admin UI for editing per-permission ACLs of a subproject."""

    def _check_security(self):
        # Do not allow access to 'permissions' page for root projects.
        # Users should use 'groups' instead. This is to prevent creating 'private' projects
        # - subprojects are still allowed.
        # - tools pages are also still allowed, but are in a different controller
        if c.project.is_root:
            redirect('../groups')
        require_access(c.project, 'admin')

    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_permissions.html')
    def index(self, **kw):
        # One card widget per permission, each listing the roles granted it.
        c.card = W.permission_card
        return dict(permissions=self._index_permissions())

    @without_trailing_slash
    @expose()
    @h.vardec
    @require_post()
    def update(self, card=None, **kw):
        """Rebuild the project ACL from the submitted permission cards."""
        permissions = self._index_permissions()
        old_permissions = dict(permissions)
        for args in card:
            perm = args['id']
            new_group_ids = args.get('new', [])
            group_ids = args.get('value', [])
            # Single form values arrive as bare strings; normalize to lists.
            if isinstance(new_group_ids, six.string_types):
                new_group_ids = [new_group_ids]
            if isinstance(group_ids, six.string_types):
                group_ids = [group_ids]
            # make sure the admin group has the admin permission
            if perm == 'admin':
                if c.project.is_root:
                    pid = c.project._id
                else:
                    pid = c.project.parent_id
                admin_group_id = str(
                    M.ProjectRole.query.get(project_id=pid, name='Admin')._id)
                if admin_group_id not in group_ids + new_group_ids:
                    flash(
                        'You cannot remove the admin group from the admin permission.', 'warning')
                    group_ids.append(admin_group_id)
            permissions[perm] = []
            role_ids = list(map(ObjectId, group_ids + new_group_ids))
            permissions[perm] = role_ids
        # Replace the whole ACL with the newly computed grants.
        c.project.acl = []
        for perm, role_ids in six.iteritems(permissions):
            # Human-readable role names for the audit-log message.
            role_names = lambda ids: ','.join(sorted(
                pr.name for pr in M.ProjectRole.query.find(dict(_id={'$in': ids}))))
            old_role_ids = old_permissions.get(perm, [])
            if old_role_ids != role_ids:
                M.AuditLog.log('updated "%s" permissions: "%s" => "%s"',
                               perm, role_names(old_role_ids), role_names(role_ids))
            c.project.acl += [M.ACE.allow(rid, perm) for rid in role_ids]
        g.post_event('project_updated')
        redirect('.')

    def _index_permissions(self):
        """Map each project permission to the list of role ids allowed it."""
        permissions = dict(
            (p, []) for p in c.project.permissions)
        for ace in c.project.acl:
            if ace.access == M.ACE.ALLOW:
                permissions[ace.permission].append(ace.role_id)
        return permissions
class GroupsController(BaseController):
    """Admin UI for managing user groups (roles) and their permissions."""

    def _check_security(self):
        require_access(c.project, 'admin')

    def _index_permissions(self):
        """Map each project permission to the list of role ids allowed it."""
        permissions = dict(
            (p, []) for p in c.project.permissions)
        for ace in c.project.acl:
            if ace.access == M.ACE.ALLOW:
                permissions[ace.permission].append(ace.role_id)
        return permissions

    def _map_group_permissions(self):
        """Build, per role id, a display summary for every permission:
        'yes' (granted directly), 'inherit' (via a child role or the
        Anonymous/Authenticated pseudo-roles) or 'no'."""
        roles = c.project.named_roles
        permissions = self._index_permissions()
        permissions_by_role = dict()
        auth_role = M.ProjectRole.authenticated()
        anon_role = M.ProjectRole.anonymous()
        for role in roles + [auth_role, anon_role]:
            permissions_by_role[str(role._id)] = []
            for perm in permissions:
                perm_info = dict(has="no", text="Does not have permission %s" %
                                 perm, name=perm)
                role_ids = permissions[perm]
                if role._id in role_ids:
                    perm_info['text'] = "Has permission %s" % perm
                    perm_info['has'] = "yes"
                else:
                    # Check roles this one contains (child roles) first.
                    for r in role.child_roles():
                        if r._id in role_ids:
                            perm_info['text'] = "Inherited permission %s from %s" % (
                                perm, r.name)
                            perm_info['has'] = "inherit"
                            break
                    if perm_info['has'] == "no":
                        # Anonymous grants apply to everyone; Authenticated
                        # grants apply to everyone except Anonymous itself.
                        if anon_role._id in role_ids:
                            perm_info[
                                'text'] = "Inherited permission %s from Anonymous" % perm
                            perm_info['has'] = "inherit"
                        elif auth_role._id in role_ids and role != anon_role:
                            perm_info[
                                'text'] = "Inherited permission %s from Authenticated" % perm
                            perm_info['has'] = "inherit"
                permissions_by_role[str(role._id)].append(perm_info)
        return permissions_by_role

    @without_trailing_slash
    @expose()
    @require_post()
    @h.vardec
    def delete_group(self, group_name, **kw):
        """Delete the named group (role), if it exists."""
        role = M.ProjectRole.by_name(group_name)
        if not role:
            flash('Group "%s" does not exist.' % group_name, 'error')
        else:
            role.delete()
            M.AuditLog.log('delete group %s', group_name)
            flash('Group "%s" deleted successfully.' % group_name)
            g.post_event('project_updated')
        redirect('.')

    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_groups.html')
    def index(self, **kw):
        """Render the groups admin page."""
        c.card = W.group_card
        permissions_by_role = self._map_group_permissions()
        auth_role = M.ProjectRole.authenticated()
        anon_role = M.ProjectRole.anonymous()
        roles = c.project.named_roles
        # The trailing None renders the blank "add a new group" card.
        roles.append(None)
        return dict(roles=roles, permissions_by_role=permissions_by_role,
                    auth_role=auth_role, anon_role=anon_role)

    @without_trailing_slash
    @expose('json:')
    @require_post()
    @h.vardec
    def change_perm(self, role_id, permission, allow="true", **kw):
        """Grant or revoke a single permission for a group (ajax endpoint)."""
        if allow == "true":
            M.AuditLog.log('granted permission %s to group %s', permission,
                           M.ProjectRole.query.get(_id=ObjectId(role_id)).name)
            c.project.acl.append(M.ACE.allow(ObjectId(role_id), permission))
        else:
            # Never let the Admin group lose the admin permission.
            admin_group_id = str(M.ProjectRole.by_name('Admin')._id)
            if admin_group_id == role_id and permission == 'admin':
                return dict(error='You cannot remove the admin permission from the admin group.')
            M.AuditLog.log('revoked permission %s from group %s', permission,
                           M.ProjectRole.query.get(_id=ObjectId(role_id)).name)
            c.project.acl.remove(M.ACE.allow(ObjectId(role_id), permission))
        g.post_event('project_updated')
        return self._map_group_permissions()

    @without_trailing_slash
    @expose('json:')
    @require_post()
    @h.vardec
    def add_user(self, role_id, username, **kw):
        """Add a user to a group (ajax endpoint)."""
        if not username or username == '*anonymous':
            return dict(error='You must choose a user to add.')
        group = M.ProjectRole.query.get(_id=ObjectId(role_id))
        user = M.User.query.get(username=username.strip(), pending=False)
        if not group:
            return dict(error='Could not find group with id %s' % role_id)
        if not user:
            return dict(error='User %s not found' % username)
        user_role = M.ProjectRole.by_user(user, upsert=True)
        if group._id in user_role.roles:
            return dict(error='%s (%s) is already in the group %s.' % (user.display_name, username, group.name))
        M.AuditLog.log('add user %s to %s', username, group.name)
        user_role.roles.append(group._id)
        # New admins get subscribed to every tool in the project.
        if group.name == 'Admin':
            for ac in c.project.app_configs:
                c.project.app_instance(ac).subscribe(user)
        g.post_event('project_updated')
        return dict(username=username, displayname=user.display_name)

    @without_trailing_slash
    @expose('json:')
    @require_post()
    @h.vardec
    def remove_user(self, role_id, username, **kw):
        """Remove a user from a group (ajax endpoint)."""
        group = M.ProjectRole.query.get(_id=ObjectId(role_id))
        user = M.User.by_username(username.strip())
        # Keep at least one Admin on the project at all times.
        if group.name == 'Admin' and len(group.users_with_role()) == 1:
            return dict(error='You must have at least one user with the Admin role.')
        if not group:
            return dict(error='Could not find group with id %s' % role_id)
        if not user:
            return dict(error='User %s not found' % username)
        user_role = M.ProjectRole.by_user(user)
        if not user_role or group._id not in user_role.roles:
            return dict(error='%s (%s) is not in the group %s.' % (user.display_name, username, group.name))
        M.AuditLog.log('remove user %s from %s', username, group.name)
        user_role.roles.remove(group._id)
        if len(user_role.roles) == 0:
            # user has no roles in this project any more, so don't leave a useless doc around
            user_role.delete()
        g.post_event('project_updated')
        return dict()

    @without_trailing_slash
    @expose('jinja:allura.ext.admin:templates/project_group.html')
    def new(self):
        """Render the "create a new group" form."""
        c.form = W.new_group_settings
        return dict(
            group=None,
            action="create")

    @expose()
    @require_post()
    @validate(W.new_group_settings)
    @h.vardec
    def create(self, name=None, **kw):
        """Create a new, empty group with the given name."""
        if M.ProjectRole.by_name(name):
            flash('%s already exists' % name, 'error')
        else:
            M.ProjectRole(project_id=c.project._id, name=name)
        M.AuditLog.log('create group %s', name)
        g.post_event('project_updated')
        redirect('.')
class AuditController(BaseController):
    """Paginated view of the project's audit log, newest entries first."""

    @with_trailing_slash
    @expose('jinja:allura.ext.admin:templates/audit.html')
    def index(self, limit=25, page=0, **kwargs):
        limit = int(limit)
        page = int(page)
        count = M.AuditLog.query.find(dict(project_id=c.project._id)).count()
        entries = M.AuditLog.query.find(dict(project_id=c.project._id))
        entries = entries.sort('timestamp', -1)
        entries = entries.skip(page * limit)
        # Only cap the query when there are more rows than one page;
        # otherwise report the actual row count as the effective limit.
        if count > limit:
            entries = entries.limit(limit)
        else:
            limit = count
        c.widget = W.audit
        return dict(
            entries=entries.all(),
            limit=limit,
            page=page,
            count=count)
class AdminAppAdminController(DefaultAdminController):
    '''Administer the admin app'''
    # Intentionally empty: the admin tool has no admin options of its own,
    # so the default controller behavior is used unchanged.
    pass
|
en
| 0.842948
|
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. This is the admin app. It is pretty much required for a functioning allura project. Whether the user can view the app. # prevent from saving temporary config to db Apps should provide their entries to be added to the main nav :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>` # only show transitional Tools page to older projects that may be used to it # no point is showing it to new projects # need this because features field expects data in specific format # just in case the event handling is super fast # just in case the event handling is super fast # If both filename(without ext.) equals and exiting file ext. is png and given file ext is bmp, there will be two similar png files. # if filename already exists append a random number Sort project screenshots. Called via ajax when screenshots are reordered via drag/drop on the Screenshots admin page. ``kw`` is a mapping of (screenshot._id, sort_order) pairs. Returns the new App or Subproject, if one was installed. Returns None otherwise. # pass only options which app expects # subprojects have a method instead of property # Redirect to root when deleting currect module Exposes RESTful API for project admin actions. 
# Handle sub project List of installable tools and their default options. # subprojects only allowed on top-level projects (no nesting) Initiate a bulk export of the project data. Must be given a list of tool mount points to include in the export. The list can either be comma-separated or a repeated param, e.g., `export?tools=tickets&tools=discussion`. If the tools are not provided, an invalid mount point is listed, or there is some other problems with the arguments, a `400 Bad Request` response will be returned. If an export is already currently running for this project, a `503 Unavailable` response will be returned. Otherwise, a JSON object of the form `{"status": "in progress", "filename": FILENAME}` will be returned, where `FILENAME` is the filename of the export artifact relative to the users shell account directory. # filename (potentially) includes a timestamp, so we have # to pre-generate to be able to return it to the user Returns the admin options for a given mount_point :type mount_point: str|allura.model.project.AppConfig Check the status of a bulk export. Returns an object containing only one key, `status`, whose value is either `'busy'` or `'ready'`. API for installing tools in current project. Requires a valid tool, mount point and mount label names. (All arguments are required.) Usage example:: POST to: /rest/p/testproject/admin/install_tool/ with params: { 'tool': 'tickets', 'mount_point': 'mountpoint', 'mount_label': 'mountlabel', 'order': 'first|last|alpha_tool' } Example output (in successful case):: { "info": "Tool tickets with mount_point mountpoint and mount_label mountlabel was created.", "success": true } # Do not allow access to 'permissions' page for root projects. # Users should use 'groups' instead. This is to prevent creating 'private' projects # - subprojects are still allowed. 
# - tools pages are also still allowed, but are in a different controller # make sure the admin group has the admin permission # user has no roles in this project any more, so don't leave a useless doc around Administer the admin app
| 1.21775
| 1
|
yaaz/src/evaluator.py
|
swasun/Yet-Another-AlphaZero
| 2
|
6626894
|
#####################################################################################
# MIT License #
# #
# Copyright (C) 2019 <NAME> #
# #
# This file is part of Yet-Another-AlphaZero. #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
#####################################################################################
from agent import Agent
from chess_env import ChessEnv
from environment_simulator import EnvironmentSimulator
from error_handling.console_logger import ConsoleLogger
from chess_model import ChessModel
from dataset import Dataset
import os
class Evaluator(object):
    """Pits a freshly trained model against the current best model over a
    number of games and keeps whichever one wins more often."""

    def __init__(self, dataset, new_model, environments_number=10):
        """
        Args:
            dataset: storage backend used to load/record/replace the best model.
            new_model: candidate model to evaluate.
            environments_number: number of evaluation games to play.
        """
        self._dataset = dataset
        self._new_model = new_model
        self._environments_number = environments_number

    def start(self):
        """Play the evaluation games and promote the candidate model if it
        scores more wins than the current best model."""
        agent1_victories = 0
        agent2_victories = 0
        draws = 0

        # Load the best model or record the first one
        current_best_model = self._dataset.load_best_model()
        if current_best_model is None:
            # No best model yet: the candidate wins by default.
            self._dataset.record_model(self._new_model)
            return

        for epoch in range(self._environments_number):
            ConsoleLogger.status('[EVALUATOR] epoch #{}'.format(epoch))
            env = ChessEnv()
            # agent1 plays with the current best model, agent2 with the
            # candidate model under evaluation.
            agent1 = Agent(env, current_best_model)
            agent2 = Agent(env, self._new_model)
            environment_simulator = EnvironmentSimulator(env, agent1, agent2)
            result = environment_simulator.run()
            if result == "1-0":
                agent1_victories += 1
            elif result == "0-1":
                agent2_victories += 1
            else:
                draws += 1

        ConsoleLogger.success('[EVALUATOR] agent1 victories: {} agent2 victories: {} draws: {}'.format(
            agent1_victories, agent2_victories, draws
        ))

        if agent2_victories > agent1_victories:
            ConsoleLogger.success("[EVALUATOR] agent2's model is better - erase the previous one")
            self._dataset.erase_best_model(self._new_model)
        else:
            # Typo fix: message previously read "agent1 it still the best model".
            ConsoleLogger.status('[EVALUATOR] agent1 is still the best model')
if __name__ == "__main__":
    # Manual smoke run: evaluate a fresh model over 3 games, using the
    # dataset stored under ../../results/chess.
    dataset = Dataset(results_path='..' + os.sep + '..' + os.sep + 'results' + os.sep + 'chess')
    new_model = ChessModel()
    evaluator = Evaluator(dataset, new_model, environments_number=3)
    evaluator.start()
|
#####################################################################################
# MIT License #
# #
# Copyright (C) 2019 <NAME> #
# #
# This file is part of Yet-Another-AlphaZero. #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
#####################################################################################
from agent import Agent
from chess_env import ChessEnv
from environment_simulator import EnvironmentSimulator
from error_handling.console_logger import ConsoleLogger
from chess_model import ChessModel
from dataset import Dataset
import os
class Evaluator(object):
def __init__(self, dataset, new_model, environments_number=10):
self._dataset = dataset
self._new_model = new_model
self._environments_number = environments_number
def start(self):
agent1_victories = 0
agent2_victories = 0
draws = 0
# Load the best model or record the first one
current_best_model = self._dataset.load_best_model()
if current_best_model is None:
self._dataset.record_model(self._new_model)
return
for epoch in range(self._environments_number):
ConsoleLogger.status('[EVALUATOR] epoch #{}'.format(epoch))
env = ChessEnv()
agent1 = Agent(env, current_best_model)
agent2 = Agent(env, self._new_model)
environment_simulator = EnvironmentSimulator(env, agent1, agent2)
result = environment_simulator.run()
if result == "1-0":
agent1_victories += 1
elif result == "0-1":
agent2_victories += 1
else:
draws += 1
ConsoleLogger.success('[EVALUATOR] agent1 victories: {} agent2 victories: {} draws: {}'.format(
agent1_victories, agent2_victories, draws
))
if agent2_victories > agent1_victories:
ConsoleLogger.success("[EVALUATOR] agent2's model is better - erase the previous one")
self._dataset.erase_best_model(self._new_model)
else:
ConsoleLogger.status('[EVALUATOR] agent1 it still the best model')
if __name__ == "__main__":
dataset = Dataset(results_path='..' + os.sep + '..' + os.sep + 'results' + os.sep + 'chess')
new_model = ChessModel()
evaluator = Evaluator(dataset, new_model, environments_number=3)
evaluator.start()
|
en
| 0.626177
|
##################################################################################### # MIT License # # # # Copyright (C) 2019 <NAME> # # # # This file is part of Yet-Another-AlphaZero. # # # # Permission is hereby granted, free of charge, to any person obtaining a copy # # of this software and associated documentation files (the "Software"), to deal # # in the Software without restriction, including without limitation the rights # # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # # copies of the Software, and to permit persons to whom the Software is # # furnished to do so, subject to the following conditions: # # # # The above copyright notice and this permission notice shall be included in all # # copies or substantial portions of the Software. # # # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # # SOFTWARE. # ##################################################################################### # Load the best model or record the first one #{}'.format(epoch))
| 1.238247
| 1
|
Python/maximum-subarray.py
|
ZhiliangGong/LeetCode
| 5
|
6626895
|
# Time: O(n)
# Space: O(1)
#
# Find the contiguous subarray within an array (containing at least one number) which has the largest sum.
#
# For example, given the array [-2,1,-3,4,-1,2,1,-5,4],
# the contiguous subarray [4,-1,2,1] has the largest sum = 6.
#
# click to show more practice.
#
# More practice:
# If you have figured out the O(n) solution, try coding another solution using the divide and conquer approach, which is more subtle.
#
class Solution(object):
    def maxSubArray(self, nums):
        """
        Return the largest sum of any contiguous subarray of nums
        (Kadane's algorithm: O(n) time, O(1) extra space).

        :type nums: List[int]
        :rtype: int
        """
        # All-negative input: the answer is the single largest element.
        # Compute max(nums) once instead of scanning the list twice.
        largest = max(nums)
        if largest < 0:
            return largest
        global_max, local_max = 0, 0
        for x in nums:
            # Extend the running subarray, or restart it when it goes negative.
            local_max = max(0, local_max + x)
            global_max = max(global_max, local_max)
        return global_max
if __name__ == "__main__":
    # Use the function form of print so this demo runs unchanged on both
    # Python 2 and Python 3 (the original `print expr` statement is a
    # syntax error on Python 3).
    print(Solution().maxSubArray([-2, 1, -3, 4, -1, 2, 1, -5, 4]))
|
# Time: O(n)
# Space: O(1)
#
# Find the contiguous subarray within an array (containing at least one number) which has the largest sum.
#
# For example, given the array [-2,1,-3,4,-1,2,1,-5,4],
# the contiguous subarray [4,-1,2,1] has the largest sum = 6.
#
# click to show more practice.
#
# More practice:
# If you have figured out the O(n) solution, try coding another solution using the divide and conquer approach, which is more subtle.
#
class Solution(object):
def maxSubArray(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
if max(nums) < 0:
return max(nums)
global_max, local_max = 0, 0
for x in nums:
local_max = max(0, local_max + x)
global_max = max(global_max, local_max)
return global_max
if __name__ == "__main__":
print Solution().maxSubArray([-2,1,-3,4,-1,2,1,-5,4])
|
en
| 0.792384
|
# Time: O(n) # Space: O(1) # # Find the contiguous subarray within an array (containing at least one number) which has the largest sum. # # For example, given the array [-2,1,-3,4,-1,2,1,-5,4], # the contiguous subarray [4,-1,2,1] has the largest sum = 6. # # click to show more practice. # # More practice: # If you have figured out the O(n) solution, try coding another solution using the divide and conquer approach, which is more subtle. # :type nums: List[int] :rtype: int
| 4.044676
| 4
|
versebot/verse.py
|
Team-VerseBot/versebot
| 13
|
6626896
|
<gh_stars>10-100
"""
VerseBot for Reddit
By <NAME>
Continued by Team VerseBot
verse.py
Copyright (c) 2015 <NAME> (MIT License)
"""
import books
import database
import webparser
class Verse:
    """ Class that holds the properties and methods of a Verse object. """

    def __init__(self, book, chapter, translation, user, subreddit, verse):
        """ Initializes a Verse object with book, chapter, verse (if
        exists), and translation (if exists). """
        self.book = book
        self.subreddit = subreddit.lower()
        # The book number decides which section of the Bible this verse
        # belongs to: 1-39 Old Testament, 40-66 New Testament, everything
        # above that Deuterocanon.
        book_num = books.get_book_number(self.book)
        if book_num <= 39:
            self.bible_section = "Old Testament"
        elif book_num <= 66:
            self.bible_section = "New Testament"
        else:
            self.bible_section = "Deuterocanon"
        self.chapter = int(chapter.replace(" ", ""))
        if verse != "":
            self.verse = verse.replace(" ", "")
            if "-" in self.verse:
                # Ranges look like "1-5".  A backwards range ("5-1") is
                # invalid; "3-" or "3-3" collapse to a single verse.
                start_verse, end_verse = self.verse.split("-")
                if end_verse != "" and int(start_verse) > int(end_verse):
                    # NOTE(review): self.verse is cleared for a backwards
                    # range, but start/end below are still set from the raw
                    # values — confirm this is the intended behavior.
                    self.verse = None
                elif end_verse == "" or int(start_verse) == int(end_verse):
                    self.verse = start_verse
                    end_verse = int(start_verse)
                self.start_verse = int(start_verse)
                self.end_verse = int(end_verse)
            else:
                self.start_verse = int(self.verse)
                self.end_verse = self.start_verse
        else:
            # No verse supplied: the whole chapter is being quoted.
            self.verse = None
            self.start_verse = 0
            self.end_verse = 0
        if translation != "":
            trans = translation.upper().replace(" ", "")
            if database.is_valid_trans(trans, self.bible_section):
                self.translation = trans
            else:
                # Invalid translation supplied; fall back to defaults.
                self.determine_translation(user, subreddit)
        else:
            self.determine_translation(user, subreddit)
        # Populated later by get_contents().
        self.translation_title = ""
        self.contents = ""
        self.permalink = ""

    def determine_translation(self, user, subreddit):
        """ Determines which translation should be used when either the user
        does not provide a translation, or when the user provides an invalid
        translation.

        Precedence: user default, then subreddit default, then a
        section-specific fallback (ESV for OT/NT, NRSV for Deuterocanon).

        :param subreddit: The subreddit where the quotation is located
        :param user: The user that called VerseBot for a quotation
        """
        user_default = database.get_user_trans(user, self.bible_section)
        if user_default:
            self.translation = user_default
        else:
            subreddit_default = database.get_subreddit_trans(
                subreddit, self.bible_section)
            if subreddit_default:
                self.translation = subreddit_default
            else:
                if self.bible_section == "Old Testament":
                    self.translation = "ESV"
                elif self.bible_section == "New Testament":
                    self.translation = "ESV"
                else:
                    self.translation = "NRSV"

    def get_contents(self):
        """ Retrieves the contents of a Verse object. """
        self.contents, self.translation_title, self.permalink = \
            webparser.get_web_contents(self)
|
"""
VerseBot for Reddit
By <NAME>
Continued by Team VerseBot
verse.py
Copyright (c) 2015 <NAME> (MIT License)
"""
import books
import database
import webparser
class Verse:
""" Class that holds the properties and methods of a Verse object. """
def __init__(self, book, chapter, translation, user, subreddit, verse):
""" Initializes a Verse object with book, chapter, verse (if
exists), and translation (if exists). """
self.book = book
self.subreddit = subreddit.lower()
book_num = books.get_book_number(self.book)
if book_num <= 39:
self.bible_section = "Old Testament"
elif book_num <= 66:
self.bible_section = "New Testament"
else:
self.bible_section = "Deuterocanon"
self.chapter = int(chapter.replace(" ", ""))
if verse != "":
self.verse = verse.replace(" ", "")
if "-" in self.verse:
start_verse, end_verse = self.verse.split("-")
if end_verse != "" and int(start_verse) > int(end_verse):
self.verse = None
elif end_verse == "" or int(start_verse) == int(end_verse):
self.verse = start_verse
end_verse = int(start_verse)
self.start_verse = int(start_verse)
self.end_verse = int(end_verse)
else:
self.start_verse = int(self.verse)
self.end_verse = self.start_verse
else:
self.verse = None
self.start_verse = 0
self.end_verse = 0
if translation != "":
trans = translation.upper().replace(" ", "")
if database.is_valid_trans(trans, self.bible_section):
self.translation = trans
else:
self.determine_translation(user, subreddit)
else:
self.determine_translation(user, subreddit)
self.translation_title = ""
self.contents = ""
self.permalink = ""
def determine_translation(self, user, subreddit):
""" Determines which translation should be used when either the user
does not provide a translation, or when the user provides an invalid
translation.
:param subreddit: The subreddit where the quotation is located
:param user: The user that called VerseBot for a quotation
"""
user_default = database.get_user_trans(user, self.bible_section)
if user_default:
self.translation = user_default
else:
subreddit_default = database.get_subreddit_trans(
subreddit, self.bible_section)
if subreddit_default:
self.translation = subreddit_default
else:
if self.bible_section == "Old Testament":
self.translation = "ESV"
elif self.bible_section == "New Testament":
self.translation = "ESV"
else:
self.translation = "NRSV"
def get_contents(self):
    """Fetch and store this verse's text, translation title, and
    permalink by scraping the web source for its translation."""
    scraped = webparser.get_web_contents(self)
    self.contents, self.translation_title, self.permalink = scraped
|
en
| 0.886486
|
VerseBot for Reddit By <NAME> Continued by Team VerseBot verse.py Copyright (c) 2015 <NAME> (MIT License) Class that holds the properties and methods of a Verse object. Initializes a Verse object with book, chapter, verse (if exists), and translation (if exists). Determines which translation should be used when either the user does not provide a translation, or when the user provides an invalid translation. :param subreddit: The subreddit where the quotation is located :param user: The user that called VerseBot for a quotation Retrieves the contents of a Verse object.
| 3.528168
| 4
|
Iteration examples.py
|
KuanZhasulan/Python-Games
| 0
|
6626897
|
<filename>Iteration examples.py
# Iterating over lists
def count_odd(numbers):
    """Return how many values in numbers are odd."""
    return sum(1 for value in numbers if value % 2 == 1)
def check_odd(numbers):
    """Return True if numbers contains at least one odd value."""
    return any(value % 2 == 1 for value in numbers)
def remove_odd(numbers):
    # BUG (intentional teaching example): removing from a list while
    # iterating over it makes the iterator skip the element that slides
    # into the removed slot, so consecutive odd values are left behind.
    for num in numbers:
        if num % 2 == 1:
            numbers.remove(num)
def remove_odd2(numbers):
    # BUG (intentional teaching example): the recorded indices go stale
    # as soon as the first pop() shifts the remaining elements left, and
    # index() always returns the *first* occurrence of a duplicate value.
    remove = []
    for num in numbers:
        if num % 2 == 1:
            remove.append(numbers.index(num))
    for idx in remove:
        numbers.pop(idx)
def remove_odd3(numbers):
    """Remove every odd value from numbers in place.

    The odd values are collected first and deleted afterwards, so the
    list is never modified while it is being iterated.
    """
    odds = [value for value in numbers if value % 2 == 1]
    for value in odds:
        numbers.remove(value)
def remove_odd4(numbers):
    """Return a new list containing only the even values of numbers."""
    return [value for value in numbers if value % 2 == 0]
def remove_last_odd(numbers):
    """Remove one element equal to the last odd value in numbers.

    Note: list.remove deletes the *first* occurrence of that value, so
    a duplicate earlier in the list is the one removed.  Does nothing
    when the list holds no odd value.
    """
    last_odd = None
    for value in numbers:
        if value % 2 == 1:
            last_odd = value
    if last_odd is not None:
        numbers.remove(last_odd)
def run():
    # Demo driver for the helpers above.
    # NOTE: written for Python 2 -- the bare `print x` statements below
    # are syntax errors under Python 3.
    numbers = [1, 7, 2, 34, 8, 7, 2, 5, 14, 22, 93, 48, 76, 15, 7]
    print numbers
    remove_last_odd(numbers)
    print numbers
    nums = [1, 2, 3, 4 ,5 ,7 , 9, 10, 12]
    new_nums = remove_odd4(nums)
    print nums
    print new_nums


# Execute the demo at import time (script-style module).
run()
|
<filename>Iteration examples.py
# Iterating over lists
def count_odd(numbers):
    """Return how many values in numbers are odd."""
    return sum(1 for value in numbers if value % 2 == 1)
def check_odd(numbers):
    """Return True if numbers contains at least one odd value."""
    return any(value % 2 == 1 for value in numbers)
def remove_odd(numbers):
    # BUG (intentional teaching example): removing from a list while
    # iterating over it makes the iterator skip the element that slides
    # into the removed slot, so consecutive odd values are left behind.
    for num in numbers:
        if num % 2 == 1:
            numbers.remove(num)
def remove_odd2(numbers):
    # BUG (intentional teaching example): the recorded indices go stale
    # as soon as the first pop() shifts the remaining elements left, and
    # index() always returns the *first* occurrence of a duplicate value.
    remove = []
    for num in numbers:
        if num % 2 == 1:
            remove.append(numbers.index(num))
    for idx in remove:
        numbers.pop(idx)
def remove_odd3(numbers):
    """Remove every odd value from numbers in place.

    The odd values are collected first and deleted afterwards, so the
    list is never modified while it is being iterated.
    """
    odds = [value for value in numbers if value % 2 == 1]
    for value in odds:
        numbers.remove(value)
def remove_odd4(numbers):
    """Return a new list containing only the even values of numbers."""
    return [value for value in numbers if value % 2 == 0]
def remove_last_odd(numbers):
    """Remove one element equal to the last odd value in numbers.

    Note: list.remove deletes the *first* occurrence of that value, so
    a duplicate earlier in the list is the one removed.  Does nothing
    when the list holds no odd value.
    """
    last_odd = None
    for value in numbers:
        if value % 2 == 1:
            last_odd = value
    if last_odd is not None:
        numbers.remove(last_odd)
def run():
    # Demo driver for the helpers above.
    # NOTE: written for Python 2 -- the bare `print x` statements below
    # are syntax errors under Python 3.
    numbers = [1, 7, 2, 34, 8, 7, 2, 5, 14, 22, 93, 48, 76, 15, 7]
    print numbers
    remove_last_odd(numbers)
    print numbers
    nums = [1, 2, 3, 4 ,5 ,7 , 9, 10, 12]
    new_nums = remove_odd4(nums)
    print nums
    print new_nums


# Execute the demo at import time (script-style module).
run()
|
en
| 0.704717
|
# Iterating over lists
| 4.176619
| 4
|
ale/transformation.py
|
kaitlyndlee/ale
| 0
|
6626898
|
import numpy as np
from numpy.polynomial.polynomial import polyval, polyder
import networkx as nx
from networkx.algorithms.shortest_paths.generic import shortest_path
import spiceypy as spice
from ale.rotation import ConstantRotation, TimeDependentRotation
def create_rotations(rotation_table):
    """
    Convert an ISIS rotation table into rotation objects.

    Parameters
    ----------
    rotation_table : dict
        The rotation ISIS table as a dictionary

    Returns
    -------
    : list
        A list of time dependent or constant rotation objects from the table. This
        list will always have either 1 or 2 elements. The first rotation will be
        time dependent and the second rotation will be constant. The rotations will
        be ordered such that the reference frame the first rotation rotates to is
        the reference frame the second rotation rotates from.
    """
    rotations = []
    # The time dependent frame list runs from the final frame back to the
    # root frame, so the root sits at the end of the list.
    root_frame = rotation_table['TimeDependentFrames'][-1]
    last_time_dep_frame = rotation_table['TimeDependentFrames'][0]
    # Case 1: It's a table of quaternions and times
    if 'J2000Q0' in rotation_table:
        # SPICE quaternions are (W, X, Y, Z) and ALE uses (X, Y, Z, W).
        quats = np.array([rotation_table['J2000Q1'],
                          rotation_table['J2000Q2'],
                          rotation_table['J2000Q3'],
                          rotation_table['J2000Q0']]).T
        if 'AV1' in rotation_table:
            # Angular velocities, when present in the table.
            av = np.array([rotation_table['AV1'],
                           rotation_table['AV2'],
                           rotation_table['AV3']]).T
        else:
            av = None
        time_dep_rot = TimeDependentRotation(quats,
                                             rotation_table['ET'],
                                             root_frame,
                                             last_time_dep_frame,
                                             av=av)
        rotations.append(time_dep_rot)
    # Case 2: It's a table of Euler angle coefficients
    elif 'J2000Ang1' in rotation_table:
        ephemeris_times = np.linspace(rotation_table['CkTableStartTime'],
                                      rotation_table['CkTableEndTime'],
                                      rotation_table['CkTableOriginalSize'])
        # The final entry of the first two angle arrays stores the time
        # scaling (base time and scale), not a polynomial coefficient.
        base_time = rotation_table['J2000Ang1'][-1]
        time_scale = rotation_table['J2000Ang2'][-1]
        scaled_times = (ephemeris_times - base_time) / time_scale
        coeffs = np.array([rotation_table['J2000Ang1'][:-1],
                           rotation_table['J2000Ang2'][:-1],
                           rotation_table['J2000Ang3'][:-1]]).T
        angles = polyval(scaled_times, coeffs).T
        # ISIS is hard coded to ZXZ (313) Euler angle axis order.
        # SPICE also interprets Euler angle rotations as negative rotations,
        # so negate them before passing to scipy.
        time_dep_rot = TimeDependentRotation.from_euler('zxz',
                                                        -angles,
                                                        ephemeris_times,
                                                        root_frame,
                                                        last_time_dep_frame)
        rotations.append(time_dep_rot)
    if 'ConstantRotation' in rotation_table:
        last_constant_frame = rotation_table['ConstantFrames'][0]
        # The constant rotation is stored as a flattened 3x3 matrix.
        rot_mat = np.reshape(np.array(rotation_table['ConstantRotation']), (3, 3))
        constant_rot = ConstantRotation.from_matrix(rot_mat,
                                                    last_time_dep_frame,
                                                    last_constant_frame)
        rotations.append(constant_rot)
    return rotations
class FrameChain(nx.DiGraph):
    """
    This class is responsible for handling rotations between reference frames.
    Every node is the reference frame and every edge represents the rotation to
    between those two nodes. Each edge is directional, where the source --> destination
    is one rotation and destination --> source is the inverse of that rotation.

    Attributes
    __________
    frame_changes : list
        A list of tuples that represent the rotation from one frame
        to another. These tuples should all be NAIF codes for
        reference frames

    ephemeris_time : list
        A of ephemeris times that need to be rotated for each set
        of frame rotations in the frame chain
    """

    @classmethod
    def from_spice(cls, *args, sensor_frame, target_frame, center_ephemeris_time, ephemeris_times=(), **kwargs):
        """Construct a frame chain from SPICE kernels.

        Traces both the sensor frame and the target frame back to the
        J2000 root and adds one rotation edge per adjacent pair of
        frames along each trace.

        Parameters
        ----------
        sensor_frame : int
            NAIF id of the sensor reference frame
        target_frame : int
            NAIF id of the target body reference frame
        center_ephemeris_time : float
            Time at which the frame relationships are traced
        ephemeris_times : iterable
            Ephemeris times at which time dependent rotations are
            evaluated.  (Default changed from the mutable ``[]`` to the
            immutable ``()``; behavior is identical.)

        Returns
        -------
        : FrameChain
        """
        frame_chain = cls()
        times = np.array(ephemeris_times)

        sensor_time_dependent_frames, sensor_constant_frames = cls.frame_trace(sensor_frame, center_ephemeris_time)
        target_time_dependent_frames, target_constant_frames = cls.frame_trace(target_frame, center_ephemeris_time)

        # Each adjacent pair of frames along a trace becomes one edge.
        time_dependent_frames = list(zip(sensor_time_dependent_frames[:-1], sensor_time_dependent_frames[1:]))
        constant_frames = list(zip(sensor_constant_frames[:-1], sensor_constant_frames[1:]))
        time_dependent_frames.extend(zip(target_time_dependent_frames[:-1], target_time_dependent_frames[1:]))
        constant_frames.extend(zip(target_constant_frames[:-1], target_constant_frames[1:]))

        for s, d in time_dependent_frames:
            quats = np.zeros((len(times), 4))
            avs = np.zeros((len(times), 3))
            for j, time in enumerate(times):
                state_matrix = spice.sxform(spice.frmnam(s), spice.frmnam(d), time)
                rotation_matrix, avs[j] = spice.xf2rav(state_matrix)
                # SPICE quaternions are (W, X, Y, Z); ALE stores (X, Y, Z, W).
                quat_from_rotation = spice.m2q(rotation_matrix)
                quats[j, :3] = quat_from_rotation[1:]
                quats[j, 3] = quat_from_rotation[0]
            rotation = TimeDependentRotation(quats, times, s, d, av=avs)
            frame_chain.add_edge(rotation=rotation)

        for s, d in constant_frames:
            quats = np.zeros(4)
            # Constant rotations are evaluated at a single time only.
            rotation_matrix = spice.pxform(spice.frmnam(s), spice.frmnam(d), times[0])
            quat_from_rotation = spice.m2q(rotation_matrix)
            quats[:3] = quat_from_rotation[1:]
            quats[3] = quat_from_rotation[0]
            rotation = ConstantRotation(quats, s, d)
            frame_chain.add_edge(rotation=rotation)
        return frame_chain

    @staticmethod
    def frame_trace(reference_frame, ephemeris_time):
        """Trace a frame back to the J2000 root frame (NAIF id 1).

        Parameters
        ----------
        reference_frame : int
            NAIF id of the frame to trace
        ephemeris_time : float
            Time at which CK/dynamic frames are evaluated

        Returns
        -------
        : tuple
            ``(time_dependent_frames, constant_frames)`` lists of NAIF
            frame ids.  ``constant_frames`` is the leading run of TK
            (type 4) frames; the lists share one overlapping frame so
            the chains stay connected.
        """
        frame_codes = [reference_frame]
        _, frame_type, _ = spice.frinfo(frame_codes[-1])
        frame_types = [frame_type]
        while frame_codes[-1] != 1:
            try:
                center, frame_type, frame_type_id = spice.frinfo(frame_codes[-1])
            except Exception as e:
                # Best effort: stop tracing at the first frame SPICE
                # cannot describe instead of aborting.  (Was a bare
                # `except:`, which also swallowed SystemExit.)
                print(e)
                break
            # Fixed: compare ints with ==, not `is` -- identity of equal
            # integers is a CPython implementation detail.
            if frame_type in (1, 2):
                # Inertial (1) and PCK body-fixed (2) frames chain
                # directly to J2000.
                frame_code = 1
            elif frame_type == 3:
                try:
                    matrix, frame_code = spice.ckfrot(frame_type_id, ephemeris_time)
                except Exception as err:
                    raise Exception(f"The ck rotation from frame {frame_codes[-1]} can not \
be found due to no pointing available at requested time \
or a problem with the frame") from err
            elif frame_type == 4:
                try:
                    matrix, frame_code = spice.tkfram(frame_type_id)
                except Exception as err:
                    raise Exception(f"The tk rotation from frame {frame_codes[-1]} can not \
be found") from err
            elif frame_type == 5:
                matrix, frame_code = spice.zzdynrot(frame_type_id, center, ephemeris_time)
            else:
                raise Exception(f"The frame {frame_codes[-1]} has a type {frame_type_id} \
not supported by your version of Naif Spicelib. \
You need to update.")
            frame_codes.append(frame_code)
            frame_types.append(frame_type)

        # Split off the leading run of TK (type 4) frames: those are
        # constant rotations; everything after is time dependent.
        constant_frames = []
        while frame_codes:
            if frame_types[0] == 4:
                constant_frames.append(frame_codes.pop(0))
                frame_types.pop(0)
            else:
                break
        time_dependent_frames = []
        if len(constant_frames) != 0:
            # Overlap by one frame so the constant and time dependent
            # chains remain connected.
            time_dependent_frames.append(constant_frames[-1])
        while frame_codes:
            time_dependent_frames.append(frame_codes.pop(0))
        return time_dependent_frames, constant_frames

    @classmethod
    def from_isis_tables(cls, *args, inst_pointing=None, body_orientation=None, **kwargs):
        """Construct a frame chain from ISIS rotation tables.

        Parameters
        ----------
        inst_pointing : dict
            ISIS instrument pointing table
        body_orientation : dict
            ISIS body orientation table

        Returns
        -------
        : FrameChain
        """
        frame_chain = cls()
        # Fixed: mutable default arguments replaced by None sentinels;
        # falling back to {} preserves the previous behavior exactly.
        for rotation in create_rotations(inst_pointing if inst_pointing is not None else {}):
            frame_chain.add_edge(rotation=rotation)
        for rotation in create_rotations(body_orientation if body_orientation is not None else {}):
            frame_chain.add_edge(rotation=rotation)
        return frame_chain

    def add_edge(self, rotation, **kwargs):
        """Add a rotation edge and its inverse edge to the graph."""
        super(FrameChain, self).add_edge(rotation.source, rotation.dest, rotation=rotation, **kwargs)
        inverse = rotation.inverse()
        super(FrameChain, self).add_edge(inverse.source, inverse.dest, rotation=inverse, **kwargs)

    def compute_rotation(self, source, destination):
        """
        Returns the rotation to another node. Returns the identity rotation
        if the other node is this node.

        Parameters
        ----------
        source : int
            Integer id for the source node to rotate from
        destination : int
            Integer id for the node to rotate into from the source node

        Returns
        -------
        rotation : Object
            Returns either a TimeDependentRotation object or ConstantRotation
            object depending on the number of rotations being multiplied
            together
        """
        if source == destination:
            # Identity quaternion in (X, Y, Z, W) order.
            return ConstantRotation(np.array([0, 0, 0, 1]), source, destination)

        path = shortest_path(self, source, destination)
        rotations = [self.edges[path[i], path[i + 1]]['rotation'] for i in range(len(path) - 1)]
        rotation = rotations[0]
        for next_rotation in rotations[1:]:
            rotation = next_rotation * rotation
        return rotation

    def last_time_dependent_frame_between(self, source, destination):
        """
        Find the last time dependent frame between the source frame and the
        destination frame.

        Parameters
        ----------
        source : int
            Integer id of the source node

        destination : int
            Integer of the destination node

        Returns
        -------
        : tuple, None
            Returns the source node id, destination node id, and edge dictionary
            which contains the rotation from source to destination.
        """
        path = shortest_path(self, source, destination)
        # Reverse the path to search bottom up to find the last time dependent
        # frame between the source and destination
        path.reverse()
        for i in range(len(path) - 1):
            edge = self.edges[path[i + 1], path[i]]
            if isinstance(edge['rotation'], TimeDependentRotation):
                return path[i + 1], path[i], edge
        return None
|
import numpy as np
from numpy.polynomial.polynomial import polyval, polyder
import networkx as nx
from networkx.algorithms.shortest_paths.generic import shortest_path
import spiceypy as spice
from ale.rotation import ConstantRotation, TimeDependentRotation
def create_rotations(rotation_table):
    """
    Convert an ISIS rotation table into rotation objects.

    Parameters
    ----------
    rotation_table : dict
        The rotation ISIS table as a dictionary

    Returns
    -------
    : list
        A list of time dependent or constant rotation objects from the table. This
        list will always have either 1 or 2 elements. The first rotation will be
        time dependent and the second rotation will be constant. The rotations will
        be ordered such that the reference frame the first rotation rotates to is
        the reference frame the second rotation rotates from.
    """
    rotations = []
    # The time dependent frame list runs from the final frame back to the
    # root frame, so the root sits at the end of the list.
    root_frame = rotation_table['TimeDependentFrames'][-1]
    last_time_dep_frame = rotation_table['TimeDependentFrames'][0]
    # Case 1: It's a table of quaternions and times
    if 'J2000Q0' in rotation_table:
        # SPICE quaternions are (W, X, Y, Z) and ALE uses (X, Y, Z, W).
        quats = np.array([rotation_table['J2000Q1'],
                          rotation_table['J2000Q2'],
                          rotation_table['J2000Q3'],
                          rotation_table['J2000Q0']]).T
        if 'AV1' in rotation_table:
            # Angular velocities, when present in the table.
            av = np.array([rotation_table['AV1'],
                           rotation_table['AV2'],
                           rotation_table['AV3']]).T
        else:
            av = None
        time_dep_rot = TimeDependentRotation(quats,
                                             rotation_table['ET'],
                                             root_frame,
                                             last_time_dep_frame,
                                             av=av)
        rotations.append(time_dep_rot)
    # Case 2: It's a table of Euler angle coefficients
    elif 'J2000Ang1' in rotation_table:
        ephemeris_times = np.linspace(rotation_table['CkTableStartTime'],
                                      rotation_table['CkTableEndTime'],
                                      rotation_table['CkTableOriginalSize'])
        # The final entry of the first two angle arrays stores the time
        # scaling (base time and scale), not a polynomial coefficient.
        base_time = rotation_table['J2000Ang1'][-1]
        time_scale = rotation_table['J2000Ang2'][-1]
        scaled_times = (ephemeris_times - base_time) / time_scale
        coeffs = np.array([rotation_table['J2000Ang1'][:-1],
                           rotation_table['J2000Ang2'][:-1],
                           rotation_table['J2000Ang3'][:-1]]).T
        angles = polyval(scaled_times, coeffs).T
        # ISIS is hard coded to ZXZ (313) Euler angle axis order.
        # SPICE also interprets Euler angle rotations as negative rotations,
        # so negate them before passing to scipy.
        time_dep_rot = TimeDependentRotation.from_euler('zxz',
                                                        -angles,
                                                        ephemeris_times,
                                                        root_frame,
                                                        last_time_dep_frame)
        rotations.append(time_dep_rot)
    if 'ConstantRotation' in rotation_table:
        last_constant_frame = rotation_table['ConstantFrames'][0]
        # The constant rotation is stored as a flattened 3x3 matrix.
        rot_mat = np.reshape(np.array(rotation_table['ConstantRotation']), (3, 3))
        constant_rot = ConstantRotation.from_matrix(rot_mat,
                                                    last_time_dep_frame,
                                                    last_constant_frame)
        rotations.append(constant_rot)
    return rotations
class FrameChain(nx.DiGraph):
    """
    This class is responsible for handling rotations between reference frames.
    Every node is the reference frame and every edge represents the rotation to
    between those two nodes. Each edge is directional, where the source --> destination
    is one rotation and destination --> source is the inverse of that rotation.

    Attributes
    __________
    frame_changes : list
        A list of tuples that represent the rotation from one frame
        to another. These tuples should all be NAIF codes for
        reference frames

    ephemeris_time : list
        A of ephemeris times that need to be rotated for each set
        of frame rotations in the frame chain
    """

    @classmethod
    def from_spice(cls, *args, sensor_frame, target_frame, center_ephemeris_time, ephemeris_times=(), **kwargs):
        """Construct a frame chain from SPICE kernels.

        Traces both the sensor frame and the target frame back to the
        J2000 root and adds one rotation edge per adjacent pair of
        frames along each trace.

        Parameters
        ----------
        sensor_frame : int
            NAIF id of the sensor reference frame
        target_frame : int
            NAIF id of the target body reference frame
        center_ephemeris_time : float
            Time at which the frame relationships are traced
        ephemeris_times : iterable
            Ephemeris times at which time dependent rotations are
            evaluated.  (Default changed from the mutable ``[]`` to the
            immutable ``()``; behavior is identical.)

        Returns
        -------
        : FrameChain
        """
        frame_chain = cls()
        times = np.array(ephemeris_times)

        sensor_time_dependent_frames, sensor_constant_frames = cls.frame_trace(sensor_frame, center_ephemeris_time)
        target_time_dependent_frames, target_constant_frames = cls.frame_trace(target_frame, center_ephemeris_time)

        # Each adjacent pair of frames along a trace becomes one edge.
        time_dependent_frames = list(zip(sensor_time_dependent_frames[:-1], sensor_time_dependent_frames[1:]))
        constant_frames = list(zip(sensor_constant_frames[:-1], sensor_constant_frames[1:]))
        time_dependent_frames.extend(zip(target_time_dependent_frames[:-1], target_time_dependent_frames[1:]))
        constant_frames.extend(zip(target_constant_frames[:-1], target_constant_frames[1:]))

        for s, d in time_dependent_frames:
            quats = np.zeros((len(times), 4))
            avs = np.zeros((len(times), 3))
            for j, time in enumerate(times):
                state_matrix = spice.sxform(spice.frmnam(s), spice.frmnam(d), time)
                rotation_matrix, avs[j] = spice.xf2rav(state_matrix)
                # SPICE quaternions are (W, X, Y, Z); ALE stores (X, Y, Z, W).
                quat_from_rotation = spice.m2q(rotation_matrix)
                quats[j, :3] = quat_from_rotation[1:]
                quats[j, 3] = quat_from_rotation[0]
            rotation = TimeDependentRotation(quats, times, s, d, av=avs)
            frame_chain.add_edge(rotation=rotation)

        for s, d in constant_frames:
            quats = np.zeros(4)
            # Constant rotations are evaluated at a single time only.
            rotation_matrix = spice.pxform(spice.frmnam(s), spice.frmnam(d), times[0])
            quat_from_rotation = spice.m2q(rotation_matrix)
            quats[:3] = quat_from_rotation[1:]
            quats[3] = quat_from_rotation[0]
            rotation = ConstantRotation(quats, s, d)
            frame_chain.add_edge(rotation=rotation)
        return frame_chain

    @staticmethod
    def frame_trace(reference_frame, ephemeris_time):
        """Trace a frame back to the J2000 root frame (NAIF id 1).

        Parameters
        ----------
        reference_frame : int
            NAIF id of the frame to trace
        ephemeris_time : float
            Time at which CK/dynamic frames are evaluated

        Returns
        -------
        : tuple
            ``(time_dependent_frames, constant_frames)`` lists of NAIF
            frame ids.  ``constant_frames`` is the leading run of TK
            (type 4) frames; the lists share one overlapping frame so
            the chains stay connected.
        """
        frame_codes = [reference_frame]
        _, frame_type, _ = spice.frinfo(frame_codes[-1])
        frame_types = [frame_type]
        while frame_codes[-1] != 1:
            try:
                center, frame_type, frame_type_id = spice.frinfo(frame_codes[-1])
            except Exception as e:
                # Best effort: stop tracing at the first frame SPICE
                # cannot describe instead of aborting.  (Was a bare
                # `except:`, which also swallowed SystemExit.)
                print(e)
                break
            # Fixed: compare ints with ==, not `is` -- identity of equal
            # integers is a CPython implementation detail.
            if frame_type in (1, 2):
                # Inertial (1) and PCK body-fixed (2) frames chain
                # directly to J2000.
                frame_code = 1
            elif frame_type == 3:
                try:
                    matrix, frame_code = spice.ckfrot(frame_type_id, ephemeris_time)
                except Exception as err:
                    raise Exception(f"The ck rotation from frame {frame_codes[-1]} can not \
be found due to no pointing available at requested time \
or a problem with the frame") from err
            elif frame_type == 4:
                try:
                    matrix, frame_code = spice.tkfram(frame_type_id)
                except Exception as err:
                    raise Exception(f"The tk rotation from frame {frame_codes[-1]} can not \
be found") from err
            elif frame_type == 5:
                matrix, frame_code = spice.zzdynrot(frame_type_id, center, ephemeris_time)
            else:
                raise Exception(f"The frame {frame_codes[-1]} has a type {frame_type_id} \
not supported by your version of Naif Spicelib. \
You need to update.")
            frame_codes.append(frame_code)
            frame_types.append(frame_type)

        # Split off the leading run of TK (type 4) frames: those are
        # constant rotations; everything after is time dependent.
        constant_frames = []
        while frame_codes:
            if frame_types[0] == 4:
                constant_frames.append(frame_codes.pop(0))
                frame_types.pop(0)
            else:
                break
        time_dependent_frames = []
        if len(constant_frames) != 0:
            # Overlap by one frame so the constant and time dependent
            # chains remain connected.
            time_dependent_frames.append(constant_frames[-1])
        while frame_codes:
            time_dependent_frames.append(frame_codes.pop(0))
        return time_dependent_frames, constant_frames

    @classmethod
    def from_isis_tables(cls, *args, inst_pointing=None, body_orientation=None, **kwargs):
        """Construct a frame chain from ISIS rotation tables.

        Parameters
        ----------
        inst_pointing : dict
            ISIS instrument pointing table
        body_orientation : dict
            ISIS body orientation table

        Returns
        -------
        : FrameChain
        """
        frame_chain = cls()
        # Fixed: mutable default arguments replaced by None sentinels;
        # falling back to {} preserves the previous behavior exactly.
        for rotation in create_rotations(inst_pointing if inst_pointing is not None else {}):
            frame_chain.add_edge(rotation=rotation)
        for rotation in create_rotations(body_orientation if body_orientation is not None else {}):
            frame_chain.add_edge(rotation=rotation)
        return frame_chain

    def add_edge(self, rotation, **kwargs):
        """Add a rotation edge and its inverse edge to the graph."""
        super(FrameChain, self).add_edge(rotation.source, rotation.dest, rotation=rotation, **kwargs)
        inverse = rotation.inverse()
        super(FrameChain, self).add_edge(inverse.source, inverse.dest, rotation=inverse, **kwargs)

    def compute_rotation(self, source, destination):
        """
        Returns the rotation to another node. Returns the identity rotation
        if the other node is this node.

        Parameters
        ----------
        source : int
            Integer id for the source node to rotate from
        destination : int
            Integer id for the node to rotate into from the source node

        Returns
        -------
        rotation : Object
            Returns either a TimeDependentRotation object or ConstantRotation
            object depending on the number of rotations being multiplied
            together
        """
        if source == destination:
            # Identity quaternion in (X, Y, Z, W) order.
            return ConstantRotation(np.array([0, 0, 0, 1]), source, destination)

        path = shortest_path(self, source, destination)
        rotations = [self.edges[path[i], path[i + 1]]['rotation'] for i in range(len(path) - 1)]
        rotation = rotations[0]
        for next_rotation in rotations[1:]:
            rotation = next_rotation * rotation
        return rotation

    def last_time_dependent_frame_between(self, source, destination):
        """
        Find the last time dependent frame between the source frame and the
        destination frame.

        Parameters
        ----------
        source : int
            Integer id of the source node

        destination : int
            Integer of the destination node

        Returns
        -------
        : tuple, None
            Returns the source node id, destination node id, and edge dictionary
            which contains the rotation from source to destination.
        """
        path = shortest_path(self, source, destination)
        # Reverse the path to search bottom up to find the last time dependent
        # frame between the source and destination
        path.reverse()
        for i in range(len(path) - 1):
            edge = self.edges[path[i + 1], path[i]]
            if isinstance(edge['rotation'], TimeDependentRotation):
                return path[i + 1], path[i], edge
        return None
|
en
| 0.747993
|
Convert an ISIS rotation table into rotation objects. Parameters ---------- rotation_table : dict The rotation ISIS table as a dictionary Returns ------- : list A list of time dependent or constant rotation objects from the table. This list will always have either 1 or 2 elements. The first rotation will be time dependent and the second rotation will be constant. The rotations will be ordered such that the reference frame the first rotation rotates to is the reference frame the second rotation rotates from. # Case 1: It's a table of quaternions and times # SPICE quaternions are (W, X, Y, Z) and ALE uses (X, Y, Z, W). # Case 2: It's a table of Euler angle coefficients # ISIS is hard coded to ZXZ (313) Euler angle axis order. # SPICE also interprets Euler angle rotations as negative rotations, # so negate them before passing to scipy. This class is responsible for handling rotations between reference frames. Every node is the reference frame and every edge represents the rotation to between those two nodes. Each edge is directional, where the source --> destination is one rotation and destination --> source is the inverse of that rotation. Attributes __________ frame_changes : list A list of tuples that represent the rotation from one frame to another. These tuples should all be NAIF codes for reference frames ephemeris_time : list A of ephemeris times that need to be rotated for each set of frame rotations in the frame chain Returns the rotation to another node. Returns the identity rotation if the other node is this node. Parameters ---------- source : int Integer id for the source node to rotate from destination : int Integer id for the node to rotate into from the source node Returns ------- rotation : Object Returns either a TimeDependentRotation object or ConstantRotation object depending on the number of rotations being multiplied together Find the last time dependent frame between the source frame and the destination frame. 
Parameters ---------- source : int Integer id of the source node destination : int Integer of the destination node Returns ------- : tuple, None Returns the source node id, destination node id, and edge dictionary which contains the rotation from source to destination. # Reverse the path to search bottom up to find the last time dependent # frame between the source and destination
| 2.680593
| 3
|
1558.py
|
gabzin/beecrowd
| 3
|
6626899
|
<gh_stars>1-10
from math import sqrt
# Lookup table: q[n] == 1 iff n can be written as i*i + j*j
# with 0 <= i <= j, for every n in [0, 11000].
q=[0]*11001
for i in range(int(sqrt(11000))+1):
    for j in range(i, int(sqrt(11000))+1):
        if i*i+j*j>11000:
            break  # larger j only increases the sum; advance i
        q[i*i+j*j]=1
# Answer queries until EOF; negative numbers are never a sum of squares.
while True:
    try:
        n=int(input())
        if n<0:print("NO")
        else:
            print("YES") if q[n] else print("NO")
    except EOFError:
        break
|
from math import sqrt
# Lookup table: q[n] == 1 iff n can be written as i*i + j*j
# with 0 <= i <= j, for every n in [0, 11000].
q=[0]*11001
for i in range(int(sqrt(11000))+1):
    for j in range(i, int(sqrt(11000))+1):
        if i*i+j*j>11000:
            break  # larger j only increases the sum; advance i
        q[i*i+j*j]=1
# Answer queries until EOF; negative numbers are never a sum of squares.
while True:
    try:
        n=int(input())
        if n<0:print("NO")
        else:
            print("YES") if q[n] else print("NO")
    except EOFError:
        break
|
none
| 1
| 2.940024
| 3
|
|
tests/test_grid_search_cv.py
|
franneck94/TensorCross
| 9
|
6626900
|
"""Test code for the grid search cv.
"""
import os
import unittest
import numpy as np
import tensorflow as tf
from tensorcross.model_selection import GridSearchCV
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
np.random.seed(0)
tf.random.set_seed(0)
def f(x: np.ndarray) -> np.ndarray:
    """Ground-truth linear function y = 2x + 1."""
    return x * 2 + 1
class DATA:
    """Container for a synthetic single-feature regression dataset."""

    def __init__(self) -> None:
        # 100 noisy samples of y = f(x) = 2x + 1 with x drawn uniformly
        # from [-10, 10), reshaped into column vectors for tf.data.
        x = np.random.uniform(low=-10.0, high=10.0, size=100)
        y = f(x) + np.random.normal(size=100)
        self.train_dataset = tf.data.Dataset.from_tensor_slices(
            (x.reshape(-1, 1), y.reshape(-1, 1))
        )
def build_model(
    num_features: int,
    num_targets: int,
    optimizer: tf.keras.optimizers.Optimizer,
    learning_rate: float,
) -> tf.keras.models.Model:
    """Build and compile a single dense-layer regression model."""
    inputs = tf.keras.layers.Input(shape=num_features)
    outputs = tf.keras.layers.Dense(units=num_targets)(inputs)
    model = tf.keras.models.Model(inputs=[inputs], outputs=[outputs])
    model.compile(
        loss="mse",
        optimizer=optimizer(learning_rate=learning_rate),
        metrics=["mse"],
    )
    return model
class GridSearchTests(unittest.TestCase):
    """End-to-end smoke test of GridSearchCV on synthetic linear data."""

    def setUp(self) -> None:
        """Create the dataset and a GridSearchCV over a 2x2 parameter
        grid (optimizer x learning rate) with 2-fold cross validation."""
        data = DATA()
        self.train_dataset = data.train_dataset
        param_grid = {
            "optimizer": [
                tf.keras.optimizers.Adam,
                tf.keras.optimizers.RMSprop,
            ],
            "learning_rate": [0.001, 0.0001],
        }
        self.grid_search_cv = GridSearchCV(
            model_fn=build_model,
            param_grid=param_grid,
            n_folds=2,
            verbose=1,
            # Extra kwargs are forwarded to build_model.
            num_features=1,
            num_targets=1,
        )

    def test_grid_search_cv(self) -> None:
        """Fit for a single epoch and print the results summary."""
        self.grid_search_cv.fit(dataset=self.train_dataset, epochs=1, verbose=1)
        self.grid_search_cv.summary()
if __name__ == "__main__":
unittest.main()
|
"""Test code for the grid search cv.
"""
import os
import unittest
import numpy as np
import tensorflow as tf
from tensorcross.model_selection import GridSearchCV
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
np.random.seed(0)
tf.random.set_seed(0)
def f(x: np.ndarray) -> np.ndarray:
    """Ground-truth linear function y = 2x + 1."""
    return x * 2 + 1
class DATA:
    """Container for a synthetic single-feature regression dataset."""

    def __init__(self) -> None:
        # 100 noisy samples of y = f(x) = 2x + 1 with x drawn uniformly
        # from [-10, 10), reshaped into column vectors for tf.data.
        x = np.random.uniform(low=-10.0, high=10.0, size=100)
        y = f(x) + np.random.normal(size=100)
        self.train_dataset = tf.data.Dataset.from_tensor_slices(
            (x.reshape(-1, 1), y.reshape(-1, 1))
        )
def build_model(
    num_features: int,
    num_targets: int,
    optimizer: tf.keras.optimizers.Optimizer,
    learning_rate: float,
) -> tf.keras.models.Model:
    """Build and compile a single dense-layer regression model."""
    inputs = tf.keras.layers.Input(shape=num_features)
    outputs = tf.keras.layers.Dense(units=num_targets)(inputs)
    model = tf.keras.models.Model(inputs=[inputs], outputs=[outputs])
    model.compile(
        loss="mse",
        optimizer=optimizer(learning_rate=learning_rate),
        metrics=["mse"],
    )
    return model
class GridSearchTests(unittest.TestCase):
    """End-to-end smoke test of GridSearchCV on synthetic linear data."""

    def setUp(self) -> None:
        """Create the dataset and a GridSearchCV over a 2x2 parameter
        grid (optimizer x learning rate) with 2-fold cross validation."""
        data = DATA()
        self.train_dataset = data.train_dataset
        param_grid = {
            "optimizer": [
                tf.keras.optimizers.Adam,
                tf.keras.optimizers.RMSprop,
            ],
            "learning_rate": [0.001, 0.0001],
        }
        self.grid_search_cv = GridSearchCV(
            model_fn=build_model,
            param_grid=param_grid,
            n_folds=2,
            verbose=1,
            # Extra kwargs are forwarded to build_model.
            num_features=1,
            num_targets=1,
        )

    def test_grid_search_cv(self) -> None:
        """Fit for a single epoch and print the results summary."""
        self.grid_search_cv.fit(dataset=self.train_dataset, epochs=1, verbose=1)
        self.grid_search_cv.summary()
if __name__ == "__main__":
unittest.main()
|
en
| 0.505266
|
Test code for the grid search cv. Build the test model.
| 2.639778
| 3
|
rising_threads/lib/frontpage_scanner.py
|
RGood/rising_threads
| 2
|
6626901
|
<filename>rising_threads/lib/frontpage_scanner.py<gh_stars>1-10
import time
class FrontpageScanner:
    """Periodically scans r/all's hot listing and records the best
    (lowest) frontpage position seen for each tracked post."""

    def __init__(self, reddit, db_collection):
        """
        :param reddit: Authenticated praw Reddit instance
        :param db_collection: Mongo-style collection of tracked posts
        """
        self.reddit = reddit
        self.collection = db_collection

    def scan_frontpage(self):
        """Loop forever, rescanning every 30 seconds, until Ctrl-C."""
        while True:
            index = 0
            try:
                for post in self.reddit.subreddit('all').hot(limit=1000):
                    entry = self.collection.find_one({'id': post.id})
                    # Only update posts we already track, and only when
                    # this position beats the recorded best.
                    # Fixed: `entry != None` -> `entry is not None`.
                    if entry is not None and index < entry['top_position']:
                        #print("Logged frontpage post found.")
                        self.collection.update_one({'id': post.id}, {'$set': {'top_position': index}})
                    index += 1
                time.sleep(30)
            except KeyboardInterrupt:
                print("Stopping.")
                break
            except Exception:
                # Best effort: transient API/database errors are ignored
                # and the scan retries on the next pass.  (Was a bare
                # `except:`, which also swallowed SystemExit.)
                pass
|
<filename>rising_threads/lib/frontpage_scanner.py<gh_stars>1-10
import time
class FrontpageScanner:
    """Periodically scans r/all's hot listing and records the best
    (lowest) frontpage position seen for each tracked post."""

    def __init__(self, reddit, db_collection):
        """
        :param reddit: Authenticated praw Reddit instance
        :param db_collection: Mongo-style collection of tracked posts
        """
        self.reddit = reddit
        self.collection = db_collection

    def scan_frontpage(self):
        """Loop forever, rescanning every 30 seconds, until Ctrl-C."""
        while True:
            index = 0
            try:
                for post in self.reddit.subreddit('all').hot(limit=1000):
                    entry = self.collection.find_one({'id': post.id})
                    # Only update posts we already track, and only when
                    # this position beats the recorded best.
                    # Fixed: `entry != None` -> `entry is not None`.
                    if entry is not None and index < entry['top_position']:
                        #print("Logged frontpage post found.")
                        self.collection.update_one({'id': post.id}, {'$set': {'top_position': index}})
                    index += 1
                time.sleep(30)
            except KeyboardInterrupt:
                print("Stopping.")
                break
            except Exception:
                # Best effort: transient API/database errors are ignored
                # and the scan retries on the next pass.  (Was a bare
                # `except:`, which also swallowed SystemExit.)
                pass
|
en
| 0.312707
|
#print("Logged frontpage post found.")
| 2.859485
| 3
|
Hacker Rank/Equal.py
|
MhmdRyhn/Programming-Sloution
| 1
|
6626902
|
# Warning:
# The problem statement says "Christy can give 1, 2 or 5 chocolates";
# take that literally (exactly 1, 2 or 5 per operation).
# Otherwise you will get "Wrong Answer".
def oper_per_person(n):
    """Minimum number of +1/+2/+5 operations summing to n.

    Greedy on denominations 5, then 2, then 1, which is optimal for
    this coin set.
    """
    fives, rest = divmod(n, 5)
    twos, ones = divmod(rest, 2)
    return fives + twos + ones
def total_oper(min, minus, arr, n):
    """Total operations to bring the first n values of arr down to the
    target value (min - minus).

    NOTE: the parameter name `min` shadows the builtin inside this
    function; it is kept for interface compatibility.
    """
    target = min - minus
    return sum(oper_per_person(arr[i] - target) for i in range(n))
if __name__ == '__main__':
    # Everyone is lowered to a common target value.  The target tried is
    # min(emp) - k for k in 0..4: overshooting the minimum by up to 4
    # can be cheaper because operations come in sizes 1, 2 and 5.
    t = int(input())
    for x in range(t):
        n = int(input())
        emp = list(map(int, input().split()))
        mn = min(emp)
        ans = total_oper(mn, 0, emp, n)
        for i in range(1, 5):
            temp = total_oper(mn, i, emp, n)
            ans = min(ans, temp)
        print(ans)
|
# Warning:
# The problem statement says "Christy can give 1, 2 or 5 chocolates";
# take that literally (exactly 1, 2 or 5 per operation).
# Otherwise you will get "Wrong Answer".
def oper_per_person(n):
    """Minimum number of +1/+2/+5 operations summing to n.

    Greedy on denominations 5, then 2, then 1, which is optimal for
    this coin set.
    """
    fives, rest = divmod(n, 5)
    twos, ones = divmod(rest, 2)
    return fives + twos + ones
def total_oper(min, minus, arr, n):
    """Total operations to bring the first n values of arr down to the
    target value (min - minus).

    NOTE: the parameter name `min` shadows the builtin inside this
    function; it is kept for interface compatibility.
    """
    target = min - minus
    return sum(oper_per_person(arr[i] - target) for i in range(n))
if __name__ == '__main__':
    # Everyone is lowered to a common target value.  The target tried is
    # min(emp) - k for k in 0..4: overshooting the minimum by up to 4
    # can be cheaper because operations come in sizes 1, 2 and 5.
    t = int(input())
    for x in range(t):
        n = int(input())
        emp = list(map(int, input().split()))
        mn = min(emp)
        ans = total_oper(mn, 0, emp, n)
        for i in range(1, 5):
            temp = total_oper(mn, i, emp, n)
            ans = min(ans, temp)
        print(ans)
|
en
| 0.841287
|
# Warning: # In the problem, it is said that "Christy can give 1, 2 or 5 chocolates". # Correct: Christy can give 1, 2 or 5 chocolates. # Otherwise you will get "Wrong Answer"
| 3.479971
| 3
|
chap_06/exe_136_reverse_lookup.py
|
aleattene/python-workbook
| 0
|
6626903
|
<gh_stars>0
"""
Write a function named reverseLookup that finds all of the keys in a dictionary that map to a specific value.
The function will take the dictionary and the value to search for as its only parameters.
It will return a (possibly empty) list of keys from the dictionary that map to the provided value.
Include a main program that demonstrates the reverseLookup function
as part of your solution to this exercise.
Your program should create a dictionary and then
show that the reverseLookup function works correctly when it returns multiple keys, a single key, and no keys.
Ensure that your main program only runs
when the file containing your solution to this exercise has not been imported into another program.
"""
# START Definition of the FUNCTION
def checkEntry(values_string):
    """Stub for future input validation (not yet implemented)."""
    # Possible evolution -> CHECK ENTRY -> FLOAT
    pass
def reverseLookup(weigth_people, weigth):
    """Return every key of weigth_people that maps to the given weight.

    :param weigth_people: dict mapping names to integer weights
    :param weigth: weight to search for (string or int)
    :return: possibly empty list of matching keys, in dict order
    """
    # Conversion STR -> INT (possible evolution -> FLOAT)
    target = int(weigth)
    return [name for name, kg in weigth_people.items() if kg == target]
# END Definition of FUNCTION
# START MAIN PROGRAM
def main():
    """Demonstrate reverseLookup against a fixed weight dictionary,
    using a weight read from standard input."""
    # DICTIONARY - People Weight
    weigth_people = {
        "Alessandro": 95,
        "Daniela": 50,
        "Davide ": 47,
        "Gianni": 50,
        "Aldo": 85,
        "Manuela": 50
    }
    # Acquisition of DATA entered by the USER
    weight = input("Enter your WEIGHT (kg): ")
    # WEIGHT MAPPING
    mapping_results = reverseLookup(weigth_people, weight)
    # Displaying the RESULTS
    print("MAPPING RESULTS -> ", end="")
    if len(mapping_results) == 0:
        print("NO ONE has", end=" ")
    elif len(mapping_results) == 1:
        print("{} has".format(mapping_results[0].upper()), end=" ")
    else:
        # Multiple matches: list every name, then use the plural verb.
        for k in mapping_results:
            print(k.upper(), end=", ")
        print("have", end=" ")
    print("the SAME WEIGHT as YOU ({} kg).".format(weight))


# Run the demo only when executed directly, not when imported.
if __name__ == "__main__":
    main()
|
"""
Write a function named reverseLookup that finds all of the keys in a dictionary that map to a specific value.
The function will take the dictionary and the value to search for as its only parameters.
It will return a (possibly empty) list of keys from the dictionary that map to the provided value.
Include a main program that demonstrates the reverseLookup function
as part of your solution to this exercise.
Your program should create a dictionary and then
show that the reverseLookup function works correctly when it returns multiple keys, a single key, and no keys.
Ensure that your main program only runs
when the file containing your solution to this exercise has not been imported into another program.
"""
# START Definition of the FUNCTION
def checkEntry(values_string):
    """Stub: planned validation of the raw user entry (e.g. FLOAT support).

    Currently a no-op; returns ``None`` for any input.
    """
    # Possible evolution -> CHECK ENTRY -> FLOAT
    pass
def reverseLookup(weigth_people, weigth):
    """Return the list of keys in *weigth_people* whose value equals *weigth*.

    The lookup value is converted to ``int`` when possible (the caller
    passes the raw string typed by the user); non-numeric values are
    compared as-is, which generalizes the lookup to any value type and
    avoids the ValueError the old code raised on non-numeric input.
    Keys are returned in dictionary insertion order; the list may be empty.
    """
    # Conversion STR -> INT, falling back to the raw value (generalization)
    try:
        weight = int(weigth)
    except (TypeError, ValueError):
        weight = weigth
    # Keys whose mapped value matches the requested one
    return [key for key, value in weigth_people.items() if value == weight]
# END Definition of FUNCTION
# START MAIN PROGRAM
def main():
    """Demo driver: ask for a weight and print who shares it."""
    # DICTIONARY - People Weight
    weigth_people = {
        "Alessandro": 95,
        "Daniela": 50,
        "Davide ": 47,
        "Gianni": 50,
        "Aldo": 85,
        "Manuela": 50
    }
    # Acquisition of DATA entered by the USER
    weight = input("Enter your WEIGHT (kg): ")
    # WEIGHT MAPPING
    mapping_results = reverseLookup(weigth_people, weight)
    # Displaying the RESULTS
    print("MAPPING RESULTS -> ", end="")
    how_many = len(mapping_results)
    if how_many == 0:
        print("NO ONE has", end=" ")
    elif how_many == 1:
        print("{} has".format(mapping_results[0].upper()), end=" ")
    else:
        # emit every name followed by ", ", then the plural verb
        print("".join(k.upper() + ", " for k in mapping_results) + "have", end=" ")
    print("the SAME WEIGHT as YOU ({} kg).".format(weight))
if __name__ == "__main__":
    main()
|
en
| 0.849056
|
Write a function named reverseLookup that finds all of the keys in a dictionary that map to a specific value. The function will take the dictionary and the value to search for as its only parameters. It will return a (possibly empty) list of keys from the dictionary that map to the provided value. Include a main program that demonstrates the reverseLookup function as part of your solution to this exercise. Your program should create a dictionary and then show that the reverseLookup function works correctly when it returns multiple keys, a single key, and no keys. Ensure that your main program only runs when the file containing your solution to this exercise has not been imported into another program. # START Definition of the FUNCTION # Possible evolution -> CHECK ENTRY -> FLOAT # Conversion STR -> INT (possible evolution -> FLOAT) # List of people with the same weight # Analysis within the dictionary # END Definition of FUNCTION # START MAIN PROGRAM # DICTIONARY - People Weight # Acquisition of DATA entered by the USER # WEIGHT MAPPING # Displaying the RESULTS
| 4.720161
| 5
|
tests/commands/test_init.py
|
aurule/npc
| 13
|
6626904
|
import npc
import os
import json
from util import fixture_dir
def test_init_bare(prefs, campaign):
    """Init with defaults creates every required path except additional_paths."""
    npc.commands.init(prefs=prefs)
    required = prefs.get('paths.required')
    for key, path in required.items():
        if key in ["additional_paths"]:
            continue
        assert os.path.exists(path)
def test_init_additional_paths(prefs, campaign):
    """Extra paths declared in an override settings file are created too."""
    prefs.load_more(fixture_dir('settings', 'settings-vim.json'))
    npc.commands.init(prefs=prefs)
    for extra_path in prefs.get('paths.required.additional_paths'):
        assert os.path.exists(extra_path)
def test_init_types(prefs, campaign):
    """create_types makes one directory per character type."""
    npc.commands.init(create_types=True, prefs=prefs)
    chardir = prefs.get('paths.required.characters')
    for type_path in prefs.get_type_paths():
        assert os.path.exists(os.path.join(chardir, type_path))
def test_init_all(prefs, campaign):
    """create_all also makes the per-type character directories."""
    npc.commands.init(create_all=True, prefs=prefs)
    chardir = prefs.get('paths.required.characters')
    for type_path in prefs.get_type_paths():
        assert os.path.exists(os.path.join(chardir, type_path))
def test_init_with_name(prefs, campaign):
    """A campaign name is persisted to the campaign settings file."""
    npc.commands.init(campaign_name='Super Game', prefs=prefs)
    settings_file = prefs.get_settings_path('campaign')
    assert os.path.exists(settings_file)
    with open(settings_file, 'r') as settings:
        assert json.load(settings)['campaign_name'] == 'Super Game'
def test_init_dryrun(prefs, campaign):
    """dryrun must not touch the filesystem."""
    npc.commands.init(dryrun=True, prefs=prefs)
    chardir = prefs.get('paths.required.characters')
    for type_path in prefs.get_type_paths():
        assert not os.path.exists(os.path.join(chardir, type_path))
|
import npc
import os
import json
from util import fixture_dir
def test_init_bare(prefs, campaign):
    """Init with defaults creates every required path except additional_paths."""
    npc.commands.init(prefs=prefs)
    required = prefs.get('paths.required')
    for key, path in required.items():
        if key in ["additional_paths"]:
            continue
        assert os.path.exists(path)
def test_init_additional_paths(prefs, campaign):
    """Extra paths declared in an override settings file are created too."""
    prefs.load_more(fixture_dir('settings', 'settings-vim.json'))
    npc.commands.init(prefs=prefs)
    for extra_path in prefs.get('paths.required.additional_paths'):
        assert os.path.exists(extra_path)
def test_init_types(prefs, campaign):
    """create_types makes one directory per character type."""
    npc.commands.init(create_types=True, prefs=prefs)
    chardir = prefs.get('paths.required.characters')
    for type_path in prefs.get_type_paths():
        assert os.path.exists(os.path.join(chardir, type_path))
def test_init_all(prefs, campaign):
    """create_all also makes the per-type character directories."""
    npc.commands.init(create_all=True, prefs=prefs)
    chardir = prefs.get('paths.required.characters')
    for type_path in prefs.get_type_paths():
        assert os.path.exists(os.path.join(chardir, type_path))
def test_init_with_name(prefs, campaign):
    """A campaign name is persisted to the campaign settings file."""
    npc.commands.init(campaign_name='Super Game', prefs=prefs)
    settings_file = prefs.get_settings_path('campaign')
    assert os.path.exists(settings_file)
    with open(settings_file, 'r') as settings:
        assert json.load(settings)['campaign_name'] == 'Super Game'
def test_init_dryrun(prefs, campaign):
    """dryrun must not touch the filesystem."""
    npc.commands.init(dryrun=True, prefs=prefs)
    chardir = prefs.get('paths.required.characters')
    for type_path in prefs.get_type_paths():
        assert not os.path.exists(os.path.join(chardir, type_path))
|
none
| 1
| 2.251431
| 2
|
|
stand_mapping/utils/metrics.py
|
d-diaz/stand_mapping
| 0
|
6626905
|
<gh_stars>0
import numpy as np
import torch
import torch.nn.functional as F
def batchify(targets, predictions, nodata, score_func,
             aggregate=True, *args, **kwargs):
    """Apply *score_func* to every (target, prediction, nodata) triple.

    Parameters
    ----------
    targets : array-like, shape (batch_size, height, width)
        observed, ground-truth target images
    predictions : array-like, shape (batch_size, height, width)
        predicted images
    nodata : array-like, shape (batch_size, height, width)
        masks of pixels to ignore when comparing targets and predictions
    score_func : callable
        called as ``score_func(target, prediction, nodata, *args, **kwargs)``
        once per sample
    aggregate : bool
        when True (default) return the per-metric mean over the batch as a
        tuple; otherwise return the list of per-sample results
    """
    per_sample = [
        score_func(targ, pred, mask, *args, **kwargs)
        for targ, pred, mask in zip(targets, predictions, nodata)
    ]
    if not aggregate:
        return per_sample
    return tuple(np.array(per_sample).mean(axis=0))
def masked_accuracy(input, target, nodata=None, reduction='mean'):
    """Per-image classification accuracy, optionally ignoring nodata pixels.

    Parameters
    ----------
    input : tensor, shape (B, 1, H, W)
        batch of images with predicted classes
    target : tensor, shape (B, 1, H, W)
        batch of images with target classes
    nodata : tensor, shape (B, 1, H, W), optional
        mask of pixels excluded from scoring (nonzero/True = excluded)
    reduction : str
        'mean' averages the per-image scores, 'sum' totals them, anything
        else returns the per-image tensor unchanged

    Returns
    -------
    score : tensor
        accuracy among valid (not nodata) pixels, reduced per *reduction*
    """
    hits = input == target
    valid = torch.ones(target.shape)
    if nodata is not None:
        mask = nodata if nodata.dtype == torch.bool else nodata > 0
        hits *= ~mask
        valid *= ~mask
    score = hits.sum(dim=(1, 2, 3)) / valid.sum(dim=(1, 2, 3))
    if reduction == 'mean':
        return score.mean()
    if reduction == 'sum':
        return score.sum()
    return score
def masked_precision(input, target, nodata=None):
    """Calculates classification precision for a batch of images.

    NOTE(review): the body below is identical to ``masked_accuracy`` with no
    reduction -- it measures the fraction of correct pixels, not precision
    (TP / (TP + FP)). Confirm the intended metric before relying on the name.

    Parameters
    ----------
    input : tensor, shape (B, 1, H, W)
        batch of images with predicted classes
    target : tensor, shape (B, 1, H, W)
        batch of images with target classes
    nodata : tensor, shape (B, 1, H, W), optional
        batch of boolean images indicating areas to be excluded from scoring
    """
    correct = (input == target)
    support = torch.ones(target.shape)
    if nodata is not None:
        # accept binary/integer masks as well as bool
        if nodata.dtype != torch.bool:
            nodata = nodata > 0
        correct *= ~nodata
        support *= ~nodata
    score = correct.sum(dim=(1, 2, 3)) / support.sum(dim=(1, 2, 3))
    return score
def masked_classification_stats(input, target, nodata=None, num_classes=5):
    """Calculates rates of true and false positives and negatives with
    optional nodata mask.

    Parameters
    ----------
    input : tensor, shape (B, C, H, W)
        logits; reduced to hard class predictions via softmax + argmax
        (the code unpacks a class dimension C and softmaxes over it)
    target : tensor, shape (B, 1, H, W)
        batch of images with target classes
    nodata : tensor, shape (B, 1, H, W), optional
        batch of boolean (or binary) images indicating areas to be excluded
        from scoring
    num_classes : int
        number of classes N used for the one-hot encodings

    Returns
    -------
    stats : 5-tuple of tensors, each shape (B, N)
        ratio of true positives, true negatives, false positives, and false
        negatives, and support for each of N classes in each of B images
    """
    # convert input and target to one-hot with shape (B,N,H,W)
    B, C, H, W = input.shape
    hard_pred = torch.argmax(F.softmax(input, dim=1), axis=1)
    input_onehot = F.one_hot(hard_pred,
                             num_classes=num_classes).permute(0, 3, 1, 2)
    # clip(0,) folds negative labels (presumably an "ignore" value such as
    # -1) into class 0 before one-hot encoding -- TODO confirm intent
    targ_onehot = F.one_hot(target[:, 0, :, :].clip(0,),
                            num_classes=num_classes).permute(0, 3, 1, 2)
    # denominator: all pixels, unless a nodata mask narrows it below
    valid_pixels = H*W
    # per-pixel, per-class indicator tensors, each (B,N,H,W)
    tp = (input_onehot == targ_onehot) * targ_onehot
    tn = (input_onehot == targ_onehot) * (targ_onehot == 0)
    fp = (input_onehot > targ_onehot)
    fn = (input_onehot < targ_onehot)
    if nodata is not None:
        if nodata.dtype != torch.bool:
            nodata = nodata > 0
        # zero out masked pixels in every statistic
        tp *= ~nodata
        tn *= ~nodata
        fp *= ~nodata
        fn *= ~nodata
        # per-image count of unmasked pixels, shaped to broadcast over N
        valid_pixels = (~nodata).sum(dim=(1, 2, 3)).unsqueeze(-1)
    tp = tp.sum(dim=(2, 3)) / valid_pixels
    tn = tn.sum(dim=(2, 3)) / valid_pixels
    fp = fp.sum(dim=(2, 3)) / valid_pixels
    fn = fn.sum(dim=(2, 3)) / valid_pixels
    support = targ_onehot.sum(dim=(2, 3))
    return (torch.nan_to_num(tp),  # replaces NaNs with zero
            torch.nan_to_num(tn),  # usually where support is 0
            torch.nan_to_num(fp),
            torch.nan_to_num(fn),
            torch.nan_to_num(support))
def masked_dice_coef(input, target, nodata=None, num_classes=5, eps=1e-23):
    """Calculates the Sorensen-Dice Coefficient with the option of including a
    nodata mask, returning score for each class for each image in a batch.

    Parameters
    ----------
    input : tensor, shape (B, N, H, W)
        logits (unnormalized predictions) for a batch, converted to class
        probabilities using softmax
    target : tensor, shape (B, 1, H, W)
        batch of semantic segmentation target labels
    nodata : tensor, shape (B, 1, H, W), optional
        boolean or binary tensor where values of True or 1 indicate areas
        that should be excluded from scoring (e.g., no label was annotated)
    num_classes : int
        number of classes N used for one-hot encoding of the target
    eps : float
        small value added to the denominator for numerical stability
        (prevents divide by zero)

    Returns
    -------
    score : tensor, shape (B, N)
        Dice Coefficient for each class for each image in batch
    """
    # compute class probabilities over the classes dimension
    soft = F.softmax(input, dim=1)
    # convert target to one-hot, then scatter to shape (B,N,H,W);
    # clip(0,) folds negative (ignore) labels into class 0
    one_hot = F.one_hot(target[:, 0, :, :].clip(0,),
                        num_classes=num_classes).permute(0, 3, 1, 2)
    if nodata is not None:
        # BUG FIX: the original compared against the bound method
        # ``nodata.bool`` instead of the dtype ``torch.bool``, so the
        # test was always True; compare dtypes properly.
        if nodata.dtype != torch.bool:
            nodata = nodata > 0  # cast binary mask to bool
        soft = soft * ~nodata
        one_hot = one_hot * ~nodata
    inter = torch.sum(soft * one_hot, dim=(2, 3))
    card = torch.sum(soft + one_hot, dim=(2, 3))
    score = 2 * inter / (card + eps)
    return score
|
import numpy as np
import torch
import torch.nn.functional as F
def batchify(targets, predictions, nodata, score_func,
             aggregate=True, *args, **kwargs):
    """Apply *score_func* to every (target, prediction, nodata) triple.

    Parameters
    ----------
    targets : array-like, shape (batch_size, height, width)
        observed, ground-truth target images
    predictions : array-like, shape (batch_size, height, width)
        predicted images
    nodata : array-like, shape (batch_size, height, width)
        masks of pixels to ignore when comparing targets and predictions
    score_func : callable
        called as ``score_func(target, prediction, nodata, *args, **kwargs)``
        once per sample
    aggregate : bool
        when True (default) return the per-metric mean over the batch as a
        tuple; otherwise return the list of per-sample results
    """
    per_sample = [
        score_func(targ, pred, mask, *args, **kwargs)
        for targ, pred, mask in zip(targets, predictions, nodata)
    ]
    if not aggregate:
        return per_sample
    return tuple(np.array(per_sample).mean(axis=0))
def masked_accuracy(input, target, nodata=None, reduction='mean'):
    """Per-image classification accuracy, optionally ignoring nodata pixels.

    Parameters
    ----------
    input : tensor, shape (B, 1, H, W)
        batch of images with predicted classes
    target : tensor, shape (B, 1, H, W)
        batch of images with target classes
    nodata : tensor, shape (B, 1, H, W), optional
        mask of pixels excluded from scoring (nonzero/True = excluded)
    reduction : str
        'mean' averages the per-image scores, 'sum' totals them, anything
        else returns the per-image tensor unchanged

    Returns
    -------
    score : tensor
        accuracy among valid (not nodata) pixels, reduced per *reduction*
    """
    hits = input == target
    valid = torch.ones(target.shape)
    if nodata is not None:
        mask = nodata if nodata.dtype == torch.bool else nodata > 0
        hits *= ~mask
        valid *= ~mask
    score = hits.sum(dim=(1, 2, 3)) / valid.sum(dim=(1, 2, 3))
    if reduction == 'mean':
        return score.mean()
    if reduction == 'sum':
        return score.sum()
    return score
def masked_precision(input, target, nodata=None):
    """Calculates classification precision for a batch of images.

    NOTE(review): the body below is identical to ``masked_accuracy`` with no
    reduction -- it measures the fraction of correct pixels, not precision
    (TP / (TP + FP)). Confirm the intended metric before relying on the name.

    Parameters
    ----------
    input : tensor, shape (B, 1, H, W)
        batch of images with predicted classes
    target : tensor, shape (B, 1, H, W)
        batch of images with target classes
    nodata : tensor, shape (B, 1, H, W), optional
        batch of boolean images indicating areas to be excluded from scoring
    """
    correct = (input == target)
    support = torch.ones(target.shape)
    if nodata is not None:
        # accept binary/integer masks as well as bool
        if nodata.dtype != torch.bool:
            nodata = nodata > 0
        correct *= ~nodata
        support *= ~nodata
    score = correct.sum(dim=(1, 2, 3)) / support.sum(dim=(1, 2, 3))
    return score
def masked_classification_stats(input, target, nodata=None, num_classes=5):
    """Calculates rates of true and false positives and negatives with
    optional nodata mask.

    Parameters
    ----------
    input : tensor, shape (B, C, H, W)
        logits; reduced to hard class predictions via softmax + argmax
        (the code unpacks a class dimension C and softmaxes over it)
    target : tensor, shape (B, 1, H, W)
        batch of images with target classes
    nodata : tensor, shape (B, 1, H, W), optional
        batch of boolean (or binary) images indicating areas to be excluded
        from scoring
    num_classes : int
        number of classes N used for the one-hot encodings

    Returns
    -------
    stats : 5-tuple of tensors, each shape (B, N)
        ratio of true positives, true negatives, false positives, and false
        negatives, and support for each of N classes in each of B images
    """
    # convert input and target to one-hot with shape (B,N,H,W)
    B, C, H, W = input.shape
    hard_pred = torch.argmax(F.softmax(input, dim=1), axis=1)
    input_onehot = F.one_hot(hard_pred,
                             num_classes=num_classes).permute(0, 3, 1, 2)
    # clip(0,) folds negative labels (presumably an "ignore" value such as
    # -1) into class 0 before one-hot encoding -- TODO confirm intent
    targ_onehot = F.one_hot(target[:, 0, :, :].clip(0,),
                            num_classes=num_classes).permute(0, 3, 1, 2)
    # denominator: all pixels, unless a nodata mask narrows it below
    valid_pixels = H*W
    # per-pixel, per-class indicator tensors, each (B,N,H,W)
    tp = (input_onehot == targ_onehot) * targ_onehot
    tn = (input_onehot == targ_onehot) * (targ_onehot == 0)
    fp = (input_onehot > targ_onehot)
    fn = (input_onehot < targ_onehot)
    if nodata is not None:
        if nodata.dtype != torch.bool:
            nodata = nodata > 0
        # zero out masked pixels in every statistic
        tp *= ~nodata
        tn *= ~nodata
        fp *= ~nodata
        fn *= ~nodata
        # per-image count of unmasked pixels, shaped to broadcast over N
        valid_pixels = (~nodata).sum(dim=(1, 2, 3)).unsqueeze(-1)
    tp = tp.sum(dim=(2, 3)) / valid_pixels
    tn = tn.sum(dim=(2, 3)) / valid_pixels
    fp = fp.sum(dim=(2, 3)) / valid_pixels
    fn = fn.sum(dim=(2, 3)) / valid_pixels
    support = targ_onehot.sum(dim=(2, 3))
    return (torch.nan_to_num(tp),  # replaces NaNs with zero
            torch.nan_to_num(tn),  # usually where support is 0
            torch.nan_to_num(fp),
            torch.nan_to_num(fn),
            torch.nan_to_num(support))
def masked_dice_coef(input, target, nodata=None, num_classes=5, eps=1e-23):
    """Calculates the Sorensen-Dice Coefficient with the option of including a
    nodata mask, returning score for each class for each image in a batch.

    Parameters
    ----------
    input : tensor, shape (B, N, H, W)
        logits (unnormalized predictions) for a batch, converted to class
        probabilities using softmax
    target : tensor, shape (B, 1, H, W)
        batch of semantic segmentation target labels
    nodata : tensor, shape (B, 1, H, W), optional
        boolean or binary tensor where values of True or 1 indicate areas
        that should be excluded from scoring (e.g., no label was annotated)
    num_classes : int
        number of classes N used for one-hot encoding of the target
    eps : float
        small value added to the denominator for numerical stability
        (prevents divide by zero)

    Returns
    -------
    score : tensor, shape (B, N)
        Dice Coefficient for each class for each image in batch
    """
    # compute class probabilities over the classes dimension
    soft = F.softmax(input, dim=1)
    # convert target to one-hot, then scatter to shape (B,N,H,W);
    # clip(0,) folds negative (ignore) labels into class 0
    one_hot = F.one_hot(target[:, 0, :, :].clip(0,),
                        num_classes=num_classes).permute(0, 3, 1, 2)
    if nodata is not None:
        # BUG FIX: the original compared against the bound method
        # ``nodata.bool`` instead of the dtype ``torch.bool``, so the
        # test was always True; compare dtypes properly.
        if nodata.dtype != torch.bool:
            nodata = nodata > 0  # cast binary mask to bool
        soft = soft * ~nodata
        one_hot = one_hot * ~nodata
    inter = torch.sum(soft * one_hot, dim=(2, 3))
    card = torch.sum(soft + one_hot, dim=(2, 3))
    score = 2 * inter / (card + eps)
    return score
|
en
| 0.770267
|
Applies a scoring function to each sample image in a batch. Parameters ---------- targets : array-like, shape (batch_size, height, width) observed, ground-truth target images predictions : array-like, shape (batch_size, height, width) predicted images nodata : array-like, shape (batch_size, height, width) nodata masks indicating areas where differences between targets and predictions will be ignored score_func : callable scoring function that will be called on each sample, should expect targets, predictions and nodata as arguments, with optional args and kwargs to follow. aggregate : bool whether to return the average of each metrics across the batch (default), or to return each of the metrics for each of the samples in the batch # unpack the batch # X, Y, Z = batch Calculates classification accuracy for a batch of images. Parameters ---------- input : tensor, shape (B, 1, H, W) batch of images with predicted classes target : tensor, shape (B, 1, H, W) batch of images with target classes nodata : tensor, shape (B, 1, H, W), optional batch of boolean images indicating areas to be excluded from scoring Returns ------- score : tensor, shape (B,) average accuracy among valid (not nodata) pixels for each of B images Calculates classification precision for a batch of images. Parameters ---------- input : tensor, shape (B, 1, H, W) batch of images with predicted classes target : tensor, shape (B, 1, H, W) batch of images with target classes nodata : tensor, shape (B, 1, H, W), optional batch of boolean images indicating areas to be excluded from scoring Calculates rates of true and false positives and negatives with optional nodata mask. 
Parameters input : tensor, shape (B, 1, H, W) batch of images with predicted classes target : tensor, shape (B, 1, H, W) batch of images with target classes nodata : tensor, shape (B, 1, H, W), optional batch of boolean images indicating areas to be excluded from scoring Returns ------- stats : 5-tuple of tensors, each shape (B, N) ratio of true positives, true negatives, false positives, and false negatives, and support for each of N classes in each of B images in batch # convert input and target with shape (B,N,H,W) # replaces NaNs with zero # usually where support is 0 Calculates the Sorensen-Dice Coefficient with the option of including a nodata mask, returning score for each class for each image in a batch. Parameters ---------- input : tensor, shape (B, N, H, W) logits (unnormalized predictions) for a batch, will be converted to class probabilities using softmax. target : tensor, shape (B, 1, H, W) batch of semantic segmentation target labels. nodata : tensor, shape (B, 1, H, W), optional boolean or binary tensor where values of True or 1 indicate areas that should be excluded from scoring (e.g., where no label was annotated) eps : float a small value added to denominator of Dice Coefficient for numerical stability (prevents divide by zero) Returns ------- score : tensor, shape (B, C) Dice Coefficient for each class for each image in batch # compute softmax over the classes dimension # convert target to one-hot, then scatter to shape (B,N,H,W) # cast to bool
| 2.873411
| 3
|
gcloud/logging/sink.py
|
waprin/google-cloud-python
| 0
|
6626906
|
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define Stackdriver Logging API Sinks."""
from gcloud.exceptions import NotFound
class Sink(object):
    """Sinks represent filtered exports for log entries.

    See:
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks

    :type name: string
    :param name: the name of the sink

    :type filter_: string
    :param filter_: the advanced logs filter expression defining the entries
                    exported by the sink. If not passed, the instance should
                    already exist, to be refreshed via :meth:`reload`.

    :type destination: string
    :param destination: destination URI for the entries exported by the sink.
                        If not passed, the instance should already exist, to
                        be refreshed via :meth:`reload`.

    :type client: :class:`gcloud.logging.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the sink (which requires a project).
    """
    def __init__(self, name, filter_=None, destination=None, client=None):
        self.name = name
        self.filter_ = filter_
        self.destination = destination
        self._client = client
    @property
    def client(self):
        """Client bound to the sink."""
        return self._client
    @property
    def project(self):
        """Project bound to the sink (delegates to the bound client)."""
        return self._client.project
    @property
    def full_name(self):
        """Fully-qualified name used in sink APIs."""
        return 'projects/%s/sinks/%s' % (self.project, self.name)
    @property
    def path(self):
        """URL path for the sink's APIs."""
        return '/%s' % (self.full_name)
    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory: construct a sink given its API representation.

        :type resource: dict
        :param resource: sink resource representation returned from the API
                         (must carry 'name', 'filter' and 'destination' keys)

        :type client: :class:`gcloud.logging.client.Client`
        :param client: Client which holds credentials and project
                       configuration for the sink.

        :rtype: :class:`gcloud.logging.sink.Sink`
        :returns: Sink parsed from ``resource``.
        :raises: :class:`ValueError` if ``client`` is not ``None`` and the
                 project from the resource does not agree with the project
                 from the client.
        """
        sink_name = resource['name']
        filter_ = resource['filter']
        destination = resource['destination']
        return cls(sink_name, filter_, destination, client=client)
    def _require_client(self, client):
        """Check client or verify over-ride.

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use. If not passed, falls back to the
                       ``client`` stored on the current sink.

        :rtype: :class:`gcloud.logging.client.Client`
        :returns: The client passed in or the currently bound client.
        """
        if client is None:
            client = self._client
        return client
    def create(self, client=None):
        """API call: create the sink via a PUT request.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use. If not passed, falls back to the
                       ``client`` stored on the current sink.
        """
        client = self._require_client(client)
        client.sinks_api.sink_create(
            self.project, self.name, self.filter_, self.destination)
    def exists(self, client=None):
        """API call: test for the existence of the sink via a GET request.

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use. If not passed, falls back to the
                       ``client`` stored on the current sink.
        """
        client = self._require_client(client)
        # EAFP: a NotFound from the API is the "does not exist" signal
        try:
            client.sinks_api.sink_get(self.project, self.name)
        except NotFound:
            return False
        else:
            return True
    def reload(self, client=None):
        """API call: sync local sink configuration via a GET request.

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use. If not passed, falls back to the
                       ``client`` stored on the current sink.
        """
        client = self._require_client(client)
        data = client.sinks_api.sink_get(self.project, self.name)
        self.filter_ = data['filter']
        self.destination = data['destination']
    def update(self, client=None):
        """API call: update sink configuration via a PUT request.

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use. If not passed, falls back to the
                       ``client`` stored on the current sink.
        """
        client = self._require_client(client)
        client.sinks_api.sink_update(
            self.project, self.name, self.filter_, self.destination)
    def delete(self, client=None):
        """API call: delete a sink via a DELETE request.

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use. If not passed, falls back to the
                       ``client`` stored on the current sink.
        """
        client = self._require_client(client)
        client.sinks_api.sink_delete(self.project, self.name)
|
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define Stackdriver Logging API Sinks."""
from gcloud.exceptions import NotFound
class Sink(object):
"""Sinks represent filtered exports for log entries.
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks
:type name: string
:param name: the name of the sink
:type filter_: string
:param filter_: the advanced logs filter expression defining the entries
exported by the sink. If not passed, the instance should
already exist, to be refreshed via :meth:`reload`.
:type destination: string
:param destination: destination URI for the entries exported by the sink.
If not passed, the instance should already exist, to
be refreshed via :meth:`reload`.
:type client: :class:`gcloud.logging.client.Client`
:param client: A client which holds credentials and project configuration
for the sink (which requires a project).
"""
def __init__(self, name, filter_=None, destination=None, client=None):
self.name = name
self.filter_ = filter_
self.destination = destination
self._client = client
@property
def client(self):
"""Clent bound to the sink."""
return self._client
@property
def project(self):
"""Project bound to the sink."""
return self._client.project
@property
def full_name(self):
"""Fully-qualified name used in sink APIs"""
return 'projects/%s/sinks/%s' % (self.project, self.name)
@property
def path(self):
"""URL path for the sink's APIs"""
return '/%s' % (self.full_name)
@classmethod
def from_api_repr(cls, resource, client):
"""Factory: construct a sink given its API representation
:type resource: dict
:param resource: sink resource representation returned from the API
:type client: :class:`gcloud.logging.client.Client`
:param client: Client which holds credentials and project
configuration for the sink.
:rtype: :class:`gcloud.logging.sink.Sink`
:returns: Sink parsed from ``resource``.
:raises: :class:`ValueError` if ``client`` is not ``None`` and the
project from the resource does not agree with the project
from the client.
"""
sink_name = resource['name']
filter_ = resource['filter']
destination = resource['destination']
return cls(sink_name, filter_, destination, client=client)
def _require_client(self, client):
"""Check client or verify over-ride.
:type client: :class:`gcloud.logging.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
:rtype: :class:`gcloud.logging.client.Client`
:returns: The client passed in or the currently bound client.
"""
if client is None:
client = self._client
return client
def create(self, client=None):
"""API call: create the sink via a PUT request
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create
:type client: :class:`gcloud.logging.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
"""
client = self._require_client(client)
client.sinks_api.sink_create(
self.project, self.name, self.filter_, self.destination)
def exists(self, client=None):
"""API call: test for the existence of the sink via a GET request
See
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get
:type client: :class:`gcloud.logging.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
"""
client = self._require_client(client)
try:
client.sinks_api.sink_get(self.project, self.name)
except NotFound:
return False
else:
return True
def reload(self, client=None):
"""API call: sync local sink configuration via a GET request
See
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get
:type client: :class:`gcloud.logging.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
"""
client = self._require_client(client)
data = client.sinks_api.sink_get(self.project, self.name)
self.filter_ = data['filter']
self.destination = data['destination']
def update(self, client=None):
"""API call: update sink configuration via a PUT request
See
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update
:type client: :class:`gcloud.logging.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
"""
client = self._require_client(client)
client.sinks_api.sink_update(
self.project, self.name, self.filter_, self.destination)
def delete(self, client=None):
    """API call: delete a sink via a DELETE request.

    See
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete

    :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
    :param client: the client to use.  If not passed, falls back to the
                   ``client`` stored on the current sink.
    """
    api = self._require_client(client).sinks_api
    api.sink_delete(self.project, self.name)
|
en
| 0.711966
|
# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Define Stackdriver Logging API Sinks. Sinks represent filtered exports for log entries. See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks :type name: string :param name: the name of the sink :type filter_: string :param filter_: the advanced logs filter expression defining the entries exported by the sink. If not passed, the instance should already exist, to be refreshed via :meth:`reload`. :type destination: string :param destination: destination URI for the entries exported by the sink. If not passed, the instance should already exist, to be refreshed via :meth:`reload`. :type client: :class:`gcloud.logging.client.Client` :param client: A client which holds credentials and project configuration for the sink (which requires a project). Clent bound to the sink. Project bound to the sink. Fully-qualified name used in sink APIs URL path for the sink's APIs Factory: construct a sink given its API representation :type resource: dict :param resource: sink resource representation returned from the API :type client: :class:`gcloud.logging.client.Client` :param client: Client which holds credentials and project configuration for the sink. :rtype: :class:`gcloud.logging.sink.Sink` :returns: Sink parsed from ``resource``. 
:raises: :class:`ValueError` if ``client`` is not ``None`` and the project from the resource does not agree with the project from the client. Check client or verify over-ride. :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current sink. :rtype: :class:`gcloud.logging.client.Client` :returns: The client passed in or the currently bound client. API call: create the sink via a PUT request See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current sink. API call: test for the existence of the sink via a GET request See https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current sink. API call: sync local sink configuration via a GET request See https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current sink. API call: update sink configuration via a PUT request See https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current sink. API call: delete a sink via a DELETE request See https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. 
If not passed, falls back to the ``client`` stored on the current sink.
| 2.228599
| 2
|
demo_train.py
|
amitaifrey/learn-to-hash
| 0
|
6626907
|
'''
Demo for running training or linear models.
'''
import utils
from kahip.kmkahip import run_kmkahip


def _select_loader(opt):
    """Return the dataset-loading function matching the chosen CLI flag."""
    if opt.glove:
        return utils.load_glove_data
    if opt.glove_25:
        return utils.load_glove_25_data
    if opt.glove_200:
        return utils.load_glove_200_data
    if opt.sift:
        return utils.load_sift_data
    if opt.gist:
        return utils.load_gist_data
    if opt.lastfm:
        return utils.load_lastfm_data
    return utils.load_data


if __name__ == '__main__':
    opt = utils.parse_args()

    # Adjust the number of parts and the height of the hierarchy.
    n_cluster_l = [opt.n_clusters]
    height_l = [opt.height]

    # Load the train / query / ground-truth splits for the selected dataset
    # and move them onto the configured device.
    loader = _select_loader(opt)
    dataset = loader('train').to(utils.device)
    queryset = loader('query').to(utils.device)
    neighbors = loader('answers').to(utils.device)

    # Specify which action to take at each level; actions can be km, kahip,
    # train, or svm.  Lower keys indicate closer to leaf.
    # Note that if 'kahip' is included, evaluation must be on the training
    # rather than the test set, since partitioning was performed on the
    # training, but not the test, set.
    # e.g.: opt.level2action = {0:'km', 1:'train', 3:'train'}
    opt.level2action = {0: 'train', 1: 'train', 2: 'train', 3: 'train'}
    if opt.height == 2 and opt.n_clusters == 256:
        opt.level2action = {0: 'km', 1: 'train'}

    for n_cluster in n_cluster_l:
        print('n_cluster {}'.format(n_cluster))
        opt.n_clusters = n_cluster
        opt.n_class = n_cluster
        for height in height_l:
            run_kmkahip(height, opt, dataset, queryset, neighbors)
|
'''
Demo for running training or linear models.
'''
import utils
from kahip.kmkahip import run_kmkahip

if __name__ == '__main__':
    opt = utils.parse_args()

    # adjust the number of parts and the height of the hierarchy
    n_cluster_l = [opt.n_clusters]
    height_l = [opt.height]

    # load dataset: each flag selects the train / query / ground-truth
    # splits of one benchmark and moves them onto the configured device.
    if opt.glove:
        dataset = utils.load_glove_data('train').to(utils.device)
        queryset = utils.load_glove_data('query').to(utils.device)
        neighbors = utils.load_glove_data('answers').to(utils.device)
    elif opt.glove_25:
        dataset = utils.load_glove_25_data('train').to(utils.device)
        queryset = utils.load_glove_25_data('query').to(utils.device)
        neighbors = utils.load_glove_25_data('answers').to(utils.device)
    elif opt.glove_200:
        dataset = utils.load_glove_200_data('train').to(utils.device)
        queryset = utils.load_glove_200_data('query').to(utils.device)
        neighbors = utils.load_glove_200_data('answers').to(utils.device)
    elif opt.sift:
        dataset = utils.load_sift_data('train').to(utils.device)
        queryset = utils.load_sift_data('query').to(utils.device)
        neighbors = utils.load_sift_data('answers').to(utils.device)
    elif opt.gist:
        dataset = utils.load_gist_data('train').to(utils.device)
        queryset = utils.load_gist_data('query').to(utils.device)
        neighbors = utils.load_gist_data('answers').to(utils.device)
    elif opt.lastfm:
        dataset = utils.load_lastfm_data('train').to(utils.device)
        queryset = utils.load_lastfm_data('query').to(utils.device)
        neighbors = utils.load_lastfm_data('answers').to(utils.device)
    else:
        # default dataset when no flag is given
        dataset = utils.load_data('train').to(utils.device)
        queryset = utils.load_data('query').to(utils.device)
        neighbors = utils.load_data('answers').to(utils.device)

    # specify which action to take at each level, actions can be km, kahip,
    # train, or svm. Lower keys indicate closer to leaf.
    # Note that if 'kahip' is included, evaluation must be on training rather
    # than test set, since partitioning was performed on training, but not
    # test, set.
    # e.g.: opt.level2action = {0:'km', 1:'train', 3:'train'}
    opt.level2action = {0: 'train', 1: 'train', 2: 'train', 3: 'train'}
    if opt.height == 2 and opt.n_clusters == 256:
        opt.level2action = {0: 'km', 1: 'train'}

    for n_cluster in n_cluster_l:
        print('n_cluster {}'.format(n_cluster))
        opt.n_clusters = n_cluster
        opt.n_class = n_cluster
        for height in height_l:
            run_kmkahip(height, opt, dataset, queryset, neighbors)
|
en
| 0.891004
|
Demo for running training or linear models. #adjust the number of parts and the height of the hierarchy # load dataset #specify which action to take at each level, actions can be km, kahip, train, or svm. Lower keys indicate closer to leaf. #Note that if 'kahip' is included, evaluation must be on training rather than test set, since partitioning was performed on training, but not test, set. #e.g.: opt.level2action = {0:'km', 1:'train', 3:'train'}
| 2.27372
| 2
|
blog/templatetags/blog_tags.py
|
sometimeslove/www.superstrong.com
| 0
|
6626908
|
<filename>blog/templatetags/blog_tags.py
#!/usr/bin/env python
# encoding: utf-8
"""
@version: ??
@author: superstrongz
@license: MIT Licence
@contact: <EMAIL>
@site: http://www.superstrongz.com/
@software: PyCharm
@file: blog_tags.py
@time: ??
"""
from django import template
from django.db.models import Q
from django.conf import settings
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
import random
from django.urls import reverse
from blog.models import Article, Category, Tag, Links, SideBar
from django.utils.encoding import force_text
from django.shortcuts import get_object_or_404
import hashlib
import urllib
from comments.models import Comment
from DjangoBlog.utils import cache_decorator, cache
from django.contrib.auth import get_user_model
from oauth.models import OAuthUser
from DjangoBlog.utils import get_current_site
import logging
# Module-level logger and the Django template-tag registry for this module.
logger = logging.getLogger(__name__)
register = template.Library()
@register.simple_tag
def timeformat(data):
    """Format *data* with ``settings.TIME_FORMAT``; return '' on any error."""
    try:
        formatted = data.strftime(settings.TIME_FORMAT)
    except Exception as err:
        logger.error(err)
        return ""
    return formatted
@register.simple_tag
def datetimeformat(data):
    """Format *data* with ``settings.DATE_TIME_FORMAT``; '' on any error."""
    try:
        formatted = data.strftime(settings.DATE_TIME_FORMAT)
    except Exception as err:
        logger.error(err)
        return ""
    return formatted
@register.filter(is_safe=True)
@stringfilter
def custom_markdown(content):
    """Render *content* as markdown and mark the HTML safe for templates."""
    from DjangoBlog.utils import CommonMarkdown
    return mark_safe(CommonMarkdown.get_markdown(content))
@register.filter(is_safe=True)
@stringfilter
def truncatechars_content(content):
    """
    Return an HTML-safe excerpt of the article body.

    The cut-off length comes from the blog-wide ``article_sub_length``
    setting.

    :param content: full article HTML.
    :return: truncated HTML string.
    """
    from django.template.defaultfilters import truncatechars_html
    from DjangoBlog.utils import get_blog_setting
    blogsetting = get_blog_setting()
    return truncatechars_html(content, blogsetting.article_sub_length)
@register.filter(is_safe=True)
@stringfilter
def truncate(content):
    """Strip all HTML tags and keep only the first 150 characters."""
    from django.utils.html import strip_tags

    plain = strip_tags(content)
    return plain[:150]
@register.inclusion_tag('blog/tags/breadcrumb.html')
def load_breadcrumb(article):
    """
    Build the breadcrumb trail for an article.

    :param article: article whose category tree is rendered.
    :return: context with root-to-leaf ``names`` and the article ``title``.
    """
    names = article.get_category_tree()
    from DjangoBlog.utils import get_blog_setting
    blogsetting = get_blog_setting()
    # Fix: dropped the unused `site = get_current_site().domain` local.
    # Root the trail at the site name, then order it root-to-leaf.
    names.append((blogsetting.sitename, '/'))
    names = names[::-1]
    return {
        'names': names,
        'title': article.title
    }
@register.inclusion_tag('blog/tags/article_tag_list.html')
def load_articletags(article):
    """
    Render the tag list of an article.

    Each entry carries the tag URL, its article count, the tag itself and
    a randomly picked bootstrap colour class.

    :param article: article whose tags are listed.
    """
    tags_list = [
        (tag.get_absolute_url(),
         tag.get_article_count(),
         tag,
         random.choice(settings.BOOTSTRAP_COLOR_TYPES))
        for tag in article.tags.all()
    ]
    return {'article_tags_list': tags_list}
@register.inclusion_tag('blog/tags/sidebar.html')
def load_sidebar(user, linktype):
    """
    Load the sidebar context: recent and most-read articles, categories,
    monthly archives, friend links, latest comments and a tag cloud whose
    font sizes scale with each tag's article count.

    :param user: current request user, passed through to the template.
    :param linktype: link ``show_type`` used to filter friend links.
    """
    logger.info('load sidebar')
    from DjangoBlog.utils import get_blog_setting
    blogsetting = get_blog_setting()
    recent_articles = Article.objects.filter(status='p')[:blogsetting.sidebar_article_count]
    sidebar_categorys = Category.objects.all()
    extra_sidebars = SideBar.objects.filter(is_enable=True).order_by('sequence')
    most_read_articles = Article.objects.filter(status='p').order_by('-views')[:blogsetting.sidebar_article_count]
    dates = Article.objects.datetimes('created_time', 'month', order='DESC')
    links = Links.objects.filter(is_enable=True).filter(Q(show_type=str(linktype)) | Q(show_type='a'))
    commment_list = Comment.objects.filter(is_enable=True).order_by('-id')[:blogsetting.sidebar_comment_count]

    # Tag cloud: font size is (tag count / average count) * step + base.
    increment = 5
    tags = Tag.objects.all()
    sidebar_tags = None
    if tags and len(tags) > 0:
        s = list(map(lambda t: (t, t.get_article_count()), tags))
        count = sum(map(lambda t: t[1], s))
        # BUG FIX: the old guard `count == 0 and not len(tags)` was always
        # False here (len(tags) > 0), so an all-zero tag cloud divided by
        # zero.  Fall back to an average of 1 when no tag has articles.
        dd = 1 if count == 0 else count / len(tags)
        sidebar_tags = list(map(lambda x: (x[0], x[1], (x[1] / dd) * increment + 10), s))
        random.shuffle(sidebar_tags)  # module-level `random`; the redundant inner import was removed
    return {
        'recent_articles': recent_articles,
        'sidebar_categorys': sidebar_categorys,
        'most_read_articles': most_read_articles,
        'article_dates': dates,
        'sidebar_comments': commment_list,
        'user': user,
        'sidabar_links': links,
        'show_google_adsense': blogsetting.show_google_adsense,
        'google_adsense_codes': blogsetting.google_adsense_codes,
        'open_site_comment': blogsetting.open_site_comment,
        'show_gongan_code': blogsetting.show_gongan_code,
        'sidebar_tags': sidebar_tags,
        'extra_sidebars': extra_sidebars
    }
@register.inclusion_tag('blog/tags/article_meta_info.html')
def load_article_metas(article, user):
    """
    Provide the meta-information context (article + current user).

    :param article: article being displayed.
    :param user: current request user.
    :return: template context dict.
    """
    return {
        'article': article,
        'user': user
    }
@register.inclusion_tag('blog/tags/article_pagination.html')
def load_pagination_info(page_obj, page_type, tag_name):
    """
    Build previous/next page URLs for a paginated listing.

    :param page_obj: Django paginator page object.
    :param page_type: listing kind -- '' for the index, or one of the archive
        kinds '分类标签归档' (tag), '作者文章归档' (author), '分类目录归档'
        (category).  These Chinese values are compared verbatim by callers,
        so they must not be translated.
    :param tag_name: tag / author / category name, depending on *page_type*.
    :return: context with ``previous_url``, ``next_url`` and ``page_obj``.
    """
    previous_url = ''
    next_url = ''
    if page_type == '':
        if page_obj.has_next():
            next_number = page_obj.next_page_number()
            next_url = reverse('blog:index_page', kwargs={'page': next_number})
        if page_obj.has_previous():
            previous_number = page_obj.previous_page_number()
            previous_url = reverse('blog:index_page', kwargs={'page': previous_number})
    if page_type == '分类标签归档':
        # Tag archive: URLs are built from the tag slug, not the raw name.
        tag = get_object_or_404(Tag, name=tag_name)
        if page_obj.has_next():
            next_number = page_obj.next_page_number()
            next_url = reverse('blog:tag_detail_page', kwargs={'page': next_number, 'tag_name': tag.slug})
        if page_obj.has_previous():
            previous_number = page_obj.previous_page_number()
            previous_url = reverse('blog:tag_detail_page', kwargs={'page': previous_number, 'tag_name': tag.slug})
    if page_type == '作者文章归档':
        # Author archive: tag_name holds the author name here.
        if page_obj.has_next():
            next_number = page_obj.next_page_number()
            next_url = reverse('blog:author_detail_page', kwargs={'page': next_number, 'author_name': tag_name})
        if page_obj.has_previous():
            previous_number = page_obj.previous_page_number()
            previous_url = reverse('blog:author_detail_page', kwargs={'page': previous_number, 'author_name': tag_name})
    if page_type == '分类目录归档':
        # Category archive: resolved by name, linked by slug.
        category = get_object_or_404(Category, name=tag_name)
        if page_obj.has_next():
            next_number = page_obj.next_page_number()
            next_url = reverse('blog:category_detail_page',
                               kwargs={'page': next_number, 'category_name': category.slug})
        if page_obj.has_previous():
            previous_number = page_obj.previous_page_number()
            previous_url = reverse('blog:category_detail_page',
                                   kwargs={'page': previous_number, 'category_name': category.slug})
    return {
        'previous_url': previous_url,
        'next_url': next_url,
        'page_obj': page_obj
    }
"""
@register.inclusion_tag('nav.html')
def load_nav_info():
category_list = Category.objects.all()
return {
'nav_category_list': category_list
}
"""
@register.inclusion_tag('blog/tags/article_info.html')
def load_article_detail(article, isindex, user):
    """
    Render the article detail block.

    :param article: article to render.
    :param isindex: whether this is a list page; list pages show only the
                    abstract instead of the full body.
    :param user: current request user.
    """
    from DjangoBlog.utils import get_blog_setting
    blogsetting = get_blog_setting()
    return {
        'article': article,
        'isindex': isindex,
        'user': user,
        'open_site_comment': blogsetting.open_site_comment,
    }
# return only the URL of the gravatar
# TEMPLATE USE: {{ email|gravatar_url:150 }}
@register.filter
def gravatar_url(email, size=40):
    """Return the avatar URL for *email*; the gravatar URL is cached 10 h.

    Prefers a picture stored on a linked OAuth account; otherwise builds
    a gravatar.com URL with a fixed default image.
    """
    # NOTE(review): 'gravatat' is a misspelled key prefix, kept so existing
    # cache entries stay valid.
    cachekey = 'gravatat/' + email
    # Fix: single cache lookup instead of two -- the old
    # `if cache.get(k): return cache.get(k)` could return None when the
    # entry expired between the two calls.
    cached = cache.get(cachekey)
    if cached:
        return cached
    usermodels = OAuthUser.objects.filter(email=email)
    if usermodels:
        o = list(filter(lambda x: x.picture is not None, usermodels))
        if o:
            return o[0].picture
    email = email.encode('utf-8')
    default = "https://resource.lylinux.net/image/2017/03/26/120117.jpg".encode('utf-8')
    url = "https://www.gravatar.com/avatar/%s?%s" % (
        hashlib.md5(email.lower()).hexdigest(),
        urllib.parse.urlencode({'d': default, 's': str(size)}))
    cache.set(cachekey, url, 60 * 60 * 10)
    return url
@register.filter
def gravatar(email, size=40):
    """Return a safe ``<img>`` tag with the gravatar avatar for *email*."""
    url = gravatar_url(email, size)
    return mark_safe('<img src="%s" height="%d" width="%d">' % (url, size, size))
@register.simple_tag
def query(qs, **kwargs):
    """ template tag which allows queryset filtering. Usage:
    {% query books author=author as mybooks %}
    {% for book in mybooks %}
    ...
    {% endfor %}

    :param qs: queryset to filter.
    :param kwargs: field lookups forwarded to ``QuerySet.filter``.
    """
    return qs.filter(**kwargs)
|
<filename>blog/templatetags/blog_tags.py
#!/usr/bin/env python
# encoding: utf-8
"""
@version: ??
@author: superstrongz
@license: MIT Licence
@contact: <EMAIL>
@site: http://www.superstrongz.com/
@software: PyCharm
@file: blog_tags.py
@time: ??
"""
from django import template
from django.db.models import Q
from django.conf import settings
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
import random
from django.urls import reverse
from blog.models import Article, Category, Tag, Links, SideBar
from django.utils.encoding import force_text
from django.shortcuts import get_object_or_404
import hashlib
import urllib
from comments.models import Comment
from DjangoBlog.utils import cache_decorator, cache
from django.contrib.auth import get_user_model
from oauth.models import OAuthUser
from DjangoBlog.utils import get_current_site
import logging

# NOTE(review): force_text, cache_decorator and get_user_model are imported
# but not referenced below -- possibly used by code outside this view.
logger = logging.getLogger(__name__)
register = template.Library()


@register.simple_tag
def timeformat(data):
    """Format *data* with ``settings.TIME_FORMAT``; '' on any error."""
    try:
        return data.strftime(settings.TIME_FORMAT)
        # print(data.strftime(settings.TIME_FORMAT))
        # return "ddd"
    except Exception as e:
        logger.error(e)
        return ""


@register.simple_tag
def datetimeformat(data):
    """Format *data* with ``settings.DATE_TIME_FORMAT``; '' on any error."""
    try:
        return data.strftime(settings.DATE_TIME_FORMAT)
    except Exception as e:
        logger.error(e)
        return ""


@register.filter(is_safe=True)
@stringfilter
def custom_markdown(content):
    """Render *content* as markdown and mark the HTML safe for templates."""
    from DjangoBlog.utils import CommonMarkdown
    return mark_safe(CommonMarkdown.get_markdown(content))


@register.filter(is_safe=True)
@stringfilter
def truncatechars_content(content):
    """
    Return an HTML-safe excerpt of the article body, cut at the blog-wide
    ``article_sub_length`` setting.
    :param content: full article HTML.
    :return: truncated HTML string.
    """
    from django.template.defaultfilters import truncatechars_html
    from DjangoBlog.utils import get_blog_setting
    blogsetting = get_blog_setting()
    return truncatechars_html(content, blogsetting.article_sub_length)


@register.filter(is_safe=True)
@stringfilter
def truncate(content):
    """Strip HTML tags and keep only the first 150 characters."""
    from django.utils.html import strip_tags
    return strip_tags(content)[:150]


@register.inclusion_tag('blog/tags/breadcrumb.html')
def load_breadcrumb(article):
    """
    Build the breadcrumb trail for an article.
    :param article: article whose category tree is rendered.
    :return: context with root-to-leaf ``names`` and the article ``title``.
    """
    names = article.get_category_tree()
    from DjangoBlog.utils import get_blog_setting
    blogsetting = get_blog_setting()
    site = get_current_site().domain  # NOTE(review): unused local
    names.append((blogsetting.sitename, '/'))
    names = names[::-1]
    return {
        'names': names,
        'title': article.title
    }


@register.inclusion_tag('blog/tags/article_tag_list.html')
def load_articletags(article):
    """
    Render the tag list of an article: URL, article count, tag object and
    a random bootstrap colour class per tag.
    :param article: article whose tags are listed.
    """
    tags = article.tags.all()
    tags_list = []
    for tag in tags:
        url = tag.get_absolute_url()
        count = tag.get_article_count()
        tags_list.append((
            url, count, tag, random.choice(settings.BOOTSTRAP_COLOR_TYPES)
        ))
    return {
        'article_tags_list': tags_list
    }


@register.inclusion_tag('blog/tags/sidebar.html')
def load_sidebar(user, linktype):
    """
    Load the sidebar context: recent/most-read articles, categories,
    archives, friend links, latest comments and a tag cloud.
    :param user: current request user.
    :param linktype: link ``show_type`` used to filter friend links.
    """
    logger.info('load sidebar')
    from DjangoBlog.utils import get_blog_setting
    blogsetting = get_blog_setting()
    recent_articles = Article.objects.filter(status='p')[:blogsetting.sidebar_article_count]
    sidebar_categorys = Category.objects.all()
    extra_sidebars = SideBar.objects.filter(is_enable=True).order_by('sequence')
    most_read_articles = Article.objects.filter(status='p').order_by('-views')[:blogsetting.sidebar_article_count]
    dates = Article.objects.datetimes('created_time', 'month', order='DESC')
    links = Links.objects.filter(is_enable=True).filter(Q(show_type=str(linktype)) | Q(show_type='a'))
    commment_list = Comment.objects.filter(is_enable=True).order_by('-id')[:blogsetting.sidebar_comment_count]
    # Tag cloud: compute font sizes.
    # Size = (tag count / average count) * step, from the overall totals.
    increment = 5
    tags = Tag.objects.all()
    sidebar_tags = None
    if tags and len(tags) > 0:
        s = list(map(lambda t: (t, t.get_article_count()), tags))
        count = sum(map(lambda t: t[1], s))
        # NOTE(review): `not len(tags)` is always False inside this branch,
        # so dd becomes 0 when no tag has articles and the division below
        # raises ZeroDivisionError -- guard should probably be `count == 0`.
        dd = 1 if (count == 0 and not len(tags)) else count / len(tags)
        import random
        sidebar_tags = list(map(lambda x: (x[0], x[1], (x[1] / dd) * increment + 10), s))
        random.shuffle(sidebar_tags)
    return {
        'recent_articles': recent_articles,
        'sidebar_categorys': sidebar_categorys,
        'most_read_articles': most_read_articles,
        'article_dates': dates,
        'sidebar_comments': commment_list,
        'user': user,
        'sidabar_links': links,
        'show_google_adsense': blogsetting.show_google_adsense,
        'google_adsense_codes': blogsetting.google_adsense_codes,
        'open_site_comment': blogsetting.open_site_comment,
        'show_gongan_code': blogsetting.show_gongan_code,
        'sidebar_tags': sidebar_tags,
        'extra_sidebars': extra_sidebars
    }


@register.inclusion_tag('blog/tags/article_meta_info.html')
def load_article_metas(article, user):
    """
    Provide the meta-information context (article + current user).
    :param article: article being displayed.
    :param user: current request user.
    """
    return {
        'article': article,
        'user': user
    }


@register.inclusion_tag('blog/tags/article_pagination.html')
def load_pagination_info(page_obj, page_type, tag_name):
    """Build previous/next URLs for a paginated listing.

    *page_type* is '' for the index or one of the archive kinds
    '分类标签归档' (tag), '作者文章归档' (author), '分类目录归档' (category);
    these Chinese values are matched verbatim and must not be translated.
    """
    previous_url = ''
    next_url = ''
    if page_type == '':
        if page_obj.has_next():
            next_number = page_obj.next_page_number()
            next_url = reverse('blog:index_page', kwargs={'page': next_number})
        if page_obj.has_previous():
            previous_number = page_obj.previous_page_number()
            previous_url = reverse('blog:index_page', kwargs={'page': previous_number})
    if page_type == '分类标签归档':
        tag = get_object_or_404(Tag, name=tag_name)
        if page_obj.has_next():
            next_number = page_obj.next_page_number()
            next_url = reverse('blog:tag_detail_page', kwargs={'page': next_number, 'tag_name': tag.slug})
        if page_obj.has_previous():
            previous_number = page_obj.previous_page_number()
            previous_url = reverse('blog:tag_detail_page', kwargs={'page': previous_number, 'tag_name': tag.slug})
    if page_type == '作者文章归档':
        if page_obj.has_next():
            next_number = page_obj.next_page_number()
            next_url = reverse('blog:author_detail_page', kwargs={'page': next_number, 'author_name': tag_name})
        if page_obj.has_previous():
            previous_number = page_obj.previous_page_number()
            previous_url = reverse('blog:author_detail_page', kwargs={'page': previous_number, 'author_name': tag_name})
    if page_type == '分类目录归档':
        category = get_object_or_404(Category, name=tag_name)
        if page_obj.has_next():
            next_number = page_obj.next_page_number()
            next_url = reverse('blog:category_detail_page',
                               kwargs={'page': next_number, 'category_name': category.slug})
        if page_obj.has_previous():
            previous_number = page_obj.previous_page_number()
            previous_url = reverse('blog:category_detail_page',
                                   kwargs={'page': previous_number, 'category_name': category.slug})
    return {
        'previous_url': previous_url,
        'next_url': next_url,
        'page_obj': page_obj
    }


"""
@register.inclusion_tag('nav.html')
def load_nav_info():
    category_list = Category.objects.all()
    return {
        'nav_category_list': category_list
    }
"""


@register.inclusion_tag('blog/tags/article_info.html')
def load_article_detail(article, isindex, user):
    """
    Render the article detail block.
    :param article: article to render.
    :param isindex: whether this is a list page; list pages show only the
                    abstract instead of the full body.
    :param user: current request user.
    """
    from DjangoBlog.utils import get_blog_setting
    blogsetting = get_blog_setting()
    return {
        'article': article,
        'isindex': isindex,
        'user': user,
        'open_site_comment': blogsetting.open_site_comment,
    }


# return only the URL of the gravatar
# TEMPLATE USE: {{ email|gravatar_url:150 }}
@register.filter
def gravatar_url(email, size=40):
    """Return the avatar URL for *email* (gravatar URL cached 10 h)."""
    # NOTE(review): 'gravatat' is a misspelled key prefix, kept for cache
    # compatibility; cache.get is called twice on a hit.
    cachekey = 'gravatat/' + email
    if cache.get(cachekey):
        return cache.get(cachekey)
    else:
        usermodels = OAuthUser.objects.filter(email=email)
        if usermodels:
            o = list(filter(lambda x: x.picture is not None, usermodels))
            if o:
                return o[0].picture
        email = email.encode('utf-8')
        default = "https://resource.lylinux.net/image/2017/03/26/120117.jpg".encode('utf-8')
        url = "https://www.gravatar.com/avatar/%s?%s" % (
            hashlib.md5(email.lower()).hexdigest(), urllib.parse.urlencode({'d': default, 's': str(size)}))
        cache.set(cachekey, url, 60 * 60 * 10)
        return url


@register.filter
def gravatar(email, size=40):
    """Return a safe ``<img>`` tag with the gravatar avatar for *email*."""
    url = gravatar_url(email, size)
    return mark_safe('<img src="%s" height="%d" width="%d">' % (url, size, size))


@register.simple_tag
def query(qs, **kwargs):
    """ template tag which allows queryset filtering. Usage:
    {% query books author=author as mybooks %}
    {% for book in mybooks %}
    ...
    {% endfor %}
    """
    return qs.filter(**kwargs)
|
en
| 0.215921
|
#!/usr/bin/env python # encoding: utf-8 @version: ?? @author: superstrongz @license: MIT Licence @contact: <EMAIL> @site: http://www.superstrongz.com/ @software: PyCharm @file: blog_tags.py @time: ?? # print(data.strftime(settings.TIME_FORMAT)) # return "ddd" 获得文章内容的摘要 :param content: :return: 获得文章面包屑 :param article: :return: 文章标签 :param article: :return: 加载侧边栏 :return: # 标签云 计算字体大小 # 根据总数计算出平均值 大小为 (数目/平均值)*步长 获得文章meta信息 :param article: :return: @register.inclusion_tag('nav.html') def load_nav_info(): category_list = Category.objects.all() return { 'nav_category_list': category_list } 加载文章详情 :param article: :param isindex:是否列表页,若是列表页只显示摘要 :return: # return only the URL of the gravatar # TEMPLATE USE: {{ email|gravatar_url:150 }} 获得gravatar头像 获得gravatar头像 template tag which allows queryset filtering. Usage: {% query books author=author as mybooks %} {% for book in mybooks %} ... {% endfor %}
| 2.221107
| 2
|
eval_psdb_mobiLess.py
|
cgangEE/ssd
| 0
|
6626909
|
# Resolve the directory this script lives in, so relative paths work from
# any working directory.
basepath=$(cd `dirname $0`; pwd)

# Evaluate the MobileNet-"Less" SSD detector (epoch 240) on the psdb
# validation set, on CPU, as a background job logging to
# log_eval_psdb_mobiLess.  Swap --cpu for the commented --gpus flag below
# to run on GPU 0.
# NOTE(review): 'head-shouler' looks like a typo for 'head-shoulder', but it
# must match the labels the model was trained with, so it is left unchanged.
./evaluate_mobi.py \
--rec-path ${basepath}/data/psdb/val.rec \
--network mobilenetLess \
--num-class 4 \
--data-shape 300 \
--cpu \
--epoch 240 \
--batch-size 1 \
--prefix ${basepath}/output/psdbMobileNetLess/ssd \
--class-names 'pedestrian, head, head-shouler, upper-body' &> log_eval_psdb_mobiLess &
#--gpus 0 \
|
# Resolve the directory this script lives in, so relative paths work from
# any working directory.
basepath=$(cd `dirname $0`; pwd)

# Evaluate the MobileNet-"Less" SSD detector (epoch 240) on the psdb
# validation set, on CPU, as a background job logging to
# log_eval_psdb_mobiLess.  Swap --cpu for the commented --gpus flag below
# to run on GPU 0.
# NOTE(review): 'head-shouler' looks like a typo for 'head-shoulder', but it
# must match the labels the model was trained with, so it is left unchanged.
./evaluate_mobi.py \
--rec-path ${basepath}/data/psdb/val.rec \
--network mobilenetLess \
--num-class 4 \
--data-shape 300 \
--cpu \
--epoch 240 \
--batch-size 1 \
--prefix ${basepath}/output/psdbMobileNetLess/ssd \
--class-names 'pedestrian, head, head-shouler, upper-body' &> log_eval_psdb_mobiLess &
#--gpus 0 \
|
sr
| 0.319517
|
#--gpus 0 \
| 1.268038
| 1
|
libs/rbv/utils.py
|
hexatester/ut-telegram-bot
| 0
|
6626910
|
import os
from bs4 import BeautifulSoup, Tag
from logging import getLogger
from requests import Response
from config import IMG_PATH
from .base import SESSION, USERNAME, PASSWORD
from .page import Page
logger = getLogger(__name__)
def get_chaptcha(soup: Tag) -> str:
    """Solve the arithmetic captcha embedded in the login form.

    The question text looks like 'Berapa hasil dari 3 + 9 =' (Indonesian:
    "what is the result of 3 + 9 ="); operands and operator sit at fixed
    word positions 3, 4 and 5.

    NOTE(review): the name is a typo for ``get_captcha`` but is kept because
    callers use this spelling.

    :param soup: parsed login page containing the ``ccaptcha`` input.
    :return: the answer as a string, or '' when parsing fails.
    """
    c = ""
    try:
        ccaptcha: Tag = soup.find("input", {"name": "ccaptcha"})
        q: list = ccaptcha.previous.strip().lower().split()
        # 'Berapa hasil dari 3 + 9 ='
        a = q[3]
        n = q[4]
        b = q[5]
        if n == "+":
            c = int(a) + int(b)
        elif n == "-":
            c = int(a) - int(b)
        elif n == "/" or n == ":":
            # NOTE(review): true division yields a float (e.g. '4.0');
            # if the site expects an integer answer this should be `//` --
            # confirm against the live form before changing.
            c = int(a) / int(b)
        elif n == "*" or n == "x":
            c = int(a) * int(b)
    except Exception as E:
        logger.exception(E)
    finally:
        return str(c)
def fetch_page(
    url: str,
    retry: int = 0,
    res: Tag = None,
    username: str = USERNAME,
    password: str = PASSWORD,
) -> Response:
    """GET *url*; when the login form comes back, solve its captcha and POST
    the credentials, retrying up to *retry* times.

    :param url: page to fetch.
    :param retry: number of additional attempts on failure.
    :param res: optional response to reuse instead of issuing a new GET.
    :param username: login user name.  FIX: previously this parameter was
        accepted but silently ignored in favour of the module-level
        ``USERNAME``; it is now used and forwarded on retries.
    :param password: login password (same fix as *username*).
    :return: the successful :class:`requests.Response`, or ``None``.
    """
    if not res:
        res = SESSION.get(url)
    if not res.ok or not res.text:
        if retry > 0:
            retry -= 1
            return fetch_page(url, retry, username=username, password=password)
        return None
    soup = BeautifulSoup(res.text, "lxml")
    captcha = get_chaptcha(soup)
    data = {
        "_submit_check": "1",
        "username": username,
        "password": password,
        "ccaptcha": captcha,
        "submit": "Submit",
    }
    res = SESSION.post(url, data=data)
    if not res.ok or "Kode Captcha tidak sesuai!" in res.text:
        # Reuse the failed response so the next attempt re-reads its captcha.
        if retry > 0:
            retry -= 1
            return fetch_page(url, retry, res, username=username, password=password)
        return None
    return res
def get_file(url, filepath, headers=None):
    """Download *url* to *filepath* in 1 KiB chunks.

    :param headers: optional extra request headers (e.g. Referer).
    :return: True on success; False when the request failed or the response
        decodes as UTF-8 text -- NOTE(review): heuristic used to detect an
        HTML error page instead of the expected binary file; confirm.
    """
    res: Response = SESSION.get(url, headers=headers)
    if not res.ok or res.encoding == "UTF-8":
        return False
    with open(filepath, "wb") as f:
        for chunk in res.iter_content(1024):
            f.write(chunk)
    return True
def download(url, page, filepath, module_url, doc, subfolder):
    """Download a page image, re-authenticating via the module page when the
    direct fetch fails.

    :param url: image URL.
    :param page: page number; rounded up to the next multiple of 10 because
        the reader service serves pages in blocks of 10.
    :param filepath: destination path; an existing file short-circuits.
    :param module_url: reader page used as Referer and for re-login.
    :param doc: document id of the book.
    :param subfolder: book subfolder on the reader service.
    :return: True when the file ends up on disk, False otherwise.
    """
    if os.path.isfile(filepath):
        return True
    headers = {"Referer": module_url}
    if get_file(url, filepath, headers):
        return True
    # Direct fetch failed: refresh the session / login ...
    res = fetch_page(module_url, 10)
    if not res or not res.ok:
        return False
    # ... then prime the reader service for this page block before retrying.
    page = (page // 10 + 1) * 10
    jsonp_url = f"http://www.pustaka.ut.ac.id/reader/services/view.php?doc={doc}&format=jsonp&subfolder={subfolder}/&page={page}"  # NOQA
    res = SESSION.get(jsonp_url, headers=headers)
    if res.ok and get_file(url, filepath, headers) and os.path.isfile(filepath):
        return True
    return False
def get_txt(filepath: str) -> str:
    """Read and return the whole UTF-8 text file at *filepath*."""
    with open(filepath, "r", encoding="utf-8") as handle:
        return handle.read()
def store_txt(filepath: str, txt: str) -> str:
    """Write *txt* to *filepath* as UTF-8 and return it unchanged."""
    with open(filepath, "w", encoding="utf-8") as handle:
        handle.write(txt)
    return txt
def fetch_page_json(
    page_number: int, module_url: str, doc: str, subfolder: str, retry: int = 0
) -> str:
    """Fetch (and cache on disk) the JSONP payload for the 10-page block
    containing *page_number*.

    :param page_number: page number; mapped onto the block boundary
        ``(page_number // 10 + 1) * 10`` used by the reader service.
    :param module_url: reader page used as Referer and for re-login.
    :param retry: recursion counter, incremented on session failures.
    :return: the JSONP body with its first and last characters (the
        wrapper parentheses) stripped, or '' on a non-retryable failure.
    :raises ValueError: after more than 10 failed attempts.
    """
    page = (page_number // 10 + 1) * 10
    cache_filepath = os.path.join(IMG_PATH, f"{subfolder}-{doc}-{page}.txt")
    if os.path.isfile(cache_filepath):
        return get_txt(cache_filepath)
    headers = {"Referer": module_url}
    jsonp_url = f"http://www.pustaka.ut.ac.id/reader/services/view.php?doc={doc}&format=jsonp&subfolder={subfolder}/&page={page}"  # NOQA
    res = SESSION.get(jsonp_url, headers=headers)
    if not res.ok or not res.text:
        if retry > 10:
            raise ValueError("Buku / halaman tidak ditemukan.")
        # NOTE(review): this branch returns without retrying; *retry* only
        # grows via the session-refresh branch below -- confirm intended.
        return ""
    if res.text == "Don't waste your time trying to access this file":
        if retry > 10:
            raise ValueError("Buku / halaman tidak ditemukan.")
        # Session expired: re-login through the module page, then retry.
        res = fetch_page(module_url, 10)
        if not res or not res.ok:
            return ""
        return fetch_page_json(page_number, module_url, doc, subfolder, retry + 1)
    # Strip the JSONP wrapper and persist the raw JSON text for next time.
    return store_txt(cache_filepath, res.text[1:-1])
def fetch_page_txt(page_number: int, module_url: str, doc: str, subfolder: str) -> str:
    """Return the extracted text of one book page.

    Uses the per-page cache when available; otherwise fetches the page's
    JSONP block and persists every page it contains for later hits.

    :return: the page text, or '' when the fetched block did not contain
        the requested page.
    """
    if Page.exist(subfolder, doc, page_number):
        return get_txt(Page.get_filepath(subfolder, doc, page_number))
    jsonp = fetch_page_json(page_number, module_url, doc, subfolder)
    pages = Page.from_jsonp(jsonp)
    out = None
    for page in pages:
        page.save(subfolder, doc)  # persist sibling pages for later hits
        if page.number == page_number:
            out = page
    return out.txt if out else ""
def get_max_page(url: str, subfolder: str, doc: str, page_number: int = 1) -> int:
    """Determine the total number of pages of a book.

    First tries to read the count from the inline script on the reader
    page; falls back to asking the JSONP page service.

    :return: the page count, or -1 when neither source is reachable.
    """
    res = fetch_page(url, retry=1)
    if not res or not res.ok:
        return -1
    soup = BeautifulSoup(res.text, "lxml")
    page = (page_number // 10 + 1) * 10
    try:
        # Inline script begins with e.g. "...=123;..."; take the value
        # after '=' of the first statement.
        return int(soup.body.script.next.split(";")[0].split("=")[-1])
    except ValueError:
        # Fall back to the JSONP service for this page block.
        headers = {"Referer": url}
        jsonp_url = f"http://www.pustaka.ut.ac.id/reader/services/view.php?doc={doc}&format=jsonp&subfolder={subfolder}/&page={page}"  # NOQA
        res = SESSION.get(jsonp_url, headers=headers)
        if not res.ok or not res.text:
            # BUG FIX: the old `return max_page` inside a `finally` block
            # overrode this -1 with None (and swallowed any in-flight
            # exception); -1 is now actually returned on failure.
            return -1
        return Page.from_jsonp(res.text)[0].pages
|
import os
from bs4 import BeautifulSoup, Tag
from logging import getLogger
from requests import Response
from config import IMG_PATH
from .base import SESSION, USERNAME, PASSWORD
from .page import Page
logger = getLogger(__name__)
def get_chaptcha(soup: Tag) -> str:
    """Solve the arithmetic captcha on the login form and return the answer.

    The text right before the 'ccaptcha' input is a question like
    'Berapa hasil dari 3 + 9 ='; parse the two operands and the operator
    and compute the result. Returns '' when the question cannot be parsed
    or the operator is unrecognized.

    Fixes: the token list was annotated ``str``; true division produced
    answers like '4.0' instead of '4' — integer division is used since the
    site expects a plain integer string.
    """
    try:
        ccaptcha: Tag = soup.find("input", {"name": "ccaptcha"})
        # e.g. ['berapa', 'hasil', 'dari', '3', '+', '9', '=']
        tokens: list = ccaptcha.previous.strip().lower().split()
        a, op, b = int(tokens[3]), tokens[4], int(tokens[5])
        if op == "+":
            result = a + b
        elif op == "-":
            result = a - b
        elif op in ("/", ":"):
            # Integer division: '/' with int() operands would yield e.g.
            # '4.0', which the form would reject.
            result = a // b
        elif op in ("*", "x"):
            result = a * b
        else:
            return ""
        return str(result)
    except Exception as exc:
        # Parsing is best-effort; log and let the caller submit ''.
        logger.exception(exc)
        return ""
def fetch_page(
    url: str,
    retry: int = 0,
    res: Response = None,
    username: str = USERNAME,
    password: str = PASSWORD,
) -> Response:
    """GET *url*, logging in through the captcha form when required.

    Retries up to *retry* additional times on failure. Returns the final
    Response, or None when every attempt failed.

    Fix: the ``username``/``password`` parameters were accepted but
    silently ignored — the login form always posted the module-level
    defaults. They are now actually used (defaults preserve old behavior).
    The ``res`` parameter was also mis-annotated as ``Tag``.
    """
    if not res:
        res = SESSION.get(url)
        if not res.ok or not res.text:
            if retry > 0:
                retry -= 1
                return fetch_page(url, retry, username=username, password=password)
            return None
    # The page came back as a login form: solve the captcha and post it.
    soup = BeautifulSoup(res.text, "lxml")
    captcha = get_chaptcha(soup)
    data = {
        "_submit_check": "1",
        "username": username,
        "password": password,
        "ccaptcha": captcha,
        "submit": "Submit",
    }
    res = SESSION.post(url, data=data)
    # The site reports a wrong captcha inline ("Kode Captcha tidak sesuai!").
    if not res.ok or "Kode Captcha tidak sesuai!" in res.text:
        if retry > 0:
            retry -= 1
            return fetch_page(url, retry, res, username, password)
        return None
    return res
def get_file(url, filepath, headers=None):
    """Download *url* to *filepath* in 1 KiB binary chunks.

    Returns True on success, False on an HTTP error or when the response
    is UTF-8 text (NOTE(review): presumably that means the server sent an
    HTML/error page instead of the binary asset — confirm).
    """
    response: Response = SESSION.get(url, headers=headers)
    if not response.ok:
        return False
    if response.encoding == "UTF-8":
        return False
    with open(filepath, "wb") as out:
        for piece in response.iter_content(1024):
            out.write(piece)
    return True
def download(url, page, filepath, module_url, doc, subfolder):
    """Download a page image to *filepath*, re-authenticating when needed.

    Returns True when the file is (already) on disk, False otherwise.
    """
    # Already downloaded on a previous run — nothing to do.
    if os.path.isfile(filepath):
        return True
    headers = {"Referer": module_url}
    # First attempt: the session may still be authenticated.
    if get_file(url, filepath, headers):
        return True
    # The image fetch failed; log in again through the module page
    # (up to 10 retries) before trying the image once more.
    res = fetch_page(module_url, 10)
    if not res or not res.ok:
        return False
    # The reader service paginates in batches of 10; hitting the JSONP
    # endpoint presumably primes the server-side session for this batch
    # (NOTE(review): confirm — its response body is not used).
    page = (page // 10 + 1) * 10
    jsonp_url = f"http://www.pustaka.ut.ac.id/reader/services/view.php?doc={doc}&format=jsonp&subfolder={subfolder}/&page={page}"  # NOQA
    res = SESSION.get(jsonp_url, headers=headers)
    # Second and final attempt after re-authentication.
    if res.ok and get_file(url, filepath, headers) and os.path.isfile(filepath):
        return True
    return False
def get_txt(filepath: str) -> str:
    """Read and return the whole UTF-8 text file at *filepath*."""
    with open(filepath, "r", encoding="utf-8") as handle:
        return handle.read()
def store_txt(filepath: str, txt: str) -> str:
    """Write *txt* to *filepath* as UTF-8 and return it unchanged."""
    with open(filepath, "w", encoding="utf-8") as handle:
        handle.write(txt)
    return txt
def fetch_page_json(
    page_number: int, module_url: str, doc: str, subfolder: str, retry: int = 0
) -> str:
    """Fetch (and cache on disk) the JSONP payload covering *page_number*.

    The reader service serves pages in batches of 10; the payload is cached
    per batch under IMG_PATH. Returns '' on a failed fetch and raises
    ValueError after more than 10 retries.
    """
    # Round up to the batch that contains this page (batches of 10).
    page = (page_number // 10 + 1) * 10
    cache_filepath = os.path.join(IMG_PATH, f"{subfolder}-{doc}-{page}.txt")
    # Serve from the per-batch cache when present.
    if os.path.isfile(cache_filepath):
        return get_txt(cache_filepath)
    headers = {"Referer": module_url}
    jsonp_url = f"http://www.pustaka.ut.ac.id/reader/services/view.php?doc={doc}&format=jsonp&subfolder={subfolder}/&page={page}"  # NOQA
    res = SESSION.get(jsonp_url, headers=headers)
    if not res.ok or not res.text:
        if retry > 10:
            raise ValueError("Buku / halaman tidak ditemukan.")
        return ""
    # This exact body is the server's "session expired / unauthorized" marker.
    if res.text == "Don't waste your time trying to access this file":
        if retry > 10:
            raise ValueError("Buku / halaman tidak ditemukan.")
        # Re-authenticate through the module page, then retry recursively.
        res = fetch_page(module_url, 10)
        if not res or not res.ok:
            return ""
        return fetch_page_json(page_number, module_url, doc, subfolder, retry + 1)
    # Strip the first and last characters (presumably the JSONP wrapper
    # parentheses — NOTE(review): confirm) before caching.
    return store_txt(cache_filepath, res.text[1:-1])
def fetch_page_txt(page_number: int, module_url: str, doc: str, subfolder: str) -> str:
    """Return the text of one page, serving from the on-disk cache when possible."""
    # Fast path: this page was saved on a previous run.
    if Page.exist(subfolder, doc, page_number):
        return get_txt(Page.get_filepath(subfolder, doc, page_number))
    # Fetch the JSONP batch containing this page, persist every page in it,
    # and remember the one the caller asked for.
    jsonp = fetch_page_json(page_number, module_url, doc, subfolder)
    wanted = None
    for fetched in Page.from_jsonp(jsonp):
        fetched.save(subfolder, doc)
        if fetched.number == page_number:
            wanted = fetched
    return wanted.txt if wanted else ""
def get_max_page(url: str, subfolder: str, doc: str, page_number: int = 1) -> int:
    """Return the total page count of a module, or -1 on failure.

    First tries to parse the count from the inline script on the module
    page; falls back to the JSONP page service when that parse fails.

    Fix: the original returned from inside ``finally``, which silently
    swallowed unexpected exceptions and could return None from a function
    annotated ``-> int``; it also only caught ValueError even though
    ``soup.body`` may be None (AttributeError).
    """
    res = fetch_page(url, retry=1)
    if not res or not res.ok:
        return -1
    soup = BeautifulSoup(res.text, "lxml")
    # The reader service paginates in batches of 10.
    page = (page_number // 10 + 1) * 10
    try:
        # The count is embedded in the first inline script, e.g. "...=123;".
        return int(soup.body.script.next.split(";")[0].split("=")[-1])
    except (AttributeError, ValueError, IndexError):
        # Fallback: ask the JSONP endpoint and read the count from the payload.
        headers = {"Referer": url}
        jsonp_url = f"http://www.pustaka.ut.ac.id/reader/services/view.php?doc={doc}&format=jsonp&subfolder={subfolder}/&page={page}"  # NOQA
        res = SESSION.get(jsonp_url, headers=headers)
        if not res.ok or not res.text:
            return -1
        pages = Page.from_jsonp(res.text)
        if not pages:
            return -1
        return pages[0].pages
|
id
| 0.752548
|
# 'Berapa hasil dari 3 + 9 =' # NOQA # NOQA # NOQA
| 2.757946
| 3
|
utils_lm.py
|
exelents/soft-prompt-tuning
| 7
|
6626911
|
from transformers.data.datasets.language_modeling import *
import copy
class LineByLineWebNLGTextDataset(Dataset):
    """WebNLG line-by-line dataset for prefix-token language-model tuning.

    Linearizes each WebNLG triple set as ' | subj : rel : obj ...', pairs it
    with every lexicalisation marked 'good' as '<triples> <bos> <target>
    <eos>', tokenizes, and prepends ``n_prefix_tokens`` copies of each
    prefix-token id to every example.

    This will be superseded by a framework-agnostic approach
    soon.
    """

    def __init__(self, tokenizer: PreTrainedTokenizer, file_path: str,
                 block_size: int, bos_tok: str, eos_tok: str,
                 n_prefix_tokens: int, id_prefix_token: "int | list"):
        assert os.path.isfile(file_path), f"Input file path {file_path} not found"
        # Here, we do not cache the features, operating under the assumption
        # that we will soon use fast multithreaded tokenizers from the
        # `tokenizers` repo everywhere =)
        logger.info("Creating features from dataset file at %s", file_path)
        self.n_prefix_tokens = n_prefix_tokens
        self.id_prefix_token = id_prefix_token
        with open(file_path) as f:
            lines_dict = json.load(f)
        full_rela_lst = []
        full_src_lst = []
        full_tgt_lst = []
        # WebNLG entries are keyed by their 1-based position ('1', '2', ...).
        for i, example in enumerate(lines_dict['entries']):
            sents = example[str(i + 1)]['lexicalisations']
            triples = example[str(i + 1)]['modifiedtripleset']
            rela_lst = []
            temp_triples = ''
            for j, tripleset in enumerate(triples):
                subj, rela, obj = tripleset['subject'], tripleset['property'], tripleset['object']
                rela_lst.append(rela)
                temp_triples += ' | '
                temp_triples += '{} : {} : {}'.format(subj, rela, obj)
            # Keep only lexicalisations marked 'good'; each becomes its own
            # (source, target) example sharing the same linearized triples.
            for sent in sents:
                if sent["comment"] == 'good':
                    full_tgt_lst.append(sent["lex"])
                    full_src_lst.append(temp_triples)
                    full_rela_lst.append(rela_lst)
        assert len(full_rela_lst) == len(full_src_lst)
        assert len(full_rela_lst) == len(full_tgt_lst)
        edited_sents = []
        for src, tgt in zip(full_src_lst, full_tgt_lst):
            sent = ' {} {} '.format(src, bos_tok) + tgt + ' {}'.format(eos_tok)
            edited_sents.append(sent)
        # Reserve room for the prefix tokens inside the model block size.
        batch_encoding = tokenizer(edited_sents, add_special_tokens=True, truncation=True,
                                   max_length=block_size-n_prefix_tokens,
                                   is_split_into_words=False)
        # !!!!
        # <NAME>
        # Prefix label positions are masked with -100 (ignored by the loss)
        # when the tokenizer has no pad id.
        # NOTE(review): when the tokenizer DOES define a pad id, prefix label
        # positions use that id instead of -100 — confirm that is intended.
        pad_token = tokenizer.pad_token_id
        if pad_token is None:
            pad_token = -100
        if isinstance(id_prefix_token, int):
            id_prefix_token = [id_prefix_token]
        self.examples = []
        self.labels = []
        # One example per (sentence, prefix id) combination.
        for e in batch_encoding["input_ids"]:
            for pt_id in id_prefix_token:
                self.examples.append([pt_id] * n_prefix_tokens + e)
                self.labels.append([pad_token] * n_prefix_tokens + e)
        # self.labels = copy.deepcopy(self.examples)
        # split into category words:
        # ssl_lst = full_rela_lst
        #
        # self.src_cat = tokenizer(ssl_lst, add_special_tokens=True, truncation=True, max_length=block_size,
        #                          is_split_into_words=True)['input_ids']
        #
        # self.src_sent = []
        # self.tgt_sent = []
        # if True:
        #     separator = tokenizer(bos_tok, add_special_tokens=False)['input_ids'][0]
        #     for i, elem in enumerate(self.labels):
        #         try:
        #             sep_idx = elem.index(separator) + 1
        #         except ValueError:
        #             self.labels[i] = None
        #             continue
        #         self.src_sent.append(self.examples[i][:sep_idx-1])  # does not contain the BOS separator
        #         self.tgt_sent.append(self.examples[i][sep_idx-1:])  # contains the BOS separator.
        #         self.labels[i][:sep_idx] = [-100] * sep_idx
        #
        # self.labels = [l for l in self.labels if l is not None]
        # Debug output: show the first two (input, label) pairs.
        print(self.examples[0])
        print(self.labels[0])
        print()
        print(self.examples[1])
        print(self.labels[1])
        assert len(self.labels) == len(self.examples)

    def __len__(self):
        # Number of (input, label) pairs.
        return len(self.examples)

    # def __getitem__(self, i) -> torch.Tensor:
    def __getitem__(self, i):
        # Returns (input_ids, labels) as LongTensors for example *i*.
        return (torch.tensor(self.examples[i], dtype=torch.long),
                torch.tensor(self.labels[i], dtype=torch.long),
                )
|
from transformers.data.datasets.language_modeling import *
import copy
class LineByLineWebNLGTextDataset(Dataset):
"""
This will be superseded by a framework-agnostic approach
soon.
"""
def __init__(self, tokenizer: PreTrainedTokenizer, file_path: str,
block_size: int, bos_tok:str, eos_tok: str,
n_prefix_tokens: int, id_prefix_token: [int, list]):
assert os.path.isfile(file_path), f"Input file path {file_path} not found"
# Here, we do not cache the features, operating under the assumption
# that we will soon use fast multithreaded tokenizers from the
# `tokenizers` repo everywhere =)
logger.info("Creating features from dataset file at %s", file_path)
self.n_prefix_tokens = n_prefix_tokens
self.id_prefix_token = id_prefix_token
with open(file_path) as f:
lines_dict = json.load(f)
full_rela_lst = []
full_src_lst = []
full_tgt_lst = []
for i, example in enumerate(lines_dict['entries']):
sents = example[str(i + 1)]['lexicalisations']
triples = example[str(i + 1)]['modifiedtripleset']
rela_lst = []
temp_triples = ''
for j, tripleset in enumerate(triples):
subj, rela, obj = tripleset['subject'], tripleset['property'], tripleset['object']
rela_lst.append(rela)
temp_triples += ' | '
temp_triples += '{} : {} : {}'.format(subj, rela, obj)
for sent in sents:
if sent["comment"] == 'good':
full_tgt_lst.append(sent["lex"])
full_src_lst.append(temp_triples)
full_rela_lst.append(rela_lst)
assert len(full_rela_lst) == len(full_src_lst)
assert len(full_rela_lst) == len(full_tgt_lst)
edited_sents = []
for src, tgt in zip(full_src_lst, full_tgt_lst):
sent = ' {} {} '.format(src, bos_tok) + tgt + ' {}'.format(eos_tok)
edited_sents.append(sent)
batch_encoding = tokenizer(edited_sents, add_special_tokens=True, truncation=True,
max_length=block_size-n_prefix_tokens,
is_split_into_words=False)
# !!!!
# <NAME>
pad_token = tokenizer.pad_token_id
if pad_token is None:
pad_token = -100
if isinstance(id_prefix_token, int):
id_prefix_token = [id_prefix_token]
self.examples = []
self.labels = []
for e in batch_encoding["input_ids"]:
for pt_id in id_prefix_token:
self.examples.append([pt_id] * n_prefix_tokens + e)
self.labels.append([pad_token] * n_prefix_tokens + e)
# self.labels = copy.deepcopy(self.examples)
# split into category words:
# ssl_lst = full_rela_lst
#
# self.src_cat = tokenizer(ssl_lst, add_special_tokens=True, truncation=True, max_length=block_size,
# is_split_into_words=True)['input_ids']
#
# self.src_sent = []
# self.tgt_sent = []
# if True:
# separator = tokenizer(bos_tok, add_special_tokens=False)['input_ids'][0]
# for i, elem in enumerate(self.labels):
# try:
# sep_idx = elem.index(separator) + 1
# except ValueError:
# self.labels[i] = None
# continue
# self.src_sent.append(self.examples[i][:sep_idx-1]) # does not contain the BOS separator
# self.tgt_sent.append(self.examples[i][sep_idx-1:]) # contains the BOS separator.
# self.labels[i][:sep_idx] = [-100] * sep_idx
#
# self.labels = [l for l in self.labels if l is not None]
print(self.examples[0])
print(self.labels[0])
print()
print(self.examples[1])
print(self.labels[1])
assert len(self.labels) == len(self.examples)
def __len__(self):
return len(self.examples)
# def __getitem__(self, i) -> torch.Tensor:
def __getitem__(self, i):
return (torch.tensor(self.examples[i], dtype=torch.long),
torch.tensor(self.labels[i], dtype=torch.long),
)
|
en
| 0.440476
|
This will be superseded by a framework-agnostic approach soon. # Here, we do not cache the features, operating under the assumption # that we will soon use fast multithreaded tokenizers from the # `tokenizers` repo everywhere =) # !!!! # <NAME> # self.labels = copy.deepcopy(self.examples) # split into category words: # ssl_lst = full_rela_lst # # self.src_cat = tokenizer(ssl_lst, add_special_tokens=True, truncation=True, max_length=block_size, # is_split_into_words=True)['input_ids'] # # self.src_sent = [] # self.tgt_sent = [] # if True: # separator = tokenizer(bos_tok, add_special_tokens=False)['input_ids'][0] # for i, elem in enumerate(self.labels): # try: # sep_idx = elem.index(separator) + 1 # except ValueError: # self.labels[i] = None # continue # self.src_sent.append(self.examples[i][:sep_idx-1]) # does not contain the BOS separator # self.tgt_sent.append(self.examples[i][sep_idx-1:]) # contains the BOS separator. # self.labels[i][:sep_idx] = [-100] * sep_idx # # self.labels = [l for l in self.labels if l is not None] # def __getitem__(self, i) -> torch.Tensor:
| 2.41137
| 2
|
Gathered CTF writeups/2018-04-30-rhme3/CPA/collect_same.py
|
mihaid-b/CyberSakura
| 1
|
6626912
|
from library import *
import sys
# Capture-collection helper (CPA side-channel attack): write sys.argv[2]
# identical 16-character samples to the file sys.argv[1]. Each sample is
# one character — given numerically as sys.argv[3], any base accepted by
# int(x, 0), e.g. 0x41 — repeated 16 times (presumably one fixed AES-block
# plaintext; NOTE(review): confirm against save_sample in library).
with open(sys.argv[1], "w") as f:
    for i in range(int(sys.argv[2])):
        save_sample(f, chr(int(sys.argv[3], 0)) * 16)
|
from library import *
import sys
with open(sys.argv[1], "w") as f:
for i in range(int(sys.argv[2])):
save_sample(f, chr(int(sys.argv[3], 0))*16)
|
none
| 1
| 2.508249
| 3
|
|
python/tests/custom_dictionary_test.py
|
sqdk/brotli
| 5
|
6626913
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
from subprocess import check_call, Popen, PIPE
from _test_utils import PYTHON, BRO, TEST_ENV, diff_q
# Round-trip inputs; the trailing %s appends the bro tool itself so the
# compressor binary is also exercised as test data.
INPUTS = """\
testdata/alice29.txt
testdata/asyoulik.txt
testdata/lcet10.txt
testdata/plrabn12.txt
../enc/encode.c
../common/dictionary.h
../dec/decode.c
%s
""" % BRO
os.chdir(os.path.abspath("../../tests"))
# Exercise every (file, quality, window) combination, using the input file
# itself as the custom dictionary on both compress and decompress.
for filename in INPUTS.splitlines():
    for quality in (1, 6, 9, 11):
        for lgwin in (10, 15, 20, 24):
            # abspath is idempotent, so reassigning every iteration is safe.
            filename = os.path.abspath(filename)
            print('Roundtrip testing file "%s" at quality %d with lg(win)=%d and auto-custom-dictionary' %
                  (os.path.basename(filename), quality, lgwin))
            compressed = os.path.splitext(filename)[0] + ".custom_bro"
            uncompressed = os.path.splitext(filename)[0] + ".custom_unbro"
            # Compress with the file as its own custom dictionary.
            check_call([PYTHON, BRO, "-f", "-q", str(quality), "-i", filename,
                        "-o", compressed, "--lgwin", str(lgwin),
                        "--custom-dictionary", filename], env=TEST_ENV)
            # Decompress with the same dictionary.
            check_call([PYTHON, BRO, "-f", "-d", "-i", compressed, "-o",
                        uncompressed, "--custom-dictionary", filename], env=TEST_ENV)
            # Any byte difference between original and round-trip is fatal.
            if diff_q(filename, uncompressed) != 0:
                sys.exit(1)
            # Best-effort cleanup; ignore files that were never created.
            try:
                os.unlink(compressed)
                os.unlink(uncompressed)
            except OSError:
                pass
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
from subprocess import check_call, Popen, PIPE
from _test_utils import PYTHON, BRO, TEST_ENV, diff_q
INPUTS = """\
testdata/alice29.txt
testdata/asyoulik.txt
testdata/lcet10.txt
testdata/plrabn12.txt
../enc/encode.c
../common/dictionary.h
../dec/decode.c
%s
""" % BRO
os.chdir(os.path.abspath("../../tests"))
for filename in INPUTS.splitlines():
for quality in (1, 6, 9, 11):
for lgwin in (10, 15, 20, 24):
filename = os.path.abspath(filename)
print('Roundtrip testing file "%s" at quality %d with lg(win)=%d and auto-custom-dictionary' %
(os.path.basename(filename), quality, lgwin))
compressed = os.path.splitext(filename)[0] + ".custom_bro"
uncompressed = os.path.splitext(filename)[0] + ".custom_unbro"
check_call([PYTHON, BRO, "-f", "-q", str(quality), "-i", filename,
"-o", compressed, "--lgwin", str(lgwin),
"--custom-dictionary", filename], env=TEST_ENV)
check_call([PYTHON, BRO, "-f", "-d", "-i", compressed, "-o",
uncompressed, "--custom-dictionary", filename], env=TEST_ENV)
if diff_q(filename, uncompressed) != 0:
sys.exit(1)
try:
os.unlink(compressed)
os.unlink(uncompressed)
except OSError:
pass
|
en
| 0.215286
|
#!/usr/bin/env python \ testdata/alice29.txt testdata/asyoulik.txt testdata/lcet10.txt testdata/plrabn12.txt ../enc/encode.c ../common/dictionary.h ../dec/decode.c %s
| 2.278584
| 2
|
UI/ui_MainWindows.py
|
BeiChenYx/NetDebug
| 9
|
6626914
|
<filename>UI/ui_MainWindows.py<gh_stars>1-10
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'MainWindows.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """Auto-generated UI class (PyQt5 uic 5.11.3) for the network-debug window.

    Do not edit by hand — regenerate from 'MainWindows.ui' instead (see the
    generator warning in the file header).
    """

    def setupUi(self, MainWindow):
        """Build the widget tree: a fixed-width sidebar list plus a stacked work area."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(800, 600)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.centralwidget.setAutoFillBackground(False)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setSpacing(0)
        self.gridLayout.setObjectName("gridLayout")
        # Navigation list pinned to 115 px width on the left.
        self.listWidget = QtWidgets.QListWidget(self.centralwidget)
        self.listWidget.setMinimumSize(QtCore.QSize(115, 0))
        self.listWidget.setMaximumSize(QtCore.QSize(115, 100000))
        self.listWidget.setObjectName("listWidget")
        # Four navigation entries; their captions are set in retranslateUi().
        item = QtWidgets.QListWidgetItem()
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/img/images/TCP服务器.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        item.setIcon(icon)
        self.listWidget.addItem(item)
        item = QtWidgets.QListWidgetItem()
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(":/img/images/TCP客户端.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        item.setIcon(icon1)
        self.listWidget.addItem(item)
        item = QtWidgets.QListWidgetItem()
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap(":/img/images/UDP工具.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        item.setIcon(icon2)
        self.listWidget.addItem(item)
        item = QtWidgets.QListWidgetItem()
        icon3 = QtGui.QIcon()
        icon3.addPixmap(QtGui.QPixmap(":/img/images/帮助.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        item.setIcon(icon3)
        self.listWidget.addItem(item)
        self.gridLayout.addWidget(self.listWidget, 0, 0, 1, 1)
        # Stacked pages on the right; one page per tool.
        self.stackedWidget = QtWidgets.QStackedWidget(self.centralwidget)
        self.stackedWidget.setObjectName("stackedWidget")
        self.page = QtWidgets.QWidget()
        self.page.setObjectName("page")
        self.stackedWidget.addWidget(self.page)
        self.page_2 = QtWidgets.QWidget()
        self.page_2.setObjectName("page_2")
        self.stackedWidget.addWidget(self.page_2)
        self.gridLayout.addWidget(self.stackedWidget, 0, 1, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Apply translatable captions to the window title and list entries."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "网络调试工具"))
        # Disable sorting while captions are assigned, then restore it.
        __sortingEnabled = self.listWidget.isSortingEnabled()
        self.listWidget.setSortingEnabled(False)
        item = self.listWidget.item(0)
        item.setText(_translate("MainWindow", "TCP服务器"))
        item = self.listWidget.item(1)
        item.setText(_translate("MainWindow", "TCP客户端"))
        item = self.listWidget.item(2)
        item.setText(_translate("MainWindow", "UDP工具组"))
        item = self.listWidget.item(3)
        item.setText(_translate("MainWindow", "关于/帮助"))
        self.listWidget.setSortingEnabled(__sortingEnabled)
import img_rc
|
<filename>UI/ui_MainWindows.py<gh_stars>1-10
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'MainWindows.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(800, 600)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setLayoutDirection(QtCore.Qt.LeftToRight)
self.centralwidget.setAutoFillBackground(False)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName("gridLayout")
self.listWidget = QtWidgets.QListWidget(self.centralwidget)
self.listWidget.setMinimumSize(QtCore.QSize(115, 0))
self.listWidget.setMaximumSize(QtCore.QSize(115, 100000))
self.listWidget.setObjectName("listWidget")
item = QtWidgets.QListWidgetItem()
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/img/images/TCP服务器.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item.setIcon(icon)
self.listWidget.addItem(item)
item = QtWidgets.QListWidgetItem()
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/img/images/TCP客户端.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item.setIcon(icon1)
self.listWidget.addItem(item)
item = QtWidgets.QListWidgetItem()
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/img/images/UDP工具.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item.setIcon(icon2)
self.listWidget.addItem(item)
item = QtWidgets.QListWidgetItem()
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(":/img/images/帮助.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item.setIcon(icon3)
self.listWidget.addItem(item)
self.gridLayout.addWidget(self.listWidget, 0, 0, 1, 1)
self.stackedWidget = QtWidgets.QStackedWidget(self.centralwidget)
self.stackedWidget.setObjectName("stackedWidget")
self.page = QtWidgets.QWidget()
self.page.setObjectName("page")
self.stackedWidget.addWidget(self.page)
self.page_2 = QtWidgets.QWidget()
self.page_2.setObjectName("page_2")
self.stackedWidget.addWidget(self.page_2)
self.gridLayout.addWidget(self.stackedWidget, 0, 1, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "网络调试工具"))
__sortingEnabled = self.listWidget.isSortingEnabled()
self.listWidget.setSortingEnabled(False)
item = self.listWidget.item(0)
item.setText(_translate("MainWindow", "TCP服务器"))
item = self.listWidget.item(1)
item.setText(_translate("MainWindow", "TCP客户端"))
item = self.listWidget.item(2)
item.setText(_translate("MainWindow", "UDP工具组"))
item = self.listWidget.item(3)
item.setText(_translate("MainWindow", "关于/帮助"))
self.listWidget.setSortingEnabled(__sortingEnabled)
import img_rc
|
en
| 0.777901
|
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'MainWindows.ui' # # Created by: PyQt5 UI code generator 5.11.3 # # WARNING! All changes made in this file will be lost!
| 1.78347
| 2
|
homeassistant/components/flock/notify.py
|
alemuro/home-assistant
| 2
|
6626915
|
"""Flock platform for notify component."""
import asyncio
import logging
import async_timeout
import voluptuous as vol
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.components.notify import PLATFORM_SCHEMA, BaseNotificationService
_LOGGER = logging.getLogger(__name__)
_RESOURCE = "https://api.flock.com/hooks/sendMessage/"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_ACCESS_TOKEN): cv.string})
async def get_service(hass, config, discovery_info=None):
    """Set up and return the Flock notification service."""
    # The webhook URL is the fixed endpoint plus the configured access token.
    token = config.get(CONF_ACCESS_TOKEN)
    webhook_url = "{}{}".format(_RESOURCE, token)
    http_session = async_get_clientsession(hass)
    return FlockNotificationService(webhook_url, http_session)
class FlockNotificationService(BaseNotificationService):
    """Implement the notification service for Flock."""

    def __init__(self, url, session):
        """Initialize the Flock notification service.

        url: full webhook URL (endpoint + access token).
        session: shared aiohttp client session.
        """
        self._url = url
        self._session = session

    async def async_send_message(self, message, **kwargs):
        """Send the message to the user."""
        payload = {"text": message}
        _LOGGER.debug("Attempting to call Flock at %s", self._url)
        try:
            # NOTE(review): newer async_timeout releases require
            # 'async with'; this plain 'with' matches the pinned version —
            # confirm before upgrading the dependency.
            with async_timeout.timeout(10):
                response = await self._session.post(self._url, json=payload)
                result = await response.json()
                # Flock can return 200 with an 'error' field in the body,
                # so both conditions are checked.
                if response.status != 200 or "error" in result:
                    _LOGGER.error(
                        "Flock service returned HTTP status %d, response %s",
                        response.status,
                        result,
                    )
        except asyncio.TimeoutError:
            _LOGGER.error("Timeout accessing Flock at %s", self._url)
|
"""Flock platform for notify component."""
import asyncio
import logging
import async_timeout
import voluptuous as vol
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.components.notify import PLATFORM_SCHEMA, BaseNotificationService
_LOGGER = logging.getLogger(__name__)
_RESOURCE = "https://api.flock.com/hooks/sendMessage/"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_ACCESS_TOKEN): cv.string})
async def get_service(hass, config, discovery_info=None):
"""Get the Flock notification service."""
access_token = config.get(CONF_ACCESS_TOKEN)
url = "{}{}".format(_RESOURCE, access_token)
session = async_get_clientsession(hass)
return FlockNotificationService(url, session)
class FlockNotificationService(BaseNotificationService):
"""Implement the notification service for Flock."""
def __init__(self, url, session):
"""Initialize the Flock notification service."""
self._url = url
self._session = session
async def async_send_message(self, message, **kwargs):
"""Send the message to the user."""
payload = {"text": message}
_LOGGER.debug("Attempting to call Flock at %s", self._url)
try:
with async_timeout.timeout(10):
response = await self._session.post(self._url, json=payload)
result = await response.json()
if response.status != 200 or "error" in result:
_LOGGER.error(
"Flock service returned HTTP status %d, response %s",
response.status,
result,
)
except asyncio.TimeoutError:
_LOGGER.error("Timeout accessing Flock at %s", self._url)
|
en
| 0.718297
|
Flock platform for notify component. Get the Flock notification service. Implement the notification service for Flock. Initialize the Flock notification service. Send the message to the user.
| 2.46706
| 2
|
server/auvsi_suas/views/teams.py
|
dcat52/interop
| 0
|
6626916
|
"""Teams view."""
import json
from auvsi_suas.models.mission_clock_event import MissionClockEvent
from auvsi_suas.models.uas_telemetry import UasTelemetry
from auvsi_suas.models.takeoff_or_landing_event import TakeoffOrLandingEvent
from auvsi_suas.views import logger
from auvsi_suas.views.decorators import require_superuser
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.utils.decorators import method_decorator
from django.views.generic import View
def user_json(user):
    """Generate JSON-style dict for user: identity, mission state, telemetry."""
    latest_telemetry = UasTelemetry.last_for_user(user)
    status = {
        'name': user.username,
        'id': user.pk,
        'on_clock': MissionClockEvent.user_on_clock(user),
        'on_timeout': MissionClockEvent.user_on_timeout(user),
        'in_air': TakeoffOrLandingEvent.user_in_air(user),
    }
    # No telemetry yet -> explicit null in the exported JSON.
    status['telemetry'] = latest_telemetry.json() if latest_telemetry else None
    return status
class Teams(View):
    """Gets a list of all teams."""

    @method_decorator(require_superuser)
    def dispatch(self, *args, **kwargs):
        # Superuser-only endpoint.
        return super(Teams, self).dispatch(*args, **kwargs)

    def get(self, request):
        # Only standard users are exported; superusers are not teams.
        teams = [
            user_json(user)
            for user in User.objects.all()
            if not user.is_superuser
        ]
        return HttpResponse(json.dumps(teams), content_type="application/json")
class TeamsId(View):
    """GET/PUT specific team."""

    @method_decorator(require_superuser)
    def dispatch(self, *args, **kwargs):
        # Superuser-only endpoint.
        return super(TeamsId, self).dispatch(*args, **kwargs)

    def get(self, request, pk):
        """Return the JSON status of the team with primary key *pk*."""
        try:
            user = User.objects.get(pk=int(pk))
        except User.DoesNotExist:
            return HttpResponseBadRequest('Unknown team %s' % pk)
        return HttpResponse(
            json.dumps(user_json(user)), content_type="application/json")

    def put(self, request, pk):
        """PUT allows updating status.

        The JSON body may contain 'in_air', 'on_clock', and 'on_timeout'
        booleans; missing keys keep their current values. Events are only
        created when a value actually changes, and nothing is saved until
        the whole request has validated.
        """
        try:
            user = User.objects.get(pk=int(pk))
        except User.DoesNotExist:
            return HttpResponseBadRequest('Unknown team %s' % pk)
        try:
            data = json.loads(request.body)
        except ValueError:
            return HttpResponseBadRequest('Invalid JSON: %s' % request.body)
        # Potential events to update.
        takeoff_event = None
        clock_event = None
        # Update whether UAS is in air.
        if 'in_air' in data:
            in_air = data['in_air']
            if not isinstance(in_air, bool):
                return HttpResponseBadRequest('in_air must be boolean')
            currently_in_air = TakeoffOrLandingEvent.user_in_air(user)
            # New event only necessary if changing status
            if currently_in_air != in_air:
                takeoff_event = TakeoffOrLandingEvent(
                    user=user, uas_in_air=in_air)
        # Update whether UAS in on clock or timeout.
        if 'on_clock' in data or 'on_timeout' in data:
            currently_on_clock = MissionClockEvent.user_on_clock(user)
            currently_on_timeout = MissionClockEvent.user_on_timeout(user)
            # Absent keys default to the current state.
            on_clock = data.get('on_clock', currently_on_clock)
            on_timeout = data.get('on_timeout', currently_on_timeout)
            if (not isinstance(on_clock, bool) or
                    not isinstance(on_timeout, bool)):
                return HttpResponseBadRequest(
                    'on_clock and on_timeout must be boolean.')
            # The two states are mutually exclusive.
            if on_clock and on_timeout:
                return HttpResponseBadRequest(
                    'Cannot be on mission clock and on timeout.')
            # New event only necessary if changing status
            if (on_clock != currently_on_clock or
                    on_timeout != currently_on_timeout):
                clock_event = MissionClockEvent(
                    user=user,
                    team_on_clock=on_clock,
                    team_on_timeout=on_timeout)
        # Request was valid. Save updates only after all validation passed.
        if takeoff_event:
            takeoff_event.save()
        if clock_event:
            clock_event.save()
        return HttpResponse(
            json.dumps(user_json(user)), content_type="application/json")
|
"""Teams view."""
import json
from auvsi_suas.models.mission_clock_event import MissionClockEvent
from auvsi_suas.models.uas_telemetry import UasTelemetry
from auvsi_suas.models.takeoff_or_landing_event import TakeoffOrLandingEvent
from auvsi_suas.views import logger
from auvsi_suas.views.decorators import require_superuser
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.utils.decorators import method_decorator
from django.views.generic import View
def user_json(user):
"""Generate JSON-style dict for user."""
telemetry = UasTelemetry.last_for_user(user)
return {
'name': user.username,
'id': user.pk,
'on_clock': MissionClockEvent.user_on_clock(user),
'on_timeout': MissionClockEvent.user_on_timeout(user),
'in_air': TakeoffOrLandingEvent.user_in_air(user),
'telemetry': telemetry.json() if telemetry else None
}
class Teams(View):
"""Gets a list of all teams."""
@method_decorator(require_superuser)
def dispatch(self, *args, **kwargs):
return super(Teams, self).dispatch(*args, **kwargs)
def get(self, request):
users = User.objects.all()
teams = []
for user in users:
# Only standard users are exported
if not user.is_superuser:
teams.append(user_json(user))
return HttpResponse(json.dumps(teams), content_type="application/json")
class TeamsId(View):
"""GET/PUT specific team."""
@method_decorator(require_superuser)
def dispatch(self, *args, **kwargs):
return super(TeamsId, self).dispatch(*args, **kwargs)
def get(self, request, pk):
try:
user = User.objects.get(pk=int(pk))
except User.DoesNotExist:
return HttpResponseBadRequest('Unknown team %s' % pk)
return HttpResponse(
json.dumps(user_json(user)), content_type="application/json")
def put(self, request, pk):
"""PUT allows updating status."""
try:
user = User.objects.get(pk=int(pk))
except User.DoesNotExist:
return HttpResponseBadRequest('Unknown team %s' % pk)
try:
data = json.loads(request.body)
except ValueError:
return HttpResponseBadRequest('Invalid JSON: %s' % request.body)
# Potential events to update.
takeoff_event = None
clock_event = None
# Update whether UAS is in air.
if 'in_air' in data:
in_air = data['in_air']
if not isinstance(in_air, bool):
return HttpResponseBadRequest('in_air must be boolean')
currently_in_air = TakeoffOrLandingEvent.user_in_air(user)
# New event only necessary if changing status
if currently_in_air != in_air:
takeoff_event = TakeoffOrLandingEvent(
user=user, uas_in_air=in_air)
# Update whether UAS in on clock or timeout.
if 'on_clock' in data or 'on_timeout' in data:
currently_on_clock = MissionClockEvent.user_on_clock(user)
currently_on_timeout = MissionClockEvent.user_on_timeout(user)
on_clock = data.get('on_clock', currently_on_clock)
on_timeout = data.get('on_timeout', currently_on_timeout)
if (not isinstance(on_clock, bool) or
not isinstance(on_timeout, bool)):
return HttpResponseBadRequest(
'on_clock and on_timeout must be boolean.')
if on_clock and on_timeout:
return HttpResponseBadRequest(
'Cannot be on mission clock and on timeout.')
# New event only necessary if changing status
if (on_clock != currently_on_clock or
on_timeout != currently_on_timeout):
clock_event = MissionClockEvent(
user=user,
team_on_clock=on_clock,
team_on_timeout=on_timeout)
# Request was valid. Save updates.
if takeoff_event:
takeoff_event.save()
if clock_event:
clock_event.save()
return HttpResponse(
json.dumps(user_json(user)), content_type="application/json")
|
en
| 0.733635
|
Teams view. Generate JSON-style dict for user. Gets a list of all teams. # Only standard users are exported GET/PUT specific team. PUT allows updating status. # Potential events to update. # Update whether UAS is in air. # New event only necessary if changing status # Update whether UAS in on clock or timeout. # New event only necessary if changing status # Request was valid. Save updates.
| 2.209967
| 2
|
sample/ch4/cross-iris.py
|
wagase/scraping
| 0
|
6626917
|
from sklearn import svm, metrics
import random, re
# Load the iris CSV data --- (1)
# Use a context manager so the file handle is closed (the original leaked it),
# and drop blank lines so a trailing newline cannot produce a bogus row that
# would crash split_data_label with an IndexError.
with open('iris.csv', 'r', encoding='utf-8') as fp:
    lines = [line for line in fp.read().split("\n") if line.strip()]
f_tonum = lambda n : float(n) if re.match(r'^[0-9\.]+$', n) else n
f_cols = lambda li: list(map(f_tonum,li.strip().split(',')))
csv = list(map(f_cols, lines))
del csv[0]           # drop the header row
random.shuffle(csv)  # shuffle so each fold gets a random sample
# Split the data into K folds --- (2)
K = 5
csvk = [ [] for _ in range(K) ]
for i, row in enumerate(csv):
    csvk[i % K].append(row)
# Split rows into a feature list and a label list
def split_data_label(rows):
    """Return (features, labels): first 4 columns vs. the 5th of each row."""
    data = []; label = []
    for row in rows:
        data.append(row[0:4])
        label.append(row[4])
    return (data, label)
# Compute the accuracy --- (3)
def calc_score(test, train):
    """Train an SVM on *train* and return its accuracy on *test*."""
    test_f, test_l = split_data_label(test)
    train_f, train_l = split_data_label(train)
    # Fit on the training data, classify the test data, score the result.
    clf = svm.SVC()
    clf.fit(train_f, train_l)
    pre = clf.predict(test_f)
    return metrics.accuracy_score(test_l, pre)
# Compute the accuracy for each of the K folds --- (4)
score_list = []
for testc in csvk:
    # Every fold except testc forms the training data.
    trainc = []
    for fold in csvk:
        if fold != testc:
            trainc += fold
    score_list.append(calc_score(testc, trainc))
print("各正解率=", score_list)
print("平均正解率=", sum(score_list) / len(score_list))
|
from sklearn import svm, metrics
import random, re
# アヤメのCSVデータを読み込む --- (※1)
lines = open('iris.csv', 'r', encoding='utf-8').read().split("\n")
f_tonum = lambda n : float(n) if re.match(r'^[0-9\.]+$', n) else n
f_cols = lambda li: list(map(f_tonum,li.strip().split(',')))
csv = list(map(f_cols, lines))
del csv[0] # 先頭のヘッダ行を削除
random.shuffle(csv) # データをシャッフル
# データをK分割する --- (※2)
K = 5
csvk = [ [] for i in range(K) ]
for i in range(len(csv)):
csvk[i % K].append(csv[i])
# リストを訓練データとラベルに分割する関数
def split_data_label(rows):
data = []; label = []
for row in rows:
data.append(row[0:4])
label.append(row[4])
return (data, label)
# 正解率を求める --- (※3)
def calc_score(test, train):
test_f, test_l = split_data_label(test)
train_f, train_l = split_data_label(train)
# 区連データを学習して分類して正解率を求める
clf = svm.SVC()
clf.fit(train_f, train_l)
pre = clf.predict(test_f)
return metrics.accuracy_score(test_l, pre)
# K分割したデータについて正解率を求める --- (※4)
score_list = []
for testc in csvk:
# testc以外のデータを訓練データとする
trainc = []
for i in csvk:
if i != testc: trainc += i
sc = calc_score(testc, trainc)
score_list.append(sc)
print("各正解率=", score_list)
print("平均正解率=", sum(score_list) / len(score_list))
|
ja
| 0.999637
|
# アヤメのCSVデータを読み込む --- (※1) # 先頭のヘッダ行を削除 # データをシャッフル # データをK分割する --- (※2) # リストを訓練データとラベルに分割する関数 # 正解率を求める --- (※3) # 区連データを学習して分類して正解率を求める # K分割したデータについて正解率を求める --- (※4) # testc以外のデータを訓練データとする
| 2.63743
| 3
|
scripts/db_add_sentiment.py
|
dwlmt/Story-Untangling
| 7
|
6626918
|
<gh_stars>1-10
import argparse
import asyncio
from concurrent.futures.process import ProcessPoolExecutor
from story_untangling.dataset_readers.dataset_features import save_sentiment
# SQLAlchemy engine options: recycle pooled connections hourly and relax
# SQLite's same-thread check so the connection can be shared across workers.
engine_kwargs = {"pool_recycle": 3600, "connect_args": {'timeout': 1000, "check_same_thread": False}}
async def add_sentiment_features(args):
    """Compute and persist per-sentence sentiment for every story in the database.

    args: dict with keys "database", "batch_size" and "max_workers".
    """
    database = args["database"]
    dataset_db = f"sqlite:///{database}"
    loop = asyncio.get_event_loop()
    # Sentiment scoring is CPU bound, so fan it out to a process pool.
    with ProcessPoolExecutor(max_workers=args["max_workers"]) as executor:
        await save_sentiment(args["batch_size"], dataset_db, executor, loop)
parser = argparse.ArgumentParser(
    description='Add per sentence sentiment information to the database.')
parser.add_argument('--database', required=True, type=str, help="The database.")
parser.add_argument('--batch-size', type=int, default=1000, help="Batch size. default: 1000")
# Help text fixed: this flag sizes the worker pool; the old text about "HDP topics"
# was a copy-paste error from another script.
parser.add_argument('--max-workers', type=int, default=16, help="Number of worker processes to use. Default: 16")
args = parser.parse_args()
loop = asyncio.get_event_loop()
# add_sentiment_features returns None; run it purely for its side effects
# (the old code misleadingly bound the result to `dataset_db`).
loop.run_until_complete(add_sentiment_features(vars(args)))
|
import argparse
import asyncio
from concurrent.futures.process import ProcessPoolExecutor
from story_untangling.dataset_readers.dataset_features import save_sentiment
engine_kwargs = {"pool_recycle": 3600, "connect_args": {'timeout': 1000, "check_same_thread": False}}
async def add_sentiment_features(args):
database = args["database"]
dataset_db = f"sqlite:///{database}"
loop = asyncio.get_event_loop()
with ProcessPoolExecutor(max_workers=args["max_workers"]) as executor:
await save_sentiment(args["batch_size"], dataset_db, executor, loop)
parser = argparse.ArgumentParser(
description='Add per sentence sentiment information to the database.')
parser.add_argument('--database', required=True, type=str, help="The database.")
parser.add_argument('--batch-size', type=int, default=1000, help="Batch size. default: 1000")
parser.add_argument('--max-workers', type=int, default=16, help="Number of topics to use from HDP. Default: 16")
args = parser.parse_args()
loop = asyncio.get_event_loop()
dataset_db = loop.run_until_complete(add_sentiment_features(vars(args)))
|
none
| 1
| 2.517728
| 3
|
|
Python/Regex And Parsing/Hex Color Code.py
|
abivilion/Hackerank-Solutions-
| 0
|
6626919
|
import re, sys
# Match 6- or 3-digit hex colour codes that appear as values, i.e. preceded by
# whitespace or ':' (this skips codes in selector position).
# Raw string so regex escapes like \s are explicit rather than relying on
# Python passing unknown string escapes through unchanged; compiled once
# instead of re-parsing the pattern for every input line.
HEX_COLOR = re.compile(r'[\s:](#[a-f0-9]{6}|#[a-f0-9]{3})', re.I)
for line in sys.stdin:
    for code in HEX_COLOR.findall(line):
        print(code)
|
import re, sys
# print(sys.stdin)
for i in sys.stdin:
# print(i)
for j in re.findall('[\s:](#[a-f0-9]{6}|#[a-f0-9]{3})', i, re.I):
print(j)
|
fa
| 0.082394
|
# print(sys.stdin) # print(i) #[a-f0-9]{6}|#[a-f0-9]{3})', i, re.I):
| 2.949183
| 3
|
mahjong/hand_calculating/fu.py
|
otamajakusi/mahjong
| 0
|
6626920
|
# -*- coding: utf-8 -*-
from mahjong.constants import HONOR_INDICES, TERMINAL_INDICES
from mahjong.meld import Meld
from mahjong.utils import contains_terminals, is_pair, is_pon_or_kan, simplify
class FuCalculator(object):
    """Calculates the fu (minipoints) of a winning riichi mahjong hand."""

    # Reason labels attached to each fu entry so callers can explain the score.
    BASE = "base"
    PENCHAN = "penchan"
    KANCHAN = "kanchan"
    VALUED_PAIR = "valued_pair"
    DOUBLE_VALUED_PAIR = "double_valued_pair"
    PAIR_WAIT = "pair_wait"
    TSUMO = "tsumo"
    HAND_WITHOUT_FU = "hand_without_fu"
    CLOSED_PON = "closed_pon"
    OPEN_PON = "open_pon"
    CLOSED_TERMINAL_PON = "closed_terminal_pon"
    OPEN_TERMINAL_PON = "open_terminal_pon"
    CLOSED_KAN = "closed_kan"
    OPEN_KAN = "open_kan"
    CLOSED_TERMINAL_KAN = "closed_terminal_kan"
    OPEN_TERMINAL_KAN = "open_terminal_kan"
    def calculate_fu(
        self,
        hand,
        win_tile,
        win_group,
        config,
        valued_tiles=None,
        melds=None,
    ):
        """
        Calculate hand fu with explanations.
        :param hand: list of the hand's sets in 34-tile format
        :param win_tile: 136 tile format
        :param win_group: one set where win tile exists
        :param config: HandConfig object
        :param valued_tiles: dragons, player wind, round wind
        :param melds: opened sets
        :return: (list of {"fu": int, "reason": str} dicts, rounded total fu)
        """
        win_tile_34 = win_tile // 4  # convert 136-tile id to 34-tile id
        if not valued_tiles:
            valued_tiles = []
        if not melds:
            melds = []
        fu_details = []
        # Seven pairs (chiitoitsu) is always a flat 25 fu.
        if len(hand) == 7:
            return [{"fu": 25, "reason": FuCalculator.BASE}], 25
        pair = [x for x in hand if is_pair(x)][0]
        pon_sets = [x for x in hand if is_pon_or_kan(x)]
        # Separate the hand's closed chi sets from melded (open) chi sets;
        # each open meld cancels exactly one matching set from the hand.
        copied_opened_melds = [x.tiles_34 for x in melds if x.type == Meld.CHI]
        closed_chi_sets = []
        for x in hand:
            if x not in copied_opened_melds:
                closed_chi_sets.append(x)
            else:
                copied_opened_melds.remove(x)
        is_open_hand = any([x.opened for x in melds])
        # Wait-shape fu only applies when the winning tile completes a closed chi.
        if win_group in closed_chi_sets:
            tile_index = simplify(win_tile_34)
            # penchan (edge wait)
            if contains_terminals(win_group):
                # 1-2-... wait
                if tile_index == 2 and win_group.index(win_tile_34) == 2:
                    fu_details.append({"fu": 2, "reason": FuCalculator.PENCHAN})
                # 8-9-... wait
                elif tile_index == 6 and win_group.index(win_tile_34) == 0:
                    fu_details.append({"fu": 2, "reason": FuCalculator.PENCHAN})
            # kanchan (closed wait) waiting 5-...-7
            if win_group.index(win_tile_34) == 1:
                fu_details.append({"fu": 2, "reason": FuCalculator.KANCHAN})
        # valued pair (dragons / relevant winds)
        count_of_valued_pairs = valued_tiles.count(pair[0])
        if count_of_valued_pairs == 1:
            fu_details.append({"fu": 2, "reason": FuCalculator.VALUED_PAIR})
        # east-east pair when you are on east gave double fu
        if count_of_valued_pairs == 2:
            fu_details.append({"fu": 4, "reason": FuCalculator.DOUBLE_VALUED_PAIR})
        # pair wait (tanki)
        if is_pair(win_group):
            fu_details.append({"fu": 2, "reason": FuCalculator.PAIR_WAIT})
        # Score each pon/kan: fu depends on open vs closed, kan vs pon, and
        # whether the set is made of terminals/honors.
        for set_item in pon_sets:
            open_meld = [x for x in melds if set_item == x.tiles_34]
            open_meld = open_meld and open_meld[0] or None
            set_was_open = open_meld and open_meld.opened or False
            is_kan_set = (open_meld and (open_meld.type == Meld.KAN or open_meld.type == Meld.SHOUMINKAN)) or False
            # NOTE(review): "is_honor" is also True for terminals; the fu values
            # for terminals and honors coincide, so the shared flag works here.
            is_honor = set_item[0] in TERMINAL_INDICES + HONOR_INDICES
            # we win by ron on the third pon tile, our pon will be count as open
            if not config.is_tsumo and set_item == win_group:
                set_was_open = True
            if is_honor:
                if is_kan_set:
                    if set_was_open:
                        fu_details.append({"fu": 16, "reason": FuCalculator.OPEN_TERMINAL_KAN})
                    else:
                        fu_details.append({"fu": 32, "reason": FuCalculator.CLOSED_TERMINAL_KAN})
                else:
                    if set_was_open:
                        fu_details.append({"fu": 4, "reason": FuCalculator.OPEN_TERMINAL_PON})
                    else:
                        fu_details.append({"fu": 8, "reason": FuCalculator.CLOSED_TERMINAL_PON})
            else:
                if is_kan_set:
                    if set_was_open:
                        fu_details.append({"fu": 8, "reason": FuCalculator.OPEN_KAN})
                    else:
                        fu_details.append({"fu": 16, "reason": FuCalculator.CLOSED_KAN})
                else:
                    if set_was_open:
                        fu_details.append({"fu": 2, "reason": FuCalculator.OPEN_PON})
                    else:
                        fu_details.append({"fu": 4, "reason": FuCalculator.CLOSED_PON})
        add_tsumo_fu = len(fu_details) > 0 or config.options.fu_for_pinfu_tsumo
        if config.is_tsumo and add_tsumo_fu:
            # 2 additional fu for tsumo (but not for pinfu)
            fu_details.append({"fu": 2, "reason": FuCalculator.TSUMO})
        if is_open_hand and not len(fu_details) and config.options.fu_for_open_pinfu:
            # there is no 1-20 hands, so we had to add additional fu
            fu_details.append({"fu": 2, "reason": FuCalculator.HAND_WITHOUT_FU})
        # Base fu: 20 for open hands or tsumo, 30 for a closed ron.
        if is_open_hand or config.is_tsumo:
            fu_details.append({"fu": 20, "reason": FuCalculator.BASE})
        else:
            fu_details.append({"fu": 30, "reason": FuCalculator.BASE})
        return fu_details, self.round_fu(fu_details)
    def round_fu(self, fu_details):
        """Sum the fu entries and round up to the next multiple of 10 (22 -> 30 etc.)."""
        fu = sum([x["fu"] for x in fu_details])
        return (fu + 9) // 10 * 10
|
# -*- coding: utf-8 -*-
from mahjong.constants import HONOR_INDICES, TERMINAL_INDICES
from mahjong.meld import Meld
from mahjong.utils import contains_terminals, is_pair, is_pon_or_kan, simplify
class FuCalculator(object):
BASE = "base"
PENCHAN = "penchan"
KANCHAN = "kanchan"
VALUED_PAIR = "valued_pair"
DOUBLE_VALUED_PAIR = "double_valued_pair"
PAIR_WAIT = "pair_wait"
TSUMO = "tsumo"
HAND_WITHOUT_FU = "hand_without_fu"
CLOSED_PON = "closed_pon"
OPEN_PON = "open_pon"
CLOSED_TERMINAL_PON = "closed_terminal_pon"
OPEN_TERMINAL_PON = "open_terminal_pon"
CLOSED_KAN = "closed_kan"
OPEN_KAN = "open_kan"
CLOSED_TERMINAL_KAN = "closed_terminal_kan"
OPEN_TERMINAL_KAN = "open_terminal_kan"
def calculate_fu(
self,
hand,
win_tile,
win_group,
config,
valued_tiles=None,
melds=None,
):
"""
Calculate hand fu with explanations
:param hand:
:param win_tile: 136 tile format
:param win_group: one set where win tile exists
:param config: HandConfig object
:param valued_tiles: dragons, player wind, round wind
:param melds: opened sets
:return:
"""
win_tile_34 = win_tile // 4
if not valued_tiles:
valued_tiles = []
if not melds:
melds = []
fu_details = []
if len(hand) == 7:
return [{"fu": 25, "reason": FuCalculator.BASE}], 25
pair = [x for x in hand if is_pair(x)][0]
pon_sets = [x for x in hand if is_pon_or_kan(x)]
copied_opened_melds = [x.tiles_34 for x in melds if x.type == Meld.CHI]
closed_chi_sets = []
for x in hand:
if x not in copied_opened_melds:
closed_chi_sets.append(x)
else:
copied_opened_melds.remove(x)
is_open_hand = any([x.opened for x in melds])
if win_group in closed_chi_sets:
tile_index = simplify(win_tile_34)
# penchan
if contains_terminals(win_group):
# 1-2-... wait
if tile_index == 2 and win_group.index(win_tile_34) == 2:
fu_details.append({"fu": 2, "reason": FuCalculator.PENCHAN})
# 8-9-... wait
elif tile_index == 6 and win_group.index(win_tile_34) == 0:
fu_details.append({"fu": 2, "reason": FuCalculator.PENCHAN})
# kanchan waiting 5-...-7
if win_group.index(win_tile_34) == 1:
fu_details.append({"fu": 2, "reason": FuCalculator.KANCHAN})
# valued pair
count_of_valued_pairs = valued_tiles.count(pair[0])
if count_of_valued_pairs == 1:
fu_details.append({"fu": 2, "reason": FuCalculator.VALUED_PAIR})
# east-east pair when you are on east gave double fu
if count_of_valued_pairs == 2:
fu_details.append({"fu": 4, "reason": FuCalculator.DOUBLE_VALUED_PAIR})
# pair wait
if is_pair(win_group):
fu_details.append({"fu": 2, "reason": FuCalculator.PAIR_WAIT})
for set_item in pon_sets:
open_meld = [x for x in melds if set_item == x.tiles_34]
open_meld = open_meld and open_meld[0] or None
set_was_open = open_meld and open_meld.opened or False
is_kan_set = (open_meld and (open_meld.type == Meld.KAN or open_meld.type == Meld.SHOUMINKAN)) or False
is_honor = set_item[0] in TERMINAL_INDICES + HONOR_INDICES
# we win by ron on the third pon tile, our pon will be count as open
if not config.is_tsumo and set_item == win_group:
set_was_open = True
if is_honor:
if is_kan_set:
if set_was_open:
fu_details.append({"fu": 16, "reason": FuCalculator.OPEN_TERMINAL_KAN})
else:
fu_details.append({"fu": 32, "reason": FuCalculator.CLOSED_TERMINAL_KAN})
else:
if set_was_open:
fu_details.append({"fu": 4, "reason": FuCalculator.OPEN_TERMINAL_PON})
else:
fu_details.append({"fu": 8, "reason": FuCalculator.CLOSED_TERMINAL_PON})
else:
if is_kan_set:
if set_was_open:
fu_details.append({"fu": 8, "reason": FuCalculator.OPEN_KAN})
else:
fu_details.append({"fu": 16, "reason": FuCalculator.CLOSED_KAN})
else:
if set_was_open:
fu_details.append({"fu": 2, "reason": FuCalculator.OPEN_PON})
else:
fu_details.append({"fu": 4, "reason": FuCalculator.CLOSED_PON})
add_tsumo_fu = len(fu_details) > 0 or config.options.fu_for_pinfu_tsumo
if config.is_tsumo and add_tsumo_fu:
# 2 additional fu for tsumo (but not for pinfu)
fu_details.append({"fu": 2, "reason": FuCalculator.TSUMO})
if is_open_hand and not len(fu_details) and config.options.fu_for_open_pinfu:
# there is no 1-20 hands, so we had to add additional fu
fu_details.append({"fu": 2, "reason": FuCalculator.HAND_WITHOUT_FU})
if is_open_hand or config.is_tsumo:
fu_details.append({"fu": 20, "reason": FuCalculator.BASE})
else:
fu_details.append({"fu": 30, "reason": FuCalculator.BASE})
return fu_details, self.round_fu(fu_details)
def round_fu(self, fu_details):
# 22 -> 30 and etc.
fu = sum([x["fu"] for x in fu_details])
return (fu + 9) // 10 * 10
|
en
| 0.850943
|
# -*- coding: utf-8 -*- Calculate hand fu with explanations :param hand: :param win_tile: 136 tile format :param win_group: one set where win tile exists :param config: HandConfig object :param valued_tiles: dragons, player wind, round wind :param melds: opened sets :return: # penchan # 1-2-... wait # 8-9-... wait # kanchan waiting 5-...-7 # valued pair # east-east pair when you are on east gave double fu # pair wait # we win by ron on the third pon tile, our pon will be count as open # 2 additional fu for tsumo (but not for pinfu) # there is no 1-20 hands, so we had to add additional fu # 22 -> 30 and etc.
| 2.588241
| 3
|
application.py
|
tcharts-boop/registration_notifications_usf
| 0
|
6626921
|
<reponame>tcharts-boop/registration_notifications_usf
import mechanicalsoup
from bs4 import BeautifulSoup
import unicodedata
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import csv
import sys
class ApplicationMethods:
    """Helpers for loading configuration and prompting for run parameters."""

    @staticmethod
    def load_file():
        """Load credentials and the (term, CRN) list from crn_text_file.txt.

        File layout: line 1 email, line 2 password, then one "term, crn"
        pair per line (blank lines ignored).
        Returns {'list': [[term, crn], ...], 'email': ..., 'password': ...}.
        """
        file_name = 'crn_text_file.txt'
        with open(file_name, 'r') as file:
            email = file.readline().strip()
            password = file.readline().strip()
            # Skip blank lines; split the remaining lines on ', ' into [term, crn].
            term_crn_list = [line.strip().split(', ') for line in file if line.strip()]
        print('File loaded successfully.')
        return {'list': term_crn_list, 'email': email, 'password': password}

    @staticmethod
    def set_duration():
        """Prompt for a run duration of 1-4 hours; return seconds plus a zeroed counter."""
        duration_count = 0
        while True:
            try:
                duration_hours = int(input('Duration (in hours): '))
            except ValueError:
                # A bare `except:` would also swallow KeyboardInterrupt /
                # SystemExit; only retry on genuinely unparsable input.
                print("Please enter a number value between 1 - 4.")
                continue
            if 1 <= duration_hours <= 4:
                break
            print("Please enter a number value between 1 - 4.")
        # Convert hours to seconds.
        duration = int(duration_hours * 3600)
        return {'duration': duration, 'count': duration_count}

    @staticmethod
    def set_interval():
        """Prompt for a polling interval of 1-10 minutes; return it in seconds."""
        while True:
            try:
                interval_minutes = int(input('Interval (in minutes): '))
            except ValueError:
                # Same narrowing as set_duration: retry only on bad numbers.
                print("Please enter a number value between 1 - 10.")
                continue
            if 1 <= interval_minutes <= 10:
                break
            print("Please enter a number value between 1 - 10.")
        # Convert minutes to seconds.
        return int(interval_minutes * 60)
class CheckSeats():
    """Scrapes seat availability for one CRN from the USF registrar staff search."""

    # Maps a human-readable term name to the registrar's HTML option value.
    TERM_VALUES = {
        'Spring 2021': '202101',
        'Fall 2020': '202008',
        'Summer 2020': '202005',
    }

    def __init__(self, term, crn):
        self.term = term
        self.crn = crn

    def check_seats(self):
        """Submit the registrar query for this term/CRN and return the class row as a dict.

        Raises ValueError for an unrecognized term name.
        """
        try:
            term_value = self.TERM_VALUES[self.term]
        except KeyError:
            # The original if/elif chain silently fell through for unknown
            # terms and crashed later with a NameError on term_value;
            # fail fast with a clear message instead.
            raise ValueError('Unknown term: %r' % self.term)
        browser = mechanicalsoup.StatefulBrowser()
        url = 'http://www.registrar.usf.edu/ssearch/staff/staff.php'
        browser.open(url)
        # Fill in and submit the search form (the page's POST form).
        browser.select_form('form[method="post"]')
        browser["P_SEMESTER"] = term_value
        browser["P_REF"] = self.crn
        response = browser.submit_selected()
        html_doc = response.text
        soup = BeautifulSoup(html_doc, 'lxml')
        # Header cells provide the keys, data cells the values of the result row.
        keys = soup.select('#results tr th')
        values = soup.select('#results tr td')
        class_dict = dict()
        for i in range(len(values)):
            # NFKC-normalize so non-breaking spaces etc. become plain text.
            key = unicodedata.normalize("NFKC", keys[i].get_text())
            value = unicodedata.normalize("NFKC", values[i].get_text())
            class_dict[key] = value
        print('CRN ' + self.crn + ' - Seats Remaining: ' + class_dict['SEATSREMAIN'])
        return class_dict
class SendEmail():
    """Builds and sends the seat-availability notification email via Office 365."""

    def __init__(self, email, password, message_data):
        self.email = email
        self.password = password
        self.message_data = message_data

    def create_email_message(self):
        """Assemble a plain-text MIME message listing every CRN with open seats.

        Returns the message serialized as a string, ready for sendmail().
        """
        notification = MIMEMultipart()
        # The notification is sent from the account to itself.
        notification['From'] = self.email
        notification['To'] = self.email
        notification['Subject'] = 'Some of your classes are available!'
        # One line per (term, crn, name, seats) row of scraped class data.
        body = ''.join(
            '{} - CRN {}: {} has {} seats available.\n'.format(*row)
            for row in self.message_data
        )
        notification.attach(MIMEText(body, 'plain'))
        serialized = notification.as_string()
        print('Message created.')
        return serialized

    def send_email(self, message):
        """Deliver *message* to our own address over an encrypted SMTP session."""
        # @usf.edu accounts live on Office 365, which requires port 587 + STARTTLS.
        server = smtplib.SMTP('outlook.office365.com', 587)
        server.starttls()
        server.login(self.email, self.password)
        server.sendmail(self.email, self.email, message)
        server.quit()
        print('Email sent successfully.')
|
import mechanicalsoup
from bs4 import BeautifulSoup
import unicodedata
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import csv
import sys
class ApplicationMethods:
# Static method to load text file into variables/list
@staticmethod
def load_file():
file_name = 'crn_text_file.txt'
with open(file_name, 'r') as file:
# Set up email and password variables
email = file.readline().strip()
password = file.readline().strip()
# Strip line and split by ', ' into array if the value is not blank
term_crn_list = [line.strip().split(', ') for line in file if line.strip()]
print('File loaded successfully.')
# Return variables in dictionary data type
return {'list': term_crn_list, 'email': email, 'password': password}
# Static method to return duration and duration counter variables
@staticmethod
def set_duration():
duration_count = 0
while True:
# Try to get an int input
try:
duration_hours = int(input('Duration (in hours): '))
# Try again if not int input
except:
print("Please enter a number value between 1 - 4.")
continue
else:
# If duration_hours between 1 and 4, break the loop
if duration_hours >= 1 and duration_hours <= 4:
break
# If not between 1 and 4, continue loop
else:
print("Please enter a number value between 1 - 4.")
continue
# Convert hours to seconds
duration = int(duration_hours * 3600)
# Return variables in dictionary data type
return {'duration': duration, 'count': duration_count}
@staticmethod
def set_interval():
while True:
# Try to get an int input
try:
interval_minutes = int(input('Interval (in minutes): '))
# Try again if not int input
except:
print("Please enter a number value between 1 - 10.")
continue
else:
# If interval_minutes between 1 and 10, break the loop
if interval_minutes >= 1 and interval_minutes <= 10:
break
# If not between 1 and 10, continue loop
else:
print("Please enter a number value between 1 - 10.")
continue
# Convert minutes to seconds
interval = int(interval_minutes * 60)
# Return interval variable
return interval
class CheckSeats():
# Initialize CheckSeats class with term and crn variables
def __init__(self, term, crn):
self.term = term
self.crn = crn
# Method to scrape class data from USF registrar query page
def check_seats(self):
# Creates instance of StatefulBrowser
browser = mechanicalsoup.StatefulBrowser()
# Sets up term value to correct HTML option value
if self.term == 'Spring 2021':
term_value = '202101'
elif self.term == 'Fall 2020':
term_value = '202008'
elif self.term == 'Summer 2020':
term_value = '202005'
url = 'http://www.registrar.usf.edu/ssearch/staff/staff.php'
browser.open(url)
# Select form with HTML method as post
browser.select_form('form[method="post"]')
# Fill in the term value
browser["P_SEMESTER"] = term_value
# Fill in the crn value
browser["P_REF"] = self.crn
# Submit query
response = browser.submit_selected()
# Load query page HTML response into variable
html_doc = response.text
# Instance of BeautifulSoup
soup = BeautifulSoup(html_doc, 'lxml')
# Keys are the text value of headers
keys = soup.select('#results tr th')
# Values are the text values of class data
values = soup.select('#results tr td')
# Set up class_dict as dictionary data type
class_dict = dict()
# For loop in the length of the values
for i in range(len(values)):
key = unicodedata.normalize("NFKC", keys[i].get_text())
value = unicodedata.normalize("NFKC", values[i].get_text())
# Add a value to our class_dict dictionary
class_dict[key] = value
# Print statement to show affected CRN
print('CRN ' + self.crn + ' - Seats Remaining: ' + class_dict['SEATSREMAIN'])
# Return class_dict dictionary
return class_dict
class SendEmail():
# Initialize SendEmail class with email, password, and message_data variables
def __init__(self, email, password, message_data):
self.email = email
self.password = password
self.message_data = message_data
# Method that creates our email message from scrapped class data
def create_email_message(self):
# msg euqals instance of MIMEMultipart
msg = MIMEMultipart()
# From ourselves
msg['From'] = self.email
# To ourselves
msg['To'] = self.email
# Subject line
msg['Subject'] = 'Some of your classes are available!'
# Set up body variable with empty string
body = ''
# Append to body for however many crn's had seats available
for i in range(len(self.message_data)):
# Format each string with a row of the message_data array
body += '{} - CRN {}: {} has {} seats available.\n'.format(*self.message_data[i])
# Attch the method as plain text
msg.attach(MIMEText(body, 'plain'))
# Set the string value of msg to msg_text
msg_text = msg.as_string()
print('Message created.')
# Return msg_text variable
return msg_text
# Method that sends created message to passed in email
def send_email(self, message):
# SMTP is outlook.office365.com (@usf.edu is an instance of office 365)
smtp = 'outlook.office365.com'
# Required port for office 365 smtp
port = 587
# Set up SMTP server
server = smtplib.SMTP(smtp, port)
# Start SMTP server with encryption
server.starttls()
# Login in with email in password
server.login(self.email, self.password)
# Send email to ourselves with created message
server.sendmail(self.email, self.email, message)
# Quit the server
server.quit()
print('Email sent successfully.')
|
en
| 0.766042
|
# Static method to load text file into variables/list # Set up email and password variables # Strip line and split by ', ' into array if the value is not blank # Return variables in dictionary data type # Static method to return duration and duration counter variables # Try to get an int input # Try again if not int input # If duration_hours between 1 and 4, break the loop # If not between 1 and 4, continue loop # Convert hours to seconds # Return variables in dictionary data type # Try to get an int input # Try again if not int input # If interval_minutes between 1 and 10, break the loop # If not between 1 and 10, continue loop # Convert minutes to seconds # Return interval variable # Initialize CheckSeats class with term and crn variables # Method to scrape class data from USF registrar query page # Creates instance of StatefulBrowser # Sets up term value to correct HTML option value # Select form with HTML method as post # Fill in the term value # Fill in the crn value # Submit query # Load query page HTML response into variable # Instance of BeautifulSoup # Keys are the text value of headers # Values are the text values of class data # Set up class_dict as dictionary data type # For loop in the length of the values # Add a value to our class_dict dictionary # Print statement to show affected CRN # Return class_dict dictionary # Initialize SendEmail class with email, password, and message_data variables # Method that creates our email message from scrapped class data # msg euqals instance of MIMEMultipart # From ourselves # To ourselves # Subject line # Set up body variable with empty string # Append to body for however many crn's had seats available # Format each string with a row of the message_data array # Attch the method as plain text # Set the string value of msg to msg_text # Return msg_text variable # Method that sends created message to passed in email # SMTP is outlook.office365.com (@usf.edu is an instance of office 365) # Required port for office 365 
smtp # Set up SMTP server # Start SMTP server with encryption # Login in with email in password # Send email to ourselves with created message # Quit the server
| 3.056531
| 3
|
package/spack-openexr/package.py
|
ctuning/ck-spack
| 1
|
6626922
|
<gh_stars>1-10
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Openexr(Package):
    """OpenEXR Graphics Tools (high dynamic-range image file format)"""
    homepage = "http://www.openexr.com/"
    url = "https://savannah.nongnu.org/download/openexr/openexr-2.2.0.tar.gz"
    # Known releases with their checksums.
    version('2.2.0', 'b64e931c82aa3790329c21418373db4e')
    version('2.1.0', '33735d37d2ee01c6d8fbd0df94fb8b43')
    version('2.0.1', '4387e6050d2faa65dd5215618ff2ddce')
    version('1.7.0', '27113284f7d26a58f853c346e0851d7a')
    version('1.6.1', '11951f164f9c872b183df75e66de145a')
    version('1.5.0', '55342d2256ab3ae99da16f16b2e12ce9')
    version('1.4.0a', 'd0a4b9a930c766fa51561b05fb204afe')
    version('1.3.2', '1522fe69135016c52eb88fc7d8514409')
    variant('debug', default=False,
            description='Builds a debug version of the libraries')
    depends_on('pkgconfig', type='build')
    depends_on('ilmbase')
    def install(self, spec, prefix):
        """Build via the classic configure/make flow, honoring the +debug variant."""
        configure_options = ['--prefix={0}'.format(prefix)]
        # Debug builds are opt-in; disable debug unless the variant was requested.
        if '+debug' not in spec:
            configure_options.append('--disable-debug')
        configure(*configure_options)
        make('install')
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Openexr(Package):
    """OpenEXR Graphics Tools (high dynamic-range image file format)"""
    homepage = "http://www.openexr.com/"
    url = "https://savannah.nongnu.org/download/openexr/openexr-2.2.0.tar.gz"
    # Known releases with their MD5 checksums for download verification.
    version('2.2.0', 'b64e931c82aa3790329c21418373db4e')
    version('2.1.0', '33735d37d2ee01c6d8fbd0df94fb8b43')
    version('2.0.1', '4387e6050d2faa65dd5215618ff2ddce')
    version('1.7.0', '27113284f7d26a58f853c346e0851d7a')
    version('1.6.1', '11951f164f9c872b183df75e66de145a')
    version('1.5.0', '55342d2256ab3ae99da16f16b2e12ce9')
    version('1.4.0a', 'd0a4b9a930c766fa51561b05fb204afe')
    version('1.3.2', '1522fe69135016c52eb88fc7d8514409')
    # Optional debug build of the libraries (off by default).
    variant('debug', default=False,
            description='Builds a debug version of the libraries')
    depends_on('pkgconfig', type='build')  # build-time only dependency
    depends_on('ilmbase')  # link dependency shipped alongside OpenEXR
    def install(self, spec, prefix):
        """Configure, build, and install OpenEXR with its autotools build."""
        configure_options = ['--prefix={0}'.format(prefix)]
        # Unless the '+debug' variant was requested, explicitly disable
        # debug builds so the default is an optimized library.
        if '+debug' not in spec:
            configure_options.append('--disable-debug')
        configure(*configure_options)
        # 'make install' compiles and installs in a single step here.
        make('install')
|
en
| 0.749616
|
############################################################################## # Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by <NAME>, <EMAIL>, All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## OpenEXR Graphics Tools (high dynamic-range image file format)
| 1.31146
| 1
|
models/proposal.py
|
NSLS-II/nsls2-api
| 0
|
6626923
|
<filename>models/proposal.py<gh_stars>0
from typing import Optional, List
from pydantic.main import BaseModel
class Proposal(BaseModel):
    """Pydantic model for a proposal record.

    NOTE(review): field semantics inferred from names/comments only —
    presumably the user lists hold usernames; confirm against callers.
    """
    # Unique identifier of the proposal.
    proposal_id: str
    users_admin: List[str]  # PI(s)
    # Non-admin users associated with the proposal.
    users: List[str]
|
<filename>models/proposal.py<gh_stars>0
from typing import Optional, List
from pydantic.main import BaseModel
class Proposal(BaseModel):
proposal_id: str
users_admin: List[str] # PI(s)
users: List[str]
|
none
| 1
| 2.208384
| 2
|
|
tests/integration-tests/tests/common/mpi_common.py
|
siddharthsalot/aws-parallelcluster
| 0
|
6626924
|
import logging
import pathlib
from assertpy import assert_that
from tests.common.assertions import assert_no_errors_in_logs, assert_scaling_worked
from tests.common.schedulers_common import get_scheduler_commands
# Maps cluster OS identifier -> CPU architecture -> the environment-modules
# name of the OpenMPI module to "module load" before compiling/submitting
# the MPI job.  An OS/architecture pair missing from this table is not
# supported by these tests (lookup raises KeyError).
OS_TO_ARCHITECTURE_TO_OPENMPI_MODULE = {
    "alinux": {"x86_64": "openmpi"},
    "alinux2": {"x86_64": "openmpi", "arm64": "openmpi"},
    "centos7": {"x86_64": "openmpi"},
    "ubuntu1604": {"x86_64": "openmpi"},
    "centos6": {"x86_64": "openmpi-x86_64"},
    "ubuntu1804": {"x86_64": "openmpi", "arm64": "openmpi"},
}
def _test_mpi(
    remote_command_executor,
    slots_per_instance,
    scheduler,
    os,
    architecture,
    region=None,
    stack_name=None,
    scaledown_idletime=None,
    verify_scaling=False,
):
    """Compile and run an MPI ring job on the cluster and verify its output.

    The job requests 2 * slots_per_instance slots so it spans two compute
    instances; its output (written to /shared/mpi.out by the submit script)
    is then checked for the expected 4-line ring-exchange transcript.

    :param remote_command_executor: executor used to run commands on the head node
    :param slots_per_instance: scheduler slots per compute instance
    :param scheduler: scheduler name, used to pick the command helpers
    :param os: cluster OS id (key into OS_TO_ARCHITECTURE_TO_OPENMPI_MODULE)
    :param architecture: CPU architecture ("x86_64" or "arm64")
    :param region, stack_name, scaledown_idletime: used only when
        verify_scaling is True, to assert scale-up to 2 and back to 0
    :param verify_scaling: when True, assert cluster scaling instead of
        waiting on the job status (job history may be gone after scale-down)
    """
    logging.info("Testing mpi job")
    datadir = pathlib.Path(__file__).parent / "data/mpi/"
    mpi_module = OS_TO_ARCHITECTURE_TO_OPENMPI_MODULE[os][architecture]
    # Compile mpi script
    command = "mpicc -o ring ring.c"
    if mpi_module != "no_module_available":
        command = "module load {0} && {1}".format(mpi_module, command)
    remote_command_executor.run_remote_command(command, additional_files=[str(datadir / "ring.c")])
    scheduler_commands = get_scheduler_commands(scheduler, remote_command_executor)
    # submit script using additional files
    result = scheduler_commands.submit_script(
        str(datadir / "mpi_submit_{0}.sh".format(mpi_module)), slots=2 * slots_per_instance
    )
    job_id = scheduler_commands.assert_job_submitted(result.stdout)
    if verify_scaling:
        assert_scaling_worked(
            scheduler_commands, region, stack_name, scaledown_idletime, expected_max=2, expected_final=0
        )
        # not checking assert_job_succeeded after cluster scale down cause the scheduler history might be gone
    else:
        scheduler_commands.wait_job_completed(job_id)
        scheduler_commands.assert_job_succeeded(job_id)
    mpi_out = remote_command_executor.run_remote_command("cat /shared/mpi.out").stdout
    # mpi_out expected output
    # Hello world from processor ip-192-168-53-169, rank 0 out of 2 processors
    # Process 0 received token -1 from process 1
    # Hello world from processor ip-192-168-60-9, rank 1 out of 2 processors
    # Process 1 received token -1 from process 0
    assert_that(mpi_out.splitlines()).is_length(4)
    assert_that(mpi_out).matches(r"Hello world from processor ip-.+, rank 0 out of 2 processors")
    assert_that(mpi_out).matches(r"Hello world from processor ip-.+, rank 1 out of 2 processors")
    assert_that(mpi_out).contains("Process 0 received token -1 from process 1")
    assert_that(mpi_out).matches("Process 1 received token -1 from process 0")
    # No errors should have been logged by the node daemons during the run.
    assert_no_errors_in_logs(remote_command_executor, ["/var/log/sqswatcher", "/var/log/jobwatcher"])
|
import logging
import pathlib
from assertpy import assert_that
from tests.common.assertions import assert_no_errors_in_logs, assert_scaling_worked
from tests.common.schedulers_common import get_scheduler_commands
OS_TO_ARCHITECTURE_TO_OPENMPI_MODULE = {
"alinux": {"x86_64": "openmpi"},
"alinux2": {"x86_64": "openmpi", "arm64": "openmpi"},
"centos7": {"x86_64": "openmpi"},
"ubuntu1604": {"x86_64": "openmpi"},
"centos6": {"x86_64": "openmpi-x86_64"},
"ubuntu1804": {"x86_64": "openmpi", "arm64": "openmpi"},
}
def _test_mpi(
remote_command_executor,
slots_per_instance,
scheduler,
os,
architecture,
region=None,
stack_name=None,
scaledown_idletime=None,
verify_scaling=False,
):
logging.info("Testing mpi job")
datadir = pathlib.Path(__file__).parent / "data/mpi/"
mpi_module = OS_TO_ARCHITECTURE_TO_OPENMPI_MODULE[os][architecture]
# Compile mpi script
command = "mpicc -o ring ring.c"
if mpi_module != "no_module_available":
command = "module load {0} && {1}".format(mpi_module, command)
remote_command_executor.run_remote_command(command, additional_files=[str(datadir / "ring.c")])
scheduler_commands = get_scheduler_commands(scheduler, remote_command_executor)
# submit script using additional files
result = scheduler_commands.submit_script(
str(datadir / "mpi_submit_{0}.sh".format(mpi_module)), slots=2 * slots_per_instance
)
job_id = scheduler_commands.assert_job_submitted(result.stdout)
if verify_scaling:
assert_scaling_worked(
scheduler_commands, region, stack_name, scaledown_idletime, expected_max=2, expected_final=0
)
# not checking assert_job_succeeded after cluster scale down cause the scheduler history might be gone
else:
scheduler_commands.wait_job_completed(job_id)
scheduler_commands.assert_job_succeeded(job_id)
mpi_out = remote_command_executor.run_remote_command("cat /shared/mpi.out").stdout
# mpi_out expected output
# Hello world from processor ip-192-168-53-169, rank 0 out of 2 processors
# Process 0 received token -1 from process 1
# Hello world from processor ip-192-168-60-9, rank 1 out of 2 processors
# Process 1 received token -1 from process 0
assert_that(mpi_out.splitlines()).is_length(4)
assert_that(mpi_out).matches(r"Hello world from processor ip-.+, rank 0 out of 2 processors")
assert_that(mpi_out).matches(r"Hello world from processor ip-.+, rank 1 out of 2 processors")
assert_that(mpi_out).contains("Process 0 received token -1 from process 1")
assert_that(mpi_out).matches("Process 1 received token -1 from process 0")
assert_no_errors_in_logs(remote_command_executor, ["/var/log/sqswatcher", "/var/log/jobwatcher"])
|
en
| 0.782109
|
# Compile mpi script # submit script using additional files # not checking assert_job_succeeded after cluster scale down cause the scheduler history might be gone # mpi_out expected output # Hello world from processor ip-192-168-53-169, rank 0 out of 2 processors # Process 0 received token -1 from process 1 # Hello world from processor ip-192-168-60-9, rank 1 out of 2 processors # Process 1 received token -1 from process 0
| 2.028123
| 2
|
main.py
|
ZackDowning/CommandRunner
| 0
|
6626925
|
from gui import ManagementFileBrowseWindow, ConfigFileBrowseWindow
from net_async import AsyncSessions, ForceSessionRetry
from getpass import getpass
import re
import time
# TODO: Add support for config file
# PyInstaller bundle command:
# pyinstaller -F --hidden-import PySimpleGUI,net_async --add-data templates;templates main.py
banner = ' ______ ______ \n' \
' / ____/___ ____ ___ ____ ___ ____ _____ ____/ / __ \\__ ______ ____ ___ _____\n' \
' / / / __ \\/ __ `__ \\/ __ `__ \\/ __ `/ __ \\/ __ / /_/ / / / / __ \\/ __ \\/ _ \\/ ___/\n' \
'/ /___/ /_/ / / / / / / / / / / / /_/ / / / / /_/ / _, _/ /_/ / / / / / / / __/ / \n' \
'\\____/\\____/_/ /_/ /_/_/ /_/ /_/\\__,_/_/ /_/\\__,_/_/ |_|\\__,_/_/ /_/_/ /_/\\___/_/ \n'
# Outputs failed device list to CSV file with columns:
# 'ip_address,connectivity,authentication,authorization,con_type,con_exception'
def output_failed_to_file(failed_list):
    """Write the failed-device report to 'failed_devices.csv'.

    Each entry of *failed_list* is a dict with keys 'ip_address',
    'connectivity', 'authentication', 'authorization' and 'exception';
    one CSV row is written per device after a fixed header row.
    An empty list yields a header-only file.
    """
    import csv  # local import: only this report writer needs it

    # csv.writer handles quoting, so exception text containing commas or
    # newlines no longer corrupts the file (the previous f-string join did).
    # newline='' is required so the csv module controls line endings.
    with open('failed_devices.csv', 'w', newline='') as file:
        writer = csv.writer(file)
        writer.writerow(
            ['ip_address', 'connectivity', 'authentication',
             'authorization', 'con_exception']
        )
        for device in failed_list:
            writer.writerow([
                device['ip_address'],
                device['connectivity'],
                device['authentication'],
                device['authorization'],
                device['exception'],
            ])
def command_runner():
    """Interactive entry point: collect credentials, commands and targets,
    then push the configuration to all devices concurrently.

    Flow: pick a file of management IPs via the GUI, prompt for credentials,
    read commands (inline or from a config file), optionally save the config,
    then run everything through AsyncSessions and report failures to CSV.
    """
    def cr(session):
        # Per-session worker passed to AsyncSessions: applies the commands
        # and (optionally) saves the running config.
        save_cmd = ''
        # Pick the platform-appropriate "save config" command.
        if session.devicetype == 'cisco_ios' or session.devicetype == 'cisco_ios_telnet':
            save_cmd = 'wr'
        elif session.devicetype == 'cisco_nxos':
            save_cmd = 'copy run start'
        try:
            if commands[0] != '':
                cmd = session.send_config_set(commands)
                # AAA rejection -> force the framework to retry the session.
                if cmd.__contains__('Authorization failed'):
                    raise ForceSessionRetry
        except IndexError:
            pass
        if save:
            # fast_cli can truncate slow 'write' output; disable it first.
            session.session.fast_cli = False
            cmd = session.send_command(save_cmd)
            if cmd.__contains__('Authorization failed'):
                raise ForceSessionRetry
    mgmt_ips = ManagementFileBrowseWindow().mgmt_ips
    print(banner)
    try:
        # mgmt_ips is None when no file was chosen -> len() raises TypeError.
        if len(mgmt_ips) == 0:
            print('No IP addresses found in file provided.')
            input('Press Enter to close.')
    except TypeError:
        print('No file provided.')
        input('Press Enter to close.')
    else:
        username = input('Enter Username: ')
        password = getpass('Enter Password: ')
        enable_pw = getpass('(If applicable) Enter Enable Password: ')
        while True:
            commands = input(
                """Type 'use_file' if you want to provide configuration file, or
                enter configuration commands separating each command with a comma and no space.
                Example: interface vlan 1,no ip address,shut
                Commands: """
            ).split(',')
            if 'use_file' in commands[0]:
                commands = ConfigFileBrowseWindow().cmds
            while True:
                save = input('Do you want to save the config? [Y]/N: ')
                # The empty alternative in r'[Yy]|' makes Enter default to Yes.
                if re.fullmatch(r'[Yy]|', save):
                    save = True
                    break
                elif re.fullmatch(r'[Nn]', save):
                    save = False
                    break
            start = time.perf_counter()
            print('Sending commands to devices...\n'
                  '------------------------------------------------')
            sessions = AsyncSessions(username, password, mgmt_ips, cr, enable_pw, True)
            end = time.perf_counter()
            print(f'------------------------------------------------'
                  f'\nCommands ran in {int(round(end - start, 0))} seconds.')
            if len(sessions.failed_devices) != 0:
                print('See failed_devices.csv for more information on failed devices')
                output_failed_to_file(sessions.failed_devices)
            print('\nFinished.')
            more_cmds = input('Do you want to send more commands? Y/[N]: ')
            if re.fullmatch(r'[Yy]', more_cmds):
                continue
            # Empty alternative: plain Enter defaults to No and exits.
            elif re.fullmatch(r'[Nn]|', more_cmds):
                break
            else:
                break
    input('Press Enter to close.')
if __name__ == '__main__':
    command_runner()
|
from gui import ManagementFileBrowseWindow, ConfigFileBrowseWindow
from net_async import AsyncSessions, ForceSessionRetry
from getpass import getpass
import re
import time
# TODO: Add support for config file
# PyInstaller bundle command:
# pyinstaller -F --hidden-import PySimpleGUI,net_async --add-data templates;templates main.py
banner = ' ______ ______ \n' \
' / ____/___ ____ ___ ____ ___ ____ _____ ____/ / __ \\__ ______ ____ ___ _____\n' \
' / / / __ \\/ __ `__ \\/ __ `__ \\/ __ `/ __ \\/ __ / /_/ / / / / __ \\/ __ \\/ _ \\/ ___/\n' \
'/ /___/ /_/ / / / / / / / / / / / /_/ / / / / /_/ / _, _/ /_/ / / / / / / / __/ / \n' \
'\\____/\\____/_/ /_/ /_/_/ /_/ /_/\\__,_/_/ /_/\\__,_/_/ |_|\\__,_/_/ /_/_/ /_/\\___/_/ \n'
# Outputs failed device list to CSV file with columns:
# 'ip_address,connectivity,authentication,authorization,con_type,con_exception'
def output_failed_to_file(failed_list):
with open('failed_devices.csv', 'w+') as file:
file.write(
'ip_address,connectivity,authentication,authorization,con_exception\n'
)
for device in failed_list:
ip_address = device['ip_address']
connectivity = device['connectivity']
authentication = device['authentication']
authorization = device['authorization']
exception = device['exception']
file.write(
f'{ip_address},{connectivity},{authentication},{authorization},{exception}\n'
)
def command_runner():
def cr(session):
save_cmd = ''
if session.devicetype == 'cisco_ios' or session.devicetype == 'cisco_ios_telnet':
save_cmd = 'wr'
elif session.devicetype == 'cisco_nxos':
save_cmd = 'copy run start'
try:
if commands[0] != '':
cmd = session.send_config_set(commands)
if cmd.__contains__('Authorization failed'):
raise ForceSessionRetry
except IndexError:
pass
if save:
session.session.fast_cli = False
cmd = session.send_command(save_cmd)
if cmd.__contains__('Authorization failed'):
raise ForceSessionRetry
mgmt_ips = ManagementFileBrowseWindow().mgmt_ips
print(banner)
try:
if len(mgmt_ips) == 0:
print('No IP addresses found in file provided.')
input('Press Enter to close.')
except TypeError:
print('No file provided.')
input('Press Enter to close.')
else:
username = input('Enter Username: ')
password = getpass('Enter Password: ')
enable_pw = getpass('(If applicable) Enter Enable Password: ')
while True:
commands = input(
"""Type 'use_file' if you want to provide configuration file, or
enter configuration commands separating each command with a comma and no space.
Example: interface vlan 1,no ip address,shut
Commands: """
).split(',')
if 'use_file' in commands[0]:
commands = ConfigFileBrowseWindow().cmds
while True:
save = input('Do you want to save the config? [Y]/N: ')
if re.fullmatch(r'[Yy]|', save):
save = True
break
elif re.fullmatch(r'[Nn]', save):
save = False
break
start = time.perf_counter()
print('Sending commands to devices...\n'
'------------------------------------------------')
sessions = AsyncSessions(username, password, mgmt_ips, cr, enable_pw, True)
end = time.perf_counter()
print(f'------------------------------------------------'
f'\nCommands ran in {int(round(end - start, 0))} seconds.')
if len(sessions.failed_devices) != 0:
print('See failed_devices.csv for more information on failed devices')
output_failed_to_file(sessions.failed_devices)
print('\nFinished.')
more_cmds = input('Do you want to send more commands? Y/[N]: ')
if re.fullmatch(r'[Yy]', more_cmds):
continue
elif re.fullmatch(r'[Nn]|', more_cmds):
break
else:
break
input('Press Enter to close.')
if __name__ == '__main__':
command_runner()
|
en
| 0.555159
|
# TODO: Add support for config file # PyInstaller bundle command: # pyinstaller -F --hidden-import PySimpleGUI,net_async --add-data templates;templates main.py # Outputs failed device list to CSV file with columns: # 'ip_address,connectivity,authentication,authorization,con_type,con_exception' Type 'use_file' if you want to provide configuration file, or enter configuration commands separating each command with a comma and no space. Example: interface vlan 1,no ip address,shut Commands:
| 2.108429
| 2
|
styler_rest_framework/pubsub/publishers/pubsub_handler.py
|
STYLER-Inc/styler-rest-framework
| 3
|
6626926
|
""" Handler for pubsub
"""
from concurrent import futures
import logging
from google.cloud import pubsub_v1
def get_callback(data):
    """Build a publish-completion callback that carries *data* in its closure.

    The returned callable is meant for ``Future.add_done_callback``: it
    resolves the future, and on failure logs the payload together with the
    exception before re-raising it.
    """
    def _on_publish_done(api_future):
        try:
            api_future.result()
        except Exception:
            # Surface which payload failed, then propagate the error.
            logging.error(
                "A problem occurred when publishing %s: %s\n",
                data,
                api_future.exception(),
            )
            raise

    return _on_publish_done
def publish_message(topic_name, data): # pragma: no coverage
    """Publishes a message to a Pub/Sub topic.

    :param topic_name: fully-qualified topic path to publish to
    :param data: message payload (bytes expected by the Pub/Sub
        client — confirm at call sites)
    Blocks until the publish future completes; failures are logged and
    re-raised by the callback from get_callback().
    """
    # Initialize a Publisher client.
    client = pubsub_v1.PublisherClient()
    # Extra keyword args to publish() become message attributes; here every
    # message is tagged with a "version" attribute.
    metadata = {"version": "1"}
    # When you publish a message, the client returns a future.
    api_future = client.publish(topic_name, data=data, **metadata)
    api_future.add_done_callback(get_callback(data))
    # Wait synchronously so callers observe completion (or the raised error).
    futures.wait([api_future], return_when=futures.ALL_COMPLETED)
|
""" Handler for pubsub
"""
from concurrent import futures
import logging
from google.cloud import pubsub_v1
def get_callback(data):
"""Wrap message data in the context of the callback function."""
def callback(api_future):
try:
api_future.result()
except Exception:
logging.error(
"A problem occurred when publishing %s: %s\n",
data,
api_future.exception(),
)
raise
return callback
def publish_message(topic_name, data): # pragma: no coverage
"""Publishes a message to a Pub/Sub topic."""
# Initialize a Publisher client.
client = pubsub_v1.PublisherClient()
metadata = {"version": "1"}
# When you publish a message, the client returns a future.
api_future = client.publish(topic_name, data=data, **metadata)
api_future.add_done_callback(get_callback(data))
futures.wait([api_future], return_when=futures.ALL_COMPLETED)
|
en
| 0.635749
|
Handler for pubsub Wrap message data in the context of the callback function. # pragma: no coverage Publishes a message to a Pub/Sub topic. # Initialize a Publisher client. # When you publish a message, the client returns a future.
| 2.828336
| 3
|
ner/3DownLoadHTML.py
|
196sigma/ner-10ks
| 2
|
6626927
|
<gh_stars>1-10
import os,sys,csv,urllib2,time
os.chdir('/Users/alvinzuyinzheng/Dropbox/PythonWorkshop/scripts/')#<===The location of your file "LongCompanyList.csv
htmlSubPath = "./HTML/" #<===The subfolder with the 10-K files in HTML format
Form10kListFile = "10kList.csv" #a csv file (output of the 2Get10kLinks.py script) with the list of 10-K links
logFile = "10kDownloadLog.csv" #a csv file (output of the current script) with the download history of 10-K forms
def dowmload10k(tickerCode, docIndex, docLink, description, filingDate, newFilingDate, formLink,formName):
    """Download one 10-K filing as HTML and append the outcome to the log CSV.

    NOTE(review): this script targets Python 2 (urllib2, print statements in
    main()); the misspelled name 'dowmload10k' is kept for call-site
    compatibility.
    """
    csvOutput = open(logFile,"a+b")
    csvWriter = csv.writer(csvOutput, quoting = csv.QUOTE_NONNUMERIC)
    try:
        pageRequest = urllib2.Request(formLink)
        pageOpen = urllib2.urlopen(pageRequest)
        pageRead = pageOpen.read()
        # Saved file name pattern: <ticker>_<docIndex>_<newFilingDate>.htm
        htmlname = tickerCode+"_"+docIndex+"_"+newFilingDate+".htm"
        htmlpath = htmlSubPath+htmlname
        htmlfile = open(htmlpath,'wb')
        htmlfile.write(pageRead)
        htmlfile.close()
        csvWriter.writerow([tickerCode, docIndex, docLink, description, filingDate, newFilingDate, formLink,formName, htmlname, ""])
    except:
        # Bare except (deliberate best-effort): any failure — network, HTTP,
        # filesystem — is logged as "not downloaded" instead of aborting the batch.
        csvWriter.writerow([tickerCode, docIndex, docLink, description, filingDate, newFilingDate, formLink,formName, "","not downloaded"])
    csvOutput.close()
def main():
    """Batch-download 10-K filings listed in the 10kList.csv index.

    Reads the link list produced by 2Get10kLinks.py, downloads each HTML
    filing from the selected years, records every outcome (downloaded /
    text format / no form) in the log CSV, and pauses between batches to
    throttle requests.  Python 2 script ('print x' statements below).
    """
    if not os.path.isdir(htmlSubPath): ### <=== keep all HTML files in this subfolder
        os.makedirs(htmlSubPath)
    FormType = "10-K" ### <=== Type your document type here
    nbDocPause = 5 ### <=== Type your number of documents to download in one batch
    nbSecPause = 1 ### <=== Type your pausing time in seconds between each batch
    FormYears = ['2014','2015'] ### <=== Type the years of documents to download here
    csvFile = open(Form10kListFile,"r") #<===A csv file with the list of company ticker symbols (the file has a line with headers)
    csvReader = csv.reader(csvFile,delimiter=",")
    csvData = list(csvReader)
    # Start the log file with a header row describing the outcome columns.
    csvOutput = open(logFile,"a+b")
    csvWriter = csv.writer(csvOutput, quoting = csv.QUOTE_NONNUMERIC)
    csvWriter.writerow(["Ticker", "DocIndex", "IndexLink", "Description", "FilingDate", "NewFilingDate", "Form10KLink","Form10KName", "FileName","Note"])
    csvOutput.close()
    i = 1
    # Skip the header row of the input index; each row describes one filing.
    for rowData in csvData[1:]:
        Ticker = rowData[0]
        DocIndex = rowData[1]
        IndexLink = rowData[2]
        Description = rowData[3]
        FilingDate = rowData[4]
        NewFilingDate = rowData[5]
        FormLink = rowData[6]
        FormName = rowData[7]
        for year in FormYears:
            if year in FilingDate:
                # Only HTML forms are downloaded; .txt and unknown formats
                # are recorded in the log but skipped.
                if ".htm" in FormName:
                    dowmload10k(Ticker, DocIndex, IndexLink, Description, FilingDate, NewFilingDate, FormLink,FormName)
                elif ".txt" in FormName:
                    csvOutput = open(logFile,"a+b")
                    csvWriter = csv.writer(csvOutput, quoting = csv.QUOTE_NONNUMERIC)
                    csvWriter.writerow([Ticker, DocIndex, IndexLink, Description, FilingDate, NewFilingDate, FormLink,FormName, "","Text format"])
                    csvOutput.close()
                else:
                    csvOutput = open(logFile,"a+b")
                    csvWriter = csv.writer(csvOutput, quoting = csv.QUOTE_NONNUMERIC)
                    csvWriter.writerow([Ticker, DocIndex, IndexLink, Description, FilingDate, NewFilingDate, FormLink,FormName,"", "No form"])
                    csvOutput.close()
        # Throttle: sleep after every nbDocPause processed rows.
        if i%nbDocPause == 0:
            print i
            print "Pause for "+str(nbSecPause)+" second .... "
            time.sleep(float(nbSecPause))
        i=i+1
    csvFile.close()
    print "done!"
if __name__ == "__main__":
    main()
|
import os,sys,csv,urllib2,time
os.chdir('/Users/alvinzuyinzheng/Dropbox/PythonWorkshop/scripts/')#<===The location of your file "LongCompanyList.csv
htmlSubPath = "./HTML/" #<===The subfolder with the 10-K files in HTML format
Form10kListFile = "10kList.csv" #a csv file (output of the 2Get10kLinks.py script) with the list of 10-K links
logFile = "10kDownloadLog.csv" #a csv file (output of the current script) with the download history of 10-K forms
def dowmload10k(tickerCode, docIndex, docLink, description, filingDate, newFilingDate, formLink,formName):
csvOutput = open(logFile,"a+b")
csvWriter = csv.writer(csvOutput, quoting = csv.QUOTE_NONNUMERIC)
try:
pageRequest = urllib2.Request(formLink)
pageOpen = urllib2.urlopen(pageRequest)
pageRead = pageOpen.read()
htmlname = tickerCode+"_"+docIndex+"_"+newFilingDate+".htm"
htmlpath = htmlSubPath+htmlname
htmlfile = open(htmlpath,'wb')
htmlfile.write(pageRead)
htmlfile.close()
csvWriter.writerow([tickerCode, docIndex, docLink, description, filingDate, newFilingDate, formLink,formName, htmlname, ""])
except:
csvWriter.writerow([tickerCode, docIndex, docLink, description, filingDate, newFilingDate, formLink,formName, "","not downloaded"])
csvOutput.close()
def main():
if not os.path.isdir(htmlSubPath): ### <=== keep all HTML files in this subfolder
os.makedirs(htmlSubPath)
FormType = "10-K" ### <=== Type your document type here
nbDocPause = 5 ### <=== Type your number of documents to download in one batch
nbSecPause = 1 ### <=== Type your pausing time in seconds between each batch
FormYears = ['2014','2015'] ### <=== Type the years of documents to download here
csvFile = open(Form10kListFile,"r") #<===A csv file with the list of company ticker symbols (the file has a line with headers)
csvReader = csv.reader(csvFile,delimiter=",")
csvData = list(csvReader)
csvOutput = open(logFile,"a+b")
csvWriter = csv.writer(csvOutput, quoting = csv.QUOTE_NONNUMERIC)
csvWriter.writerow(["Ticker", "DocIndex", "IndexLink", "Description", "FilingDate", "NewFilingDate", "Form10KLink","Form10KName", "FileName","Note"])
csvOutput.close()
i = 1
for rowData in csvData[1:]:
Ticker = rowData[0]
DocIndex = rowData[1]
IndexLink = rowData[2]
Description = rowData[3]
FilingDate = rowData[4]
NewFilingDate = rowData[5]
FormLink = rowData[6]
FormName = rowData[7]
for year in FormYears:
if year in FilingDate:
if ".htm" in FormName:
dowmload10k(Ticker, DocIndex, IndexLink, Description, FilingDate, NewFilingDate, FormLink,FormName)
elif ".txt" in FormName:
csvOutput = open(logFile,"a+b")
csvWriter = csv.writer(csvOutput, quoting = csv.QUOTE_NONNUMERIC)
csvWriter.writerow([Ticker, DocIndex, IndexLink, Description, FilingDate, NewFilingDate, FormLink,FormName, "","Text format"])
csvOutput.close()
else:
csvOutput = open(logFile,"a+b")
csvWriter = csv.writer(csvOutput, quoting = csv.QUOTE_NONNUMERIC)
csvWriter.writerow([Ticker, DocIndex, IndexLink, Description, FilingDate, NewFilingDate, FormLink,FormName,"", "No form"])
csvOutput.close()
if i%nbDocPause == 0:
print i
print "Pause for "+str(nbSecPause)+" second .... "
time.sleep(float(nbSecPause))
i=i+1
csvFile.close()
print "done!"
if __name__ == "__main__":
main()
|
en
| 0.870104
|
#<===The location of your file "LongCompanyList.csv #<===The subfolder with the 10-K files in HTML format #a csv file (output of the 2Get10kLinks.py script) with the list of 10-K links #a csv file (output of the current script) with the download history of 10-K forms ### <=== keep all HTML files in this subfolder ### <=== Type your document type here ### <=== Type your number of documents to download in one batch ### <=== Type your pausing time in seconds between each batch ### <=== Type the years of documents to download here #<===A csv file with the list of company ticker symbols (the file has a line with headers)
| 2.738972
| 3
|
src/Case1.py
|
EstudoAAS/linear-algebra-refresher-course
| 0
|
6626928
|
<reponame>EstudoAAS/linear-algebra-refresher-course
import vector
def main():
    """Demonstrate vector addition, subtraction and scalar multiplication."""
    print(vector.Vector([8.218, -9.341]).plus(vector.Vector([-1.129, 2.111])))
    print(vector.Vector([7.119, 8.215]).minus(vector.Vector([-8.223, 0.878])))
    print(vector.Vector([1.671, -1.012, -0.318]).times_scalar(7.41))
# Runs on import/execution — the original has no __main__ guard.
main()
|
import vector
def main():
vetor1 = vector.Vector([8.218,-9.341])
print(vetor1.plus(vector.Vector([-1.129,2.111])))
vetor2 = vector.Vector([7.119,8.215])
print(vetor2.minus(vector.Vector([-8.223,0.878])))
vetor3 = vector.Vector([1.671,-1.012,-0.318])
print(vetor3.times_scalar(7.41))
main()
|
none
| 1
| 3.586724
| 4
|
|
utils.py
|
cianfrocco-lab/GAN-for-Cryo-EM-image-denoising
| 10
|
6626929
|
<filename>utils.py<gh_stars>1-10
import numpy as np
import tensorflow as tf
import scipy.misc
def batch_norm(x, scope):
    """Batch normalization (decay=0.9, eps=1e-5, learned scale) under *scope*."""
    return tf.contrib.layers.batch_norm(x, decay=0.9, updates_collections=None, epsilon=1e-5, scale=True, scope=scope)
def conv2d(input, output_dim, f=4, stride=2, stddev=0.02, name="conv2d",pad='SAME'):
    """2-D convolution with bias.

    Creates 'weight' (f x f, truncated-normal init, stddev=*stddev*) and
    'bias' variables under the variable scope *name*, then applies a
    convolution with the given *stride* and padding mode *pad*.
    """
    with tf.variable_scope(name):
        weight = tf.get_variable('weight', [f, f, input.get_shape()[-1], output_dim],
                                 initializer=tf.truncated_normal_initializer(stddev=stddev))
        bias = tf.get_variable('bias', [output_dim], initializer=tf.constant_initializer(0.0))
        conv = tf.nn.bias_add(tf.nn.conv2d(input, weight, strides=[1, stride, stride, 1], padding=pad), bias)
        return conv
def deconv2d(input, output_shape, stride=2,k_h=4, k_w=4, stddev=0.02, name="deconv2d"):
    """Transposed 2-D convolution ("deconvolution") with bias.

    Creates 'weight' (k_h x k_w, random-normal init) and 'bias' variables
    under *name* and upsamples *input* to *output_shape* with the given
    *stride*.  Note: unlike conv2d, the weight uses random_normal (not
    truncated_normal) initialization — preserved as written.
    """
    with tf.variable_scope(name):
        weight = tf.get_variable('weight', [k_h, k_w, output_shape[-1], input.get_shape()[-1]],
                                 initializer=tf.random_normal_initializer(stddev=stddev))
        bias = tf.get_variable('bias', [output_shape[-1]], initializer=tf.constant_initializer(0.0))
        deconv = tf.nn.bias_add(tf.nn.conv2d_transpose(input, weight, output_shape=output_shape, strides=[1, stride, stride, 1]), bias)
        return deconv
def Identity_block_for_D(X, filters, stage='DIstg'):
    """Residual identity block (1x1 -> 4x4 SAME -> 1x1, all stride 1, ELU).

    *filters* unpacks to (F1, F2, F3); the identity shortcut is added to the
    main path, so tf.add requires F3 to match X's channel count — confirm at
    call sites.  *stage* prefixes the variable-scope names (A/B/C).
    Body is identical to Identity_block_for_G apart from the default stage.
    """
    F1, F2, F3 = filters
    X_shortcut=X
    X1 = tf.nn.elu(batch_norm(conv2d(X,F1,f=1,stride=1,name=str(stage)+'A',pad='VALID'),str(stage)+'A'))
    X2 = tf.nn.elu(batch_norm(conv2d(X1,F2,f=4,stride=1,name=str(stage)+'B',pad='SAME'),str(stage)+'B'))
    X3 = batch_norm(conv2d(X2,F3,f=1,stride=1,name=str(stage)+'C',pad='VALID'),str(stage)+'C')
    X4 = tf.add(X_shortcut,X3)
    X5 = tf.nn.elu(X4)
    return X5
def Conv_block_for_D(X, filters ,s=2,stage='DCstg'):
    """Residual conv block with a projected (1x1, stride *s*) shortcut.

    Main path: 4x4 VALID stride-s conv -> 1x1 SAME -> 1x1 VALID, each batch-
    normed, ELU between; the shortcut is projected to F3 channels so the
    residual add can change both channels and spatial size.
    NOTE(review): the 4x4 VALID stride-s main path and the 1x1 VALID stride-s
    shortcut generally produce different spatial sizes — confirm the input
    dimensions used by callers make tf.add's shapes match.
    """
    F1, F2, F3 = filters
    X_shortcut = X
    X1 = tf.nn.elu(batch_norm(conv2d(X,F1,f=4,stride=s,name=str(stage)+'A',pad='VALID'),str(stage)+'A'))
    X2 = tf.nn.elu(batch_norm(conv2d(X1,F2,f=1,stride=1,name=str(stage)+'B',pad='SAME'),str(stage)+'B'))
    X3 = batch_norm(conv2d(X2,F3,f=1,stride=1,name=str(stage)+'C',pad='VALID'),str(stage)+'C')
    X_shortcut_new = batch_norm(conv2d(X_shortcut,F3,f=1,stride=s,name=str(stage)+'D',pad='VALID'),str(stage)+'D')
    X4 = tf.add(X_shortcut_new,X3)
    X5 = tf.nn.elu(X4)
    return X5
def Identity_block_for_G(X, filters ,stage='Gstg'):
    """Residual identity block for the generator (1x1 -> 4x4 SAME -> 1x1, ELU).

    Same structure as Identity_block_for_D, with default scope prefix 'Gstg'.
    *filters* = (F1, F2, F3); F3 must equal X's channel count for the
    shortcut add — confirm at call sites.
    """
    F1, F2, F3 = filters
    X_shortcut = X
    X1 = tf.nn.elu(batch_norm(conv2d(X,F1,f=1,stride=1,name=str(stage)+'A',pad='VALID'),str(stage)+'A'))
    X2 = tf.nn.elu(batch_norm(conv2d(X1,F2,f=4,stride=1,name=str(stage)+'B',pad='SAME'),str(stage)+'B'))
    X3 = batch_norm(conv2d(X2,F3,f=1,stride=1,name=str(stage)+'C',pad='VALID'),str(stage)+'C')
    X4 = tf.add(X_shortcut,X3)
    X5 = tf.nn.elu(X4)
    return X5
|
<filename>utils.py<gh_stars>1-10
import numpy as np
import tensorflow as tf
import scipy.misc
def batch_norm(x, scope):
return tf.contrib.layers.batch_norm(x, decay=0.9, updates_collections=None, epsilon=1e-5, scale=True, scope=scope)
def conv2d(input, output_dim, f=4, stride=2, stddev=0.02, name="conv2d",pad='SAME'):
with tf.variable_scope(name):
weight = tf.get_variable('weight', [f, f, input.get_shape()[-1], output_dim],
initializer=tf.truncated_normal_initializer(stddev=stddev))
bias = tf.get_variable('bias', [output_dim], initializer=tf.constant_initializer(0.0))
conv = tf.nn.bias_add(tf.nn.conv2d(input, weight, strides=[1, stride, stride, 1], padding=pad), bias)
return conv
def deconv2d(input, output_shape, stride=2,k_h=4, k_w=4, stddev=0.02, name="deconv2d"):
with tf.variable_scope(name):
weight = tf.get_variable('weight', [k_h, k_w, output_shape[-1], input.get_shape()[-1]],
initializer=tf.random_normal_initializer(stddev=stddev))
bias = tf.get_variable('bias', [output_shape[-1]], initializer=tf.constant_initializer(0.0))
deconv = tf.nn.bias_add(tf.nn.conv2d_transpose(input, weight, output_shape=output_shape, strides=[1, stride, stride, 1]), bias)
return deconv
def Identity_block_for_D(X, filters, stage='DIstg'):
F1, F2, F3 = filters
X_shortcut=X
X1 = tf.nn.elu(batch_norm(conv2d(X,F1,f=1,stride=1,name=str(stage)+'A',pad='VALID'),str(stage)+'A'))
X2 = tf.nn.elu(batch_norm(conv2d(X1,F2,f=4,stride=1,name=str(stage)+'B',pad='SAME'),str(stage)+'B'))
X3 = batch_norm(conv2d(X2,F3,f=1,stride=1,name=str(stage)+'C',pad='VALID'),str(stage)+'C')
X4 = tf.add(X_shortcut,X3)
X5 = tf.nn.elu(X4)
return X5
def Conv_block_for_D(X, filters ,s=2,stage='DCstg'):
F1, F2, F3 = filters
X_shortcut = X
X1 = tf.nn.elu(batch_norm(conv2d(X,F1,f=4,stride=s,name=str(stage)+'A',pad='VALID'),str(stage)+'A'))
X2 = tf.nn.elu(batch_norm(conv2d(X1,F2,f=1,stride=1,name=str(stage)+'B',pad='SAME'),str(stage)+'B'))
X3 = batch_norm(conv2d(X2,F3,f=1,stride=1,name=str(stage)+'C',pad='VALID'),str(stage)+'C')
X_shortcut_new = batch_norm(conv2d(X_shortcut,F3,f=1,stride=s,name=str(stage)+'D',pad='VALID'),str(stage)+'D')
X4 = tf.add(X_shortcut_new,X3)
X5 = tf.nn.elu(X4)
return X5
def Identity_block_for_G(X, filters ,stage='Gstg'):
F1, F2, F3 = filters
X_shortcut = X
X1 = tf.nn.elu(batch_norm(conv2d(X,F1,f=1,stride=1,name=str(stage)+'A',pad='VALID'),str(stage)+'A'))
X2 = tf.nn.elu(batch_norm(conv2d(X1,F2,f=4,stride=1,name=str(stage)+'B',pad='SAME'),str(stage)+'B'))
X3 = batch_norm(conv2d(X2,F3,f=1,stride=1,name=str(stage)+'C',pad='VALID'),str(stage)+'C')
X4 = tf.add(X_shortcut,X3)
X5 = tf.nn.elu(X4)
return X5
|
none
| 1
| 2.323254
| 2
|
|
astropy_helpers/commands/test.py
|
jayvdb/astropy-helpers
| 30
|
6626930
|
<reponame>jayvdb/astropy-helpers
"""
Different implementations of the ``./setup.py test`` command depending on
what's locally available.
If Astropy v1.1 or later is available it should be possible to import
AstropyTest from ``astropy.tests.command``. Otherwise there is a skeleton
implementation that allows users to at least discover the ``./setup.py test``
command and learn that they need Astropy to run it.
"""
import os
from ..utils import import_file
# Previously these except statements caught only ImportErrors, but there are
# some other obscure exceptional conditions that can occur when importing
# astropy.tests (at least on older versions) that can cause these imports to
# fail
try:
    # If we are testing astropy itself, we need to use import_file to avoid
    # actually importing astropy (just the file we need).
    command_file = os.path.join('astropy', 'tests', 'command.py')
    if os.path.exists(command_file):
        AstropyTest = import_file(command_file, 'astropy_tests_command').AstropyTest
    else:
        import astropy  # noqa
        from astropy.tests.command import AstropyTest
except Exception:
    # No astropy at all--provide the dummy implementation
    from ._dummy import _DummyCommand
    # Placeholder command: keeps './setup.py test' discoverable and tells
    # the user that astropy is required to actually run it.
    class AstropyTest(_DummyCommand):
        command_name = 'test'
        description = 'Run the tests for this package'
        error_msg = (
            "The 'test' command requires the astropy package to be "
            "installed and importable.")
|
"""
Different implementations of the ``./setup.py test`` command depending on
what's locally available.
If Astropy v1.1 or later is available it should be possible to import
AstropyTest from ``astropy.tests.command``. Otherwise there is a skeleton
implementation that allows users to at least discover the ``./setup.py test``
command and learn that they need Astropy to run it.
"""
import os
from ..utils import import_file
# Previously these except statements caught only ImportErrors, but there are
# some other obscure exceptional conditions that can occur when importing
# astropy.tests (at least on older versions) that can cause these imports to
# fail
try:
# If we are testing astropy itself, we need to use import_file to avoid
# actually importing astropy (just the file we need).
command_file = os.path.join('astropy', 'tests', 'command.py')
if os.path.exists(command_file):
AstropyTest = import_file(command_file, 'astropy_tests_command').AstropyTest
else:
import astropy # noqa
from astropy.tests.command import AstropyTest
except Exception:
# No astropy at all--provide the dummy implementation
from ._dummy import _DummyCommand
class AstropyTest(_DummyCommand):
command_name = 'test'
description = 'Run the tests for this package'
error_msg = (
"The 'test' command requires the astropy package to be "
"installed and importable.")
|
en
| 0.865599
|
Different implementations of the ``./setup.py test`` command depending on what's locally available. If Astropy v1.1 or later is available it should be possible to import AstropyTest from ``astropy.tests.command``. Otherwise there is a skeleton implementation that allows users to at least discover the ``./setup.py test`` command and learn that they need Astropy to run it. # Previously these except statements caught only ImportErrors, but there are # some other obscure exceptional conditions that can occur when importing # astropy.tests (at least on older versions) that can cause these imports to # fail # If we are testing astropy itself, we need to use import_file to avoid # actually importing astropy (just the file we need). # noqa # No astropy at all--provide the dummy implementation
| 2.483687
| 2
|
experiment/session.py
|
yvinkesteijn/RL_visual_task
| 0
|
6626931
|
from __future__ import absolute_import, division, print_function
from psychopy import visual, core, event, prefs
import sys, csv, time, os,datetime, time
import random as rd
class session(object):
    """One participant session of the visual RL task.

    Runs a demo block plus control, learning and test phases with PsychoPy,
    records key presses / reaction times per trial and writes a per-participant
    CSV log under ``output/ppn/``.

    Fixes vs. previous revision: ``a_time`` constructor argument is no longer
    ignored; ``keys is None`` identity check; the bare ``except:`` in
    ``set_one`` is narrowed to the no-response case.
    """
    def __init__(self, ppn, a_side, rwrd_sc, control_ph, learn_ph, test_ph, demo_ph,
                 img_time=1.250, cross_time=0.5, a_time=0.1):
        # Phase stimulus lists: control, learning, test and demo.
        self.c_ph, self.l_ph, self.t_ph, self.demo_ph = control_ph, learn_ph, test_ph, demo_ph
        self.w_scr, self.h_scr = 1280, 800
        self.a_side = a_side
        self.rwrd_sc = rwrd_sc
        self.trial_num = 300
        self.demo_dir = 'images/demo/'
        self.img_dir = 'images/task/'
        self.out_dir = 'output/ppn/'
        self.img_scl = 0.8
        self.x_val = 0.5
        self.cross_scl = 0.08
        self.txt_scl = 0.2
        # BUGFIX: was hard-coded to 0.1, silently discarding the a_time
        # constructor argument; the default value is unchanged.
        self.a_time = a_time
        self.ppn = ppn
        self.date_time = time.strftime("%y%m%d%H%M")
        self.img_time = img_time
        self.cross_time = cross_time
        self.cur_reward = 0
        self.trial_lst = []
    def test(self):
        """Stand-alone debug harness: shows the exit screen ten times and
        prints the captured key presses."""
        self.ttl_timer = datetime.datetime.now()
        self.win = visual.Window(size=(self.w_scr, self.h_scr), fullscr=True,
                                 monitor='testMonitor')
        event.globalKeys.add('q', func=self.quit_q)
        self.win.mouseVisible = False
        self.fix_cros = visual.ShapeStim(win=self.win, vertices=((0, -self.cross_scl),
                                         (0, self.cross_scl), (0, 0), (-self.cross_scl*(6/8), 0),
                                         (self.cross_scl*(6/8), 0)), lineWidth=4, closeShape=False,
                                         lineColor="black")
        for i in range(10):
            in_txt = visual.ImageStim(win=self.win, image='images/instruct/exit.png', pos=(0,0))
            in_txt.draw()
            self.win.flip()
            trial_tmr = core.Clock()
            keys = event.waitKeys(maxWait=3.0, keyList=["z", "slash"], timeStamped=trial_tmr)
            self.fix_cros.draw()
            self.win.flip()
            core.wait(0.5)
            print(keys)
    def create_window(self):
        """Run the full experiment: demo, control, learning, test, then save."""
        self.ttl_timer = datetime.datetime.now()
        self.win = visual.Window(size=(self.w_scr, self.h_scr), fullscr=True,
                                 monitor='testMonitor')
        self.fix_cros = visual.ShapeStim(win=self.win, vertices=((0, -self.cross_scl),
                                         (0, self.cross_scl), (0, 0), (-self.cross_scl*(6/8), 0),
                                         (self.cross_scl*(6/8), 0)), lineWidth=4, closeShape=False,
                                         lineColor="black")
        event.globalKeys.add('q', func=self.quit_q)
        self.win.mouseVisible = False
        print('start demo')
        self.test_phase()
        print('start phase 1')
        self.trial_lst.append([str(datetime.datetime.now() - self.ttl_timer), 'start control'])
        self.set_two('control', self.c_ph, self.trial_num)
        self.show_instruct('end_one.png')
        print('start phase 2')
        self.trial_lst.append([str(datetime.datetime.now() - self.ttl_timer), 'start learning'])
        self.set_one('learning', self.l_ph, self.trial_num)
        self.show_instruct('end_two.png')
        print('start phase 3')
        self.trial_lst.append([str(datetime.datetime.now() - self.ttl_timer), 'start test'])
        self.set_two('test', self.t_ph, self.trial_num)
        self.show_instruct('exit.png')
        self.create_csv()
        self.win.close()
    def test_phase(self):
        """Familiarisation block: short two-image and one-image demo runs.
        Resets the reward counter afterwards so the demo never pays out."""
        demo_two = [self.demo_ph[0], self.demo_ph[1], self.demo_ph[2]]
        demo_one = [self.demo_ph[0], self.demo_ph[3], self.demo_ph[2]]
        self.show_instruct('demo.png')
        self.show_instruct('demo_two.png')
        self.set_two('demo_two', demo_two, 10)
        # Instruction screen depends on which side is the "animal" key.
        if self.a_side == 'al':
            dem_name = 'demo_second_al.png'
        else:
            dem_name = 'demo_second_ar.png'
        self.show_instruct(dem_name)
        self.set_one('demo_one', demo_one, 10)
        self.cur_reward = 0
        self.show_instruct('end_demo.png')
    def show_instruct(self, file_path):
        """Show an instruction image and block until any key is pressed."""
        in_txt = visual.ImageStim(win=self.win, image='images/instruct/'+file_path, pos=(0,0))
        in_txt.draw()
        self.win.flip()
        event.waitKeys()
    def set_two(self, phase, img_set, reps):
        """Two-image choice trials: shows a left/right pair in random order,
        waits up to img_time for a 'z'/'slash' response and logs key + RT."""
        if phase.startswith('demo'):
            im_dir = self.demo_dir
        else:
            im_dir = self.img_dir
        for i in range(reps):
            # Randomise which image of the pair appears on the left.
            if rd.randint(0, 1) == 1:
                img_one = img_set[0][i]
                img_two = img_set[1][i]
            else:
                img_one = img_set[1][i]
                img_two = img_set[0][i]
            img_l = visual.ImageStim(win=self.win, image=im_dir+img_one, pos=(-self.x_val, 0))
            img_r = visual.ImageStim(win=self.win, image=im_dir+img_two, pos=(self.x_val, 0))
            img_l.size *= self.img_scl
            img_r.size *= self.img_scl
            img_l.draw()
            img_r.draw()
            self.win.flip()
            trial_tmr = core.Clock()
            keys = event.waitKeys(maxWait=self.img_time, keyList=["z", "slash"], timeStamped=trial_tmr)
            if keys is None:  # BUGFIX: identity check instead of `== None`
                # Timed out: no response -> show a warning screen.
                key_name = [None, None]
                warn_txt = visual.ImageStim(win=self.win, image='images/instruct/warning_two.png', pos=(0,0))
                warn_txt.draw()
                self.win.flip()
                core.wait(0.5)
            else:
                key_name = keys[0]
                self.get_cross()
            self.trial_lst.append([str(datetime.datetime.now() - self.ttl_timer), phase, i, img_set[2][i],
                                   key_name[0], key_name[1], img_one, img_two, 0, 0])
    def set_one(self, phase, img_lst, reps):
        '''Function creates second phase. Shows one image for 100ms and registers keys
        Takes phasename, list with image names and number of trials as ppn_input
        adds variables [key pressed, image name, RT, reward, total reward, etc.]
        to log list of participant session'''
        if phase.startswith('demo'):
            im_dir = self.demo_dir
        else:
            im_dir = self.img_dir
        for i in range(reps):
            img_nm, img_rw, img_a = im_dir+img_lst[0][i], img_lst[1][i], img_lst[2][i]
            img_show = visual.ImageStim(win=self.win, image=img_nm, pos=(0,0))
            img_show.size *= self.img_scl
            img_show.draw()
            self.win.flip()
            core.wait(self.a_time)
            # White fixation cross marks the response window.
            self.fix_cros.lineColor = 'white'
            self.fix_cros.draw()
            trial_tmr = core.Clock()
            self.win.flip()
            keys = event.waitKeys(maxWait=1.150, keyList=["z", "slash"], timeStamped=trial_tmr)
            get_reward, col = 0, 'white'
            # Which key means "animal" depends on the counterbalanced side.
            if self.a_side == 'al':
                key_na, key_a = 'slash', 'z'
            else:
                key_na, key_a = 'z', 'slash'
            try:
                key = keys[0]
                if (key[0] == key_na and img_a == 0) or (key[0] == key_a and img_a == 1):
                    get_reward = img_rw
                    col = 'green'
                    fb_txt = 'Correct'
                else:
                    col = 'red'
                    fb_txt = 'Incorrect'
            except (TypeError, IndexError):
                # BUGFIX: was a bare `except:`; only the "no response" case
                # (keys is None -> keys[0] raises TypeError) should be caught.
                key = [None, None]
                col = 'yellow'
                fb_txt = 'Incorrect'
            self.cur_reward += get_reward
            stim1 = visual.TextStim(self.win, str(get_reward)+' points',
                                    color=col, pos=(0,0))
            stim2 = visual.TextStim(self.win, fb_txt,
                                    color=col, pos=(0,0.3))
            stim1.draw()
            stim2.draw()
            self.win.flip()
            self.trial_lst.append([str(datetime.datetime.now() - self.ttl_timer), phase, i, img_lst[2][i],
                                   key[0], key[1], img_nm, img_rw, get_reward, self.cur_reward])
            core.wait(1.0)
            self.get_cross()
            # Show the running total at trial 2 and every 100th trial after.
            if i in {1, 99, 199, 299}:
                rwrd_stim = visual.TextStim(self.win, 'total points: '+str(self.cur_reward),
                                            color='white', pos=(0,0))
                rwrd_stim.draw()
                self.win.flip()
                core.wait(1.0)
    def get_cross(self):
        '''Draws and shows black fixation cross on screen for [cross_time] time'''
        self.fix_cros.lineColor = 'black'
        self.fix_cros.draw()
        self.win.flip()
        core.wait(self.cross_time)
    def create_csv(self):
        '''Creates csv output file of session from session log list
        Files can be found in output/ppn folder'''
        # Zero-pad the participant number to two digits for the file name.
        ppn_form = ('0'*(2-len(str(self.ppn))))+str(self.ppn)
        file_name = "PPN{}_{}_{}_{}.csv".format(ppn_form, self.date_time, self.a_side, self.rwrd_sc)
        with open(self.out_dir+file_name, 'w', newline='') as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=',')
            csv_writer.writerow(['time_stamp','phase','trial_nr','animal','key','RT',
                                 'img_l', 'img_r', 'reward', 'tot_reward'])
            csv_writer.writerows(self.trial_lst)
        return True
    def quit_q(self):
        '''saves current log to csv and quits session'''
        self.create_csv()
        core.quit()
|
from __future__ import absolute_import, division, print_function
from psychopy import visual, core, event, prefs
import sys, csv, time, os,datetime, time
import random as rd
class session(object):
    """One participant session of the visual RL task.

    Runs a demo block plus control, learning and test phases with PsychoPy,
    records key presses / reaction times per trial and writes a CSV log per
    participant under ``output/ppn/``.
    """
    def __init__(self, ppn, a_side, rwrd_sc,control_ph, learn_ph, test_ph, demo_ph,
                img_time=1.250, cross_time = 0.5, a_time = 0.1):
        # Phase stimulus lists: control, learning, test and demo.
        self.c_ph, self.l_ph, self.t_ph, self.demo_ph = control_ph, learn_ph, test_ph, demo_ph
        self.w_scr, self.h_scr = 1280, 800
        self.a_side = a_side
        self.rwrd_sc = rwrd_sc
        self.trial_num = 300
        self.demo_dir = 'images/demo/'
        self.img_dir = 'images/task/'
        self.out_dir = 'output/ppn/'
        self.img_scl = 0.8
        self.x_val = 0.5
        self.cross_scl = 0.08
        self.txt_scl = 0.2
        # NOTE(review): hard-coded to 0.1, silently ignoring the a_time
        # constructor argument -- confirm whether this is intended.
        self.a_time = 0.1
        self.ppn = ppn
        self.date_time = time.strftime("%y%m%d%H%M")
        self.img_time = img_time
        self.cross_time = cross_time
        self.cur_reward = 0
        self.trial_lst = []
    def test(self):
        """Stand-alone debug harness: shows the exit screen ten times and
        prints the captured key presses."""
        self.ttl_timer = datetime.datetime.now()
        self.win = visual.Window(size=(self.w_scr, self.h_scr),fullscr=True,
            monitor='testMonitor')
        event.globalKeys.add('q', func=self.quit_q)
        self.win.mouseVisible = False
        self.fix_cros = visual.ShapeStim(win=self.win, vertices=((0, -self.cross_scl),
            (0, self.cross_scl), (0,0),(-self.cross_scl*(6/8),0),
            (self.cross_scl*(6/8), 0)),lineWidth=4,closeShape=False,
            lineColor="black")
        for i in range(10):
            timer_lst = []
            in_txt = visual.ImageStim(win=self.win, image='images/instruct/exit.png', pos=(0,0))
            in_txt.draw()
            self.win.flip()
            trial_tmr = core.Clock()
            keys = event.waitKeys(maxWait=3.0, keyList=["z", "slash"], timeStamped=trial_tmr)
            self.fix_cros.draw()
            self.win.flip()
            core.wait(0.5)
            print(keys)
    def create_window(self):
        """Run the full experiment: demo, control, learning, test, then save."""
        self.ttl_timer = datetime.datetime.now()
        self.win = visual.Window(size=(self.w_scr, self.h_scr),fullscr=True,
            monitor='testMonitor')
        self.fix_cros = visual.ShapeStim(win=self.win, vertices=((0, -self.cross_scl),
            (0, self.cross_scl), (0,0),(-self.cross_scl*(6/8),0),
            (self.cross_scl*(6/8), 0)),lineWidth=4,closeShape=False,
            lineColor="black")
        start_data = [str(self.ttl_timer), 'start begin']
        event.globalKeys.add('q', func=self.quit_q)
        self.win.mouseVisible = False
        print('start demo')
        self.test_phase()
        print('start phase 1')
        self.trial_lst.append([str(datetime.datetime.now() - self.ttl_timer), 'start control'])
        con_log = self.set_two('control',self.c_ph, self.trial_num)
        self.show_instruct('end_one.png')
        print('start phase 2')
        self.trial_lst.append([str(datetime.datetime.now() - self.ttl_timer), 'start learning'])
        learn_log = self.set_one('learning',self.l_ph, self.trial_num)
        self.show_instruct('end_two.png')
        print('start phase 3')
        self.trial_lst.append([str(datetime.datetime.now() - self.ttl_timer), 'start test'])
        test_log = self.set_two('test',self.t_ph, self.trial_num)
        self.show_instruct('exit.png')
        # print(self.cur_reward)
        self.create_csv()
        self.win.close()
    def test_phase(self):
        """Familiarisation block with short two-image and one-image demo runs;
        resets the reward counter so the demo never pays out."""
        demo_two = [self.demo_ph[0], self.demo_ph[1], self.demo_ph[2]]
        demo_one = [self.demo_ph[0], self.demo_ph[3],self.demo_ph[2]]
        self.show_instruct('demo.png')
        self.show_instruct('demo_two.png')
        self.set_two('demo_two', demo_two, 10)
        # Instruction screen depends on which side is the "animal" key.
        if self.a_side == 'al':
            dem_name = 'demo_second_al.png'
        else:
            dem_name = 'demo_second_ar.png'
        self.show_instruct(dem_name)
        self.set_one('demo_one', demo_one, 10)
        self.cur_reward = 0
        self.show_instruct('end_demo.png')
    #
    def show_instruct(self, file_path):
        """Show an instruction image and block until any key is pressed."""
        in_txt = visual.ImageStim(win=self.win, image='images/instruct/'+file_path, pos=(0,0))
        in_txt.draw()
        self.win.flip()
        event.waitKeys()
    def set_two(self, phase, img_set, reps):
        """Two-image choice trials: shows a left/right pair in random order,
        waits up to img_time for a 'z'/'slash' response and logs key + RT."""
        if phase.startswith('demo'):
            im_dir = self.demo_dir
        else:
            im_dir = self.img_dir
        for i in range(reps):
            # Randomise which image of the pair appears on the left.
            ran_num = rd.randint(0, 1)
            if ran_num == 1:
                img_one = img_set[0][i]
                img_two = img_set[1][i]
            else:
                img_one = img_set[1][i]
                img_two = img_set[0][i]
            img_l = visual.ImageStim(win=self.win, image=im_dir+img_one,pos=(-self.x_val,0))
            img_r = visual.ImageStim(win=self.win, image=im_dir+img_two, pos=(self.x_val, 0))
            img_l.size *= self.img_scl
            img_r.size *= self.img_scl
            img_l.draw()
            img_r.draw()
            self.win.flip()
            trial_tmr = core.Clock()
            keys = event.waitKeys(maxWait=self.img_time, keyList=["z", "slash"],timeStamped=trial_tmr)
            # NOTE(review): prefer `keys is None` over `== None`.
            if keys == None:
                # Timed out: no response -> show a warning screen.
                key_name = [None, None]
                warn_img = 'images/instruct/warning_two.png'
                warn_txt = visual.ImageStim(win=self.win, image=warn_img, pos=(0,0))
                warn_txt.draw()
                self.win.flip()
                core.wait(0.5)
            else:
                key_name = keys[0]
                self.get_cross()
            self.trial_lst.append([str(datetime.datetime.now() - self.ttl_timer),phase, i, img_set[2][i],
                            key_name[0], key_name[1], img_one, img_two,0, 0])
    def set_one(self, phase, img_lst, reps):
        '''Function creates second phase. Shows one image for 100ms and registers keys
        Takes phasename, list with image names and number of trials as ppn_input
        adds variables [key pressed, image name, RT, reward, total reward, etc.]
        to log list of participant session'''
        if phase.startswith('demo'):
            im_dir = self.demo_dir
        else:
            im_dir = self.img_dir
        for i in range(reps):
            img_nm, img_rw, img_a = im_dir+img_lst[0][i], img_lst[1][i], img_lst[2][i]
            img_show = visual.ImageStim(win=self.win, image=img_nm,pos=(0,0))
            img_show.size *= self.img_scl
            img_show.draw()
            self.win.flip()
            core.wait(self.a_time)
            # White fixation cross marks the response window.
            x_col = 'white'
            self.fix_cros.lineColor = x_col
            self.fix_cros.draw()
            trial_tmr = core.Clock()
            self.win.flip()
            keys = event.waitKeys(maxWait=1.150, keyList=["z", "slash"],timeStamped=trial_tmr)
            get_reward, col = 0, 'white'
            # Which key means "animal" depends on the counterbalanced side.
            if self.a_side == 'al':
                key_na, key_a = 'slash', 'z'
            else:
                key_na, key_a = 'z','slash'
            # NOTE(review): the bare except below is meant to catch the
            # no-response case (keys is None -> keys[0] raises TypeError),
            # but it also hides any unrelated error -- consider narrowing.
            try:
                key = keys[0]
                if (key[0] == key_na and img_a == 0) or (key[0] == key_a and img_a == 1):
                    get_reward = img_rw
                    col = 'green'
                    fb_txt = 'Correct'
                else:
                    col = 'red'
                    fb_txt = 'Incorrect'
            except:
                key = [None, None]
                col = 'yellow'
                fb_txt = 'Incorrect'
            self.cur_reward += get_reward
            stim1 = visual.TextStim(self.win, str(get_reward)+' points',
                color=col, pos=(0,0))
            stim2 = visual.TextStim(self.win, fb_txt,
                color=col, pos=(0,0.3))
            stim1.draw()
            stim2.draw()
            self.win.flip()
            self.trial_lst.append([str(datetime.datetime.now() - self.ttl_timer),phase, i, img_lst[2][i],
                            key[0],key[1], img_nm, img_rw,get_reward, self.cur_reward])
            core.wait(1.0)
            self.get_cross()
            # Show the running total at trial 2 and every 100th trial after.
            if i in {1, 99, 199, 299}:
                rwrd_stim = visual.TextStim(self.win, 'total points: '+str(self.cur_reward),
                    color='white', pos=(0,0))
                rwrd_stim.draw()
                self.win.flip()
                core.wait(1.0)
    #
    def get_cross(self):
        '''Draws and shows black fixation cross on screen for [cross_time] time'''
        x_col = 'black'
        self.fix_cros.lineColor = x_col
        self.fix_cros.draw()
        self.win.flip()
        core.wait(self.cross_time)
    #
    #
    def create_csv(self):
        '''Creates csv output file of session from session log list
        Files can be found in output/ppn folder'''
        # Zero-pad the participant number to two digits for the file name.
        ppn_form = ('0'*(2-len(str(self.ppn))))+str(self.ppn)
        file_name = "PPN{}_{}_{}_{}.csv".format(ppn_form, self.date_time, self.a_side,self.rwrd_sc)
        with open(self.out_dir+file_name, 'w', newline='') as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=',')
            csv_writer.writerow(['time_stamp','phase','trial_nr','animal','key','RT',
                'img_l', 'img_r', 'reward', 'tot_reward'])
            csv_writer.writerows(self.trial_lst)
        return True
    def quit_q(self):
        '''saves current log to csv and quits session'''
        self.create_csv()
        core.quit()
    # window()
|
en
| 0.768396
|
# print(self.cur_reward) # Function creates second phase. Shows one image for 100ms and registers keys Takes phasename, list with image names and number of trials as ppn_input adds variables [key pressed, image name, RT, reward, total reward, etc.] to log list of participant session # Draws and shows black fixation cross on screen for [cross_time] time # # Creates csv output file of session from session log list Files can be found in output/ppn folder saves current log to csv and quits session # window()
| 2.126368
| 2
|
src/Basic_knowledge/Manual_implementation_neural_network/简单感知器.py
|
wynshiter/NLP_DEMO
| 7
|
6626932
|
'''
1. 处理单个输入
2. 处理2分类
'''
class Perceptron(object):
    """Single-input binary perceptron.

    Holds a scalar weight and bias trained with the classic perceptron
    update rule: delta_w = lr * (y - y') * x, delta_bias = lr * (y - y').
    """
    def __init__(self, eta=0.01, iterations=10):
        # eta: learning rate -- too small makes convergence slow and hard to
        # judge, too large makes the update steps overshoot.
        self.lr = eta
        self.iterations = iterations
        self.w = 0.0
        self.bias = 0.0

    def fit(self, X, Y):
        """Run the perceptron update rule over (X, Y) for `iterations` epochs."""
        for _ in range(self.iterations):
            for sample, target in zip(X, Y):
                delta = self.lr * (target - self.predict(sample))
                self.w += delta * sample
                self.bias += delta

    def net_input(self, x):
        """Raw activation: w * x + bias."""
        return self.bias + x * self.w

    def predict(self, x):
        """Binary decision: 1.0 when the activation is positive, else 0.0."""
        return 0.0 if self.net_input(x) <= 0.0 else 1.0
# Demo: positive inputs are labelled 1.0, negative inputs 0.0.
x = [1, 2, 3, 10, 20, -2, -10, -100, -5, -20]
y = [1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0]

model = Perceptron(0.01, 10)
model.fit(x, y)

# Classify a few unseen points and print the learned parameters.
test_x = [30, 40, -20, -60]
for sample in test_x:
    print('input {} => predict: {}'.format(sample, model.predict(sample)))
print(model.w)
print(model.bias)
|
'''
1. 处理单个输入
2. 处理2分类
'''
class Perceptron(object):
    '''
    Parameters: lr (learning rate, controls the training step size),
    iterations (number of training passes), weight w and bias.
    '''
    def __init__(self,eta=0.01,iterations=10):
        self.lr = eta
        self.iterations = iterations
        self.w = 0.0
        self.bias = 0.0
    # Update rule:
    # delta_w = lr * (y - y') * x
    # delta_bias = lr * (y - y')
    def fit(self,X,Y):
        """Train with the perceptron rule for `iterations` passes over (X, Y)."""
        for _ in range(self.iterations):
            for i in range(len(X)):
                x = X[i]
                y = Y[i]
                # Error term: (true y - predicted y') scaled by a small factor.
                # lr too small -> training is slow and convergence hard to judge;
                # lr too large -> the step size overshoots and may never converge.
                update = self.lr * (y - self.predict(x))
                self.w += update * x
                self.bias += update
    # y' (prediction) = w * x + bias
    def net_input(self,x):
        """Raw activation: w * x + bias."""
        return self.w * x + self.bias
    def predict(self,x):
        """Binary decision: 1.0 when the activation is positive, else 0.0."""
        return 1.0 if self.net_input(x) > 0.0 else 0.0
# Training data: positive inputs are labelled 1.0, negative inputs 0.0.
x = [1, 2, 3, 10, 20, -2, -10, -100, -5, -20]
y = [1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0]
model = Perceptron(0.01,10)
model.fit(x,y)
# Classify a few unseen points and print the learned parameters.
test_x = [30, 40, -20, -60]
for i in range(len(test_x)):
    print('input {} => predict: {}'.format(test_x[i],model.predict(test_x[i])))
print(model.w)
print(model.bias)
|
zh
| 0.944729
|
1. 处理单个输入 2. 处理2分类 初始化参数lr(用于调整训练步长,即 learning),iterations(迭代次数),权重w 与 bias 偏执 #''' #公式: #Δw = lr * (y - y') * x #Δbias = lr * (y - y') #''' #首先获得真实值 y 与预测值 y' 的偏差,乘以一个较小的参数 # 【lr 值过小导致训练时间过长,难以判断是否收敛】 # 【lr 值过大则容易造成步长过大而无法收敛】 # y'(预测值) = w * x + bias
| 4.007336
| 4
|
bcbio/upload/galaxy.py
|
brentp/bcbio-nextgen
| 1
|
6626933
|
"""Move files to local Galaxy upload directory and add to Galaxy Data Libraries.
Required configurable variables in upload:
dir
"""
import collections
import os
from bcbio import utils
from bcbio.log import logger
from bcbio.upload import filesystem
# Avoid bioblend import errors, raising at time of use
try:
from bioblend.galaxy import GalaxyInstance
except ImportError:
GalaxyInstance = None
def update_file(finfo, sample_info, config):
    """Update file in Galaxy data libraries.

    Copies ``finfo`` into a flowcell-named storage folder under
    ``config["dir"]``, then links it into a Galaxy data library.

    Raises ImportError when bioblend is unavailable and ValueError when the
    Galaxy connection settings are missing from ``config``.
    """
    if GalaxyInstance is None:
        raise ImportError("Could not import bioblend.galaxy")
    folder_name = "%s_%s" % (config["fc_name"], config["fc_date"])
    storage_dir = utils.safe_makedir(os.path.join(config["dir"], folder_name))
    storage_file = filesystem.copy_finfo(finfo, storage_dir)
    # BUGFIX: dict.has_key() was removed in Python 3; use the `in` operator.
    if "galaxy_url" in config and "galaxy_api_key" in config:
        gi = GalaxyInstance(config["galaxy_url"], config["galaxy_api_key"])
    else:
        raise ValueError("Galaxy upload requires `galaxy_url` and `galaxy_api_key` in config")
    if storage_file and sample_info:
        _to_datalibrary(storage_file, gi, folder_name, sample_info, config)
def _to_datalibrary(fname, gi, folder_name, sample_info, config):
    """Upload a file to a Galaxy data library, inside a project-specific folder."""
    library = _get_library(gi, sample_info, config)
    contents = gi.libraries.show_library(library.id, contents=True)
    target_folder = _get_folder(gi, folder_name, library, contents)
    _file_to_folder(gi, fname, sample_info, contents, library, target_folder)
def _file_to_folder(gi, fname, sample_info, libitems, library, folder):
"""Check if file exists on Galaxy, if not upload to specified folder.
"""
full_name = os.path.join(folder["name"], os.path.basename(fname))
for item in libitems:
if item["name"] == full_name:
return item
logger.info("Uploading to Galaxy: %s" % full_name)
return gi.libraries.upload_from_galaxy_filesystem(library.id, fname, folder_id=folder["id"],
link_data_only="link_to_files",
dbkey=sample_info["genome_build"],
roles=library.roles)
def _get_folder(gi, folder_name, library, libitems):
"""Retrieve or create a folder inside the library with the right now.
"""
for item in libitems:
if item["type"] == "folder" and item["name"] == "/%s" % folder_name:
return item
return gi.libraries.create_folder(library.id, folder_name)[0]
GalaxyLibrary = collections.namedtuple("GalaxyLibrary", ["id", "name", "roles"])
def _get_library(gi, sample_info, config):
"""Retrieve the appropriate data library for the current user.
"""
galaxy_lib = sample_info.get("galaxy_library",
config.get("galaxy_library"))
role = sample_info.get("galaxy_role",
config.get("galaxy_role"))
if galaxy_lib:
return _get_library_from_name(gi, galaxy_lib, role, sample_info)
elif sample_info.get("private_libs"):
return _library_from_nglims(gi, sample_info)
else:
raise ValueError("No Galaxy library specified for sample: %s" %
sample_info["description"])
def _get_library_from_name(gi, name, role, sample_info):
for lib in gi.libraries.get_libraries():
if lib["name"].lower().find(name.lower()) >= 0:
return GalaxyLibrary(lib["id"], lib["name"], role)
else:
raise ValueError("Could not find Galaxy library matching %s for sample %s" %
(name, sample_info["description"]))
def _library_from_nglims(gi, sample_info):
    """Retrieve upload library from nglims specified user libraries.

    Preference order: a private library matching the sample's lab or
    researcher name, then the first private library, then a library looked
    up by the lab association or researcher name.
    """
    check_names = [sample_info[x].lower()
                   for x in ["lab_association", "researcher"]
                   if sample_info[x]]
    # Try to find a private library for the lab or researcher
    for libname, role in sample_info["private_libs"]:
        if libname.lower() in check_names:
            return _get_library_from_name(gi, libname, role, sample_info)
    # default to first private library if available
    if len(sample_info["private_libs"]) > 0:
        libname, role = sample_info["private_libs"][0]
        return _get_library_from_name(gi, libname, role, sample_info)
    # otherwise fall back to the lab association or researcher name
    if not check_names:
        # ROBUSTNESS: previously raised a bare IndexError here when both
        # lab_association and researcher were empty.
        raise ValueError("No Galaxy library information for sample: %s" %
                         sample_info["description"])
    return _get_library_from_name(gi, check_names[0], None, sample_info)
|
"""Move files to local Galaxy upload directory and add to Galaxy Data Libraries.
Required configurable variables in upload:
dir
"""
import collections
import os
from bcbio import utils
from bcbio.log import logger
from bcbio.upload import filesystem
# Avoid bioblend import errors, raising at time of use
try:
from bioblend.galaxy import GalaxyInstance
except ImportError:
GalaxyInstance = None
def update_file(finfo, sample_info, config):
    """Update file in Galaxy data libraries.

    Copies `finfo` into a flowcell-named storage folder, then links it into
    a Galaxy data library via the bioblend API.
    """
    if GalaxyInstance is None:
        raise ImportError("Could not import bioblend.galaxy")
    folder_name = "%s_%s" % (config["fc_name"], config["fc_date"])
    storage_dir = utils.safe_makedir(os.path.join(config["dir"], folder_name))
    storage_file = filesystem.copy_finfo(finfo, storage_dir)
    # NOTE(review): dict.has_key() exists only on Python 2; on Python 3 this
    # raises AttributeError -- should be `"galaxy_url" in config`.
    if config.has_key("galaxy_url") and config.has_key("galaxy_api_key"):
        gi = GalaxyInstance(config["galaxy_url"], config["galaxy_api_key"])
    else:
        raise ValueError("Galaxy upload requires `galaxy_url` and `galaxy_api_key` in config")
    if storage_file and sample_info:
        _to_datalibrary(storage_file, gi, folder_name, sample_info, config)
def _to_datalibrary(fname, gi, folder_name, sample_info, config):
    """Upload a file to a Galaxy data library in a project specific folder.
    """
    library = _get_library(gi, sample_info, config)
    libitems = gi.libraries.show_library(library.id, contents=True)
    folder = _get_folder(gi, folder_name, library, libitems)
    _file_to_folder(gi, fname, sample_info, libitems, library, folder)
def _file_to_folder(gi, fname, sample_info, libitems, library, folder):
    """Check if file exists on Galaxy, if not upload to specified folder.
    """
    full_name = os.path.join(folder["name"], os.path.basename(fname))
    for item in libitems:
        if item["name"] == full_name:
            # Already present in the library -- skip the upload.
            return item
    logger.info("Uploading to Galaxy: %s" % full_name)
    # Links the file in place rather than copying it into Galaxy's store.
    return gi.libraries.upload_from_galaxy_filesystem(library.id, fname, folder_id=folder["id"],
                                                      link_data_only="link_to_files",
                                                      dbkey=sample_info["genome_build"],
                                                      roles=library.roles)
def _get_folder(gi, folder_name, library, libitems):
    """Retrieve or create a folder inside the library with the right name.
    """
    for item in libitems:
        if item["type"] == "folder" and item["name"] == "/%s" % folder_name:
            return item
    return gi.libraries.create_folder(library.id, folder_name)[0]
GalaxyLibrary = collections.namedtuple("GalaxyLibrary", ["id", "name", "roles"])
def _get_library(gi, sample_info, config):
    """Retrieve the appropriate data library for the current user.
    """
    galaxy_lib = sample_info.get("galaxy_library",
                                 config.get("galaxy_library"))
    role = sample_info.get("galaxy_role",
                           config.get("galaxy_role"))
    if galaxy_lib:
        # An explicitly configured library wins.
        return _get_library_from_name(gi, galaxy_lib, role, sample_info)
    elif sample_info.get("private_libs"):
        return _library_from_nglims(gi, sample_info)
    else:
        raise ValueError("No Galaxy library specified for sample: %s" %
                         sample_info["description"])
def _get_library_from_name(gi, name, role, sample_info):
    """Return the first library whose name contains `name` (case-insensitive)."""
    for lib in gi.libraries.get_libraries():
        if lib["name"].lower().find(name.lower()) >= 0:
            return GalaxyLibrary(lib["id"], lib["name"], role)
    else:
        # for/else: only reached when no library matched.
        raise ValueError("Could not find Galaxy library matching %s for sample %s" %
                         (name, sample_info["description"]))
def _library_from_nglims(gi, sample_info):
    """Retrieve upload library from nglims specified user libraries.
    """
    check_names = [sample_info[x].lower()
                   for x in ["lab_association", "researcher"]
                   if sample_info[x]]
    for libname, role in sample_info["private_libs"]:
        # Try to find library for lab or researcher
        if libname.lower() in check_names:
            return _get_library_from_name(gi, libname, role, sample_info)
    # default to first private library if available
    if len(sample_info["private_libs"]) > 0:
        libname, role = sample_info["private_libs"][0]
        return _get_library_from_name(gi, libname, role, sample_info)
    # otherwise use the lab association or researcher name
    else:
        # NOTE(review): raises IndexError when both lab_association and
        # researcher are empty -- consider a clearer ValueError.
        libname = check_names[0]
        return _get_library_from_name(gi, libname, None, sample_info)
|
en
| 0.730962
|
Move files to local Galaxy upload directory and add to Galaxy Data Libraries. Required configurable variables in upload: dir # Avoid bioblend import errors, raising at time of use Update file in Galaxy data libraries. Upload a file to a Galaxy data library in a project specific folder. Check if file exists on Galaxy, if not upload to specified folder. Retrieve or create a folder inside the library with the right now. Retrieve the appropriate data library for the current user. Retrieve upload library from nglims specified user libraries. # Try to find library for lab or rsearcher # default to first private library if available # otherwise use the lab association or researcher name
| 2.650936
| 3
|
src/config.py
|
palmtreemodel/PalmTree
| 29
|
6626934
|
<gh_stars>10-100
"""
Configuration file.
"""
VOCAB_SIZE = 10000
USE_CUDA = True
DEVICES = [0]
CUDA_DEVICE = DEVICES[0]
VERSION = 1
MAXLEN = 10
|
"""
Configuration file.
"""
VOCAB_SIZE = 10000
USE_CUDA = True
DEVICES = [0]
CUDA_DEVICE = DEVICES[0]
VERSION = 1
MAXLEN = 10
|
en
| 0.385016
|
Configuration file.
| 1.239893
| 1
|
pipe-cli/src/utilities/cluster_manager.py
|
AlfiyaRF/cloud-pipeline
| 126
|
6626935
|
<reponame>AlfiyaRF/cloud-pipeline<filename>pipe-cli/src/utilities/cluster_manager.py
# Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
from src.api.preferenceapi import PreferenceAPI
from src.api.cluster import Cluster
DEFAULT_CORE_TYPE = 'm5'
CORE_TYPE_DELIMITER = '.'
CLUSTER_INSTANCE_TYPE_PREFERENCE = "cluster.instance.type"
class ClusterManager(object):
    """Sizes a cluster (instance type + node count) from a requested number
    of vCPU cores."""
    @classmethod
    def calculate_cluster_from_cores(cls, ncores, core_type=None):
        """Resolve the instance family and pick a type/count providing at
        least `ncores` cores."""
        core_type = ClusterManager.get_core_type(core_type=core_type)
        instance_types_list = Cluster.list_instance_types()
        if len(instance_types_list) == 0:
            raise RuntimeError("No instance types found")
        return ClusterManager.get_instance_type(core_type, instance_types_list, ncores)
    @classmethod
    def get_core_type(cls, core_type=None):
        """Determine the instance family: explicit argument first, then the
        instance_type / instance_size environment variables, then the
        cluster.instance.type preference, then the built-in default."""
        if core_type:
            return ClusterManager.parse_core_type(core_type)
        core_type_from_env = os.environ.get('instance_type')
        if not core_type_from_env:
            core_type_from_env = os.environ.get('instance_size')
        if not core_type_from_env:
            default_instance_type = PreferenceAPI().get_preference(CLUSTER_INSTANCE_TYPE_PREFERENCE)
            if default_instance_type and default_instance_type.value:
                core_type_from_env = default_instance_type.value
        if not core_type_from_env:
            core_type_from_env = DEFAULT_CORE_TYPE
        return ClusterManager.parse_core_type(core_type_from_env)
    @classmethod
    def parse_core_type(cls, core_type):
        """Strip the size suffix: 'm5.large' -> 'm5'."""
        if CORE_TYPE_DELIMITER in core_type:
            return core_type.split(CORE_TYPE_DELIMITER)[0]
        return core_type
    @classmethod
    def get_cores_by_instance_type(cls, instance_cores, cores):
        """Return the smallest multiple of `instance_cores` that is >= `cores`."""
        result = instance_cores
        while cores > result:
            result += instance_cores
        return result
    @classmethod
    def get_instance_type(cls, core_type, instances, cores):
        """Choose an instance type of family `core_type` plus a node count
        covering `cores`, moving to a smaller type when the wasted cores
        would be large relative to the next smaller size."""
        instances = [x for x in instances if core_type in x.name]
        if len(instances) == 0:
            raise RuntimeError("No instances found for type {}".format(core_type))
        instance_name = ''
        instance_cores = 0
        total_nodes = 0
        if cores == 0:
            # Nothing requested: empty name with zero cores/nodes.
            return ClusterManager.get_instance_type_object(instance_name, instance_cores, total_nodes)
        instances_sorted = sorted(instances, key=lambda x: x.vcpu, reverse=True)
        # NOTE(review): the loop stops before the last (smallest) type, which
        # is only used for comparison and can never itself be selected; when
        # only one matching type exists the loop never runs and an empty name
        # with count 0 is returned -- confirm both behaviours are intended.
        for instance_num in range(0, len(instances_sorted)-1):
            instance_type = instances_sorted[instance_num]
            instance_cores = instance_type.vcpu
            instance_name = instance_type.name
            if instance_cores > cores:
                # A single instance is already bigger than needed; try smaller.
                continue
            elif instance_cores == cores:
                return ClusterManager.get_instance_type_object(instance_name, instance_cores, 1)
            else:
                total_cores = ClusterManager.get_cores_by_instance_type(instance_cores, cores)
                total_nodes = math.ceil(float(cores)/float(instance_cores))
                total_cores_free = total_cores - cores
                node_free_cores = total_cores_free / total_nodes
                # Keep looking at smaller types while twice the per-node waste
                # is at least the next smaller type's core count.
                if node_free_cores * 2 >= instances_sorted[instance_num+1].vcpu:
                    continue
                else:
                    break
        return ClusterManager.get_instance_type_object(instance_name, instance_cores, total_nodes)
    @classmethod
    def get_instance_type_object(cls, instance_name, instance_cores, total_nodes):
        """Pack the selection into a {'name', 'cores', 'count'} dict."""
        return { "name": instance_name, "cores": instance_cores, "count": total_nodes }
|
# Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
from src.api.preferenceapi import PreferenceAPI
from src.api.cluster import Cluster
DEFAULT_CORE_TYPE = 'm5'
CORE_TYPE_DELIMITER = '.'
CLUSTER_INSTANCE_TYPE_PREFERENCE = "cluster.instance.type"
class ClusterManager(object):
    """Helpers to translate a requested number of cores into a concrete
    cloud instance type and node count."""

    @classmethod
    def calculate_cluster_from_cores(cls, ncores, core_type=None):
        """Return the instance selection ({"name", "cores", "count"}) that
        provides at least ncores cores of the given core family.

        Args:
            ncores: total number of cores requested
            core_type: optional core family override, e.g. 'm5' or 'm5.large'

        Raises:
            RuntimeError: when no instance types are available at all
        """
        core_type = ClusterManager.get_core_type(core_type=core_type)
        instance_types_list = Cluster.list_instance_types()
        if len(instance_types_list) == 0:
            raise RuntimeError("No instance types found")
        return ClusterManager.get_instance_type(core_type, instance_types_list, ncores)

    @classmethod
    def get_core_type(cls, core_type=None):
        """Resolve the core family to use, in order of precedence:
        explicit argument, 'instance_type' env var, 'instance_size' env var,
        the cluster.instance.type preference, then the built-in default."""
        if core_type:
            return ClusterManager.parse_core_type(core_type)
        core_type_from_env = os.environ.get('instance_type')
        if not core_type_from_env:
            core_type_from_env = os.environ.get('instance_size')
        if not core_type_from_env:
            default_instance_type = PreferenceAPI().get_preference(CLUSTER_INSTANCE_TYPE_PREFERENCE)
            if default_instance_type and default_instance_type.value:
                core_type_from_env = default_instance_type.value
        if not core_type_from_env:
            core_type_from_env = DEFAULT_CORE_TYPE
        return ClusterManager.parse_core_type(core_type_from_env)

    @classmethod
    def parse_core_type(cls, core_type):
        """Strip the size suffix from an instance type, e.g. 'm5.large' -> 'm5'."""
        if CORE_TYPE_DELIMITER in core_type:
            return core_type.split(CORE_TYPE_DELIMITER)[0]
        return core_type

    @classmethod
    def get_cores_by_instance_type(cls, instance_cores, cores):
        """Return the smallest multiple of instance_cores that is >= cores.

        Always returns at least instance_cores, matching the former incremental
        loop's behaviour for cores <= 0. O(1) replacement of the original
        one-step-at-a-time while loop.
        """
        nodes = max(1, int(math.ceil(float(cores) / float(instance_cores))))
        return nodes * instance_cores

    @classmethod
    def get_instance_type(cls, core_type, instances, cores):
        """Pick an instance type of the given core family plus the node count
        needed to provide at least `cores` cores.

        Args:
            core_type: core family prefix, matched as a substring of the name
            instances: objects with `.name` and `.vcpu` attributes
            cores: total number of cores requested; 0 selects nothing

        Returns:
            dict with keys "name", "cores" (vcpus per node) and "count" (number of nodes)

        Raises:
            RuntimeError: when no instance matches core_type
        """
        instances = [x for x in instances if core_type in x.name]
        if len(instances) == 0:
            raise RuntimeError("No instances found for type {}".format(core_type))
        instance_name = ''
        instance_cores = 0
        total_nodes = 0
        if cores == 0:
            # nothing requested: empty name, zero cores/nodes
            return ClusterManager.get_instance_type_object(instance_name, instance_cores, total_nodes)
        # walk instance types from largest to smallest vcpu count
        instances_sorted = sorted(instances, key=lambda x: x.vcpu, reverse=True)
        # NOTE(review): the last (smallest) type is only used for the look-ahead
        # comparison below and is never evaluated as a candidate itself
        for instance_num in range(0, len(instances_sorted)-1):
            instance_type = instances_sorted[instance_num]
            instance_cores = instance_type.vcpu
            instance_name = instance_type.name
            if instance_cores > cores:
                # a single node of this type already exceeds the request; try a smaller type
                continue
            elif instance_cores == cores:
                # exact fit on one node
                return ClusterManager.get_instance_type_object(instance_name, instance_cores, 1)
            else:
                total_cores = ClusterManager.get_cores_by_instance_type(instance_cores, cores)
                total_nodes = math.ceil(float(cores)/float(instance_cores))
                total_cores_free = total_cores - cores
                # average unused cores per node with this choice
                node_free_cores = total_cores_free / total_nodes
                if node_free_cores * 2 >= instances_sorted[instance_num+1].vcpu:
                    # too much waste: a smaller type could absorb the slack; keep looking
                    continue
                else:
                    break
        return ClusterManager.get_instance_type_object(instance_name, instance_cores, total_nodes)

    @classmethod
    def get_instance_type_object(cls, instance_name, instance_cores, total_nodes):
        """Bundle an instance selection into its result dictionary."""
        return {"name": instance_name, "cores": instance_cores, "count": total_nodes}
|
en
| 0.841866
|
# Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
| 1.633637
| 2
|
src/adk/api/remote_api.py
|
QuTech-Delft/qne-adk
| 1
|
6626936
|
<gh_stars>1-10
from pathlib import Path
from typing import Any, cast, Dict, List, Optional, Tuple, Union
import time
from apistar.exceptions import ErrorResponse
from adk import utils
from adk.api.qne_client import QneFrontendClient
from adk.exceptions import (ApiClientError, ApplicationNotFound, ExperimentFailed, ExperimentValueError,
JobTimeoutError)
from adk.generators.result_generator import ResultGenerator
from adk.managers.config_manager import ConfigManager
from adk.managers.auth_manager import AuthManager
from adk.managers.resource_manager import ResourceManager
from adk.type_aliases import (AppConfigType, AppResultType, AppSourceFilesType, AppSourceType, AppVersionType,
ApplicationType, ApplicationDataType, AssetType, assetNetworkType, ErrorDictType,
ExperimentType, FinalResultType, GenericNetworkData, ExperimentDataType, ResultType,
RoundSetType, round_resultType, cumulative_resultType, instructionsType, ChannelType,
parametersType, coordinatesType, listValuesType)
from adk.settings import BASE_DIR
class JobStatus:
    """Possible states of a qne_job: 'NEW', 'RUNNING', 'COMPLETE' or 'FAILED'."""
    NEW: str = 'NEW'
    RUNNING: str = 'RUNNING'
    COMPLETE: str = 'COMPLETE'
    FAILED: str = 'FAILED'


# States in which a qne_job has finished running, successfully or not
QNE_JOB_FINAL_STATES = (JobStatus.COMPLETE, JobStatus.FAILED)

# States in which a qne_job has finished running successfully
QNE_JOB_SUCCESS_STATES = (JobStatus.COMPLETE,)
class RemoteApi:
"""
Defines the methods used for remote communication with api-router
"""
def __init__(self, config_manager: ConfigManager) -> None:
    """Set up the remote API: auth manager, QNE client and cached credentials.

    Args:
        config_manager: manager giving access to the local configuration directory
    """
    self.__config_manager: ConfigManager = config_manager
    config_dir = self.__config_manager.get_config_dir()
    # AuthManager gets callbacks so it can (re)login/logout through this class
    self.auth_manager: AuthManager = AuthManager(config_dir,
                                                 self.__login_user, self.__login_anonymous, self.__logout_user)
    self.__qne_client = QneFrontendClient(self.auth_manager)
    self.__base_uri = self.auth_manager.get_active_host()
    # credentials/token for the active host, loaded from the auth store (may be None)
    self.__username: Optional[str] = self.auth_manager.get_username(self.__base_uri)
    self.__password: Optional[str] = self.auth_manager.get_password(self.__base_uri)
    self.__refresh_token: Optional[str] = self.auth_manager.load_token(self.__base_uri)
    self.__resource_manager = ResourceManager()
def __login_user(self, username: str, password: str, host: str) -> str:
    """Callback for AuthManager: log in on host and return the refresh token.

    The cached token is cleared first so a failed login does not leave a stale one.
    """
    self.__refresh_token = None
    self.__base_uri = host
    self.__username = username
    self.__password = password
    self.__refresh_token = self.__qne_client.login(username, password, host)
    return self.__refresh_token
def __login_anonymous(self) -> str:
    """Callback for AuthManager for anonymous login; currently a stub
    (returns None despite the str annotation)."""
    pass
def login(self, username: str, password: str, host: str) -> None:
    """
    Login the user on host (uri) based upon the username/password values given

    Args:
        username: the user name to log in with
        password: the password for the user
        host: base uri of the api-router to log in to
    """
    self.auth_manager.login(username, password, host)
def __logout_user(self, host: str) -> None:
    """Callback for AuthManager: log the current user out at host via the QNE client."""
    self.__qne_client.logout(host)
def logout(self, host: Optional[str]) -> bool:
    """
    Logout the user by deleting the entry in the resource.

    Returns:
        True when a logged-in session was ended, False when nobody was logged in
    """
    if self.__qne_client.is_logged_in():
        self.auth_manager.logout(host)
        return True
    return False
def get_active_host(self) -> str:
    """
    Get the host (base uri) the user is currently logged in to
    """
    return self.__base_uri
def list_applications(self) -> List[ApplicationType]:
    """
    Get the list of applications (public and enabled) from the api-router

    Returns:
        the list of applications
    """
    return self.__qne_client.list_applications()
def delete_application(self, application_id: Optional[str]) -> bool:
    """
    Delete the remote application for the authenticated user

    Args:
        application_id: id of the application to delete

    Returns:
        True when the application was deleted,
        False on a non-numeric id or an api error (e.g. not found 404)
    """
    if application_id is None or not application_id.isdigit():
        return False
    try:
        self.__qne_client.destroy_application(application_id)
    except ErrorResponse:
        return False
    # application deleted successfully
    return True
def clone_application(self, application_name: str, new_application_name: str,
                      new_application_path: Path) -> None:
    """
    Clone the application by copying the required files into the local application structure.

    Args:
        application_name: name of the application to be cloned
        new_application_name: name of the application after cloning
        new_application_path: location of application files

    Raises:
        ApplicationNotFound: when the application to clone does not exist remotely
    """
    application = self.__get_application_by_slug(application_name)
    if application is None:
        # bug fix: report the application we failed to find (the source),
        # not the name of the clone
        raise ApplicationNotFound(application_name)
    new_application_name = new_application_name.lower()
    # create the local directory structure for the cloned application
    local_app_src_path = new_application_path / 'src'
    local_app_config_path = new_application_path / 'config'
    local_app_src_path.mkdir(parents=True, exist_ok=True)
    local_app_config_path.mkdir(parents=True, exist_ok=True)
    # fetch the remote config/result views and store them as local json files
    app_config = self.__qne_client.app_config_application(str(application["url"]))
    utils.write_json_file(local_app_config_path / 'network.json', app_config["network"])
    utils.write_json_file(local_app_config_path / 'application.json', app_config["application"])
    app_result = self.__qne_client.app_result_application(str(application["url"]))
    utils.write_json_file(local_app_config_path / 'result.json',
                          {"round_result_view": app_result["round_result_view"],
                           "cumulative_result_view": app_result["cumulative_result_view"],
                           "final_result_view": app_result["final_result_view"]
                           })
    app_source = self.__qne_client.app_source_application(str(application["url"]))
    # Create python files from tarball
    self.__resource_manager.generate_resources(self.__qne_client, app_source, new_application_path)
    # Manifest.json configuration
    utils.write_json_file(new_application_path / 'manifest.json', utils.get_default_manifest(new_application_name))
    self.__config_manager.add_application(application_name=new_application_name,
                                          application_path=new_application_path)
def publish_application(self, application_data: ApplicationDataType) -> bool:
    """
    Publish the application by enabling its AppVersion.

    Args:
        application_data: application data from manifest.json

    Returns:
        True when published successfully, otherwise False
    """
    remote = application_data["remote"]
    if "app_version" not in remote:
        return False
    # the application must still exist remotely
    application = self.__get_application(application_data)
    if application is None:
        return False
    # refresh all remote info with the latest data
    remote["application"] = application["url"]
    remote["application_id"] = application["id"]
    remote["slug"] = application["slug"]
    app_version = self.__partial_update_app_version(application_data, application)
    version_info = remote["app_version"]
    version_info["app_version"] = app_version["url"]
    version_info["app_config"] = app_version["app_config"]
    version_info["app_result"] = app_version["app_result"]
    version_info["app_source"] = app_version["app_source"]
    version_info["enabled"] = not app_version["is_disabled"]
    version_info["version"] = app_version["version"]
    return True
def upload_application(self,  # pylint: disable=R0914
                       application_path: Path,
                       application_data: ApplicationDataType,
                       application_config: AppConfigType,
                       application_result: AppResultType,
                       application_source: List[str]) -> ApplicationDataType:
    """
    Upload the application to the remote server.

    Creates (when needed) the remote Application, a new AppVersion and its
    AppConfig/AppResult/AppSource components, recording each created reference
    in application_data as it goes so a partially-failed upload can be resumed.

    Args:
        application_path: location of application files
        application_data: application data from manifest.json
        application_config: application configuration structure
        application_result: application result structure
        application_source: source files for application

    Returns:
        The application_data updated with the remote references that were created
    """
    application = self.__get_application(application_data)
    if application is None:
        try:
            # create application data structure for remote
            application_data["remote"] = {"application": "",
                                          "application_id": 0,
                                          "slug": "",
                                          "app_version": {
                                              "enabled": False,
                                              "version": 0,
                                              "app_version": "",
                                              "app_config": "",
                                              "app_result": "",
                                              "app_source": ""
                                          }
                                          }
            # create Application
            application = self.__create_application(application_data)
        except Exception as e:
            # rethrow exception
            raise e
    if application is not None:
        # update all remote info with latest data
        application_data["remote"]["application"] = application["url"]
        application_data["remote"]["application_id"] = application["id"]
        application_data["remote"]["slug"] = application["slug"]
        app_version: AppVersionType = {}
        try:
            # create AppVersion
            app_version = self.__create_app_version(application)
            application_data["remote"]["app_version"]["app_version"] = app_version["url"]
            application_data["remote"]["app_version"]["enabled"] = not app_version["is_disabled"]
            application_data["remote"]["app_version"]["version"] = app_version["version"]
            # new app_version: reset the registered application references
            application_data["remote"]["app_version"]["app_config"] = ''
            application_data["remote"]["app_version"]["app_result"] = ''
            application_data["remote"]["app_version"]["app_source"] = ''
        except ApiClientError as e:
            if "Please complete" in str(e) and "app_version" in application_data["remote"]["app_version"] and \
                    application_data["remote"]["app_version"]["app_version"]:
                # The (incomplete) AppVersion already existed, use this one to connect the not yet registered objects
                app_version["url"] = application_data["remote"]["app_version"]["app_version"]
            else:
                # for now rethrow all other exceptions
                raise e
        try:
            # each component is only created when not yet registered (resume support)
            if not application_data["remote"]["app_version"]["app_config"]:
                # create AppConfig
                app_config = self.__create_app_config(application_data, application_config, app_version)
                application_data["remote"]["app_version"]["app_config"] = app_config["url"]
            if not application_data["remote"]["app_version"]["app_result"]:
                # create AppResult
                app_result = self.__create_app_result(application_result, app_version)
                application_data["remote"]["app_version"]["app_result"] = app_result["url"]
            if not application_data["remote"]["app_version"]["app_source"]:
                # create AppSource
                app_source = self.__create_app_source(application_data, app_version,
                                                      application_source, application_path)
                application_data["remote"]["app_version"]["app_source"] = app_source["url"]
            # Update application when AppVersion and its components is uploaded
            application_to_update = self.__create_application_type(application_data)
            self.__qne_client.partial_update_application(str(application["id"]), application_to_update)
        except Exception as e:
            # Something went wrong, delete the (just created) AppVersion (currently not supported by api-router)
            # app_version_to_delete: AppVersionType = {
            #     "application": application["url"],
            #     "version": app_version["version"]
            # }
            # app_version = self.__qne_client.delete_app_version(app_version_to_delete)
            # for now rethrow exception
            raise e
    return application_data
def __create_application(self, application_data: ApplicationDataType) -> ApplicationType:
    """
    Build an Application payload from the manifest data and register it with api-router
    """
    return self.__qne_client.create_application(
        self.__create_application_type(application_data))
@staticmethod
def __create_application_type(application_data: ApplicationDataType) -> ApplicationType:
    """
    Build the Application payload for api-router from the manifest data,
    defaulting each missing field to an empty string
    """
    manifest_application = application_data["application"]
    application: ApplicationType = {
        "name": manifest_application.get("name", ""),
        "description": manifest_application.get("description", ""),
        "author": manifest_application.get("author", ""),
        "email": manifest_application.get("email", "")
    }
    return application
def __create_app_version(self, application: ApplicationType, version: int = 1) -> AppVersionType:
    """
    Build an AppVersion payload and register it with api-router
    """
    return self.__qne_client.create_app_version(
        self.__create_app_version_type(application, version))
@staticmethod
def __create_app_version_type(application: ApplicationType, version: int = 1) -> AppVersionType:
    """
    Build the AppVersion payload for api-router; new versions start disabled.
    Note: the version argument is currently not part of the payload.
    """
    payload: AppVersionType = {
        "application": application["url"],
        "is_disabled": True
    }
    return payload
def __create_app_config(self, application_data: ApplicationDataType, application_config: AppConfigType,
                        app_version: AppVersionType) -> AppConfigType:
    """
    Build an AppConfig payload and register it with api-router
    """
    return self.__qne_client.create_app_config(
        self.__create_app_config_type(application_data, application_config, app_version))
@staticmethod
def __create_app_config_type(application_data: ApplicationDataType, application_config: AppConfigType,
                             app_version: AppVersionType) -> AppConfigType:
    """
    Build the AppConfig payload for api-router; multi_round defaults to False
    when the manifest does not specify it
    """
    app_config: AppConfigType = {
        "app_version": app_version["url"],
        "network": application_config["network"],
        "application": application_config["application"],
        "multi_round": application_data["application"].get("multi_round", False)
    }
    return app_config
def __create_app_result(self, application_result: AppResultType, app_version: AppVersionType) -> AppResultType:
    """
    Build an AppResult payload and register it with api-router
    """
    return self.__qne_client.create_app_result(
        self.__create_app_result_type(application_result, app_version))
@staticmethod
def __create_app_result_type(application_result: AppResultType, app_version: AppVersionType) -> AppResultType:
    """
    Build the AppResult payload for api-router from the three result views
    """
    payload: AppResultType = {"app_version": app_version["url"]}
    for view in ("round_result_view", "cumulative_result_view", "final_result_view"):
        payload[view] = application_result[view]
    return payload
def __create_app_source(self, application_data: ApplicationDataType, app_version: AppVersionType,
                        application_source: List[str], application_path: Path) -> AppSourceType:
    """
    Create and send an AppSource object to api-router.

    Packs the source files into a tarball, uploads it as multipart form data and
    removes the tarball again afterwards.

    Args:
        application_data: application data from manifest.json
        app_version: the AppVersion the source belongs to
        application_source: source files for the application
        application_path: location of application files
    """
    # create the resource (tarball with the source files)
    resource_path, resource_file = self.__resource_manager.prepare_resources(application_data, application_path,
                                                                             application_source)
    # send the resource while the tarball is still open
    with open(resource_path, 'rb') as tarball:
        app_source_files: AppSourceFilesType = {
            "source_files": (resource_file, tarball),  # pylint: disable=R1732
            "app_version": (None, app_version['url']),
            "output_parser": (None, '{}')
        }
        app_source = self.__qne_client.create_app_source(app_source_files)
    # delete the resource
    self.__resource_manager.delete_resources(application_data, application_path)
    return app_source
@staticmethod
def __partial_update_version_type(application: ApplicationType) -> AppVersionType:
    """
    Build the partial AppVersion payload that enables (publishes) a version
    """
    payload: AppVersionType = {
        "application": application["url"],
        "is_disabled": False
    }
    return payload
def __partial_update_app_version(self, application_data: ApplicationDataType,
                                 application: ApplicationType) -> AppVersionType:
    """
    Partially update the existing AppVersion at api-router (used to enable it)
    """
    app_version_url = application_data["remote"]["app_version"]["app_version"]
    return self.__qne_client.partial_update_app_version(
        app_version_url, self.__partial_update_version_type(application))
def __get_application(self, application_data: ApplicationDataType) -> Optional[ApplicationType]:
    """
    Get the application object from api-router for this remote application,
    looking it up by id first and falling back to the slug.

    Args:
        application_data: application information from manifest

    Returns:
        Application object or None when the application was not found remotely
    """
    remote = application_data["remote"]
    application = None
    if "application_id" in remote:
        application = self.__get_application_by_id(str(remote["application_id"]))
    if application is None and "slug" in remote:
        application = self.__get_application_by_slug(str(remote["slug"]))
    return application
def __get_application_by_id(self, application_id: Optional[str]) -> Optional[ApplicationType]:
    """
    Get the application object from api-router for this remote application

    Args:
        application_id: id of the application (a string of digits)

    Returns:
        Application object or None when the application was not found remotely
    """
    if application_id is None or not application_id.isdigit():
        return None
    wanted_id = int(application_id)
    return next((app for app in self.list_applications() if app["id"] == wanted_id), None)
def __get_application_by_slug(self, application_slug: str) -> Optional[ApplicationType]:
    """
    Get the application object from api-router for this remote application.
    Returned applications are not disabled and either are public with enabled versions
    or are owned by the user.

    Args:
        application_slug: Slug of the application

    Returns:
        Application object or None when the application was not found remotely
    """
    wanted_slug = application_slug.lower()
    return next((app for app in self.list_applications() if app["slug"] == wanted_slug), None)
def get_application_id(self, application_slug: str) -> Optional[int]:
    """
    Get the app id from api-router for this remote application

    Args:
        application_slug: Slug of the application

    Returns:
        id or None when the application was not found remotely
    """
    application = self.__get_application_by_slug(application_slug)
    return None if application is None else int(application["id"])
def get_application_config(self, application_slug: str) -> Optional[AppConfigType]:
    """
    Get the app config for the relevant app_version of this remote application.

    For the author this is the app_config of the highest numbered app_version
    (which can be disabled because it is not yet published); for other users it
    is the highest versioned enabled app_version.

    Args:
        application_slug: Slug of the application

    Returns:
        App config, or None when the application or a usable app config was not found
    """
    application = self.__get_application_by_slug(application_slug)
    if application is None:
        return None
    app_versions = self.__qne_client.app_versions_application(str(application["url"]))
    if not app_versions:
        return None
    app_config = self.__qne_client.app_config_appversion(str(app_versions[0]["url"]))
    return app_config if app_config["app_version"] is not None else None
def validate_application(self, application_slug: str) -> ErrorDictType:
    """
    Check that the application exists remotely (enabled/public or owned by the user).

    Args:
        application_slug: Slug of the application

    Returns:
        An empty error dict when validation passes, otherwise the dict contains
        the error message of the failed validation
    """
    error_dict: ErrorDictType = utils.get_empty_errordict()
    if self.__get_application_by_slug(application_slug) is None:
        error_dict["error"].append(f"Application '{application_slug}' does not exist, is not enabled, "
                                   f"is not public or is owned by someone else")
    return error_dict
def delete_experiment(self, experiment_id: Optional[str]) -> bool:
    """
    Delete the remote experiment for the authenticated user

    Args:
        experiment_id: the experiment to delete

    Returns:
        True when the experiment (including all assets/round_sets/results) was deleted,
        False on a non-numeric id or an api error (e.g. not found 404)
    """
    if experiment_id is None or not experiment_id.isdigit():
        return False
    try:
        self.__qne_client.destroy_experiment(experiment_id)
    except ErrorResponse:
        return False
    return True
def experiments_list(self) -> List[ExperimentType]:
    """
    Get the remote experiments for the authenticated user from api-router,
    enriching each experiment with the slug of its application as 'name'
    """
    experiments = self.__qne_client.list_experiments()
    for experiment in experiments:
        app_version = self.__qne_client.retrieve_appversion(str(experiment["app_version"]))
        application = self.__qne_client.retrieve_application(str(app_version["application"]))
        experiment["name"] = application["slug"]
    return experiments
def __create_experiment(self, application_slug: str, app_version: str) -> ExperimentType:
    """
    Build an experiment payload and register it with api-router
    """
    return self.__qne_client.create_experiment(
        self.__create_experiment_type(application_slug, app_version))
def __create_experiment_type(self, application_slug: str, app_version: str) -> ExperimentType:
    """
    Create and return an experiment object for sending to api-router.

    Validates that the logged in user, the application and its app config exist
    remotely and that app_version matches the application's current remote version.

    Raises:
        ExperimentValueError: when any of these validations fails
    """
    user = self.__qne_client.retrieve_user()
    if user is None:
        raise ExperimentValueError("Current logged in user not a valid remote user")
    application = self.__get_application_by_slug(application_slug)
    if application is None:
        raise ExperimentValueError(f"Application in experiment data '{application_slug}' is not a remote "
                                   f"application")
    app_config = self.get_application_config(application_slug)
    if app_config is None:
        raise ExperimentValueError(f"Application '{application_slug}' does not have a valid application "
                                   f"configuration")
    # the experiment must be created against the current remote app version
    app_version_url = str(app_config["app_version"])
    if app_version != app_version_url:
        raise ExperimentValueError(f"App version in experiment data '{app_version}' is not equal to the "
                                   f"current remote version of the application '{app_version_url}'")
    user_url = user["url"]
    experiment: ExperimentType = {
        "app_version": app_version_url,
        "personal_note": "Experiment created by qne-adk",
        "is_marked": False,
        "owner": user_url
    }
    return experiment
def __create_asset(self, experiment_asset: AssetType, experiment_url: str) -> AssetType:
    """
    Translate the asset to the api-router format and register it
    """
    return self.__qne_client.create_asset(
        self.__translate_asset(experiment_asset, experiment_url))
@staticmethod
def __translate_asset(asset_to_create: AssetType, experiment_url: str) -> AssetType:
    """
    Rewrite the asset's channels from the local format (node1/node2) to the
    format the backend expects (node_slug1/node_slug2), and attach the
    experiment url the asset belongs to.
    """
    asset_network = cast(assetNetworkType, asset_to_create["network"])
    asset_network["channels"] = [
        {"node_slug1": channel["node1"],
         "node_slug2": channel["node2"],
         "parameters": channel["parameters"]}
        for channel in asset_network["channels"]
    ]
    asset_to_create["experiment"] = experiment_url
    return asset_to_create
def __create_round_set(self, asset_url: str, number_of_rounds: int) -> RoundSetType:
    """
    Build a round set payload and register it with api-router
    """
    return self.__qne_client.create_roundset(
        self.__create_round_set_type(asset_url, number_of_rounds))
@staticmethod
def __create_round_set_type(asset_url: str, number_of_rounds: int) -> RoundSetType:
    """
    Build the RoundSet payload for api-router; new round sets start in status NEW
    """
    payload: RoundSetType = {
        "number_of_rounds": number_of_rounds,
        "status": "NEW",
        "input": asset_url
    }
    return payload
def run_experiment(self, experiment_data: ExperimentDataType) -> Tuple[str, str]:
    """
    Send the objects to api-router that make up a run of this experiment:
    1. an experiment for this app_version
    2. an asset object holding the app config
    3. a round set scheduling number_of_rounds rounds

    Args:
        experiment_data: holds the experiment data needed to generate the experiment

    Returns:
        Tuple of the round set url and the experiment id
    """
    meta = experiment_data["meta"]
    experiment = self.__create_experiment(meta["application"]["slug"],
                                          meta["application"]["app_version"])
    asset = self.__create_asset(experiment_data["asset"], str(experiment["url"]))
    round_set = self.__create_round_set(str(asset["url"]), meta["number_of_rounds"])
    return str(round_set["url"]), str(experiment["id"])
def get_results(self, round_set_url: str, block: bool = False, timeout: Optional[int] = None,
                wait: int = 2) -> Optional[List[ResultType]]:
    """
    For a running job, get the results. When block is True, poll api-router until the
    round set reaches a final state or 'timeout' seconds have elapsed.

    Args:
        round_set_url: holds the results and status of the run of the remote experiment
        block: When True keep polling until a final state is reached
        timeout: give up polling after this number of seconds (None = no timeout)
        wait: number of seconds to wait between calls to api router for the results

    Returns:
        The list of results, or None when the round set has not (successfully) completed

    Raises:
        JobTimeoutError: when blocking and the timeout is reached before a final state
    """
    start_time = time.time()
    round_set = self.__qne_client.retrieve_roundset(round_set_url)
    status = round_set["status"]
    while block and status not in QNE_JOB_FINAL_STATES:
        # check elapsed time before sleeping so we overshoot the timeout by at most 'wait'
        elapsed_time = time.time() - start_time
        if timeout is not None and elapsed_time > timeout:
            raise JobTimeoutError(f"Failed getting result for round set '{round_set_url}': timeout reached. "
                                  f"Try again later using command 'experiment results'")
        time.sleep(wait)
        round_set = self.__qne_client.retrieve_roundset(round_set_url)
        status = round_set["status"]
    return self.__get_results(round_set)
def __get_results(self, round_set: RoundSetType) -> Optional[List[ResultType]]:
    """
    For a round set in a final state, collect the results per round.

    Args:
        round_set: holds the results and status of the run of the remote experiment

    Returns:
        The list of round results, or None when the round set has not finished yet

    Raises:
        ExperimentFailed: when the round set finished unsuccessfully
    """
    status = round_set["status"]
    round_set_url = str(round_set["url"])
    if status not in QNE_JOB_FINAL_STATES:
        return None
    if status not in QNE_JOB_SUCCESS_STATES:
        # round set failed
        raise ExperimentFailed(f"Experiment for round set '{round_set_url}' failed. No results available")
    # round_set completed
    results = self.__qne_client.results_roundset(round_set_url)
    return [ResultGenerator.generate(round_set,
                                     cast(int, result["round_number"]),
                                     cast(round_resultType, result["round_result"]),
                                     cast(instructionsType, result["instructions"]),
                                     cast(cumulative_resultType, result["cumulative_result"]))
            for result in results]
def __get_final_result(self, round_set: RoundSetType) -> Optional[FinalResultType]:
    """
    For a round set in a final state, get the final result.

    Args:
        round_set: holds the results and status of the run of the remote experiment

    Returns:
        The final result (possibly empty when none was registered), or None when the
        round set has not finished yet

    Raises:
        ExperimentFailed: when the round set finished unsuccessfully
    """
    status = round_set["status"]
    round_set_url = str(round_set["url"])
    if status not in QNE_JOB_FINAL_STATES:
        return None
    if status not in QNE_JOB_SUCCESS_STATES:
        # round set failed
        raise ExperimentFailed(f"Experiment for round set '{round_set_url}' failed. No results available")
    # round_set completed
    final_result: FinalResultType = {}
    try:
        final_result = self.__qne_client.final_results_roundset(round_set_url)
    except ErrorResponse:
        # no final result registered for this round set; leave it empty
        pass
    return final_result
def list_networks(self) -> List[Dict[str, Any]]:
    """
    Function to list the networks known by api-router

    Returns:
        A list of networks
    """
    return self.__qne_client.list_networks()
@staticmethod
def __write_network_data(entity_name: str, network_data: GenericNetworkData) -> None:
    """
    Write the json file for the entity specified by 'entity_name'
    ('networks', 'channels', 'nodes' or 'templates') under the packaged
    networks directory.

    Args:
        entity_name: The type of the data to write
        network_data: The data to write
    """
    json_path = Path(BASE_DIR) / 'networks' / f'{entity_name}.json'
    utils.write_json_file(json_path, network_data)
@staticmethod
def __read_network_data(entity_name: str) -> GenericNetworkData:
    """Read the packaged networks json file for the given entity.

    TODO: In local api the network is already read. It may be more efficient to use these values.
    """
    json_path = Path(BASE_DIR) / 'networks' / f'{entity_name}.json'
    return cast(GenericNetworkData, utils.read_json_file(json_path))
    def __update_networks_networks(self, overwrite: bool) -> None:
        """
        Get the remote networks and update the local network definitions

        Args:
            overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network
            entities)
        """
        entity = "networks"
        networks_json = {}
        # start from scratch on overwrite, otherwise seed with the locally known networks and merge into them
        network_json = {} if overwrite else self.__read_network_data(entity)[entity]
        networks = self.__qne_client.list_networks()
        for network in networks:
            network_type_json: Dict[str, Union[str, List[str]]] = {}
            # fetch the full network definition; the list entry only holds summary fields
            network_type = self.__qne_client.retrieve_network(str(network["url"]))
            network_type_json["name"] = str(network_type["name"])
            network_type_json["slug"] = str(network_type["slug"])
            network_type_channels: List[str] = []
            for channel in cast(List[ChannelType], network_type["channels"]):
                network_type_channels.append(str(channel["slug"]))
            network_type_json["channels"] = network_type_channels
            # keyed by slug, so a remote network replaces a local entry with the same slug
            network_json[network["slug"]] = network_type_json
        networks_json[entity] = network_json
        self.__write_network_data(entity, networks_json)
@staticmethod
def __update_list(list_of_dict: List[Dict[str, Any]], dict_item: Dict[str, Any], overwrite: bool) -> None:
if overwrite:
list_of_dict.append(dict_item)
else:
# overwrite if it existed, otherwise append
found = False
for i, _ in enumerate(list_of_dict):
if list_of_dict[i]["slug"] == dict_item["slug"]:
list_of_dict[i] = dict_item
found = True
break
if not found:
list_of_dict.append(dict_item)
    def __update_networks_channels(self, overwrite: bool) -> None:
        """
        Get the remote channels and update the local channel definitions

        Args:
            overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network
            entities)
        """
        entity = "channels"
        channels_json = {}
        # start from scratch on overwrite, otherwise merge into the locally known channels
        channel_json = [] if overwrite else self.__read_network_data(entity)[entity]
        channels = self.__qne_client.list_channels()
        for channel in channels:
            channel_type_json: Dict[str, Union[str, List[str]]] = {"slug": str(channel["slug"])}
            # the remote channel references its nodes by url; store their slugs locally
            node = self.__qne_client.retrieve_node(str(channel["node1"]))
            channel_type_json["node1"] = str(node["slug"])
            node = self.__qne_client.retrieve_node(str(channel["node2"]))
            channel_type_json["node2"] = str(node["slug"])
            # channel parameters are template urls remotely; resolve each to its template slug
            channel_parameters: List[str] = []
            for parameter in cast(parametersType, channel["parameters"]):
                template = self.__qne_client.retrieve_template(parameter)
                channel_parameters.append(str(template["slug"]))
            channel_type_json["parameters"] = channel_parameters
            self.__update_list(channel_json, channel_type_json, overwrite)
        channels_json[entity] = channel_json
        self.__write_network_data(entity, channels_json)
    def __update_networks_nodes(self, overwrite: bool) -> None:
        """
        Get the remote nodes and update the local node definitions

        Args:
            overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network
            entities)
        """
        entity = "nodes"
        nodes_json = {}
        # start from scratch on overwrite, otherwise merge into the locally known nodes
        node_json = [] if overwrite else self.__read_network_data(entity)[entity]
        nodes = self.__qne_client.list_nodes()
        for node in nodes:
            node_type_json = {"name": node["name"], "slug": node["slug"],
                              "coordinates": {"latitude": cast(coordinatesType, node["coordinates"])["latitude"],
                                              "longitude": cast(coordinatesType, node["coordinates"])["longitude"]}}
            # node parameters are template urls remotely; resolve each to its template slug
            node_parameters: List[str] = []
            for parameter in cast(parametersType, node["node_parameters"]):
                template = self.__qne_client.retrieve_template(parameter)
                node_parameters.append(str(template["slug"]))
            node_type_json["node_parameters"] = node_parameters
            node_type_json["number_of_qubits"] = node["number_of_qubits"]
            # same template-url-to-slug translation for the qubit parameters
            qubit_parameters: List[str] = []
            for parameter in cast(parametersType, node["qubit_parameters"]):
                template = self.__qne_client.retrieve_template(parameter)
                qubit_parameters.append(str(template["slug"]))
            node_type_json["qubit_parameters"] = qubit_parameters
            self.__update_list(node_json, node_type_json, overwrite)
        nodes_json[entity] = node_json
        self.__write_network_data(entity, nodes_json)
def __update_networks_templates(self, overwrite: bool) -> None:
"""
Get the remote templates and update the local template definitions
Args:
overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network
entities)
"""
entity = "templates"
templates_json = {}
template_json = [] if overwrite else self.__read_network_data(entity)[entity]
templates = self.__qne_client.list_templates()
for template in templates:
del template["id"]
del template["url"]
del template["description"]
template_values = []
for value in cast(listValuesType, template["values"]):
value["unit"] = ""
value["scale_value"] = 1.0
template_values.append(value)
self.__update_list(template_json, template, overwrite)
templates_json[entity] = template_json
self.__write_network_data(entity, templates_json)
def update_networks(self, overwrite: bool) -> bool:
"""
Get the remote networks and update the local network definitions
Args:
overwrite: When True replace the local files, otherwise try to merge (keeping the new local network
entities)
Returns:
success (True) or not (False)
"""
try:
self.__update_networks_networks(overwrite)
self.__update_networks_channels(overwrite)
self.__update_networks_nodes(overwrite)
self.__update_networks_templates(overwrite)
except Exception:
return False
return True
|
from pathlib import Path
from typing import Any, cast, Dict, List, Optional, Tuple, Union
import time
from apistar.exceptions import ErrorResponse
from adk import utils
from adk.api.qne_client import QneFrontendClient
from adk.exceptions import (ApiClientError, ApplicationNotFound, ExperimentFailed, ExperimentValueError,
JobTimeoutError)
from adk.generators.result_generator import ResultGenerator
from adk.managers.config_manager import ConfigManager
from adk.managers.auth_manager import AuthManager
from adk.managers.resource_manager import ResourceManager
from adk.type_aliases import (AppConfigType, AppResultType, AppSourceFilesType, AppSourceType, AppVersionType,
ApplicationType, ApplicationDataType, AssetType, assetNetworkType, ErrorDictType,
ExperimentType, FinalResultType, GenericNetworkData, ExperimentDataType, ResultType,
RoundSetType, round_resultType, cumulative_resultType, instructionsType, ChannelType,
parametersType, coordinatesType, listValuesType)
from adk.settings import BASE_DIR
class JobStatus:
    """ The status of the qne_job can be 'NEW', 'RUNNING', 'COMPLETE', 'FAILED' """
    NEW: str = 'NEW'            # job created, not started yet
    RUNNING: str = 'RUNNING'    # job is being executed
    COMPLETE: str = 'COMPLETE'  # job finished successfully
    FAILED: str = 'FAILED'      # job finished with an error
# The final status of the qne_job, either successful or not
QNE_JOB_FINAL_STATES = (
    JobStatus.COMPLETE,
    JobStatus.FAILED,
)
# The status of the qne_job when it is executed successfully
# (subset of QNE_JOB_FINAL_STATES)
QNE_JOB_SUCCESS_STATES = (
    JobStatus.COMPLETE,
)
class RemoteApi:
    """
    Defines the methods used for remote communication with api-router
    """
    def __init__(self, config_manager: ConfigManager) -> None:
        """
        Initialize the remote api.

        Args:
            config_manager: manager that knows the local configuration directory and registered applications
        """
        self.__config_manager: ConfigManager = config_manager
        config_dir = self.__config_manager.get_config_dir()
        # the auth manager is given callbacks that perform the actual (anonymous) login/logout
        self.auth_manager: AuthManager = AuthManager(config_dir,
                                                     self.__login_user, self.__login_anonymous, self.__logout_user)
        self.__qne_client = QneFrontendClient(self.auth_manager)
        # session state for the currently active host
        self.__base_uri = self.auth_manager.get_active_host()
        self.__username: Optional[str] = self.auth_manager.get_username(self.__base_uri)
        self.__password: Optional[str] = self.auth_manager.get_password(self.__base_uri)
        self.__refresh_token: Optional[str] = self.auth_manager.load_token(self.__base_uri)
        self.__resource_manager = ResourceManager()
def __login_user(self, username: str, password: str, host: str) -> str:
self.__refresh_token = None
self.__base_uri = host
self.__username = username
self.__password = password
self.__refresh_token = self.__qne_client.login(username, password, host)
return self.__refresh_token
    def __login_anonymous(self) -> str:
        # NOTE(review): stub — annotated to return str but falls through and returns None;
        # confirm whether anonymous login is meant to be supported before relying on this
        pass
    def login(self, username: str, password: str, host: str) -> None:
        """
        Login the user on host (uri) based upon the username/password values given

        Args:
            username: the user name to log in with
            password: the password for the user
            host: the base uri of the api-router to log in to
        """
        self.auth_manager.login(username, password, host)
    def __logout_user(self, host: str) -> None:
        # delegate the actual logout (token invalidation) for this host to the qne client
        self.__qne_client.logout(host)
def logout(self, host: Optional[str]) -> bool:
"""
Logout the user by deleting the entry in the resource
"""
if not self.__qne_client.is_logged_in():
return False
self.auth_manager.logout(host)
return True
    def get_active_host(self) -> str:
        """
        Get the host as base_uri the user is logged in

        Returns:
            the base uri of the active host
        """
        return self.__base_uri
    def list_applications(self) -> List[ApplicationType]:
        """
        Get the list of applications (public and enabled) from the api-router

        Returns:
            the list of applications
        """
        # delegate to the qne client; no local caching is done
        return self.__qne_client.list_applications()
def delete_application(self, application_id: Optional[str]) -> bool:
"""
Delete the remote application for the authenticated user
Args:
application_id is the application to delete.
Returns:
False when not found 404 or another error
True when no error was raised
"""
if application_id is not None and application_id.isdigit():
try:
self.__qne_client.destroy_application(application_id)
except ErrorResponse:
return False
# application deleted successfully
return True
return False
def clone_application(self, application_name: str, new_application_name: str,
new_application_path: Path) -> None:
"""
Clone the application by copying the required files in the local application structure.
Args:
application_name: name of the application to be cloned
new_application_name: name of the application after cloning
new_application_path: location of application files
"""
application = self.__get_application_by_slug(application_name)
if application is None:
raise ApplicationNotFound(new_application_name)
new_application_name = new_application_name.lower()
local_app_src_path = new_application_path / 'src'
local_app_config_path = new_application_path / 'config'
local_app_src_path.mkdir(parents=True, exist_ok=True)
local_app_config_path.mkdir(parents=True, exist_ok=True)
app_config = self.__qne_client.app_config_application(str(application["url"]))
utils.write_json_file(local_app_config_path / 'network.json', app_config["network"])
utils.write_json_file(local_app_config_path / 'application.json', app_config["application"])
app_result = self.__qne_client.app_result_application(str(application["url"]))
utils.write_json_file(local_app_config_path / 'result.json',
{"round_result_view": app_result["round_result_view"],
"cumulative_result_view": app_result["cumulative_result_view"],
"final_result_view": app_result["final_result_view"]
})
app_source = self.__qne_client.app_source_application(str(application["url"]))
# Create python files from tarball
self.__resource_manager.generate_resources(self.__qne_client, app_source, new_application_path)
# Manifest.json configuration
utils.write_json_file(new_application_path / 'manifest.json', utils.get_default_manifest(new_application_name))
self.__config_manager.add_application(application_name=new_application_name,
application_path=new_application_path)
    def publish_application(self, application_data: ApplicationDataType) -> bool:
        """
        Publish the application by enabling the AppVersion.

        Args:
            application_data: application data from manifest.json

        Returns:
            True when published successfully, otherwise False
        """
        # publishing only makes sense when the application was uploaded before
        if "app_version" in application_data["remote"]:
            # check if application exists
            application = self.__get_application(application_data)
            # application must exist
            if application is not None:
                # update all remote info with latest data
                application_data["remote"]["application"] = application["url"]
                application_data["remote"]["application_id"] = application["id"]
                application_data["remote"]["slug"] = application["slug"]
                # enable the app version remotely; the response reflects the new state
                app_version = self.__partial_update_app_version(application_data, application)
                application_data["remote"]["app_version"]["app_version"] = app_version["url"]
                application_data["remote"]["app_version"]["app_config"] = app_version["app_config"]
                application_data["remote"]["app_version"]["app_result"] = app_version["app_result"]
                application_data["remote"]["app_version"]["app_source"] = app_version["app_source"]
                application_data["remote"]["app_version"]["enabled"] = not app_version["is_disabled"]
                application_data["remote"]["app_version"]["version"] = app_version["version"]
                return True
        return False
    def upload_application(self,  # pylint: disable=R0914
                           application_path: Path,
                           application_data: ApplicationDataType,
                           application_config: AppConfigType,
                           application_result: AppResultType,
                           application_source: List[str]) -> ApplicationDataType:
        """
        Upload the application to the remote server.
        Creates (or reuses) the Application and a new AppVersion, then registers the AppConfig,
        AppResult and AppSource objects for that version. All remote references are written back
        into application_data["remote"].

        Args:
            application_path: location of application files
            application_data: application data from manifest.json
            application_config: application configuration structure
            application_result: application result structure
            application_source: source files for application

        Returns:
            The application_data updated with the remote object references
            (corrected: the previous docstring claimed a bool return)
        """
        application = self.__get_application(application_data)
        if application is None:
            try:
                # create application data structure for remote
                application_data["remote"] = {"application": "",
                                              "application_id": 0,
                                              "slug": "",
                                              "app_version": {
                                                  "enabled": False,
                                                  "version": 0,
                                                  "app_version": "",
                                                  "app_config": "",
                                                  "app_result": "",
                                                  "app_source": ""
                                              }
                                              }
                # create Application
                application = self.__create_application(application_data)
            except Exception as e:
                # rethrow exception
                raise e
        if application is not None:
            # update all remote info with latest data
            application_data["remote"]["application"] = application["url"]
            application_data["remote"]["application_id"] = application["id"]
            application_data["remote"]["slug"] = application["slug"]
            app_version: AppVersionType = {}
            try:
                # create AppVersion
                app_version = self.__create_app_version(application)
                application_data["remote"]["app_version"]["app_version"] = app_version["url"]
                application_data["remote"]["app_version"]["enabled"] = not app_version["is_disabled"]
                application_data["remote"]["app_version"]["version"] = app_version["version"]
                # new app_version reset registered application references
                application_data["remote"]["app_version"]["app_config"] = ''
                application_data["remote"]["app_version"]["app_result"] = ''
                application_data["remote"]["app_version"]["app_source"] = ''
            except ApiClientError as e:
                if "Please complete" in str(e) and "app_version" in application_data["remote"]["app_version"] and \
                        application_data["remote"]["app_version"]["app_version"]:
                    # The (incomplete) AppVersion already existed, use this one to connect the not yet registered
                    # objects
                    app_version["url"] = application_data["remote"]["app_version"]["app_version"]
                else:
                    # for now rethrow all other exceptions
                    raise e
            try:
                # create and register only the objects that are not yet linked to this app_version
                if not application_data["remote"]["app_version"]["app_config"]:
                    # create AppConfig
                    app_config = self.__create_app_config(application_data, application_config, app_version)
                    application_data["remote"]["app_version"]["app_config"] = app_config["url"]
                if not application_data["remote"]["app_version"]["app_result"]:
                    # create AppResult
                    app_result = self.__create_app_result(application_result, app_version)
                    application_data["remote"]["app_version"]["app_result"] = app_result["url"]
                if not application_data["remote"]["app_version"]["app_source"]:
                    # create AppSource
                    app_source = self.__create_app_source(application_data, app_version,
                                                          application_source, application_path)
                    application_data["remote"]["app_version"]["app_source"] = app_source["url"]
                # Update application when AppVersion and its components is uploaded
                application_to_update = self.__create_application_type(application_data)
                self.__qne_client.partial_update_application(str(application["id"]), application_to_update)
            except Exception as e:
                # Something went wrong, delete the (just created) AppVersion (currently not supported by api-router)
                # app_version_to_delete: AppVersionType = {
                #     "application": application["url"],
                #     "version": app_version["version"]
                # }
                # app_version = self.__qne_client.delete_app_version(app_version_to_delete)
                # for now rethrow exception
                raise e
        return application_data
def __create_application(self, application_data: ApplicationDataType) -> ApplicationType:
"""
Create and send an Application object to api-router
"""
application_to_create = self.__create_application_type(application_data)
application = self.__qne_client.create_application(application_to_create)
return application
@staticmethod
def __create_application_type(application_data: ApplicationDataType) -> ApplicationType:
application_name = application_data["application"]["name"] if "name" in application_data["application"] else ""
application_description =\
application_data["application"]["description"] if "description" in application_data["application"] else ""
application_author =\
application_data["application"]["author"] if "author" in application_data["application"] else ""
application_email =\
application_data["application"]["email"] if "email" in application_data["application"] else ""
application: ApplicationType = {
"name": application_name,
"description": application_description,
"author": application_author,
"email": application_email
}
return application
def __create_app_version(self, application: ApplicationType, version: int = 1) -> AppVersionType:
"""
Create and send an AppVersion object to api-router
"""
app_version_to_create = self.__create_app_version_type(application, version)
app_version = self.__qne_client.create_app_version(app_version_to_create)
return app_version
    @staticmethod
    def __create_app_version_type(application: ApplicationType, version: int = 1) -> AppVersionType:
        """
        Build the AppVersion payload for api-router: linked to the application and disabled until published.
        """
        # NOTE(review): the 'version' parameter is accepted but not used in the payload;
        # presumably the backend assigns the version number — confirm before removing it
        app_version: AppVersionType = {
            "application": application["url"],
            "is_disabled": True
        }
        return app_version
def __create_app_config(self, application_data: ApplicationDataType, application_config: AppConfigType,
app_version: AppVersionType) -> AppConfigType:
"""
Create and send an AppConfig object to api-router
"""
app_config_to_create = self.__create_app_config_type(application_data, application_config, app_version)
app_config = self.__qne_client.create_app_config(app_config_to_create)
return app_config
@staticmethod
def __create_app_config_type(application_data: ApplicationDataType, application_config: AppConfigType,
app_version: AppVersionType) -> AppConfigType:
multi_round = application_data["application"]["multi_round"] if \
"multi_round" in application_data["application"] else False
app_config: AppConfigType = {
"app_version": app_version["url"],
"network": application_config["network"],
"application": application_config["application"],
"multi_round": multi_round
}
return app_config
def __create_app_result(self, application_result: AppResultType, app_version: AppVersionType) -> AppResultType:
"""
Create and send an AppResult object to api-router
"""
app_result_to_create = self.__create_app_result_type(application_result, app_version)
app_result = self.__qne_client.create_app_result(app_result_to_create)
return app_result
@staticmethod
def __create_app_result_type(application_result: AppResultType, app_version: AppVersionType) -> AppResultType:
app_result: AppResultType = {
"app_version": app_version["url"],
"round_result_view": application_result["round_result_view"],
"cumulative_result_view": application_result["cumulative_result_view"],
"final_result_view": application_result["final_result_view"]
}
return app_result
    def __create_app_source(self, application_data: ApplicationDataType, app_version: AppVersionType,
                            application_source: List[str], application_path: Path) -> AppSourceType:
        """
        Create and send an AppSource object to api-router

        Args:
            application_data: application data from manifest.json
            app_version: the AppVersion the source belongs to
            application_source: source files for the application
            application_path: location of application files

        Returns:
            The AppSource object as registered by api-router
        """
        # create the resource
        resource_path, resource_file = self.__resource_manager.prepare_resources(application_data, application_path,
                                                                                 application_source)
        # send the resource as a multipart upload: (filename, fileobj) for the tarball,
        # (None, value) for the plain form fields
        with open(resource_path, 'rb') as tarball:
            app_source_files: AppSourceFilesType = {
                "source_files": (resource_file, tarball),  # pylint: disable=R1732
                "app_version": (None, app_version['url']),
                "output_parser": (None, '{}')
            }
            app_source = self.__qne_client.create_app_source(app_source_files)
        # delete the resource
        self.__resource_manager.delete_resources(application_data, application_path)
        return app_source
@staticmethod
def __partial_update_version_type(application: ApplicationType) -> AppVersionType:
app_version: AppVersionType = {
"application": application["url"],
'is_disabled': False
}
return app_version
def __partial_update_app_version(self, application_data: ApplicationDataType,
application: ApplicationType) -> AppVersionType:
"""
Create and send an AppVersion object to api-router for updating it
"""
app_version_to_update = self.__partial_update_version_type(application)
app_version_url = application_data["remote"]["app_version"]["app_version"]
app_version = self.__qne_client.partial_update_app_version(app_version_url, app_version_to_update)
return app_version
def __get_application(self, application_data: ApplicationDataType) -> Optional[ApplicationType]:
"""
Get the application object from api-router for this remote application
Args:
application_data: application information from manifest
Returns:
Application object or None when the application was not found remotely
"""
application = None
if "application_id" in application_data["remote"]:
application_id = str(application_data["remote"]["application_id"])
application = self.__get_application_by_id(application_id)
if application is None and "slug" in application_data["remote"]:
application_slug = str(application_data["remote"]["slug"])
application = self.__get_application_by_slug(application_slug)
return application
def __get_application_by_id(self, application_id: Optional[str]) -> Optional[ApplicationType]:
"""
Get the application object from api-router for this remote application
Args:
application_id: id of the application
Returns:
Application object or None when the application was not found remotely
"""
if application_id is not None and application_id.isdigit():
app_list = self.list_applications()
for application in app_list:
if application["id"] == int(application_id):
return application
return None
def __get_application_by_slug(self, application_slug: str) -> Optional[ApplicationType]:
"""
Get the application object from api-router for this remote application.
Returned applications are not disabled and either are public with enabled versions or are owned by user
Args:
application_slug: Slug of the application
Returns:
Application object or None when the application was not found remotely
"""
app_list = self.list_applications()
for application in app_list:
if application["slug"] == application_slug.lower():
return application
return None
def get_application_id(self, application_slug: str) -> Optional[int]:
"""
Get the app id from api-router for this remote application
Args:
application_slug: Slug of the application
Returns:
id or None when the application was not found remotely
"""
application = self.__get_application_by_slug(application_slug)
if application is not None:
return int(application["id"])
return None
def get_application_config(self, application_slug: str) -> Optional[AppConfigType]:
"""
Get the app config for the relevant app_version from api-router for this remote application.
For the author of the application this is the app_config linked to the highest numbered app_version (which can
be disabled, because it is not yet published),
For all the other users this will be the highest versioned enabled app_version
Args:
application_slug: Slug of the application
Returns:
App config or None when the application was not found remotely
"""
application = self.__get_application_by_slug(application_slug)
if application is not None:
app_versions = self.__qne_client.app_versions_application(str(application["url"]))
if len(app_versions) > 0:
app_version = app_versions[0]
app_config = self.__qne_client.app_config_appversion(str(app_version["url"]))
if app_config["app_version"] is not None:
return app_config
return None
def validate_application(self, application_slug: str) -> ErrorDictType:
"""
Function that checks if:
- The application is valid by validating if it exists remotely
Args:
application_slug: Slug of the application
returns:
Returns empty list when all validations passes
Returns dict containing error messages of the validations that failed
"""
error_dict: ErrorDictType = utils.get_empty_errordict()
if None is self.__get_application_by_slug(application_slug):
error_dict["error"].append(f"Application '{application_slug}' does not exist, is not enabled, "
f"is not public or is owned by someone else")
return error_dict
def delete_experiment(self, experiment_id: Optional[str]) -> bool:
"""
Delete the remote experiment for the authenticated user
Args:
experiment_id: is the experiment to delete.
Returns:
False when not found 404 or another error occurs
True when no error was raised. The experiment is deleted (including all assets/round_sets/results)
"""
if experiment_id is not None and experiment_id.isdigit():
try:
self.__qne_client.destroy_experiment(experiment_id)
except ErrorResponse:
return False
return True
return False
def experiments_list(self) -> List[ExperimentType]:
"""
Get the remote experiments for the authenticated user from api-router
"""
experiment_list = self.__qne_client.list_experiments()
for experiment in experiment_list:
app_version = self.__qne_client.retrieve_appversion(str(experiment["app_version"]))
application = self.__qne_client.retrieve_application(str(app_version["application"]))
experiment["name"] = application["slug"]
return experiment_list
def __create_experiment(self, application_slug: str, app_version: str) -> ExperimentType:
"""
Create and send an experiment object to api-router
"""
experiment_to_create = self.__create_experiment_type(application_slug, app_version)
experiment = self.__qne_client.create_experiment(experiment_to_create)
return experiment
    def __create_experiment_type(self, application_slug: str, app_version: str) -> ExperimentType:
        """
        Create and return an experiment object for sending to api-router

        Args:
            application_slug: slug of the remote application to run
            app_version: app_version url recorded in the experiment data

        Raises:
            ExperimentValueError: when the remote user, application or app config cannot be resolved,
            or when app_version does not match the current remote app version
        """
        user = self.__qne_client.retrieve_user()
        if user is None:
            raise ExperimentValueError("Current logged in user not a valid remote user")
        application = self.__get_application_by_slug(application_slug)
        if application is None:
            raise ExperimentValueError(f"Application in experiment data '{application_slug}' is not a remote "
                                       f"application")
        app_config = self.get_application_config(application_slug)
        if app_config is None:
            raise ExperimentValueError(f"Application '{application_slug}' does not have a valid application "
                                       f"configuration")
        # guard against running an experiment that was generated for another application version
        app_version_url = str(app_config["app_version"])
        if app_version != app_version_url:
            raise ExperimentValueError(f"App version in experiment data '{app_version}' is not equal to the "
                                       f"current remote version of the application '{app_version_url}'")
        user_url = user["url"]
        experiment: ExperimentType = {
            "app_version": app_version_url,
            "personal_note": "Experiment created by qne-adk",
            "is_marked": False,
            "owner": user_url
        }
        return experiment
def __create_asset(self, experiment_asset: AssetType, experiment_url: str) -> AssetType:
"""
Create and send an asset object to api-router
"""
asset_to_create = self.__translate_asset(experiment_asset, experiment_url)
asset = self.__qne_client.create_asset(asset_to_create)
return asset
@staticmethod
def __translate_asset(asset_to_create: AssetType, experiment_url: str) -> AssetType:
"""
Because of differences in channel definition for api-router networks and asset networks we need a fix to
translate these (local) channels to a format that the backend expects.
Also the asset needs an experiment entry with the experiment url
"""
asset_network = cast(assetNetworkType, asset_to_create["network"])
experiment_channels = asset_network["channels"]
new_channels = []
for channel in experiment_channels:
new_channel = {"node_slug1": channel["node1"],
"node_slug2": channel["node2"],
"parameters": channel["parameters"]}
new_channels.append(new_channel)
asset_network["channels"] = new_channels
asset_to_create["experiment"] = experiment_url
return asset_to_create
def __create_round_set(self, asset_url: str, number_of_rounds: int) -> RoundSetType:
"""
Create and send a round set object to api-router
"""
round_set_to_create = self.__create_round_set_type(asset_url, number_of_rounds)
round_set = self.__qne_client.create_roundset(round_set_to_create)
return round_set
@staticmethod
def __create_round_set_type(asset_url: str, number_of_rounds: int) -> RoundSetType:
"""
Create and return a round set object for sending to api-router
"""
round_set: RoundSetType = {
"number_of_rounds": number_of_rounds,
"status": "NEW",
"input": asset_url
}
return round_set
    def run_experiment(self, experiment_data: ExperimentDataType) -> Tuple[str, str]:
        """
        Send the objects to api-router to run this experiment. The steps are:
        1. add an experiment for this app_version
        2. add an asset object that holds the app config
        3. add a round set for number_of_rounds rounds to run the experiment

        Args:
            experiment_data: which holds the experiment data needed to generate the experiment

        Returns:
            A tuple of (round set url, experiment id), both as strings
        """
        application_slug = experiment_data["meta"]["application"]["slug"]
        app_version = experiment_data["meta"]["application"]["app_version"]
        experiment = self.__create_experiment(application_slug, app_version)
        experiment_asset = experiment_data["asset"]
        # the asset must reference the experiment it belongs to
        asset = self.__create_asset(experiment_asset, str(experiment["url"]))
        number_of_rounds = experiment_data["meta"]["number_of_rounds"]
        round_set = self.__create_round_set(str(asset["url"]), number_of_rounds)
        return str(round_set["url"]), str(experiment["id"])
    def get_results(self, round_set_url: str, block: bool = False, timeout: Optional[int] = None,
                    wait: int = 2) -> Optional[List[ResultType]]:
        """
        For a job running, get the results. When block is True, block the call for 'timeout' seconds until the result
        is received

        Args:
            round_set_url: which holds the results and status of the run of the remote experiment
            block: When True retry for a number of seconds
            timeout: retry for this number of seconds to get the result (None = no timeout)
            wait: number of seconds to wait between calls to api router for the results

        Raises:
            JobTimeoutError: when block is True and no final state was reached within 'timeout' seconds
        """
        start_time = time.time()
        round_set = self.__qne_client.retrieve_roundset(round_set_url)
        status = round_set["status"]
        # poll the round set until it reaches a final state (or fall through immediately when not blocking)
        while block and status not in QNE_JOB_FINAL_STATES:
            elapsed_time = time.time() - start_time
            if timeout is not None and elapsed_time > timeout:
                raise JobTimeoutError(f"Failed getting result for round set '{round_set_url}': timeout reached. "
                                      f"Try again later using command 'experiment results'")
            time.sleep(wait)
            round_set = self.__qne_client.retrieve_roundset(round_set_url)
            status = round_set["status"]
        return self.__get_results(round_set)
    def __get_results(self, round_set: RoundSetType) -> Optional[List[ResultType]]:
        """
        For a completed job, get the results

        Args:
            round_set: which holds the results and status of the run of the remote experiment

        Returns:
            The generated results per round, or None when the job has not reached a final state yet.

        Raises:
            ExperimentFailed: when the round set ended in a failed state.
        """
        status = round_set["status"]
        round_set_url = round_set["url"]
        if status in QNE_JOB_FINAL_STATES:
            if status in QNE_JOB_SUCCESS_STATES:
                # round_set completed
                round_set_url = str(round_set["url"])
                results = self.__qne_client.results_roundset(round_set_url)
                # translate every raw backend result into the adk result format
                result_list = []
                for result in results:
                    round_result = ResultGenerator.generate(round_set,
                                                            cast(int, result["round_number"]),
                                                            cast(round_resultType, result["round_result"]),
                                                            cast(instructionsType, result["instructions"]),
                                                            cast(cumulative_resultType, result["cumulative_result"]))
                    result_list.append(round_result)
                return result_list
            # round set failed
            raise ExperimentFailed(f"Experiment for round set '{round_set_url}' failed. No results available")
        # job not finished yet
        return None
    def __get_final_result(self, round_set: RoundSetType) -> Optional[FinalResultType]:
        """
        For a completed job, get the final result.

        Args:
            round_set: which holds the results and status of the run of the remote experiment

        Returns:
            The final result (possibly empty), or None when the job has not reached a final state yet.

        Raises:
            ExperimentFailed: when the round set ended in a failed state.
        """
        status = round_set["status"]
        round_set_url = str(round_set["url"])
        if status in QNE_JOB_FINAL_STATES:
            if status in QNE_JOB_SUCCESS_STATES:
                # round_set completed
                final_result: FinalResultType = {}
                try:
                    final_result = self.__qne_client.final_results_roundset(round_set_url)
                except ErrorResponse:
                    # no final result registered for this round set; leave final_result empty
                    pass
                return final_result
            # round set failed
            raise ExperimentFailed(f"Experiment for round set '{round_set_url}' failed. No results available")
        # job not finished yet
        return None
def list_networks(self) -> List[Dict[str, Any]]:
    """
    Function to list the networks known by the remote api-router.

    Returns:
        A list of networks
    """
    # Thin pass-through to the remote client.
    return self.__qne_client.list_networks()
@staticmethod
def __write_network_data(entity_name: str, network_data: GenericNetworkData) -> None:
    """
    Writes the json file specified by the parameter 'entity_name'.
    entity_name can be 'networks', 'channels', 'nodes', 'templates'

    Args:
        entity_name: The type of the data to write (also the file stem)
        network_data: The data to write
    """
    # Local network definitions live under <BASE_DIR>/networks/<entity>.json.
    file_name = Path(BASE_DIR) / 'networks' / f'{entity_name}.json'
    utils.write_json_file(file_name, network_data)
@staticmethod
def __read_network_data(entity_name: str) -> GenericNetworkData:
    """
    Read and return the local json definitions for 'entity_name'
    ('networks', 'channels', 'nodes' or 'templates').

    TODO: In local api the network is already read. It may be more efficient to use these values
    """
    file_name = Path(BASE_DIR) / 'networks' / f'{entity_name}.json'
    return cast(GenericNetworkData, utils.read_json_file(file_name))
def __update_networks_networks(self, overwrite: bool) -> None:
    """
    Get the remote networks and update the local network definitions.

    Args:
        overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network
                   entities)
    """
    entity = "networks"
    # Start from scratch when overwriting; otherwise merge into the mapping
    # already stored in the local 'networks.json' file.
    merged = {} if overwrite else self.__read_network_data(entity)[entity]
    for remote_network in self.__qne_client.list_networks():
        # Fetch the full network record; the list endpoint only has the URL.
        detail = self.__qne_client.retrieve_network(str(remote_network["url"]))
        entry: Dict[str, Union[str, List[str]]] = {
            "name": str(detail["name"]),
            "slug": str(detail["slug"]),
            "channels": [str(channel["slug"])
                         for channel in cast(List[ChannelType], detail["channels"])],
        }
        merged[remote_network["slug"]] = entry
    self.__write_network_data(entity, {entity: merged})
@staticmethod
def __update_list(list_of_dict: List[Dict[str, Any]], dict_item: Dict[str, Any], overwrite: bool) -> None:
    """Insert dict_item into list_of_dict, replacing the entry with the same slug when merging."""
    if overwrite:
        # Caller starts from an empty list when overwriting, so just append.
        list_of_dict.append(dict_item)
        return
    for index, existing in enumerate(list_of_dict):
        if existing["slug"] == dict_item["slug"]:
            # Same slug already present locally: replace it with remote data.
            list_of_dict[index] = dict_item
            return
    # New slug: keep the locally-defined entries and add the remote one.
    list_of_dict.append(dict_item)
def __update_networks_channels(self, overwrite: bool) -> None:
    """
    Get the remote channels and update the local channel definitions.

    Args:
        overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network
                   entities)
    """
    entity = "channels"
    channels_json = {}
    # Start from an empty list when overwriting; otherwise merge into the
    # channel list already stored in the local 'channels.json' file.
    channel_json = [] if overwrite else self.__read_network_data(entity)[entity]
    channels = self.__qne_client.list_channels()
    for channel in channels:
        channel_type_json: Dict[str, Union[str, List[str]]] = {"slug": str(channel["slug"])}
        # Resolve the two node URLs to their slugs.
        node = self.__qne_client.retrieve_node(str(channel["node1"]))
        channel_type_json["node1"] = str(node["slug"])
        node = self.__qne_client.retrieve_node(str(channel["node2"]))
        channel_type_json["node2"] = str(node["slug"])
        # Resolve each parameter URL to its template slug.
        channel_parameters: List[str] = []
        for parameter in cast(parametersType, channel["parameters"]):
            template = self.__qne_client.retrieve_template(parameter)
            channel_parameters.append(str(template["slug"]))
        channel_type_json["parameters"] = channel_parameters
        # Insert or replace by slug depending on the overwrite flag.
        self.__update_list(channel_json, channel_type_json, overwrite)
    channels_json[entity] = channel_json
    self.__write_network_data(entity, channels_json)
def __update_networks_nodes(self, overwrite: bool) -> None:
    """
    Get the remote nodes and update the local node definitions.

    Args:
        overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network
                   entities)
    """
    entity = "nodes"
    nodes_json = {}
    # Start from an empty list when overwriting; otherwise merge into the
    # node list already stored in the local 'nodes.json' file.
    node_json = [] if overwrite else self.__read_network_data(entity)[entity]
    nodes = self.__qne_client.list_nodes()
    for node in nodes:
        node_type_json = {"name": node["name"], "slug": node["slug"],
                          "coordinates": {"latitude": cast(coordinatesType, node["coordinates"])["latitude"],
                                          "longitude": cast(coordinatesType, node["coordinates"])["longitude"]}}
        # Resolve node parameter URLs to their template slugs.
        node_parameters: List[str] = []
        for parameter in cast(parametersType, node["node_parameters"]):
            template = self.__qne_client.retrieve_template(parameter)
            node_parameters.append(str(template["slug"]))
        node_type_json["node_parameters"] = node_parameters
        node_type_json["number_of_qubits"] = node["number_of_qubits"]
        # Resolve qubit parameter URLs to their template slugs.
        qubit_parameters: List[str] = []
        for parameter in cast(parametersType, node["qubit_parameters"]):
            template = self.__qne_client.retrieve_template(parameter)
            qubit_parameters.append(str(template["slug"]))
        node_type_json["qubit_parameters"] = qubit_parameters
        # Insert or replace by slug depending on the overwrite flag.
        self.__update_list(node_json, node_type_json, overwrite)
    nodes_json[entity] = node_json
    self.__write_network_data(entity, nodes_json)
def __update_networks_templates(self, overwrite: bool) -> None:
    """
    Get the remote templates and update the local template definitions.

    Args:
        overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network
                   entities)
    """
    entity = "templates"
    # Start from an empty list when overwriting; otherwise merge into the
    # template list already stored in the local 'templates.json' file.
    template_json = [] if overwrite else self.__read_network_data(entity)[entity]
    for template in self.__qne_client.list_templates():
        # Strip backend-only fields that have no meaning in the local files.
        del template["id"]
        del template["url"]
        del template["description"]
        # Normalize each value dict in place; local definitions carry no
        # unit/scale information.  (The previous code also collected the
        # values into a 'template_values' list that was never used.)
        for value in cast(listValuesType, template["values"]):
            value["unit"] = ""
            value["scale_value"] = 1.0
        # Insert or replace by slug depending on the overwrite flag.
        self.__update_list(template_json, template, overwrite)
    self.__write_network_data(entity, {entity: template_json})
def update_networks(self, overwrite: bool) -> bool:
    """
    Get the remote networks and update the local network definitions.

    Args:
        overwrite: When True replace the local files, otherwise try to merge (keeping the new local network
                   entities)

    Returns:
        success (True) or not (False)
    """
    updaters = (
        self.__update_networks_networks,
        self.__update_networks_channels,
        self.__update_networks_nodes,
        self.__update_networks_templates,
    )
    try:
        for updater in updaters:
            updater(overwrite)
    except Exception:
        # Best effort: any failure during the update is reported as False.
        return False
    return True
|
en
| 0.825976
|
The status of the qne_job can be 'NEW', 'RUNNING', 'COMPLETE', 'FAILED' # The final status of the qne_job, either successful or not # The status of the qne_job when it is executed successfully Defines the methods used for remote communication with api-router Login the user on host (uri) based upon the username/password values given Logout the user by deleting the entry in the resource Get the host as base_uri the user is logged in Get the list of applications (public and enabled) from the api-router Returns: the list of applications Delete the remote application for the authenticated user Args: application_id is the application to delete. Returns: False when not found 404 or another error True when no error was raised # application deleted successfully Clone the application by copying the required files in the local application structure. Args: application_name: name of the application to be cloned new_application_name: name of the application after cloning new_application_path: location of application files # Create python files from tarball # Manifest.json configuration Publish the application by enabling the AppVersion. Args: application_data: application data from manifest.json Returns: True when published successfully, otherwise False # check if application exists # application must exist # update all remote info with latest data # pylint: disable=R0914 Upload the application to the remote server. 
Args: application_path: location of application files application_data: application data from manifest.json application_config: application configuration structure application_result: application result structure application_source: source files for application Returns: True when uploaded successfully, otherwise False # create application data structure for remote # create Application # rethrow exception # update all remote info with latest data # create AppVersion # new app_version reset registered application references # The (incomplete) AppVersion already existed, use this one to connect the not yet registered objects # for now rethrow all other exceptions # create AppConfig # create AppResult # create AppSource # Update application when AppVersion and its components is uploaded # Something went wrong, delete the (just created) AppVersion (currently not supported by api-router) # app_version_to_delete: AppVersionType = { # "application": application["url"], # "version": app_version["version"] # } # app_version = self.__qne_client.delete_app_version(app_version_to_delete) # for now rethrow exception Create and send an Application object to api-router Create and send an AppVersion object to api-router Create and send an AppConfig object to api-router Create and send an AppResult object to api-router Create and send an AppSource object to api-router # create the resource # send the resource # pylint: disable=R1732 # delete the resource Create and send an AppVersion object to api-router for updating it Get the application object from api-router for this remote application Args: application_data: application information from manifest Returns: Application object or None when the application was not found remotely Get the application object from api-router for this remote application Args: application_id: id of the application Returns: Application object or None when the application was not found remotely Get the application object from api-router for this remote 
application. Returned applications are not disabled and either are public with enabled versions or are owned by user Args: application_slug: Slug of the application Returns: Application object or None when the application was not found remotely Get the app id from api-router for this remote application Args: application_slug: Slug of the application Returns: id or None when the application was not found remotely Get the app config for the relevant app_version from api-router for this remote application. For the author of the application this is the app_config linked to the highest numbered app_version (which can be disabled, because it is not yet published), For all the other users this will be the highest versioned enabled app_version Args: application_slug: Slug of the application Returns: App config or None when the application was not found remotely Function that checks if: - The application is valid by validating if it exists remotely Args: application_slug: Slug of the application returns: Returns empty list when all validations passes Returns dict containing error messages of the validations that failed Delete the remote experiment for the authenticated user Args: experiment_id: is the experiment to delete. Returns: False when not found 404 or another error occurs True when no error was raised. The experiment is deleted (including all assets/round_sets/results) Get the remote experiments for the authenticated user from api-router Create and send an experiment object to api-router Create and return an experiment object for sending to api-router Create and send an asset object to api-router Because of differences in channel definition for api-router networks and asset networks we need a fix to translate these (local) channels to a format that the backend expects. 
Also the asset needs an experiment entry with the experiment url Create and send a round set object to api-router Create and return a round set object for sending to api-router Send the objects to api-router to run this experiment. The steps are: 1. add an experiment for this app_version 2. add an asset object that holds the app config 3. add a round set for number_of_rounds rounds to run the experiment Args: experiment_data: which holds the experiment data needed to generate the experiment For a job running, get the results. When block is True, block the call for 'timeout' seconds until the result is received Args: round_set_url: which holds the results and status of the run of the remote experiment block: When True retry for a number of seconds timeout: retry for this number of seconds to get the result (None = no timeout) wait: number of seconds to wait between calls to api router for the results For a completed job, get the results Args: round_set: which holds the results and status of the run of the remote experiment # round_set completed # round set failed For a completed job, get the final result. Args: round_set: which holds the results and status of the run of the remote experiment # round_set completed # leave final_result empty # round set failed Function to list the networks Returns: A list of networks Writes the json file specified by the parameter 'entity_name'. entity_name can be 'networks', 'channels', 'nodes', 'templates' Args: entity_name: The type of the data to read network_data: The data to write TODO: In local api the network is already read. It may be more efficient to use these values Get the remote networks and update the local network definitions Args: overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network entities) # overwrite if it existed, otherwise append Get the remote channels and update the local channel definitions Args: overwrite: When True, replace the local files. 
Otherwise try to merge (keeping the new local network entities) Get the remote nodes and update the local node definitions Args: overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network entities) Get the remote templates and update the local template definitions Args: overwrite: When True, replace the local files. Otherwise try to merge (keeping the new local network entities) Get the remote networks and update the local network definitions Args: overwrite: When True replace the local files, otherwise try to merge (keeping the new local network entities) Returns: success (True) or not (False)
| 1.8773
| 2
|
course_grader/apps.py
|
uw-it-aca/gradepage
| 1
|
6626937
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.apps import AppConfig
from restclients_core.dao import MockDAO
import os
class CourseGraderConfig(AppConfig):
    """Django application configuration for the course_grader app."""

    name = 'course_grader'

    def ready(self):
        # Once Django has loaded all apps, register this app's bundled
        # 'resources' directory of canned responses with the REST client DAO.
        resource_dir = os.path.join(os.path.dirname(__file__), 'resources')
        MockDAO.register_mock_path(resource_dir)
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.apps import AppConfig
from restclients_core.dao import MockDAO
import os
class CourseGraderConfig(AppConfig):
    """Django application configuration for the course_grader app."""

    # Must match the Python package name of the app.
    name = 'course_grader'

    def ready(self):
        # Called once Django has loaded all apps: point the restclients mock
        # DAO at this app's bundled 'resources' directory of canned responses.
        mocks = os.path.join(os.path.dirname(__file__), 'resources')
        MockDAO.register_mock_path(mocks)
|
en
| 0.374447
|
# Copyright 2021 UW-IT, University of Washington # SPDX-License-Identifier: Apache-2.0
| 1.986374
| 2
|
FPSTest/onChip/display_num.py
|
AndyZ-Salz/BadApple_QuecPython
| 0
|
6626938
|
<reponame>AndyZ-Salz/BadApple_QuecPython<filename>FPSTest/onChip/display_num.py<gh_stars>0
# -*- coding: UTF-8 -*-
import utime

'''
If the firmware build does not include the checkNet library, upload
checkNet.mpy to the module's usr directory and change
"import checkNet" into "from usr import checkNet".
'''
import checkNet
from usr import st7789v

'''
The two globals below are required; adjust their values to match your own
project.  They are printed before the user code runs.
'''
PROJECT_NAME = "QuecPython_ST7789V_LCD_FPSTest"
PROJECT_VERSION = "1.0.0"
checknet = checkNet.CheckNetwork(PROJECT_NAME, PROJECT_VERSION)

lcd_st7789v = st7789v.ST7789V(240, 240)


def _show_timed(label, path, x, y, width, height, mode):
    """Draw one image file at (x, y) and print '<label>:<elapsed>ms'."""
    start = utime.ticks_ms()
    lcd_st7789v.lcd_show_image_file(path, x, y, width, height, mode)
    elapsed = utime.ticks_diff(utime.ticks_ms(), start)
    print(label + ":" + str(elapsed) + "ms")


if __name__ == '__main__':
    '''
    When running this example manually the delay below can be removed.  When
    the file is renamed to main.py so it runs at power-on, the delay is needed,
    otherwise the output of poweron_print_once() cannot be seen on the CDC port.
    '''
    # utime.sleep(5)
    checknet.poweron_print_once()

    '''
    If the user code needs the network, wait_network_connected() must be
    called to wait until dial-up succeeds; for network-independent code it
    can be skipped.
    '''
    # checknet.wait_network_connected()

    # User code
    # ---------------------------------------------------------------------
    # FPS benchmark: draw five test images at each of three sizes and print
    # the elapsed milliseconds for every draw.
    # NOTE: the last two size arguments of lcd_show_image_file are the image
    # width/height, not an end coordinate.
    # (The previous version repeated the timing stanza 15 times inline.)

    # 80*80
    for index in range(1, 6):
        _show_timed("image80", "usr/image80_%d.txt" % index, 80, 80, 80, 80, 25)
    print()

    # 120*120
    for index in range(1, 6):
        _show_timed("image120", "usr/image120_%d.txt" % index, 60, 60, 120, 120, 17)
    print()

    # 240*240
    for index in range(1, 6):
        _show_timed("image240", "usr/image240_%d.txt" % index, 0, 0, 240, 240, 8)
|
# -*- coding: UTF-8 -*-
import utime

# If the firmware build lacks the checkNet library, upload checkNet.mpy to the
# module's usr directory and use "from usr import checkNet" instead.
import checkNet
from usr import st7789v

# These two globals are mandatory; they are printed before user code runs.
# Adjust their values to match your own project.
PROJECT_NAME = "QuecPython_ST7789V_LCD_FPSTest"
PROJECT_VERSION = "1.0.0"
checknet = checkNet.CheckNetwork(PROJECT_NAME, PROJECT_VERSION)

lcd_st7789v = st7789v.ST7789V(240, 240)

if __name__ == '__main__':
    # When the file is renamed to main.py for auto-start, enable the sleep
    # below so the poweron_print_once() output is visible on the CDC port;
    # for manual runs it can stay disabled.
    # utime.sleep(5)
    checknet.poweron_print_once()

    # Network-dependent code must first call wait_network_connected();
    # this benchmark is network-independent, so the call stays disabled.
    # checknet.wait_network_connected()

    # User code: draw five test images at each of three sizes and report the
    # elapsed milliseconds per draw.  The last two size parameters of
    # lcd_show_image_file are the image width/height, not an end coordinate.
    benchmarks = (
        ("image80", "usr/image80_%d.txt", 80, 80, 80, 80, 25),
        ("image120", "usr/image120_%d.txt", 60, 60, 120, 120, 17),
        ("image240", "usr/image240_%d.txt", 0, 0, 240, 240, 8),
    )
    for group, (label, pattern, x, y, width, height, mode) in enumerate(benchmarks):
        if group:
            print()
        for i in range(1, 6):
            time_a = utime.ticks_ms()
            lcd_st7789v.lcd_show_image_file(pattern % i, x, y, width, height, mode)
            time_b = utime.ticks_ms()
            print(label + ":" + str(utime.ticks_diff(time_b, time_a)) + "ms")
|
zh
| 0.908343
|
# -*- coding: UTF-8 -*- 如果用户使用的固件版本中没有checkNet库,请将checkNet.mpy文件上传到模块的usr目录, 并将 import checkNet 改为 from usr import checkNet 下面两个全局变量是必须有的,用户可以根据自己的实际项目修改下面两个全局变量的值, 在执行用户代码前,会先打印这两个变量的值。 手动运行本例程时,可以去掉该延时,如果将例程文件名改为main.py,希望开机自动运行时,需要加上该延时, 否则无法从CDC口看到下面的 poweron_print_once() 中打印的信息 # utime.sleep(5) 如果用户程序包含网络相关代码,必须执行 wait_network_connected() 等待网络就绪(拨号成功); 如果是网络无关代码,可以屏蔽 wait_network_connected() # checknet.wait_network_connected() # 用户代码 ######################【User code star】################################################### 要显示的图片像素为 99*100,下面设置显示图片的起始坐标位置为(70,70) 要注意:显示图片时,最后两个参数传入的是图片大小,即宽高,不是终点坐标 # 80*80 # 120*120 # 240*240 ######################【User code end 】###################################################
| 2.14234
| 2
|
test/TestSkipInsideYaml.py
|
xoxys/ansible-lint
| 0
|
6626939
|
<reponame>xoxys/ansible-lint
import pytest
ROLE_TASKS = '''\
---
- name: test 303
command: git log
changed_when: false
- name: test 303 (skipped)
command: git log # noqa 303
changed_when: false
'''
ROLE_TASKS_WITH_BLOCK = '''\
---
- name: bad git 1 # noqa 401
action: git a=b c=d
- name: bad git 2
action: git a=b c=d
- name: Block with rescue and always section
block:
- name: bad git 3 # noqa 401
action: git a=b c=d
- name: bad git 4
action: git a=b c=d
rescue:
- name: bad git 5 # noqa 401
action: git a=b c=d
- name: bad git 6
action: git a=b c=d
always:
- name: bad git 7 # noqa 401
action: git a=b c=d
- name: bad git 8
action: git a=b c=d
'''
PLAYBOOK = '''\
- hosts: all
tasks:
- name: test 402
action: hg
- name: test 402 (skipped) # noqa 402
action: hg
- name: test 401 and 501
become_user: alice
action: git
- name: test 401 and 501 (skipped) # noqa 401 501
become_user: alice
action: git
- name: test YAML and 206
get_url:
url: http://example.com/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/file.conf
dest: "{{dest_proj_path}}/foo.conf"
- name: test YAML and 206 (skipped)
get_url:
url: http://example.com/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/file.conf # noqa YAML
dest: "{{dest_proj_path}}/foo.conf" # noqa 206
- name: test 302
command: creates=B chmod 644 A
- name: test 302
command: warn=yes creates=B chmod 644 A
- name: test 302 (skipped via no warn)
command: warn=no creates=B chmod 644 A
- name: test 302 (skipped via skip_ansible_lint)
command: creates=B chmod 644 A
tags:
- skip_ansible_lint
'''
ROLE_META = '''\
galaxy_info: # noqa 701
author: your name # noqa 703
description: missing min_ansible_version and platforms. author default not changed
license: MIT
'''
ROLE_TASKS_WITH_BLOCK_BECOME = '''\
- hosts: localhost
tasks:
- name: foo
become: true
block:
- name: bar
become_user: jonhdaa
command: "/etc/test.sh"
'''
def test_role_tasks(default_text_runner):
    # Two 303 tasks, one silenced by an inline noqa -> exactly one finding.
    results = default_text_runner.run_role_tasks_main(ROLE_TASKS)
    assert len(results) == 1
def test_role_tasks_with_block(default_text_runner):
    # Eight 401 tasks, four of them skipped inline -> four findings remain,
    # showing noqa works inside block/rescue/always sections.
    results = default_text_runner.run_role_tasks_main(ROLE_TASKS_WITH_BLOCK)
    assert len(results) == 4
@pytest.mark.parametrize(
    ('playbook_src', 'results_num'),
    (
        (PLAYBOOK, 7),
        (ROLE_TASKS_WITH_BLOCK_BECOME, 0),
    ),
    ids=('generic', 'with block become inheritance'),
)
def test_playbook(default_text_runner, playbook_src, results_num):
    # Each playbook fixture carries its expected finding count: the generic
    # playbook leaves 7 unskipped violations, the block/become one leaves none.
    results = default_text_runner.run_playbook(playbook_src)
    assert len(results) == results_num
def test_role_meta(default_text_runner):
    # All meta findings (701, 703) are skipped inline -> no findings.
    results = default_text_runner.run_role_meta_main(ROLE_META)
    assert len(results) == 0
|
import pytest
ROLE_TASKS = '''\
---
- name: test 303
command: git log
changed_when: false
- name: test 303 (skipped)
command: git log # noqa 303
changed_when: false
'''
ROLE_TASKS_WITH_BLOCK = '''\
---
- name: bad git 1 # noqa 401
action: git a=b c=d
- name: bad git 2
action: git a=b c=d
- name: Block with rescue and always section
block:
- name: bad git 3 # noqa 401
action: git a=b c=d
- name: bad git 4
action: git a=b c=d
rescue:
- name: bad git 5 # noqa 401
action: git a=b c=d
- name: bad git 6
action: git a=b c=d
always:
- name: bad git 7 # noqa 401
action: git a=b c=d
- name: bad git 8
action: git a=b c=d
'''
PLAYBOOK = '''\
- hosts: all
tasks:
- name: test 402
action: hg
- name: test 402 (skipped) # noqa 402
action: hg
- name: test 401 and 501
become_user: alice
action: git
- name: test 401 and 501 (skipped) # noqa 401 501
become_user: alice
action: git
- name: test YAML and 206
get_url:
url: http://example.com/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/file.conf
dest: "{{dest_proj_path}}/foo.conf"
- name: test YAML and 206 (skipped)
get_url:
url: http://example.com/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/file.conf # noqa YAML
dest: "{{dest_proj_path}}/foo.conf" # noqa 206
- name: test 302
command: creates=B chmod 644 A
- name: test 302
command: warn=yes creates=B chmod 644 A
- name: test 302 (skipped via no warn)
command: warn=no creates=B chmod 644 A
- name: test 302 (skipped via skip_ansible_lint)
command: creates=B chmod 644 A
tags:
- skip_ansible_lint
'''
ROLE_META = '''\
galaxy_info: # noqa 701
author: your name # noqa 703
description: missing min_ansible_version and platforms. author default not changed
license: MIT
'''
ROLE_TASKS_WITH_BLOCK_BECOME = '''\
- hosts: localhost
tasks:
- name: foo
become: true
block:
- name: bar
become_user: jonhdaa
command: "/etc/test.sh"
'''
def test_role_tasks(default_text_runner):
    """Only the 303 task without an inline noqa is reported."""
    findings = default_text_runner.run_role_tasks_main(ROLE_TASKS)
    assert len(findings) == 1
def test_role_tasks_with_block(default_text_runner):
    """Inline noqa skips apply inside block/rescue/always: 4 of 8 remain."""
    findings = default_text_runner.run_role_tasks_main(ROLE_TASKS_WITH_BLOCK)
    assert len(findings) == 4
@pytest.mark.parametrize(
    ('playbook_src', 'results_num'),
    (
        (PLAYBOOK, 7),
        (ROLE_TASKS_WITH_BLOCK_BECOME, 0),
    ),
    ids=('generic', 'with block become inheritance'),
)
def test_playbook(default_text_runner, playbook_src, results_num):
    """Each playbook fixture yields its expected number of lint findings."""
    findings = default_text_runner.run_playbook(playbook_src)
    assert len(findings) == results_num
def test_role_meta(default_text_runner):
    """Every meta finding is skipped inline, so nothing is reported."""
    findings = default_text_runner.run_role_meta_main(ROLE_META)
    assert len(findings) == 0
|
en
| 0.462376
|
\ --- - name: test 303 command: git log changed_when: false - name: test 303 (skipped) command: git log # noqa 303 changed_when: false \ --- - name: bad git 1 # noqa 401 action: git a=b c=d - name: bad git 2 action: git a=b c=d - name: Block with rescue and always section block: - name: bad git 3 # noqa 401 action: git a=b c=d - name: bad git 4 action: git a=b c=d rescue: - name: bad git 5 # noqa 401 action: git a=b c=d - name: bad git 6 action: git a=b c=d always: - name: bad git 7 # noqa 401 action: git a=b c=d - name: bad git 8 action: git a=b c=d \ - hosts: all tasks: - name: test 402 action: hg - name: test 402 (skipped) # noqa 402 action: hg - name: test 401 and 501 become_user: alice action: git - name: test 401 and 501 (skipped) # noqa 401 501 become_user: alice action: git - name: test YAML and 206 get_url: url: http://example.com/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/file.conf dest: "{{dest_proj_path}}/foo.conf" - name: test YAML and 206 (skipped) get_url: url: http://example.com/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/file.conf # noqa YAML dest: "{{dest_proj_path}}/foo.conf" # noqa 206 - name: test 302 command: creates=B chmod 644 A - name: test 302 command: warn=yes creates=B chmod 644 A - name: test 302 (skipped via no warn) command: warn=no creates=B chmod 644 A - name: test 302 (skipped via skip_ansible_lint) command: creates=B chmod 644 A tags: - skip_ansible_lint \ galaxy_info: # noqa 701 author: your name # noqa 703 description: missing min_ansible_version and platforms. author default not changed license: MIT \ - hosts: localhost tasks: - name: foo become: true block: - name: bar become_user: jonhdaa command: "/etc/test.sh"
| 2.032583
| 2
|
pyecobee/objects/thermostat.py
|
gleblanc1783/Pyecobee
| 29
|
6626940
|
"""
This module is home to the Thermostat class
"""
from pyecobee.ecobee_object import EcobeeObject
class Thermostat(EcobeeObject):
    """
    This class has been auto generated by scraping
    https://www.ecobee.com/home/developer/api/documentation/v1/objects/Thermostat.shtml

    Attribute names have been generated by converting ecobee property
    names from camelCase to snake_case.

    A getter property is exposed for every attribute, and a setter
    property only for the attributes whose ecobee READONLY value is
    "no".  ``identifier`` is the sole required __init__ argument; every
    other argument defaults to None.
    """

    __slots__ = [
        '_identifier',
        '_name',
        '_thermostat_rev',
        '_is_registered',
        '_model_number',
        '_brand',
        '_features',
        '_last_modified',
        '_thermostat_time',
        '_utc_time',
        '_audio',
        '_alerts',
        '_reminders',
        '_settings',
        '_runtime',
        '_extended_runtime',
        '_electricity',
        '_devices',
        '_location',
        '_energy',
        '_technician',
        '_utility',
        '_management',
        '_weather',
        '_events',
        '_program',
        '_house_details',
        '_oem_cfg',
        '_equipment_status',
        '_notification_settings',
        '_privacy',
        '_version',
        '_security_settings',
        '_filter_subscription',
        '_remote_sensors',
    ]

    # Bidirectional snake_case <-> camelCase translation table used when
    # mapping between Python attribute names and ecobee API field names.
    attribute_name_map = {
        'identifier': 'identifier',
        'name': 'name',
        'thermostat_rev': 'thermostatRev',
        'thermostatRev': 'thermostat_rev',
        'is_registered': 'isRegistered',
        'isRegistered': 'is_registered',
        'model_number': 'modelNumber',
        'modelNumber': 'model_number',
        'brand': 'brand',
        'features': 'features',
        'last_modified': 'lastModified',
        'lastModified': 'last_modified',
        'thermostat_time': 'thermostatTime',
        'thermostatTime': 'thermostat_time',
        'utc_time': 'utcTime',
        'utcTime': 'utc_time',
        'audio': 'audio',
        'alerts': 'alerts',
        'reminders': 'reminders',
        'settings': 'settings',
        'runtime': 'runtime',
        'extended_runtime': 'extendedRuntime',
        'extendedRuntime': 'extended_runtime',
        'electricity': 'electricity',
        'devices': 'devices',
        'location': 'location',
        'energy': 'energy',
        'technician': 'technician',
        'utility': 'utility',
        'management': 'management',
        'weather': 'weather',
        'events': 'events',
        'program': 'program',
        'house_details': 'houseDetails',
        'houseDetails': 'house_details',
        'oem_cfg': 'oemCfg',
        'oemCfg': 'oem_cfg',
        'equipment_status': 'equipmentStatus',
        'equipmentStatus': 'equipment_status',
        'notification_settings': 'notificationSettings',
        'notificationSettings': 'notification_settings',
        'privacy': 'privacy',
        'version': 'version',
        'security_settings': 'securitySettings',
        'securitySettings': 'security_settings',
        'filter_subscription': 'filterSubscription',
        'filterSubscription': 'filter_subscription',
        'remote_sensors': 'remoteSensors',
        'remoteSensors': 'remote_sensors',
    }

    # Declared ecobee type of every attribute (consumed when an API
    # response is deserialised into Python objects).
    attribute_type_map = {
        'identifier': 'six.text_type',
        'name': 'six.text_type',
        'thermostat_rev': 'six.text_type',
        'is_registered': 'bool',
        'model_number': 'six.text_type',
        'brand': 'six.text_type',
        'features': 'six.text_type',
        'last_modified': 'six.text_type',
        'thermostat_time': 'six.text_type',
        'utc_time': 'six.text_type',
        'audio': 'Audio',
        'alerts': 'List[Alert]',
        'reminders': 'List[ThermostatReminder2]',
        'settings': 'Settings',
        'runtime': 'Runtime',
        'extended_runtime': 'ExtendedRuntime',
        'electricity': 'Electricity',
        'devices': 'List[Device]',
        'location': 'Location',
        'energy': 'Energy',
        'technician': 'Technician',
        'utility': 'Utility',
        'management': 'Management',
        'weather': 'Weather',
        'events': 'List[Event]',
        'program': 'Program',
        'house_details': 'HouseDetails',
        'oem_cfg': 'ThermostatOemCfg',
        'equipment_status': 'six.text_type',
        'notification_settings': 'NotificationSettings',
        'privacy': 'ThermostatPrivacy',
        'version': 'Version',
        'security_settings': 'SecuritySettings',
        'filter_subscription': 'ApiFilterSubscription',
        'remote_sensors': 'List[RemoteSensor]',
    }

    def __init__(
        self,
        identifier,
        name=None,
        thermostat_rev=None,
        is_registered=None,
        model_number=None,
        brand=None,
        features=None,
        last_modified=None,
        thermostat_time=None,
        utc_time=None,
        audio=None,
        alerts=None,
        reminders=None,
        settings=None,
        runtime=None,
        extended_runtime=None,
        electricity=None,
        devices=None,
        location=None,
        energy=None,
        technician=None,
        utility=None,
        management=None,
        weather=None,
        events=None,
        program=None,
        house_details=None,
        oem_cfg=None,
        equipment_status=None,
        notification_settings=None,
        privacy=None,
        version=None,
        security_settings=None,
        filter_subscription=None,
        remote_sensors=None,
    ):
        """
        Construct a Thermostat instance.

        :param identifier: the thermostat's unique identifier; the only
            required argument.  All other arguments are optional and
            default to None.
        """
        # Each argument name is its slot name minus the leading
        # underscore, so all 35 slots can be filled in a single pass.
        arguments = locals()
        for slot in Thermostat.__slots__:
            setattr(self, slot, arguments[slot[1:]])


def _attach_thermostat_properties():
    """
    Install one property per Thermostat attribute: read-only by
    default, read/write for the attributes ecobee marks READONLY="no".
    """
    writable = {
        'name', 'audio', 'settings', 'location', 'energy', 'program',
        'house_details', 'oem_cfg', 'notification_settings', 'privacy',
        'security_settings', 'filter_subscription',
    }
    for slot in Thermostat.__slots__:
        attribute = slot[1:]

        # Bind the slot name as a default argument so each closure
        # captures its own value (avoids the late-binding pitfall).
        def getter(self, _slot=slot):
            return getattr(self, _slot)

        getter.__doc__ = (
            'Gets the %s attribute of this Thermostat instance.\n\n'
            ':rtype: %s'
            % (attribute, Thermostat.attribute_type_map[attribute])
        )

        setter = None
        if attribute in writable:
            def setter(self, value, _slot=slot):
                setattr(self, _slot, value)

        setattr(Thermostat, attribute, property(getter, setter))


_attach_thermostat_properties()
del _attach_thermostat_properties
|
"""
This module is home to the Thermostat class
"""
from pyecobee.ecobee_object import EcobeeObject
class Thermostat(EcobeeObject):
    """
    This class has been auto generated by scraping
    https://www.ecobee.com/home/developer/api/documentation/v1/objects/Thermostat.shtml

    ecobee property names have been converted from camelCase to
    snake_case to produce the attribute names.

    Every attribute has a getter property; the attributes the ecobee
    API documents as writable (READONLY == "no") also have a setter.
    ``identifier`` is required by __init__; all remaining arguments
    default to None.
    """

    __slots__ = [
        '_identifier',
        '_name',
        '_thermostat_rev',
        '_is_registered',
        '_model_number',
        '_brand',
        '_features',
        '_last_modified',
        '_thermostat_time',
        '_utc_time',
        '_audio',
        '_alerts',
        '_reminders',
        '_settings',
        '_runtime',
        '_extended_runtime',
        '_electricity',
        '_devices',
        '_location',
        '_energy',
        '_technician',
        '_utility',
        '_management',
        '_weather',
        '_events',
        '_program',
        '_house_details',
        '_oem_cfg',
        '_equipment_status',
        '_notification_settings',
        '_privacy',
        '_version',
        '_security_settings',
        '_filter_subscription',
        '_remote_sensors',
    ]

    # Two-way snake_case <-> camelCase lookup used for (de)serialising
    # ecobee API payloads.
    attribute_name_map = {
        'identifier': 'identifier',
        'name': 'name',
        'thermostat_rev': 'thermostatRev',
        'thermostatRev': 'thermostat_rev',
        'is_registered': 'isRegistered',
        'isRegistered': 'is_registered',
        'model_number': 'modelNumber',
        'modelNumber': 'model_number',
        'brand': 'brand',
        'features': 'features',
        'last_modified': 'lastModified',
        'lastModified': 'last_modified',
        'thermostat_time': 'thermostatTime',
        'thermostatTime': 'thermostat_time',
        'utc_time': 'utcTime',
        'utcTime': 'utc_time',
        'audio': 'audio',
        'alerts': 'alerts',
        'reminders': 'reminders',
        'settings': 'settings',
        'runtime': 'runtime',
        'extended_runtime': 'extendedRuntime',
        'extendedRuntime': 'extended_runtime',
        'electricity': 'electricity',
        'devices': 'devices',
        'location': 'location',
        'energy': 'energy',
        'technician': 'technician',
        'utility': 'utility',
        'management': 'management',
        'weather': 'weather',
        'events': 'events',
        'program': 'program',
        'house_details': 'houseDetails',
        'houseDetails': 'house_details',
        'oem_cfg': 'oemCfg',
        'oemCfg': 'oem_cfg',
        'equipment_status': 'equipmentStatus',
        'equipmentStatus': 'equipment_status',
        'notification_settings': 'notificationSettings',
        'notificationSettings': 'notification_settings',
        'privacy': 'privacy',
        'version': 'version',
        'security_settings': 'securitySettings',
        'securitySettings': 'security_settings',
        'filter_subscription': 'filterSubscription',
        'filterSubscription': 'filter_subscription',
        'remote_sensors': 'remoteSensors',
        'remoteSensors': 'remote_sensors',
    }

    # ecobee-declared type for each attribute, used when deserialising
    # API responses.
    attribute_type_map = {
        'identifier': 'six.text_type',
        'name': 'six.text_type',
        'thermostat_rev': 'six.text_type',
        'is_registered': 'bool',
        'model_number': 'six.text_type',
        'brand': 'six.text_type',
        'features': 'six.text_type',
        'last_modified': 'six.text_type',
        'thermostat_time': 'six.text_type',
        'utc_time': 'six.text_type',
        'audio': 'Audio',
        'alerts': 'List[Alert]',
        'reminders': 'List[ThermostatReminder2]',
        'settings': 'Settings',
        'runtime': 'Runtime',
        'extended_runtime': 'ExtendedRuntime',
        'electricity': 'Electricity',
        'devices': 'List[Device]',
        'location': 'Location',
        'energy': 'Energy',
        'technician': 'Technician',
        'utility': 'Utility',
        'management': 'Management',
        'weather': 'Weather',
        'events': 'List[Event]',
        'program': 'Program',
        'house_details': 'HouseDetails',
        'oem_cfg': 'ThermostatOemCfg',
        'equipment_status': 'six.text_type',
        'notification_settings': 'NotificationSettings',
        'privacy': 'ThermostatPrivacy',
        'version': 'Version',
        'security_settings': 'SecuritySettings',
        'filter_subscription': 'ApiFilterSubscription',
        'remote_sensors': 'List[RemoteSensor]',
    }

    def __init__(
        self,
        identifier,
        name=None,
        thermostat_rev=None,
        is_registered=None,
        model_number=None,
        brand=None,
        features=None,
        last_modified=None,
        thermostat_time=None,
        utc_time=None,
        audio=None,
        alerts=None,
        reminders=None,
        settings=None,
        runtime=None,
        extended_runtime=None,
        electricity=None,
        devices=None,
        location=None,
        energy=None,
        technician=None,
        utility=None,
        management=None,
        weather=None,
        events=None,
        program=None,
        house_details=None,
        oem_cfg=None,
        equipment_status=None,
        notification_settings=None,
        privacy=None,
        version=None,
        security_settings=None,
        filter_subscription=None,
        remote_sensors=None,
    ):
        """
        Construct a Thermostat instance.

        :param identifier: required unique identifier of the
            thermostat.  Every other argument is optional and defaults
            to None.
        """
        # Argument names mirror the slot names (without the leading
        # underscore), so the slots are populated generically.
        values = dict(locals())
        del values['self']
        for attribute, value in values.items():
            setattr(self, '_' + attribute, value)


def _thermostat_getter(slot_name):
    """Return an fget closure that reads ``slot_name`` off an instance."""
    def fget(self):
        return getattr(self, slot_name)
    return fget


def _thermostat_setter(slot_name):
    """Return an fset closure that writes ``slot_name`` on an instance."""
    def fset(self, value):
        setattr(self, slot_name, value)
    return fset


# Attributes whose ecobee READONLY value is "no" get a setter as well.
_SETTABLE = frozenset((
    'name', 'audio', 'settings', 'location', 'energy', 'program',
    'house_details', 'oem_cfg', 'notification_settings', 'privacy',
    'security_settings', 'filter_subscription',
))

for _slot in Thermostat.__slots__:
    _attr = _slot[1:]
    _fget = _thermostat_getter(_slot)
    _fget.__doc__ = (
        'Gets the %s attribute of this Thermostat instance.\n\n'
        ':rtype: %s' % (_attr, Thermostat.attribute_type_map[_attr])
    )
    _fset = _thermostat_setter(_slot) if _attr in _SETTABLE else None
    setattr(Thermostat, _attr, property(_fget, _fset))

# Keep the module namespace clean of loop/helper scaffolding.
del _slot, _attr, _fget, _fset, _SETTABLE
del _thermostat_getter, _thermostat_setter
|
en
| 0.706711
|
This module is home to the Thermostat class This class has been auto generated by scraping https://www.ecobee.com/home/developer/api/documentation/v1/objects/Thermostat.shtml Attribute names have been generated by converting ecobee property names from camelCase to snake_case. A getter property has been generated for each attribute. A setter property has been generated for each attribute whose value of READONLY is "no". An __init__ argument without a default value has been generated if the value of REQUIRED is "yes". An __init__ argument with a default value of None has been generated if the value of REQUIRED is "no". Construct a Thermostat instance Gets the identifier attribute of this Thermostat instance. :return: The value of the identifier attribute of this Thermostat instance. :rtype: six.text_type Gets the name attribute of this Thermostat instance. :return: The value of the name attribute of this Thermostat instance. :rtype: six.text_type Sets the name attribute of this Thermostat instance. :param name: The name value to set for the name attribute of this Thermostat instance. :type: six.text_type Gets the thermostat_rev attribute of this Thermostat instance. :return: The value of the thermostat_rev attribute of this Thermostat instance. :rtype: six.text_type Gets the is_registered attribute of this Thermostat instance. :return: The value of the is_registered attribute of this Thermostat instance. :rtype: bool Gets the model_number attribute of this Thermostat instance. :return: The value of the model_number attribute of this Thermostat instance. :rtype: six.text_type Gets the brand attribute of this Thermostat instance. :return: The value of the brand attribute of this Thermostat instance. :rtype: six.text_type Gets the features attribute of this Thermostat instance. :return: The value of the features attribute of this Thermostat instance. :rtype: six.text_type Gets the last_modified attribute of this Thermostat instance. 
:return: The value of the last_modified attribute of this Thermostat instance. :rtype: six.text_type Gets the thermostat_time attribute of this Thermostat instance. :return: The value of the thermostat_time attribute of this Thermostat instance. :rtype: six.text_type Gets the utc_time attribute of this Thermostat instance. :return: The value of the utc_time attribute of this Thermostat instance. :rtype: six.text_type Gets the audio attribute of this Thermostat instance. :return: The value of the audio attribute of this Thermostat instance. :rtype: Audio Sets the audio attribute of this Thermostat instance. :param audio: The audio value to set for the audio attribute of this Thermostat instance. :type: Audio Gets the alerts attribute of this Thermostat instance. :return: The value of the alerts attribute of this Thermostat instance. :rtype: List[Alert] Gets the reminders attribute of this Thermostat instance. :return: The value of the reminders attribute of this Thermostat instance. :rtype: List[ThermostatReminder2] Gets the settings attribute of this Thermostat instance. :return: The value of the settings attribute of this Thermostat instance. :rtype: Settings Sets the settings attribute of this Thermostat instance. :param settings: The settings value to set for the settings attribute of this Thermostat instance. :type: Settings Gets the runtime attribute of this Thermostat instance. :return: The value of the runtime attribute of this Thermostat instance. :rtype: Runtime Gets the extended_runtime attribute of this Thermostat instance. :return: The value of the extended_runtime attribute of this Thermostat instance. :rtype: ExtendedRuntime Gets the electricity attribute of this Thermostat instance. :return: The value of the electricity attribute of this Thermostat instance. :rtype: Electricity Gets the devices attribute of this Thermostat instance. :return: The value of the devices attribute of this Thermostat instance. 
:rtype: List[Device] Gets the location attribute of this Thermostat instance. :return: The value of the location attribute of this Thermostat instance. :rtype: Location Sets the location attribute of this Thermostat instance. :param location: The location value to set for the location attribute of this Thermostat instance. :type: Location Gets the energy attribute of this Thermostat instance. :return: The value of the energy attribute of this Thermostat instance. :rtype: Energy Sets the energy attribute of this Thermostat instance. :param energy: The energy value to set for the energy attribute of this Thermostat instance. :type: Energy Gets the technician attribute of this Thermostat instance. :return: The value of the technician attribute of this Thermostat instance. :rtype: Technician Gets the utility attribute of this Thermostat instance. :return: The value of the utility attribute of this Thermostat instance. :rtype: Utility Gets the management attribute of this Thermostat instance. :return: The value of the management attribute of this Thermostat instance. :rtype: Management Gets the weather attribute of this Thermostat instance. :return: The value of the weather attribute of this Thermostat instance. :rtype: Weather Gets the events attribute of this Thermostat instance. :return: The value of the events attribute of this Thermostat instance. :rtype: List[Event] Gets the program attribute of this Thermostat instance. :return: The value of the program attribute of this Thermostat instance. :rtype: Program Sets the program attribute of this Thermostat instance. :param program: The program value to set for the program attribute of this Thermostat instance. :type: Program Gets the house_details attribute of this Thermostat instance. :return: The value of the house_details attribute of this Thermostat instance. :rtype: HouseDetails Sets the house_details attribute of this Thermostat instance. 
:param house_details: The house_details value to set for the house_details attribute of this Thermostat instance. :type: HouseDetails Gets the oem_cfg attribute of this Thermostat instance. :return: The value of the oem_cfg attribute of this Thermostat instance. :rtype: ThermostatOemCfg Sets the oem_cfg attribute of this Thermostat instance. :param oem_cfg: The oem_cfg value to set for the oem_cfg attribute of this Thermostat instance. :type: ThermostatOemCfg Gets the equipment_status attribute of this Thermostat instance. :return: The value of the equipment_status attribute of this Thermostat instance. :rtype: six.text_type Gets the notification_settings attribute of this Thermostat instance. :return: The value of the notification_settings attribute of this Thermostat instance. :rtype: NotificationSettings Sets the notification_settings attribute of this Thermostat instance. :param notification_settings: The notification_settings value to set for the notification_settings attribute of this Thermostat instance. :type: NotificationSettings Gets the privacy attribute of this Thermostat instance. :return: The value of the privacy attribute of this Thermostat instance. :rtype: ThermostatPrivacy Sets the privacy attribute of this Thermostat instance. :param privacy: The privacy value to set for the privacy attribute of this Thermostat instance. :type: ThermostatPrivacy Gets the version attribute of this Thermostat instance. :return: The value of the version attribute of this Thermostat instance. :rtype: Version Gets the security_settings attribute of this Thermostat instance. :return: The value of the security_settings attribute of this Thermostat instance. :rtype: SecuritySettings Sets the security_settings attribute of this Thermostat instance. :param security_settings: The security_settings value to set for the security_settings attribute of this Thermostat instance. :type: SecuritySettings Gets the filter_subscription attribute of this Thermostat instance. 
:return: The value of the filter_subscription attribute of this Thermostat instance. :rtype: ApiFilterSubscription Sets the filter_subscription attribute of this Thermostat instance. :param filter_subscription: The filter_subscription value to set for the filter_subscription attribute of this Thermostat instance. :type: ApiFilterSubscription Gets the remote_sensors attribute of this Thermostat instance. :return: The value of the remote_sensors attribute of this Thermostat instance. :rtype: List[RemoteSensor]
| 2.141803
| 2
|
api/config/settings/env.py
|
nonwander/dating-site
| 0
|
6626941
|
import environ

# Project root: four levels above this file
# (presumably api/config/settings/env.py -> repo root — TODO confirm layout).
ROOT_DIR = environ.Path(__file__) - 4
APPS_DIR = ROOT_DIR.path('api')
env = environ.Env()
# Opt-in flag: only when DJANGO_READ_DOT_ENV_FILE is truthy do we load
# variables from the repository-level .env file into the environment.
READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False)
if READ_DOT_ENV_FILE:
    env_file = str(ROOT_DIR.path('.env'))
    print(f'Loading : {env_file}')
    env.read_env(env_file)
    print('The .env file has been loaded. See base.py for more information')
|
import environ
ROOT_DIR = environ.Path(__file__) - 4
APPS_DIR = ROOT_DIR.path('api')
env = environ.Env()
READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False)
if READ_DOT_ENV_FILE:
env_file = str(ROOT_DIR.path('.env'))
print(f'Loading : {env_file}')
env.read_env(env_file)
print('The .env file has been loaded. See base.py for more information')
|
none
| 1
| 2.151277
| 2
|
|
python/learn/mqtt/sub.py
|
qrsforever/workspace
| 2
|
6626942
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# @file app.py
# @brief MQTT subscriber for the tape-reading topic
# @author QRS
# @blog qrsforever.github.io
# @version 1.0
# @date 2019-05-13 17:21:59

import paho.mqtt.client as mqtt
import os

HOST = "127.0.0.1"
PORT = 1883
TOPIC = "100001/stocktech/tapereading/#"


def on_connect(client, userdata, flags, rc):
    """Connection callback: report the result code and (re)subscribe.

    Subscribing here instead of once after connect() means the
    subscription is automatically re-established if the client reconnects.
    """
    print("Connected with result code "+ str(rc))
    client.subscribe(TOPIC)


def on_message(client, userdata, msg):
    """Print every received message as '<topic> <payload>'."""
    print(msg.topic + " " + str(msg.payload))


client = mqtt.Client("100001")
client.on_connect = on_connect
client.on_message = on_message
client.username_pw_set("stocktech", "stocktech")
# HOST env var overrides the default broker address.
client.connect(os.environ.get('HOST', HOST), PORT, 60)

# client.loop_forever()
while True:
    # Run the network loop with a 10 s select timeout, then emit a heartbeat.
    client.loop(10)
    print("loop")
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# @file app.py
# @brief
# @author QRS
# @blog qrsforever.github.io
# @version 1.0
# @date 2019-05-13 17:21:59
import paho.mqtt.client as mqtt
import os
HOST = "127.0.0.1"
PORT = 1883
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+ str(rc))
def on_message(client, userdata, msg):
print(msg.topic + " " + str(msg.payload))
client = mqtt.Client("100001")
client.on_connect = on_connect
client.on_message = on_message
client.username_pw_set("stocktech", "stocktech");
client.connect(os.environ.get('HOST', HOST), PORT, 60)
client.subscribe("100001/stocktech/tapereading/#")
# client.loop_forever()
while True:
client.loop(10)
print("loop")
|
en
| 0.19373
|
#!/usr/bin/python3 # -*- coding: utf-8 -*- # @file app.py # @brief # @author QRS # @blog qrsforever.github.io # @version 1.0 # @date 2019-05-13 17:21:59 #") # client.loop_forever()
| 2.684138
| 3
|
mmdet3d/core/optimizer/hybrid_optimizer.py
|
zhyever/SimIPU
| 29
|
6626943
|
from mmcv.runner.optimizer import OPTIMIZERS
from torch.optim import Optimizer
@OPTIMIZERS.register_module()
class HybridOptimizer(Optimizer):
    """Optimizer wrapper that drives several underlying optimizers.

    Each wrapped optimizer can be stepped at its own interval, which
    supports hybrid optimization for multi-modality models.
    """
    def __init__(self, optimizers, step_intervals=None):
        # NOTE(review): torch.optim.Optimizer.__init__ is intentionally not
        # called; this class re-implements the interface by delegation.
        self.optimizers = optimizers
        # Flat view of every wrapped optimizer's param groups, so hooks and
        # schedulers that iterate ``param_groups`` see all of them.
        self.param_groups = []
        for optimizer in self.optimizers:
            self.param_groups += optimizer.param_groups
        if not isinstance(step_intervals, list):
            # Default: step every wrapped optimizer on every update.
            step_intervals = [1] * len(self.optimizers)
        self.step_intervals = step_intervals
        # Counts calls to ``step``; decides which optimizers fire when.
        self.num_step_updated = 0
    def __getstate__(self):
        # Per-optimizer lists, index-aligned with ``self.optimizers``.
        return {
            'num_step_updated':
            self.num_step_updated,
            'defaults': [optimizer.defaults for optimizer in self.optimizers],
            'state': [optimizer.state for optimizer in self.optimizers],
            'param_groups':
            [optimizer.param_groups for optimizer in self.optimizers],
        }
    def __setstate__(self, state):
        self.__dict__.update(state)
    def __repr__(self):
        # One "Update interval: N" header per wrapped optimizer, followed by
        # that optimizer's own repr, indented one level.
        format_string = self.__class__.__name__ + ' (\n'
        for optimizer, interval in zip(self.optimizers, self.step_intervals):
            format_string += 'Update interval: {}\n'.format(interval)
            format_string += optimizer.__repr__().replace('\n', '\n ') + ',\n'
        format_string += ')'
        return format_string
    def state_dict(self):
        """Return a checkpointable dict of per-optimizer state/param_groups."""
        state_dicts = [optimizer.state_dict() for optimizer in self.optimizers]
        return {
            'num_step_updated':
            self.num_step_updated,
            'state': [state_dict['state'] for state_dict in state_dicts],
            'param_groups':
            [state_dict['param_groups'] for state_dict in state_dicts],
        }
    def load_state_dict(self, state_dict):
        r"""Loads the optimizer state.
        Arguments:
            state_dict (dict): optimizer state. Should be an object returned
                from a call to :meth:`state_dict`.
        """
        assert len(state_dict['state']) == len(self.optimizers)
        assert len(state_dict['param_groups']) == len(self.optimizers)
        for i, (single_state, single_param_groups) in enumerate(
                zip(state_dict['state'], state_dict['param_groups'])):
            single_state_dict = dict(
                state=single_state, param_groups=single_param_groups)
            self.optimizers[i].load_state_dict(single_state_dict)
        # Rebuild the flat param_groups view: load_state_dict above replaced
        # each optimizer's groups, so the old aliases are stale.
        self.param_groups = []
        for optimizer in self.optimizers:
            self.param_groups += optimizer.param_groups
        self.num_step_updated = state_dict['num_step_updated']
    def zero_grad(self):
        r"""Clears the gradients of all optimized :class:`torch.Tensor` s."""
        for optimizer in self.optimizers:
            optimizer.zero_grad()
    def step(self, closure=None):
        r"""Performs a single optimization step (parameter update).

        Only the wrapped optimizers whose ``step_interval`` divides the
        current update count are stepped.

        Arguments:
            closure (callable): A closure that reevaluates the model and
                returns the loss. Optional for most optimizers.
        Returns:
            Tensor or None: calculated loss if `closure` is not None.
                If `closure` is None, None will be returned
        """
        loss = None
        if closure is not None:
            loss = closure()
        self.num_step_updated += 1
        for step_interval, optimizer in zip(self.step_intervals,
                                            self.optimizers):
            if self.num_step_updated % step_interval == 0:
                optimizer.step()
        return loss
    def add_param_group(self, param_group):
        # Adding groups to the wrapper is ambiguous (which inner optimizer
        # should own them?), so it is explicitly unsupported.
        raise NotImplementedError
|
from mmcv.runner.optimizer import OPTIMIZERS
from torch.optim import Optimizer
@OPTIMIZERS.register_module()
class HybridOptimizer(Optimizer):
"""Hybrid Optimizer that contains multiple optimizers This optimizer
applies the hybrid optimzation for multi-modality models."""
def __init__(self, optimizers, step_intervals=None):
self.optimizers = optimizers
self.param_groups = []
for optimizer in self.optimizers:
self.param_groups += optimizer.param_groups
if not isinstance(step_intervals, list):
step_intervals = [1] * len(self.optimizers)
self.step_intervals = step_intervals
self.num_step_updated = 0
def __getstate__(self):
return {
'num_step_updated':
self.num_step_updated,
'defaults': [optimizer.defaults for optimizer in self.optimizers],
'state': [optimizer.state for optimizer in self.optimizers],
'param_groups':
[optimizer.param_groups for optimizer in self.optimizers],
}
def __setstate__(self, state):
self.__dict__.update(state)
def __repr__(self):
format_string = self.__class__.__name__ + ' (\n'
for optimizer, interval in zip(self.optimizers, self.step_intervals):
format_string += 'Update interval: {}\n'.format(interval)
format_string += optimizer.__repr__().replace('\n', '\n ') + ',\n'
format_string += ')'
return format_string
def state_dict(self):
state_dicts = [optimizer.state_dict() for optimizer in self.optimizers]
return {
'num_step_updated':
self.num_step_updated,
'state': [state_dict['state'] for state_dict in state_dicts],
'param_groups':
[state_dict['param_groups'] for state_dict in state_dicts],
}
def load_state_dict(self, state_dict):
r"""Loads the optimizer state.
Arguments:
state_dict (dict): optimizer state. Should be an object returned
from a call to :meth:`state_dict`.
"""
assert len(state_dict['state']) == len(self.optimizers)
assert len(state_dict['param_groups']) == len(self.optimizers)
for i, (single_state, single_param_groups) in enumerate(
zip(state_dict['state'], state_dict['param_groups'])):
single_state_dict = dict(
state=single_state, param_groups=single_param_groups)
self.optimizers[i].load_state_dict(single_state_dict)
self.param_groups = []
for optimizer in self.optimizers:
self.param_groups += optimizer.param_groups
self.num_step_updated = state_dict['num_step_updated']
def zero_grad(self):
r"""Clears the gradients of all optimized :class:`torch.Tensor` s."""
for optimizer in self.optimizers:
optimizer.zero_grad()
def step(self, closure=None):
r"""Performs a single optimization step (parameter update).
Arguments:
closure (callable): A closure that reevaluates the model and
returns the loss. Optional for most optimizers.
Returns:
Tensor or None: calculated loss if `closure` is not None.
If `closure` is None, None will be returned
"""
loss = None
if closure is not None:
loss = closure()
self.num_step_updated += 1
for step_interval, optimizer in zip(self.step_intervals,
self.optimizers):
if self.num_step_updated % step_interval == 0:
optimizer.step()
return loss
def add_param_group(self, param_group):
raise NotImplementedError
|
en
| 0.759741
|
Hybrid Optimizer that contains multiple optimizers This optimizer applies the hybrid optimzation for multi-modality models. Loads the optimizer state. Arguments: state_dict (dict): optimizer state. Should be an object returned from a call to :meth:`state_dict`. Clears the gradients of all optimized :class:`torch.Tensor` s. Performs a single optimization step (parameter update). Arguments: closure (callable): A closure that reevaluates the model and returns the loss. Optional for most optimizers. Returns: Tensor or None: calculated loss if `closure` is not None. If `closure` is None, None will be returned
| 2.403013
| 2
|
test/test_data_on_home_page.py
|
JuliaBessonova/python_training
| 0
|
6626944
|
import re
from model.contact import Contact
def test_data_on_home_page(app, db):
    """Check that every contact on the home page matches the database.

    Contacts from the UI and the DB are sorted by the same key so they can
    be compared pairwise; emails and phones are merged the way the home
    page renders them before comparison.
    """
    uicontacts = sorted(app.contact.get_contact_list(), key=Contact.id_or_max)
    dbcontacts = sorted(db.get_contact_list(), key=Contact.id_or_max)
    # Fail loudly on a count mismatch instead of relying on an IndexError
    # from the old index-based loop.
    assert len(uicontacts) == len(dbcontacts)
    for ui_contact, db_contact in zip(uicontacts, dbcontacts):
        assert ui_contact.lastname == db_contact.lastname
        assert ui_contact.firstname == db_contact.firstname
        assert ui_contact.address == db_contact.address
        assert ui_contact.all_emails_from_home_page == merge_emails_like_on_home_page(db_contact)
        assert ui_contact.all_phones_from_home_page == merge_phones_like_on_home_page(db_contact)
def merge_emails_like_on_home_page(contact):
    """Join the non-empty e-mail fields with newlines, as the home page does."""
    candidates = [contact.email, contact.email2, contact.email3]
    present = [email for email in candidates if email is not None]
    return "\n".join(email for email in present if email != "")
def clear(s):
    """Strip the formatting characters '(', ')', ' ' and '-' from s."""
    return s.translate(str.maketrans("", "", "() -"))
def merge_phones_like_on_home_page(contact):
    """Join cleaned, non-empty phone fields with newlines, as the home page does."""
    raw = [contact.homephone, contact.mobilephone, contact.workphone,
           contact.secondaryphone]
    cleaned = [clear(phone) for phone in raw if phone is not None]
    return "\n".join(phone for phone in cleaned if phone != "")
|
import re
from model.contact import Contact
def test_data_on_home_page(app, db):
uicontacts = sorted(app.contact.get_contact_list(), key=Contact.id_or_max)
dbcontacts = sorted(db.get_contact_list(), key=Contact.id_or_max)
for contact in range(len(uicontacts)):
assert uicontacts[contact].lastname == dbcontacts[contact].lastname
assert uicontacts[contact].firstname == dbcontacts[contact].firstname
assert uicontacts[contact].address == dbcontacts[contact].address
assert uicontacts[contact].all_emails_from_home_page == merge_emails_like_on_home_page(dbcontacts[contact])
assert uicontacts[contact].all_phones_from_home_page == merge_phones_like_on_home_page(dbcontacts[contact])
def merge_emails_like_on_home_page(contact):
return "\n".join(filter(lambda x: x!="",
filter(lambda x: x is not None, [contact.email, contact.email2, contact.email3])))
def clear(s):
return re.sub("[() -]", "", s)
def merge_phones_like_on_home_page(contact):
return "\n".join(filter(lambda x: x != "",
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.homephone, contact.mobilephone, contact.workphone, contact.secondaryphone]))))
|
none
| 1
| 2.651837
| 3
|
|
examples/plot_summary.py
|
Jiaming1999/ChainConsumer
| 55
|
6626945
|
# -*- coding: utf-8 -*-
"""
============
Plot Summary
============
Have a bunch of models and want to compare summaries, but in a plot instead of LaTeX? Can do!
"""
###############################################################################
# Let's add a bunch of chains representing all these different models of ours.
import numpy as np
from chainconsumer import ChainConsumer
def get_instance():
    """Build a ChainConsumer with four random Gaussian chains (fixed seed)."""
    np.random.seed(0)
    c = ChainConsumer()
    parameters = ["$x$", r"$\Omega_\epsilon$", "$r^2(x_0)$"]
    for name in ["Ref. model", "Test A", "Test B", "Test C"]:
        # Add some random data: per-chain means/widths, then 100k samples.
        mean = np.random.normal(loc=0, scale=3, size=3)
        sigma = np.random.uniform(low=1, high=3, size=3)
        data = np.random.multivariate_normal(mean=mean, cov=np.diag(sigma**2), size=100000)
        c.add_chain(data, parameters=parameters, name=name)
    return c
###############################################################################
# If we want the full shape of the distributions, well, that's the default
# behaviour!
c = get_instance()
c.configure(bar_shade=True)
c.plotter.plot_summary()
###############################################################################
# But let's make some changes. Say we don't like the colourful text. And we
# want errorbars, not distributions. And some fun truth values.
c = get_instance()
c.configure(legend_color_text=False)
c.configure_truth(ls=":", color="#FB8C00")
c.plotter.plot_summary(errorbar=True, truth=[[0], [-1, 1], [-2, 0, 2]])
###############################################################################
# Even better, let's use our reference model as the truth value and not plot
# it with the others
c = get_instance()
c.configure(legend_color_text=False)
c.configure_truth(ls="-", color="#555555")
c.plotter.plot_summary(errorbar=True, truth="Ref. model", include_truth_chain=False, extra_parameter_spacing=1.5)
|
# -*- coding: utf-8 -*-
"""
============
Plot Summary
============
Have a bunch of models and want to compare summaries, but in a plot instead of LaTeX? Can do!
"""
###############################################################################
# Lets add a bunch of chains represnting all these different models of ours.
import numpy as np
from chainconsumer import ChainConsumer
def get_instance():
np.random.seed(0)
c = ChainConsumer()
parameters = ["$x$", r"$\Omega_\epsilon$", "$r^2(x_0)$"]
for name in ["Ref. model", "Test A", "Test B", "Test C"]:
# Add some random data
mean = np.random.normal(loc=0, scale=3, size=3)
sigma = np.random.uniform(low=1, high=3, size=3)
data = np.random.multivariate_normal(mean=mean, cov=np.diag(sigma**2), size=100000)
c.add_chain(data, parameters=parameters, name=name)
return c
###############################################################################
# If we want the full shape of the distributions, well, thats the default
# behaviour!
c = get_instance()
c.configure(bar_shade=True)
c.plotter.plot_summary()
###############################################################################
# But lets make some changes. Say we don't like the colourful text. And we
# want errorbars, not distributions. And some fun truth values.
c = get_instance()
c.configure(legend_color_text=False)
c.configure_truth(ls=":", color="#FB8C00")
c.plotter.plot_summary(errorbar=True, truth=[[0], [-1, 1], [-2, 0, 2]])
###############################################################################
# Even better, lets use our reference model as the truth value and not plot
# it with the others
c = get_instance()
c.configure(legend_color_text=False)
c.configure_truth(ls="-", color="#555555")
c.plotter.plot_summary(errorbar=True, truth="Ref. model", include_truth_chain=False, extra_parameter_spacing=1.5)
|
en
| 0.334376
|
# -*- coding: utf-8 -*- ============ Plot Summary ============ Have a bunch of models and want to compare summaries, but in a plot instead of LaTeX? Can do! ############################################################################### # Lets add a bunch of chains represnting all these different models of ours. # Add some random data ############################################################################### # If we want the full shape of the distributions, well, thats the default # behaviour! ############################################################################### # But lets make some changes. Say we don't like the colourful text. And we # want errorbars, not distributions. And some fun truth values. ############################################################################### # Even better, lets use our reference model as the truth value and not plot # it with the others
| 2.511575
| 3
|
freyja/updates.py
|
tomkinsc/Freyja
| 19
|
6626946
|
import urllib.request
import os
import sys
import subprocess
def download_tree(locDir):
    """Download the latest public UShER SARS-CoV-2 tree into ``locDir``.

    Returns the local path of the downloaded .pb.gz file.
    """
    url = ("http://hgdownload.soe.ucsc.edu/goldenPath/wuhCor1/"
           "UShER_SARS-CoV-2/public-latest.all.masked.pb.gz")
    treePath = os.path.join(locDir, "public-latest.all.masked.pb.gz")
    urllib.request.urlretrieve(url, treePath)
    return treePath
def convert_tree(locDir):
    """Extract lineage paths from the downloaded protobuf tree.

    Runs ``matUtils extract`` on the tree in ``locDir`` and writes the
    lineage paths to ``lineagePaths.txt`` in the current directory.
    Returns the CompletedProcess from ``subprocess.run``.
    """
    print(locDir)
    treePath = os.path.join(locDir, "public-latest.all.masked.pb.gz")
    # Pass an argument list instead of a shell string so a locDir containing
    # spaces or shell metacharacters cannot break (or inject into) the command.
    varCmd = ["matUtils", "extract", "-i", treePath, "-C", "lineagePaths.txt"]
    sys.stdout.flush()  # force python to flush before the child produces output
    completed = subprocess.run(varCmd, stdout=subprocess.DEVNULL)
    return completed
def get_curated_lineage_data(locDir):
    """Fetch outbreak.info's curated_lineages.json into ``locDir``."""
    url2 = ("https://raw.githubusercontent.com/outbreak-info/outbreak.info/"
            "master/web/src/assets/genomics/curated_lineages.json")
    target = os.path.join(locDir, "curated_lineages.json")
    urllib.request.urlretrieve(url2, target)
if __name__ == '__main__':
    # All three helpers require a destination directory; calling them with no
    # argument (as before) raised TypeError. Default to the current directory.
    locDir = os.getcwd()
    download_tree(locDir)
    get_curated_lineage_data(locDir)
    convert_tree(locDir)
|
import urllib.request
import os
import sys
import subprocess
def download_tree(locDir):
url = "http://hgdownload.soe.ucsc.edu/goldenPath/wuhCor1/"\
"UShER_SARS-CoV-2/public-latest.all.masked.pb.gz"
treePath = os.path.join(locDir, "public-latest.all.masked.pb.gz")
urllib.request.urlretrieve(url, treePath)
return treePath
def convert_tree(locDir):
print(locDir)
treePath = os.path.join(locDir, "public-latest.all.masked.pb.gz")
varCmd = f"matUtils extract -i {treePath} -C lineagePaths.txt"
sys.stdout.flush() # force python to flush
completed = subprocess.run(varCmd, shell=True, executable="/bin/bash",
stdout=subprocess.DEVNULL)
return completed
def get_curated_lineage_data(locDir):
url2 = "https://raw.githubusercontent.com/outbreak-info/outbreak.info/"\
"master/web/src/assets/genomics/curated_lineages.json"
urllib.request.urlretrieve(url2,
os.path.join(locDir,
"curated_lineages.json"))
if __name__ == '__main__':
download_tree()
get_curated_lineage_data()
convert_tree()
|
en
| 0.61542
|
# force python to flush
| 2.695351
| 3
|
config.py
|
LordMartian/pytorch-template
| 0
|
6626947
|
<gh_stars>0
"""
Author: <NAME>
Description: File for setting values for the configuration options.
- Project structure paths
- Hyperparameters
"""
args = dict()
# project structure
args["CODE_DIRECTORY"] = "absolute path to code directory"
args["DATA_DIRECTORY"] = "absolute path to dataset directory"
args["DEMO_DIRECTORY"] = "absolute path to directory containing demo samples"
args["PRETRAINED_WEIGHTS_FILE"] = "/saved/weights/pretrained_weights.pt"
args["TRAINED_WEIGHTS_FILE"] = "/saved/weights/trained_weights.pt"
# data
args["VALIDATION_SPLIT"] = 0.05
args["NUM_WORKERS"] = 4
# training
args["SEED"] = 10
args["BATCH_SIZE"] = 4
args["NUM_EPOCHS"] = 75
args["SAVE_FREQUENCY"] = 5
# optimizer and scheduler
args["LEARNING_RATE"] = 0.001
args["MOMENTUM1"] = 0.9
args["MOMENTUM2"] = 0.999
args["LR_DECAY"] = 0.95
# loss
args["LAMBDA"] = 0.03
if __name__ == "__main__":
for key, value in args.items():
print(str(key) + " : " + str(value))
|
"""
Author: <NAME>
Description: File for setting values for the configuration options.
- Project structure paths
- Hyperparameters
"""
args = dict()
# project structure
args["CODE_DIRECTORY"] = "absolute path to code directory"
args["DATA_DIRECTORY"] = "absolute path to dataset directory"
args["DEMO_DIRECTORY"] = "absolute path to directory containing demo samples"
args["PRETRAINED_WEIGHTS_FILE"] = "/saved/weights/pretrained_weights.pt"
args["TRAINED_WEIGHTS_FILE"] = "/saved/weights/trained_weights.pt"
# data
args["VALIDATION_SPLIT"] = 0.05
args["NUM_WORKERS"] = 4
# training
args["SEED"] = 10
args["BATCH_SIZE"] = 4
args["NUM_EPOCHS"] = 75
args["SAVE_FREQUENCY"] = 5
# optimizer and scheduler
args["LEARNING_RATE"] = 0.001
args["MOMENTUM1"] = 0.9
args["MOMENTUM2"] = 0.999
args["LR_DECAY"] = 0.95
# loss
args["LAMBDA"] = 0.03
if __name__ == "__main__":
for key, value in args.items():
print(str(key) + " : " + str(value))
|
en
| 0.61938
|
Author: <NAME> Description: File for setting values for the configuration options. - Project structure paths - Hyperparameters # project structure # data # training # optimizer and scheduler # loss
| 2.113589
| 2
|
pyqtgraph/examples/test_Arrow.py
|
robertsj/poropy
| 1
|
6626948
|
# -*- coding: utf-8 -*-
## Add path to library (just for examples; you do not need this)
import sys, os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
import numpy as np
from PyQt4 import QtGui, QtCore
import pyqtgraph as pg

# Qt application and a main window hosting a single PlotWidget.
app = QtGui.QApplication([])
mw = QtGui.QMainWindow()
p = pg.PlotWidget()
mw.setCentralWidget(p)
# One period of a circle traced by (sin t, cos t) over 100 points.
c = p.plot(x=np.sin(np.linspace(0, 2*np.pi, 100)), y=np.cos(np.linspace(0, 2*np.pi, 100)))
# Arrow attached to the curve; loop=-1 animates it along the curve forever.
a = pg.CurveArrow(c)
p.addItem(a)
mw.show()
anim = a.makeAnimation(loop=-1)
anim.start()
## Start Qt event loop unless running in interactive mode.
if sys.flags.interactive != 1:
    app.exec_()
|
# -*- coding: utf-8 -*-
## Add path to library (just for examples; you do not need this)
import sys, os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
import numpy as np
from PyQt4 import QtGui, QtCore
import pyqtgraph as pg
app = QtGui.QApplication([])
mw = QtGui.QMainWindow()
p = pg.PlotWidget()
mw.setCentralWidget(p)
c = p.plot(x=np.sin(np.linspace(0, 2*np.pi, 100)), y=np.cos(np.linspace(0, 2*np.pi, 100)))
a = pg.CurveArrow(c)
p.addItem(a)
mw.show()
anim = a.makeAnimation(loop=-1)
anim.start()
## Start Qt event loop unless running in interactive mode.
if sys.flags.interactive != 1:
app.exec_()
|
en
| 0.680104
|
# -*- coding: utf-8 -*- ## Add path to library (just for examples; you do not need this) ## Start Qt event loop unless running in interactive mode.
| 2.386478
| 2
|
devp2p/tests/test_peer.py
|
vaporyproject/pydevp2p
| 0
|
6626949
|
<filename>devp2p/tests/test_peer.py<gh_stars>0
from devp2p import peermanager
from devp2p import crypto
from devp2p.app import BaseApp
import devp2p.muxsession
import rlp
import devp2p.p2p_protocol
import time
import gevent
import copy
def get_connected_apps():
    """Start two BaseApp instances on localhost and dial A -> B.

    NOTE: Python 2 code (str.encode('hex')). The connection completes
    asynchronously; callers gevent.sleep() before inspecting peers.
    Returns the (a_app, b_app) pair.
    """
    a_config = dict(p2p=dict(listen_host='127.0.0.1', listen_port=3000),
                    node=dict(privkey_hex=crypto.sha3('a').encode('hex')))
    # B is a deep copy of A's config with its own port and private key.
    b_config = copy.deepcopy(a_config)
    b_config['p2p']['listen_port'] = 3001
    b_config['node']['privkey_hex'] = crypto.sha3('b').encode('hex')
    a_app = BaseApp(a_config)
    peermanager.PeerManager.register_with_app(a_app)
    a_app.start()
    b_app = BaseApp(b_config)
    peermanager.PeerManager.register_with_app(b_app)
    b_app.start()
    a_peermgr = a_app.services.peermanager
    b_peermgr = b_app.services.peermanager
    # connect: A dials B's listen endpoint, authenticated by B's public key
    host = b_config['p2p']['listen_host']
    port = b_config['p2p']['listen_port']
    pubkey = crypto.privtopub(b_config['node']['privkey_hex'].decode('hex'))
    a_peermgr.connect((host, port), remote_pubkey=pubkey)
    return a_app, b_app
def test_handshake():
    """After connecting, both peer managers should list at least one peer."""
    a_app, b_app = get_connected_apps()
    gevent.sleep(1)  # give the async handshake time to complete
    assert a_app.services.peermanager.peers
    assert b_app.services.peermanager.peers
    a_app.stop()
    b_app.stop()
def test_big_transfer():
    """Send a large binary payload over the multiplexed session and time it."""
    class transfer(devp2p.p2p_protocol.BaseProtocol.command):
        # Ad-hoc protocol command carrying one opaque binary field.
        cmd_id = 4
        structure = [('raw_data', rlp.sedes.binary)]
        def create(self, proto, raw_data=''):
            return [raw_data]
    # monkey patches: install the command on P2PProtocol and shrink the mux window
    devp2p.p2p_protocol.P2PProtocol.transfer = transfer
    devp2p.muxsession.MultiplexedSession.max_window_size = 8 * 1024
    a_app, b_app = get_connected_apps()
    gevent.sleep(.1)
    a_protocol = a_app.services.peermanager.peers[0].protocols[devp2p.p2p_protocol.P2PProtocol]
    b_protocol = b_app.services.peermanager.peers[0].protocols[devp2p.p2p_protocol.P2PProtocol]
    st = time.time()
    def cb(proto, **data):
        # Python 2 print statement: report elapsed time and received size.
        print 'took', time.time() - st, len(data['raw_data'])
    b_protocol.receive_transfer_callbacks.append(cb)
    raw_data = '0' * 1 * 1000 * 100  # 100 KB payload
    a_protocol.send_transfer(raw_data=raw_data)
    # Observed timings on the author's machine:
    # 0.03 secs for 0.1mb
    # 0.28 secs for 1mb
    # 2.7 secs for 10mb
    # 3.7 MB/s == 30Mbit
    gevent.sleep(1)
    a_app.stop()
    b_app.stop()
    gevent.sleep(0.1)
def connect_go():
    """Manually dial a locally running go-ethereum node on port 30303.

    The hard-coded pubkey must match the target node's identity —
    adjust before use. Blocks for 50 seconds, then shuts down.
    """
    a_config = dict(p2p=dict(listen_host='127.0.0.1', listen_port=3000),
                    node=dict(privkey_hex=crypto.sha3('a').encode('hex')))
    a_app = BaseApp(a_config)
    peermanager.PeerManager.register_with_app(a_app)
    a_app.start()
    a_peermgr = a_app.services.peermanager
    # connect to the external node using its 64-byte hex-encoded public key
    pubkey = "6ed2fecb28ff17dec8647f08aa4368b57790000e0e9b33a7b91f32c41b6ca9ba21600e9a8c44248ce63a71544388c6745fa291f88f8b81e109ba3da11f7b41b9".decode(
        'hex')
    a_peermgr.connect(('127.0.0.1', 30303), remote_pubkey=pubkey)
    gevent.sleep(50)
    a_app.stop()
if __name__ == '__main__':
    # To test against go-ethereum, start it first with e.g.:
    # ethereum -loglevel 5 --bootnodes ''
    import ethereum.slogging
    ethereum.slogging.configure(config_string=':debug')
    # connect_go()  # enable to dial an external node instead
    test_big_transfer()
|
<filename>devp2p/tests/test_peer.py<gh_stars>0
from devp2p import peermanager
from devp2p import crypto
from devp2p.app import BaseApp
import devp2p.muxsession
import rlp
import devp2p.p2p_protocol
import time
import gevent
import copy
def get_connected_apps():
    """Start two BaseApp instances on localhost and dial A -> B.

    NOTE: Python 2 code (str.encode('hex')). The connection completes
    asynchronously; callers gevent.sleep() before inspecting peers.
    Returns the (a_app, b_app) pair.
    """
    a_config = dict(p2p=dict(listen_host='127.0.0.1', listen_port=3000),
                    node=dict(privkey_hex=crypto.sha3('a').encode('hex')))
    # B is a deep copy of A's config with its own port and private key.
    b_config = copy.deepcopy(a_config)
    b_config['p2p']['listen_port'] = 3001
    b_config['node']['privkey_hex'] = crypto.sha3('b').encode('hex')
    a_app = BaseApp(a_config)
    peermanager.PeerManager.register_with_app(a_app)
    a_app.start()
    b_app = BaseApp(b_config)
    peermanager.PeerManager.register_with_app(b_app)
    b_app.start()
    a_peermgr = a_app.services.peermanager
    b_peermgr = b_app.services.peermanager
    # connect: A dials B's listen endpoint, authenticated by B's public key
    host = b_config['p2p']['listen_host']
    port = b_config['p2p']['listen_port']
    pubkey = crypto.privtopub(b_config['node']['privkey_hex'].decode('hex'))
    a_peermgr.connect((host, port), remote_pubkey=pubkey)
    return a_app, b_app
def test_handshake():
    """After connecting, both peer managers should list at least one peer."""
    a_app, b_app = get_connected_apps()
    gevent.sleep(1)  # give the async handshake time to complete
    assert a_app.services.peermanager.peers
    assert b_app.services.peermanager.peers
    a_app.stop()
    b_app.stop()
def test_big_transfer():
    """Send a large binary payload over the multiplexed session and time it."""
    class transfer(devp2p.p2p_protocol.BaseProtocol.command):
        # Ad-hoc protocol command carrying one opaque binary field.
        cmd_id = 4
        structure = [('raw_data', rlp.sedes.binary)]
        def create(self, proto, raw_data=''):
            return [raw_data]
    # monkey patches: install the command on P2PProtocol and shrink the mux window
    devp2p.p2p_protocol.P2PProtocol.transfer = transfer
    devp2p.muxsession.MultiplexedSession.max_window_size = 8 * 1024
    a_app, b_app = get_connected_apps()
    gevent.sleep(.1)
    a_protocol = a_app.services.peermanager.peers[0].protocols[devp2p.p2p_protocol.P2PProtocol]
    b_protocol = b_app.services.peermanager.peers[0].protocols[devp2p.p2p_protocol.P2PProtocol]
    st = time.time()
    def cb(proto, **data):
        # Python 2 print statement: report elapsed time and received size.
        print 'took', time.time() - st, len(data['raw_data'])
    b_protocol.receive_transfer_callbacks.append(cb)
    raw_data = '0' * 1 * 1000 * 100  # 100 KB payload
    a_protocol.send_transfer(raw_data=raw_data)
    # Observed timings on the author's machine:
    # 0.03 secs for 0.1mb
    # 0.28 secs for 1mb
    # 2.7 secs for 10mb
    # 3.7 MB/s == 30Mbit
    gevent.sleep(1)
    a_app.stop()
    b_app.stop()
    gevent.sleep(0.1)
def connect_go():
    """Manually dial a locally running go-ethereum node on port 30303.

    The hard-coded pubkey must match the target node's identity —
    adjust before use. Blocks for 50 seconds, then shuts down.
    """
    a_config = dict(p2p=dict(listen_host='127.0.0.1', listen_port=3000),
                    node=dict(privkey_hex=crypto.sha3('a').encode('hex')))
    a_app = BaseApp(a_config)
    peermanager.PeerManager.register_with_app(a_app)
    a_app.start()
    a_peermgr = a_app.services.peermanager
    # connect to the external node using its 64-byte hex-encoded public key
    pubkey = "6ed2fecb28ff17dec8647f08aa4368b57790000e0e9b33a7b91f32c41b6ca9ba21600e9a8c44248ce63a71544388c6745fa291f88f8b81e109ba3da11f7b41b9".decode(
        'hex')
    a_peermgr.connect(('127.0.0.1', 30303), remote_pubkey=pubkey)
    gevent.sleep(50)
    a_app.stop()
if __name__ == '__main__':
    # To test against go-ethereum, start it first with e.g.:
    # ethereum -loglevel 5 --bootnodes ''
    import ethereum.slogging
    ethereum.slogging.configure(config_string=':debug')
    # connect_go()  # enable to dial an external node instead
    test_big_transfer()
|
en
| 0.521297
|
# connect # money patches # 0.03 secs for 0.1mb # 0.28 secs for 1mb # 2.7 secs for 10mb # 3.7 MB/s == 30Mbit # connect # ethereum -loglevel 5 --bootnodes '' # connect_go()
| 2.261127
| 2
|
algos/tf2algos/ioc.py
|
H0wardx/RLs
| 1
|
6626950
|
import rls
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from algos.tf2algos.base.off_policy import make_off_policy_class
from utils.tf2_utils import gaussian_clip_rsample, gaussian_likelihood_sum, gaussian_entropy
class IOC(make_off_policy_class(mode='share')):
    '''
    Interest-Option-Critic: option-critic variant with a learned per-option
    "interest" function that biases which option is selected each step.
    Learning Options with Interest Functions, https://www.aaai.org/ojs/index.php/AAAI/article/view/5114/4987
    Options of Interest: Temporal Abstraction with Interest Functions, http://arxiv.org/abs/2001.00271
    '''
    def __init__(self,
                 s_dim,
                 visual_sources,
                 visual_resolution,
                 a_dim,
                 is_continuous,
                 q_lr=5.0e-3,
                 intra_option_lr=5.0e-4,
                 termination_lr=5.0e-4,
                 interest_lr=5.0e-4,
                 boltzmann_temperature=1.0,
                 options_num=4,
                 ent_coff=0.01,
                 double_q=False,
                 use_baseline=True,
                 terminal_mask=True,
                 termination_regularizer=0.01,
                 assign_interval=1000,
                 hidden_units={
                     'q': [32, 32],
                     'intra_option': [32, 32],
                     'termination': [32, 32],
                     'interest': [32, 32]
                 },
                 **kwargs):
        """Build the four network heads (Q, intra-option policy, termination,
        interest) and one optimizer per head.

        Args (selected):
            q_lr / intra_option_lr / termination_lr / interest_lr: learning rates.
            boltzmann_temperature: softmax temperature for the intra-option
                logits in the discrete-action case.
            options_num: number of options P.
            ent_coff: entropy bonus coefficient for the intra-option policy.
            double_q: pick the next option's index with the online Q net and
                evaluate it with the target net.
            use_baseline: subtract the current Q estimate as a baseline in the
                policy-gradient advantage.
            terminal_mask: zero the termination loss on terminal transitions.
            assign_interval: training steps between target-network syncs.

        NOTE(review): ``hidden_units`` is a mutable default argument; it is
        only read here, but a ``None`` sentinel would be safer.
        """
        super().__init__(
            s_dim=s_dim,
            visual_sources=visual_sources,
            visual_resolution=visual_resolution,
            a_dim=a_dim,
            is_continuous=is_continuous,
            **kwargs)
        self.assign_interval = assign_interval
        self.options_num = options_num
        self.termination_regularizer = termination_regularizer  # stored; not referenced elsewhere in this class
        self.ent_coff = ent_coff
        self.use_baseline = use_baseline
        self.terminal_mask = terminal_mask
        self.double_q = double_q
        self.boltzmann_temperature = boltzmann_temperature
        # Online and target Q nets share the same architecture.
        def _q_net(): return rls.critic_q_all(self.feat_dim, self.options_num, hidden_units['q'])
        self.q_net = _q_net()
        self.q_target_net = _q_net()
        # Per-option policy head, plus sigmoid-output termination and interest heads.
        self.intra_option_net = rls.oc_intra_option(self.feat_dim, self.a_dim, self.options_num, hidden_units['intra_option'])
        self.termination_net = rls.critic_q_all(self.feat_dim, self.options_num, hidden_units['termination'], 'sigmoid')
        self.interest_net = rls.critic_q_all(self.feat_dim, self.options_num, hidden_units['interest'], 'sigmoid')
        self.critic_tv = self.q_net.trainable_variables + self.other_tv
        self.actor_tv = self.intra_option_net.trainable_variables
        if self.is_continuous:
            # Learnable per-option log-std for the Gaussian policy.
            self.log_std = tf.Variable(initial_value=-0.5 * np.ones((self.options_num, self.a_dim), dtype=np.float32), trainable=True)  # [P, A]
            self.actor_tv += [self.log_std]
        self.update_target_net_weights(self.q_target_net.weights, self.q_net.weights)
        self.q_lr, self.intra_option_lr, self.termination_lr, self.interest_lr = map(self.init_lr, [q_lr, intra_option_lr, termination_lr, interest_lr])
        self.q_optimizer = self.init_optimizer(self.q_lr, clipvalue=5.)
        self.intra_option_optimizer = self.init_optimizer(self.intra_option_lr, clipvalue=5.)
        self.termination_optimizer = self.init_optimizer(self.termination_lr, clipvalue=5.)
        self.interest_optimizer = self.init_optimizer(self.interest_lr, clipvalue=5.)
        self.model_recorder(dict(
            q_net=self.q_net,
            intra_option_net=self.intra_option_net,
            termination_net=self.termination_net,
            interest_net=self.interest_net,
            q_optimizer=self.q_optimizer,
            intra_option_optimizer=self.intra_option_optimizer,
            termination_optimizer=self.termination_optimizer,
            interest_optimizer=self.interest_optimizer
        ))
    def show_logo(self):
        """Log the ASCII-art algorithm banner."""
        self.recorder.logger.info('''
    xxxx xxxxxx xxxxxxx
    xx xxx xxxx xxxx xxx
    xx xxx xxx xxxx x
    xx xx xxx xxx x
    xx xx xxx xxx
    xx xx xxx xxx
    xx xx xxx xxx
    xx xxx xxx xxx x
    xxxx xxxxxxxx xxxxxxxx
    xxxxx xxxxx
    ''')
    def _generate_random_options(self):
        # One uniformly random option index per agent, as an int32 tensor.
        return tf.constant(np.random.randint(0, self.options_num, self.n_agents), dtype=tf.int32)
    def choose_action(self, s, visual_s, evaluation=False):
        """Pick environment actions and (re)sample the per-agent options.

        Lazily initializes ``self.options`` with random options on the first
        call; the previous options are kept in ``self.last_options`` so they
        can be stored alongside the transition for training.
        """
        if not hasattr(self, 'options'):
            self.options = self._generate_random_options()
        self.last_options = self.options
        a, self.options, self.cell_state = self._get_action(s, visual_s, self.cell_state, self.options)
        a = a.numpy()
        return a
    @tf.function
    def _get_action(self, s, visual_s, cell_state, options):
        """Sample an action from the current option's policy, then sample the
        next option from interest-weighted Q logits."""
        with tf.device(self.device):
            feat, cell_state = self.get_feature(s, visual_s, cell_state=cell_state, record_cs=True)
            q = self.q_net(feat)  # [B, P]
            pi = self.intra_option_net(feat)  # [B, P, A]
            options_onehot = tf.one_hot(options, self.options_num, dtype=tf.float32)  # [B, P]
            options_onehot_expanded = tf.expand_dims(options_onehot, axis=-1)  # [B, P, 1]
            # Select the logits of the currently active option.
            pi = tf.reduce_sum(pi * options_onehot_expanded, axis=1)  # [B, A]
            if self.is_continuous:
                log_std = tf.gather(self.log_std, options)
                mu = tf.math.tanh(pi)
                a, _ = gaussian_clip_rsample(mu, log_std)
            else:
                pi = pi / self.boltzmann_temperature
                dist = tfp.distributions.Categorical(logits=pi)  # [B, ]
                a = dist.sample()
            interests = self.interest_net(feat)  # [B, P]
            # Next option is sampled (not argmax) from interest-weighted Q values.
            op_logits = interests * q  # [B, P] or tf.nn.softmax(q)
            new_options = tfp.distributions.Categorical(logits=op_logits).sample()
        return a, new_options, cell_state
    def learn(self, **kwargs):
        """Run ``kwargs['step']`` off-policy updates via the base-class loop."""
        self.train_step = kwargs.get('train_step')
        def _update():
            # Periodically sync the target Q network with the online one.
            if self.global_step % self.assign_interval == 0:
                self.update_target_net_weights(self.q_target_net.weights, self.q_net.weights)
        for i in range(kwargs['step']):
            self._learn(function_dict={
                'train_function': self.train,
                'update_function': _update,
                'sample_data_list': ['s', 'visual_s', 'a', 'r', 's_', 'visual_s_', 'done', 'last_options', 'options'],
                'train_data_list': ['ss', 'vvss', 'a', 'r', 'done', 'last_options', 'options'],
                'summary_dict': dict([
                    ['LEARNING_RATE/q_lr', self.q_lr(self.train_step)],
                    ['LEARNING_RATE/intra_option_lr', self.intra_option_lr(self.train_step)],
                    ['LEARNING_RATE/termination_lr', self.termination_lr(self.train_step)],
                    ['Statistics/option', self.options[0]]
                ])
            })
    @tf.function(experimental_relax_shapes=True)
    def train(self, memories, isw, crsty_loss, cell_state):
        """One gradient step on all four heads: Q, intra-option policy,
        termination, and interest.

        Returns (td_error, summaries) — td_error for prioritized replay
        weighting, summaries for logging.
        """
        ss, vvss, a, r, done, last_options, options = memories
        last_options = tf.cast(last_options, tf.int32)
        options = tf.cast(options, tf.int32)
        with tf.device(self.device):
            with tf.GradientTape(persistent=True) as tape:
                feat, feat_ = self.get_feature(ss, vvss, cell_state=cell_state, s_and_s_=True)
                q = self.q_net(feat)  # [B, P]
                pi = self.intra_option_net(feat)  # [B, P, A]
                beta = self.termination_net(feat)  # [B, P]
                q_next = self.q_target_net(feat_)  # [B, P], [B, P, A], [B, P]
                beta_next = self.termination_net(feat_)  # [B, P]
                interests = self.interest_net(feat)  # [B, P]
                options_onehot = tf.one_hot(options, self.options_num, dtype=tf.float32)  # [B,] => [B, P]
                q_s = qu_eval = tf.reduce_sum(q * options_onehot, axis=-1, keepdims=True)  # [B, 1]
                beta_s_ = tf.reduce_sum(beta_next * options_onehot, axis=-1, keepdims=True)  # [B, 1]
                q_s_ = tf.reduce_sum(q_next * options_onehot, axis=-1, keepdims=True)  # [B, 1]
                if self.double_q:
                    q_ = self.q_net(feat)  # [B, P], [B, P, A], [B, P]
                    max_a_idx = tf.one_hot(tf.argmax(q_, axis=-1), self.options_num, dtype=tf.float32)  # [B, P] => [B, ] => [B, P]
                    q_s_max = tf.reduce_sum(q_next * max_a_idx, axis=-1, keepdims=True)  # [B, 1]
                else:
                    q_s_max = tf.reduce_max(q_next, axis=-1, keepdims=True)  # [B, 1]
                # Option value target: keep the current option unless it terminates.
                u_target = (1 - beta_s_) * q_s_ + beta_s_ * q_s_max  # [B, 1]
                qu_target = tf.stop_gradient(r + self.gamma * (1 - done) * u_target)
                td_error = qu_target - qu_eval  # gradient : q
                q_loss = tf.reduce_mean(tf.square(td_error) * isw) + crsty_loss  # [B, 1] => 1
                if self.use_baseline:
                    adv = tf.stop_gradient(qu_target - qu_eval)
                else:
                    adv = tf.stop_gradient(qu_target)
                options_onehot_expanded = tf.expand_dims(options_onehot, axis=-1)  # [B, P] => [B, P, 1]
                pi = tf.reduce_sum(pi * options_onehot_expanded, axis=1)  # [B, P, A] => [B, A]
                if self.is_continuous:
                    log_std = tf.gather(self.log_std, options)
                    mu = tf.math.tanh(pi)
                    log_p = gaussian_likelihood_sum(a, mu, log_std)
                    entropy = gaussian_entropy(log_std)
                else:
                    pi = pi / self.boltzmann_temperature
                    log_pi = tf.nn.log_softmax(pi, axis=-1)  # [B, A]
                    entropy = -tf.reduce_sum(tf.exp(log_pi) * log_pi, axis=1, keepdims=True)  # [B, 1]
                    log_p = tf.reduce_sum(a * log_pi, axis=-1, keepdims=True)  # [B, 1]
                pi_loss = tf.reduce_mean(-(log_p * adv + self.ent_coff * entropy))  # [B, 1] * [B, 1] => [B, 1] => 1
                last_options_onehot = tf.one_hot(last_options, self.options_num, dtype=tf.float32)  # [B,] => [B, P]
                beta_s = tf.reduce_sum(beta * last_options_onehot, axis=-1, keepdims=True)  # [B, 1]
                # Interest loss: negative of termination-weighted (probability of
                # the taken option under the interest-softmax) times its Q value.
                pi_op = tf.nn.softmax(interests * tf.stop_gradient(q))  # [B, P] or tf.nn.softmax(q)
                interest_loss = -tf.reduce_mean(beta_s * tf.reduce_sum(pi_op * options_onehot, axis=-1, keepdims=True) * q_s)  # [B, 1] => 1
                v_s = tf.reduce_sum(q * pi_op, axis=-1, keepdims=True)  # [B, P] * [B, P] => [B, 1]
                beta_loss = beta_s * tf.stop_gradient(q_s - v_s)  # [B, 1]
                if self.terminal_mask:
                    beta_loss *= (1 - done)
                beta_loss = tf.reduce_mean(beta_loss)  # [B, 1] => 1
            # Persistent tape: one backward pass per head, each on its own variables.
            q_grads = tape.gradient(q_loss, self.critic_tv)
            intra_option_grads = tape.gradient(pi_loss, self.actor_tv)
            termination_grads = tape.gradient(beta_loss, self.termination_net.trainable_variables)
            interest_grads = tape.gradient(interest_loss, self.interest_net.trainable_variables)
            self.q_optimizer.apply_gradients(
                zip(q_grads, self.critic_tv)
            )
            self.intra_option_optimizer.apply_gradients(
                zip(intra_option_grads, self.actor_tv)
            )
            self.termination_optimizer.apply_gradients(
                zip(termination_grads, self.termination_net.trainable_variables)
            )
            self.interest_optimizer.apply_gradients(
                zip(interest_grads, self.interest_net.trainable_variables)
            )
            self.global_step.assign_add(1)
            return td_error, dict([
                ['LOSS/q_loss', tf.reduce_mean(q_loss)],
                ['LOSS/pi_loss', tf.reduce_mean(pi_loss)],
                ['LOSS/beta_loss', tf.reduce_mean(beta_loss)],
                ['LOSS/interest_loss', tf.reduce_mean(interest_loss)],
                ['Statistics/q_option_max', tf.reduce_max(q_s)],
                ['Statistics/q_option_min', tf.reduce_min(q_s)],
                ['Statistics/q_option_mean', tf.reduce_mean(q_s)]
            ])
    def store_data(self, s, visual_s, a, r, s_, visual_s_, done):
        """
        for off-policy training, use this function to store <s, a, r, s_, done> into ReplayBuffer.
        """
        assert isinstance(a, np.ndarray), "store need action type is np.ndarray"
        assert isinstance(r, np.ndarray), "store need reward type is np.ndarray"
        assert isinstance(done, np.ndarray), "store need done type is np.ndarray"
        self._running_average(s)
        self.data.add(
            s,
            visual_s,
            a,
            r[:, np.newaxis],  # add a trailing axis: [B] -> [B, 1]
            s_,
            visual_s_,
            done[:, np.newaxis],  # add a trailing axis: [B] -> [B, 1]
            self.last_options,
            self.options
        )
    def no_op_store(self, s, visual_s, a, r, s_, visual_s_, done):
        # Transitions collected during no-op warm-up are discarded.
        pass
|
import rls
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from algos.tf2algos.base.off_policy import make_off_policy_class
from utils.tf2_utils import gaussian_clip_rsample, gaussian_likelihood_sum, gaussian_entropy
class IOC(make_off_policy_class(mode='share')):
    '''
    Interest-Option-Critic: option-critic variant with a learned per-option
    "interest" function that biases which option is selected each step.
    Learning Options with Interest Functions, https://www.aaai.org/ojs/index.php/AAAI/article/view/5114/4987
    Options of Interest: Temporal Abstraction with Interest Functions, http://arxiv.org/abs/2001.00271
    '''
    def __init__(self,
                 s_dim,
                 visual_sources,
                 visual_resolution,
                 a_dim,
                 is_continuous,
                 q_lr=5.0e-3,
                 intra_option_lr=5.0e-4,
                 termination_lr=5.0e-4,
                 interest_lr=5.0e-4,
                 boltzmann_temperature=1.0,
                 options_num=4,
                 ent_coff=0.01,
                 double_q=False,
                 use_baseline=True,
                 terminal_mask=True,
                 termination_regularizer=0.01,
                 assign_interval=1000,
                 hidden_units={
                     'q': [32, 32],
                     'intra_option': [32, 32],
                     'termination': [32, 32],
                     'interest': [32, 32]
                 },
                 **kwargs):
        """Build the four network heads (Q, intra-option policy, termination,
        interest) and one optimizer per head.

        Args (selected):
            q_lr / intra_option_lr / termination_lr / interest_lr: learning rates.
            boltzmann_temperature: softmax temperature for the intra-option
                logits in the discrete-action case.
            options_num: number of options P.
            ent_coff: entropy bonus coefficient for the intra-option policy.
            double_q: pick the next option's index with the online Q net and
                evaluate it with the target net.
            use_baseline: subtract the current Q estimate as a baseline in the
                policy-gradient advantage.
            terminal_mask: zero the termination loss on terminal transitions.
            assign_interval: training steps between target-network syncs.

        NOTE(review): ``hidden_units`` is a mutable default argument; it is
        only read here, but a ``None`` sentinel would be safer.
        """
        super().__init__(
            s_dim=s_dim,
            visual_sources=visual_sources,
            visual_resolution=visual_resolution,
            a_dim=a_dim,
            is_continuous=is_continuous,
            **kwargs)
        self.assign_interval = assign_interval
        self.options_num = options_num
        self.termination_regularizer = termination_regularizer  # stored; not referenced elsewhere in this class
        self.ent_coff = ent_coff
        self.use_baseline = use_baseline
        self.terminal_mask = terminal_mask
        self.double_q = double_q
        self.boltzmann_temperature = boltzmann_temperature
        # Online and target Q nets share the same architecture.
        def _q_net(): return rls.critic_q_all(self.feat_dim, self.options_num, hidden_units['q'])
        self.q_net = _q_net()
        self.q_target_net = _q_net()
        # Per-option policy head, plus sigmoid-output termination and interest heads.
        self.intra_option_net = rls.oc_intra_option(self.feat_dim, self.a_dim, self.options_num, hidden_units['intra_option'])
        self.termination_net = rls.critic_q_all(self.feat_dim, self.options_num, hidden_units['termination'], 'sigmoid')
        self.interest_net = rls.critic_q_all(self.feat_dim, self.options_num, hidden_units['interest'], 'sigmoid')
        self.critic_tv = self.q_net.trainable_variables + self.other_tv
        self.actor_tv = self.intra_option_net.trainable_variables
        if self.is_continuous:
            # Learnable per-option log-std for the Gaussian policy.
            self.log_std = tf.Variable(initial_value=-0.5 * np.ones((self.options_num, self.a_dim), dtype=np.float32), trainable=True)  # [P, A]
            self.actor_tv += [self.log_std]
        self.update_target_net_weights(self.q_target_net.weights, self.q_net.weights)
        self.q_lr, self.intra_option_lr, self.termination_lr, self.interest_lr = map(self.init_lr, [q_lr, intra_option_lr, termination_lr, interest_lr])
        self.q_optimizer = self.init_optimizer(self.q_lr, clipvalue=5.)
        self.intra_option_optimizer = self.init_optimizer(self.intra_option_lr, clipvalue=5.)
        self.termination_optimizer = self.init_optimizer(self.termination_lr, clipvalue=5.)
        self.interest_optimizer = self.init_optimizer(self.interest_lr, clipvalue=5.)
        self.model_recorder(dict(
            q_net=self.q_net,
            intra_option_net=self.intra_option_net,
            termination_net=self.termination_net,
            interest_net=self.interest_net,
            q_optimizer=self.q_optimizer,
            intra_option_optimizer=self.intra_option_optimizer,
            termination_optimizer=self.termination_optimizer,
            interest_optimizer=self.interest_optimizer
        ))
    def show_logo(self):
        """Log the ASCII-art algorithm banner."""
        self.recorder.logger.info('''
    xxxx xxxxxx xxxxxxx
    xx xxx xxxx xxxx xxx
    xx xxx xxx xxxx x
    xx xx xxx xxx x
    xx xx xxx xxx
    xx xx xxx xxx
    xx xx xxx xxx
    xx xxx xxx xxx x
    xxxx xxxxxxxx xxxxxxxx
    xxxxx xxxxx
    ''')
    def _generate_random_options(self):
        # One uniformly random option index per agent, as an int32 tensor.
        return tf.constant(np.random.randint(0, self.options_num, self.n_agents), dtype=tf.int32)
    def choose_action(self, s, visual_s, evaluation=False):
        """Pick environment actions and (re)sample the per-agent options.

        Lazily initializes ``self.options`` with random options on the first
        call; the previous options are kept in ``self.last_options`` so they
        can be stored alongside the transition for training.
        """
        if not hasattr(self, 'options'):
            self.options = self._generate_random_options()
        self.last_options = self.options
        a, self.options, self.cell_state = self._get_action(s, visual_s, self.cell_state, self.options)
        a = a.numpy()
        return a
    @tf.function
    def _get_action(self, s, visual_s, cell_state, options):
        """Sample an action from the current option's policy, then sample the
        next option from interest-weighted Q logits."""
        with tf.device(self.device):
            feat, cell_state = self.get_feature(s, visual_s, cell_state=cell_state, record_cs=True)
            q = self.q_net(feat)  # [B, P]
            pi = self.intra_option_net(feat)  # [B, P, A]
            options_onehot = tf.one_hot(options, self.options_num, dtype=tf.float32)  # [B, P]
            options_onehot_expanded = tf.expand_dims(options_onehot, axis=-1)  # [B, P, 1]
            # Select the logits of the currently active option.
            pi = tf.reduce_sum(pi * options_onehot_expanded, axis=1)  # [B, A]
            if self.is_continuous:
                log_std = tf.gather(self.log_std, options)
                mu = tf.math.tanh(pi)
                a, _ = gaussian_clip_rsample(mu, log_std)
            else:
                pi = pi / self.boltzmann_temperature
                dist = tfp.distributions.Categorical(logits=pi)  # [B, ]
                a = dist.sample()
            interests = self.interest_net(feat)  # [B, P]
            # Next option is sampled (not argmax) from interest-weighted Q values.
            op_logits = interests * q  # [B, P] or tf.nn.softmax(q)
            new_options = tfp.distributions.Categorical(logits=op_logits).sample()
        return a, new_options, cell_state
    def learn(self, **kwargs):
        """Run ``kwargs['step']`` off-policy updates via the base-class loop."""
        self.train_step = kwargs.get('train_step')
        def _update():
            # Periodically sync the target Q network with the online one.
            if self.global_step % self.assign_interval == 0:
                self.update_target_net_weights(self.q_target_net.weights, self.q_net.weights)
        for i in range(kwargs['step']):
            self._learn(function_dict={
                'train_function': self.train,
                'update_function': _update,
                'sample_data_list': ['s', 'visual_s', 'a', 'r', 's_', 'visual_s_', 'done', 'last_options', 'options'],
                'train_data_list': ['ss', 'vvss', 'a', 'r', 'done', 'last_options', 'options'],
                'summary_dict': dict([
                    ['LEARNING_RATE/q_lr', self.q_lr(self.train_step)],
                    ['LEARNING_RATE/intra_option_lr', self.intra_option_lr(self.train_step)],
                    ['LEARNING_RATE/termination_lr', self.termination_lr(self.train_step)],
                    ['Statistics/option', self.options[0]]
                ])
            })
    @tf.function(experimental_relax_shapes=True)
    def train(self, memories, isw, crsty_loss, cell_state):
        """One gradient step on all four heads: Q, intra-option policy,
        termination, and interest.

        Returns (td_error, summaries) — td_error for prioritized replay
        weighting, summaries for logging.
        """
        ss, vvss, a, r, done, last_options, options = memories
        last_options = tf.cast(last_options, tf.int32)
        options = tf.cast(options, tf.int32)
        with tf.device(self.device):
            with tf.GradientTape(persistent=True) as tape:
                feat, feat_ = self.get_feature(ss, vvss, cell_state=cell_state, s_and_s_=True)
                q = self.q_net(feat)  # [B, P]
                pi = self.intra_option_net(feat)  # [B, P, A]
                beta = self.termination_net(feat)  # [B, P]
                q_next = self.q_target_net(feat_)  # [B, P], [B, P, A], [B, P]
                beta_next = self.termination_net(feat_)  # [B, P]
                interests = self.interest_net(feat)  # [B, P]
                options_onehot = tf.one_hot(options, self.options_num, dtype=tf.float32)  # [B,] => [B, P]
                q_s = qu_eval = tf.reduce_sum(q * options_onehot, axis=-1, keepdims=True)  # [B, 1]
                beta_s_ = tf.reduce_sum(beta_next * options_onehot, axis=-1, keepdims=True)  # [B, 1]
                q_s_ = tf.reduce_sum(q_next * options_onehot, axis=-1, keepdims=True)  # [B, 1]
                if self.double_q:
                    q_ = self.q_net(feat)  # [B, P], [B, P, A], [B, P]
                    max_a_idx = tf.one_hot(tf.argmax(q_, axis=-1), self.options_num, dtype=tf.float32)  # [B, P] => [B, ] => [B, P]
                    q_s_max = tf.reduce_sum(q_next * max_a_idx, axis=-1, keepdims=True)  # [B, 1]
                else:
                    q_s_max = tf.reduce_max(q_next, axis=-1, keepdims=True)  # [B, 1]
                # Option value target: keep the current option unless it terminates.
                u_target = (1 - beta_s_) * q_s_ + beta_s_ * q_s_max  # [B, 1]
                qu_target = tf.stop_gradient(r + self.gamma * (1 - done) * u_target)
                td_error = qu_target - qu_eval  # gradient : q
                q_loss = tf.reduce_mean(tf.square(td_error) * isw) + crsty_loss  # [B, 1] => 1
                if self.use_baseline:
                    adv = tf.stop_gradient(qu_target - qu_eval)
                else:
                    adv = tf.stop_gradient(qu_target)
                options_onehot_expanded = tf.expand_dims(options_onehot, axis=-1)  # [B, P] => [B, P, 1]
                pi = tf.reduce_sum(pi * options_onehot_expanded, axis=1)  # [B, P, A] => [B, A]
                if self.is_continuous:
                    log_std = tf.gather(self.log_std, options)
                    mu = tf.math.tanh(pi)
                    log_p = gaussian_likelihood_sum(a, mu, log_std)
                    entropy = gaussian_entropy(log_std)
                else:
                    pi = pi / self.boltzmann_temperature
                    log_pi = tf.nn.log_softmax(pi, axis=-1)  # [B, A]
                    entropy = -tf.reduce_sum(tf.exp(log_pi) * log_pi, axis=1, keepdims=True)  # [B, 1]
                    log_p = tf.reduce_sum(a * log_pi, axis=-1, keepdims=True)  # [B, 1]
                pi_loss = tf.reduce_mean(-(log_p * adv + self.ent_coff * entropy))  # [B, 1] * [B, 1] => [B, 1] => 1
                last_options_onehot = tf.one_hot(last_options, self.options_num, dtype=tf.float32)  # [B,] => [B, P]
                beta_s = tf.reduce_sum(beta * last_options_onehot, axis=-1, keepdims=True)  # [B, 1]
                # Interest loss: negative of termination-weighted (probability of
                # the taken option under the interest-softmax) times its Q value.
                pi_op = tf.nn.softmax(interests * tf.stop_gradient(q))  # [B, P] or tf.nn.softmax(q)
                interest_loss = -tf.reduce_mean(beta_s * tf.reduce_sum(pi_op * options_onehot, axis=-1, keepdims=True) * q_s)  # [B, 1] => 1
                v_s = tf.reduce_sum(q * pi_op, axis=-1, keepdims=True)  # [B, P] * [B, P] => [B, 1]
                beta_loss = beta_s * tf.stop_gradient(q_s - v_s)  # [B, 1]
                if self.terminal_mask:
                    beta_loss *= (1 - done)
                beta_loss = tf.reduce_mean(beta_loss)  # [B, 1] => 1
            # Persistent tape: one backward pass per head, each on its own variables.
            q_grads = tape.gradient(q_loss, self.critic_tv)
            intra_option_grads = tape.gradient(pi_loss, self.actor_tv)
            termination_grads = tape.gradient(beta_loss, self.termination_net.trainable_variables)
            interest_grads = tape.gradient(interest_loss, self.interest_net.trainable_variables)
            self.q_optimizer.apply_gradients(
                zip(q_grads, self.critic_tv)
            )
            self.intra_option_optimizer.apply_gradients(
                zip(intra_option_grads, self.actor_tv)
            )
            self.termination_optimizer.apply_gradients(
                zip(termination_grads, self.termination_net.trainable_variables)
            )
            self.interest_optimizer.apply_gradients(
                zip(interest_grads, self.interest_net.trainable_variables)
            )
            self.global_step.assign_add(1)
            return td_error, dict([
                ['LOSS/q_loss', tf.reduce_mean(q_loss)],
                ['LOSS/pi_loss', tf.reduce_mean(pi_loss)],
                ['LOSS/beta_loss', tf.reduce_mean(beta_loss)],
                ['LOSS/interest_loss', tf.reduce_mean(interest_loss)],
                ['Statistics/q_option_max', tf.reduce_max(q_s)],
                ['Statistics/q_option_min', tf.reduce_min(q_s)],
                ['Statistics/q_option_mean', tf.reduce_mean(q_s)]
            ])
    def store_data(self, s, visual_s, a, r, s_, visual_s_, done):
        """
        for off-policy training, use this function to store <s, a, r, s_, done> into ReplayBuffer.
        """
        assert isinstance(a, np.ndarray), "store need action type is np.ndarray"
        assert isinstance(r, np.ndarray), "store need reward type is np.ndarray"
        assert isinstance(done, np.ndarray), "store need done type is np.ndarray"
        self._running_average(s)
        self.data.add(
            s,
            visual_s,
            a,
            r[:, np.newaxis],  # add a trailing axis: [B] -> [B, 1]
            s_,
            visual_s_,
            done[:, np.newaxis],  # add a trailing axis: [B] -> [B, 1]
            self.last_options,
            self.options
        )
    def no_op_store(self, s, visual_s, a, r, s_, visual_s_, done):
        # Transitions collected during no-op warm-up are discarded.
        pass
|
zh
| 0.363415
|
Learning Options with Interest Functions, https://www.aaai.org/ojs/index.php/AAAI/article/view/5114/4987 Options of Interest: Temporal Abstraction with Interest Functions, http://arxiv.org/abs/2001.00271 # [P, A] xxxx xxxxxx xxxxxxx xx xxx xxxx xxxx xxx xx xxx xxx xxxx x xx xx xxx xxx x xx xx xxx xxx xx xx xxx xxx xx xx xxx xxx xx xxx xxx xxx x xxxx xxxxxxxx xxxxxxxx xxxxx xxxxx # [B, P] # [B, P, A] # [B, P] # [B, P, 1] # [B, A] # [B, ] # [B, P] # [B, P] or tf.nn.softmax(q) # [B, P] # [B, P, A] # [B, P] # [B, P], [B, P, A], [B, P] # [B, P] # [B, P] # [B,] => [B, P] # [B, 1] # [B, 1] # [B, 1] # [B, P], [B, P, A], [B, P] # [B, P] => [B, ] => [B, P] # [B, 1] # [B, 1] # [B, 1] # gradient : q # [B, 1] => 1 # [B, P] => [B, P, 1] # [B, P, A] => [B, A] # [B, A] # [B, 1] # [B, 1] # [B, 1] * [B, 1] => [B, 1] => 1 # [B,] => [B, P] # [B, 1] # [B, P] or tf.nn.softmax(q) # [B, 1] => 1 # [B, P] * [B, P] => [B, 1] # [B, 1] # [B, 1] => 1 for off-policy training, use this function to store <s, a, r, s_, done> into ReplayBuffer. # 升维 # 升维
| 2.009248
| 2
|