id stringlengths 2 8 | text stringlengths 16 264k | dataset_id stringclasses 1 value |
|---|---|---|
3444175 | # https://www.youtube.com/watch?v=HGOBQPFzWKo&list=RDCMUC8butISFwT-Wl7EV0hUK0BQ&start_radio=1&t=181s
# Intermediate Python Programming Course (freecodecamp.org)
# Logging
import logging  # 5 different log levels: DEBUG, INFO, WARNING, ERROR, CRITICAL
# Adjust the default display behaviour of log messages.
# BUG FIX: the original format string was '%(levelname)s', which prints ONLY the
# level name and silently drops the message text -- that is why it looked like
# basicConfig "wasn't working".  Include %(message)s so the text is shown.
# NOTE: basicConfig() only takes effect if the root logger has no handlers yet,
# so it must run before the first logging call.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s - %(levelname)s - %(message)s')
#logging.debug('debug msg') # not printed by default (default level is WARNING)
#logging.info('info msg') # not printed by default
#logging.warning('warning msg') # printed
#logging.error('error msg') # printed
#logging.critical('critical msg') # printed
# by default the logger is called the "root" logger
# to log in other modules, best practice is not to use "root"
# create your own module logger
import logging_helper
# 2:26:07 | StarcoderdataPython |
11270583 | <reponame>admariner/polyaxon<gh_stars>0
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from coredb.api.artifacts import queries as runs_artifacts_queries
from coredb.api.artifacts.serializers import RunArtifactLightSerializer
from coredb.api.project_resources import methods
from coredb.api.project_resources.serializers import (
OfflineRunSerializer,
OperationCreateSerializer,
RunSerializer,
)
from coredb.models.runs import Run
from coredb.queries.runs import DEFAULT_COLUMNS_DEFER
from coredb.query_managers.artifact import ArtifactQueryManager
from coredb.query_managers.run import RunQueryManager
from endpoints.project import ProjectResourceListEndpoint
from polycommon.apis.filters import OrderingFilter, QueryFilter
from polycommon.endpoints.base import (
CreateEndpoint,
DestroyEndpoint,
ListEndpoint,
PostEndpoint,
)
class ProjectRunsTagView(ProjectResourceListEndpoint, PostEndpoint):
    """Bulk-tag runs under a project (POST)."""

    def post(self, request, *args, **kwargs):
        # Thin wrapper: the shared project-resources method does the work,
        # receiving routing/permission context via `view=self`.
        return methods.create_runs_tags(view=self, request=request, *args, **kwargs)
class ProjectRunsStopView(ProjectResourceListEndpoint, PostEndpoint):
    """Bulk-stop runs under a project (POST)."""

    def post(self, request, *args, **kwargs):
        # `actor` identifies on whose behalf the stop is performed.
        return methods.stop_runs(
            view=self, request=request, actor=self.project.actor, *args, **kwargs
        )
class ProjectRunsApproveView(ProjectResourceListEndpoint, PostEndpoint):
    """Bulk-approve runs under a project (POST)."""

    def post(self, request, *args, **kwargs):
        # `actor` identifies on whose behalf the approval is performed.
        return methods.approve_runs(
            view=self, request=request, actor=self.project.actor, *args, **kwargs
        )
class ProjectRunsDeleteView(ProjectResourceListEndpoint, DestroyEndpoint):
    """Bulk-delete runs under a project (DELETE)."""

    def delete(self, request, *args, **kwargs):
        # `actor` identifies on whose behalf the deletion is performed.
        return methods.delete_runs(
            view=self, request=request, actor=self.project.actor, *args, **kwargs
        )
class ProjectRunsListView(ProjectResourceListEndpoint, ListEndpoint, CreateEndpoint):
    """List (GET) and create (POST) runs under a project."""

    # Defer wide, rarely-needed columns to keep the list query light.
    queryset = Run.all.defer(*DEFAULT_COLUMNS_DEFER)
    filter_backends = (QueryFilter, OrderingFilter)
    query_manager = RunQueryManager
    check_alive = RunQueryManager.CHECK_ALIVE
    ordering = RunQueryManager.FIELDS_DEFAULT_ORDERING
    ordering_fields = RunQueryManager.FIELDS_ORDERING
    ordering_proxy_fields = RunQueryManager.FIELDS_ORDERING_PROXY
    # Different serializers per HTTP verb: reads return run payloads, creates
    # go through the operation-create flow.
    serializer_class_mapping = {
        "GET": RunSerializer,
        "POST": OperationCreateSerializer,
    }

    def perform_create(self, serializer):
        # Bind the new run to the project resolved from the URL.
        serializer.save(project=self.project)
class ProjectRunsSyncView(ProjectResourceListEndpoint, CreateEndpoint):
    """Upload/sync an offline run into a project (POST)."""

    queryset = Run.all.all()
    serializer_class = OfflineRunSerializer

    def perform_create(self, serializer):
        # Bind the synced run to the project resolved from the URL.
        serializer.save(project=self.project)
class ProjectRunsArtifactsView(ProjectResourceListEndpoint, ListEndpoint):
    """List artifact (lineage) records across all runs of a project (GET)."""

    queryset = runs_artifacts_queries.project_runs_artifacts
    serializer_class = RunArtifactLightSerializer
    filter_backends = (QueryFilter, OrderingFilter)
    query_manager = ArtifactQueryManager
    check_alive = ArtifactQueryManager.CHECK_ALIVE
    ordering = ArtifactQueryManager.FIELDS_DEFAULT_ORDERING
    ordering_fields = ArtifactQueryManager.FIELDS_ORDERING
    ordering_proxy_fields = ArtifactQueryManager.FIELDS_ORDERING_PROXY

    def enrich_queryset(self, queryset):
        # Scope artifacts to the project resolved from the URL.
        return queryset.filter(run__project=self.project)
| StarcoderdataPython |
6448224 | from django.db import models
from cloudinary.models import CloudinaryField
# Create your models here.
class Job(models.Model):
    """A portfolio project/job entry."""

    # (stored value, human-readable label) pairs for `job_type`.
    types = (
        ("web_apps", "web_apps"), ("api", "api"),
        ("upcoming", "upcoming")
    )
    job_name = models.CharField(max_length=100)
    job_type = models.CharField(
        choices=types, max_length=20,
        null=True, blank=True
    )
    job_link = models.URLField(null=True, blank=True)  # live deployment URL, if any
    job_repo = models.URLField(null=True, blank=True)  # source repository URL, if any
    job_image = CloudinaryField("job_image", blank=True, null=True)
    job_description = models.CharField(max_length=100)
    technologies_used = models.CharField(max_length=200)

    def __str__(self):
        """Human-readable representation (used e.g. in the Django admin)."""
        return str(self.job_name)

    class Meta:
        # Default queryset ordering: alphabetical by job name.
        ordering = ("job_name",)
class Intro(models.Model):
    """Introductory summary text for the portfolio landing page."""

    first_summary = models.CharField(max_length=400, blank=True, null=True)
    second_summary = models.CharField(max_length=400, blank=True, null=True)

    def __str__(self):
        # A fixed label: the model is used as a single summary record.
        return "summary for portfolio"
| StarcoderdataPython |
1693463 | # -*- coding: utf-8 -*-
"""
FUZZY REGULATOR
1. Create an instance of the balanced arm
2. Set initial conditions
3. Prepare a fuzzy regulator
4. Begin iterating:
a)
5. Visualize results
note: all values are scaled in standard metric units
note: input params: angle, angular_velocity
note: output params: left thrust, right thrust
"""
import math
import ArmModel
import FuzzyRegulator
import matplotlib.pyplot as plt
import copy
import numpy as np
fig, ax = plt.subplots()
structure_mass = 1.0
arm_length = 0.25
arm_radius = 0.01
interval = 0.01
arm_initial_angle = 45.0
arm_initial_velocity = 0.0
reactions = [8, 8, 8, 7, 8, 4, 4, 5, 6,
8, 7, 7, 5, 6, 3, 3, 4, 2,
8, 7, 5, 3, 4, 2, 2, 3, 0,
7, 6, 4, 2, 2, 1, -1, -2, -3,
7, 5, 3, 2, 0, -2, -3, -5, -7,
3, 2, 1, -1, -2, -2, -4, -6, -7,
0, -3, -2, -2, -4, -3, -5, -7, -8,
-2, -4, -3, -3, -6, -5, -7, -7, -8,
-6, -5, -4, -4, -8, -7, -8, -8, -8]
rules_usage_2d = []
record = []
rules_raw_record = []
rules_processed_record = {}
fit_factor = 0.0
arm_inertial_moment = (1.0 / 12.0) * structure_mass * \
(math.pow(arm_radius, 2) * 3 + math.pow(arm_length * 2, 2))
arm = ArmModel.ArmModel(arm_inertial_moment, arm_length)
arm.setInitialConditions(arm_initial_angle, arm_initial_velocity)
regulator = FuzzyRegulator.FuzzyRegulator(0.0, 10.0, reactions)
for arm_initial_angle_iter in range(-45, 46, 1):
arm.setInitialConditions(arm_initial_angle_iter, 0.0)
for i in range(1, 1000):
regulator.calcNewThrusts(arm.angle, arm.angular_speed, 0.0)
arm.updateState(interval,
regulator.getLeftThrust(), regulator.getRightThrust())
record.append(arm.angle)
fit_factor += abs(regulator.getLastErr() * interval)
for rule_usage in regulator.recently_used_rules:
rules_raw_record.append(copy.deepcopy(rule_usage))
print(str(arm_initial_angle_iter + 46) + " iterations done")
# process rules usage data
for i in range(81):
rules_processed_record[i] = 0.0
for record in rules_raw_record:
rules_processed_record[record[0]] += record[1]
# rules_processed_record[40] = 0.0
for i in range(81):
if rules_processed_record[i] < 0:
rules_processed_record[i] = -math.log(abs(rules_processed_record[i]))
elif rules_processed_record[i] > 0:
rules_processed_record[i] = math.log(rules_processed_record[i])
else:
rules_processed_record[i] = 0
for verse in range(9):
dummy_verse = []
for column in range(9):
dummy_verse.append(rules_processed_record[column + 9 * verse])
rules_usage_2d.append(copy.deepcopy(dummy_verse))
dummy_verse.clear()
data = np.asarray(rules_usage_2d)
heatmap = ax.pcolor(data)
ax.invert_yaxis()
ax.xaxis.tick_top()
ax.set_xticklabels(['-4', '-3', '-2', '-1', '0', '1', '2', '3', '4'], minor=False)
ax.set_yticklabels(['-4', '-3', '-2', '-1', '0', '1', '2', '3', '4'], minor=False)
ax.set_xticks(np.arange(data.shape[0] + 0.5))
ax.set_yticks(np.arange(data.shape[1] + 0.5))
plt.colorbar(heatmap)
plt.show()
print(fit_factor)
| StarcoderdataPython |
227006 | <gh_stars>1-10
import requests
from dataclasses import dataclass
from blockfrost.utils import object_request_wrapper, object_list_request_wrapper
@dataclass
class AddressResponse:
    """Typed payload for the /addresses/{address} endpoint."""

    @dataclass
    class Amount:
        unit: str
        quantity: str

    address: str
    amount: [Amount]
    stake_address: str
    type: str
    script: bool

    def __init__(self, address: str, amount: [Amount], stake_address: str, type: str, script: bool) -> None:
        # Each `amount` entry arrives as a raw dict; hydrate it into Amount.
        hydrated = []
        for entry in amount:
            hydrated.append(self.Amount(**entry))
        self.address = address
        self.amount = hydrated
        self.stake_address = stake_address
        self.type = type
        self.script = script
@object_request_wrapper(AddressResponse)
def address(self, address: str, **kwargs):
"""
Obtain information about a specific address.
https://docs.blockfrost.io/#tag/Cardano-Addresses/paths/~1addresses~1{address}/get
:param address: Bech32 address.
:type address: str
:param return_type: Optional. "object", "json" or "pandas". Default: "object".
:type return_type: str
:returns AddressResponse object.
:rtype AddressResponse
:raises ApiError: If API fails
:raises Exception: If the API response is somehow malformed.
"""
return requests.get(
url=f"{self.url}/addresses/{address}",
headers=self.default_headers
)
@dataclass
class AddressesTotalResponse:
    """Typed payload for the /addresses/{address}/total endpoint."""

    @dataclass
    class Sum:
        unit: str
        quantity: str

    address: str
    received_sum: [Sum]
    sent_sum: [Sum]
    tx_count: int

    def __init__(self, address: str, received_sum: [Sum], sent_sum: [Sum], tx_count: int) -> None:
        def hydrate(entries):
            # Raw API dicts -> typed Sum instances.
            return [self.Sum(**entry) for entry in entries]

        self.address = address
        self.received_sum = hydrate(received_sum)
        self.sent_sum = hydrate(sent_sum)
        self.tx_count = tx_count
@object_request_wrapper(AddressesTotalResponse)
def address_total(self, address: str, **kwargs):
"""
Obtain details about an address.
https://docs.blockfrost.io/#tag/Cardano-Addresses/paths/~1addresses~1{address}~1total/get
:param address: Bech32 address.
:type address: str
:param return_type: Optional. "object", "json" or "pandas". Default: "object".
:type return_type: str
:returns AddressesTotalResponse object.
:rtype AddressesTotalResponse
:raises ApiError: If API fails
:raises Exception: If the API response is somehow malformed.
"""
return requests.get(
url=f"{self.url}/addresses/{address}/total",
headers=self.default_headers
)
@dataclass
class AddressesUTXOSResponse:
    """One UTXO entry from the /addresses/{address}/utxos endpoint."""

    @dataclass
    class Amount:
        unit: str
        quantity: str

    tx_hash: str
    output_index: int
    amount: [Amount]
    block: str
    data_hash: str

    def __init__(self, tx_hash: str, output_index: int, amount: [Amount], block: str, data_hash: str) -> None:
        self.tx_hash = tx_hash
        self.output_index = output_index
        # Convert each raw dict into a typed Amount record.
        self.amount = [self.Amount(**raw) for raw in list(amount)]
        self.block = block
        self.data_hash = data_hash
@object_list_request_wrapper(AddressesUTXOSResponse)
def address_utxos(self, address: str, **kwargs):
"""
UTXOs of the address.
https://docs.blockfrost.io/#tag/Cardano-Addresses/paths/~1addresses~1{address}~1utxos/get
:param address: Bech32 address.
:type address: str
:param return_type: Optional. "object", "json" or "pandas". Default: "object".
:type return_type: str
:param gather_pages: Optional. Default: 100. Will collect all pages into one return
:type gather_pages: bool
:param count: Optional. Default: 1. The number of results displayed on one page.
:type count: int
:param page: Optional. The page number for listing the results.
:type page: int
:param order: Optional. "asc" or "desc". Default: "asc".
:type order: str
:returns A list of AddressesUTXOSResponse objects.
:rtype [AddressesUTXOSResponse]
:raises ApiError: If API fails
:raises Exception: If the API response is somehow malformed.
"""
return requests.get(
url=f"{self.url}/addresses/{address}/utxos",
params=self.query_parameters(kwargs),
headers=self.default_headers
)
@dataclass
class AddressesUTXOSAssetResponse:
    """One UTXO entry from /addresses/{address}/utxos/{asset}."""

    @dataclass
    class Amount:
        unit: str
        quantity: str

    tx_hash: str
    output_index: int
    amount: [Amount]
    block: str
    data_hash: str

    def __init__(self, tx_hash: str, output_index: int, amount: [Amount], block: str, data_hash: str) -> None:
        self.tx_hash = tx_hash
        self.output_index = output_index
        self.block = block
        self.data_hash = data_hash
        # Hydrate the raw dicts last; assignment order has no behavioural impact.
        self.amount = [self.Amount(**raw) for raw in amount]
@object_list_request_wrapper(AddressesUTXOSAssetResponse)
def address_utxos_asset(self, address: str, asset: str, **kwargs):
"""
UTXOs of the address.
https://docs.blockfrost.io/#tag/Cardano-Addresses/paths/~1addresses~1{address}~1utxos~1{asset}/get
:param address: Bech32 address.
:type address: str
:param asset: Concatenation of the policy_id and hex-encoded asset_name.
:type asset: str
:param return_type: Optional. "object", "json" or "pandas". Default: "object".
:type return_type: str
:param gather_pages: Optional. Default: 100. Will collect all pages into one return
:type gather_pages: bool
:param count: Optional. Default: 1. The number of results displayed on one page.
:type count: int
:param page: Optional. The page number for listing the results.
:type page: int
:param order: Optional. "asc" or "desc". Default: "asc".
:type order: str
:returns A list of AddressesUTXOSAssetResponse objects.
:rtype [AddressesUTXOSAssetResponse]
:raises ApiError: If API fails
:raises Exception: If the API response is somehow malformed.
"""
return requests.get(
url=f"{self.url}/addresses/{address}/utxos/{asset}",
params=self.query_parameters(kwargs),
headers=self.default_headers
)
@dataclass
class AddressesTransactionResponse:
    """One transaction reference from /addresses/{address}/transactions."""
    tx_hash: str
    tx_index: int  # index of the transaction within its block
    block_height: int
@object_list_request_wrapper(AddressesTransactionResponse)
def address_transactions(self, address: str, from_block: str = None, to_block: str = None,
**kwargs):
"""
Transactions on the address.
https://docs.blockfrost.io/#tag/Cardano-Addresses/paths/~1addresses~1{address}~1transactions/get
:param address: Bech32 address.
:type address: str
:param from: The block number and optionally also index from which (inclusive) to start search for results, concatenated using colon. Has to be lower than or equal to to parameter.
:type from: str
:param to: The block number and optionally also index where (inclusive) to end the search for results, concatenated using colon. Has to be higher than or equal to from parameter.
:type to: str
:param return_type: Optional. "object", "json" or "pandas". Default: "object".
:type return_type: str
:param gather_pages: Optional. Default: 100. Will collect all pages into one return
:type gather_pages: bool
:param count: Optional. Default: 1. The number of results displayed on one page.
:type count: int
:param page: Optional. The page number for listing the results.
:type page: int
:param order: Optional. "asc" or "desc". Default: "asc".
:type order: str
:returns A list of AddressesUTXOSResponse objects.
:rtype [AddressesUTXOSResponse]
:raises ApiError: If API fails
:raises Exception: If the API response is somehow malformed.
"""
return requests.get(
url=f"{self.url}/addresses/{address}/transactions",
params={
'from': from_block,
'to': to_block,
**self.query_parameters(kwargs)
},
headers=self.default_headers
)
| StarcoderdataPython |
6599176 | <gh_stars>0
import sqlite3

# Path to the RPG example database.
DB_FILEPATH = 'rpg_db.sqlite3'
# NOTE(review): the literal below should presumably be DB_FILEPATH -- confirm.
connection = sqlite3.connect('rpg_db.sqlite3')
print("CONNECTION:", connection)
cursor = connection.cursor()
print("CURSOR", cursor)
# Count all rows in the charactercreator_character table.
query1 = """
SELECT count (*)
from charactercreator_character
"""
# fetchall() returns a list of row tuples (here a single one-column row).
result1 = cursor.execute(query1).fetchall()
print("RESULT 1", result1) | StarcoderdataPython |
6700008 | <filename>muffin_rest/__init__.py
"""REST helpers for Muffin Framework."""
__version__ = "4.0.2"
__project__ = "muffin-rest"
__author__ = "<NAME> <<EMAIL>>"
__license__ = "MIT"
# Default query params
LIMIT_PARAM = 'limit'
OFFSET_PARAM = 'offset'
from .api import API # noqa
from .handler import RESTHandler # noqa
from .errors import APIError # noqa
# Just an alias to support legacy style
Api = API
__all__ = (
'API', 'Api', 'RESTHandler', 'APIError',
'PWRESTHandler', 'PWFilter', 'PWFilters', 'PWSort', 'PWSorting',
'SARESTHandler', 'SAFilter', 'SAFilters', 'SASort', 'SASorting',
'MongoRESTHandler', 'MongoFilter', 'MongoFilters', 'MongoSort', 'MongoSorting',
)
# Support Peewee ORM
try:
from .peewee import PWRESTHandler
from .peewee.filters import PWFilter, PWFilters
from .peewee.sorting import PWSort, PWSorting
except ImportError as exc:
pass
# Support SQLAlchemy ORM
try:
from .sqlalchemy import SARESTHandler
from .sqlalchemy.filters import SAFilter, SAFilters
from .sqlalchemy.sorting import SASort, SASorting
except ImportError:
pass
# Support Mongo ORM
try:
from .mongo import MongoRESTHandler
from .mongo.filters import MongoFilter, MongoFilters
from .mongo.sorting import MongoSort, MongoSorting
except ImportError:
pass
# pylama:ignore=W0611
| StarcoderdataPython |
3518479 | <reponame>nathanielwarner/seatsio-python
from tests.seatsioClientTest import SeatsioClientTest
from tests.util.asserts import assert_that
class RegenerateSubaccountDesignerKeyTest(SeatsioClientTest):
    """Regenerating a subaccount's designer key must produce a new, non-blank key."""

    def test(self):
        subaccount = self.client.subaccounts.create()
        self.client.subaccounts.regenerate_designer_key(subaccount.id)
        # Re-fetch so we compare server-side state, not the stale local object.
        retrieved_subaccount = self.client.subaccounts.retrieve(subaccount.id)
        assert_that(retrieved_subaccount.designer_key).is_not_blank().is_not_equal_to(subaccount.designer_key)
| StarcoderdataPython |
5155071 | <filename>measurements.py<gh_stars>1-10
from operator import attrgetter
import statistics
from measure import Note
# contains measurement functions that only operate on a single measure
class SingleMeasurements:
    """Measurement functions that operate on a single measure.

    Every function takes a `measure` object exposing a `.notes` sequence whose
    items carry `note_len`, `is_rest` and `midi_num` attributes.
    """

    @staticmethod
    def percent_vacant(measure):
        """Total length of the measure's rests (fraction of the measure)."""
        return float(sum(map(attrgetter('note_len'), filter(attrgetter('is_rest'), measure.notes))))

    @staticmethod
    def note_length_stdev(measure):
        """Standard deviation of note lengths (0 for fewer than two notes)."""
        if len(measure.notes) <= 1:
            return 0
        # BUG FIX: originally measured `midi_num` (pitch) instead of `note_len`.
        return float(statistics.stdev(map(attrgetter('note_len'), measure.notes)))

    @staticmethod
    def note_length_mean(measure):
        """Mean note length (0 for an empty measure)."""
        if len(measure.notes) < 1:
            return 0
        # BUG FIX: originally measured `midi_num` (pitch) instead of `note_len`.
        return float(statistics.mean(map(attrgetter('note_len'), measure.notes)))

    @staticmethod
    def midi_number_stdev(measure):
        """Standard deviation of MIDI pitch numbers (0 for fewer than two notes)."""
        if len(measure.notes) <= 1:
            return 0
        return float(statistics.stdev(map(attrgetter('midi_num'), measure.notes)))

    @staticmethod
    def midi_number_mean(measure):
        """Mean MIDI pitch number (0 for an empty measure)."""
        if len(measure.notes) < 1:
            return 0
        return float(statistics.mean(map(attrgetter('midi_num'), measure.notes)))
# Value ranges for each measurement unit, used for normalisation.
UNITS = {
    'note_length': { 'min': 0.0, 'max': 1.0 },
    # the values for note_num are just placeholders
    # since they get changed by the user config anyway
    'note_num': { 'min': 0, 'max': 128 },
    'percent': { 'min': 0.0, 'max': 1.0 }
}

def min_max_normalize(value, unit):
    """Scale `value` into [0, 1] using the configured range for `unit`."""
    bounds = UNITS[unit]
    lo, hi = bounds['min'], bounds['max']
    return (value - lo) / (hi - lo)
| StarcoderdataPython |
1627692 | <gh_stars>0
from pydantic import BaseModel
class Favorite(BaseModel):
    """A favorite record as read from DynamoDB.

    PK and SK are presumably the table's partition and sort keys, and
    `createdAt` a timestamp string -- TODO confirm against the writer.
    """
    indexKey: str
    PK: str
    SK: str
    createdAt: str
class ReqFavorite(BaseModel):
    """Request body for creating a favorite: a user/video pair."""
    user: str
    video: str
| StarcoderdataPython |
6406904 | <gh_stars>0
import os
import sqlite3
# DB_FILEPATH = os.path.join(os.path_dirname(__file__), "..", "data", "chinook.db")
conn = sqlite3.connect('rpg_db.sqlite3')
curs = conn.cursor()
# query = 'SELECT COUNT(*) FROM armory_item;'
# curs.execute(query)
# results = curs.execute(query).fetchall()
# breakpoint()
def format_query_count(message, query):
    """Run a single-value COUNT query on the module-level cursor and print it."""
    # fetchone() returns one row tuple; [0] extracts the scalar count.
    result = curs.execute(query).fetchone()[0]
    print(f"{message}: {result}")
def format_query_join(message, label, query):
    """Print one '<name> has <n> <label>(s)' line per row of a two-column query."""
    result = curs.execute(query).fetchall()
    print(message)
    for row in result:
        (name, count) = row
        # Pluralise the label unless the count is exactly 1.
        print(f"{name} has {count} {label}{'' if count == 1 else 's'}")
# How many total Characters are there?
format_query_count('How many total Characters are there?', 'SELECT count(*) FROM charactercreator_character')
# How many of each specific subclass?
format_query_count('How many total clerics are there?', 'SELECT count(*) from charactercreator_cleric')
format_query_count('How many total fighters are there?', 'SELECT count(*) from charactercreator_fighter')
format_query_count('How many total mages are there?', 'SELECT count(*) from charactercreator_mage')
format_query_count('How many total necromancers are there?', 'SELECT count(*) from charactercreator_necromancer')
format_query_count('How many total thieves are there?', 'SELECT count(*) from charactercreator_thief')
# How many total Items?
format_query_count('How many total items?', 'SELECT count(*) FROM armory_item;')
# How many of the Items are weapons?
format_query_count('How many of the items are weapons?', 'SELECT count(*) from armory_weapon;')
# How many of the Items are not weapons?
format_query_count('How many of the items are not weapons?',
'SELECT count(*) from armory_item WHERE item_id NOT IN (SELECT item_ptr_id FROM armory_weapon);')
# How many Items does each character have? (Return first 20 rows)
format_query_join('How many Items does each character have?', 'item', """
SELECT
a.name,
count(b.item_id) AS 'count of items'
FROM charactercreator_character a
LEFT JOIN charactercreator_character_inventory b
ON a.character_id = b.character_id
GROUP BY a.character_id
ORDER BY a.character_id
LIMIT 20;""")
# How many Weapons does each character have? (Return first 20 rows)
format_query_join('How many Weapons does each character have?', 'weapon', """
SELECT
a.name,
count(c.item_ptr_id) AS 'count of weapons'
FROM charactercreator_character a
LEFT JOIN charactercreator_character_inventory b
ON a.character_id = b.character_id
LEFT JOIN armory_weapon c
ON b.item_id = c.item_ptr_id
GROUP BY a.character_id
ORDER BY a.character_id
LIMIT 20;""")
# On average, how many Items does each Character have?
query = """
SELECT COUNT(DISTINCT item_id)
FROM charactercreator_character_inventory
GROUP BY character_id;"""
results = curs.execute(query).fetchall()
total_count_items = 0
total_characters = len(results)
for i in range(total_characters):
total_count_items += results[i][0]
average = total_count_items / total_characters
print('Average items per character is', average)
# On average, how many Weapons does each character have?
query = """
SELECT COUNT(item_ptr_id)
FROM charactercreator_character_inventory
LEFT JOIN armory_weapon
ON item_id = item_ptr_id
GROUP BY character_id;"""
results = curs.execute(query).fetchall()
total_count_items = 0
total_characters = len(results)
for i in range(total_characters):
total_count_items += results[i][0]
average = total_count_items / total_characters
print('Average weapons per character is', average) | StarcoderdataPython |
228033 | <gh_stars>0
from selenium import webdriver
from bs4 import BeautifulSoup
#import urllib 使用再拿掉
import time
import random
import selenium.webdriver.support.ui as ui
import time
import json
import re
def investigate_by_xpath(driver, time_wait, object, message):
    """Block until the element matching `object` (an XPath) is displayed.

    Credential verification introduces a delay, so this confirms the page has
    actually advanced to the next step before continuing.
    """
    # input: wait time, XPath of the element to confirm, success message
    # print(message)
    wait = ui.WebDriverWait(driver, time_wait)
    wait.until(lambda driver: driver.find_element_by_xpath(object).is_displayed())
def back_to_menu(driver):
    """Click the back-to-menu link and wait until the menu page is shown again."""
    driver.find_element_by_xpath('//a[contains(text()," 回到菜單")]').click()
    investigate_by_xpath(driver, 10, '//a[contains(text(),"瀏覽菜單")]', '回到菜單')
def filter_cost(cost_str):
    """Extract the first run of digits in a price string as an int."""
    digit_runs = re.findall(r'\d+', cost_str)
    first_run = digit_runs[0]
    return int(first_run)
def parser_main_food(soup, items):
product_name_list = soup.find_all('h5',{'class': 'product-title'})
product_cost_list = soup.find_all('span',{'class': 'starting-price'})
for name, cost in zip(product_name_list, product_cost_list):
items["主餐"][name.text] = filter_cost(cost.text)
return items
def parser_append(soup, items):
    """Parse add-on names and prices from `soup` into items["追加"]."""
    append_name_list = soup.find_all('div',{'class': 'colsize-5'})
    for item in append_name_list:
        name = item.div.div.h5.text
        cost = item.div.find('div',{'class':'product-cost'}).text
        cost = filter_cost(cost)
        items["追加"][name] = cost
    return items
def parser_coffee(soup, items):
    """Parse McCafé product names and prices from `soup` into items["咖啡"]."""
    coffee_name_list = soup.find_all('h5', {'class': 'product-title'})
    coffee_cost_list = soup.find_all('span', {'class': 'starting-price'})
    for name, cost in zip(coffee_name_list, coffee_cost_list):
        # CONSISTENCY FIX: reuse filter_cost() like the other parser_* helpers;
        # it performs the same int(re.findall(r'\d+', ...)[0]) extraction that
        # was previously inlined here.
        items["咖啡"][name.text] = filter_cost(cost.text)
    return items
def parser_combination(soup, items):
    """Parse combo prices (as deltas over the a-la-carte price) into items["組合"]."""
    combination_name_list = soup.find_all('h4',{'class': 'item-title'})
    combination_cost_list = soup.find_all('td',{'class': 'cost-column'})
    # The last row is the a-la-carte (單點) price; combos are priced relative to it.
    single = filter_cost(combination_cost_list[-1].text)
    for name, cost in zip(combination_name_list[:-1], combination_cost_list[:-1]):  # skip the last (a-la-carte) row
        name = name.text.split(' - ')[0]
        cost = filter_cost(cost.text) - single  # subtract the a-la-carte price
        items["組合"][name] = cost
    return items
def parser_drink(soup, items):
    """Parse drink names and prices from `soup` into items["飲料"]."""
    drink_name_list = soup.find_all('h4',{'class': 'item-title'})
    drink_cost_list = soup.find_all('td',{'class': 'cost-column'})
    # NOTE(review): unlike parser_combination, no trailing row is skipped here,
    # despite the copied "skip last row" comment in the original -- confirm.
    for name, cost in zip(drink_name_list, drink_cost_list):
        name = name.text
        cost = filter_cost(cost.text)
        items["飲料"][name] = cost
    return items
def load_account(json_data='crawling/account.json'):
    """Load (username, password) from a JSON credentials file.

    Raises an Exception if the file still holds the unedited placeholders.
    """
    with open(json_data, 'r') as account_file:
        credentials = json.load(account_file)
    if credentials['username'] == 'yourmail' or credentials['password'] == '<PASSWORD>':
        raise Exception('請至account.json更新可使用帳號跟密碼')
    return credentials['username'], credentials['password']
def start_crawling():
    """Log in to McDelivery, scrape the menu into a price dict and persist it
    to crawling/items.json.

    NOTE(review): leading indentation was lost in this copy; the loop extents
    below were reconstructed from the navigation flow -- confirm against the
    original repository.
    """
    # -- setup / login --
    username, password = load_account()
    driver = webdriver.Chrome()
    url = 'https://www.mcdelivery.com.tw/tw/browse/menu.html'
    driver.get(url)
    driver.find_element_by_xpath('//a[contains(text(),"登入")]').click()
    investigate_by_xpath(driver, 10, '//h2[@id="modal-title"]', '進入登入畫面')
    driver.find_element_by_xpath('//input[@placeholder="電子郵件"]').send_keys(username)
    driver.find_element_by_xpath('//input[@placeholder="密碼"]').send_keys(password)
    driver.find_element_by_xpath('//button[contains(text(),"登入")]').click()
    investigate_by_xpath(driver, 10, '//a[contains(text(),"開始訂餐")]', '登入成功!')
    driver.get('https://www.mcdelivery.com.tw/tw/menu.html')
    investigate_by_xpath(driver, 10, '//a[contains(text(),"瀏覽菜單")]', '進入點餐畫面')
    items = {"主餐": {}, "組合": {'主餐': 0}, "追加": {"無": 0}, "咖啡": {}, "飲料": {}}
    # -- main dishes --
    driver.find_element_by_xpath('//span[contains(text(),"超值全餐和主餐單點")]').click()
    investigate_by_xpath(driver, 10, '//li[contains(text(),"超值全餐和主餐單點")]', '進入-超值全餐和主餐單點')
    soup = BeautifulSoup(driver.page_source, "html.parser")
    items = parser_main_food(soup, items)
    # -- combos --
    driver.find_element_by_xpath('//a[contains(text(),"訂購")]').click()
    investigate_by_xpath(driver, 10, '//h3[contains(text(),"選擇您的餐點")]', '進入訂購畫面')
    soup = BeautifulSoup(driver.page_source, "html.parser")
    items = parser_combination(soup, items)
    # -- add-ons (same order page) --
    soup = BeautifulSoup(driver.page_source, "html.parser")
    items = parser_append(soup, items)
    # -- coffee --
    back_to_menu(driver)
    #time.sleep(5)
    driver.find_element_by_xpath('//span[contains(text(),"McCafé")]').click()
    #time.sleep(5)
    #investigate_by_xpath(driver, 10, '//li[contains(text(),"McCafé")]', '進入-McCafé')
    soup = BeautifulSoup(driver.page_source, "html.parser")
    items = parser_coffee(soup, items)
    # -- drinks: open each order page in turn, parse it, then return to menu --
    driver.find_element_by_xpath('//span[contains(text(),"飲料")]').click()
    #time.sleep(5)
    #investigate_by_xpath(driver, 10, '//li[contains(text(),"飲料")]', '進入-飲料')
    for item in driver.find_elements_by_xpath('//a[contains(text(),"訂購")]'):
        item.click()
        investigate_by_xpath(driver, 10, '//h3[contains(text(),"選擇您的餐點")]', '進入訂購畫面')
        soup = BeautifulSoup(driver.page_source, "html.parser")
        items = parser_drink(soup, items)
        back_to_menu(driver)
    driver.close()
    # BUG FIX: resolved a leftover git merge conflict here; keep the
    # 'crawling/items.json' path so the write matches the read just below.
    with open('crawling/items.json', 'w') as f:
        json.dump(items, f)
    with open('crawling/items.json', 'r') as f:
        items = json.load(f)
| StarcoderdataPython |
12821965 | from discord_ritoman.lol.stats.match_stat import LoLMatchStat
from pkgutil import iter_modules
from pathlib import Path
from importlib import import_module

__all__ = []

# Auto-discover stat modules: every sibling module that defines at least one
# LoLMatchStat instance is re-exported via __all__.
package_dir = str(Path(__file__).resolve().parent)
for (_, module_name, _) in iter_modules([package_dir]):
    # import the module and scan its attributes
    module = import_module(f"{__name__}.{module_name}")
    for attribute_name in dir(module):
        attribute = getattr(module, attribute_name)
        if isinstance(attribute, LoLMatchStat):
            # BUG FIX: record the module name at most once -- the original
            # appended it once per LoLMatchStat attribute, so a module with
            # several stats ended up duplicated in __all__.
            __all__.append(module_name)
            break
| StarcoderdataPython |
3449136 | <filename>tests/hdx/freshness/test_aging.py<gh_stars>1-10
"""
Unit tests for the aging code.
"""
import os
from datetime import datetime, timedelta
from os.path import join
import pytest
from hdx.database import Database
from hdx.freshness.datafreshness import DataFreshness
class TestAging:
@pytest.fixture(scope="class")
def nodatabase(self):
dbpath = join("tests", "test_freshness.db")
try:
os.remove(dbpath)
except FileNotFoundError:
pass
return {"driver": "sqlite", "database": dbpath}
@pytest.fixture(scope="class")
def now(self):
return datetime.utcnow()
@pytest.fixture(scope="class")
def datasets(self):
return list()
@pytest.mark.parametrize(
"days_last_modified,update_frequency,expected_status",
[
(0, 1, 0),
(0, 7, 0),
(0, 365, 0),
(1, 1, 1),
(1, 7, 0),
(2, 1, 2),
(3, 1, 3),
(6, 7, 0),
(7, 7, 1),
(8, 7, 1),
(13, 7, 1),
(13, 14, 0),
(14, 7, 2),
(14, 14, 1),
(20, 7, 2),
(20, 14, 1),
(21, 7, 3),
(21, 14, 2),
(29, 14, 3),
(29, 30, 0),
(30, 30, 1),
(30, 90, 0),
(45, 30, 2),
(45, 90, 0),
(60, 30, 3),
(60, 90, 0),
(89, 90, 0),
(90, 90, 1),
(90, 90, 1),
(120, 90, 2),
(150, 90, 3),
(179, 180, 0),
(180, 180, 1),
(210, 180, 2),
(210, 365, 0),
(240, 180, 3),
(240, 365, 0),
(364, 365, 0),
(365, 365, 1),
(425, 365, 2),
(455, 365, 3),
],
)
def test_aging(
self,
configuration,
nodatabase,
now,
datasets,
days_last_modified,
update_frequency,
expected_status,
):
with Database(**nodatabase) as session:
freshness = DataFreshness(session=session, datasets=datasets, now=now)
last_modified = now - timedelta(days=days_last_modified)
status = freshness.calculate_aging(last_modified, update_frequency)
assert status == expected_status
| StarcoderdataPython |
3549412 | <filename>modeling/dynamics/bullet/bdmodel.py<gh_stars>0
import copy
import math
import modeling.geometricmodel as gm
import modeling.dynamics.bullet.bdbody as bdb
class BDModel(object):
    """
    load an object as a bullet dynamics model
    author: weiwei
    date: 20190627

    Thin facade that keeps a renderable GeometricModel (self._gm) and a
    bullet rigid body (self._bdb) in sync: pose setters write to both,
    pose getters read from the dynamics body.
    """
    def __init__(self, objinit, mass=None, restitution=0, allowdeactivation=False, allowccd=True, friction=.2,
                 stationary=False, type="convex", name="bdm"):
        """
        :param objinit: GeometricModel (CollisionModel also work); may also be
                        another BDModel (copy construction) or anything accepted
                        by gm.GeometricModel (e.g. a mesh file path)
        :param mass: defaults to 0 (a static body) when not given
        :param restitution:
        :param allowdeactivation:
        :param allowccd:
        :param friction:
        :param dynamic:
        :param type: "convex", "triangle"
        :param name:
        """
        if isinstance(objinit, BDModel):
            # copy construction: deep-copy the geometry, clone the bullet body
            self._gm = copy.deepcopy(objinit.gm)
            self._bdb = objinit.bdb.copy()
        elif isinstance(objinit, gm.GeometricModel):
            if mass is None:
                mass = 0
            self._gm = objinit
            self._bdb = bdb.BDBody(self._gm, type, mass, restitution, allowdeactivation=allowdeactivation,
                                   allowccd=allowccd, friction=friction, stationary=stationary, name=name)
        else:
            # treat objinit as raw input for GeometricModel (e.g. a file path)
            if mass is None:
                mass = 0
            self._gm = gm.GeometricModel(objinit)
            self._bdb = bdb.BDBody(self._gm, type, mass, restitution, allowdeactivation=allowdeactivation,
                                   allowccd=allowccd, friction=friction, stationary=stationary, name=name)
    @property
    def gm(self):
        # read-only property
        return self._gm
    @property
    def bdb(self):
        # read-only property
        return self._bdb
    def setcolor(self, rgba):
        """Set the display color (rgba array) of the underlying geometry."""
        self._gm.setcolor(rgba)
    def clearcolor(self):
        self._gm.clearcolor()
    def getcolor(self):
        return self._gm.getcolor()
    def setpos(self, npvec3):
        """Move the body to position npvec3 while keeping its current rotation."""
        homomat_bdb = self._bdb.gethomomat()
        homomat_bdb[:3, 3] = npvec3
        # write the updated pose to both the dynamics body and the geometry
        self._bdb.sethomomat(homomat_bdb)
        self._gm.sethomomat(homomat_bdb)
    def getpos(self):
        return self._bdb.getpos()
    def sethomomat(self, npmat4):
        """Set the full 4x4 homogeneous pose on both dynamics body and geometry."""
        self._bdb.sethomomat(npmat4)
        self._gm.sethomomat(npmat4)
    def gethomomat(self):
        # the dynamics body is the authoritative pose source
        return self._bdb.gethomomat()
    def setmass(self, mass):
        self._bdb.setmass(mass)
    def reparent_to(self, obj):
        """
        obj must be base.render
        :param obj:
        :return:
        author: weiwei
        date: 20190627
        """
        # `base` is the global World instance created by visualization.panda.world -- TODO confirm
        if obj is not base.render:
            raise ValueError("This bullet dynamics model doesnt support rendering to non base.render nodes!")
        else:
            self._gm.sethomomat(self.bdb.gethomomat()) # get updated with dynamics
            self._gm.reparent_to(obj)
    def remove(self):
        self._gm.remove()
    def detach(self):
        self._gm.detach()
    def startphysics(self):
        # register the rigid body with the global physics world
        base.physicsworld.attach(self._bdb)
    def endphysics(self):
        base.physicsworld.remove(self._bdb)
    def showlocalframe(self):
        self._gm.showlocalframe()
    def unshowlocalframe(self):
        self._gm.unshowlocalframe()
    def copy(self):
        """Return a new BDModel deep-copying this one (see __init__)."""
        return BDModel(self)
if __name__ == "__main__":
    # Interactive demo: a static triangle-mesh bowl plus dynamic convex blocks
    # spawned in a grid each time <space> is pressed.
    import os
    import numpy as np
    import basis.robot_math as rm
    import visualization.panda.world as wd
    import random  # NOTE(review): imported but never used below
    base = wd.World(camp=[1, .3, 1], lookat_pos=[0, 0, 0], toggle_debug=False)
    base.setFrameRateMeter(True)
    this_dir, this_filename = os.path.split(__file__)
    objpath = os.path.join(this_dir, "objects", "block.meshes")
    bunnycm = BDModel(objpath, mass=1, type="convex")
    objpath2 = os.path.join(this_dir, "objects", "bowlblock.meshes")
    # mass=0 + stationary: the bowl never moves but still collides
    bunnycm2 = BDModel(objpath2, mass=0, type="triangle", stationary=True)
    bunnycm2.setcolor(np.array([0, 0.7, 0.7, 1.0]))
    bunnycm2.setpos(np.array([0, 0, 0]))
    base.attach_autoupdate_object(bunnycm2)
    def update(bunnycm, task):
        # Per-frame task: spawn 100 randomly colored copies on <space>.
        if base.inputmgr.keymap['space'] is True:
            for i in range(100):
                bunnycm1 = bunnycm.copy()
                bunnycm1.setmass(.1)
                rndcolor = np.random.rand(4)
                rndcolor[-1] = 1  # keep alpha opaque
                bunnycm1.setcolor(rndcolor)
                rotmat = rm.rotmat_from_euler(0, 0, math.pi/12)
                # z is always 0 for i < 100, so the grid is effectively a single 10x10 layer
                z = math.floor(i / 100)
                y = math.floor((i - z * 100) / 10)
                x = i - z * 100 - y * 10
                print(x, y, z, "\n")
                bunnycm1.sethomomat(rm.homomat_from_posrot(np.array([x * 0.015 - 0.07, y * 0.015 - 0.07, 0.15 + z * 0.015]), rotmat))
                base.attach_autoupdate_object(bunnycm1)
                bunnycm1.startphysics()
            base.inputmgr.keymap['space'] = False
        return task.cont
    gm.genframe().reparent_to(base.render)
    # `taskMgr` is a panda3d ShowBase global injected when World is created -- TODO confirm
    taskMgr.add(update, "addobject", extraArgs=[bunnycm], appendTask=True)
    base.run()
| StarcoderdataPython |
196640 | <reponame>flying-sheep/goatools<gh_stars>100-1000
"""Test the loading of the optional GO term fields."""
# https://owlcollab.github.io/oboformat/doc/GO.format.obo-1_4.html
__copyright__ = "Copyright (C) 2010-2018, <NAME>, <NAME>, All rights reserved."
__author__ = "<NAME>"
import os
import sys
import re
import timeit
import collections as cx
from goatools.godag.prttime import GoDagTimed
from goatools.godag.prttime import prt_hms
class OptionalAttrs(object):
    """Holds data for GO relationship test.

    Loads a GO DAG twice: once through GoDagTimed (the code under test) and
    once via the minimal obo parser in _init_go2dct, then exposes chk_*
    methods that compare the two views field by field.
    """
    repo = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../..")
    cmpfld = re.compile(r'^(\S+)\s*:\s*(\S.*\S)\s*$') # Field line pattern
    exp_scopes = set(['EXACT', 'BROAD', 'NARROW', 'RELATED'])
    exp_xrefpat = re.compile(r'^\S+:\S+$')
    # Required attributes are always loaded ('is_obsolete' appears twice; harmless in a set literal)
    exp_req = set(['name', 'item_id', 'is_obsolete', 'namespace', 'alt_id', 'is_a', 'is_obsolete'])
    # Generated attributes
    exp_gen = set(['level', 'depth', 'parents', 'children', '_parents'])
    exp_relationships = set(['part_of',
                             'regulates', 'negatively_regulates', 'positively_regulates'])
    attrs_scalar = set(['item_id', 'namespace', 'name', 'def', 'comment'])
    attrs_set = set(['xref', 'subset', 'alt_id'])
    def __init__(self, fin_obo, opt_field=None, keep_alt_ids=False):
        """Load the obo both via GoDagTimed and via this class's own parser."""
        self.opt = opt_field  # None causes all fields to read to exp dict
        self.obo = os.path.join(self.repo, fin_obo)
        self.go2obj = GoDagTimed(self.obo, opt_field, keep_alt_ids).go2obj
        self.dcts = self._init_go2dct()  # go2dct typdefdct flds
        # keep only expected records that the loader also produced
        self.go2dct = {go:d for go, d in self.dcts['go2dct'].items() if go in self.go2obj}
        self.num_tot = len(self.go2obj)
        self._chk_required()
        self._chk_parents()
        self._set_exp_children()
        self._chk_children()
    def chk_get_goterms_upper(self):
        """Check that GOTerm's 'get_upper' returns parents and relationships."""
        tic = timeit.default_timer()
        for goterm in self.go2obj.values():
            goids_act = set(o.item_id for o in goterm.get_goterms_upper())
            goids_exp = self._get_goterms_upper(goterm.item_id)
            assert goids_act == goids_exp
        prt_hms(tic, "get_goterms_upper")
    def chk_get_goterms_lower(self):
        """Check that GOTerm's 'get_lower' returns parents and relationships."""
        tic = timeit.default_timer()
        for goterm in self.go2obj.values():
            goids_act = set(o.item_id for o in goterm.get_goterms_lower())
            goids_exp = self._get_goterms_lower(goterm.item_id)
            assert goids_act == goids_exp, "{GO} EXP({E}) ACT({A})".format(
                GO=goterm.item_id, E=goids_exp, A=goids_act)
        prt_hms(tic, "get_goterms_lower")
    def _get_goterms_upper(self, goid):
        """Get expected GO IDs returned by GOTerm's 'get_goterms_upper'."""
        goids_exp = set()
        dct = self.go2dct[goid]
        if 'is_a' in dct:
            goids_exp.update(dct['is_a'])
        if 'relationship' in dct:
            # relationship strings look like "part_of GO:NNNNNNN"
            for rel_go in dct['relationship']:
                goids_exp.add(rel_go.split()[1])
        return goids_exp
    def _get_goterms_lower(self, goid):
        """Get expected GO IDs returned by GOTerm's 'get_goterms_lower'."""
        goids_exp = set()
        dct = self.go2dct[goid]
        if 'is_a_rev' in dct:
            goids_exp.update(dct['is_a_rev'])
        if 'relationship_rev' in dct:
            for rel_gos in dct['relationship_rev'].values():
                goids_exp.update(rel_gos)
        return goids_exp
    def chk_relationships_rev(self, reltype='part_of', prt=None):
        """Check reciprocal relationships. Print all GO pairs in one type of relationship."""
        spc = " "*len(reltype)
        # related-to record -> set of records that point at it via `reltype`
        rec2revs = cx.defaultdict(set)
        for rec in sorted(self.go2obj.values(), key=lambda o: o.namespace):
            reldct = rec.relationship
            if reltype in reldct:
                if prt is not None:
                    prt.write("{SPC} {GO}\n".format(SPC=spc, GO=str(rec)))
                for related_to in reldct[reltype]:
                    rec2revs[related_to].add(rec)
                    if prt is not None:
                        prt.write("{RELTYPE} {GO}\n".format(RELTYPE=reltype, GO=str(related_to)))
                if prt is not None:
                    prt.write("\n")
        for rec, exp_revs in sorted(rec2revs.items(), key=lambda t: t[0].namespace):
            if prt is not None:
                prt.write("    {SPC} {GO}\n".format(SPC=spc, GO=str(rec)))
            assert rec.relationship_rev[reltype] == exp_revs
            for related_from in rec.relationship_rev[reltype]:
                if prt is not None:
                    prt.write("rev {RELTYPE} {GO}\n".format(RELTYPE=reltype, GO=str(related_from)))
            if prt is not None:
                prt.write("\n")
    def chk_str(self, attr):
        """Check that expected scalar value matches actual string value."""
        for goid, rec in self.go2obj.items():
            # A string data member must always be present, even if the value is ""
            act_str = getattr(rec, attr)
            exp_dct = self.go2dct[goid]
            # Expected string equals actual string?
            if attr in exp_dct:
                exp_str = next(iter(exp_dct[attr]))
                assert exp_str == act_str, "{} EXP({}) ACT({})".format(
                    goid, exp_str, act_str)
            # If there is no expected string, is actual string ""?
            else:
                assert act_str == ""
    def prt_summary(self, prt=sys.stdout):
        """Print percentage of GO IDs that have a specific relationship."""
        sep = "\n-----------------------------------------------------------\n"
        flds_seen = self.dcts['flds']
        fld_cnts_go = self._get_cnts_gte1(self.go2dct.values())
        prt.write("{SEP}GO TERM REQUIRED FIELDS:\n".format(SEP=sep))
        self._prt_summary(prt, fld_cnts_go, self.exp_req, self.go2dct.values())
        flds_seen = flds_seen.difference(self.exp_req)
        prt.write("{SEP}GO TERM OPTIONAL FIELDS:\n".format(SEP=sep))
        self._prt_summary(prt, fld_cnts_go, flds_seen, self.go2dct.values())
        flds_seen = flds_seen.difference(fld_cnts_go.keys())
        prt.write("{SEP}Typedef FIELDS:\n".format(SEP=sep))
        fld_cnts_typedef = self._get_cnts_gte1(self.dcts['typedefdct'].values())
        self._prt_summary(prt, fld_cnts_typedef, flds_seen, self.dcts['typedefdct'])
        flds_seen = flds_seen.difference(fld_cnts_typedef.keys())
        # only the deprecation-related fields should remain unaccounted for
        assert flds_seen == set(['consider', 'replaced_by']), "UNEXPECTED FIELDS({})".format(
            flds_seen)
    def _prt_summary(self, prt, fld_cnts, prt_flds, dcts):
        """Print per-field counts and maximum multiplicities for one record group."""
        prt.write("\n    These fields appear at least once\n")
        # Ex: 28,951 of 44,948 (64%) GO IDs has field(synonym)
        for relname, cnt in fld_cnts.most_common():
            if prt_flds is None or relname in prt_flds:
                self._prt_perc(cnt, relname, len(dcts), prt)
        prt.write("\n    Maximum number of fields:\n")
        for fld, maxqty in sorted(self._get_cnts_max(dcts).items(), key=lambda t: t[1]):
            if prt_flds is None or fld in prt_flds:
                prt.write("        {MAX:3} {MRK} {FLD}\n".format(
                    MAX=maxqty, MRK=self._get_fldmrk(fld), FLD=fld))
    def _chk_parents(self):
        """Check parents."""
        for goobj in self.go2obj.values():
            exp_dct = self.go2dct[goobj.item_id]
            if 'is_a' in exp_dct:
                # pylint: disable=protected-access
                exp_parents = exp_dct['is_a']
                act_parents = goobj._parents
                assert exp_parents == act_parents
            else:
                assert not goobj.parents
    def _chk_children(self):
        """Check children."""
        for goobj in self.go2obj.values():
            exp_dct = self.go2dct[goobj.item_id]
            if '_children' in exp_dct:
                exp_children = exp_dct['_children']
                act_children = set(o.item_id for o in goobj.children)
                assert exp_children == act_children
            else:
                assert not goobj.children
    def _set_exp_children(self):
        """Fill expected child GO IDs."""
        # Initialize empty sets for child GO IDs
        for exp_dct in self.go2dct.values():
            exp_dct['_children'] = set()
        # Loop thru all GO IDs
        for goid_child, exp_dct in self.go2dct.items():
            if 'is_a' in exp_dct:
                # Add current GO ID to all of it's parents' set of children
                for goid_parent in exp_dct['is_a']:
                    self.go2dct[goid_parent]['_children'].add(goid_child)
    def _chk_required(self):
        """Check the required attributes."""
        for goid, goobj in self.go2obj.items():
            godct = self.go2dct[goid]
            assert goobj.item_id == godct['GO']
            # scalar fields are stored as one-element sets by the test parser
            assert goobj.namespace == next(iter(godct['namespace'])), godct
            assert goobj.name == next(iter(godct['name']))
            self._chk_is_obsolete(goobj, godct)
            self._chk_alt_ids(goobj, godct)
    @staticmethod
    def _chk_alt_ids(goobj, godct):
        """Check 'alt_ids' required attribute."""
        if 'alt_id' in godct:
            assert godct['alt_id'] == goobj.alt_ids
        else:
            assert not goobj.alt_ids
    @staticmethod
    def _chk_is_obsolete(goobj, godct):
        """Check 'is_obsolete' required attribute."""
        act_obso = getattr(goobj, 'is_obsolete', None)
        if act_obso:
            assert 'is_obsolete' in godct, "EXP({})\nACT({})".format(
                godct, getattr(goobj, 'is_obsolete', None))
        else:
            assert 'is_obsolete' not in godct, "EXP({})\nACT({})".format(
                godct, getattr(goobj, 'is_obsolete', None))
    def chk_no_optattrs(self):
        """Check that only the optional attributes requested are the attributes implemented."""
        # name is_obsolete namespace item_id alt_ids
        # level namespace depth parents children _parents
        exp_flds = self.exp_req.union(self.exp_gen)
        obj1_exp0 = set(['id', 'alt_ids'])
        for goobj in self.go2obj.values():
            attrs = set(vars(goobj).keys()).difference(exp_flds)
            assert attrs == obj1_exp0, attrs
    def chk_xref(self, prt=None):
        """Check xrefs."""
        # Get GO IDs which are expected to have xrefs
        goids = set(go for go, d in self.go2dct.items() if 'xref' in d)
        for goid in goids:
            goobj = self.go2obj[goid]
            xrefs = getattr(goobj, 'xref', None)
            assert xrefs is not None, "{GO} MISSING XREF".format(GO=goid)
            # Iterate through list of xref data stored in named tuples
            for dbxref in xrefs:
                if prt is not None:
                    prt.write("{GO} {DBXREF}\n".format(GO=goid, DBXREF=dbxref))
                # every xref must look like "DB:identifier"
                assert self.exp_xrefpat.match(dbxref), "INVALID XREF FORMAT({X})".format(
                    X=dbxref)
    def chk_synonyms(self, prt=None):
        """Check synonyms

        Example synonym and its storage in a namedtuple:
        synonym: "The other white meat" EXACT MARKETING_SLOGAN [MEAT:00324, BACONBASE:03021]
          text:     "The other white meat"
          scope:    EXACT
          typename: MARKETING_SLOGAN
          dbxrefs:  set(["MEAT:00324", "BACONBASE:03021"])
        """
        # Get GO IDs which are expected to have synonyms
        badnts = []  # collects (goid, namedtuple) pairs with malformed dbxrefs
        for goid, dct_exp in self.go2dct.items():
            goobj = self.go2obj[goid]
            if 'synonym' in dct_exp:
                ntsyns = getattr(goobj, 'synonym', None)
                assert ntsyns is not None, "{GO} MISSING SYNONYM".format(GO=goid)
                # Iterate through list of synonym data stored in named tuples
                for ntsyn in ntsyns:
                    if prt is not None:
                        prt.write("{GO} {NT}\n".format(GO=goid, NT=ntsyn))
                    # Example:
                    assert ntsyn.text, "SYNONYM CANNOT BE EMPTY"
                    assert ntsyn.scope in self.exp_scopes, "INVALID SYNONYM SCOPE"
                    for dbxref in ntsyn.dbxrefs:
                        if not self.exp_xrefpat.match(dbxref):
                            badnts.append((goid, ntsyn))
                            print("**WARNING: INVALID FORMAT: DBXREF({D}) ON {GO}".format(
                                D=dbxref, GO=goid))
            else:
                assert goobj.synonym == []
        return badnts
    def _get_fldmrk(self, fld):
        """Get a mark for each field indicating if it is required or optional"""
        #pylint: disable=too-many-return-statements
        if fld in self.exp_req:
            return 'REQ'
        if fld == 'def':
            return 'str'
        if fld in self.attrs_scalar:
            return 'str'
        if fld in self.attrs_set:
            return 'set'
        if fld == 'relationship':
            return 'rel'
        if fld == 'synonym':
            return 'syn'
        if fld == 'xref':
            return 'xrf'
        raise RuntimeError("UNEXPECTED FIELD({})".format(fld))
    @staticmethod
    def _prt_perc(num_rel, name, num_tot, prt=sys.stdout):
        """Print percentage of GO IDs that have a specific relationship."""
        prt.write("    {N:6,} of {M:,} ({P:3.0f}%) GO IDs has field({A})\n".format(
            N=num_rel, M=num_tot, P=float(num_rel)/num_tot*100, A=name))
    def _get_cnts_max(self, dcts):
        """Get the maximum count of times a specific relationship was seen on a GO."""
        fld2qtys = cx.defaultdict(set)
        flds = self.dcts['flds']
        for recdct in dcts:
            for opt in flds:
                if opt in recdct:
                    fld2qtys[opt].add(len(recdct[opt]))
        return {f:max(qtys) for f, qtys in fld2qtys.items()}
    def _get_cnts_gte1(self, record_dicts):
        """Get counts of if a specific relationship was seen on a GO."""
        ctr = cx.Counter()
        flds = self.dcts['flds']
        for recdct in record_dicts:
            for opt in flds:
                if opt in recdct:
                    ctr[opt] += 1
        return ctr
    def chk_set(self, opt):
        """Check that actual set contents match expected set contents."""
        errpat = "SET EXP({EXP}) ACT({ACT}) {GO}\n{DESC}:\nEXP:\n{Es}\n\nACT:\n{As}"
        for goid, dct in self.go2dct.items():
            act_set = getattr(self.go2obj[goid], opt, None)
            if opt in dct:
                exp_set = dct[opt]
                assert exp_set == act_set, errpat.format(
                    EXP=len(exp_set), ACT=len(act_set), GO=goid,
                    DESC=str(self.go2obj[goid].name),
                    Es="\n".join(sorted(exp_set)),
                    As="\n".join(sorted(act_set)))
            else:
                assert act_set == set(), "EXPECTED EMPTY SET FOR {O}: ACT({A})\n".format(
                    O=opt, A=act_set)
    def chk_relationships(self):
        """Expected relationship GO IDs should match actual relationship GO IDs."""
        for goid, dct in self.go2dct.items():
            act_rel2recs = getattr(self.go2obj[goid], 'relationship', None)
            if 'relationship' in dct:
                rel2gos = self._mk_exp_relatinship_sets(dct['relationship'])
                # Check if expected relationships and actual relationships are the same
                assert set(act_rel2recs.keys()) == set(rel2gos.keys()), "EXP({}) != ACT({})".format(
                    set(act_rel2recs.keys()), set(rel2gos.keys()))
                for rel, exp_goids in rel2gos.items():
                    # Expected relationships store GO IDs.
                    # Actual relationships store GO Terms.
                    act_goids = set(o.item_id for o in act_rel2recs[rel])
                    assert exp_goids == act_goids, "EXP({}) ACT({}) {}:\nEXP({})\nACT({})".format(
                        len(exp_goids), len(act_goids), goid, exp_goids, act_goids)
            else:
                assert act_rel2recs == {}, act_rel2recs
    def _mk_exp_relatinship_sets(self, relationship_str_set):
        """Transform a set of relationship strings into a dict of sets containing GO IDs."""
        rel2gos = cx.defaultdict(set)
        for rel_str in relationship_str_set:
            rel, goid = rel_str.split()
            assert rel in self.exp_relationships
            assert goid[:3] == "GO:" and goid[3:].isdigit()
            rel2gos[rel].add(goid)
        return rel2gos
    @staticmethod
    def add_is_a_rev(go2dct):
        """If there 'is_a' exists, add 'is_a_rev'."""
        for go_src, dct in go2dct.items():
            if 'is_a' in dct:
                for go_parent in dct['is_a']:
                    if 'is_a_rev' not in go2dct[go_parent]:
                        go2dct[go_parent]['is_a_rev'] = set()
                    go2dct[go_parent]['is_a_rev'].add(go_src)
    @staticmethod
    def add_relationship_rev(go2dct):
        """If there is a relationship, add 'relationship_rev'."""
        for go_src, dct in go2dct.items():
            if 'relationship' in dct:
                for rel in dct['relationship']:
                    reltype, go_dst = rel.split()
                    # print("RRRRRRRRR", go_src, reltype, go_dst)
                    if 'relationship_rev' not in go2dct[go_dst]:
                        go2dct[go_dst]['relationship_rev'] = {}
                    if reltype not in go2dct[go_dst]['relationship_rev']:
                        go2dct[go_dst]['relationship_rev'][reltype] = set()
                    go2dct[go_dst]['relationship_rev'][reltype].add(go_src)
    # pylint: disable=too-many-branches
    def _init_go2dct(self):
        """Create EXPECTED RESULTS stored in a dict of GO fields."""
        go2dct = {}
        # pylint: disable=unsubscriptable-object
        typedefdct = {}
        flds = set()  # every field name seen anywhere in the obo file
        with open(self.obo) as ifstrm:
            rec = {}
            rec_typedef = None
            for line in ifstrm:
                line = line.rstrip()
                # End of GO record
                if not line:
                    if rec:  # and option is None or option in rec:
                        # 'Definition' is specified in obo as 'def' and in Python by 'defn'
                        if 'def' in rec:
                            rec['defn'] = rec['def']
                        go2dct[rec['GO']] = rec
                    rec = {}
                    if rec_typedef is not None:
                        # Example rec_typedef:
                        #     {'xref': 'RO:0002212',
                        #      'name': 'negatively regulates',
                        #      'namespace': 'external',
                        #      'transitive_over': 'part_of',
                        #      'is_a': 'regulates',
                        #      'id': 'negatively_regulates'}
                        typedefdct[rec_typedef['item_id']] = rec_typedef
                        rec_typedef = None
                elif line[:9] == "[Typedef]":
                    rec_typedef = {}
                else:
                    mtch = self.cmpfld.match(line)
                    if mtch:
                        fld = mtch.group(1)
                        val = mtch.group(2)
                        # Beginning of GO record
                        if fld == "id":
                            assert not rec, "NOW({}) WAS({})".format(line, rec)
                            rec = {'GO':val, 'item_id':val}
                            flds.add('item_id')
                        # Middle of GO record
                        elif rec:
                            flds.add(fld)
                            if fld not in rec:
                                rec[fld] = set()
                            # Strip comment if it exists
                            loc = val.find(' ! ')
                            if loc != -1:
                                val = val[:loc]
                            # Add value
                            rec[fld].add(val)
                        if rec_typedef is not None:
                            if fld == 'id':
                                fld = 'item_id'
                            rec_typedef[fld] = val
        # final pass in case the file did not end with a blank line
        for dct in go2dct.values():
            if 'def' in dct:
                dct['defn'] = dct['def']
        self.add_relationship_rev(go2dct)
        self.add_is_a_rev(go2dct)
        return {'go2dct':go2dct, 'typedefdct':typedefdct, 'flds':flds}
# Copyright (C) 2010-2018, <NAME>, <NAME>, All rights reserved.
| StarcoderdataPython |
11383409 | """base
Revision ID: 3895aa356acf
Revises:
Create Date: 2019-05-19 11:34:12.741305
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial schema: exam_questions, user and exam_result tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('exam_questions',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('question', sa.String(length=200), nullable=True),
    sa.Column('question_type', sa.String(length=64), nullable=True),
    sa.Column('question_images', sa.Text(), nullable=True),
    sa.Column('option_a', sa.String(length=64), nullable=True),
    sa.Column('option_b', sa.String(length=64), nullable=True),
    sa.Column('option_c', sa.String(length=64), nullable=True),
    sa.Column('option_d', sa.String(length=64), nullable=True),
    sa.Column('answer', sa.String(length=1), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_exam_questions_question'), 'exam_questions', ['question'], unique=False)
    op.create_table('user',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('username', sa.String(length=64), nullable=True),
    sa.Column('sagf_id', sa.Integer(), nullable=True),
    sa.Column('name', sa.String(length=64), nullable=True),
    sa.Column('surname', sa.String(length=64), nullable=True),
    sa.Column('email', sa.String(length=128), nullable=True),
    sa.Column('password_hash', sa.String(length=128), nullable=True),
    sa.Column('enabled', sa.Boolean(), nullable=True),
    sa.Column('admin', sa.Boolean(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
    op.create_index(op.f('ix_user_sagf_id'), 'user', ['sagf_id'], unique=True)
    op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=True)
    # exam_result is created last: its foreign key references user.sagf_id
    op.create_table('exam_result',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('sagf_id', sa.Integer(), nullable=True),
    sa.Column('theory_answer', sa.Text(), nullable=True),
    sa.Column('practical_answer', sa.Text(), nullable=True),
    sa.Column('exam_start_date', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['sagf_id'], ['user.sagf_id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('sagf_id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop everything created by upgrade(), in reverse dependency order."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('exam_result')
    op.drop_index(op.f('ix_user_username'), table_name='user')
    op.drop_index(op.f('ix_user_sagf_id'), table_name='user')
    op.drop_index(op.f('ix_user_email'), table_name='user')
    op.drop_table('user')
    op.drop_index(op.f('ix_exam_questions_question'), table_name='exam_questions')
    op.drop_table('exam_questions')
    # ### end Alembic commands ###
| StarcoderdataPython |
# Every value on the second input line appears k times except one, which
# appears only once; recover that unique value.
# If S = sum of distinct values and T = sum of all values, then
# S*k - T == (k-1) * unique, so unique = (S*k - T) // (k-1).
k = int(input())
values = list(map(int, input().split()))
distinct_total = sum(set(values))
print((distinct_total * k - sum(values)) // (k - 1))
| StarcoderdataPython |
5121578 | from os import chdir, environ, path, getcwd
from shutil import rmtree
from inspect import getfile
from importlib import reload
from _pytest.tmpdir import TempPathFactory
from pytest_mock import MockerFixture
from .helpers import onerror
# Remember the original working directory so the test can restore it.
CWD = getcwd()
def test___import__(tmp_path_factory: TempPathFactory, mocker: MockerFixture) -> None:
    """Importing grizzly_cli derives its contexts from CWD and the environment."""
    test_context = tmp_path_factory.mktemp('test_context')
    test_context_root = str(test_context)
    chdir(test_context_root)
    try:
        environ['GRIZZLY_MOUNT_CONTEXT'] = '/var/tmp'
        import grizzly_cli
        # re-execute module-level code so it picks up the new CWD/env
        reload(grizzly_cli)
        mocker.patch.object(grizzly_cli, '__version__', '1.2.3')
        static_context = path.join(path.dirname(getfile(grizzly_cli)), 'static')
        assert grizzly_cli.__version__ == '1.2.3'
        assert grizzly_cli.EXECUTION_CONTEXT == test_context_root
        assert grizzly_cli.MOUNT_CONTEXT == '/var/tmp'
        assert grizzly_cli.STATIC_CONTEXT == static_context
        assert grizzly_cli.PROJECT_NAME == path.basename(test_context_root)
        assert len(grizzly_cli.SCENARIOS) == 0
    finally:
        # restore process-global state so later tests are unaffected
        chdir(CWD)
        rmtree(test_context_root, onerror=onerror)
        try:
            del environ['GRIZZLY_MOUNT_CONTEXT']
        except:  # NOTE(review): bare except; `except KeyError` would be the targeted choice
            pass
| StarcoderdataPython |
8107513 | <reponame>skunkworksdev/Ifes_Algoritmo
# Teaching example: flat if/elif/else vs. the nested equivalent below.
# User-facing strings are intentionally kept in Portuguese.
a = int(input('Digite 1 para sim e 2 para não: '))
if(a == 1):
    print('Você quer namorar comigo, sou gato!')
elif(a == 2): # simply confirming the "no" answer
    print('Você não quer namorar comigo, sou ridículo!')
else:
    print('Você não quer ninguém!\n Evite piadas de tios(as) quando você fizer um bom almoço de familia!!!')
# nested conditions (same logic written with an inner if/else, for comparison)
# if(a == 1):
#     print('Você quer namorar comigo, sou gato!')
# else:
#     if(a == 2): # simply confirming the "no" answer
#         print('Você não quer namorar comigo, sou ridículo!')
#     else:
#         print('Você não quer ninguém!\n Evite piadas de tios(as) quando você fizer um bom almoço de familia!!!')
6544080 | ##============================ ea_config_ex_3.py ================================
# Some of the input parameters and options in order to select the settings of the
# evolutionary algorithm are given here for the minimization of f1(x) and f2(x)
# of the ZDT 1 using NSGA-II (Genetic Algorithms).
# NSGA-II settings for the bi-objective ZDT1 minimization (see header above).
EA_type = 'GA'        # evolutionary-algorithm family: genetic algorithm
pop_size = 80         # population size per generation
n_gen = 300           # number of generations
mut_rate = -1         # mutation rate; -1 presumably selects a default/adaptive rate -- TODO confirm against the EA driver
n_gen_var = 25        # NOTE(review): meaning not shown here -- confirm against the EA driver
cross_rate = 0.9      # crossover probability
tour_sel_param = 0.8  # tournament-selection parameter
perc_rank1 = 1.0      # fraction related to rank-1 (non-dominated) individuals -- TODO confirm
perc_nf = 0           # NOTE(review): meaning not shown here -- confirm against the EA driver
12820751 | <gh_stars>0
# Author: <NAME>
# Class representing an ordered set of operations on a given data input
import logging
import json
from json import JSONDecodeError
from abc import ABC, abstractmethod
import ast
from google.protobuf import json_format
from ta3ta2_api import core_pb2, pipeline_pb2, problem_pb2, value_pb2
# from .scores import Metric
logger = logging.getLogger(__name__)
class ModelInput(object):
    """A named input of a model pipeline."""

    def __init__(self, name):
        # Identifier by which the pipeline description refers to this input.
        self.name = name
class ModelOutput(object):
    """A named output of a model pipeline together with its producing source."""

    def __init__(self, name, source):
        # Identifier of the output and a reference to whatever produces it.
        self.name = name
        self.source = source
class ModelNode(ABC):
    """Abstract base for nodes of a model description graph.

    Concrete subclasses identify themselves via :meth:`get_type`.
    """

    @abstractmethod
    def get_type(self):
        """Return a short string identifying the concrete node kind."""
class SimpleModelNode(ModelNode):
    """Concrete node: one operator plus its arguments, outputs and hyperparams."""

    def __init__(self, op, args=None, outputs=None, hyperparams=None):
        self.operator = op
        # Substitute a fresh list for each omitted argument; a mutable default
        # in the signature would be shared across instances.
        self.args = [] if args is None else args
        self.outputs = [] if outputs is None else outputs
        self.hyperparams = [] if hyperparams is None else hyperparams

    def get_type(self):
        """Return the node-kind tag used by consumers of the model graph."""
        return "SimpleNode"
class SearchModelNode(ModelNode):
    """Model node whose concrete pipeline is to be filled in by a search.

    `inputs`/`outputs` are iterables of names; `None` means "not specified".
    """

    def __init__(self, inputs=None, outputs=None):
        if inputs is None:
            self.inputs = None
        else:
            # renamed loop variable: `input` shadowed the builtin
            self.inputs = [ModelInput(name) for name in inputs]
        if outputs is None:
            self.outputs = None
        else:
            # BUG FIX: ModelOutput requires (name, source); the original call
            # passed only the name, so any non-None `outputs` raised TypeError.
            # The producing source is unknown at search time, so it is set to
            # None here -- TODO confirm the intended source value with callers.
            self.outputs = [ModelOutput(name, None) for name in outputs]

    def get_type(self):
        """Return the node-kind tag used by consumers of the model graph."""
        return "SearchModelNode"
class Model(object):
    """A named pipeline description plus, optionally, a fitted-instance id.

    `model` holds the pipeline description as a JSON-compatible dict that
    mirrors the TA3-TA2 `PipelineDescription` protobuf message.
    """
    def __init__(self, mid, name=None, desc=None, model=None):
        self.id = mid
        # id of the fitted instance of this model, set once one exists
        self.fitted_id = None
        self.name = name
        self.desc = desc
        self.model = model
    def add_description(self, model):
        """Attach a pipeline description (JSON-compatible dict)."""
        self.model = model
    def add_description_from_protobuf(self, msg):
        """Attach a description converted from a protobuf PipelineDescription."""
        desc = json_format.MessageToJson(msg)
        self.model = json.loads(desc)
    def to_protobuf(self):
        """Convert the stored dict description back into a protobuf message.

        BUG FIX: `json_format.Parse` expects a JSON *string*, but `self.model`
        is stored as a dict (see `add_description_from_protobuf`/`from_json`),
        so the original call raised. `ParseDict` is the dict-accepting
        counterpart in the same module.
        """
        return json_format.ParseDict(self.model, pipeline_pb2.PipelineDescription())
    def get_default_output(self, format=None):
        """
        Just returns the first output

        :param format: 'name' or 'data' return that field of the first output;
                       'declare' (or anything else) returns the "outputs.0" ref
        """
        logger.debug("Model outputs: %s" % str(self.model['outputs']))
        if format == 'name':
            return self.model['outputs'][0]['name']
        elif format == 'data':
            return self.model['outputs'][0]['data']
        elif format == 'declare':
            return "outputs.0"
        else:
            return "outputs.0"
    @staticmethod
    def from_json(data):
        """
        Load from json string (also accepts an already-parsed dict)
        """
        logger.debug("type of data to load from json: %s" % str(type(data)))
        if isinstance(data, str):
            try:
                d = json.loads(data)
            except JSONDecodeError:
                # tolerate python-literal dumps (single quotes, etc.)
                d = ast.literal_eval(data)
        elif isinstance(data, dict):
            d = data
        else:
            raise Exception("Invalid type given: %s" % str(type(data)))
        logger.debug("got json data for new model: %s" % str(d))
        out = Model(d['id'])
        out.name = d['name']
        out.desc = d['description']
        out.add_description(d['model'])
        if 'fitted_id' in d.keys():
            logger.debug("Found fitted id in model json")
            out.fitted_id = d['fitted_id']
        else:
            logger.debug("Did not find fitted id in model json")
        logger.debug("Got pipeline parsed: %s" % str(out))
        return out
    def to_file(self, fpath):
        """
        Writes the workflows to a file where the first line is tab separated
        list of solution ids. The second row contains a stringified version
        of the json for the corresponding solution id
        """
        # NOTE(review): stub -- nothing is written yet; only the path is returned
        return fpath
    def to_dict(self):
        """Return a JSON-serializable dict view of this model."""
        out = {
            'id': self.id,
            'name': self.name,
            'description': self.desc,
            'model': self.model
        }
        # 'fitted_id' is only present once a fitted instance exists
        if self.fitted_id is not None:
            out['fitted_id'] = self.fitted_id
        return out
    def __str__(self):
        return str(self.to_dict())
class SubModelNode(Model, ModelNode):
    """A complete sub-model embedded as a single node of a larger graph."""

    def get_type(self):
        """Return the node-kind tag used by consumers of the model graph."""
        return "SubModelNode"
| StarcoderdataPython |
252608 | import os
import numpy as np
import pandas as pd
import plotly.graph_objects as go
from skimage import io
from sklearn import metrics
from sklearn.dummy import DummyClassifier
import berrytemplates as bt
from berrysort import TestDataLoader
# Der Pfad zu den Trainingsdaten
path = "BlueberryData/TrainingData/"
def load_images(path):
    """Load labeled berry images from `path`.

    Filenames containing 'good' get label 1, 'bad' get label 0; any other
    file is skipped entirely.

    :returns: (samples, labels) -- parallel lists of equal length
    """
    samples = []
    labels = []
    # os.listdir(path) returns a list of files and folders in the folder
    for file in os.listdir(path):
        # BUG FIX: decide the label *before* touching the file. The original
        # appended the image first and only then checked the name, so a file
        # with an unrecognized name desynchronized samples and labels (and
        # non-image files were still read).
        if 'good' in file:
            label = 1
        elif 'bad' in file:
            label = 0
        else:
            # image with incorrect name format
            continue
        samples.append(io.imread(path + file))
        labels.append(label)
    return samples, labels
def extract_features(images):
    """Turn each 64x64 RGB image into a 3-value feature row.

    Example feature extraction: the RGB values of the image's middle pixel.

    :returns: DataFrame with one row per image and one column per channel
    """
    center_pixels = [image[32, 32, :] for image in images]
    # DataFrame form is what the sklearn-style classifiers expect
    return pd.DataFrame(np.array(center_pixels))
def print_metrics(y, predictions, set_name="test"):
    """Print evaluation metrics for `predictions` against true labels `y`."""
    # Single metric for now; add further sklearn.metrics calls here as needed.
    score = metrics.accuracy_score(y, predictions)
    header = "Classifier performance on the {} set".format(set_name)
    print(header)
    print("-------------------------------------------------")
    print("Accuracy: {:.3f}".format(score))
    print("\n")
def plot_results(predictions):
    """Bar-plot how many images were classified as good (label 1)."""
    # sum of the 0/1 labels == number of images predicted as 1 (good);
    # NOTE(review): the original comment said "classified as 0 (bad)",
    # but the code counts the good ones
    count_good = sum(predictions)
    fig = go.Figure(data=[
        go.Bar(name="good blueberries", x=["good"], y=[count_good])
    ])
    # plotly does not work yet
    # fig.show()
def predict_pipeline(X_test, model):
    """Extract features from the raw images, then classify them with `model`."""
    features = extract_features(X_test)
    return model.predict(features)
def main():
    """Train a dummy classifier on the blueberry training set and evaluate it."""
    # required line to work with the test data
    tdl = TestDataLoader()
    # load images
    X_train, y_train = load_images(path)
    print("finished loading data")
    print("\n")
    # extract features from the images
    X_train = extract_features(X_train)
    # build model; "uniform" guesses labels at random -- replace with a real classifier
    model = DummyClassifier(strategy="uniform")
    # train model
    model.fit(X_train, y_train)
    # evaluate model
    predict_func = lambda X_test: predict_pipeline(X_test, model)
    # examples for template methods
    # X_train, y_train = bt.load_images()
    # X_train = bt.extract_features(X_train)
    # model = bt.classifier()
    # model.fit(X_train, y_train)
    # predict_func = lambda X_test: model.predict(bt.extract_features(X_test))
    # bt.print_prediction_metrics(predict_func, tdl)
    tdl.send_to_unity(predict_func)
    acc = tdl.evaluate_metric(predict_func)
    print(acc)


if __name__ == "__main__":
    # FIX: guard the entry point so merely importing this module no longer
    # trains and evaluates as a side effect; running it as a script is
    # unchanged.
    main()
246790 | <reponame>Blackweather/rpg-station
import pygame
from os import system

# Console joystick monitor: clears the screen and prints each button press,
# hat move and axis move of joystick 0 until Ctrl-C is pressed.
# NOTE: system('clear') is POSIX-only; the busy pygame.event.get() loop is
# kept as in the original.
pygame.init()
j = pygame.joystick.Joystick(0)
j.init()
try:
    while True:
        for event in pygame.event.get():
            if event.type == pygame.JOYBUTTONDOWN:
                system('clear')
                print("Pressed button " + str(event.button))
            elif event.type == pygame.JOYHATMOTION:
                system('clear')
                print("Moved hat #" + str(event.hat) + " with value: "
                      + str(event.value))
            elif event.type == pygame.JOYAXISMOTION:
                system('clear')
                print("Moved axis #" + str(event.axis) + " with value: "
                      + str(event.value))
except KeyboardInterrupt:
    print("\nexiting now")
finally:
    # FIX: the original released the joystick only on KeyboardInterrupt; any
    # other exception leaked it. Release the joystick and shut pygame down on
    # every exit path.
    j.quit()
    pygame.quit()
| StarcoderdataPython |
88357 | import argparse
import time
import torch
from kruskals import kruskals_pytorch, kruskals_pytorch_batched
from kruskals import kruskals_cpp_pytorch, kruskals_cpp_pytorch2
parser = argparse.ArgumentParser()
parser.add_argument("--n", type=int, default=30, help="Number of nodes.")
parser.add_argument("--batch_size", type=int, default=10, help="Batch size.")
parser.add_argument("--num_steps", type=int, default=1,
                    help="Number of times to evaluate.")
args = parser.parse_args()

# Build a batch of random edge weights plus the (u, v) endpoints of every
# edge of the complete graph on n nodes: shape (batch, n*(n-1)/2, 3).
num_edges = int(args.n * (args.n - 1) / 2)
weights = torch.randn(args.batch_size, num_edges)
vertices = torch.triu_indices(args.n - 1, args.n, offset=1)
tiled_vertices = vertices.transpose(0, 1).repeat((weights.size(0), 1, 1)).float()
weights_and_edges = torch.cat([weights.unsqueeze(-1), tiled_vertices], axis=-1)


def _benchmark(fn, label, device):
    """Time `fn(data, n)` for --num_steps runs on `device` and print the totals.

    The device transfer happens once, outside the timed region (as in the
    original); CUDA runs synchronize inside the timed region so kernel time
    is included.
    """
    data = weights_and_edges.to(device)
    t = 0
    for _ in range(args.num_steps):
        start = time.time()
        fn(data, args.n)
        if device == "cuda":
            torch.cuda.synchronize()
        t += time.time() - start
    print(f"{label}: {t}; avg: {t / args.num_steps}")


# FIX: the original repeated the same timing loop six times; one helper
# produces byte-identical output lines for every implementation/device pair.
_benchmark(kruskals_pytorch_batched, "Pytorch (batched, gpu)", "cuda")
_benchmark(kruskals_cpp_pytorch, "C++ (pytorch, cpu)", "cpu")
_benchmark(kruskals_cpp_pytorch, "C++ (pytorch, gpu)", "cuda")
_benchmark(kruskals_cpp_pytorch2, "C++ (pytorch2, cpu)", "cpu")
_benchmark(kruskals_cpp_pytorch2, "C++ (pytorch2, gpu)", "cuda")
6454389 | # Generated by Django 2.1 on 2021-01-11 12:46
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional many-to-many `field` relation from Collaborater to Field."""
    dependencies = [
        ('app', '0012_auto_20210111_1133'),
    ]
    operations = [
        migrations.AddField(
            model_name='collaborater',
            name='field',
            # blank=True keeps the relation optional in forms/admin
            field=models.ManyToManyField(blank=True, related_name='collaboraters', to='app.Field'),
        ),
    ]
| StarcoderdataPython |
1880764 | <filename>azplugins/test-py/test_mpcd_reverse_perturbation.py
# Copyright (c) 2018-2020, <NAME>
# Copyright (c) 2021, Auburn University
# This file is part of the azplugins project, released under the Modified BSD License.
import hoomd
from hoomd import md
from hoomd import mpcd
hoomd.context.initialize()
try:
from hoomd import azplugins
import hoomd.azplugins.mpcd
except ImportError:
import azplugins
import azplugins.mpcd
import unittest
# tests for azplugins.flow.reverse_pertubation
class updater_reverse_perturbation(unittest.TestCase):
    """Creation and parameter validation of azplugins.mpcd.reverse_perturbation."""

    def setUp(self):
        # Minimal system: one MD particle plus one MPCD particle at the
        # origin, in a cubic box of edge 20.
        snapshot = hoomd.data.make_snapshot(N=1, particle_types=['A'],box=hoomd.data.boxdim(L=20))
        system1 = hoomd.init.read_snapshot(snapshot)
        snap = mpcd.data.make_snapshot(N=1)
        snap.particles.types = ['A']
        snap.particles.position[0] = (0,0,0)
        self.s = mpcd.init.read_snapshot(snap)

    # tests basic creation of the updater
    def test(self):
        field = azplugins.mpcd.reverse_perturbation(width=1.0,Nswap=1,period=100,phase=-1,target_momentum=0.5)

    # test setting parameters
    def test_parameters(self):
        field = azplugins.mpcd.reverse_perturbation(width=1.0,Nswap=1,period=100,phase=-1,target_momentum=0.5)
        field.set_params(Nswap=4)
        field.set_params(width=3)
        # cannot set a width > L/2
        with self.assertRaises(RuntimeError):
            field.set_params(width=22.0)
            # the width check fires when the updater is actually run
            hoomd.run(1)
        # cannot set a Nswap < 0
        with self.assertRaises(ValueError):
            field.set_params(Nswap=-5)
        # cannot set a period < 0
        with self.assertRaises(TypeError):
            field.set_params(period =-1)
        # cannot set a target_momentum < 0
        with self.assertRaises(ValueError):
            field.set_params(target_momentum=-1)
        # cannot set slab distance < 0
        with self.assertRaises(ValueError):
            field.set_params(H=-1)

    def tearDown(self):
        # Reset the HOOMD execution context so each test starts clean.
        hoomd.context.initialize()
class updater_reverse_perturbation_swap(unittest.TestCase):
    """Checks which particle pairs reverse_perturbation actually swaps.

    Fixture layout: MPCD particles 0/1 sit in the bottom slab (z = -5.3)
    with +x velocities, particles 2/3/4 in the top slab (z = +5.3) with -x
    velocities.  The expected post-run velocities in each test are golden
    values for the swap-selection rules (Nswap limit, slab distance H,
    slab width, target_momentum closeness).
    """

    def setUp(self):
        snapshot = hoomd.data.make_snapshot(N=1, particle_types=['A'],box=hoomd.data.boxdim(L=20))
        system1 = hoomd.init.read_snapshot(snapshot)
        snap = mpcd.data.make_snapshot(N=5)
        snap.particles.types = ['A']
        snap.particles.velocity[0] = (0.1,0.0,0.0)
        snap.particles.velocity[1] = (0.8,0.0,0.0)
        snap.particles.velocity[2] = (-0.1,0.0,0.0)
        snap.particles.velocity[3] = (-0.5,0.0,0.0)
        snap.particles.velocity[4] = (-0.09,0.0,0.0)
        snap.particles.position[0] = (0.0,0.0,-5.3)
        snap.particles.position[1] = (0.0,1.0,-5.3)
        snap.particles.position[2] = (0.0,0.0,5.3)
        snap.particles.position[3] = (0.0,1.0,5.3)
        snap.particles.position[4] = (0.0,0.0,5.3)
        self.s = mpcd.init.read_snapshot(snap)

    def test_resize_Nswap(self):
        """Nswap can be changed between updater instantiations."""
        field = azplugins.mpcd.reverse_perturbation(width=0.2,Nswap=1,period=1,target_momentum=2)
        hoomd.run(1)
        self.assertAlmostEqual(field.Nswap,1)
        field = azplugins.mpcd.reverse_perturbation(width=0.2,Nswap=356,period=1,target_momentum=2)
        hoomd.run(1)
        self.assertAlmostEqual(field.Nswap,356)

    def test_set_slab_distance(self):
        """The H parameter is stored as the slab distance."""
        field = azplugins.mpcd.reverse_perturbation(width=0.2,Nswap=1,period=1,target_momentum=2,H=3)
        hoomd.run(1)
        self.assertAlmostEqual(field.distance,3)

    def test_simple_swap(self):
        # swap velocities of particle 1/3. Don't swap 0/2 - Nswap is too small
        field = azplugins.mpcd.reverse_perturbation(width=1.0,Nswap=1,period=1,phase=0,target_momentum=0.8)
        hoomd.run(1)
        snap_out = self.s.take_snapshot()
        self.assertAlmostEqual(snap_out.particles.velocity[0][0],0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[2][0],-0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[1][0],-0.5)
        self.assertAlmostEqual(snap_out.particles.velocity[3][0],0.8)

    def test_swap_outside_slab(self):
        # swap no velocities because slab distance is changed - no particles are in slab at +/- 3
        field = azplugins.mpcd.reverse_perturbation(H=3,width=1.0,Nswap=1,period=1,phase=0,target_momentum=0.8)
        hoomd.run(1)
        snap_out = self.s.take_snapshot()
        self.assertAlmostEqual(snap_out.particles.velocity[0][0],0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[2][0],-0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[1][0],0.8)
        self.assertAlmostEqual(snap_out.particles.velocity[3][0],-0.5)

    def test_swap_changed_slab(self):
        # swap velocites of particle 1/3, shifted particle and slab positions
        snap_in = self.s.take_snapshot()
        snap_in.particles.position[1]=(0,3,-2.0)
        snap_in.particles.position[3]=(1,2,+2.0)
        snap_in.particles.velocity[1]=(0.5,0,0)
        snap_in.particles.velocity[3]=(-0.5,0,0)
        self.s.restore_snapshot(snap_in)
        field = azplugins.mpcd.reverse_perturbation(H=2,width=1.0,Nswap=1,period=1,phase=0,target_momentum=0.8)
        hoomd.run(1)
        self.assertAlmostEqual(field.distance,2)
        snap_out = self.s.take_snapshot()
        self.assertAlmostEqual(snap_out.particles.position[1][2],-2.0)
        self.assertAlmostEqual(snap_out.particles.velocity[1][0],-0.5)
        self.assertAlmostEqual(snap_out.particles.velocity[3][0],0.5)

    def test_empty_top_slab(self):
        # no swap partner exists: every particle is moved into the bottom half
        snap_in = self.s.take_snapshot()
        snap_in.particles.position[2]=(0,3,-1.0)
        snap_in.particles.position[3]=(1,2,-1.0)
        snap_in.particles.position[4]=(2,-2,-1.0)
        self.s.restore_snapshot(snap_in)
        field = azplugins.mpcd.reverse_perturbation(width=1.0,Nswap=10,period=1,target_momentum=2)
        hoomd.run(1)
        snap_out = self.s.take_snapshot()
        self.assertAlmostEqual(snap_out.particles.velocity[0][0],0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[1][0],0.8)
        self.assertAlmostEqual(snap_out.particles.velocity[2][0],-0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[3][0],-0.5)

    def test_empty_bottom_slab(self):
        # no swap partner exists: bottom-slab particles moved into the top half
        snap_in = self.s.take_snapshot()
        snap_in.particles.position[0]=(0,3.0,1.0)
        snap_in.particles.position[1]=(1,2.0,1.0)
        self.s.restore_snapshot(snap_in)
        field = azplugins.mpcd.reverse_perturbation(width=1.0,Nswap=10,period=1,target_momentum=2)
        hoomd.run(1)
        snap_out = self.s.take_snapshot()
        self.assertAlmostEqual(snap_out.particles.velocity[0][0],0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[1][0],0.8)
        self.assertAlmostEqual(snap_out.particles.velocity[2][0],-0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[3][0],-0.5)

    def test_simple_swap_outside_slab(self):
        # don't swap anything - all particles are outside of slabs
        field = azplugins.mpcd.reverse_perturbation(width=0.2,Nswap=100,period=1,target_momentum=2)
        hoomd.run(1)
        snap_out = self.s.take_snapshot()
        self.assertAlmostEqual(snap_out.particles.velocity[0][0],0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[1][0],0.8)
        self.assertAlmostEqual(snap_out.particles.velocity[2][0],-0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[3][0],-0.5)

    def test_simple_swap_all(self):
        # swap velocities of particle 1/3, and 0/2
        field = azplugins.mpcd.reverse_perturbation(width=1.0,Nswap=100,period=1,target_momentum=2)
        hoomd.run(1)
        snap_out = self.s.take_snapshot()
        self.assertAlmostEqual(snap_out.particles.velocity[0][0],-0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[1][0],-0.5)
        self.assertAlmostEqual(snap_out.particles.velocity[2][0],0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[3][0],0.8)

    def test_swap_target_momentum(self):
        # swap velocities of particle 0/2 (closer to V) and not 1/3
        field = azplugins.mpcd.reverse_perturbation(width=1.0,Nswap=1,period=1,target_momentum=0.1)
        hoomd.run(1)
        snap_out = self.s.take_snapshot()
        self.assertAlmostEqual(snap_out.particles.velocity[0][0],-0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[1][0],0.8)
        self.assertAlmostEqual(snap_out.particles.velocity[2][0],0.1)
        self.assertAlmostEqual(snap_out.particles.velocity[3][0],-0.5)

    def tearDown(self):
        # Reset the HOOMD execution context so each test starts clean.
        hoomd.context.initialize()
if __name__ == '__main__':
    # Run verbosely; the placeholder argv keeps unittest from trying to
    # parse HOOMD's own command-line flags.
    unittest.main(argv = ['test.py', '-v'])
| StarcoderdataPython |
1813443 | import scrapy
class CRateSpider(scrapy.Spider):
    """Spider that fetches the Bangladesh Bank exchange-rate page.

    Fixes two configuration defects in the original:
    * ``start_urls`` contained a malformed double-scheme URL
      (``'http://https://...'``) that could never be requested;
    * ``allowed_domains`` must hold bare domain names, not full URLs,
      for Scrapy's offsite middleware to match outgoing requests.
    """
    name = 'c_rate'
    # Bare domain only — scheme/path would defeat the offsite filter.
    allowed_domains = ['www.bb.org.bd']
    start_urls = ['https://www.bb.org.bd/econdata/exchangerate.php']

    def parse(self, response):
        """Parse the exchange-rate page (extraction not yet implemented)."""
        # TODO: extract the rate table from `response` and yield items.
        pass
| StarcoderdataPython |
11302702 | from django import forms
class LoginForm(forms.Form):
    """Authentication form: optional-at-field-level username and password,
    each validated as non-empty in its clean_* hook so a custom error key
    ('email_error' / 'password_error') is raised instead of Django's default
    "required" message."""

    username = forms.CharField(
        max_length=120,
        required=False,
        widget=forms.TextInput(
            attrs={'placeholder': 'Enter Your Email Address'},
        ),
    )
    password = forms.CharField(
        max_length=120,
        required=False,
        widget=forms.PasswordInput(
            attrs={'placeholder': 'Enter Your Password'},
        ),
    )

    def clean_username(self):
        """Raise 'email_error' when the username is empty."""
        value = self.cleaned_data.get("username")
        if not value:
            raise forms.ValidationError("email_error")
        return value

    def clean_password(self):
        """Raise 'password_error' when the password is empty."""
        value = self.cleaned_data.get("password")
        if not value:
            raise forms.ValidationError("password_error")
        return value
| StarcoderdataPython |
1754462 | import atexit
import logging
from logging.config import dictConfig
import os
import sys
import time
import click
import docker
from .bitcoind import (BitcoindDockerController,
fetch_wallet_addresses_for_mining)
from .helpers import load_jsons, which
from .server import DATA_FOLDER, create_app, init_app
from os import path
import signal
@click.group()
def cli():
    """Root click command group; `server` and `bitcoind` attach to it below."""
    pass
@cli.command()
@click.option("--daemon", is_flag=True)
@click.option("--stop", is_flag=True)
@click.option("--restart", is_flag=True)
@click.option("--force", is_flag=True)
# options below can help to run it on a remote server,
# but better use nginx
@click.option("--port") # default - 25441 set to 80 for http, 443 for https
@click.option("--host", default="127.0.0.1") # set to 0.0.0.0 to make it available outside
# for https:
@click.option("--cert")
@click.option("--key")
# provide tor password here
@click.option("--tor")
def server(daemon, stop, restart, force, port, host, cert, key, tor):
    """Start (or stop/restart) the Specter Flask server.

    Daemon state is tracked via a PID file under DATA_FOLDER; HTTPS is
    enabled when both --cert and --key are given; a Tor hidden service is
    started when a Tor controller password is available.
    """
    # we will store our daemon PIN here
    pid_file = path.expanduser(path.join(DATA_FOLDER, "daemon.pid"))
    toraddr_file = path.expanduser(path.join(DATA_FOLDER, "onion.txt"))
    # check if pid file exists
    if path.isfile(pid_file):
        # if we need to stop daemon
        if stop or restart:
            print("Stopping the Specter server...")
            with open(pid_file) as f:
                pid = int(f.read())
            os.kill(pid, signal.SIGTERM)
        elif daemon:
            if not force:
                print(f"PID file \"{pid_file}\" already exists. Use --force to overwrite")
                return
        if stop:
            return
    else:
        if stop or restart:
            print(f"Can't find PID file \"{pid_file}\"")
        if stop:
            return
    app = create_app()
    app.app_context().push()
    init_app(app)
    # watch templates folder to reload when something changes
    extra_dirs = ['templates']
    extra_files = extra_dirs[:]
    for extra_dir in extra_dirs:
        for dirname, dirs, files in os.walk(extra_dir):
            for filename in files:
                filename = os.path.join(dirname, filename)
                if os.path.isfile(filename):
                    extra_files.append(filename)
    # if port is not defined - get it from environment
    if port is None:
        port = int(os.getenv('PORT', 25441))
    else:
        port = int(port)
    # certificates
    if cert is None:
        cert = os.getenv('CERT', None)
    if key is None:
        key = os.getenv('KEY', None)
    protocol = "http"
    kwargs = {
        "host": host,
        "port": port,
        "extra_files": extra_files,
    }
    if cert is not None and key is not None:
        cert = os.path.abspath(cert)
        key = os.path.abspath(key)
        kwargs["ssl_context"] = (cert, key)
        protocol = "https"
    # if tor password is not provided but env variable is set
    if tor is None and os.getenv('CONNECT_TOR') == 'True':
        from dotenv import load_dotenv
        load_dotenv()   # Load the secrets from .env
        tor = os.getenv('TOR_PASSWORD')
    # debug is false by default
    def run(debug=False):
        # Runs either behind a Tor hidden service or as a plain Flask app.
        if tor is not None:
            from . import tor_util
            # if we have certificates
            if "ssl_context" in kwargs:
                tor_port = 443
            else:
                tor_port = 80
            tor_util.run_on_hidden_service(app,
                debug=False,
                tor_password=tor,
                tor_port=tor_port,
                save_address_to=toraddr_file,
                **kwargs)
        else:
            app.run(debug=debug, **kwargs)
    # check if we should run a daemon or not
    if daemon or restart:
        from daemonize import Daemonize
        print("Starting server in background...")
        print("* Hopefully running on %s://%s:%d/" % (protocol, host, port))
        if tor is not None:
            print("* For onion address check the file %s" % toraddr_file)
        # Note: we can't run flask as a deamon in debug mode,
        # so use debug=False by default
        d = Daemonize(app="specter", pid=pid_file, action=run)
        d.start()
    else:
        # if not a daemon we can use DEBUG
        run(app.config['DEBUG'])
@cli.command()
@click.option('--debug/--no-debug', default=False)
@click.option('--mining/--no-mining', default=True)
@click.option('--docker-tag', "docker_tag", default="latest")
def bitcoind(debug,mining, docker_tag):
    """Start (or attach to) a regtest bitcoind docker container and,
    unless --no-mining is given, mine a block every 15 seconds forever."""
    mining_every_x_seconds = 15
    if debug:
        logging.getLogger().setLevel(logging.DEBUG)
    click.echo(" --> starting or detecting container")
    my_bitcoind = BitcoindDockerController(docker_tag=docker_tag)
    try:
        my_bitcoind.start_bitcoind()
    except docker.errors.ImageNotFound:
        click.echo(" --> Image with tag {} does not exist!".format(docker_tag))
        click.echo(" --> Try to download first with docker pull registry.gitlab.com/cryptoadvance/specter-desktop/python-bitcoind:{}".format(docker_tag))
        sys.exit(1)
    # Guard against attaching to a running container built from a different tag.
    tags_of_image = [ image.split(":")[-1] for image in my_bitcoind.btcd_container.image.tags]
    if not docker_tag in tags_of_image:
        click.echo(" --> The running docker container is not the tag you requested!")
        click.echo(" --> please stop first with docker stop {}".format(my_bitcoind.btcd_container.id))
        sys.exit(1)
    click.echo(" --> containerImage: %s" % my_bitcoind.btcd_container.image.tags)
    click.echo(" --> url: %s" % my_bitcoind.rpcconn.render_url())
    click.echo(" --> user, password: <PASSWORD>")
    click.echo(" --> host, port: localhost, 18443")
    click.echo(" --> bitcoin-cli: bitcoin-cli -regtest -rpcuser=bitcoin -rpcpassword=<PASSWORD> getblockchaininfo ")
    if mining:
        click.echo(" --> Now, mining a block every %i seconds. Avoid it via --no-mining" % mining_every_x_seconds)
        # Get each address some coins
        try:
            for address in fetch_wallet_addresses_for_mining():
                my_bitcoind.mine(address=address)
        except FileNotFoundError:
            # might happen if there no ~/.specter folder yet
            pass
        # make them spendable
        my_bitcoind.mine(block_count=100)
        click.echo(" --> ",nl=False)
        i = 0
        # Endless mining loop; prints a progress digit per block,
        # wrapping the line every 50 blocks.
        while True:
            my_bitcoind.mine()
            click.echo("%i"% (i%10),nl=False)
            if i%10 == 9:
                click.echo(" ",nl=False)
            i += 1
            if i >= 50:
                i=0
                click.echo(" ")
                click.echo(" --> ",nl=False)
            time.sleep(mining_every_x_seconds)
if __name__ == "__main__":
    # central and early configuring of logging
    # see https://flask.palletsprojects.com/en/1.1.x/logging/#basic-configuration
    dictConfig({
        'version': 1,
        'formatters': {'default': {
            'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
        }},
        'handlers': {'wsgi': {
            'class': 'logging.StreamHandler',
            'stream': 'ext://flask.logging.wsgi_errors_stream',
            'formatter': 'default'
        }},
        'root': {
            'level': 'INFO',
            'handlers': ['wsgi']
        }
    })
    cli()
4902126 | import unittest
import sys, os
sys.path.append(os.pardir)
from common.np import *
from common.util import im2col, col2im, clip_grads, preprocess, \
convert_one_hot, create_co_matrix, cos_similarity, most_similar, ppmi, \
create_contexts_target
class UtilTest(unittest.TestCase):
    """Unit tests for common.util helpers: im2col/col2im reshaping and the
    word-embedding utilities (corpus preprocessing, one-hot encoding,
    co-occurrence matrix, cosine similarity, PPMI, context/target pairs).

    The expected arrays are golden values for the fixed sample sentence
    'you say goodbye and I say hello.'.
    """

    def test_im2col_transforms(self):
        # 5 images, 1 channel, 28x28, 4x4 patches -> (5*25*25, 1*4*4)
        x = np.random.randn(5, 1, 28, 28)
        col = im2col(x, 4, 4, stride=1, pad=0)
        self.assertSequenceEqual((3125, 16), col.shape)

    def test_col2im_transforms(self):
        # inverse mapping restores the original image-batch shape
        col = np.random.randn(3125, 16)
        x = col2im(col, (5, 1, 28, 28), 4, 4, stride=1, pad=0)
        self.assertSequenceEqual((5, 1, 28, 28), x.shape)

    def test_preprocess_returns_corpus(self):
        text = 'you say goodbye and I say hello.'
        corpus, word_to_id, id_to_word = preprocess(text)
        ex_corpus = np.array([0,1,2,3,4,1,5,6])
        ex_w2id = {'you': 0, 'say': 1, 'goodbye': 2, 'and': 3, 'i': 4, 'hello': 5, '.': 6}
        ex_id2w = {0: 'you', 1: 'say', 2: 'goodbye', 3: 'and', 4: 'i', 5: 'hello', 6: '.'}
        self.assertTrue((ex_corpus == corpus).all())
        self.assertDictEqual(ex_w2id, word_to_id)
        self.assertDictEqual(ex_id2w, id_to_word)

    def test_convert_one_hot(self):
        text = 'you say goodbye and I say hello.'
        corpus, w2id, id2w = preprocess(text)
        one_hot = convert_one_hot(corpus, len(w2id))
        self.assertTrue((np.array([
            [1, 0, 0, 0, 0, 0, 0],
            [0, 1, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 0],
            [0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 1, 0, 0],
            [0, 1, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0],
            [0, 0, 0, 0, 0, 0, 1]
        ]) == one_hot).all())

    def test_convert_one_hot_contexts(self):
        # 2-D context ids encode to a 3-D one-hot tensor
        text = 'you say goodbye and I say hello.'
        corpus, w2id, id2w = preprocess(text)
        contexts, target = create_contexts_target(corpus, 1)
        contexts = convert_one_hot(contexts, len(w2id))
        self.assertEqual(contexts.shape, (6, 2, 7))

    def test_create_co_matrix(self):
        text = 'you say goodbye and I say hello.'
        corpus, w2id, id2w = preprocess(text)
        co_matrix = create_co_matrix(corpus, len(w2id), window_size=1)
        self.assertTrue((np.array([
            [0, 1, 0, 0, 0, 0, 0],
            [1, 0, 1, 0, 1, 1, 0],
            [0, 1, 0, 1, 0, 0, 0],
            [0, 0, 1, 0, 1, 0, 0],
            [0, 1, 0, 1, 0, 0, 0],
            [0, 1, 0, 0, 0, 0, 1],
            [0, 0, 0, 0, 0, 1, 0]
        ]) == co_matrix).all())

    def test_cos_similarity(self):
        text = 'you say goodbye and I say hello.'
        corpus, w2id, id2w = preprocess(text)
        vocab_size = len(w2id)
        C = create_co_matrix(corpus, vocab_size)
        c0 = C[w2id['you']]
        c1 = C[w2id['i']]
        expected_c0 = 0.9999999800000005
        expected_c1 = 0.7071067691154799
        self.assertEqual(cos_similarity(c0, c0), expected_c0)
        self.assertEqual(cos_similarity(c0, c1), expected_c1)

    def test_most_similar(self):
        # smoke test only: just checks most_similar runs without raising
        text = 'you say goodbye and I say hello.'
        corpus, w2id, id2w = preprocess(text)
        vocab_size = len(w2id)
        C = create_co_matrix(corpus, vocab_size)
        most_similar('you', w2id, id2w, C, top=5)

    def test_ppmi(self):
        text = 'you say goodbye and I say hello.'
        corpus, w2id, id2w = preprocess(text)
        vocab_size = len(w2id)
        C = create_co_matrix(corpus, vocab_size)
        W = ppmi(C)
        W = np.around(W, 3)
        expected = np.array([
            [0.    ,1.807 ,0.    ,0.    ,0.    ,0.    ,0.   ],
            [1.807 ,0.    ,0.807 ,0.    ,0.807 ,0.807 ,0.   ],
            [0.    ,0.807 ,0.    ,1.807 ,0.    ,0.    ,0.   ],
            [0.    ,0.    ,1.807 ,0.    ,1.807 ,0.    ,0.   ],
            [0.    ,0.807 ,0.    ,1.807 ,0.    ,0.    ,0.   ],
            [0.    ,0.807 ,0.    ,0.    ,0.    ,0.    ,2.807],
            [0.    ,0.    ,0.    ,0.    ,0.    ,2.807 ,0.   ]
        ])
        np.testing.assert_array_almost_equal(W, expected)

    def test_create_contexts_target(self):
        text = 'you say goodbye and I say hello.'
        corpus, w2id, id2w = preprocess(text)
        contexts, target = create_contexts_target(corpus)
        expected_contexts = np.array([
            [0, 2],
            [1, 3],
            [2, 4],
            [3, 1],
            [4, 5],
            [1, 6]
        ])
        expected_target = [1, 2, 3, 4, 1, 5]
        np.testing.assert_array_almost_equal(contexts, expected_contexts)
        np.testing.assert_array_almost_equal(target, expected_target)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
3433584 | <gh_stars>0
#!/usr/bin/python
'''The MIT License (MIT)
Copyright (c) 2017 <NAME>(<EMAIL>)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.'''
__author__ = '<NAME> (<NAME>)'
__doc__ = '''
it's GUI of DasH aka Do as Human
created 2017-05-06 by <NAME>
'''
import webbrowser
from datetime import datetime
import wx.grid as gridlib
import traceback
import wx
from gui.MainFrame import MainFrame
import os
from lib.common import load_bench, caller_stack_info,info, get_next_in_ring_list,get_folder_item, info,debug, warn, error, parse_command_line, call_function_in_module
import re
import time
import threading
import ConfigParser
import sys
import inspect
import Queue
from SessionTab import SessionTab
import imp
import types
from lib.common import send_mail_smtp_without_login
from lib.common import run_script
from multiprocessing import Process
import subprocess
import shlex
#from dut import dut
DUT={}
class RedirectText(object):
    """File-like object that tees stdout/stderr into a wx.TextCtrl log pane.

    Lines matching the error pattern are highlighted red-on-yellow; the pane
    is frozen while the user has scrolled far above the end so new output
    does not yank the view back down.  Optionally mirrors everything into
    ``<log_path>/dash.log``.
    """
    font_point_size = 10
    old_stdout = None          # original sys.stdout, still written to
    old_stderr = None          # original sys.stderr (saved, not restored here)
    write_lock = None          # created but currently unused (see comments below)
    log_file = None            # optional mirror file handle
    error_pattern = None       # compiled regex marking "error-like" substrings
    font_point = None
    previous_scroll_pos = 0
    previous_insert_pos = 0

    def __init__(self,aWxTextCtrl, log_path=None):
        # Keep references to the real streams so output still reaches them.
        self.old_stderr , self.old_stdout=sys.stderr , sys.stdout
        self.out=aWxTextCtrl
        self.font_point_size = self.out.GetFont().PointSize+2
        self.write_lock = threading.Lock()
        self.error_pattern = re.compile('error|\s+err\s+|fail|wrong|errno')
        self.font_point =self.out.GetFont().PointSize
        if log_path:
            name = '{}/dash.log'.format(log_path)
            self.log_file = open(name, 'w+')
            # expose the log file's fileno so callers treating this object
            # as a real file (e.g. subprocess) keep working
            self.fileno = self.log_file.fileno

    def write(self,string):
        # File-protocol entry point: highlight, append to the pane,
        # mirror to the real stdout and the log file.
        def __write(string):
            #self.write_lock.acquire()
            try:
                self.old_stdout.write(string)
                err_pattern = self.error_pattern#re.compile('error|\s+err\s+|fail|wrong')
                current_scroll_pos = self.out.GetScrollPos(wx.VERTICAL)
                current_insert_pos = self.out.GetInsertionPoint()
                last_pos = self.out.GetLastPosition()
                v_scroll_range = self.out.GetScrollRange(wx.VERTICAL)
                char_height = self.out.GetCharHeight()
                w_client,h_client = self.out.GetClientSize()
                # roughly two thirds of a visible page, in lines
                line_in_a_page= h_client/char_height*2/3
                max_gap=line_in_a_page
                c_col, c_line = self.out.PositionToXY(current_scroll_pos) #current_scroll_pos
                t_col, t_line = self.out.PositionToXY(v_scroll_range) #v_scroll_range last_pos
                x, y = c_col, c_line
                # how far (in lines) the user has scrolled above the end
                real_gap = t_line- c_line
                if real_gap>max_gap:#100
                    # user is reading older output: freeze to avoid repaints
                    self.__freeze_main_log_window()
                    #self.previous_insert_pos = current_scroll_pos
                    #self.previous_scroll_pos = current_scroll_pos
                else:
                    self.__thaw_main_log_window()
                #tmp_msg ='\n!!!!! current {}, range {}, t_line {}, c_line {}, gap {}\n'.format(current_scroll_pos, v_scroll_range, t_line, c_line, t_line -c_line)
                #string+=tmp_msg
                #self.old_stdout.write()
                if True:#err_pattern.search(string.lower()):
                    # split the chunk at each error-pattern match and render
                    # matches in a larger red-on-yellow font
                    last_start = 0
                    for m in err_pattern.finditer(string.lower()):
                        self.out.SetDefaultStyle(wx.TextAttr(wx.GREEN, wx.BLACK,font =wx.Font(self.font_point, family = wx.DEFAULT, style = wx.NORMAL, weight = wx.NORMAL, faceName = 'Consolas')))
                        self.out.AppendText( string[last_start:m.start()])
                        self.out.SetDefaultStyle(wx.TextAttr(wx.RED, wx.YELLOW,font =wx.Font(self.font_point+2, family = wx.DEFAULT, style = wx.NORMAL, weight = wx.NORMAL, faceName = 'Consolas')))
                        self.out.AppendText( string[m.start():m.end()])
                        last_start= m.end()
                    self.out.SetDefaultStyle(wx.TextAttr(wx.GREEN, wx.BLACK,font =wx.Font(self.font_point, family = wx.DEFAULT, style = wx.NORMAL, weight = wx.NORMAL, faceName = 'Consolas')))
                    self.out.AppendText( string[last_start:])
                else:
                    self.out.SetDefaultStyle(wx.TextAttr(wx.GREEN, wx.BLACK,font =wx.Font(self.font_point, family = wx.DEFAULT, style = wx.NORMAL, weight = wx.NORMAL, faceName = 'Consolas')))
                    self.out.AppendText( string)
                if self.log_file:
                    self.log_file.write(string)
                    self.log_file.flush()
                if real_gap>max_gap:#1000
                    #time.sleep(0.01)
                    pass
                    # keep the viewport where the user left it
                    self.out.SetInsertionPoint( self.out.GetScrollPos(wx.VERTICAL))
                    #self.out.SetScrollPos(wx.VERTICAL, self.previous_scroll_pos)
                    #self.previous_insert_pos = current_scroll_pos
                else:
                    #self.previous_scroll_pos= self.out.GetScrollRange(wx.VERTICAL)#v_scroll_range
                    #self.previous_insert_pos = last_pos+len(string)
                    # follow the tail of the log
                    self.out.SetScrollPos(wx.VERTICAL, self.out.GetScrollRange(wx.VERTICAL))
                    #self.out.SetScrollPos(wx.VERTICAL, self.previous_scroll_pos)
                    #self.out.SetInsertionPoint( self.previous_insert_pos) #self.out.ScrollToLine(c_line+line_in_a_page)
                    #pos =self.out.XYToPosition(xxx[0], xxx[1])
                    #self.out.ShowPosition(self.previous_insert_pos)
                    self.__thaw_main_log_window()
            except Exception as e:
                # never let a logging failure kill the caller; report on the
                # real stdout instead
                self.old_stdout.write('\n'+error(traceback.format_exc()))
            #self.write_lock.release()
            #time.sleep(0.1)
        __write(string)
        #threading.Thread(target=__write, args=[string]).start()

    def close(self):
        # Flush and close the mirror log file (the wx control stays alive).
        if self.log_file:
            self.log_file.flush()
            self.log_file.close()

    def flush(self):
        # File-protocol flush; only the mirror file needs flushing.
        if self.log_file:
            self.log_file.flush()

    def __freeze_main_log_window(self):
        #return
        if self.out.IsFrozen():
            pass
        else:
            #self.output_window_last_position =self.out.GetScrollRange(wx.VERTICAL)
            self.out.Freeze()

    def __thaw_main_log_window(self):
        #self.out.SetScrollPos(wx.VERTICAL, self.previous_scroll_pos)
        if self.out.IsFrozen():
            self.out.Thaw()
        else:
            pass
class process_info(object):
    """Lightweight record about a spawned subprocess.

    Wraps a Popen-like object and exposes its pid, a human-readable name,
    and the *live* return code of the underlying process.

    Bug fix: the original ``__init__`` also executed
    ``self.returncode = process.returncode``; because ``returncode`` is a
    read-only property on this class, that assignment raised
    ``AttributeError: can't set attribute`` on every construction.  The
    assignment is removed — the property already provides the value.
    """
    process = None    # the wrapped process object
    pid = None        # process id captured at construction time
    full_name = None  # human-readable name supplied by the caller

    def __init__(self, name, process):
        """Record *process* under the display name *name*."""
        self.process = process
        self.pid = process.pid
        self.full_name = name

    @property
    def returncode(self):
        """Live exit status of the wrapped process (None while running)."""
        return self.process.returncode
class FileEditor(wx.Panel):
    """Editor tab hosted in the edit area: either a plain text editor
    (type='text', loads/saves ``full_file_name``) or a 50x5 grid of
    function-name + argument columns (type='grid').  Ctrl+mouse-wheel
    zooms the font in both modes.
    """
    editor =None            # the wx.TextCtrl or wx.grid.Grid instance
    font_size=10            # current zoom level, changed by the wheel handler
    parent=None
    type = None             # 'text' or 'grid'
    sessions_node =None
    function_node =None
    case_suite_node =None
    full_file_name = None   # backing file for 'text' mode, saved on close
    file_instance = None
    name =''

    def on_close(self):
        # Persist the text-mode buffer back to its backing file.
        if self.full_file_name:
            data = self.editor.GetValue()
            with open(self.full_file_name, 'w') as f:
                f.write(data)
                f.flush()

    #done 2017-9-12: handle close tab in edit_area
    def __init__(self, parent, title='pageOne', type ='grid', file_name = None):
        wx.Panel.__init__(self, parent)
        self.name = title
        self.parent = parent
        self.type = type
        self.full_file_name = file_name
        #self.editor = wx.TextCtrl(self, style = wx.TE_MULTILINE|wx.TE_RICH2|wx.EXPAND|wx.ALL, size=(-1,-1))
        if type in ['text']:
            # rich multi-line text control pre-filled from the backing file
            self.editor = wx.TextCtrl( self, -1, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_AUTO_URL|wx.VSCROLL|wx.TE_RICH|wx.TE_MULTILINE&(~wx.TE_PROCESS_ENTER))
            #wx.richtext.RichTextCtrl( self, -1, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0|wx.VSCROLL|wx.HSCROLL|wx.NO_BORDER|wx.WANTS_CHARS )
            with open(self.full_file_name, 'r') as f:
                for line in f.readlines():
                    self.editor.AppendText(line)
        else:
            # grid mode: column 0 is the function name (black, bold),
            # the remaining columns are its arguments (blue)
            self.editor= gridlib.Grid(self)
            self.editor.CreateGrid(50, 5)
            col = self.editor.GetNumberCols()
            row = self.editor.GetNumberRows()
            function_color ='black'
            arg_color = 'blue'
            for c in range(0, col):
                if c < 1 :
                    self.editor.SetColLabelValue(c, 'Function Name')
                else:
                    self.editor.SetColLabelValue(c, 'arg# {}'.format(c))
                for r in range (0, row):
                    self.editor.SetCellTextColour(r,c,function_color if c <1 else arg_color)
            for r in range (0, row):
                self.editor.SetCellFont(r, 0, wx.Font(self.font_size,wx.SWISS, wx.NORMAL, wx.BOLD ))
        self.editor.Bind( wx.EVT_MOUSEWHEEL, self.editor_OnMouseWheel )
        sizer = wx.BoxSizer()
        sizer.Add(self.editor, 1, wx.EXPAND)
        self.SetSizer(sizer)

    def editor_OnMouseWheel(self,event):
        # Ctrl+wheel zoom: wheel-down shrinks (bounded below by
        # min_font_size), wheel-up grows by one point.
        min_font_size = 5
        interval_step = 2
        if event.ControlDown():
            pass
        else:
            return
        if event.GetWheelRotation() < 0:
            if self.font_size>min_font_size:
                self.font_size-=interval_step
        else:
            self.font_size+=1
        if self.type in ['text']:
            f =self.editor.GetFont()
            f.PointSize= self.font_size
            self.editor.SetFont(f)
        else:
            # grid mode: re-apply the font cell by cell
            col = self.editor.GetNumberCols()
            row = self.editor.GetNumberRows()
            for c in range(0, col):
                for r in range (0, row):
                    f = self.editor.GetCellFont(r, c)
                    f.PointSize = self.font_size
                    self.editor.SetCellFont(r, c, f)
        self.Refresh()
        #wx.StaticText(self, -1, "THIS IS A PAGE OBJECT", (20,20))
#DONE: DasHFrame should handle CLOSE event when closing the app, call on_close_tab_in_edit_area for all opened sessions and files
from functools import wraps
import pprint
def gui_event_thread_handler(func):
    """Decorator for GUI event handlers: call *func*, log any exception
    via the project ``error`` helper instead of crashing the event loop,
    and return the handler's result (None on failure).

    Bug fix: the original wrapper was declared as
    ``def inner(func, *args, **kwargs)`` — the local ``func`` parameter
    shadowed the decorated function, so the first call argument was
    consumed and then *called*, breaking every decorated handler.  The
    wrapper now takes ``*args, **kwargs`` only and uses the closed-over
    ``func``.  The bare ``except:`` is also narrowed to ``Exception``.
    """
    @wraps(func)
    def inner(*args, **kwargs):
        ret = None
        try:
            ret = func(*args, **kwargs)
        except Exception:
            # swallow and log so the wx event loop survives handler bugs
            error(traceback.format_exc())
        return ret
    return inner
class gui_event_decorator():
    """Mixin offering a decorator that dispatches a handler on a
    fire-and-forget background thread so the GUI stays responsive."""

    def __init__(self):
        pass

    @classmethod
    def gui_even_handle(self, func):
        """Wrap *func* so each call runs it on a new daemonless thread.

        The wrapper always returns None (the handler's result is
        discarded); any failure while spawning the thread is printed
        rather than propagated.
        """
        def inner(*call_args, **call_kwargs):
            result = None
            try:
                worker = threading.Thread(
                    target=func, args=call_args, kwargs=call_kwargs)
                worker.start()
            except BaseException:
                # equivalent to the historical bare except: report and continue
                print(traceback.format_exc())
            return result
        return inner
class DasHFrame(MainFrame, gui_event_decorator):#wx.Frame
ini_setting = None
#m_left_navigator =None
redir = None
edit_area=None
tabs_in_edit_area = None
src_path = './src/'
sessions_alive=None
sequence_queue=None
history_cmd = []
history_cmd_index = -1
import_modules={'TC':''}
lib_path ='./lib'
log_path = '../log/dash'
session_path = './sessions'
suite_path = '../test_suite'
dict_test_report= ''
alive =True
mail_server=None
mail_to_list=None
mail_from=None
mail_read_url= 'outlook.office365.com'
mail_password = <PASSWORD>
mail_user ='<EMAIL>'
case_queue =None
check_case_running_status_lock = None
case_list=None
#session_names={}
web_daemon = None
web_host = None
web_port = 8888
mailed_case_pids= []
timestamp=None
mail_failure =False
last_time_call_on_idle= None
ini_file=None
dict_function_obj= {'instance':{}}
dict_function_files = {}
updating_function_page =False
m_log_current_pos = None
def __init__(self,parent=None, ini_file = './gDasH.ini'):
#wx.Frame.__init__(self, None, title="DasH")
gui_event_decorator.__init__(self)
self.timestamp= datetime.now().isoformat('-').replace(':','-')
self.case_list= []
self.case_queue = Queue.Queue()
self.dict_test_report={}
self.check_case_running_status_lock = threading.Lock()
self.tabs_in_edit_area=[]
self.sessions_alive={}
MainFrame.__init__(self, parent=parent)
self.sequence_queue= Queue.Queue()
#self.sequence_queue.put()
self.ini_setting = ConfigParser.ConfigParser()
self.m_log_current_pos = 0
if os.path.exists(ini_file):
self.ini_setting.read(ini_file)
self.src_path = os.path.abspath(self.ini_setting.get('dash','src_path'))
self.lib_path = os.path.abspath(self.ini_setting.get('dash','lib_path'))
self.log_path = os.path.abspath(self.ini_setting.get('dash','log_path'))
self.suite_path = os.path.abspath(self.ini_setting.get('dash', 'test_suite_path'))
self.mail_server = self.ini_setting.get('dash', 'mail_server')
self.mail_from =self.ini_setting.get('dash', 'mail_from')
self.mail_to_list =self.ini_setting.get('dash', 'mail_to_list')
self.mail_read_url =self.ini_setting.get('dash', 'mail_read_url')
self.mail_user = self.ini_setting.get('dash','mail_user')
self.mail_password =self.ini_setting.get('dash', 'mail_password')
self.web_port =int(self.ini_setting.get('dash', 'web_port'))
else:
with open(ini_file, 'w') as tmp_ini_file:
tmp_ini_file.write('''[dash]
test_suite_path = ../test_suite/
log_path= {log_path}
lib_path = {lib_path}
session_path={session_path}
#the source python file folder
src_path = {src_path}
mail_server={mail_server}
mail_to_list={mail_to_list}
mail_user={mail_user}
mail_from ={mail_from}
mail_read_url={mail_read_url}
mail_password = {<PASSWORD>}
web_port={web_port}
'''.format(
log_path = self.log_path,
lib_path = self.lib_path,
session_path = self.session_path,
src_path = self.src_path,
mail_server = self.mail_server,
mail_to_list = self.mail_to_list,
mail_user = self.mail_user,
mail_from = self.mail_from,
mail_read_url = self.mail_read_url,
mail_password = self.mail_password,
web_port = self.web_port))
tmp_ini_file.flush()
#self.ini_setting.read(ini_file)
self.ini_file = ini_file
from lib.common import create_case_folder, create_dir
sys.argv.append('-l')
sys.argv.append('{}'.format(self.log_path))
from lib.common import create_dir
self.log_path = create_dir(self.log_path)
self.suite_path = create_dir(self.suite_path)
self.lib_path = create_dir(self.lib_path)
self.src_path = create_dir(self.src_path)
if not os.path.exists(self.log_path):
os.mkdir(self.log_path)
self.add_src_path_to_python_path(self.src_path)
self.redir = RedirectText(self.m_log, self.log_path)
sys.stdout = self.redir
sys.stderr = self.redir
self.m_log.SetBackgroundColour('Black')
self.m_log.SetDefaultStyle(wx.TextAttr(wx.GREEN, wx.BLACK, font =wx.Font(9, family = wx.DEFAULT, style = wx.NORMAL, weight = wx.BOLD, faceName = 'Consolas')))
#self.m_editor.WriteText('welcome to dash world')
self.m_log.WriteText('Welcome to DasH!\n')
self.m_command_box.WriteText('functions.static_function_in_module test_ssh 2')
fileMenu = wx.Menu()
#open_test_suite = fileMenu.Append(wx.NewId(), "Open TestSuite", "Open a Test Suite")
#open_test_case = fileMenu.Append(wx.NewId(), "Open TestCase", "Open a Test Case")
generate_test_report = fileMenu.Append(wx.NewId(), "Generate Test Report", "Generate Test Report")
generate_code = fileMenu.Append(wx.NewId(), "Generate Python Code", "Generate Python Code")
mail_test_report = fileMenu.Append(wx.NewId(), "Mail Test Report", "Mail Test Report")
get_case_queue = fileMenu.Append(wx.NewId(), "Get Case Queue", "Get Case Queue") #done
clear_case_queue = fileMenu.Append(wx.NewId(), "Clear Case Queue", "Clear Case Queue")
kill_running_case = fileMenu.Append(wx.NewId(), "Kill Running Case(s)", "Kill Running Case(s)")
self.m_menubar_main.Append(fileMenu, "&Operations")
self.Bind(wx.EVT_MENU,self.on_generate_test_report ,generate_test_report)
self.Bind(wx.EVT_MENU,self.on_generate_code ,generate_code)
self.Bind(wx.EVT_MENU,self.on_mail_test_report ,mail_test_report)
self.Bind(wx.EVT_MENU,self.get_case_queue ,get_case_queue)
self.Bind(wx.EVT_MENU,self.on_clear_case_queue ,clear_case_queue)
self.Bind(wx.EVT_MENU,self.on_kill_running_case ,kill_running_case)
self.Bind(wx.EVT_CLOSE, self.on_close)
self.m_command_box.Bind(wx.EVT_TEXT_ENTER, self.on_command_enter)
self.m_command_box.Bind(wx.EVT_KEY_UP, self.on_key_up)
self.m_command_box.Bind(wx.EVT_KEY_DOWN, self.on_key_down)
self.m_log.Bind(wx.EVT_TEXT, self.on_m_log_text_changed)
from wx.aui import AuiNotebook
bookStyle = wx.aui.AUI_NB_DEFAULT_STYLE &(~wx.aui.AUI_NB_CLOSE_ON_ACTIVE_TAB)
self.navigator = AuiNotebook(self.m_left_navigator, style= bookStyle )
self.case_suite_page = wx.TreeCtrl(self.navigator, wx.ID_ANY, wx.DefaultPosition, wx.Size(-1, -1), wx.TR_DEFAULT_STYLE | wx.TR_EDIT_LABELS | wx.TR_EXTENDED | wx.TR_HAS_BUTTONS | wx.TR_HAS_VARIABLE_ROW_HEIGHT | wx.HSCROLL | wx.TAB_TRAVERSAL | wx.VSCROLL | wx.WANTS_CHARS)
self.function_page = wx.TreeCtrl(self.navigator, wx.ID_ANY, wx.DefaultPosition, wx.Size(-1, -1), wx.TR_DEFAULT_STYLE | wx.TR_EDIT_LABELS | wx.TR_EXTENDED | wx.TR_HAS_BUTTONS | wx.TR_HAS_VARIABLE_ROW_HEIGHT | wx.HSCROLL | wx.TAB_TRAVERSAL | wx.VSCROLL | wx.WANTS_CHARS)
self.session_page = wx.TreeCtrl(self.navigator, wx.ID_ANY, wx.DefaultPosition, wx.Size(-1, -1), wx.TR_DEFAULT_STYLE | wx.TR_EDIT_LABELS | wx.TR_EXTENDED | wx.TR_HAS_BUTTONS | wx.TR_HAS_VARIABLE_ROW_HEIGHT | wx.HSCROLL | wx.TAB_TRAVERSAL | wx.VSCROLL | wx.WANTS_CHARS)
self.navigator.AddPage(self.session_page, 'SESSION')
self.navigator.AddPage(self.function_page, 'FUNCTION')
self.navigator.AddPage(self.case_suite_page, 'CASE')
self.edit_area = AuiNotebook(self.m_file_editor, style = bookStyle)#wx.aui.AUI_NB_DEFAULT_STYLE)
if False:
new_page = FileEditor(self.edit_area, 'a', type= type)
self.edit_area.AddPage(new_page, 'test')
self.tabs_in_edit_area.append(('test'))
self.edit_area.Enable(True)
right_sizer = wx.BoxSizer(wx.VERTICAL)
#right_sizer =wx.GridSizer( 3, 1, 0, 0 )
left_sizer = wx.BoxSizer(wx.HORIZONTAL)
left_sizer.Add(self.m_left_navigator, 1, wx.EXPAND)
self.edit_area.Bind(wx.aui.EVT_AUINOTEBOOK_PAGE_CHANGED, self.on_active_change_in_edit_area)
#self.m_file_editor.Bind(wx.EVT_CLOSE, self.on_close_tab_in_edit_area)
self.case_suite_page.Bind(wx.EVT_LEFT_DCLICK, self.m_case_treeOnLeftDClick)
#self.case_suite_page.Bind(wx.EVT_MOUSEWHEEL, self.case_tree_OnMouseWheel)
self.case_suite_page.Bind(wx.EVT_TREE_ITEM_EXPANDING, self.m_case_treeOnTreeItemExpanding)
self.session_page.Bind(wx.EVT_LEFT_DCLICK, self.on_LeftDClick_in_Session_tab)
self.function_page.Bind(wx.EVT_LEFT_DCLICK, self.on_LeftDClick_in_Function_tab)
self.function_page.Bind(wx.EVT_RIGHT_DOWN, self.on_right_down_in_function_tab)
self.case_suite_page.Bind(wx.EVT_RIGHT_DOWN, self.on_right_down_in_case_tab)
self.session_page.Bind(wx.EVT_RIGHT_DOWN, self.on_right_down_in_session_tab)
self.edit_area.Bind(wx.aui.EVT_AUINOTEBOOK_TAB_RIGHT_DOWN, self.on_right_up_over_tab_in_edit_area)
main_sizer = wx.BoxSizer(wx.HORIZONTAL)
#main_sizer = wx.GridSizer( 1, 2, 0, 0 )
nav_sizer = wx.BoxSizer()
nav_sizer.Add(self.navigator, 1, wx.EXPAND, 1)
self.m_left_navigator.SetSizer(nav_sizer)
#main_sizer = wx.BoxSizer(wx.HORIZONTAL)
#main_sizer.Add(left_sizer, 3, wx.EXPAND)
main_sizer.Add(left_sizer, 2, wx.EXPAND)
edit_sizer = wx.BoxSizer()
edit_sizer.Add(self.edit_area, 1, wx.EXPAND, 1)
self.m_file_editor.SetSizer(edit_sizer)
right_sizer.Add(self.m_file_editor, 100, wx.ALL|wx.EXPAND, 1)
#right_sizer.Add(self.m_log, 2, wx.ALL|wx.EXPAND, 2)
right_sizer.Add(self.m_command_box,1, wx.ALL|wx.EXPAND, 3)
main_sizer.Add(right_sizer, 8, wx.EXPAND)
self.SetSizer(main_sizer)
ico = wx.Icon('./gui/dash.bmp', wx.BITMAP_TYPE_ICO)
self.SetIcon(ico)
#th= threading.Thread(target=self.polling_running_cases)
#th.start()
#th = threading.Thread(target=self.polling_request_via_mail)
#th.start()
threading.Thread(target=self.web_server_start).start()
#tooltips bind
self.case_suite_page.Bind(wx.EVT_MOTION, self.OnMouseMotion)
self.session_page.Bind(wx.EVT_MOTION, self.OnMouseMotion)
self.function_page.Bind(wx.EVT_MOTION, self.OnMouseMotion)
#wx.html.EVT_HTML_LINK_CLICKED wx.EVT_TEXT_URL, wx.EVT_TEXT_URL,
self.m_log.Bind(wx.EVT_TEXT_URL, self.on_leftD_click_url_in_m_log)
self.Bind(wx.EVT_IDLE, self.on_idle)
self.last_time_call_on_idle = datetime.now()
self.build_session_tab()
self.build_suite_tree()
self.build_function_tab()
self.Show(True)
self.Maximize()
self.create_main_log_window()
    def on_close(self, event):
        """Frame close handler.

        Restores stdout/stderr, stops the polling loops (via ``self.alive``),
        hides the frame, then runs a best-effort teardown: shut down the web
        server, dump the recorded command sequence as a replay script, mail
        the final report, and close every open SessionTab page.
        """
        try:
            # Stop background polling and hand stdout/stderr back to the
            # console before the log widget that backs them is destroyed.
            self.alive =False
            sys.stderr =self.redir.old_stderr
            sys.stdout = self.redir.old_stdout
            self.redir.close()
            event.Skip()
        except Exception as e:
            error(traceback.format_exc())
        self.Show(False)
        time.sleep(0.01)
        def close():
            # Best-effort shutdown of the embedded web server (may not be
            # running; any failure is deliberately ignored).
            try:
                self.web_daemon.shutdown()
            except:
                pass
            # Persist the commands executed this session as a replayable script.
            self.generate_code(file_name='{}/test_script.py'.format(self.suite_path))
            if len(self.dict_test_report):
                self.mail_test_report("DASH TEST REPORT")
            # Close every SessionTab page; index 0 is the '*LOG*' tab and is skipped.
            for index in range(1,self.edit_area.GetPageCount()): #len(self.tabs_in_edit_area)):
                closing_page = self.edit_area.GetPage(index)
                if isinstance(closing_page, (SessionTab)):
                    if closing_page:
                        name = closing_page.name
                        self.tabs_in_edit_area.pop(self.tabs_in_edit_area.index(name))
                        try:
                            closing_page.Disable()
                            closing_page.on_close()
                        except:
                            pass
        close()
        time.sleep(0.01)
        #sys.exit(0)
    def generate_report(self, filename, report_all_cases=True):
        """Write an HTML test report to *filename* and return a plain-text copy.

        One row per entry in ``self.dict_test_report`` (keyed by PID), sorted
        by start time.  When *report_all_cases* is False only cases whose PID
        is not yet in ``self.mailed_case_pids`` (plus in-progress cases) are
        included in the returned text, so incremental mails carry only updates.
        Log paths are rewritten into links served by the built-in web server.
        """
        #fixed 2017-11-19, 2017-10-21 no need to send whole report, just the updating part
        def GetTime(duration):
            # Format a duration in seconds as "D:H:M:S" via datetime arithmetic.
            from datetime import timedelta
            sec = timedelta(seconds=int(duration))
            d = datetime(1,1,1) + sec
            #print("DAYS:HOURS:MIN:SEC")
            return "%d:%d:%d:%d" % (d.day-1, d.hour, d.minute, d.second)
        # Header row; 'duration' appears twice on purpose: seconds and D:H:M:S.
        report_in_list =[['result',
                          'start_time',
                          'end_time',
                          'ProcessID',
                          'duration',
                          'duration',
                          'case_name','log']]
        report = '''Test Report
        RESULT,\tStart_Time,\tEnd_Time,\tPID,\tDuration(s),\tDuration(D:H:M:S)\tCase_Name,\tLog\n'''
        with open(filename, 'w') as f:
            if len(self.dict_test_report):
                #f.write(report)
                # Sort by start_time (index 1 of each report tuple).
                for pi in sorted(self.dict_test_report, key = lambda x: self.dict_test_report[x][1]):
                    case_name, start_time, end_time, duration, return_code ,proc, log_path =self.dict_test_report[pi][:7]
                    if return_code is None:
                        # Still running: "IP" = In Progress.
                        result = 'IP'
                        result_html = '<font color="blue">IP'
                    else:
                        result = return_code # 'FAIL' if return_code else 'PASS'
                        if result.lower() in ['pass']:
                            result_html= '<font color="green">PASS'
                        else:
                            result_html= '<font color="red">FAIL'
                    # Plain-text row; the log path is rewritten into a URL on
                    # the embedded web server.
                    one_record = ['{}'.format(x) for x in [
                        result,
                        start_time,
                        end_time,
                        pi,
                        duration,
                        GetTime(duration),
                        case_name,
                        '{html_link} {file_path}'.format(
                            file_path=log_path,
                            html_link = log_path.replace(
                                self.log_path,
                                'http://{}:{}/log/'.format(self.web_host,self.web_port)
                                ).replace('/\\',r'/')
                            ) ]]
                    # Same row with colored result and a clickable anchor.
                    one_record_html = ['{}'.format(x) for x in [
                        result_html,
                        start_time,
                        end_time,
                        pi,
                        duration,
                        GetTime(duration),
                        case_name,
                        '<a href={html_link}>{file_path}</a>'.format(
                            file_path=log_path,
                            html_link = log_path.replace(
                                self.log_path,
                                'http://{}:{}/log/'.format(self.web_host,self.web_port)
                                ).replace('/\\',r'/')
                            ) ]]
                    report_in_list.append(one_record_html)
                    record = '\t'.join(one_record)
                    if result == 'IP':
                        # In-progress cases are always reported.
                        report+=record+'\n'
                    else:
                        if report_all_cases:
                            report+=record+'\n'
                            self.mailed_case_pids.append(pi)
                        else:
                            # Incremental mode: only cases not mailed before.
                            print('{}\n'.format(record))
                            if pi not in self.mailed_case_pids:
                                report+=record+'\n'
                                self.mailed_case_pids.append(pi)
                            else:
                                pass #
            from lib.common import array2htmltable
            report_in_html_string = array2htmltable(report_in_list)
            f.write(report_in_html_string)
        return report
    def on_close_tab_in_edit_area(self, event):
        """Close the selected tab of the edit area in a background thread.

        Page 0 (the '*LOG*' tab) is never closed.  For SessionTab pages the
        tab name is removed from the bookkeeping list and the matching entry
        in the global ``gSessions`` registry is closed and deleted.
        """
        #self.edit_area.GetPage(self.edit_area.GetSelection()).on_close()
        if self.edit_area.GetSelection()==0:
            return
        def close_tab():
            global gSessions
            closing_page = self.edit_area.GetPage(self.edit_area.GetSelection())
            index =self.edit_area.GetPageIndex(closing_page)
            closing_page.on_close()
            if isinstance(closing_page, (SessionTab)):
                ses_name = closing_page.name
                self.tabs_in_edit_area.pop(self.tabs_in_edit_area.index(ses_name))
                if gSessions.has_key( ses_name):
                    # globals().has_key(ses_name):
                    #g = dict(globals())
                    #globals()[ses_name]=None
                    #del g[ses_name]
                    # Tear down the live session object behind the tab as well.
                    gSessions[ses_name].close_session()
                    del gSessions[ses_name] #del globals()[ses_name]
        # Run off the GUI thread so a slow close_session doesn't freeze the UI.
        threading.Thread(target=close_tab, args=[]).start()
        event.Skip()
def add_item_to_subfolder_in_tree(self,node):
subfolder_path_name = self.case_suite_page.GetPyData(node)['path_name']
items = get_folder_item(subfolder_path_name)
if items is None:
self.case_suite_page.SetItemText(node, self.m_case_tree.GetItemText(node) + ' Not Exists!!!')
self.case_suite_page.SetItemTextColour(node, wx.Colour(255, 0, 0))
return
for i in items:
path_name = '{}/{}'.format(subfolder_path_name,i)
base_name = os.path.basename(i)
item_info = wx.TreeItemData({'path_name':path_name})
self.case_list.append(path_name)
new_item = self.case_suite_page.InsertItem(node, node, base_name)
self.case_suite_page.SetItemData(new_item, item_info)
if os.path.isdir(path_name):
self.case_suite_page.SetItemHasChildren(new_item)
#self.m_case_tree.ItemHasChildren()
#self.m_case_tree.InsertItem(new_item,new_item,'')
    @gui_event_decorator.gui_even_handle
    def build_suite_tree(self):
        """Build the root of the CASE tree from ``self.suite_path``.

        Falls back to the current directory when the suite path is missing;
        children are added lazily by add_item_to_subfolder_in_tree.
        """
        suite_path = self.suite_path #os.path.abspath(self.ini_setting.get('dash','test_suite_path'))
        if not os.path.exists(suite_path):
            suite_path= os.path.abspath(os.path.curdir)
        base_name = os.path.basename(suite_path)
        root =self.case_suite_page.AddRoot(base_name)
        item_info = wx.TreeItemData({'path_name':suite_path})
        self.case_suite_page.SetItemData(root, item_info)
        self.add_item_to_subfolder_in_tree(root)
        self.case_suite_page.Expand(root)
# def OnSelChanged(self, event):
# item = event.GetItem()
# self.display.SetLabel(self.tree.GetItemText(item))
#def case_tree_OnMouseWheel(self, event):
def m_case_treeOnLeftDClick(self, event):
ht_item =self.case_suite_page.GetSelection()
#ht_item = self.HitTest(event.GetPosition())
item_name = self.case_suite_page.GetItemText(ht_item)
item_data = self.case_suite_page.GetItemData(ht_item)
if self.case_suite_page.ItemHasChildren(ht_item):
if self.case_suite_page.IsExpanded(ht_item):
self.case_suite_page.Collapse(ht_item)
else:
self.case_suite_page.ExpandAllChildren(ht_item)
else:
if item_name.lower() in ['.csv', '.xlsx','.xls']:
type = 'grid'
file_name = item_data.Data['path_name']
else:
type = 'text'
file_name = item_data.Data['path_name']
new_page = FileEditor(self.edit_area, 'a', type= type,file_name=file_name)
self.edit_area.AddPage(new_page, item_name)
index = self.edit_area.GetPageIndex(new_page)
self.edit_area.SetSelection(index)
    def m_case_treeOnTreeItemExpanding(self,event):
        """Lazily populate a CASE-tree directory node on first expansion.

        Only fills the node when it has no children yet and its stored path
        is a directory.  Any error (e.g. missing item data) is silently
        ignored — expansion is best-effort.
        """
        ht_item =self.case_suite_page.GetSelection()
        try:
            item_info = self.case_suite_page.GetPyData(ht_item)
            if 0== self.case_suite_page.GetChildrenCount(ht_item):
                if os.path.isdir(item_info['path_name']):
                    self.add_item_to_subfolder_in_tree(ht_item)
        except Exception as e:
            # Deliberate swallow: nodes without data or stale paths just
            # fail to expand rather than raising into the GUI event loop.
            pass
@gui_event_decorator.gui_even_handle
def build_session_tab(self):
if self.session_page.RootItem:
self.session_pagef.DeleteAllItems()
self.ini_setting.read(self.ini_file)
session_path = os.path.abspath(self.ini_setting.get('dash','session_path'))
self.session_path= session_path
if not os.path.exists(session_path):
session_path= os.path.abspath(os.path.curdir)
base_name = os.path.basename(session_path)
sessions = {}
root =self.session_page.AddRoot(base_name)
item_info = wx.TreeItemData({'path_name':session_path})
self.session_page.SetItemData(root, item_info)
self.session_page.Expand(root)
item_list = get_folder_item(session_path)
session_files=[]
for item in item_list:
if os.path.isfile('{}/{}'.format(session_path,item)) and '{}'.format(item).lower().strip().endswith('.csv'):
session_files.append(item)
for csv_file in sorted(session_files):
try:
ses_in_bench = load_bench(os.path.abspath('{}/{}'.format(session_path, csv_file)))
for bench in ses_in_bench:
for ses in ses_in_bench[bench]:
if ses_in_bench[bench][ses].has_key('login_step') and ses_in_bench[bench][ses]['login_step'].strip() not in ['', None]:
ses_in_bench[bench][ses].update(
{'login_step': os.path.abspath('{}/{}'.format(session_path, ses_in_bench[bench][ses]['login_step'].strip()))}
)
sessions.update(ses_in_bench)
except Exception as e:
error(traceback.format_exc())
root =self.session_page.GetRootItem()
for file_name in sorted(sessions.keys()):
item_name = os.path.basename(file_name)
item_info = wx.TreeItemData({'file_name':file_name})
new_bench = self.session_page.InsertItem(root, root, item_name)
self.case_suite_page.SetItemData(new_bench, item_info)
for ses in sorted(sessions[file_name]):
item_name = ses
item_info = wx.TreeItemData({'attribute':sessions[file_name][ses]})
new_item = self.session_page.InsertItem(new_bench, new_bench, item_name)
self.case_suite_page.SetItemData(new_item, item_info)
self.session_page.Expand(root)
first_child = self.session_page.GetFirstChild(root)
self.session_page.Expand(first_child[0])
    #@gui_event_decorator.gui_even_handle
def create_main_log_window(self):
ses_name ='*LOG*'
indow_id = self.edit_area.AddPage(self.m_log, ses_name)
index = self.edit_area.GetPageIndex(self.m_log)
self.edit_area.SetSelection(index)
#self.edit_area.Disable(0,False)
def on_active_change_in_edit_area(self, event):
if self.edit_area.GetPageText(self.edit_area.GetSelection())=="*LOG*":
self.edit_area.SetWindowStyle(wx.aui.AUI_NB_DEFAULT_STYLE&(~wx.aui.AUI_NB_CLOSE_ON_ACTIVE_TAB))
else:
self.edit_area.SetWindowStyle(wx.aui.AUI_NB_DEFAULT_STYLE)
#@gui_event_decorator.gui_even_handle
def on_LeftDClick_in_Session_tab(self, event):
#self.session_page.Disable()
ses_name = self.session_page.GetItemText(self.session_page.GetSelection())
self.session_page.GetItemText(self.session_page.GetSelection())
session_attribute = self.session_page.GetItemData(self.session_page.GetSelection())
if session_attribute.Data.has_key('attribute'):
info(session_attribute.Data['attribute'])
counter =1
original_ses_name = ses_name
tmp_tabs =[]
for index in range(1,self.edit_area.GetPageCount()): #len(self.tabs_in_edit_area)):
tab_page = self.edit_area.GetPage(index)
#tab_page.name
tmp_tabs.append(tab_page.name)
self.tabs_in_edit_area = tmp_tabs
while ses_name in self.tabs_in_edit_area:
ses_name= '{}_{}'.format(original_ses_name,counter)
counter+=1
if globals().has_key(ses_name):
if not globals().has_key('_{}'.format(ses_name)):
info("variable '{}' is existed in global, change the name to _{}".format(ses_name, ses_name))
ses_name='_{}'.format(ses_name)
self.session_page.SetItemText(self.session_page.GetSelection(), ses_name)
else:
error(("variable '{}' is existed in global, please change the name".format(ses_name)))
return
new_page = SessionTab(self.edit_area, ses_name, session_attribute.Data['attribute'], self.sequence_queue, log_path=self.log_path+'/session_log')
window_id = self.edit_area.AddPage(new_page, ses_name)
index = self.edit_area.GetPageIndex(new_page)
self.edit_area.SetSelection(index)
self.tabs_in_edit_area.append(ses_name)
self.sessions_alive.update({ses_name: new_page.name})
attribute = session_attribute.Data['attribute']
log_path='a_fake_log_path_for_auto_script'
attribute['log_path']=log_path
self.add_new_session_to_globals(new_page, '{}'.format(attribute))
#time.sleep(0.1)
event.Skip()
time.sleep(0.5)
#self.session_page.Enable()
    def add_new_session_to_globals(self, new_page, args_str):
        """Register *new_page* in the global DUT table and queue its creation command.

        If an entry with the same name already exists, probe it (attribute
        access on a dead wx object raises) and either drop the stale entry or
        overwrite it, so destroyed SessionTab wrappers never linger in DUT.
        """
        name = new_page.name
        global DUT
        #FIX ISSUE
        # INFO common.py:161 call_function_in_module:
        # module_name: xdsl
        # class_name: xdsl
        # function_name: get_eut
        # args:[wxPython wrapper for DELETED SessionTab object! (The C++ object no longer exists.)]
        # kwargs: {}
        # Exception in thread Thread-40:
        # Traceback (most recent call last):
        # File "C:\Python27\Lib\threading.py", line 801, in __bootstrap_inner
        # self.run()
        # File "C:\Python27\Lib\threading.py", line 754, in run
        # self.__target(*self.__args, **self.__kwargs)
        # File "C:\workspace\gDasH\src\xdsl.py", line 36, in get_eut
        # ses.write(cmd)
        # File "C:\Python27\lib\site-packages\wx-3.0-msw\wx\_core.py", line 16711, in __getattr__
        # raise PyDeadObjectError(self.attrStr % self._name)
        if name in DUT:
            try:
                # Touching .name on a deleted wx wrapper raises PyDeadObjectError;
                # a live object is removed, a dead one is replaced in-place.
                DUT[name].name
                del DUT[name]
            except :
                DUT[name]= new_page
        else:
            DUT[name]= new_page
        # Queue the equivalent dut.dut(...) construction for generate_code;
        # the fake log path placeholder is swapped for the script's log_path
        # variable and not_call_open is flipped so the replay actually opens.
        self.add_cmd_to_sequence_queue('DUT["{}"] = dut.dut(name= "{}", **{})'.format(new_page.name,new_page.name,args_str.replace("'a_fake_log_path_for_auto_script'",'log_path').replace("'not_call_open': True,", "'not_call_open': False,") ), 'dut')
#session = dut(name, **attributes)
    @gui_event_decorator.gui_even_handle
    def on_command_enter(self, event):
        """Execute the command(s) typed into the command box.

        Each non-empty line is parsed as ``module.function args`` or
        ``module.class.function args``; the resolved callable is run on a
        worker thread so the GUI stays responsive, and the command is added
        to both the recall history and the code-generation queue.
        """
        info('called on_command_enter')
        #self.redir.previous_scroll_pos=self.m_log.GetScrollRange(wx.VERTICAL)
        #self.redir.provious_insert_pos = self.m_log.GetLastPosition()+1
        #self.redir.out.SetInsertionPoint(self.redir.previous_insert_pos)
        #self.redir.out.SetScrollPos(wx.VERTICAL,self.redir.previous_scroll_pos)
        # Scroll the log to the end so the command's output is visible.
        self.redir.out.SetInsertionPoint(self.redir.out.GetLastPosition())
        self.redir.out.SetScrollPos(wx.VERTICAL,self.redir.out.GetScrollRange(wx.VERTICAL))
        cmd = self.m_command_box.GetValue()
        self.m_command_box.Clear()
        cmd = cmd.strip()
        cmds = cmd.replace('\r\n', '\n').split('\n')
        def handle_one_cmd(cmd):
            if cmd.strip()=='':
                return
            # shlex with whitespace_split keeps double-quoted args intact.
            cmd_string = cmd.strip()
            lex = shlex.shlex(cmd_string)
            lex.quotes = '"'
            lex.whitespace_split = True
            cmd_list=list(lex)
            function_obj_name = cmd_list[0]
            # Only names registered in the FUNCTION tab are callable.
            if self.dict_function_obj.has_key(function_obj_name):
                call_function = self.dict_function_obj[function_obj_name]
            else:
                return
            module,class_name, function,args = parse_command_line(cmd)
            self.add_cmd_to_history(cmd, module, None, class_name)
            #args[0]=self.sessions_alive['test_ssh'].session
            if module !='' or class_name!='' or function!='':
                # Rewrite args that name a live DUT session into a DUT[...] lookup.
                after_sub_args=[]
                for i in range(len(args)):
                    a = args[i]
                    if a in globals():
                        after_sub_args.append(a)
                    elif a in DUT:
                        after_sub_args.append('DUT["{}"]'.format(a))
                    else:
                        after_sub_args.append(a)
                function_name, new_argvs, new_kwargs, str_code = call_function_in_module(module,class_name,function,after_sub_args, globals())
                #call_function = None
                # if class_name!="":
                #
                # call_function = getattr(instance_name, function_name)
                # #(*new_argvs,**new_kwargs)
                # else:
                # call_function = instance_name#(*new_argvs,**new_kwargs)
                # Run on a worker thread to keep the GUI responsive.
                th =threading.Thread(target=call_function, args=new_argvs, kwargs=new_kwargs)
                th.start()
                #self.m_command_box.ShowPosition(len(self.m_command_box.GetString())+1)
                # Second call carries str_code so it lands in the sequence queue.
                self.add_cmd_to_history(cmd, module, str_code, class_name)
            else:
                error('"{}" is NOT a valid call in format:\n\tmodule.class.function call or \n\tmodule.function'.format(cmd))
        for cmd in cmds:
            try:
                handle_one_cmd(cmd)
            except:
                error(traceback.format_exc())
        #self.redir.previous_scroll_pos=self.m_log.GetScrollRange(wx.VERTICAL)
        #self.redir.provious_insert_pos = self.m_log.GetLastPosition()+1
        event.Skip()
def add_src_path_to_python_path(self, path):
paths = path.split(';')
old_path = sys.path
for p in paths:
if p in old_path:
info('path {} already in sys.path'.format(p))
else:
abspath = os.path.abspath(p)
if os.path.exists(abspath):
sys.path.insert(0,abspath)
else:
warn('path {} is not existed, ignored to add it into sys.path'.format(p))
    def on_key_down(self, event):
        """Key-down handler for the command box.

        TAB and Shift+'?' are treated as immediate submit shortcuts (the
        character is appended and the command executed); plain Enter moves
        the caret to the end before the TE_PROCESS_ENTER handler fires.
        """
        #error(event.KeyCode)
        keycode = event.KeyCode
        if keycode ==wx.WXK_TAB:
            self.m_command_box.AppendText('\t')
            self.on_command_enter(event)
        # NOTE(review): wx.PAPER_ENV_INVITE is a paper-size constant, not a
        # key code — presumably this branch meant the '/' key so that
        # Shift+'/' submits a '?' help query.  TODO confirm intended key.
        elif keycode == wx.PAPER_ENV_INVITE and wx.GetKeyState(wx.WXK_SHIFT):
            self.m_command_box.AppendText('?')
            self.on_command_enter(event)
        elif keycode in [wx.WXK_RETURN]:
            #cmd = self.m_command_box.GetValue()
            self.m_command_box.SetInsertionPointEnd()
            #self.m_command_box.SetValue(cmd)
            event.Skip()
        else:
            event.Skip()
    def on_key_up(self, event):
        """Key-up handler for the command box: Up/Down cycle the command history.

        Up steps backwards, Down steps forwards through ``self.history_cmd``
        (treated as a ring via get_next_in_ring_list); the selected command
        replaces the box contents.  TAB is swallowed here because key-down
        already handled it.
        """
        keycode = event.KeyCode
        increase =False
        if keycode ==wx.WXK_UP:
            pass
        elif keycode ==wx.WXK_DOWN:
            increase =True#
        if keycode in [wx.WXK_UP, wx.WXK_DOWN]:
            self.m_command_box.Clear()
            self.history_cmd_index, new_command = get_next_in_ring_list(self.history_cmd_index,self.history_cmd,increase=increase)
            self.m_command_box.AppendText(new_command)
        if keycode in [wx.WXK_TAB]:
            pass
        else:
            event.Skip()
def add_cmd_to_history(self, cmd, module_name, str_code, class_name=""):
if str_code is None:
if self.history_cmd==[]:
self.history_cmd.append(cmd)
elif self.history_cmd[-1]==cmd:
pass
else:
self.history_cmd.append(cmd)
self.history_cmd_index= len(self.history_cmd)
else:# str_code is not None:
self.add_cmd_to_sequence_queue(str_code,module_name, class_name )
#self.sequence_queue.put([cmd, datetime.now()])
    def get_description_of_function(self, function_obj):
        """Return a one-line signature (plus docstring, if any) for *function_obj*.

        Tries inspect.getsource first; when source is unavailable (e.g. a
        binary distribution) reconstructs the signature from getargspec,
        quoting string defaults.  Any failure yields '' — this is tooltip
        text only, so best-effort is acceptable.
        """
        import inspect
        fundefstr=''
        try:
            try:
                fundef = inspect.getsource(function_obj) # recreate function define for binary distribute
                # Everything up to the first ':' is the def line / signature.
                fundefstr = fundef[:fundef.find(':')]
            except Exception as e:
                # Fallback: rebuild "name( a, b = 1, ... )" from the argspec.
                # NOTE(review): len(defaults) raises TypeError when the
                # function has no defaults (getargspec returns None); that is
                # swallowed by the outer except and '' is returned.
                (args, varargs, keywords, defaults) =inspect.getargspec(function_obj)
                argstring = ''
                largs=len(args)
                ldefaults= len(defaults)
                gaplen = largs-ldefaults
                index =0
                for arg in args:
                    if index <gaplen:
                        argstring+='%s, '%arg
                    else:
                        defvalue = defaults[index-gaplen]
                        if type('')==type(defvalue):
                            defvalue = '"%s"'%defvalue
                        argstring+='%s = %s, '%(arg,str(defvalue))
                    index+=1
                # func_name is the Python 2 spelling of __name__.
                fundefstr ='%s( %s )'%(function_obj.func_name, argstring)
                fundef =fundefstr
            listoffun =fundef.split('\n')
            ret = function_obj.__doc__
            if ret:
                # Append the docstring, indented under the signature line.
                fundefstr = fundefstr +'\n '+'\n '.join(ret.split('\n'))
        except Exception as e:
            pass
        return fundefstr
@gui_event_decorator.gui_even_handle
def check_whether_function_file_is_updated(self):
for module_file in self.dict_function_files.keys():
old_modify_time = self.dict_function_files[module_file]
current_modify_time = os.path.getmtime(module_file)
if current_modify_time ==old_modify_time:
continue
else:
if self.updating_function_page is False:
self.build_function_tab()
    @gui_event_decorator.gui_even_handle
    def build_function_tab(self):
        """(Re)build the FUNCTION tree by importing every module in src_path.

        Tears down previously created instances (calling close() where
        available), clears the registries, then for each .py/.pyc module adds
        a node per top-level function and, for each class, an instance whose
        public methods become child nodes.  ``self.dict_function_obj`` maps
        dotted names to callables for the command box; ``dict_function_files``
        records mtimes for the change watcher.
        """
        self.updating_function_page=True
        try:
            # Release old class instances so resources they hold are freed.
            instances = self.dict_function_obj['instance'].keys()
            for inst_name in instances:
                inst = self.dict_function_obj['instance'][inst_name]
                #print ('instance ref count',inst_name, sys.getrefcount(inst))
                if 'close' in dir(inst):
                    inst.close()
                del inst
            fun_list = self.dict_function_obj.keys()
            for fun_name in fun_list:
                inst = self.dict_function_obj[fun_name]
                #print ('instance ref count',fun_name, sys.getrefcount(inst))
                del inst
            time.sleep(1)
            #import gc
            #gc.collect()
            self.dict_function_obj={'instance':{}}
            self.dict_function_files= {}
            src_path = os.path.abspath(self.src_path)
            if not os.path.exists(src_path):
                src_path= os.path.abspath(os.path.curdir)
            base_name = os.path.basename(src_path)
            #FIX ISSUE BELOW, rebuild function tree
            # Traceback (most recent call last):
            # File "gui\DasHFrame.pyc", line 995, in build_function_tab
            # File "wx\_controls.pyc", line 5428, in AddRoot
            # PyAssertionError: C++ assertion "parent.IsOk() || !(HTREEITEM)::SendMessageW((((HWND)GetHWND())), (0x1100 + 10), (WPARAM)(0x0000), (LPARAM)(HTREEITEM)(0))" failed at ..\..\src\msw\treectrl.cpp(1472) in wxTreeCtrl::DoInsertAfter(): can't have more than one root in the tree
            # DeleteAllItems before AddRoot avoids the duplicate-root assertion above.
            self.function_page.DeleteAllItems()
            root =self.function_page.AddRoot(base_name)
            item_info = wx.TreeItemData({'name':src_path})
            self.function_page.SetItemData(root, item_info)
            modules = get_folder_item(src_path)
            if modules is None:
                self.function_page.SetItemText(root, self.function_page.GetItemText(root) + ' Not Exists!!!')
                self.function_page.SetItemTextColour(root, wx.Colour(255, 0, 0))
                return
            for module_file in modules:
                # Skip a .pyc when its .py sibling is present, and dunder files.
                if module_file.endswith('.pyc'):
                    if module_file[:-1] in modules:
                        continue
                if module_file.startswith('__'):
                    continue
                path_name = '{}'.format(os.path.abspath(self.src_path))
                module_name = os.path.basename(module_file).split('.')[0]
                extension = os.path.basename(module_file).split('.')[-1]
                full_name = '{}/{}'.format(path_name,module_file)
                if extension.lower() in ['py', 'pyc']:
                    try:
                        new_module = self.function_page.InsertItem(root, root, module_name)
                        # imp.find_module locates the module on sys.path (src_path
                        # was added there); load_module (re)imports it.
                        module_file, path_name, description = imp.find_module(module_name)
                        lmod = imp.load_module(module_name, module_file, path_name,description)
                        self.dict_function_files[full_name] = os.path.getmtime(full_name)
                        for attr in sorted(dir(lmod)):
                            if attr.startswith('__'):
                                continue
                            attr_obj = getattr(lmod, attr)
                            attr_type = type(attr_obj)
                            if attr_type == types.FunctionType :
                                # Top-level function: one leaf, keyed "module.func".
                                new_item = self.function_page.InsertItem(new_module, new_module, '{}'.format( attr))
                                fun_str = '{}.{}'.format(module_name,attr)
                                item_info = wx.TreeItemData({'name':fun_str,
                                                             'tip':self.get_description_of_function(attr_obj),
                                                             })
                                self.dict_function_obj[fun_str] = attr_obj
                                self.function_page.SetItemData(new_item, item_info)
                            elif attr_type== types.TypeType:
                                # Class: instantiate it (no-arg ctor assumed) and
                                # expose its public methods as "module.Class.method".
                                #class_obj = getattr(lmod, attr)
                                instance = getattr(lmod, attr)()
                                self.dict_function_obj['instance'][attr]=instance
                                new_class = self.function_page.InsertItem(new_module, new_module, attr)
                                item_info = wx.TreeItemData({'name':'{}.{}'.format(module_name,attr)})
                                self.function_page.SetItemData(new_class, item_info)
                                for attr_in_class in sorted(dir(instance)):
                                    if attr_in_class.startswith('__'):
                                        continue
                                    attr_obj = getattr(instance,attr_in_class)
                                    attr_type =type(attr_obj)
                                    if attr_type == types.MethodType :
                                        fun_str = '{}.{}.{}'.format(module_name,attr,attr_in_class)
                                        item_info = wx.TreeItemData({'name':fun_str,
                                                                     'tip':self.get_description_of_function(attr_obj)})
                                        new_item = self.function_page.InsertItem(new_class, new_class, attr_in_class)
                                        self.dict_function_obj[fun_str] = getattr(instance, attr_in_class)#attr_obj
                                        self.function_page.SetItemData(new_item, item_info)
                    except :
                        # Best-effort: a module that fails to import/instantiate
                        # is simply left without children in the tree.
                        pass
            self.function_page.Expand(root)
            first_child = self.function_page.GetFirstChild(root)
            self.function_page.Expand(first_child[0])
        except Exception as e:
            print(traceback.format_exc())
        self.updating_function_page=False
    def on_LeftDClick_in_Function_tab(self,event):
        """Double-click in the FUNCTION tree pastes the dotted callable name
        into the command box (with a trailing space) ready for arguments."""
        event.Skip()
        select_item = self.function_page.GetSelection()
        fun_name = self.function_page.GetItemData(select_item)
        text_in_tree = self.function_page.GetItemText(select_item)
        # Only leaves carry a 'name' (module/class container nodes may not).
        if fun_name != None and fun_name.Data.has_key('name'):
            cmd = fun_name.Data['name']
            info('click item in Functions tab: {}'.format(fun_name.Data['name']))
            # CallAfter: mutate the TextCtrl on the GUI thread after this
            # event handler returns.
            wx.CallAfter(self.m_command_box.Clear)
            wx.CallAfter(self.m_command_box.AppendText, cmd+' ')
            wx.CallAfter(self.m_command_box.SetFocus)
            wx.CallAfter(self.m_command_box.SetInsertionPointEnd)
            wx.CallAfter(self.m_command_box.Refresh)
def on_refresh_case_page(self, event):
self.case_suite_page.DeleteAllItems()
self.build_suite_tree()
info('Refresh Case tab done!')
def on_right_down_in_session_tab(self, event):
menu = wx.Menu()
item = wx.MenuItem(menu, wx.NewId(), "Refresh")
#acc = wx.AcceleratorEntry()
#acc.Set(wx.ACCEL_NORMAL, ord('O'), self.popupID1)
#item.SetAccel(acc)
menu.AppendItem(item)
self.Bind(wx.EVT_MENU, self.on_refresh_session_page,item)
self.PopupMenu(menu,event.GetPosition())
def on_refresh_session_page(self, event):
self.session_page.DeleteAllItems()
self.build_session_tab()
info('Refresh Session tab done!')
def on_right_down_in_function_tab(self, event):
menu = wx.Menu()
item = wx.MenuItem(menu, wx.NewId(), "Refresh")
#acc = wx.AcceleratorEntry()
#acc.Set(wx.ACCEL_NORMAL, ord('O'), self.popupID1)
#item.SetAccel(acc)
menu.AppendItem(item)
self.Bind(wx.EVT_MENU, self.on_refresh_function_page,item)
self.PopupMenu(menu,event.GetPosition())
def on_refresh_function_page(self, event):
self.function_page.DeleteAllItems()
self.build_function_tab()
info('Refresh Function tab done!')
def add_cmd_to_sequence_queue(self, cmd, module_name, class_name=""):
if self.import_modules.has_key(module_name):
pass
else:
self.import_modules.update({module_name:class_name})
self.sequence_queue.put([cmd,datetime.now() ])
    def generate_code(self, file_name ):
        """Drain the sequence queue into a standalone replay script.

        Emits a ``__main__`` guard that sets up sys.path and a case log
        folder, imports every module recorded by add_cmd_to_sequence_queue
        (instantiating recorded classes), replays each queued command with
        its timestamp as a comment, and closes any dut sessions both on the
        success path and in the except path.  The script is appended to
        *file_name*; nothing is written when no commands were recorded.
        """
        #todo 2017-10-21 no code need, when no command entered at all
        # The literal "{}" at the end of format() becomes the empty DUT dict
        # in the generated source.
        str_code ="""#created by DasH {}
if __name__ == "__main__":
    import sys, traceback
    sys.path.insert(0,r'{}')
    sys.path.insert(0,r'{}')
    import common
    log_path= '../log/tmp'
    log_path= common.create_case_folder()
    DUT={}
    try:
""".format(datetime.now().isoformat('-'), self.src_path,self.lib_path , "{}")
        sessions =[]
        for module in self.import_modules:
            str_code+='        import {mod}\n'.format(mod=module)#\n {mod}_instance = {mod}()
        for module in self.import_modules:
            class_name = self.import_modules[module]
            if class_name!="":
                str_code+='        {mod}_instance = {mod}.{class_name}()\n'.format(mod=module, class_name=class_name)#\
        no_operation = True
        # Drain the queue; each entry is [code_line, timestamp].
        while True:
            try:
                cmd, timestamp =self.sequence_queue.get(block=False)[:2]
                str_code +='        {} #{}\n'.format(cmd, timestamp.isoformat( ' '))
                # Track dut session variables so they get close_session() calls.
                if cmd.find('dut.dut(')!=-1:
                    sessions.append(cmd.split('=')[0].strip())
                no_operation=False
                #datetime.now().isoformat()
            except Exception as e:
                # Queue empty (queue.Empty) — stop draining.
                break
        close_session=''
        # Exception path: print traceback, close sessions, exit non-zero.
        str_code+='''    except Exception as e:
        print(traceback.format_exc())\n'''
        for ses in sessions:
            str_code+='''        {}.close_session()\n'''.format(ses)
            no_operation=False
        str_code+='        sys.exit(-1)\n'#, sys.exit(-1)
        # Success path: close sessions after the try/except.
        for ses in sessions:
            str_code+='''    {}.close_session()\n'''.format(ses)
        # NOTE(review): info() is assumed to accept multiple positional args
        # like print — confirm against its definition in lib.common.
        info('code saved to file: ',file_name)
        info(str_code)
        info('code saved to file: ',file_name)
        if not no_operation:
            # Append so repeated shutdowns accumulate in one script file.
            with open(file_name, 'a+') as f:
                f.write(str_code)
        else:
            info('No code will be saved to file, due to no operation was performed ',file_name)
def on_right_down_in_case_tab(self, event):
menu = wx.Menu()
item1 = wx.MenuItem(menu, wx.NewId(), "Run Test")
item2 = wx.MenuItem(menu, wx.NewId(), "Kill Test")
item3 = wx.MenuItem(menu, wx.NewId(), "Refresh")
#acc = wx.AcceleratorEntry()
#acc.Set(wx.ACCEL_NORMAL, ord('O'), self.popupID1)
#item.SetAccel(acc)
menu.AppendItem(item1)
menu.AppendItem(item2)
menu.AppendItem(item3)
self.Bind(wx.EVT_MENU, self.on_run_script,item1)
self.Bind(wx.EVT_MENU, self.on_kill_script,item2)
self.Bind(wx.EVT_MENU, self.on_refresh_case_page,item3)
self.PopupMenu(menu,event.GetPosition())
    def on_kill_script(self,event):
        """Kill the test process attached to the selected CASE tree item.

        If the item has a 'PROCESS' entry and that process is still running
        (returncode is None), mark it KILL in the report, mail an update and
        terminate it; if it already finished, just record PASS/FAIL.
        """
        hit_item = self.case_suite_page.GetSelection()
        item_name = self.case_suite_page.GetItemText(hit_item)
        item_data = self.case_suite_page.GetItemData(hit_item).Data
        # Only items that were previously launched carry a PROCESS handle.
        if item_data.has_key('PROCESS'):
            p = item_data['PROCESS']
            name= item_data['FULL_NAME']
            info('script:{}, returncode:{}'.format(name,p.returncode))
            if p.returncode is None:
                #if p.is_alive():
                info('Terminate alive process {}:{}'.format(item_name, p.pid))
                result ='KILL'
                self.update_case_status(p.pid, result)
                self.mail_test_report("DASH TEST REPORT-updating")
                p.terminate()
            else:
                result ='FAIL' if p.returncode else 'PASS'
                info('{}:{} completed with returncode {}'.format(item_name, p.pid, result))
                self.update_case_status(p.pid, result)
    def run_script(self, script_name):
        """Spawn one test case in a new console via script_runner and register it.

        *script_name* is a full command line: the first token is the script
        (resolved against suite_path when it carries no path separator), the
        remaining tokens are its arguments.  Returns (Popen object, case log
        folder).
        NOTE(review): if Popen (or add_new_case_to_report) raises, the
        exception is only logged and the final return raises NameError because
        p was never bound -- confirm whether callers rely on that propagating.
        """
        old_script_name = script_name
        lex = shlex.shlex(script_name)
        lex.quotes = '"'
        lex.whitespace_split = True
        script_name_and_args = list(lex)
        script_args = script_name_and_args[1:]
        script_name = script_name_and_args[0]
        if script_name.find(os.path.sep)!=-1:
            pass
        else:
            script_name= '{}/{}'.format(self.suite_path,script_name)
        from lib.common import create_case_folder
        # sys.argv is swapped around create_case_folder -- presumably it
        # derives the case folder name from argv; restored right after.
        old_sys_argv = sys.argv
        sys.argv= [script_name]+script_args
        case_log_path = create_case_folder(self.log_path )#self.log_path #create_case_folder()
        sys.argv= old_sys_argv
        try:
            # Prefer the bundled script_runner.exe (frozen build); otherwise
            # run script_runner.py under the current interpreter.
            if os.path.exists('script_runner.exe'):
                execute = 'script_runner.exe'
                cmd = [execute,script_name ]+script_args + ['-l','{}'.format(case_log_path)]
                #p=subprocess.Popen(cmd, creationflags = subprocess.CREATE_NEW_CONSOLE)
            else:
                cmd = [sys.executable,'./script_runner.py', script_name ]+script_args+ ['-l','{}'.format(case_log_path)]
            p=subprocess.Popen(cmd, creationflags = subprocess.CREATE_NEW_CONSOLE)#, stdin=pipe_input, stdout=pipe_output,stderr=pipe_output)
            self.add_new_case_to_report(p.pid, old_script_name, p, case_log_path)
        except:
            error(traceback.format_exc())
        return p, case_log_path
    def on_run_script(self,event):
        """Run the selected tree item: csv/txt suites are queued, scripts spawned."""
        hit_item = self.case_suite_page.GetSelection()
        item_name = self.case_suite_page.GetItemText(hit_item)
        item_data = self.case_suite_page.GetItemData(hit_item).Data
        script_name = self.case_suite_page.GetItemData(hit_item).Data['path_name']
        if script_name.lower().split('.')[-1] in ['txt','csv']:#test suite file, not a single script
            self.run_a_test_suite(script_name)
        else:#a single test case
            # Kill any previous run of this item before starting a new one.
            self.on_kill_script(event)
            try:
                p, case_log_path = self.run_script('{} {}'.format(script_name, item_name.replace(os.path.basename(script_name), '')))
                # Remember the process on the tree item so on_kill_script can find it.
                self.case_suite_page.GetItemData(hit_item).Data['PROCESS']=p
                self.case_suite_page.GetItemData(hit_item).Data['FULL_NAME']= item_name
                info('start process {} :{}'.format(item_name, p.pid))
                #p.join() # this blocks until the process terminates
                time.sleep(1)
            except Exception as e :
                error(traceback.format_exc())
        #p = Process(target=run_script, args=[script_name, script_and_args])
        #p.start()
def check_case_status(self):
self.check_case_running_status_lock.acquire()
changed=False
running_case = 0
for pid in self.dict_test_report.keys():
case_name, start_time, end_time, duration, return_code ,proc, log_path= self.dict_test_report[pid]
if return_code is None:
if proc.poll() is None:
running_case+=1
debug('RUNNING', start_time, end_time, duration, return_code ,proc, log_path)
else:
changed=True
return_code = 'FAIL' if proc.returncode else 'PASS'
self.update_case_status(pid,return_code)
if running_case:
pass
elif not self.case_queue.empty():#self.case_queue.qsize():
case_name_with_args = self.case_queue.get()
p, case_log_path = self.run_script(case_name_with_args)
self.check_case_running_status_lock.release()
if changed:
self.generate_report(filename='{}/dash_report_{}.html'.format(self.log_path, self.timestamp),report_all_cases= False)
#test_report = self.generate_report(filename='{}/dash_report.txt'.format(self.log_path))
self.mail_test_report('DasH Test Report-updating')
return changed
    def polling_running_cases(self):
        """Background loop: refresh running-case status every 10 s until shutdown."""
        try:
            while self.alive:
                time.sleep(10)
                try:
                    self.check_case_status()
                except:
                    # Only log while the app is alive; stay silent during shutdown.
                    if self.alive:
                        error(traceback.format_exc())
        except:
            pass
        print('end polling_running_cases')
        time.sleep(0.01)
        #sys.exit(0) #break can't exit the app immediately, so change it to exit
        #self.check_case_running_status_lock.acquire()
        #self.check_case_running_status_lock.release()
def add_new_case_to_report(self, pid, case_name, proc, log_path):
start_time=datetime.now()
duration = 0
end_time = None
return_code = None
#self.check_case_running_status_lock.acquire()
if pid in self.dict_test_report:
self.dict_test_report[pid].update([case_name, start_time, end_time, duration, return_code, proc, log_path])
else:
self.dict_test_report[pid]= [case_name, start_time, end_time, duration, return_code, proc, log_path ]
#self.check_case_running_status_lock.release()
def update_case_status(self, pid,return_code=None):
now = datetime.now()
case_name, start_time, end_time, duration, tmp_return_code ,proc,log_path= self.dict_test_report[pid]
if tmp_return_code is None:
duration = (now-start_time).total_seconds()
if return_code is not None:
end_time=now
self.dict_test_report[pid]=[case_name,start_time, end_time, duration, return_code, proc, log_path]
else:
pass# don't update one case result twice
def mail_test_report(self, subject="DASH TEST REPORT-updating"):
try:
#self.check_case_status()
report_all_cases=True
if subject.find('updating')!=-1:
report_all_cases=False
test_report = self.generate_report(filename='{}/dash_report_{}.html'.format(self.log_path, self.timestamp),report_all_cases= report_all_cases)
#TO, SUBJECT, TEXT, SERVER, FROM
send_mail_smtp_without_login(self.mail_to_list, subject,test_report,self.mail_server,self.mail_from)
except Exception as e:
error(traceback.format_exc())
    def on_mail_test_report(self,event):
        # Menu/button handler: mail a full, explicitly requested test report.
        self.mail_test_report('DasH Test Report-requested')
        #p.terminate()
    def on_handle_request_via_mail(self):
        """Poll the IMAP inbox and execute 'dash-request-*' commands mailed in.

        Only unseen messages from addresses listed in self.mail_to_list are
        honored.  Handled messages are re-fetched with RFC822 so the server
        marks them seen; unrecognized ones are explicitly flagged unseen again.
        """
        import imaplib
        from email.parser import Parser
        def process_multipart_message(message):
            # Recursively flatten a (possibly multipart) message into one string.
            if isinstance(message, basestring) or isinstance(message , list):
                return message
            rtn = ''
            try:
                if message.is_multipart():
                    for m in message.get_payload():
                        rtn += process_multipart_message(m)
                else:
                    rtn += message.get_payload()
            except Exception as e:
                pass
            return rtn
        url, user, password = self.mail_read_url,self.mail_user, self.mail_password
        if self.mail_user in ['<EMAIL>']:
            # Mail account not configured: nothing to poll.
            return
        conn = imaplib.IMAP4_SSL(url,993)
        #conn.logout()
        #conn.authenticate('')
        conn.debug = 0#10
        def plain_callback(response):
            # SASL PLAIN response: authzid NUL authcid NUL password.
            return "{}\x00{}\x00{}".format(user.lower(),user.lower(),password)
        try:
            conn.authenticate('PLAIN',plain_callback)
        except:
            # Server rejected SASL PLAIN: fall back to plain LOGIN.
            conn.login(user,password)
        self.mail_failure = False
        conn.select('INBOX')#, readonly=True)
        try:
            authorized_mail_address = self.mail_to_list.replace(',',';').split(';')
        except Exception as e:
            return
        for mail_address in authorized_mail_address:
            results,data = conn.search(None,'(UNSEEN)', '(FROM "{}")'.format(mail_address)) # #'ALL')
            msg_ids = data[0]
            msg_id_list = msg_ids.split()
            MAX_UNREAD_MAIL = 50
            # Newest first, bounded so a flooded mailbox cannot stall the poller.
            for unread_mail_id in msg_id_list[::-1][:MAX_UNREAD_MAIL]:
                # Peek at the header only, so reading it doesn't mark the mail seen.
                result,data = conn.fetch(unread_mail_id,'(BODY.PEEK[HEADER])')#"(RFC822)")#
                raw_email = data[0][1]
                p = Parser()
                msg = p.parsestr(raw_email)
                #msg = process_multipart_message(msg )
                from1 = msg.get('From')
                sub = '{}'.format(msg.get('Subject'))
                sub = sub.strip().lower()
                support_list='''
                ###############################
                mail subject below is supported:
                dash-request-case-queue : request the cases in queue which to be executed
                dash-request-case : request cases which are under suite_path
                dash-request-report : request a test report by now
                dash-request-kill-running : to kill all running test cases
                dash-request-clear-queue : to clear/remove all cases which are in case queue
                dash-request-run : to run script(s), each line is a script with arguments if it has
                --------------------------------
                ***non-case-sensitive***
                ###############################
                '''
                handled =False
                # Dispatch on the (lower-cased) subject line.
                if sub in ['dash']:
                    send_mail_smtp_without_login(self.mail_to_list, 'DONE-DasH Support List',support_list,self.mail_server,self.mail_from)
                    handled = True
                    #conn.uid('STORE', unread_mail_id, '+FLAGS', '\SEEN')
                elif sub in ['dash-request-case-queue']:
                    case_in_queue =self.get_case_queue(None)
                    send_mail_smtp_without_login(self.mail_to_list, 'DONE-DasH:Case In Queue',case_in_queue+support_list,self.mail_server,self.mail_from)
                    #conn.uid('STORE', unread_mail_id, '+FLAGS', '\SEEN')
                    handled = True
                elif sub in ['dash-request-case']:
                    cases_string = '\n\t'.join(self.case_list)
                    send_mail_smtp_without_login(self.mail_to_list, 'DONE-DasH:Case List',cases_string+support_list,self.mail_server,self.mail_from)
                    handled = True
                    #conn.uid('STORE', unread_mail_id, '+FLAGS', '\SEEN')
                elif sub in ['dash-request-report']:
                    self.mail_test_report('DasH Test Report-requested')
                    #conn.uid('STORE', unread_mail_id, '+FLAGS', '\SEEN')
                    handled = True
                elif sub in ['dash-request-kill-running']:
                    killed= self.on_kill_running_case()
                    send_mail_smtp_without_login(self.mail_to_list, 'DONE-[DasH]:Killed Running Case(s)',killed+support_list,self.mail_server,self.mail_from)
                    handled = True
                    #conn.uid('STORE', unread_mail_id, '+FLAGS', '\SEEN')
                elif sub in ['dash-request-clear-queue']:
                    case_in_queue = self.on_clear_case_queue()
                    send_mail_smtp_without_login(self.mail_to_list, 'DONE-DasH:Clear Case Queue',case_in_queue+support_list,self.mail_server,self.mail_from)
                    handled = True
                    #conn.uid('STORE', unread_mail_id, '+FLAGS', '\SEEN')
                elif sub in ['dash-request-run']:
                    #if from1 in [ '<EMAIL>',self.mail_to_list]:
                    # Mark seen first, then queue every non-comment body line.
                    conn.uid('STORE', unread_mail_id, '+FLAGS', r'(\SEEN)')
                    handled = True
                    #conn.uid('STORE', '-FLAGS', '(\Seen)')
                    payload = msg.get_payload()
                    payload = process_multipart_message(payload )
                    from lib.html2text import html2text
                    txt = html2text(payload)
                    cases = txt.replace('\r\n','\n').split('\n')
                    for line in cases:
                        line = line.strip()
                        if line.strip().startswith('#') or len(line)==0:
                            pass
                        else:
                            #done: replace lines below with a function
                            self.add_line_to_case_queue(line)
                    result,data = conn.fetch(unread_mail_id,'(RFC822)')#"(RFC822)")#
                else:
                    # Not a dash command: flag it unseen again so it stays visible.
                    conn.uid('STORE', unread_mail_id, '-FLAGS', r"(\SEEN)")
                #fixed : 2017-09-25 failed to set unmatched mail to unread, to fetch it again with RFC822
                if handled:
                    # Full fetch (no PEEK) makes the server mark the mail seen.
                    result,data = conn.fetch(unread_mail_id,'(RFC822)')#"(RFC822)")#
def check_case_type(self, str_line):
lex = shlex.shlex(str_line)
lex.quotes = '"'
lex.whitespace_split = True
script_name_and_args = list(lex)
script_name = script_name_and_args[0]
return script_name.lower().split('.')[-1],script_name_and_args[0] ,script_name_and_args[1:]
    def polling_request_via_mail(self):
        """Background loop: poll the mailbox for remote commands every 5 s."""
        try:
            while self.alive:
                time.sleep(5)
                try:
                    self.on_handle_request_via_mail()
                    self.mail_failure =False
                except Exception as e:
                    # Remember the failure so the UI can surface mail trouble.
                    self.mail_failure =True
        except :
            pass
        print('end polling_request_via_mail!!!')
        time.sleep(0.01)
def get_case_queue(self, item=None):
case_in_queue = list(self.case_queue.queue)
number_in_queue= len(case_in_queue)
if number_in_queue:
str_case_in_queue='\ntotal {} case(s) in Queue\n'.format(number_in_queue)+'\n'.join('{}'.format(x) for x in case_in_queue)
else:
str_case_in_queue='\nNo Case in Queue'
info('Case(s) in Queue', str_case_in_queue)
return str_case_in_queue
    def on_clear_case_queue(self, event=None):
        # Snapshot the queue listing before clearing so the caller can report
        # exactly which cases were removed; then log the (now empty) state.
        case_in_queue = self.get_case_queue(None)
        self.case_queue.queue.clear()
        self.get_case_queue(None)
        return case_in_queue
def on_kill_running_case(self,event=None):
killed_case= ''
for case in self.dict_test_report:
case_name,start_time, end_time, duration, return_code, proc, log_path = self.dict_test_report[:7]
if return_code is None:
if proc.poll() is None:
killed_case+='{}:{}\n'.format(case_name, proc.pid)
info('Terminate alive process {}:{}'.format(case_name, proc.pid))
result ='KILL'
self.update_case_status(proc.pid, result)
proc.terminate()
info('Killed All Running cases', killed_case)
return killed_case
def run_a_test_suite(self, csv_file_name, clear_queue=False, kill_running =False):
try:
case_type, suite_file_name, args =self.check_case_type(csv_file_name)
if clear_queue:
self.on_clear_case_queue()
if kill_running:
self.on_kill_running_case()
import csv
if suite_file_name.find(os.path.sep)!=-1:
pass
else:
suite_file_name= '{}/{}'.format(self.suite_path,suite_file_name)
with open(suite_file_name) as bench:
reader = csv.reader(bench,delimiter=',')
for row in reader:
if len(row)<1:
continue
else:
name = row[0]
args.insert(0,0)
for index in range(1,len(args)):
name =name.replace('{{index}}'.format(index =index), '{}'.format(args[index]))
self.case_queue.put(name)
info('adding case to queue: {}'.format(name))
except Exception as e:
error(traceback.format_exc())
    def web_server_start(self):
        """Run a small threaded HTTP server exposing cases, suites, logs and reports.

        GET serves a browsable view of the suite/session/log folders; POST
        triggers case or suite execution through add_line_to_case_queue.
        Blocks in serve_forever(), so this must run on its own thread.
        (Python 2 modules: SocketServer / BaseHTTPServer / urllib.)
        """
        from SocketServer import ThreadingMixIn
        from BaseHTTPServer import HTTPServer,BaseHTTPRequestHandler
        import cgi , urllib#StringIO
        class HttpHandler(BaseHTTPRequestHandler):
            # Class-level snapshots of the frame's state, captured at server
            # start (request handlers hold no reference to the DasH frame).
            runner_proc =self.add_line_to_case_queue
            root = os.path.dirname(__file__)+ '/html/'
            home = root
            suite_path = self.suite_path
            log_path = self.log_path
            session_path = self.session_path
            def __del__(self):
                #self.hdrlog.close()
                #print('end http server')
                pass
            def list_dir(self, path, related_path, pattern=['']):
                """Helper to produce a directory listing (absent index.html).
                Return value is either a file object, or None (indicating an
                error). In either case, the headers are sent, making the
                interface the same as for send_head().
                """
                content =""
                try:
                    list = os.listdir(path)
                except os.error:
                    self.send_error(404, "No permission to list directory")
                    return ""
                list.sort(key=lambda a: a.lower())
                #f = StringIO()
                displaypath = cgi.escape(urllib.unquote(self.path))
                content='<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">'
                content+="<html>\n<title>Directory listing for %s</title>\n" % displaypath
                content+="<body>\n<h2>Directory listing for %s</h2>\n" % displaypath
                content+="<hr>\n<ul>\n"
                content+='''<SCRIPT>
                function post( id, script, dest )
                {
                    element = document.getElementById(id);
                    value = element.value
                    params = 'script='+encodeURI(script)+'&arg='+encodeURI(value)
                    var xmlhttp;
                    if (window.XMLHttpRequest)
                    {// code for IE7+, Firefox, Chrome, Opera, Safari
                        xmlhttp=new XMLHttpRequest();
                    }
                    else
                    {// code for IE6, IE5
                        xmlhttp=new ActiveXObject('Microsoft.XMLHTTP');
                    }
                    xmlhttp.onreadystatechange=function()
                    {
                        if (xmlhttp.readyState==4 && xmlhttp.status==200)
                        {
                            alert(xmlhttp.responseText);
                            newHTML( xmlhttp.responseText);
                            setTimeout("window.close()",3000);
                        }
                    }
                    xmlhttp.open("POST",dest,true);
                    xmlhttp.setRequestHeader("Content-type","application/x-www-form-urlencoded");
                    xmlhttp.send( params );
                }
                function newHTML(HTMLstring) {
                    //var checkitem = mygetCheckedItem();
                    //HTMLstring=post( 'manualtest','/cgi-bin/onSUTLIST.py', 'bedname='+encodeURI(checkitem) );
                    var newwindow=window.open();
                    var newdocument=newwindow.document;
                    newdocument.write(HTMLstring);
                    newdocument.close();
                }
                </SCRIPT>
                <table>'''
                for name in list:
                    # Filter the listing by extension unless pattern is a wildcard.
                    extension = os.path.basename(name).split('.')[-1]
                    if pattern in ['', '*', '*.*']:
                        pass
                    elif extension in pattern:
                        pass
                    else:
                        continue
                    fullname = os.path.join(path, name)
                    displayname = linkname = name
                    # Append / for directories or @ for symbolic links
                    if os.path.isdir(fullname):
                        displayname = name + "/"
                        linkname = name + "/"
                    if os.path.islink(fullname):
                        displayname = name + "@"
                        # Note: a link to a directory displays with @ and links with /
                    input_button =""
                    filename = urllib.quote(linkname)
                    if not related_path.endswith('/'):
                        related_path+='/'
                    fullfilename =related_path+urllib.quote(linkname)
                    # Files under /case and /suite get an inline args box and a
                    # Run button wired to the POST handler.
                    if related_path.startswith('/case') and os.path.isfile(fullname):
                        input_button = """<input id=%s name="ARGS" style="width:200" type="text" value="" rows="1" autocomplete="on"/>
                        <input name="go" value="Run" type="button" onclick="post('%s','%s', 'RunCase');return false";/>"""%(filename,filename,fullfilename)
                    elif related_path.startswith('/suite') and os.path.isfile(fullname):
                        input_button = """<input id=%s name="ARGS" style="width:200" type="text" value="" rows="1" autocomplete="on"/>
                        <input name="go" value="Run" type="button" onclick="post('%s','%s', 'RunSuite');return false";/>
                        </td></tr>\n"""%(filename,filename,fullfilename)
                    content+='<tr><td><a href="%s">%s</a></td><td>'% (related_path+urllib.quote(linkname), cgi.escape(displayname))+input_button
                content+="</table></ul>\n<hr>\n</body>\n</html>\n"
                return content
            def array2htmltable(self,Array):
                # Render a 2-D list as a numbered HTML table.
                content = "<table border='1' align='left' width=autofit >"
                for index , sublist in enumerate( Array):
                    content += ' <tr><td>\n%d</td><td>'%(index+1)
                    content += ' </td><td>'.join([x if x!='' else ' ' for x in sublist ])
                    content += ' \n</td></tr>\n'
                content += ' \n </table><br>'
                return content
            def show_content_by_path(self, path, type='csv'):
                # Serve a file (csv rendered as a table, others line by line) or,
                # for a directory, a filtered listing; wrapped in a nav header.
                header = '''
                <table border="0" align='center' width="100%" >
                <tr> <td align=center valign=middle><a href="/">Back to DasH</a></td> </tr>
                </table>'''
                footer = header
                if os.path.isfile(path):
                    indexpage= open(path)
                    encoded=indexpage.read()
                    html = []
                    for line in encoded.split('\n'):
                        html.append('<p>%s</p>'%line.replace('\r', '').replace('\n',''))
                    encoded= ''.join(html)
                    if type in ['csv']:
                        ar =[]
                        for line in html:
                            row = line.split(',')
                            ar.append(row)
                        encoded = self.array2htmltable(ar)
                    # elif type in ['py']:
                    #     ar =[]
                    #     for line in html:
                    #         row = line.split(',')
                    #         ar.append(row)
                    #     encoded = self.array2htmltable(ar)
                else:
                    encoded =self.list_dir(path, self.path, type)
                    #encoded = "<html>{}</html>".format(cgi.escape(encoded))
                encoded =header+encoded.replace('\t', ' ').replace(' ', ' ') + footer
                return encoded
            def do_GET(self):
                """Route GET by path prefix: /, /home, /sessions, /case, /suite, /log, /report."""
                root = self.root
                home = self.home
                suite_path = self.suite_path
                log_path = self.log_path
                response = 200
                type = 'text/html'
                if self.path=='/':
                    indexpage= open(home+ 'index.html', 'r')
                    encoded=indexpage.read()
                    encoded = encoded.encode(encoding='utf_8')
                elif self.path =='/favicon.ico':
                    indexpage= open(home+'dash.bmp', 'r')
                    encoded=indexpage.read()
                    type = "application/x-ico"
                elif self.path=='/home':
                    path = os.path.abspath(self.suite_path)
                    encoded =self.list_dir(path, './')
                elif self.path.startswith('/sessions'):
                    path = os.path.abspath(self.session_path)
                    path = path+ self.path[9:]#replace('/log/','/')
                    encoded = self.show_content_by_path(path)
                elif self.path.startswith('/case'):
                    path = os.path.abspath(self.suite_path)
                    path = path+ self.path[5:]#replace('/log/','/')
                    encoded = self.show_content_by_path(path, 'py')
                elif self.path.startswith('/suite'):
                    path = os.path.abspath(self.suite_path)
                    path = path+ self.path[6:]#replace('/log/','/')
                    encoded = self.show_content_by_path(path, 'csv')
                elif self.path.startswith('/log'):
                    path = os.path.abspath(self.log_path)
                    print(path)
                    path = path+ self.path[4:]#replace('/log/','/')
                    encoded = self.show_content_by_path(path, '*')
                elif self.path.startswith('/report'):
                    path = os.path.abspath(self.log_path)
                    print(path)
                    path = path+ self.path[7:]#replace('/report/','/')
                    encoded = self.show_content_by_path(path, 'html')
                else:
                    path = os.path.abspath(root)
                    path = path+ self.path.replace('//','/')
                    if os.path.isfile(path):
                        from lib.common import csvfile2array
                        arrary = csvfile2array(path)
                        encoded = self.array2htmltable(arrary)
                    else:
                        encoded =self.list_dir(path, self.path)
                self.send_response(200)
                self.send_header("Content-type", type)
                self.end_headers()
                self.wfile.write(encoded)
            def LoadHTMLPage(self, filename, replace=[], Pattern4ESCAPE1='#NOTEXISTPATTERN_HERE_FOR_STRING_FORMAT1#',Pattern4ESCAPE2='#NOTEXISTPATTERN_HERE_FOR_STRING_FORMAT2#'):
                # Fill an HTML template: temporarily escape literal %s/% so only
                # the intended placeholders are substituted, one per item.
                indexpage= open(filename, 'r')
                encoded=indexpage.read()
                encoded =encoded.replace('%s',Pattern4ESCAPE1 )
                encoded =encoded.replace('%',Pattern4ESCAPE2 )
                encoded =encoded.replace(Pattern4ESCAPE1,'%s' )
                for item in replace:
                    encoded =encoded.replace('%s', item, 1)
                encoded =encoded.replace(Pattern4ESCAPE2, '%' )
                return encoded
            def RunScript(self, script, args=None):
                # Queue the script for execution and echo the queue message back.
                if not args:
                    args =''
                exe_cmd = '%s %s'%(script,args)
                print('Run Script:'+exe_cmd)
                encoded = self.runner_proc(exe_cmd)
                #encoded ='run{}'.format(exe_cmd)
                self.send_response(200)
                self.send_header("Content-type", "text/html")#; charset=%s" % enc)
                self.end_headers()
                self.wfile.write(encoded)
            def ParseFormData(self, s):
                # Minimal multipart/form-data parser: returns (field name,
                # uploaded file name, file data) or None when s doesn't match.
                import re
                reP = re.compile('^(-+[\d\w]+)\r\n(.+)-+[\d\w]+-*', re.M|re.DOTALL)
                #s = '''-----------------------------186134213815046583202125303385\r\nContent-Disposition: form-data; name="fileToUpload"; filename="case1.csv"\r\nContent-Type: text/csv\r\n\r\n,ACTION,EXPECT,TIMEOUT,CASE OR COMMENTS\n[case1],,,,\n#var,\ncmd,${5}\ncmd2,${cmd2}\n#setup,,,,\ntel,pwd,],10\ntel,ls,],10,\n,ls,],10,\ntel,${cmd},],10,\n,${cmd2},],10,\n#!---,,,,\n\n\r\n-----------------------------186134213815046583202125303385--\r\n'''
                #rs = re.escape(s)
                rs =s
                m = re.match(reP, rs)
                print(rs)
                if m:
                    print('match!')
                    boundary = m.group(1)
                    print(m.group(2))
                    c = m.group(2)
                    index =c.find(boundary)
                    if index ==-1:
                        pass
                    else:
                        c = c[:index]
                    l = c.split('\r\n')
                    print(l)
                    attribute=l[0].split('; ')
                    da={}
                    la =attribute[0].split(':')
                    da.update({la[0]:la[1]})
                    for a in attribute[1:]:
                        la=a.split('=')
                        da.update({la[0]:la[1].replace('"','').replace('\'','')})
                    data = '\r\n'.join(l[3:-1])
                    filename = da['filename']
                    # Strip any client-side directory prefix from the file name.
                    if filename.find('\\')!=-1:
                        filename=filename[filename.rfind('\\')+1:]
                    else:
                        filename=filename[filename.rfind('/')+1:]
                    return (da['name'],filename,data)
                else:
                    print('not match')
                    return None
            def do_POST(self):
                """Handle RunCase/RunSuite posts; fall back to form-data upload parsing."""
                content_len = int(self.headers['Content-Length'])
                #self.queryString
                self.path
                s = self.rfile.read(content_len)
                encoded=''
                try:
                    s=str(s)
                    import urlparse
                    req = urlparse.parse_qs(urlparse.unquote(s))
                    strip_char_length = 6 #for case
                    if self.path.startswith('/RunSuite'):
                        strip_char_length = 7
                    elif self.path.startswith('/RunCase'):
                        strip_char_length = 6
                    script = '{}/{}'.format(self.suite_path, req['script'][0][strip_char_length:])#remove the starting string /case/ or /suite/
                    if req.has_key('arg'):
                        arg= req['arg'][0]
                    else:
                        arg = ''
                    executefile =''
                    cmd_line = script+ ' '+ arg
                    encoded=self.runner_proc(cmd_line)
                    #print(encoded)
                    encoded = encoded.encode(encoding='utf_8').replace('\t', ' ').replace('\n','')
                    self.send_response(200)
                    self.send_header("Content-type", "text/html")#; charset=%s" % enc)
                    self.end_headers()
                    self.wfile.write(encoded)
                except Exception as e:
                    # Not a query-string post: try multipart upload, then a raw
                    # "POST <script> HTTP" request line as a last resort.
                    import traceback
                    print(traceback.format_exc())
                    response = self.ParseFormData(s)
                    if response:
                        type, filename, data =response
                        encoded = self.onUploadFile(type, filename, data)
                    else:
                        encoded ='ERROR: %s, Can\'t parse Form data: %s'%(str(e),s)
                        encoded= encoded.encode(encoding='utf_8')
                    try:
                        requestline = self.requestline
                        import re
                        reScript=re.compile('POST\s+(.+)\s+HTTP.*', re.DOTALL)
                        m= re.match(reScript, requestline)
                        if m:
                            returncode =self.RunScript(m.group(1),[])
                            encoded ='script %s completed with return code %d!'%(m.group(1), returncode)
                    except Exception as e:
                        encoded ='can\'t run script!'
                    encoded = encoded.encode(encoding='utf_8', errors='strict')
                    # self.send_response(200)
                    # self.send_header("Content-type", "text/html")#; charset=%s" % enc)
                    # self.end_headers()
                    # self.wfile.write(encoded)
        port =self.web_port
        home = __file__ #sys.argv[0]
        if os.path.exists(home):
            home = os.path.dirname(home)
        root = home
        home = home +'/html/'
        #done move runWebserver to DasH, and launch it at dash initialization
        class ThreadingHttpServer(ThreadingMixIn, HTTPServer):
            pass
        httpd=ThreadingHttpServer(('',port), HttpHandler)
        from socket import socket, AF_INET, SOCK_DGRAM, gethostname,SOL_SOCKET, SO_REUSEADDR, getfqdn#*
        try:
            # Discover this host's outbound IP by "connecting" a UDP socket
            # (no packet is actually sent), then show it in the window title.
            hostip=''
            s = socket(AF_INET, SOCK_DGRAM)
            s.bind(("", 1234))
            #sq = socket(AF_INET, SOCK_DGRAM)
            s.connect(("10.0.0.4", 1234))
            domain = getfqdn()
            hostip = s.getsockname()[0]
            self.web_host = hostip
            self.SetTitle('DasH-{}:{}'.format(self.web_host, self.web_port))
            s.close()
        except Exception as e:
            import traceback
            msg = traceback.format_exc()
            print(msg)
        hostname =gethostname()
        info("Server started on %s (%s),port %d....."%(hostname,hostip,port))
        #print('Process ID:%d'%os.geteuid())
        self.web_daemon= httpd
        on=1
        self.web_daemon.socket.setsockopt(SOL_SOCKET, SO_REUSEADDR, on)
        httpd.serve_forever()
        try:
            s.close()
        except:
            pass
def add_line_to_case_queue(self,line):
type_case, case_name, args = self.check_case_type(line)
if type_case in ['txt','csv']:
self.run_a_test_suite(line)
else:
self.case_queue.put(line)
return info('adding case to queue: {}'.format(line))
def OnMouseMotion(self, evt):
try:
active_page = self.navigator.GetCurrentPage()
pos = self.case_suite_page.ScreenToClient(wx.GetMousePosition())
item_index, flag = active_page.HitTest(pos)
item_data = active_page.GetItemData(item_index)
tip = active_page.GetToolTip()
if item_data :
if item_data.Data.has_key('tip'):
active_page.SetToolTipString(item_data.Data['tip'])
else:
from pprint import pformat
tip_string = pformat(item_data.Data)
active_page.SetToolTipString(tip_string)
if False:
if flag == wx.LIST_HITTEST_ONITEMLABEL:
active_page.SetToolTipString('Some information about ' + self.case_suite_page.GetItemText(item_index))
else:
active_page.SetToolTipString('')
except Exception as e:
pass
evt.Skip()
    def on_keyboard_key_down(self,event):
        # Let the default key handling proceed; no custom shortcuts here yet.
        event.Skip()
    @gui_event_decorator.gui_even_handle
    def on_generate_code(self, event):
        # Dump the generated test code to a timestamped file in the suite
        # folder (':' and '.' are replaced -- not valid in Windows file names).
        self.generate_code('{}/test_code_{}.py'.format(self.suite_path, datetime.now().isoformat().replace(':','-').replace('.','-')))
    def on_right_up_over_tab_in_edit_area(self, event):
        """Right-click on a session tab: reopen that session on a worker thread."""
        if self.edit_area.GetPageText(self.edit_area.GetSelection())=="*LOG*":
            # The log tab is not a session -- nothing to reopen.
            return
        x = event.GetEventObject()
        tabID = x.GetId()
        tab = x.FindWindowById(tabID)  # NOTE(review): computed but unused
        #session.session.open(retry, interval)
        #tab.open(3,15)
        # Open on a thread so a slow connection doesn't block the GUI.
        th =threading.Thread(target=self.edit_area.GetCurrentPage().open, args=[1, 5])
        #index = self.edit_area.GetCurrentPage().open(1, 60)
        th.start()
        event.Skip()
        #self.edit_area.SetSelection(index)
    def idle_process(self):
        # Background idle work, run off the GUI thread (see on_idle): poll the
        # mailbox for remote commands, then refresh case status.
        try:
            self.on_handle_request_via_mail()
            self.mail_failure =False
        except Exception as e:
            self.mail_failure =True
        try:
            self.check_case_status()
        except:
            pass
        #print('{} i\'m idle !!!!!!!!!!!!!!!!!!'.format(datetime.now().isoformat()))
    def on_idle(self,event):
        """Idle handler: sample log-scroll metrics and, at most every few
        seconds, kick off background mail/status polling on worker threads."""
        # print('helllo!{}, {}\n'.format( self.m_log.PositionToXY( self.m_log.GetScrollPos(wx.VERTICAL) )[1], self.m_log.PositionToXY( self.m_log.GetScrollRange(wx.VERTICAL))[1] ) )
        #self.freeze_thaw_main_log_window()
        #self.m_log_current_pos-=1
        #self.m_log.SetScrollPos(wx.VERTICAL, self.m_log_current_pos)
        if True:
            # Scroll-position bookkeeping for the redirected output window
            # (values only consumed by the disabled debug print below).
            self.out = self.redir.out
            current_pos = self.out.GetScrollPos(wx.VERTICAL)
            v_scroll_range = self.out.GetScrollRange(wx.VERTICAL)
            char_height = self.out.GetCharHeight()
            w_client,h_client = self.out.GetClientSize()
            max_gap=h_client*2/char_height/3
            c_col, c_line = self.out.PositionToXY(current_pos)
            t_col, t_line = self.out.PositionToXY(v_scroll_range)
            current_insert = self.out.GetInsertionPoint()
            if False:
                tmp_msg ='\n insert {}, current_pos {} current first visible line {} column {} last line {}, colum {}\n'.format(current_insert, current_pos, c_line, c_col, t_line, t_col)
                self.redir.old_stdout.write(tmp_msg)
                #self.redir.old_stdout.write('current {}, range {}, t_line {}, c_line {}, gap {}\n'.format(current_pos, v_scroll_range, t_line, c_line, t_line -c_line))
        now = datetime.now()
        max_idle=3
        # Throttle: run the heavy idle work at most once every max_idle seconds.
        if (now-self.last_time_call_on_idle).total_seconds()>max_idle:
            self.last_time_call_on_idle=now
            th=threading.Thread(target=self.idle_process, args=[])
            th.start()
            if self.updating_function_page is False:
                threading.Thread(target=self.check_whether_function_file_is_updated, args=[]).start()
    def on_m_log_text_changed(self, event):
        # Text-change hook for the main log; auto-freeze is disabled (see the
        # todo note in freeze_thaw_main_log_window).
        event.Skip()
        #self.freeze_thaw_main_log_window()
    def freeze_thaw_main_log_window(self):
        """Freeze the main log widget while the user has scrolled well above the
        end, and thaw it once the view is near the bottom again.

        Currently unused (see on_m_log_text_changed): per the todo below,
        freezing could leave the window permanently unresponsive when many
        session threads write to it concurrently.
        """
        c_col, c_line = self.m_log.GetPosition()
        #print('cline', c_line)
        v_scroll_range = self.m_log.GetLastPosition()#wx.VERTICAL
        char_height = self.m_log.GetCharHeight()
        w_client,h_client = self.m_log.GetClientSize()
        max_gap=h_client/char_height/3
        current_pos = self.m_log.GetScrollPos(wx.VERTICAL)#self.m_log.XYToPosition(c_col, c_line)
        c_col, c_line = self.m_log.PositionToXY(current_pos)
        t_col, t_line = self.m_log.PositionToXY(v_scroll_range)
        #string = "{}\ncurrent {}\t total {},max_gap {}, gap {}, range {}\n".format(string, c_line, t_line, max_gap,t_line-c_line, self.out.GetScrollRange(wx.VERTICAL))
        #todo: when mulit-threads(up-to 7~9 SessionTab opened) are writting to DasHFrame.m_log, the log window was frozen, can't be thawed, if disable .freeze_main_log_window, there is no 'no response' issue
        #so suspect it's freeze issue
        frozen = self.m_log.IsFrozen()
        if t_line - c_line>max_gap:
            # Viewer is far above the tail: pin the view and freeze repaints.
            if not frozen:
                self.m_log.SetInsertionPoint(self.m_log_current_pos)
                self.m_log.SetScrollPos(wx.VERTICAL, self.m_log_current_pos)
                self.m_log.Freeze()
                #self.m_log_current_pos = current_pos#self.m_log.GetScrollPos(wx.VERTICAL)#current_pos
            self.m_log.SetInsertionPoint(self.m_log_current_pos)
            self.m_log.SetScrollPos(wx.VERTICAL, self.m_log_current_pos)
            frozen=True
        else:
            # Near the tail: remember the end position and thaw if needed.
            self.m_log_current_pos= self.m_log.GetScrollRange(wx.VERTICAL)
            #self.m_log.SetScrollPos(wx.VERTICAL, )
            if frozen:
                self.m_log.SetInsertionPoint(self.m_log_current_pos)
                self.m_log.SetScrollPos(wx.VERTICAL, self.m_log_current_pos)
                self.m_log.Thaw()
        #time.sleep(0.1)
@gui_event_decorator.gui_even_handle
def on_generate_test_report(self,event):
file_name='{}/dash_report_{}.html'.format(self.log_path, self.timestamp)
report = self.generate_report(filename=file_name)#'{}/dash_report_{}.html'.format(self.log_path, self.timestamp))
report = 'http://{}:{}/log/{}\n{}'.format(self.web_host, self.web_port, file_name.replace(self.log_path, ''),report)
print(report)
    @gui_event_decorator.gui_even_handle
    def on_leftD_click_url_in_m_log(self, event):
        # Open a URL from the log window in the default browser on double-click.
        #print(urlString)
        mouseEvent = event.GetMouseEvent()
        if mouseEvent.LeftDClick():
            urlString = self.m_log.GetRange(event.GetURLStart(),event.GetURLEnd())
            webbrowser.open(urlString)
        event.Skip()
#done: 2017-08-22, 2017-08-19 save main log window to a file
#done: 2017-08-19 add timestamps to log message
#done: 2017-08-22, 2017-08-19 mail to someone
#done: 2017-08-19 run a script in DasH
#done: 2017-08-19 generate test report
#done: 2017-10-7 2017-08-19 publish all test cases in a web page
#done: 2017-10-7 2017-08-19 trigger a test remote via web page
#todo: 2017-08-19 re-run failed cases
#done: 2017-08-19 build executable packege for DasH
#todo: 2017-08-19 a popup window to get email address/password/mail_server...
#done: 2017-08-22 output in m_log window has a lot of empty line, need remove them
#todo: 2017-08-23 in common.call_function_in_module, should end all threads which are started in previous instance
#done: 2017-10-7 2017-08-23 add tips for all tree item in teh left
#done: 2017-10-7 2017-09-30 failed to send command to a session whose name start with numbers e.g. 1_session
# Traceback (most recent call last):
# File "C:/workspace/gDasH\gui\DasHFrame.py", line 588, in on_command_enter
# instance_name, function_name, new_argvs, new_kwargs, str_code = call_function_in_module(module,class_name,function,args, globals())
# File "C:/workspace/gDasH\lib\common.py", line 153, in call_function_in_module
# eval('GetFunArgs({args})'.format(args=args_string))
# File "<string>", line 1
# GetFunArgs(35b)
# ^
# SyntaxError: invalid syntax
#canceled: start thread for all gui event handlers with decoration, catch all exceptions, ###no need to do that
#done: mark red for all strings who match error patterns in "*LOG*", m_log
#fix: generate test report right after case completed (failed or passed)
#todo: change session json file format to "cmd\toutput_string"
4909615 | # -*- coding: utf-8 -*-
"""provides sequencing fetching from NCBI and Ensembl
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import os
import re
import bioutils.seqfetcher
from ..exceptions import HGVSDataNotAvailableError
logger = logging.getLogger(__name__)
class SeqFetcher(object):
    """Mixin that fetches sequences for HGVS data providers lacking them.

    When the HGVS_SEQREPO_DIR environment variable is set, sequences are
    served from a local SeqRepo instance; otherwise they are fetched
    remotely through bioutils.seqfetcher.

    >> sf = SeqFetcher()
    >> sf.fetch_seq('NP_056374.2',0,10)
    'MESRETLSSS'
    """

    def __init__(self):
        seqrepo_dir = os.environ.get("HGVS_SEQREPO_DIR")
        if not seqrepo_dir:
            # No local repository configured: fall back to remote fetching.
            self.fetcher = bioutils.seqfetcher.fetch_seq
            self.source = "remote (bioutils.seqfetcher)"
            logger.info("Using remote sequence fetching")
            return
        from biocommons.seqrepo import SeqRepo
        sr = SeqRepo(seqrepo_dir)

        def _fetch_seq_seqrepo(ac, start_i=None, end_i=None):
            return sr.fetch(ac, start_i, end_i)

        self.fetcher = _fetch_seq_seqrepo
        self.source = "local (SeqRepo)"
        logger.info("Using SeqRepo({}) sequence fetching".format(seqrepo_dir))

    def fetch_seq(self, ac, start_i=None, end_i=None):
        """Return the (sliced) sequence for accession *ac*.

        Raises HGVSDataNotAvailableError when the underlying fetcher cannot
        supply the sequence.
        """
        try:
            return self.fetcher(ac, start_i, end_i)
        except (RuntimeError, KeyError) as ex:
            raise HGVSDataNotAvailableError("No sequence available for {ac} ({ex})".format(ac=ac, ex=ex))
# <LICENSE>
# Copyright 2018 HGVS Contributors (https://github.com/biocommons/hgvs)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# </LICENSE>
| StarcoderdataPython |
9649675 | <reponame>RemainAplomb/OS-Simulator-using-Python-Tkinter
"""
Group Members:
- Abaño, <NAME>
- <NAME>
- Dibansa, Rahmani
- Palattao, <NAME>
Program Description:
- This is a program which mimics and represents the Priority Process Management.
Program Algorithm:
1. Initialize the program and create the tkinter GUI
2. Ask for user input in input1_window of pty_frontEnd.
3. Once user has successfully entered an acceptable input, send the user's input to the backEnd class.
4. Use the backEnd class to generate the data for gantt chart. In addition, compute for ATA, AWT, and CPU Utilization.
5. Use the ganttChart_window of the frontEnd class to display the results.
6. Terminate program if exit button is pressed.
General Program Flow:
input1_window -> generate gantt chart using backEnd class -> display results
pty_backEnd:
insert_inputs: for taking in the user's input into the backEnd class.
get_ganttChart: returns the list containing the data for gantt chart.
get_caa: returns the list containing CPU Utilization, ATA, AWT.
generate_ganttChart: for generating the data for gantt chart.
pty_frontEnd:
        (clearNodes/addResultNode were carried over from another simulator's
        documentation; no such helpers exist in this module.)
current_date: sets the current time into the date label.
tick: for updating the clock label.
isNotInteger: This function returns True if the integerInput is not an Integer.
clearWidgets: tries to clear/destroy all of the widgets.
clearWidgetList: destroys a certain group of widgets.
displayChart: This function displays the necessary widgets for gantt chart.
input1_window: the window which takes in the user's input.
input1_computeBTN_Pressed: executes once the user clicks the compute button.
result1_window: the window which displays the gantt chart.
"""
# importing modules
from tkinter import*
from tkinter import ttk
from array import*
from tkinter import messagebox
import csv
import math
import sys
import os
from PIL import Image, ImageTk
global listbox
# end of module importing
# For the backEnd
import datetime
import time
from copy import deepcopy
# getting current directory of the app
# (used below to locate the bundled image resources)
try:
    currentDirectory = os.getcwd()
    ###Started(currentDirectory)
except:
    # NOTE(review): a failed getcwd() leaves currentDirectory undefined, so
    # the PhotoImage line below would raise NameError anyway — confirm this
    # guard is still needed.
    print ( " Error : Cannot find the Current Directory. " )
# end of getting current directory
# creating the tkinter root that will accommodate the UI
root = Tk()
root.title ( "Priority Process Management" )
#
# Resources
# -> Background
# NOTE(review): backslash-joined path assumes Windows; os.path.join would be
# portable.
bg9 = PhotoImage(file = currentDirectory + "\\resources\\background\\bg9.png" )
# *End of resources code block
# This class contains all the back end processes/computations.
class pty_backEnd:
    """Back end of the Priority Process Management simulator.

    Simulates preemptive priority scheduling one time unit at a time (a
    lower priority number means a higher priority), then summarises the
    run as gantt-chart segments plus CPU utilization, average turnaround
    time (ATA) and average waiting time (AWT).
    """

    def __init__( self ):
        # Start primed with the same demo table the input window shows,
        # so results can be rendered before the user submits anything.
        self.insert_inputs()

    # For accepting in the user's input that will be utilized by the backend class.
    def insert_inputs( self, processDetails = None ):
        """Load a process table and reset all per-run bookkeeping.

        processDetails maps a 1-based row number to
        [ processNum, burstTime, arrivalTime, priorityNum ]; passing None
        loads the built-in demo table.
        """
        if processDetails is None:
            # demo table rows: [ processNum, burstTime, arrivalTime, priorityNum ]
            self.processDetails = { 1 : [ "P1", 12, 4, 2 ],
                                    2 : [ "P2", 10, 5, 1 ],
                                    3 : [ "P3", 5, 10, 4 ],
                                    4 : [ "P4", 7, 7, 3 ] }
        else:
            self.processDetails = processDetails
        # AWT bookkeeping: processNum -> [ -arrival, start, -finish, ... ].
        # Summing the list (after dropping a trailing -finish) yields the
        # total time the process spent waiting in the ready queue.
        self.awtInfo_dic = {}
        # process_queue rows: [ processNum, burstTime, arrivalTime, priorityNum, timeRemaining ]
        self.process_queue = []
        self.numOfProcess = 0
        for i in range( 1, len( self.processDetails ) + 1 ):
            self.numOfProcess += 1
            row = self.processDetails[i]
            self.process_queue.append( [ row[0], row[1], row[2], row[3], row[1] ] )
            self.awtInfo_dic[row[0]] = [ -int( row[2] ) ]

    # returns the ganttChart list
    def get_ganttChart( self ):
        """Return the segment list built by the last generate_ganttChart() call."""
        return self.ganttChart

    # returns the caa list which contains CPU Utilization, ATA, AWT.
    def get_caa( self ):
        """Return [ cpuUtilization, ATA, AWT ] from the last generate_ganttChart() call."""
        return self.caa

    def generate_ganttChart( self ):
        """Simulate the schedule and build self.ganttChart and self.caa.

        Each gantt segment is
        [ start, finish, processNum, finish, duration, color, percentOfRun ].
        """
        self.ganttChart = []
        # sort by arrival time and simulate on a deep copy so reruns of the
        # same inputs stay independent
        self.process_queue = sorted( self.process_queue, key=lambda x: x[2] )
        self.tempProcess_queue = deepcopy( self.process_queue )
        self.currentTime = 0
        # tick -> owner of the CPU for that time unit ("Idle" or a processNum)
        self.tempResult_dic = {}
        # processNum -> [ finishTime, arrivalTime ]; finishTime is rewritten
        # on every tick the process runs, so the last write is the real finish
        self.ataInfo_dic = {}
        self.isFinished = False
        self.prevProcess = -1
        while not self.isFinished:
            # waitingList rows: [ processNum, index into tempProcess_queue, priorityNum ]
            self.waitingList = []
            # note every process that has already arrived at this tick
            for i in range( len( self.tempProcess_queue ) ):
                if self.currentTime >= self.tempProcess_queue[i][2]:
                    self.waitingList.append( [ self.tempProcess_queue[i][0],
                                               i,
                                               self.tempProcess_queue[i][3] ] )
            try:
                if len( self.waitingList ) == 0 and len( self.tempProcess_queue ) != 0:
                    # nothing has arrived yet: burn one idle tick
                    self.tempResult_dic[self.currentTime] = "Idle"
                    self.currentTime += 1
                else:
                    # smallest priority number wins the CPU for this tick
                    self.waitingList = sorted( self.waitingList, key=lambda x: x[2] )
                    self.currentProcess = self.waitingList[0][1]
                    self.tempProcess_queue[self.currentProcess][4] -= 1
                    self.tempResult_dic[self.currentTime] = self.tempProcess_queue[self.currentProcess][0]
                    self.currentTime += 1
                    self.ataInfo_dic[self.waitingList[0][0]] = [ self.currentTime,
                                                                 self.tempProcess_queue[self.currentProcess][2] ]
                    if self.tempProcess_queue[self.currentProcess][4] <= 0:
                        # the process just finished: retire it
                        self.tempProcess_queue.pop( self.currentProcess )
                        if len( self.tempProcess_queue ) == 0:
                            break
            except Exception:
                # defensive stop (was a bare `except: break` originally)
                break
        self.prevProcess = "N/A"
        self.currentProcess = "N/A"
        self.tempColors = { "Idle" : "#bfbaac",
                            "P1" : "#f77777",
                            "P2" : "#f7d977",
                            "P3" : "#77f7e6",
                            "P4" : "#77d5f7",
                            "P5" : "#d577f7",
                            "EXTRA" : "#fcba03" }
        # Collapse the per-tick ownership map into contiguous gantt segments.
        self.idleTime = 0
        for time in list( self.tempResult_dic ):
            self.time = time
            self.currentProcess = self.tempResult_dic[time]
            if self.currentProcess == "Idle":
                self.idleTime += 1
            if self.prevProcess != "N/A":
                if self.currentProcess != self.prevProcess:
                    # the CPU owner changed: close the segment that just ended
                    try:
                        self.ganttChart.append( [ self.startTime,
                                                  self.time,
                                                  self.tempResult_dic[self.time - 1],
                                                  self.time,
                                                  self.time - self.startTime,
                                                  self.tempColors[self.tempResult_dic[self.time - 1]] ] )
                        if self.tempResult_dic[self.time - 1] in self.awtInfo_dic:
                            self.awtInfo_dic[self.tempResult_dic[self.time - 1]].append( self.startTime )
                            self.awtInfo_dic[self.tempResult_dic[self.time - 1]].append( -self.time )
                    except Exception:
                        pass
                    self.startTime = self.time
            else:
                self.startTime = self.time
            self.prevProcess = self.currentProcess
        # close the final segment (the loop above only closes on a change)
        self.time += 1
        self.ganttChart.append( [ self.startTime,
                                  self.time,
                                  self.tempResult_dic[self.time - 1],
                                  self.time,
                                  self.time - self.startTime,
                                  self.tempColors[self.tempResult_dic[self.time - 1]] ] )
        if self.tempResult_dic[self.time - 1] in self.awtInfo_dic:
            self.awtInfo_dic[self.tempResult_dic[self.time - 1]].append( self.startTime )
            self.awtInfo_dic[self.tempResult_dic[self.time - 1]].append( -self.time )
        # append each segment's share of the whole run as a percentage
        for i in range( len( self.ganttChart ) ):
            self.ganttChart[i].append( float( self.ganttChart[i][4] ) / float( self.time ) * 100 )
        # average turnaround time = mean( finish - arrival )
        self.ata = 0.0
        for processNum in list( self.ataInfo_dic ):
            self.ata += ( self.ataInfo_dic[processNum][0] - self.ataInfo_dic[processNum][1] )
        # average waiting time: each list is [ -arrival, start, -finish, ... ];
        # dropping the dangling trailing -finish makes the sum equal the
        # total waiting time.
        self.awt = 0.0
        for processNum in list( self.awtInfo_dic ):
            # BUGFIX: the original tested `len(...) // 2 != 0`, which is true
            # for ANY list of length >= 2 (and false for a length-1 list of a
            # never-scheduled process). An odd length is what actually marks
            # a dangling -finish entry, so test parity with `% 2`.
            if len( self.awtInfo_dic[processNum] ) % 2 != 0:
                self.awtInfo_dic[processNum].pop( -1 )
            self.awt += sum( self.awtInfo_dic[processNum] )
        # The final computations for cpuUtilization, ata, and awt
        self.cpuUtilization = round( ( 1 - ( self.idleTime / self.time ) ) * 100, 2 )
        self.ata = round( self.ata / len( self.ataInfo_dic ), 2 )
        self.awt = round( self.awt / len( self.awtInfo_dic ), 2 )
        # [ CPU utilization %, avg turnaround, avg waiting ] shown by the UI
        self.caa = [ self.cpuUtilization,
                     self.ata,
                     self.awt ]
# This contains all the necessary functions for the frontEnd.
# This mostly contains widget placements.
class pty_frontEnd:
    """Tkinter front end: collects the process table from the user, hands it
    to pty_backEnd, and renders the resulting gantt chart plus the CPU
    utilization / ATA / AWT summary on the shared module-level `root`."""

    def __init__( self ):
        # Prime a back end with its demo data so a result window could be
        # shown even before the user submits anything.
        self.backEnd = pty_backEnd()
        self.backEnd.generate_ganttChart()
        self.ganttChart = self.backEnd.get_ganttChart()
        self.caa = self.backEnd.get_caa()
        #self.caa = self.backEnd.get_caa()

    # For getting the current date
    def current_date( self ):
        self.dateString = datetime.date.today().strftime("%B %d, %Y")
        self.dateLBL.config(text = self.dateString)

    # This updates the clock widget
    # (re-schedules itself every 200 ms until self.tick_on is cleared)
    def tick( self ):
        if self.tick_on:
            self.timeString = time.strftime("%H:%M:%S")
            self.clockLBL.config(text = self.timeString)
            self.clockLBL.after(200, self.tick )
        else:
            pass

    # This function returns True if the integerInput is not an Integer.
    # If it is an integer, return False.
    def isNotInteger( self, integerInput):
        try:
            self.intTest = int(integerInput)
            return False
        except ValueError:
            return True

    # The program has two list which contains a reference to all the program's widgets
    # And what this function does is it tries to clear/destroy all of these widgets
    # using the lists which contains the program's widgets.
    # The two lists are:
    #     - self.basicWidgetList: For most of the basic widgets
    #     - self.physicalMemWidgets: For the widgets used to display physical memory map
    def clearWidgets( self ):
        try:
            # stop the clock callback before its label is destroyed
            self.tick_on = False
            self.clearWidgetList( self.basicWidgetList )
            self.clearWidgetList( self.physicalMemWidgets )
        except:
            # the lists may not exist yet on the very first window build
            pass
        return

    # This function destroys all of the widgets inside the inputted widgetsToClear list.
    def clearWidgetList ( self, widgetsToClear):
        for widget in widgetsToClear:
            widget.destroy()

    # This function displays the necessary widgets for the gantt chart.
    # To get a general gist, the program has around 50 labels which acts as the gantt chart.
    # In addition, it has a text label which marks each section of the gantt chart
    # tempPercentage drives the segment width (one label per 2 percent);
    # tempTotalSize is the cumulative time printed under the segment's end.
    def displayChart( self, tempPointer, tempColor, tempText, tempPercentage, tempTotalSize ):
        self.tempPointer = int(tempPointer)
        self.tempColor = tempColor
        self.tempText = tempText
        self.tempPercentage = tempPercentage
        self.tempTotalSize = tempTotalSize
        if self.tempPercentage != 0:
            self.tempLBL = Label( root , text = " ", font = ('Times New Roman', 30), bg = self.tempColor)
            self.tempLBL.place(x = self.xCounter, y = 250)
            self.physicalMemWidgets.append( self.tempLBL )
            self.tempLBL = Label( root , text = self.tempText , font = ('Times New Roman', 10), bg = self.tempColor)
            self.tempLBL.place(x = self.xCounter, y = 220)
            self.physicalMemWidgets.append( self.tempLBL )
            self.xCounter += 10
        for i in range( int( self.tempPercentage/2 ) ):
            if self.tempPointer != 0:
                self.tempLBL = Label( root , text = " " , font = ('Times New Roman', 30), bg = self.tempColor)
                self.tempLBL.place(x = self.xCounter, y = 250)
                self.xCounter += 10
                self.physicalMemWidgets.append( self.tempLBL )
                self.tempPointer -= 1
            else:
                pass
        if self.tempPercentage != 0:
            # cumulative-time marker under the right edge of the segment
            self.tempLBL = Label( root , text = tempTotalSize , font = ('Times New Roman', 10), bg = "#c6e3ad")
            self.tempLBL.place(x = self.xCounter - 5, y = 310)
            self.physicalMemWidgets.append( self.tempLBL )
        return

    # This is the input window that will take in the user's inputs.
    # Five rows of Burst Time / Arrival Time / Priority entries; an "x" in a
    # row's entries appears to mark that row as unused (see compute handler).
    def input1_window( self ):
        self.clearWidgets()
        self.basicWidgetList = []
        self.bg9LBL = Label ( root , image = bg9, bg = "black" )
        self.bg9LBL.place(x = 0, y = 0)
        self.basicWidgetList.append( self.bg9LBL )
        self.clockLBL = Label( root , font = ('Times New Roman', 17), bg = "#4ec2c2" )
        self.clockLBL.place(x = 700, y = 70)
        self.tick_on = True
        self.tick()
        self.basicWidgetList.append( self.clockLBL )
        self.dateLBL = Label( root , font = ('Times New Roman', 17), bg = "#4ec2c2")
        self.dateLBL.place(x = 650, y = 25)
        self.current_date()
        self.basicWidgetList.append( self.dateLBL )
        self.title1LBL = Label( root , text = "Priority" , font = ('Times New Roman', 20), bg = "#4ec2c2")
        self.title1LBL.place(x = 150, y = 20)
        self.basicWidgetList.append( self.title1LBL )
        self.title2LBL = Label( root , text = "Process Management" , font = ('Times New Roman', 20), bg = "#4ec2c2")
        self.title2LBL.place(x = 75, y = 65)
        self.basicWidgetList.append( self.title2LBL )
        self.title3LBL = Label( root , text = "Input Window" , font = ('Times New Roman', 30), bg = "#c6e3ad")
        self.title3LBL.place(x = 370, y = 108)
        self.basicWidgetList.append( self.title3LBL )
        # Process Num
        self.processLBL = Label( root , text = "Process" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.processLBL.place(x = 170, y = 160)
        self.basicWidgetList.append( self.processLBL )
        self.process1LBL = Label( root , text = "1" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.process1LBL.place(x = 190, y = 210)
        self.basicWidgetList.append( self.process1LBL )
        self.process2LBL = Label( root , text = "2" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.process2LBL.place(x = 190, y = 260)
        self.basicWidgetList.append( self.process2LBL )
        self.process3LBL = Label( root , text = "3" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.process3LBL.place(x = 190, y = 310)
        self.basicWidgetList.append( self.process3LBL )
        self.process4LBL = Label( root , text = "4" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.process4LBL.place(x = 190, y = 360)
        self.basicWidgetList.append( self.process4LBL )
        self.process5LBL = Label( root , text = "5" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.process5LBL.place(x = 190, y = 410)
        self.basicWidgetList.append( self.process5LBL )
        # Burst Time
        self.burstTimeLBL = Label( root , text = "Burst Time" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.burstTimeLBL.place(x = 340, y = 160)
        self.basicWidgetList.append( self.burstTimeLBL )
        self.burstTime1ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.burstTime1ENTRY.place(x = 315, y = 210)
        self.basicWidgetList.append( self.burstTime1ENTRY )
        self.burstTime2ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.burstTime2ENTRY.place(x = 315, y = 260)
        self.basicWidgetList.append( self.burstTime2ENTRY )
        self.burstTime3ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.burstTime3ENTRY.place(x = 315, y = 310)
        self.basicWidgetList.append( self.burstTime3ENTRY )
        self.burstTime4ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.burstTime4ENTRY.place(x = 315, y = 360)
        self.basicWidgetList.append( self.burstTime4ENTRY )
        self.burstTime5ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.burstTime5ENTRY.place(x = 315, y = 410)
        self.basicWidgetList.append( self.burstTime5ENTRY )
        # Arrival Time
        self.arrivalTimeLBL = Label( root , text = "Arrival Time" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.arrivalTimeLBL.place(x = 505, y = 160)
        self.basicWidgetList.append( self.arrivalTimeLBL )
        self.arrivalTime1ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.arrivalTime1ENTRY.place(x = 490, y = 210)
        self.basicWidgetList.append( self.arrivalTime1ENTRY )
        self.arrivalTime2ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.arrivalTime2ENTRY.place(x = 490, y = 260)
        self.basicWidgetList.append( self.arrivalTime2ENTRY )
        self.arrivalTime3ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.arrivalTime3ENTRY.place(x = 490, y = 310)
        self.basicWidgetList.append( self.arrivalTime3ENTRY )
        self.arrivalTime4ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.arrivalTime4ENTRY.place(x = 490, y = 360)
        self.basicWidgetList.append( self.arrivalTime4ENTRY )
        self.arrivalTime5ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.arrivalTime5ENTRY.place(x = 490, y = 410)
        self.basicWidgetList.append( self.arrivalTime5ENTRY )
        # Priority Num
        self.priorityNumLBL = Label( root , text = "Priority #" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.priorityNumLBL.place(x = 690, y = 160)
        self.basicWidgetList.append( self.priorityNumLBL )
        self.priorityNum1ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.priorityNum1ENTRY.place(x = 665, y = 210)
        self.basicWidgetList.append( self.priorityNum1ENTRY )
        self.priorityNum2ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.priorityNum2ENTRY.place(x = 665, y = 260)
        self.basicWidgetList.append( self.priorityNum2ENTRY )
        self.priorityNum3ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.priorityNum3ENTRY.place(x = 665, y = 310)
        self.basicWidgetList.append( self.priorityNum3ENTRY )
        self.priorityNum4ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.priorityNum4ENTRY.place(x = 665, y = 360)
        self.basicWidgetList.append( self.priorityNum4ENTRY )
        self.priorityNum5ENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.priorityNum5ENTRY.place(x = 665, y = 410)
        self.basicWidgetList.append( self.priorityNum5ENTRY )
        self.computeBTN = Button ( root , text = 'Compute',command = self.input1_computeBTN_Pressed , font = ('Poppins', 16, 'bold'), width = 12, bg = "#659bdb" )
        self.computeBTN.place (x = 390 ,y = 470)
        self.basicWidgetList.append( self.computeBTN )
        self.exitBTN = Button ( root , text = 'Exit',command = root.destroy , font = ('Poppins', 16, 'bold'), width = 12, bg = "#659bdb" )
        self.exitBTN.place (x = 390 ,y = 520)
        self.basicWidgetList.append( self.exitBTN )

    # Once the user wants to proceed with the computation, this function will be executed.
    # Validates the 15 entries ("x" marks a skipped row), builds the
    # processDetails table, and hands it to the back end.
    # NOTE(review): a row with "x" arrival but numeric burst time would hit
    # int("x") in the burst loop below — confirm the intended "x" usage is
    # all-or-nothing per row.
    def input1_computeBTN_Pressed( self ):
        if messagebox.askyesno( "Confirmation..." , " Are you sure you want to compute? " ) == True :
            self.arrivalTime1 = self.arrivalTime1ENTRY.get()
            self.arrivalTime2 = self.arrivalTime2ENTRY.get()
            self.arrivalTime3 = self.arrivalTime3ENTRY.get()
            self.arrivalTime4 = self.arrivalTime4ENTRY.get()
            self.arrivalTime5 = self.arrivalTime5ENTRY.get()
            self.arrivalTime_list = [ self.arrivalTime1,
                                      self.arrivalTime2,
                                      self.arrivalTime3,
                                      self.arrivalTime4,
                                      self.arrivalTime5 ]
            self.arrivalError = False
            self.arrivalTests = [ self.isNotInteger( self.arrivalTime1 ),
                                  self.isNotInteger( self.arrivalTime2 ),
                                  self.isNotInteger( self.arrivalTime3 ),
                                  self.isNotInteger( self.arrivalTime4 ),
                                  self.isNotInteger( self.arrivalTime5 ) ]
            for i in range( len( self.arrivalTests ) ):
                if self.arrivalTests[i] == True:
                    if self.arrivalTime_list[i] == "x":
                        pass
                    else:
                        self.arrivalError = True
                        break
            self.priorityNum1 = self.priorityNum1ENTRY.get()
            self.priorityNum2 = self.priorityNum2ENTRY.get()
            self.priorityNum3 = self.priorityNum3ENTRY.get()
            self.priorityNum4 = self.priorityNum4ENTRY.get()
            self.priorityNum5 = self.priorityNum5ENTRY.get()
            self.priorityNum_list = [ self.priorityNum1,
                                      self.priorityNum2,
                                      self.priorityNum3,
                                      self.priorityNum4,
                                      self.priorityNum5 ]
            self.priorityError = False
            self.priorityTests = [ self.isNotInteger( self.priorityNum1 ),
                                   self.isNotInteger( self.priorityNum2 ),
                                   self.isNotInteger( self.priorityNum3 ),
                                   self.isNotInteger( self.priorityNum4 ),
                                   self.isNotInteger( self.priorityNum5 ) ]
            # process details [ processNum, priorityNum, arrivalTime ]
            self.processDetails = {}
            for i in range( len( self.priorityTests ) ):
                if self.priorityTests[i] == True:
                    if self.priorityNum_list[i] == "x":
                        pass
                    else:
                        self.priorityError = True
                        break
                # duplicate priority numbers are rejected as well
                if self.priorityNum_list[i] != "x":
                    self.count = self.priorityNum_list.count( self.priorityNum_list[i] )
                    if self.count > 1:
                        self.priorityError = True
                        break
            self.burstTime1 = self.burstTime1ENTRY.get()
            self.burstTime2 = self.burstTime2ENTRY.get()
            self.burstTime3 = self.burstTime3ENTRY.get()
            self.burstTime4 = self.burstTime4ENTRY.get()
            self.burstTime5 = self.burstTime5ENTRY.get()
            self.burstTime_list = [ self.burstTime1,
                                    self.burstTime2,
                                    self.burstTime3,
                                    self.burstTime4,
                                    self.burstTime5 ]
            self.burstError = False
            self.burstTests = [ self.isNotInteger( self.burstTime1 ),
                                self.isNotInteger( self.burstTime2 ),
                                self.isNotInteger( self.burstTime3 ),
                                self.isNotInteger( self.burstTime4 ),
                                self.isNotInteger( self.burstTime5 ) ]
            # process details [ processNum, burstTime, arrivalTime ]
            self.processDetails = {}
            for i in range( len( self.burstTests ) ):
                if self.burstTests[i] == True:
                    if self.burstTime_list[i] == "x":
                        pass
                    else:
                        self.burstError = True
                        break
                else:
                    # valid row: add it to the table handed to the back end
                    self.processDetails[i+1] = [ "P{}".format( i+1 ),
                                                 int( self.burstTime_list[i] ),
                                                 int( self.arrivalTime_list[i]),
                                                 int( self.priorityNum_list[i]) ]
            if self.burstError == True:
                messagebox.showinfo( "Compute Error" , "Error: Invalid Burst Time input" )
            elif self.arrivalError == True:
                messagebox.showinfo( "Compute Error" , "Invalid Arrival Time input." )
            elif self.priorityError == True:
                messagebox.showinfo( "Compute Error" , "Invalid Priority input." )
            else:
                self.backEnd.insert_inputs( self.processDetails )
                self.backEnd.generate_ganttChart()
                self.ganttChart = self.backEnd.get_ganttChart()
                self.caa = self.backEnd.get_caa()
                self.result1_window()

    # This function contains and displays the result data.
    # Result includes:
    #     Gantt Chart
    #     CPU Utilization
    #     ATA
    #     AWT
    def result1_window( self ):
        self.clearWidgets()
        self.basicWidgetList = []
        self.bg9LBL = Label ( root , image = bg9, bg = "black" )
        self.bg9LBL.place(x = 0, y = 0)
        self.basicWidgetList.append( self.bg9LBL )
        self.clockLBL = Label( root , font = ('Times New Roman', 17), bg = "#4ec2c2" )
        self.clockLBL.place(x = 700, y = 70)
        self.tick_on = True
        self.tick()
        self.basicWidgetList.append( self.clockLBL )
        self.dateLBL = Label( root , font = ('Times New Roman', 17), bg = "#4ec2c2")
        self.dateLBL.place(x = 650, y = 25)
        self.current_date()
        self.basicWidgetList.append( self.dateLBL )
        self.title1LBL = Label( root , text = "Priority" , font = ('Times New Roman', 20), bg = "#4ec2c2")
        self.title1LBL.place(x = 150, y = 20)
        self.basicWidgetList.append( self.title1LBL )
        self.title2LBL = Label( root , text = "Process Management" , font = ('Times New Roman', 20), bg = "#4ec2c2")
        self.title2LBL.place(x = 75, y = 65)
        self.basicWidgetList.append( self.title2LBL )
        self.title3LBL = Label( root , text = "Gantt Chart" , font = ('Times New Roman', 20), bg = "#c6e3ad")
        self.title3LBL.place(x = 410, y = 160)
        self.basicWidgetList.append( self.title3LBL )
        self.physicalMemWidgets = []
        self.xCounter = 210
        self.indexPointer = 0
        # "0" marker at the chart's left edge
        self.markLBL = Label( root , text = 0 , font = ('Times New Roman', 10), bg = "#c6e3ad")
        self.markLBL.place(x = self.xCounter - 5, y = 310)
        self.physicalMemWidgets.append( self.markLBL )
        self.tempTotalSize = 0
        # draw one colored strip per gantt segment
        # (segment layout: [ start, finish, processNum, finish, duration, color, percent ])
        for tempData in self.ganttChart:
            try:
                self.tempTotalSize += tempData[4]
                self.displayChart( tempData[6], tempData[5], tempData[2], tempData[6], self.tempTotalSize )
            except:
                pass
        self.cpuUtilizationLBL = Label( root , text = " CPU Utilization" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.cpuUtilizationLBL.place(x = 400, y = 360)
        self.basicWidgetList.append( self.cpuUtilizationLBL )
        self.cpuUtilizationENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.cpuUtilizationENTRY.place(x = 400, y = 405)
        self.cpuUtilizationENTRY.insert( 0, str( round(self.caa[0],2)) + "%" )
        self.cpuUtilizationENTRY.config( state = "readonly" )
        self.basicWidgetList.append( self.cpuUtilizationENTRY )
        self.ataLBL = Label( root , text = " ATA" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.ataLBL.place(x = 280, y = 360)
        self.basicWidgetList.append( self.ataLBL )
        self.ataENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.ataENTRY.place(x = 235, y = 405)
        self.ataENTRY.insert( 0, str(self.caa[1]) )
        self.ataENTRY.config( state = "readonly" )
        self.basicWidgetList.append( self.ataENTRY )
        self.awtLBL = Label( root , text = " AWT" , font = ('Times New Roman', 15), bg = "#c6e3ad")
        self.awtLBL.place(x = 602, y = 360)
        self.basicWidgetList.append( self.awtLBL )
        self.awtENTRY = Entry( root , font = ('Poppins', 10, 'bold'), justify= "center" )
        self.awtENTRY.place(x = 565, y = 405)
        self.awtENTRY.insert( 0, str(self.caa[2]) )
        self.awtENTRY.config( state = "readonly" )
        self.basicWidgetList.append( self.awtENTRY )
        self.backBTN = Button ( root , text = 'BACK',command = self.input1_window , font = ('Poppins', 16, 'bold'), width = 12, bg = "#659bdb" )
        self.backBTN.place (x = 300 ,y = 480)
        self.basicWidgetList.append( self.backBTN )
        self.exitBTN = Button ( root , text = 'Exit',command = root.destroy , font = ('Poppins', 16, 'bold'), width = 12, bg = "#659bdb" )
        self.exitBTN.place (x = 490 ,y = 480)
        self.basicWidgetList.append( self.exitBTN )
# The Graphical User Interface's activation.
# Fixed-size 900x600 window; the front end draws everything via place().
root.resizable( width = FALSE , height = FALSE )
root.geometry( "900x600" )
root.config ( background = "LIGHTBLUE" )
#program = pty_backEnd()
#program.generate_ganttChart()
program1 = pty_frontEnd()
#program1.result1_window()
# Show the input window first, then hand control to the Tk event loop.
program1.input1_window()
root.mainloop()
| StarcoderdataPython |
17821 | #!/usr/bin/env python
import rospy
import rosbag
import os
import sys
import textwrap
import yaml
# Module-level cache: holds the parsed dummy lidar message once loaded.
lidarmsg=None
################# read the lidar msg from yaml file and return ##############
def readlidardummy():
    """Return the dummy lidar message, loading and caching it on first use.

    The message is parsed once from the YAML dump on disk (see
    doreadlidar()) and memoized in the module-level ``lidarmsg``.
    """
    global lidarmsg
    # PEP 8: compare against None with `is`, not `==` (was `lidarmsg==None`)
    if lidarmsg is None:
        lidarmsg = doreadlidar()
    return lidarmsg
def doreadlidar():
    """Parse the dummy lidar scan from its on-disk YAML dump and return it.

    Also refreshes the module-level ``lidarmsg`` cache as a side effect.
    """
    global lidarmsg
    print('lidardummy do read')
    with open(r'/media/student/data5/AirSim/ros/src/airsim_ros_pkgs/scripts/lidar_dummy.txt') as file:
        # safe_load replaces the deprecated/unsafe `yaml.load(file)` (no
        # Loader argument): it cannot construct arbitrary Python objects,
        # and the dummy dump only needs plain scalars/sequences/mappings.
        lidarmsg = yaml.safe_load(file)
    # sanity check: a well-formed scan dump must carry its ranges array
    # (raises KeyError early if the file is malformed)
    lidarmsg['ranges']
    return lidarmsg
if __name__ == '__main__':
    # When run as a script, just load (and cache) the dummy lidar message.
    readlidardummy()
| StarcoderdataPython |
# Force CPU-only execution: hide every CUDA device before TensorFlow loads.
import os
os.environ["CUDA_VISIBLE_DEVICES"]="-1"
import tensorflow as tf
from tensorflow.python.client import timeline
from tensorflow.examples.tutorials.mnist import input_data
# TF1-style softmax regression on MNIST, profiled with the Chrome timeline.
# Downloads/caches the dataset under ./MNIST_data with one-hot labels.
mnist = input_data.read_data_sets("MNIST_data",one_hot=True)
# Model: a single dense layer (784 -> 10) followed by softmax.
x = tf.placeholder(tf.float32, [None, 784])
w = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(x, w) + b)
y_ = tf.placeholder("float", [None, 10])
# NOTE(review): log(y) is unguarded — if any softmax output underflows to 0
# this produces NaN; tf.nn.softmax_cross_entropy_with_logits would be safer.
cross_entropy = -tf.reduce_sum(y_ * tf.log(y))
train_step = tf.train.GradientDescentOptimizer(0.01).minimize(cross_entropy)
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
# Full tracing is requested only on every 100th step (see the loop below);
# the final traced step's stats are what get dumped to JSON at the end.
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
writer=tf.summary.FileWriter("logs", sess.graph)
for i in range(1, 1000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    if i % 100 == 0:
        # traced step: collect per-op timings into run_metadata
        sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys}, options=run_options, run_metadata=run_metadata)
        writer.add_run_metadata(run_metadata, 'step %03d' % i)
    else:
        sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
writer.close()
# Evaluate accuracy on the held-out test split and print it.
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, 'float'))
print(sess.run(accuracy, feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
# Dump the last traced step in Chrome's about://tracing JSON format.
tl = timeline.Timeline(run_metadata.step_stats)
ctf = tl.generate_chrome_trace_format()
with open('mnist_timeline.json', 'w') as f:
    f.write(ctf)
# Dump a minimal node/input listing of the graph as hand-built JSON.
with open('mnist_graph.json', "w") as f:
    nodes = []
    for n in tf.get_default_graph().as_graph_def().node:
        nodes.append("{\"name\":\"" + str(n.name) + "\",\"input\":\"" + str(n.input) + "\"}")
    f.write("{\"nodes\":[\n")
    f.write(",".join(nodes))
    f.write("]}")
| StarcoderdataPython |
8120733 | import unittest
import pandas as pd
from sklearn.linear_model import LogisticRegression, SGDClassifier
from sklearn.pipeline import Pipeline
import numpy as np
from sklearn.utils.estimator_checks import check_estimator
from ITMO_FS.embedded import *
from ITMO_FS.utils import weight_func
# Seed NumPy's global RNG so the class-level random fixtures below are reproducible.
np.random.seed(42)
class TestCases(unittest.TestCase):
data, target = np.random.randint(
10, size=(
100, 20)), np.random.randint(
10, size=(
100,))
def test_MOS_err_loss(self):
with self.assertRaises(KeyError):
MOS(model=SGDClassifier(), weight_func=weight_func,
sampling=True, loss="err").fit(self.data,
self.target)
def test_MOS_no_sampling(self):
# MOSS
res = MOS(
model=SGDClassifier(),
weight_func=weight_func).fit_transform(
self.data,
self.target)
assert self.data.shape[0] == res.shape[0]
print("MOSS:", self.data.shape, '--->', res.shape)
def test_MOSS(self):
# MOSS
res = MOS(
model=SGDClassifier(),
weight_func=weight_func,
sampling=True).fit_transform(
self.data,
self.target)
assert self.data.shape[0] == res.shape[0]
print("MOSS:", self.data.shape, '--->', res.shape)
def test_MOSS_n_naigbours_err(self):
# MOSS
with self.assertRaises(ValueError):
MOS(
model=SGDClassifier(),
weight_func=weight_func,
sampling=True, k_neighbors=1000).fit_transform(
self.data,
self.target)
def test_MOSS_hinge(self):
# MOSS
res = MOS(
model=SGDClassifier(),
weight_func=weight_func,
sampling=True, loss="hinge").fit_transform(
self.data,
self.target)
assert self.data.shape[0] == res.shape[0]
print("MOSS:", self.data.shape, '--->', res.shape)
def test_MOSNS(self):
# MOSNS
res = MOS(
model=SGDClassifier(),
weight_func=weight_func,
sampling=False).fit_transform(
self.data,
self.target)
assert self.data.shape[0] == res.shape[0]
print("MOSNS:", self.data.shape, '--->', res.shape)
def test_losses(self):
for loss in ['log', 'hinge']:
res = MOS(
model=SGDClassifier(),
weight_func=weight_func,
loss=loss).fit_transform(
self.data,
self.target)
assert self.data.shape[0] == res.shape[0]
def test_df(self):
f = MOS(model=SGDClassifier(), weight_func=weight_func, sampling=True)
df = f.fit_transform(
pd.DataFrame(
self.data), pd.DataFrame(
self.target))
arr = f.fit_transform(self.data, self.target)
np.testing.assert_array_equal(df, arr)
f = MOS(model=SGDClassifier(), weight_func=weight_func, sampling=False)
df = f.fit_transform(
pd.DataFrame(
self.data), pd.DataFrame(
self.target))
arr = f.fit_transform(self.data, self.target)
np.testing.assert_array_equal(df, arr)
    def test_pipeline(self):
        """MOS must compose with sklearn Pipelines in all common layouts."""
        # FS: a single feature-selection step
        p = Pipeline(
            [('FS1', MOS(model=SGDClassifier(), weight_func=weight_func))])
        p.fit(self.data, self.target)
        res = p.transform(self.data)
        assert self.data.shape[0] == res.shape[0]
        # FS - estim: selection followed by a classifier
        p = Pipeline([('FS1', MOS(model=SGDClassifier(),
                                  weight_func=weight_func)),
                      ('E1', LogisticRegression())])
        p.fit(self.data, self.target)
        assert 0 <= p.score(self.data, self.target) <= 1
        # FS - FS: two chained selectors with different losses
        p = Pipeline([('FS1',
                       MOS(model=SGDClassifier(),
                           weight_func=weight_func,
                           loss='log')),
                      ('FS2',
                       MOS(model=SGDClassifier(),
                           weight_func=weight_func,
                           loss='hinge'))])
        p.fit(self.data, self.target)
        res = p.transform(self.data)
        assert self.data.shape[0] == res.shape[0]
        # FS - FS - estim: two selectors and a final classifier
        p = Pipeline([('FS1',
                       MOS(model=SGDClassifier(), weight_func=weight_func,
                           loss='log')), ('FS2', MOS(
                           model=SGDClassifier(), weight_func=weight_func, loss='hinge')),
                      ('E1', LogisticRegression())])
        p.fit(self.data, self.target)
        assert 0 <= p.score(self.data, self.target) <= 1
    def test_est(self):
        """MOS in both sampling modes must satisfy sklearn's estimator contract."""
        moss = MOS(
            model=SGDClassifier(),
            weight_func=weight_func,
            sampling=True)
        mosns = MOS(
            model=SGDClassifier(),
            weight_func=weight_func,
            sampling=False)
        # for some reason using local weight_func or lambda here causes it to fail with pickle errors
        # so we're using an imported weight_func
        check_estimator(moss)
        check_estimator(mosns)
# Allow running this test module directly: python <module>.py
if __name__ == "__main__":
    unittest.main()
| StarcoderdataPython |
6626201 | <filename>tests/integration/test_dynamodb.py
# -*- coding: utf-8 -*-
import unittest
import json
from localstack.services.dynamodbstreams.dynamodbstreams_api import get_kinesis_stream_name
from localstack.utils import testutil
from localstack.utils.aws import aws_stack
from localstack.utils.aws.aws_models import KinesisStream
from localstack.utils.aws.aws_stack import get_environment
from localstack.utils.common import json_safe, short_uid
# Hash-key attribute name shared by every table created in these tests.
PARTITION_KEY = 'id'
# One table name per test case so the tests don't interfere with each other.
TEST_DDB_TABLE_NAME = 'test-ddb-table-1'
TEST_DDB_TABLE_NAME_2 = 'test-ddb-table-2'
TEST_DDB_TABLE_NAME_3 = 'test-ddb-table-3'
TEST_DDB_TABLE_NAME_4 = 'test-ddb-table-4'
# Tag set applied at creation time in the tagging tests.
TEST_DDB_TAGS = [
    {
        'Key': 'Name',
        'Value': 'test-table'
    },
    {
        'Key': 'TestKey',
        'Value': 'true'
    }
]
class DynamoDBIntegrationTest (unittest.TestCase):
    """Integration tests for localstack's DynamoDB (and DynamoDB Streams) emulation.

    Each test creates its own table(s) and cleans them up via delete_table().
    """

    @classmethod
    def setUpClass(cls):
        # one shared boto3 DynamoDB resource for all tests in this class
        cls.dynamodb = aws_stack.connect_to_resource('dynamodb')

    def test_non_ascii_chars(self):
        """Items containing non-ASCII characters survive a put/get round trip."""
        aws_stack.create_dynamodb_table(TEST_DDB_TABLE_NAME, partition_key=PARTITION_KEY)
        table = self.dynamodb.Table(TEST_DDB_TABLE_NAME)

        # write some items containing non-ASCII characters
        items = {
            'id1': {PARTITION_KEY: 'id1', 'data': 'foobar123 ✓'},
            'id2': {PARTITION_KEY: 'id2', 'data': 'foobar123 £'},
            'id3': {PARTITION_KEY: 'id3', 'data': 'foobar123 ¢'}
        }
        for k, item in items.items():
            table.put_item(Item=item)

        for item_id in items.keys():
            item = table.get_item(Key={PARTITION_KEY: item_id})['Item']

            # need to fix up the JSON and convert str to unicode for Python 2
            item1 = json_safe(item)
            item2 = json_safe(items[item_id])
            self.assertEqual(item1, item2)

        # clean up
        delete_table(TEST_DDB_TABLE_NAME)

    def test_large_data_download(self):
        """A scan over many large items works (chunked transfer encoding path)."""
        aws_stack.create_dynamodb_table(TEST_DDB_TABLE_NAME_2, partition_key=PARTITION_KEY)
        table = self.dynamodb.Table(TEST_DDB_TABLE_NAME_2)

        # Create a large amount of items
        num_items = 20
        for i in range(0, num_items):
            item = {PARTITION_KEY: 'id%s' % i, 'data1': 'foobar123 ' * 1000}
            table.put_item(Item=item)

        # Retrieve the items. The data will be transmitted to the client with chunked transfer encoding
        result = table.scan(TableName=TEST_DDB_TABLE_NAME_2)
        self.assertEqual(len(result['Items']), num_items)

        # clean up
        delete_table(TEST_DDB_TABLE_NAME_2)

    def test_time_to_live(self):
        """TTL can be enabled/disabled repeatedly and is reported correctly."""
        aws_stack.create_dynamodb_table(TEST_DDB_TABLE_NAME_3, partition_key=PARTITION_KEY)
        table = self.dynamodb.Table(TEST_DDB_TABLE_NAME_3)

        # Insert some items to the table
        items = {
            'id1': {PARTITION_KEY: 'id1', 'data': 'IT IS'},
            'id2': {PARTITION_KEY: 'id2', 'data': 'TIME'},
            'id3': {PARTITION_KEY: 'id3', 'data': 'TO LIVE!'}
        }
        for k, item in items.items():
            table.put_item(Item=item)

        # Describe TTL when still unset.
        response = testutil.send_describe_dynamodb_ttl_request(TEST_DDB_TABLE_NAME_3)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response._content)['TimeToLiveDescription']['TimeToLiveStatus'], 'DISABLED')

        # Enable TTL for given table
        response = testutil.send_update_dynamodb_ttl_request(TEST_DDB_TABLE_NAME_3, True)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(json.loads(response._content)['TimeToLiveSpecification']['Enabled'])

        # Describe TTL status after being enabled.
        response = testutil.send_describe_dynamodb_ttl_request(TEST_DDB_TABLE_NAME_3)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response._content)['TimeToLiveDescription']['TimeToLiveStatus'], 'ENABLED')

        # Disable TTL for given table
        response = testutil.send_update_dynamodb_ttl_request(TEST_DDB_TABLE_NAME_3, False)
        self.assertEqual(response.status_code, 200)
        self.assertFalse(json.loads(response._content)['TimeToLiveSpecification']['Enabled'])

        # Describe TTL status after being disabled.
        response = testutil.send_describe_dynamodb_ttl_request(TEST_DDB_TABLE_NAME_3)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response._content)['TimeToLiveDescription']['TimeToLiveStatus'], 'DISABLED')

        # Enable TTL for given table again
        response = testutil.send_update_dynamodb_ttl_request(TEST_DDB_TABLE_NAME_3, True)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(json.loads(response._content)['TimeToLiveSpecification']['Enabled'])

        # Describe TTL status after being enabled again.
        response = testutil.send_describe_dynamodb_ttl_request(TEST_DDB_TABLE_NAME_3)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response._content)['TimeToLiveDescription']['TimeToLiveStatus'], 'ENABLED')

        # clean up
        delete_table(TEST_DDB_TABLE_NAME_3)

    def test_list_tags_of_resource(self):
        """Tags can be listed, added and removed on a table resource."""
        table_name = 'ddb-table-%s' % short_uid()
        dynamodb = aws_stack.connect_to_service('dynamodb')

        # create a table with the default tag set
        rs = dynamodb.create_table(
            TableName=table_name,
            KeySchema=[{
                'AttributeName': 'id', 'KeyType': 'HASH'
            }],
            AttributeDefinitions=[{
                'AttributeName': 'id', 'AttributeType': 'S'
            }],
            ProvisionedThroughput={
                'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5
            },
            Tags=TEST_DDB_TAGS
        )
        table_arn = rs['TableDescription']['TableArn']

        # initial tag list must equal what was passed at creation
        rs = dynamodb.list_tags_of_resource(
            ResourceArn=table_arn
        )
        self.assertEqual(rs['Tags'], TEST_DDB_TAGS)

        # add one more tag
        dynamodb.tag_resource(
            ResourceArn=table_arn,
            Tags=[
                {
                    'Key': 'NewKey',
                    'Value': 'TestValue'
                }
            ]
        )

        rs = dynamodb.list_tags_of_resource(
            ResourceArn=table_arn
        )
        self.assertEqual(len(rs['Tags']), len(TEST_DDB_TAGS) + 1)

        tags = {tag['Key']: tag['Value'] for tag in rs['Tags']}
        self.assertIn('NewKey', tags.keys())
        self.assertEqual(tags['NewKey'], 'TestValue')

        # remove two tags (one original, one just added)
        dynamodb.untag_resource(
            ResourceArn=table_arn,
            TagKeys=[
                'Name', 'NewKey'
            ]
        )

        rs = dynamodb.list_tags_of_resource(
            ResourceArn=table_arn
        )
        tags = {tag['Key']: tag['Value'] for tag in rs['Tags']}
        self.assertNotIn('Name', tags.keys())
        self.assertNotIn('NewKey', tags.keys())

        delete_table(table_name)

    def test_stream_spec_and_region_replacement(self):
        """Table/stream ARNs carry the local region and shard IDs match AWS format."""
        aws_stack.create_dynamodb_table(
            TEST_DDB_TABLE_NAME_4,
            partition_key=PARTITION_KEY,
            stream_view_type='NEW_AND_OLD_IMAGES'
        )
        table = self.dynamodb.Table(TEST_DDB_TABLE_NAME_4)

        # assert ARN formats
        expected_arn_prefix = 'arn:aws:dynamodb:' + aws_stack.get_local_region()
        self.assertTrue(table.table_arn.startswith(expected_arn_prefix))
        self.assertTrue(table.latest_stream_arn.startswith(expected_arn_prefix))

        # assert shard ID formats
        ddbstreams = aws_stack.connect_to_service('dynamodbstreams')
        result = ddbstreams.describe_stream(StreamArn=table.latest_stream_arn)['StreamDescription']
        self.assertIn('Shards', result)
        for shard in result['Shards']:
            self.assertRegex(shard['ShardId'], r'^shardId\-[0-9]{20}\-[a-zA-Z0-9]{1,36}$')

        # clean up
        delete_table(TEST_DDB_TABLE_NAME_4)

    def test_multiple_update_expressions(self):
        """A single UpdateExpression can SET several attributes at once."""
        dynamodb = aws_stack.connect_to_service('dynamodb')
        aws_stack.create_dynamodb_table(TEST_DDB_TABLE_NAME, partition_key=PARTITION_KEY)
        table = self.dynamodb.Table(TEST_DDB_TABLE_NAME)

        item_id = short_uid()
        table.put_item(Item={PARTITION_KEY: item_id, 'data': 'foobar123 ✓'})
        response = dynamodb.update_item(TableName=TEST_DDB_TABLE_NAME,
                                        Key={PARTITION_KEY: {'S': item_id}},
                                        UpdateExpression='SET attr1 = :v1, attr2 = :v2',
                                        ExpressionAttributeValues={
                                            ':v1': {'S': 'value1'},
                                            ':v2': {'S': 'value2'}
                                        })
        self.assertEqual(response['ResponseMetadata']['HTTPStatusCode'], 200)

        item = table.get_item(Key={PARTITION_KEY: item_id})['Item']
        self.assertEqual(item['attr1'], 'value1')
        self.assertEqual(item['attr2'], 'value2')

    def test_return_values_in_put_item(self):
        """ReturnValues=ALL_OLD returns prior attributes only when an item existed."""
        aws_stack.create_dynamodb_table(TEST_DDB_TABLE_NAME, partition_key=PARTITION_KEY)
        table = self.dynamodb.Table(TEST_DDB_TABLE_NAME)

        # items which are being used to put in the table
        item1 = {PARTITION_KEY: 'id1', 'data': 'foobar'}
        item2 = {PARTITION_KEY: 'id2', 'data': 'foobar'}

        response = table.put_item(Item=item1, ReturnValues='ALL_OLD')
        # there is no data present in the table already so even if return values
        # is set to 'ALL_OLD' as there is no data it will not return any data.
        self.assertFalse(response.get('Attributes'))

        # now the same data is present so when we pass return values as 'ALL_OLD'
        # it should give us attributes
        response = table.put_item(Item=item1, ReturnValues='ALL_OLD')
        self.assertTrue(response.get('Attributes'))
        self.assertEqual(response.get('Attributes').get('id'), item1.get('id'))
        self.assertEqual(response.get('Attributes').get('data'), item1.get('data'))

        response = table.put_item(Item=item2)
        # we do not have any same item as item2 already so when we add this by default
        # return values is set to None so no Attribute values should be returned
        self.assertFalse(response.get('Attributes'))

        response = table.put_item(Item=item2)
        # in this case we already have item2 in the table so on this request
        # it should not return any data as return values is set to None so no
        # Attribute values should be returned
        self.assertFalse(response.get('Attributes'))

    def test_empty_and_binary_values(self):
        """Empty string values and binary values are both accepted on put_item."""
        aws_stack.create_dynamodb_table(TEST_DDB_TABLE_NAME, partition_key=PARTITION_KEY)
        table = self.dynamodb.Table(TEST_DDB_TABLE_NAME)

        # items which are being used to put in the table
        item1 = {PARTITION_KEY: 'id1', 'data': ''}
        item2 = {PARTITION_KEY: 'id2', 'data': b'foobar'}

        response = table.put_item(Item=item1)
        self.assertEqual(response['ResponseMetadata']['HTTPStatusCode'], 200)

        response = table.put_item(Item=item2)
        self.assertEqual(response['ResponseMetadata']['HTTPStatusCode'], 200)

    def test_dynamodb_stream_shard_iterator(self):
        """A shard iterator can be obtained for a table's change stream."""
        def wait_for_stream_created(table_name):
            # localstack backs DynamoDB streams by a Kinesis stream; wait for it
            stream_name = get_kinesis_stream_name(table_name)
            stream = KinesisStream(id=stream_name, num_shards=1)
            kinesis = aws_stack.connect_to_service('kinesis', env=get_environment(None))
            stream.connect(kinesis)
            stream.wait_for()

        dynamodb = aws_stack.connect_to_service('dynamodb')
        ddbstreams = aws_stack.connect_to_service('dynamodbstreams')

        table_name = 'table_with_stream'
        table = dynamodb.create_table(
            TableName=table_name,
            KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
            AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
            StreamSpecification={
                'StreamEnabled': True,
                'StreamViewType': 'NEW_IMAGE',
            },
            ProvisionedThroughput={
                'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5
            },
        )

        wait_for_stream_created(table_name)

        stream_arn = table['TableDescription']['LatestStreamArn']
        result = ddbstreams.describe_stream(StreamArn=stream_arn)

        response = ddbstreams.get_shard_iterator(StreamArn=stream_arn,
                                                 ShardId=result['StreamDescription']['Shards'][0]['ShardId'],
                                                 ShardIteratorType='LATEST'
                                                 )
        self.assertIn('ShardIterator', response)

    def test_global_tables(self):
        """Global tables can be created, described, updated; errors are raised."""
        aws_stack.create_dynamodb_table(TEST_DDB_TABLE_NAME, partition_key=PARTITION_KEY)
        dynamodb = aws_stack.connect_to_service('dynamodb')

        # create global table
        regions = [{'RegionName': 'us-east-1'}, {'RegionName': 'us-west-1'}, {'RegionName': 'eu-central-1'}]
        response = dynamodb.create_global_table(GlobalTableName=TEST_DDB_TABLE_NAME,
                                                ReplicationGroup=regions)['GlobalTableDescription']
        self.assertIn('ReplicationGroup', response)
        self.assertEqual(len(regions), len(response['ReplicationGroup']))

        # describe global table
        response = dynamodb.describe_global_table(GlobalTableName=TEST_DDB_TABLE_NAME)['GlobalTableDescription']
        self.assertIn('ReplicationGroup', response)
        self.assertEqual(len(regions), len(response['ReplicationGroup']))

        # update global table: two additions, one removal -> net +1 region
        updates = [
            {'Create': {'RegionName': 'us-east-2'}},
            {'Create': {'RegionName': 'us-west-2'}},
            {'Delete': {'RegionName': 'us-west-1'}}
        ]
        response = dynamodb.update_global_table(GlobalTableName=TEST_DDB_TABLE_NAME,
                                                ReplicaUpdates=updates)['GlobalTableDescription']
        self.assertIn('ReplicationGroup', response)
        self.assertEqual(len(regions) + 1, len(response['ReplicationGroup']))

        # assert exceptions for invalid requests
        with self.assertRaises(Exception) as ctx:
            dynamodb.create_global_table(GlobalTableName=TEST_DDB_TABLE_NAME, ReplicationGroup=regions)
        self.assertIn('GlobalTableAlreadyExistsException', str(ctx.exception))
        with self.assertRaises(Exception) as ctx:
            dynamodb.describe_global_table(GlobalTableName='invalid-table-name')
        self.assertIn('GlobalTableNotFoundException', str(ctx.exception))

    def test_create_duplicate_table(self):
        """Creating a table twice with the same name must fail."""
        table_name = 'duplicateTable'
        dynamodb = aws_stack.connect_to_service('dynamodb')

        dynamodb.create_table(
            TableName=table_name,
            KeySchema=[{
                'AttributeName': 'id', 'KeyType': 'HASH'
            }],
            AttributeDefinitions=[{
                'AttributeName': 'id', 'AttributeType': 'S'
            }],
            ProvisionedThroughput={
                'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5
            },
            Tags=TEST_DDB_TAGS
        )

        with self.assertRaises(Exception) as ctx:
            dynamodb.create_table(
                TableName=table_name,
                KeySchema=[{
                    'AttributeName': 'id', 'KeyType': 'HASH'
                }],
                AttributeDefinitions=[{
                    'AttributeName': 'id', 'AttributeType': 'S'
                }],
                ProvisionedThroughput={
                    'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5
                },
                Tags=TEST_DDB_TAGS
            )
        # NOTE(review): real AWS raises ResourceInUseException for a duplicate
        # create_table; this asserts ResourceNotFoundException -- verify that
        # this matches the localstack version under test, otherwise fix the
        # expected exception name.
        self.assertIn('ResourceNotFoundException', str(ctx.exception))
def delete_table(name):
    """Drop the DynamoDB table called *name* via a fresh localstack client."""
    aws_stack.connect_to_service('dynamodb').delete_table(TableName=name)
| StarcoderdataPython |
50 | <filename>paccmann_chemistry/utils/hyperparams.py
"""Model Parameters Module."""
import torch.optim as optim
from .search import SamplingSearch, GreedySearch, BeamSearch
# Maps a decoding-strategy name (as used in configuration files) to the
# corresponding search class from .search.
SEARCH_FACTORY = {
    'sampling': SamplingSearch,
    'greedy': GreedySearch,
    'beam': BeamSearch,
}
# Maps an optimizer name to the corresponding torch.optim constructor.
OPTIMIZER_FACTORY = {
    'adadelta': optim.Adadelta,
    'adagrad': optim.Adagrad,
    'adam': optim.Adam,
    'adamax': optim.Adamax,
    'rmsprop': optim.RMSprop,
    'sgd': optim.SGD
}
| StarcoderdataPython |
1728851 | <reponame>sunsyw/utils
# a = [x for x in range(10)]
# print(a)
#
# b = (x for x in range(10))
# print(b)
# for b1 in b:
# print(b1)
def generator(n=10):
    """Yield the first *n* Fibonacci numbers: 1, 1, 2, 3, 5, ...

    :param n: how many numbers to produce; defaults to 10, which matches the
        previously hard-coded count, so existing callers are unaffected.
    """
    a, b = 0, 1
    for _ in range(n):
        yield b
        # advance the pair: b becomes the next Fibonacci number
        a, b = b, a + b
if __name__ == '__main__':
    # Calling generator() returns a generator object (printed as its repr),
    # not a materialized list; next() pulls the first Fibonacci value from it.
    print(generator())
    fib = generator()
    print(next(fib))
1658159 | <gh_stars>1-10
from dateutil.parser import parse as timeparser
import yippi
class Set(object):
    """Lazy wrapper around an e621 ``<post-set>`` XML element (BeautifulSoup).

    Each property re-reads its tag on access and caches the parsed value;
    ``None`` is returned while the tag is missing or empty.
    """

    def __init__(self, object):
        self.object = object.find("post-set")
        self._description = None
        self._id = None
        self._name = None
        self._post_count = None
        self._public = None
        self._shortname = None
        self._updated_at = None
        self._user_id = None
        self._posts = []

    def _text(self, tag_name):
        """Text of child tag *tag_name*, or None if the tag is missing/empty."""
        tag = self.object.find(tag_name)
        return tag.text if tag else None

    @property
    def description(self):
        value = self._text("description")
        if value is not None:
            self._description = value
        return self._description

    @property
    def id(self):
        value = self._text("id")
        if value is not None:
            self._id = int(value)
        return self._id

    @property
    def name(self):
        # find("name") is required here: ``Tag.name`` is BeautifulSoup's own
        # attribute (the element's tag name), so the original truthiness check
        # ``self.object.name`` was always True regardless of the XML content.
        value = self._text("name")
        if value is not None:
            self._name = value
        return self._name

    @property
    def post_count(self):
        # Bug fix: the old guard looked up a ``post_count`` tag (underscore)
        # while the value lives in ``post-count`` (dash), so the property
        # always returned None.
        value = self._text("post-count")
        if value is not None:
            self._post_count = int(value)
        return self._post_count

    @property
    def public(self):
        # Bug fix: bool("false") is True in Python; compare the literal text.
        value = self._text("public")
        if value is not None:
            self._public = value.lower() == 'true'
        return self._public

    @property
    def shortname(self):
        value = self._text("shortname")
        if value is not None:
            self._shortname = value
        return self._shortname

    @property
    def updated_at(self):
        # Rails XML serialization dasherizes names (cf. "post-count" and
        # "user-id" in this same document); the old code only looked for
        # "updated_at". Try the dashed form first, keep the old one as backup.
        value = self._text("updated-at")
        if value is None:
            value = self._text("updated_at")
        if value is not None:
            self._updated_at = timeparser(value)
        return self._updated_at

    @property
    def user_id(self):
        # Bug fix: guard and read used different tag names (user_id vs user-id),
        # so the guard was always None and the property always returned None.
        value = self._text("user-id")
        if value is not None:
            self._user_id = int(value)
        return self._user_id

    @property
    def posts(self):
        post_tags = self.object.findAll("post")
        if post_tags:
            # rebuild instead of appending so repeated access doesn't duplicate
            self._posts = [Submission(post) for post in post_tags]
        return self._posts
class Submission(object):
    """Lazy wrapper around an e621 ``<post>`` XML element (BeautifulSoup).

    Each property re-reads its tag on access and caches the parsed value;
    ``None`` is returned while the tag is missing or empty.
    """

    def __init__(self, object):
        self.object = object
        self._id = None
        self._tags = None
        self._locked_tags = None
        self._description = None
        self._created_at = None
        self._creator_id = None
        self._author = None
        self._change = None
        self._source = None
        self._sources = None
        self._score = None
        self._fav_count = None
        self._md5 = None
        self._file_size = None
        self._file_url = None
        self._file_ext = None
        # Bug fix: the preview/sample/parent caches below were missing from
        # the original __init__, so their properties raised AttributeError
        # whenever the corresponding XML tag was absent.
        self._preview_url = None
        self._preview_width = None
        self._preview_height = None
        self._sample_url = None
        self._sample_width = None
        self._sample_height = None
        self._parent_id = None
        self._rating = None
        self._status = None
        self._width = None
        self._height = None
        self._has_comments = None
        self._has_notes = None
        self._has_children = None
        self._children = None
        self._parent = None
        self._artist = None

    # --- internal helpers -------------------------------------------------
    def _text(self, tag_name):
        """Text of child tag *tag_name*, or None if the tag is missing/empty."""
        tag = self.object.find(tag_name)
        return tag.text if tag else None

    def _refresh(self, attr, tag_name, convert):
        """Re-parse tag *tag_name* if present, cache under *attr*, return cache."""
        value = self._text(tag_name)
        if value is not None:
            setattr(self, attr, convert(value))
        return getattr(self, attr)

    @staticmethod
    def _to_bool(text):
        # Bug fix: bool("false") is True in Python; compare the literal text.
        return text.lower() == 'true'

    # --- simple scalar fields ---------------------------------------------
    @property
    def description(self):
        return self._refresh("_description", "description", str)

    @property
    def id(self):
        return self._refresh("_id", "id", int)

    @property
    def tags(self):
        return self._refresh("_tags", "tags", lambda t: t.split(' '))

    @property
    def locked_tags(self):
        # The original computed bool(tag.attrs['nil']), which raised KeyError
        # for real values and was guarded out for nil tags; expose the tag
        # text as a list, mirroring `tags`. Absent/nil tags yield None.
        tag = self.object.find("locked_tags")
        if tag and not tag.attrs.get('nil'):
            self._locked_tags = tag.text.split(' ')
        return self._locked_tags

    @property
    def created_at(self):
        return self._refresh("_created_at", "created_at", timeparser)

    @property
    def creator_id(self):
        return self._refresh("_creator_id", "creator_id", int)

    @property
    def author(self):
        return self._refresh("_author", "author", str)

    @property
    def change(self):
        return self._refresh("_change", "change", int)

    @property
    def source(self):
        return self._refresh("_source", "source", str)

    @property
    def score(self):
        return self._refresh("_score", "score", int)

    @property
    def fav_count(self):
        return self._refresh("_fav_count", "fav_count", int)

    @property
    def md5(self):
        return self._refresh("_md5", "md5", str)

    @property
    def file_size(self):
        return self._refresh("_file_size", "file_size", int)

    @property
    def file_url(self):
        return self._refresh("_file_url", "file_url", str)

    @property
    def file_ext(self):
        return self._refresh("_file_ext", "file_ext", str)

    @property
    def preview_url(self):
        return self._refresh("_preview_url", "preview_url", str)

    @property
    def preview_width(self):
        return self._refresh("_preview_width", "preview_width", int)

    @property
    def preview_height(self):
        return self._refresh("_preview_height", "preview_height", int)

    @property
    def sample_url(self):
        return self._refresh("_sample_url", "sample_url", str)

    @property
    def sample_width(self):
        return self._refresh("_sample_width", "sample_width", int)

    @property
    def sample_height(self):
        return self._refresh("_sample_height", "sample_height", int)

    @property
    def rating(self):
        return self._refresh("_rating", "rating", str)

    @property
    def status(self):
        return self._refresh("_status", "status", str)

    @property
    def width(self):
        return self._refresh("_width", "width", int)

    @property
    def height(self):
        return self._refresh("_height", "height", int)

    @property
    def has_comments(self):
        return self._refresh("_has_comments", "has_comments", self._to_bool)

    @property
    def has_notes(self):
        return self._refresh("_has_notes", "has_notes", self._to_bool)

    @property
    def has_children(self):
        return self._refresh("_has_children", "has_children", self._to_bool)

    # --- structured fields -------------------------------------------------
    @property
    def children(self):
        # Must use find("children"): ``Tag.children`` is a BeautifulSoup
        # built-in iterator property, so the original attribute access never
        # reached the XML tag and raised AttributeError on ``.text``.
        tag = self.object.find("children")
        if tag and tag.text:
            self._children = [yippi.post(child) for child in tag.text.split(',')]
        return self._children

    @property
    def parent_id(self):
        tag = self.object.find("parent_id")
        if tag:
            self._parent_id = yippi.post(tag.text)
        return self._parent_id

    @property
    def artist(self):
        # The artist name lives in a nested <artist> tag; guard both levels so
        # a lone/empty tag can't raise AttributeError.
        tag = self.object.find("artist")
        if tag and tag.artist:
            self._artist = tag.artist.text
        return self._artist

    @property
    def sources(self):
        # Bug fix: the original appended into self._sources, which was
        # initialized to None (AttributeError) and would also accumulate
        # duplicates on repeated access; rebuild the list instead.
        tag = self.object.find("sources")
        if tag:
            self._sources = [source.text for source in tag.findAll("source")]
        return self._sources
| StarcoderdataPython |
8158164 | """
The sampler module uses the distance data calculated in "calc_class_distance.py" to sample synthetic data for classes
1 and 2 and samples the under represented classes by copying them in order to have a even class distribution.
"""
import configparser
from dlpipe.data_reader.mongodb import MongoDBConnect
from dlpipe.utils import DLPipeLogger
from accident_predictor.data.upload.data_encoder import sin_cos_representation
from accident_predictor.data.upload.calc_class_distances import upload_distances
from keras.utils.np_utils import to_categorical
import numpy as np
import copy
def generate_synth_data(col, ids, insert=True):
    """
    create synthetic data from distance calculation of entries to other classes and save to database
    :param col: collection to save synthetic data to
    :param ids: list of ids for the records that should be sampledn
    :param insert: bool to actually insert all entry (for debugging)
    :return: mongodb ids that where inserted as synthetic data

    NOTE(review): the returned list actually contains pymongo InsertOneResult
    objects (which carry .inserted_id), not raw ids -- confirm callers.

    Implementation note: each ``del row["_id"]`` relies on pymongo's
    ``insert_one`` mutating ``row`` by adding a fresh ``_id`` after the
    insert, so the delete succeeds again on the next iteration. Do not
    reorder these statements.
    """
    # TODO: potentially change light condition depending on what time (e.g. shouldnt be dark at 13:00)
    # TODO: same for date, chance of snow and ice in summer is rather low...
    cursor = col.find({"_id": {"$in": ids}})
    inserted_ids = []
    for row in cursor:
        # change age: one synthetic entry per 5-year bucket (0..90)
        org_age = copy.deepcopy(row["age"])
        for age_idx in range(0, 18):
            age_min = 5 * age_idx
            age_max = 5 * (age_idx + 1)
            new_age = int(np.random.uniform(age_min, age_max, 1)[0])
            row["age"] = int(new_age)
            if insert:
                del row["_id"]
                inserted_ids.append(col.insert_one(row))
        row["age"] = org_age
        # change time: one entry per hour of day, stored with its sin/cos
        # cyclical encoding (period = 1440 minutes)
        org_time = copy.deepcopy(row["time"])
        for time_idx in range(0, 24):
            time_min = 60 * time_idx
            time_max = 60 * (time_idx + 1)
            new_time = int(np.random.uniform(time_min, time_max, 1)[0])
            sin_time, cos_time = sin_cos_representation(new_time, 1440)
            row["time"]["value"] = new_time
            row["time"]["sin"] = sin_time
            row["time"]["cos"] = cos_time
            if insert:
                del row["_id"]
                inserted_ids.append(col.insert_one(row))
        row["time"] = org_time
        # change date: one entry per 20-day bucket of the year, cyclical
        # encoding with period 361 days
        org_date = copy.deepcopy(row["date"])
        for date_idx in range(0, 18):
            date_min = 20 * date_idx
            date_max = 20 * (date_idx + 1)
            new_date = int(np.random.uniform(date_min, date_max, 1)[0])
            sin_date, cos_date = sin_cos_representation(new_date, 361)
            row["date"]["value"] = new_date
            row["date"]["sin"] = sin_date
            row["date"]["cos"] = cos_date
            if insert:
                del row["_id"]
                inserted_ids.append(col.insert_one(row))
        row["date"] = org_date
        # change class: one entry per one-hot category
        org_class = copy.deepcopy(row["class"])
        for new_index in range(0, len(org_class["encoded"])):
            row["class"] = {
                "value": "generated",
                "encoded": to_categorical(new_index, num_classes=len(org_class["encoded"])).astype(int).tolist()
            }
            if insert:
                del row["_id"]
                inserted_ids.append(col.insert_one(row))
        row["class"] = org_class
        # change weather: one entry per one-hot category
        org_class = copy.deepcopy(row["weather"])
        for new_index in range(0, len(org_class["encoded"])):
            row["weather"] = {
                "value": "generated",
                "encoded": to_categorical(new_index, num_classes=len(org_class["encoded"])).astype(int).tolist()
            }
            if insert:
                del row["_id"]
                inserted_ids.append(col.insert_one(row))
        row["weather"] = org_class
        # change gender: one entry per one-hot category
        org_class = copy.deepcopy(row["gender"])
        for new_index in range(0, len(org_class["encoded"])):
            row["gender"] = {
                "value": "generated",
                "encoded": to_categorical(new_index, num_classes=len(org_class["encoded"])).astype(int).tolist()
            }
            if insert:
                del row["_id"]
                inserted_ids.append(col.insert_one(row))
        row["gender"] = org_class
        # change vehicle type: note the ``- 1`` -- the last category is
        # deliberately skipped (presumably an "unknown" bucket; confirm)
        org_class = copy.deepcopy(row["vehicle_type"])
        for new_index in range(0, len(org_class["encoded"]) - 1):
            row["vehicle_type"] = {
                "value": "generated",
                "encoded": to_categorical(new_index, num_classes=len(org_class["encoded"])).astype(int).tolist()
            }
            if insert:
                del row["_id"]
                inserted_ids.append(col.insert_one(row))
        row["vehicle_type"] = org_class
        # change road_type: last category skipped as for vehicle_type
        org_class = copy.deepcopy(row["road_type"])
        for new_index in range(0, len(org_class["encoded"]) - 1):
            row["road_type"] = {
                "value": "generated",
                "encoded": to_categorical(new_index, num_classes=len(org_class["encoded"])).astype(int).tolist()
            }
            if insert:
                del row["_id"]
                inserted_ids.append(col.insert_one(row))
        row["road_type"] = org_class
    return inserted_ids
def up_sample(col, cursor, nr_create):
    """
    Sample a set amount of data by copying the existing data and saving it to mongodb.

    The cursor is iterated repeatedly (via ``rewind``) until ``nr_create``
    copies have been inserted.

    :param col: mongodb collection where the new documents should be saved to
    :param cursor: pymongo cursor with the data that is getting sampled
    :param nr_create: how many additional documents should be created
    :raises ValueError: if nr_create is negative
    """
    if nr_create < 0:
        raise ValueError("Can not create negative amount of entries")
    counter = 0
    while counter < nr_create:
        inserted_this_pass = 0
        for row in cursor:
            # drop the source document's primary key so MongoDB assigns a new one
            del row["_id"]
            col.insert_one(row)
            counter += 1
            inserted_this_pass += 1
            if counter >= nr_create:
                break
        if inserted_this_pass == 0:
            # Bug fix: an empty cursor previously caused an infinite loop here,
            # because no insert ever advanced the counter. Bail out instead.
            break
        cursor.rewind()
# Script entry point: recompute class distances, generate synthetic samples
# for the under-represented classes 1 and 2, then up-sample (by copying)
# until the class distribution is even.
if __name__ == "__main__":
    DLPipeLogger.remove_file_logger()
    cp = configparser.ConfigParser()
    if len(cp.read('./../../connections.ini')) == 0:
        raise ValueError("Config File could not be loaded, please check the correct path!")
    MongoDBConnect.add_connections_from_config(cp)

    col_train = MongoDBConnect.get_collection("localhost_mongo_db", "accident", "train")
    col_distance = MongoDBConnect.get_collection("localhost_mongo_db", "accident", "k_distance")

    # find class distances
    upload_distances()

    # get averaged class distances for class 1 and 2
    raw_distance_data_avg_class_1 = col_distance.find({"class": 1, "compared_to": {"$all": [0, 2]}})
    raw_distance_data_avg_class_2 = col_distance.find({"class": 2, "compared_to": {"$all": [0, 1]}})
    if raw_distance_data_avg_class_1.count() == 0 or raw_distance_data_avg_class_2.count() == 0:
        raise ValueError("No distance data found, need to execute 'calc_class_distance.py' first")

    # generate synthetic data from class distances
    # (only the nearest 70 / 20 records of each class are used as seeds)
    inserted_ids_1 = generate_synth_data(col_train, raw_distance_data_avg_class_1[0]["ids"][0:70], True)
    inserted_ids_2 = generate_synth_data(col_train, raw_distance_data_avg_class_2[0]["ids"][0:20], True)

    raw_data_train_0 = col_train.find({"accident_severity": 0})
    raw_data_train_1 = col_train.find({"accident_severity": 1})
    raw_data_train_2 = col_train.find({"accident_severity": 2})
    print("Class distribution after synthetic data generation:")
    print("Class 0: " + str(raw_data_train_0.count()))
    print("Class 1: " + str(raw_data_train_1.count()))
    print("Class 2: " + str(raw_data_train_2.count()))

    # evenly sample data by copying existing data up to the majority class size
    max_count = raw_data_train_0.count()
    up_sample(col_train, raw_data_train_1, (max_count - raw_data_train_1.count()))
    up_sample(col_train, raw_data_train_2, (max_count - raw_data_train_2.count()))
111857 | import numpy as np
from sbrfuzzy import *
# Simulate a fuzzy (Mamdani) population-growth model for several initial
# populations and dump the trajectories to "dados.txt" as tab-separated rows.
entrada = open("dados.txt","a")

# NOTE(review): ``v`` is never used afterwards -- v1 rebuilds the same range.
v = np.arange(0,300.5,0.5)

# Linguistic variable "População" (population) with its membership functions.
v1 = variavellinguistica("População",np.arange(0,300.5,0.5))
v1.adicionar("muito-baixa","trapezoidal",[0,0,25,45])
v1.adicionar("baixa","triangular",[30,50,70])
v1.adicionar("media","triangular",[55,75,110])
v1.adicionar("media-alta","triangular",[110,165,185])
v1.adicionar("alta","triangular",[160,190,210])
v1.adicionar("muito-alta","trapezoidal",[200,210,300,300])

# Linguistic variable "Variação" (growth rate).
v2 = variavellinguistica("Variação",np.arange(-4,10.5,0.5))
v2.adicionar("baixa-negativa","triangular",[-3,0,0])
v2.adicionar("baixa-positiva","triangular",[0,0,3.4])
v2.adicionar("media","triangular",[2,5,8])
v2.adicionar("alta","trapezoidal",[6,9,12,12])

# Euler integration step and the initial populations to simulate.
h = 0.5
x = [2,4,8,16]
# Rule base: population level -> growth rate ("então" = "then").
br = ["muito-baixa então media",
      "baixa então alta",
      "media então alta",
      "media-alta então media",
      "alta então baixa-positiva",
      "muito-alta então baixa-negativa"]
aux=[]
retornos=[]
it = [i for i in range(250)]
# NOTE(review): the inner loop reuses the name ``i`` and shadows the outer
# loop variable; it works because x0 is captured before the inner loop runs.
for i in x:
    x0 = i
    for i in it:
        # evaluate the controller at the current population and take one
        # explicit Euler step: x0 <- x0 + h * f(x0)
        f = controlador(br,[v1,v2],[x0])
        aux.append( x0 )
        x0 = x0 + h*f.mamdani()
    retornos.append( aux )
    aux=[]
# Write one row per iteration: iteration index + the four trajectories.
for i in range( len(retornos[0]) ):entrada.write(str(it[i]) + "\t" + str(retornos[0][i]) + "\t"
                                                 + str(retornos[1][i]) + "\t" + str(retornos[2][i])
                                                 + "\t" + str(retornos[3][i]) + "\n")
entrada.close()
| StarcoderdataPython |
9681894 | """
"""
# Built-in
# Libs
from tqdm import tqdm
# Pytorch
import torch
from torch import nn
from torch.nn import functional as F
# Own modules
from mrs_utils import misc_utils
from network.backbones import encoders
from network import base_model, emau, ocr
class PSPDecoder(nn.Module):
    """
    This module defines the decoder of the PSPNet
    The main body of the code comes from https://github.com/Lextal/pspnet-pytorch/blob/master/pspnet.py
    """
    def __init__(self, n_class, in_chan, out_chan=1024, bin_sizes=(1, 2, 3, 6),
                 drop_rate=0.3):
        """
        :param n_class: number of output classes (channels of the final logits)
        :param in_chan: channels of the incoming encoder feature map
        :param out_chan: channels after the pyramid-pooling bottleneck
        :param bin_sizes: spatial sizes of the pyramid pooling branches
        :param drop_rate: dropout probability applied after the bottleneck
            (half of it is used between the upsampling steps)
        """
        super(PSPDecoder, self).__init__()
        self.stages = nn.ModuleList([self.make_stage(in_chan, size) for size in bin_sizes])
        # each pooled branch keeps in_chan channels; +1 for the raw feature map
        self.bottleneck = nn.Conv2d(in_chan * (len(bin_sizes) + 1), out_chan, kernel_size=1)
        self.relu = nn.ReLU(inplace=True)
        self.drop_1 = nn.Dropout2d(p=drop_rate)
        # three 2x upsampling blocks -> overall 8x spatial upsampling
        self.up_1 = PSPUpsample(out_chan, out_chan//4)
        self.up_2 = PSPUpsample(out_chan//4, out_chan//16)
        self.up_3 = PSPUpsample(out_chan//16, out_chan//16)
        self.drop_2 = nn.Dropout2d(p=drop_rate/2)
        self.final = nn.Conv2d(out_chan//16, n_class, kernel_size=1)

    @staticmethod
    def make_stage(features, size):
        """Build one pyramid branch: adaptive-pool to (size, size) + 1x1 conv."""
        prior = nn.AdaptiveAvgPool2d(output_size=(size, size))
        conv = nn.Conv2d(features, features, kernel_size=1, bias=False)
        return nn.Sequential(prior, conv)

    def forward(self, x):
        """Fuse pyramid-pooled context with x and upsample 8x to class logits."""
        h, w = x.size(2), x.size(3)
        # align_corners=False is the framework default for bilinear mode, but
        # passing it explicitly silences the UserWarning newer PyTorch emits.
        priors = [F.interpolate(input=stage(x), size=(h, w), mode='bilinear',
                                align_corners=False) for stage in self.stages] + [x]
        bottle = self.bottleneck(torch.cat(tuple(priors), 1))
        bottle = self.drop_1(bottle)
        up = self.up_1(bottle)
        up = self.drop_2(up)
        up = self.up_2(up)
        up = self.drop_2(up)
        up = self.up_3(up)
        up = self.drop_2(up)
        return self.final(up)
class PSPUpsample(nn.Module):
    """Doubles the spatial resolution: bilinear 2x upsample + conv-BN-PReLU."""

    def __init__(self, in_channels, out_channels):
        """
        :param in_channels: channels of the incoming feature map
        :param out_channels: channels after the 3x3 convolution
        """
        super().__init__()
        self.conv = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, 3, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.PReLU()
        )

    def forward(self, x):
        """Upsample x to twice its height/width, then refine with conv-BN-PReLU."""
        h, w = 2 * x.size(2), 2 * x.size(3)
        # align_corners=False is the framework default for bilinear mode, but
        # passing it explicitly silences the UserWarning newer PyTorch emits.
        p = F.interpolate(input=x, size=(h, w), mode='bilinear', align_corners=False)
        return self.conv(p)
class PSPNet(base_model.Base):
    """
    PSPNet segmentation model: a configurable encoder backbone followed by a
    pyramid-scene-parsing decoder, with optional EMAU / OCR context modules
    and an optional auxiliary classification head.
    (The original docstring called this "Unet", which was a copy-paste error.)
    """
    def __init__(self, n_class, out_chan=1024, bin_sizes=(1, 2, 3, 6), drop_rate=0.3,
                 encoder_name='vgg16', pretrained=True, aux_loss=False, use_emau=False, use_ocr=False):
        """
        Initialize the Unet model
        :param n_class: the number of class
        :param encoder_name: name of the encoder, could be 'base', 'vgg16'
        :param pretrained: if True, load the weights from pretrained model
        :param aux_loss: if True, will create a classification branch for extracted features
        :param use_emau: if True or int, the an EMAU will be appended at the end of the encoder
        """
        super(PSPNet, self).__init__()
        self.n_class = n_class
        self.aux_loss = aux_loss
        self.use_emau = use_emau
        self.use_ocr = use_ocr
        self.encoder_name = misc_utils.stem_string(encoder_name)
        # last two stages keep stride 1 so the encoder output stays at 1/8 scale
        strides = (2, 2, 2, 1, 1)
        self.encoder = encoders.models(self.encoder_name, pretrained, strides, False)
        if self.use_emau:
            # an int value for use_emau selects the EMAU basis size; True -> 64
            if isinstance(self.use_emau, int):
                c = self.use_emau
            else:
                c = 64
            self.encoder.emau = emau.EMAU(self.encoder.chans[0], c)
        if self.use_ocr:
            self.encoder.ocr = ocr.OCRModule(self.n_class, self.encoder.chans[0], self.encoder.chans[0], self.encoder.chans[0])
        if self.aux_loss:
            # small MLP over globally pooled encoder features
            self.cls = nn.Sequential(
                nn.Linear(self.encoder.chans[0], 256),
                nn.ReLU(inplace=True),
                nn.Linear(256, self.n_class)
            )
        else:
            self.cls = None
        self.decoder = PSPDecoder(n_class, self.encoder.chans[0], out_chan, bin_sizes, drop_rate)

    def forward(self, x):
        """Run the full model.

        Returns a dict with key 'pred' (segmentation logits) and, depending on
        configuration, 'mu' (EMAU bases), 'region' (OCR region map upsampled to
        the input size) and 'aux' (auxiliary classification logits).
        """
        input_size = x.shape[2:]
        output_dict = dict()
        ftr = self.encoder(x)
        if self.use_emau:
            ftr, output_dict['mu'] = self.encoder.emau(ftr)
        if self.use_ocr:
            region, ftr = self.encoder.ocr(ftr, ftr)
            output_dict['region'] = F.interpolate(region, size=input_size, mode='bilinear', align_corners=False)
        if self.aux_loss:
            output_dict['aux'] = self.cls(F.adaptive_max_pool2d(input=ftr, output_size=(1, 1)).view(-1, ftr.size(1)))
        pred = self.decoder(ftr)
        output_dict['pred'] = pred
        return output_dict
if __name__ == '__main__':
    # Shape smoke test: run a batch through a ResNet50-backed PSPNet with the
    # OCR head enabled and print the prediction / region logit shapes.
    model = PSPNet(2, encoder_name='resnet50', use_ocr=True)
    x = torch.randn((5, 3, 512, 512))
    output_dict = model(x)
    y, region = output_dict['pred'], output_dict['region']
    print(y.shape, region.shape)
| StarcoderdataPython |
3475320 | <gh_stars>0
# Generated by Django 3.0.5 on 2020-05-11 10:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 3.0.5: creates the ViewLog model, recording
    # one timestamped row each time a user views a recipe.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('cookbook', '0043_auto_20200507_2302'),
    ]

    operations = [
        migrations.CreateModel(
            name='ViewLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('recipe', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cookbook.Recipe')),
            ],
        ),
    ]
| StarcoderdataPython |
205416 | """
Given a m * n matrix of ones and zeros, return how many square submatrices have all ones.
Example 1:
Input: matrix =
[
[0,1,1,1],
[1,1,1,1],
[0,1,1,1]
]
Output: 15
Explanation:
There are 10 squares of side 1.
There are 4 squares of side 2.
There is 1 square of side 3.
Total number of squares = 10 + 4 + 1 = 15.
Example 2:
Input: matrix =
[
[1,0,1],
[1,1,0],
[1,1,0]
]
Output: 7
Explanation:
There are 6 squares of side 1.
There is 1 square of side 2.
Total number of squares = 6 + 1 = 7.
Constraints:
1 <= arr.length <= 300
1 <= arr[0].length <= 300
0 <= arr[i][j] <= 1
"""
class Solution:
    def countSquares(self, matrix: list[list[int]]) -> int:
        """Count square submatrices whose cells are all ones.

        DP over one rolling row: curr[c] is the side of the largest all-ones
        square whose bottom-right corner is the current cell; a cell with
        value k is the corner of exactly k squares (sides 1..k), so summing
        every DP value counts all squares.

        Fixes vs. the original: the ``List`` annotation was undefined
        (``from typing import List`` was missing — NameError at class
        definition); the input matrix is no longer mutated; empty inputs
        are handled. O(rows*cols) time, O(cols) extra space.
        """
        if not matrix or not matrix[0]:
            return 0
        cols = len(matrix[0])
        prev = [0] * cols          # DP values of the previous row
        total = 0
        for row in matrix:
            curr = [0] * cols
            for c, val in enumerate(row):
                if val:
                    if c == 0:
                        curr[c] = 1
                    else:
                        # Limited by the squares ending above, left, and above-left.
                        curr[c] = min(prev[c - 1], curr[c - 1], prev[c]) + 1
                    total += curr[c]
            prev = curr
        return total
| StarcoderdataPython |
6518475 | from typing import List, Optional
from eth_utils import to_checksum_address
from ledgereth.comms import (
Dongle,
decode_response_address,
dongle_send_data,
init_dongle,
)
from ledgereth.constants import (
DEFAULT_ACCOUNTS_FETCH,
LEGACY_ACCOUNTS,
MAX_ACCOUNTS_FETCH,
)
from ledgereth.objects import LedgerAccount
from ledgereth.utils import parse_bip32_path
def get_account_by_path(
    path_string: str, dongle: Optional[Dongle] = None
) -> LedgerAccount:
    """Return an account for a specific `BIP-44`_ derivation path

    :param path_string: (:code:`str`) - HID derivation path for the account to
        sign with.
    :param dongle: (:class:`ledgerblue.Dongle.Dongle`) -  The Dongle instance to
        use to communicate with the Ledger device
    :return: :class:`ledgereth.objects.LedgerAccount` instance for the given
        account

    .. _`BIP-44`: https://en.bitcoin.it/wiki/BIP_0044
    """
    dongle = init_dongle(dongle)
    path = parse_bip32_path(path_string)
    lc = len(path).to_bytes(1, "big")
    # APDU body: one byte with the number of path components (4 bytes each),
    # followed by the packed path.
    # NOTE(review): Lc is len(path) while `data` is len(path) + 1 bytes long
    # (count byte + packed path) — confirm against the Ledger Ethereum app's
    # APDU spec whether Lc should include the count byte.
    data = (len(path) // 4).to_bytes(1, "big") + path
    response = dongle_send_data(dongle, "GET_ADDRESS_NO_CONFIRM", data, Lc=lc)
    return LedgerAccount(path_string, decode_response_address(response))
def get_accounts(
    dongle: Optional[Dongle] = None, count: int = DEFAULT_ACCOUNTS_FETCH
) -> List[LedgerAccount]:
    """Fetch the first *count* accounts derived on the Ledger device.

    :param dongle: (:class:`ledgerblue.Dongle.Dongle`) - The Dongle instance to
        use to communicate with the Ledger device
    :param count: (:code:`int`) - Amount of accounts to return
    :return: list of :class:`ledgereth.objects.LedgerAccount` instances found on
        the ledger
    """
    dongle = init_dongle(dongle)
    # Legacy installs used a fixed account with varying change index; modern
    # ones follow the standard BIP-44 account-per-index layout.
    template = "44'/60'/0'/{}" if LEGACY_ACCOUNTS else "44'/60'/{}'/0/0"
    return [
        get_account_by_path(template.format(index), dongle)
        for index in range(count)
    ]
def find_account(
    address: str, dongle: Optional[Dongle] = None, count: int = MAX_ACCOUNTS_FETCH
) -> Optional[LedgerAccount]:
    """Look up the Ledger account matching *address*, or None if not found.

    :param address: (:class:`str`) - An address to look up
    :param dongle: (:class:`ledgerblue.Dongle.Dongle`) - The Dongle instance to
        use to communicate with the Ledger device
    :param count: (:code:`int`) - How deep in the derivation sequence to look
    :return: :class:`ledgereth.objects.LedgerAccount` instance if found on the
        Ledger
    """
    wanted = to_checksum_address(address)
    return next(
        (acct for acct in get_accounts(dongle, count) if acct.address == wanted),
        None,
    )
| StarcoderdataPython |
84325 | <gh_stars>0
from apps.Util_apps.LogProject import logging
import apps.Util_apps.Util as Util
def funcion_si_respuesta_es_correcta(response_json):
    """Success callback: log the decoded JSON body of the HTTP response."""
    logging.info(response_json)
def funcion_si_respuesta_no_es_correcta(url):
    """Error callback: log that the request to *url* failed.

    (The log message itself is user-facing Spanish output and is left as-is.)
    """
    logging.error("No se ha podido realizar la peticion a la url {}".format(url))
if __name__ == "__main__":
    # Demo: issue a GET against the random-user API; the callbacks above
    # handle the success and failure paths.
    Util.generate_request_get(url="https://randomuser.me/api",
                              success_callback=funcion_si_respuesta_es_correcta,
                              error_callback=funcion_si_respuesta_no_es_correcta)
12816860 | from routeCalculator import calculateBestRoute
# Demo: print the best route computed for three sample (x, y) waypoints.
print(calculateBestRoute([(1, 2), (2, 3), (7, 1)]))
| StarcoderdataPython |
244284 |
import unittest
import math
from .. import Point, Triangle, Segment, Circle
from ..exceptions import *
class TriangleTestCase(unittest.TestCase):
    def assertAlmostEqual(self, test_value, known_value):
        # NOTE(review): shadows unittest.TestCase.assertAlmostEqual with a
        # different signature (no `places`/`delta`); compares the two values
        # after rounding both to 10 decimal places.
        self.assertTrue(
            round(float(test_value), 10) == round(float(known_value), 10))
    def assertAlmostNotEqual(self, test_value, known_value):
        # Inverse of the assertAlmostEqual helper: the values must still
        # differ after rounding to 10 decimal places.
        self.assertTrue(
            round(float(test_value), 10) != round(float(known_value), 10))
    def assertIsTriangle(self, obj):
        """Assert *obj* is a Triangle with three distinct vertices and three sides."""
        self.assertIsInstance(obj, Triangle)
        self.assertEqual(len(obj), 3)
        self.assertEqual(len(obj.vertices), 3)
        self.assertEqual(len(obj.sides), 3)
        self.assertEqual(len(set(obj.vertices)), 3)
    def assertTriangleVerticesEqual(self, triangle, vertices):
        """Assert the triangle's vertices equal *vertices*, position by position."""
        for a, b in zip(triangle.vertices, vertices):
            self.assertEqual(a, b)
    def testTriangleCreationWithoutArguments(self):
        '''
        A default Triangle uses the vertices (0,0,0), (1,0,0) and (0,1,0).
        '''
        t = Triangle()
        self.assertIsTriangle(t)
        self.assertTriangleVerticesEqual(t, [[0, 0, 0],
                                             [1, 0, 0],
                                             [0, 1, 0]])
    def testTriangleClassmethodUnit(self):
        '''
        Triangle.unit() uses the three unit points as its vertices.
        '''
        t = Triangle.unit()
        self.assertIsTriangle(t)
        self.assertTriangleVerticesEqual(t, [[1, 0, 0],
                                             [0, 1, 0],
                                             [0, 0, 1]])
    def testTriangleCreationWithArguments(self):
        '''
        Vertices can be supplied positionally, as a single list, or as
        keyword arguments in any order.
        '''
        i, j, k = Point.units()
        t = Triangle(i, j, k)
        self.assertTriangleVerticesEqual(t, (i, j, k))
        t = Triangle([i, j, k])
        self.assertTriangleVerticesEqual(t, (i, j, k))
        t = Triangle(A=i, B=j, C=k)
        self.assertTriangleVerticesEqual(t, (i, j, k))
        t = Triangle(B=j, A=i, C=k)
        self.assertTriangleVerticesEqual(t, (i, j, k))
        t = Triangle(C=k, B=j, A=i)
        self.assertTriangleVerticesEqual(t, (i, j, k))
        # Mixed positional/keyword constructor variants — apparently not
        # supported yet:
        # t = Triangle(i, j, C=k)
        # self.assertTriangleVerticesEqual(t, (i, j, k))
        #
        # t = Triangle(i, B=j, C=k)
        # self.assertTriangleVerticesEqual(t, (i, j, k))
        #
        # t = Triangle(A=i, B=j, C=k)
        # self.assertTriangleVerticesEqual(t, (i, j, k))
        #
        # t = Triangle(None, A=i, B=j, C=k)
        # self.assertTriangleVerticesEqual(t, (i, j, k))
        #
        # t = Triangle(None, None, A=i, B=j, C=k)
        # self.assertTriangleVerticesEqual(t, (i, j, k))
        #
        # t = Triangle(None, None, None, A=i, B=j, C=k)
        # self.assertTriangleVerticesEqual(t, (i, j, k))
        #
        # t = Triangle([None, None, None], None, A=i, B=j, C=k)
        # self.assertTriangleVerticesEqual(t, (i, j, k))
    def testTriangleProperty_Getters(self):
        """Vertex (A/B/C) and vertex-pair/triple getters return the expected points."""
        i, j, k = Point.units()
        t = Triangle(i, j, k)
        self.assertEqual(t.A, i)
        self.assertEqual(t.B, j)
        self.assertEqual(t.C, k)
        self.assertSequenceEqual(t.AB, (i, j))
        self.assertSequenceEqual(t.BA, (j, i))
        self.assertSequenceEqual(t.BC, (j, k))
        self.assertSequenceEqual(t.CB, (k, j))
        self.assertSequenceEqual(t.AC, (i, k))
        self.assertSequenceEqual(t.CA, (k, i))
        self.assertSequenceEqual(t.ABC, (i, j, k))
        self.assertSequenceEqual(t.vertices, (i, j, k))
    def testTrianglePropertySetters(self):
        """Each vertex property (A, B, C) can be assigned individually."""
        i, j, k = Point.units(5)
        t = Triangle()
        self.assertNotEqual(t.A, i)
        t.A = i
        self.assertEqual(t.A, i)
        self.assertNotEqual(t.B, j)
        t.B = j
        self.assertEqual(t.B, j)
        self.assertNotEqual(t.C, k)
        t.C = k
        self.assertEqual(t.C, k)
    def testTriangleProperty_MultiSetters(self):
        """Two vertices can be assigned at once via the pair properties (AB, BA, ...)."""
        i, j, k = Point.units(5)
        t = Triangle()
        self.assertNotEqual(t.A, i)
        self.assertNotEqual(t.B, j)
        t.AB = i, j
        self.assertSequenceEqual(t.AB, (i, j))
        # Length validation of the assigned tuple is not enforced yet:
        # with self.assertRaises(ValueError):
        #     t.AB = i, j, k
        t.BA = i, j
        self.assertSequenceEqual(t.BA, (i, j))
        # with self.assertRaises(ValueError):
        #     t.BA = i, j, k
        t.AC = i, j
        self.assertSequenceEqual(t.AC, (i, j))
        # with self.assertRaises(ValueError):
        #     t.AC = i, j, k
        t.CA = i, j
        self.assertSequenceEqual(t.CA, (i, j))
        # with self.assertRaises(ValueError):
        #     t.CA = i, j, k
        t.BC = i, j
        self.assertSequenceEqual(t.BC, (i, j))
        # with self.assertRaises(ValueError):
        #     t.BC = i, j, k
        t.CB = i, j
        self.assertSequenceEqual(t.CB, (i, j))
        # with self.assertRaises(ValueError):
        #     t.CB = i, j, k
    def testTriangleProperty_edges(self):
        """edges() returns the AB, BC, AC segments in that order."""
        t = Triangle()
        sAB = Segment(t.AB)
        sBC = Segment(t.BC)
        sAC = Segment(t.AC)
        self.assertSequenceEqual(t.edges(), [sAB, sBC, sAC])
    def testTriangleProperty_Hypotenuse(self):
        """hypotenuse is the longest edge and is read-only."""
        t = Triangle()
        h = max(t.edges(), key=lambda s: s.length)
        self.assertEqual(t.hypotenuse.length, h.length)
        with self.assertRaises(AttributeError):
            t.hypotenuse = Segment([14, 17], [22, 44])
    def testTriangleProperty_Circumcenter(self):
        """circumcenter is equidistant from all three vertices and read-only."""
        t = Triangle()
        cc = t.circumcenter
        dA = t.A.distance(cc)
        dB = t.B.distance(cc)
        dC = t.C.distance(cc)
        self.assertTrue(dA == dB == dC)
        with self.assertRaises(AttributeError):
            t.circumcenter = Point()
    def testTriangleProperty_Orthocenter(self):
        """orthocenter is not implemented yet (raises) and is read-only."""
        t = Triangle()
        with self.assertRaises(NotImplementedError):
            oc = t.orthocenter
        with self.assertRaises(AttributeError):
            t.orthocenter = [0, 0]
    def testTriangleProperty_Circumradius(self):
        """circumradius equals each vertex's distance to the circumcenter; read-only."""
        t = Triangle()
        cc = t.circumcenter
        cr = t.circumradius
        for p in t.ABC:
            self.assertEqual(p.distance(cc), cr)
        with self.assertRaises(AttributeError):
            t.circumradius = 12
    def testTriangleProperty_Circumcircle(self):
        """circumcircle passes through all three vertices and is read-only."""
        t = Triangle()
        c = t.circumcircle
        # for p in t.ABC:
        #     self.assertEqual(p.distance(c.center), c.radius)
        self.assertTrue(all([p.distance(c.center) == c.radius for p in t.ABC]))
        with self.assertRaises(AttributeError):
            t.circumcircle = Circle()
    def testTriangleProperties_AlphaBetaGamma(self):
        """Interior angles sum to pi and are read-only."""
        t = Triangle()
        self.assertEqual(sum([t.alpha, t.beta, t.gamma]), math.pi)
        self.assertEqual(sum(t.angles), math.pi)
        with self.assertRaises(AttributeError):
            t.alpha = 11
        with self.assertRaises(AttributeError):
            t.beta = 11
        with self.assertRaises(AttributeError):
            t.gamma = 11
    def testTriangleProperties_abc(self):
        """Side-length properties a/b/c of the default triangle; all read-only."""
        t = Triangle()
        self.assertEqual(t.a, t.hypotenuse.length)
        self.assertEqual(t.b, 1)
        self.assertEqual(t.c, 1)
        with self.assertRaises(AttributeError):
            t.a = 11
        with self.assertRaises(AttributeError):
            t.b = 11
        with self.assertRaises(AttributeError):
            t.c = 11
    def testTriangleProperties_sides(self):
        """sides lists the BC, AC, AB edge lengths in that order; read-only."""
        t = Triangle()
        self.assertEqual(len(t.sides), 3)
        self.assertEqual(t.sides[0], Segment(t.BC).length)
        self.assertEqual(t.sides[1], Segment(t.AC).length)
        self.assertEqual(t.sides[2], Segment(t.AB).length)
        with self.assertRaises(AttributeError):
            t.sides = [5, 6, 7]
    def testTriangleProperty_isCCW_flip_ccw_area(self):
        """Winding (isCCW/ccw) and area agree; half the ccw value is the area."""
        t = Triangle()
        for edge in ['AB', 'BC', 'AC']:
            self.assertTrue(t.isCCW)
            self.assertTrue(t.ccw > 0)
            # Edge swapping (winding flip) is not exercised yet:
            # t.swap(edge, inplace=True)
            # self.assertFalse(t.isCCW)
            # self.assertTrue(t.ccw < 0)
            # t.swap(edge, inplace=True)
            # self.assertTrue(t.isCCW)
        self.assertAlmostEqual(t.ccw / 2, t.heronsArea)
        self.assertAlmostEqual(t.area, t.heronsArea)
        with self.assertRaises(AttributeError):
            t.ccw = 55
        with self.assertRaises(AttributeError):
            t.isCCW = False
    def testTriangleProperty_perimeter_semiperimeter(self):
        """perimeter is the sum of the pairwise vertex distances; both read-only."""
        t = Triangle()
        p = t.A.distance(t.B) + t.B.distance(t.C) + t.C.distance(t.A)
        self.assertEqual(t.perimeter, p)
        self.assertEqual(t.semiperimeter, p / 2)
        with self.assertRaises(AttributeError):
            t.perimeter = 11
        with self.assertRaises(AttributeError):
            t.semiperimeter = 11
    def testTriangleProperty_isEquilateral_isIsoceles_isAcute(self):
        """Triangle.withAngles is not implemented yet; the checks inside the
        assertRaises block run once it is (and the wrapper is removed)."""
        with self.assertRaises(NotImplementedError):
            t = Triangle.withAngles(alpha=60, beta=60, gamma=60, inDegrees=True)
            self.assertTrue(t.isEquilateral)
            self.assertTrue(t.isIsosceles)
            self.assertTrue(t.isAcute)
            self.assertAlmostEqual(t.a, t.b)
            self.assertAlmostEqual(t.b, t.c)
            self.assertAlmostEqual(t.a, t.c)
            self.assertAlmostEqual(t.alpha, t.beta)
            self.assertAlmostEqual(t.beta, t.gamma)
            self.assertAlmostEqual(t.alpha, t.gamma)
            self.assertTrue(all([a < (math.pi / 2) for a in t.angles]))
    def testTriangleProperty_isIsosceles(self):
        """Pending Triangle.withAngles implementation (currently only the raise fires)."""
        with self.assertRaises(NotImplementedError):
            t = Triangle.withAngles(alpha=40, beta=40, gamma=100, inDegrees=True)
            self.assertTrue(t.isIsosceles)
            self.assertFalse(t.isEquilateral)
            # NOTE(review): a triangle with a 100-degree angle is obtuse, not
            # acute — revisit this assertion when withAngles is implemented.
            self.assertTrue(t.isAcute)
    def testTriangleProperty_isScalene(self):
        """Pending Triangle.withSides implementation (currently only the raise fires)."""
        with self.assertRaises(NotImplementedError):
            # NOTE(review): sides 1, 2, 3 are degenerate (1 + 2 == 3) —
            # revisit when withSides is implemented.
            t = Triangle.withSides(a=1, b=2, c=3)
            self.assertTrue(t.isScalene)
    def testTriangleProperty_isRight(self):
        """Pending Triangle.withAngles implementation (currently only the raise fires)."""
        with self.assertRaises(NotImplementedError):
            t = Triangle.withAngles(alpha=90, beta=45, gamma=45, inDegrees=True)
            self.assertTrue(t.isRight)
            # For a right triangle, the circumcenter sits on the hypotenuse midpoint.
            self.assertTrue(t.circumcenter == t.hypotenuse.midpoint)
    def testTriangleProperty_isObtuse(self):
        """Pending Triangle.withAngles implementation (currently only the raise fires)."""
        with self.assertRaises(NotImplementedError):
            t = Triangle.withAngles(alpha=100, beta=40, gamma=40, inDegrees=True)
            self.assertTrue(t.isObtuse)
    def testTriangleOperation__eq__(self):
        """Two distinct default triangles compare equal but are not identical."""
        t = Triangle()
        u = Triangle()
        self.assertFalse(t is u)
        self.assertTrue(t == u)
    def testTriangleMethod_congruent(self):
        """A translated copy of a triangle is congruent to it (both directions)."""
        t = Triangle()
        p = Point(10, 10)
        u = Triangle(t.A + p, t.B + p, t.C + p)
        self.assertTrue(t.congruent(u))
        self.assertTrue(u.congruent(t))
    def testTriangleOperation__contains__(self):
        """`in` tests point containment: circumcenter inside, a far point outside."""
        t = Triangle()
        self.assertTrue(t.circumcenter in t)
        self.assertFalse(t.A + Point(-10, -10) in t)
def testTriangleMethod_doesIntersect(self):
t = Triangle()
u = Triangle()
| StarcoderdataPython |
12853786 | import yahoo_fin.stock_info as si
import pandas as pd
import os
def download_data(etfs, time_frames):
    """Download OHLCV history for every ticker/interval pair and save to CSV.

    :param etfs: iterable of ticker symbols to fetch from Yahoo Finance.
    :param time_frames: iterable of yahoo_fin interval codes ("1d", "1wk",
        anything else is treated as monthly).

    Files are written to ./Data/rawdata/<ticker>_<daily|weekly|monthly>.csv,
    indexed by date.
    """
    # Map yahoo_fin interval codes to the filename suffix.
    interval_names = {"1d": "daily", "1wk": "weekly"}
    # BUG FIX: the original created "./Data/rawdata" but saved to
    # "./data/rawdata" (lowercase), which fails on case-sensitive
    # filesystems. One consistent path is used for both now.
    out_dir = "./Data/rawdata"
    os.makedirs(out_dir, exist_ok=True)
    for ticker in etfs:
        for interval in time_frames:
            print("This is a ticker ", ticker, interval)
            historical_price = si.get_data(ticker, interval=interval)
            # The 'ticker' column just repeats the symbol; the date index suffices.
            historical_price = historical_price.drop(["ticker"], axis=1)
            historical_price.index.name = "date"
            # Drop rows with missing values if any exist; otherwise drop the
            # last row, which is the still-incomplete current period.
            # (This branch was duplicated per interval in the original.)
            if historical_price.isnull().any().sum() > 0:
                historical_price.dropna(how='any', inplace=True)
            else:
                historical_price = historical_price.iloc[:-1]
            name = interval_names.get(interval, "monthly")
            historical_price.to_csv("{}/{}_{}.csv".format(out_dir, ticker, name))
3477117 | import torch.nn as nn
import torch.nn.functional as F
import torch
##############################
# U-NET
##############################
class UNetDown(nn.Module):
    """Encoder block: strided 4x4 conv (halves H and W), optional BatchNorm,
    LeakyReLU, and optional dropout."""

    def __init__(self, in_size, out_size, normalize=True, dropout=0.0):
        super(UNetDown, self).__init__()
        layers = [nn.Conv2d(in_size, out_size, 4, stride=2, padding=1, bias=False)]
        if normalize:
            layers.append(nn.BatchNorm2d(out_size, 0.8))
        layers.append(nn.LeakyReLU(0.2))
        if dropout:
            layers.append(nn.Dropout(dropout))
        self.model = nn.Sequential(*layers)

    def forward(self, x):
        return self.model(x)
class UNetUp(nn.Module):
    """Decoder block: transposed conv (doubles H and W), BatchNorm, ReLU,
    optional dropout, then channel-wise concatenation with the matching
    encoder skip tensor."""

    def __init__(self, in_size, out_size, dropout=0.0):
        super(UNetUp, self).__init__()
        layers = [
            nn.ConvTranspose2d(in_size, out_size, 4, stride=2, padding=1, bias=False),
            nn.BatchNorm2d(out_size, 0.8),
            nn.ReLU(inplace=True),
        ]
        if dropout:
            layers.append(nn.Dropout(dropout))
        self.model = nn.Sequential(*layers)

    def forward(self, x, skip_input):
        upsampled = self.model(x)
        return torch.cat((upsampled, skip_input), 1)
class Generator(nn.Module):
    """U-Net generator that additionally injects a low-resolution image
    (x_lr) before the third downsampling stage and mirrors the encoder
    activations as skip connections on the way back up."""

    def __init__(self, input_shape):
        super(Generator, self).__init__()
        channels, _, _ = input_shape
        self.down1 = UNetDown(channels, 64, normalize=False)
        self.down2 = UNetDown(64, 128)
        # x_lr is concatenated before down3, hence the extra input channels.
        self.down3 = UNetDown(128 + channels, 256, dropout=0.5)
        self.down4 = UNetDown(256, 512, dropout=0.5)
        self.down5 = UNetDown(512, 512, dropout=0.5)
        self.down6 = UNetDown(512, 512, dropout=0.5)
        self.up1 = UNetUp(512, 512, dropout=0.5)
        self.up2 = UNetUp(1024, 512, dropout=0.5)
        self.up3 = UNetUp(1024, 256, dropout=0.5)
        self.up4 = UNetUp(512, 128)
        # The skip from down2 carries the injected x_lr channels as well.
        self.up5 = UNetUp(256 + channels, 64)
        self.final = nn.Sequential(
            nn.Upsample(scale_factor=2),
            nn.Conv2d(128, channels, 3, 1, 1),
            nn.Tanh(),
        )

    def forward(self, x, x_lr):
        """Encode x (mixing in x_lr at 1/4 resolution), then decode with skips."""
        enc1 = self.down1(x)
        enc2 = torch.cat((self.down2(enc1), x_lr), 1)
        enc3 = self.down3(enc2)
        enc4 = self.down4(enc3)
        enc5 = self.down5(enc4)
        enc6 = self.down6(enc5)
        dec = self.up1(enc6, enc5)
        dec = self.up2(dec, enc4)
        dec = self.up3(dec, enc3)
        dec = self.up4(dec, enc2)
        dec = self.up5(dec, enc1)
        return self.final(dec)
class Discriminator(nn.Module):
    """PatchGAN discriminator: maps an image to a grid of per-patch
    real/fake scores rather than a single scalar."""

    def __init__(self, input_shape):
        super(Discriminator, self).__init__()
        channels, height, width = input_shape
        # Three stride-2 stages shrink each spatial dimension by 2**3.
        self.output_shape = (1, height // 2 ** 3, width // 2 ** 3)

        def block(in_filters, out_filters, stride, normalize):
            """One stage: 3x3 conv, optional InstanceNorm, LeakyReLU."""
            stage = [nn.Conv2d(in_filters, out_filters, 3, stride, 1)]
            if normalize:
                stage.append(nn.InstanceNorm2d(out_filters))
            stage.append(nn.LeakyReLU(0.2, inplace=True))
            return stage

        layers = []
        in_filters = channels
        for out_filters, stride, normalize in [(64, 2, False), (128, 2, True),
                                               (256, 2, True), (512, 1, True)]:
            layers += block(in_filters, out_filters, stride, normalize)
            in_filters = out_filters
        # Final 1-channel score map, same spatial size as the last stage.
        layers.append(nn.Conv2d(in_filters, 1, 3, 1, 1))
        self.model = nn.Sequential(*layers)

    def forward(self, img):
        return self.model(img)
| StarcoderdataPython |
4854458 | # -----------------------------------------------------------------------------
# Matplotlib cheat sheet
# Released under the BSD License
# -----------------------------------------------------------------------------
# Scripts to generate all the basic plots
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(0.4,0.4))
mpl.rcParams['axes.linewidth'] = 0.5
mpl.rcParams['xtick.major.size'] = 0.0
mpl.rcParams['ytick.major.size'] = 0.0
d = 0.01  # margin (in axes fraction) around the single axes
ax = fig.add_axes([d,d,1-2*d,1-2*d])

# Step plot
# -----------------------------------------------------------------------------
X = np.linspace(0, 10, 16)
Y = 4+2*np.sin(2*X)
ax.step(X, Y, color="C1", linewidth=0.75)
ax.set_xlim(0, 8), ax.set_xticks(np.arange(1,8))
ax.set_ylim(0, 8), ax.set_yticks(np.arange(1,8))
ax.grid(linewidth=0.125)
# NOTE: all savefig calls assume the ../figures directory already exists.
plt.savefig("../figures/advanced-step.pdf")
ax.clear()

# Violin plot
# -----------------------------------------------------------------------------
np.random.seed(10)
D = np.random.normal((3,5,4), (0.75, 1.00, 0.75), (200,3))
VP = ax.violinplot(D, [2,4,6], widths=1.5,
                   showmeans=False, showmedians=False, showextrema=False)
for body in VP['bodies']:
    body.set_facecolor('C1')
    body.set_alpha(1)
ax.set_xlim(0, 8), ax.set_xticks(np.arange(1,8))
ax.set_ylim(0, 8), ax.set_yticks(np.arange(1,8))
ax.set_axisbelow(True)
ax.grid(linewidth=0.125)
plt.savefig("../figures/advanced-violin.pdf")
ax.clear()

# Boxplot
# -----------------------------------------------------------------------------
np.random.seed(10)
D = np.random.normal((3,5,4), (1.25, 1.00, 1.25), (100,3))
VP = ax.boxplot(D, positions=[2,4,6], widths=1.5, patch_artist=True,
                showmeans=False, showfliers=False,
                medianprops = {"color": "white",
                               "linewidth": 0.25},
                boxprops = {"facecolor": "C1",
                            "edgecolor": "white",
                            "linewidth": 0.25},
                whiskerprops = {"color": "C1",
                                "linewidth": 0.75},
                capprops = {"color": "C1",
                            "linewidth": 0.75})
ax.set_xlim(0, 8), ax.set_xticks(np.arange(1,8))
ax.set_ylim(0, 8), ax.set_yticks(np.arange(1,8))
ax.set_axisbelow(True)
ax.grid(linewidth=0.125)
plt.savefig("../figures/advanced-boxplot.pdf")
ax.clear()

# Barbs plot
# -----------------------------------------------------------------------------
np.random.seed(1)
X = [[2,4,6]]
Y = [[1.5,3,2]]
U = -np.ones((1,3))*0
V = -np.ones((1,3))*np.linspace(50,100,3)
ax.barbs(X,Y,U,V, barbcolor="C1", flagcolor="C1", length=5, linewidth=0.5)
ax.set_xlim(0, 8), ax.set_xticks(np.arange(1,8))
ax.set_ylim(0, 8), ax.set_yticks(np.arange(1,8))
ax.set_axisbelow(True)
ax.grid(linewidth=0.125)
plt.savefig("../figures/advanced-barbs.pdf")
ax.clear()

# Event plot
# -----------------------------------------------------------------------------
np.random.seed(1)
X = [2,4,6]
D = np.random.gamma(4, size=(3, 50))
ax.eventplot(D, colors="C1", orientation="vertical", lineoffsets=X, linewidth=0.25)
ax.set_xlim(0, 8), ax.set_xticks(np.arange(1,8))
ax.set_ylim(0, 8), ax.set_yticks(np.arange(1,8))
ax.set_axisbelow(True)
ax.grid(linewidth=0.125)
plt.savefig("../figures/advanced-event.pdf")
ax.clear()

# Errorbar plot
# -----------------------------------------------------------------------------
np.random.seed(1)
X = [2,4,6]
Y = [4,5,4]
E = np.random.uniform(0.5, 1.5, 3)
ax.errorbar(X, Y, E, color="C1", linewidth=0.75, capsize=1)
ax.set_xlim(0, 8), ax.set_xticks(np.arange(1,8))
ax.set_ylim(0, 8), ax.set_yticks(np.arange(1,8))
ax.set_axisbelow(True)
ax.grid(linewidth=0.125)
plt.savefig("../figures/advanced-errorbar.pdf")
ax.clear()

# Hexbin plot
# -----------------------------------------------------------------------------
np.random.seed(1)
X = np.random.uniform(1.5,6.5,100)
Y = np.random.uniform(1.5,6.5,100)
C = np.random.uniform(0,1,10000)
ax.hexbin(X, Y, C, gridsize=4, linewidth=0.25, edgecolor="white",
          cmap=plt.get_cmap("Wistia"), alpha=1.0)
ax.set_xlim(0, 8), ax.set_xticks(np.arange(1,8))
ax.set_ylim(0, 8), ax.set_yticks(np.arange(1,8))
ax.set_axisbelow(True)
ax.grid(linewidth=0.125)
plt.savefig("../figures/advanced-hexbin.pdf")
ax.clear()

# Hist plot
# -----------------------------------------------------------------------------
np.random.seed(1)
X = 4 + np.random.normal(0,1.5,200)
ax.hist(X, bins=8, facecolor="C1", linewidth=0.25, edgecolor="white",)
ax.set_xlim(0, 8), ax.set_xticks(np.arange(1,8))
ax.set_ylim(0, 80), ax.set_yticks(np.arange(1,80,10))
ax.set_axisbelow(True)
ax.grid(linewidth=0.125)
plt.savefig("../figures/advanced-hist.pdf")
ax.clear()

# Xcorr plot
# -----------------------------------------------------------------------------
np.random.seed(3)
Y = np.random.uniform(-4, 4, 250)
X = np.random.uniform(-4, 4, 250)
ax.xcorr(X, Y, usevlines=True, maxlags=6, normed=True, lw=1,
         color="C1")
ax.set_xlim(-8, 8), ax.set_xticks(np.arange(-8,8,2))
ax.set_ylim(-.25, .25), ax.set_yticks(np.linspace(-.25,.25,9))
ax.set_axisbelow(True)
ax.grid(linewidth=0.125)
plt.savefig("../figures/advanced-xcorr.pdf")
ax.clear()
| StarcoderdataPython |
9670284 | <filename>tricks and tips/shutting down a computer.py
import subprocess

# Shut down the local (Windows) machine immediately: `shutdown -s`.
# subprocess.run with an argument list (shell=False by default) avoids
# spawning an intermediate shell, unlike the original os.system call.
subprocess.run(["shutdown", "-s"], check=False)
# Required to make Python treat the directory as containing a package.
235816 | <reponame>pedrolp85/pydevice
from sqlalchemy import Column, ForeignKey, Integer, String
from .database import Base
class Device(Base):
    # A network device, linked to its manufacturer and its management
    # (layer-3) interface.
    __tablename__ = "device"

    id = Column(Integer, primary_key=True)
    model = Column(String)
    # BUG FIX: the FK previously targeted "l3interfaces.id" (plural), but the
    # sibling L3Interface model declares __tablename__ = "l3interface".
    mgmt_interface_id = Column(Integer, ForeignKey("l3interface.id"))
    manufacturer_id = Column(Integer, ForeignKey("manufacturer.id"))
class Manufacturer(Base):
    # Hardware vendor: `name` is the short name, `enterprise_name` the
    # registered company name.
    __tablename__ = "manufacturer"

    id = Column(Integer, primary_key=True)
    name = Column(String)
    enterprise_name = Column(String)
class L3Interface(Base):
    # Layer-3 interface; the IP address is stored as a plain string.
    __tablename__ = "l3interface"

    id = Column(Integer, primary_key=True)
    name = Column(String)
    ip_address = Column(String)
1857230 | <reponame>Fenghuapiao/PyLeetcode
class Solution(object):
    def findMinStep(self, board, hand):
        """Zuma game (LeetCode 488).

        Return the minimum number of balls from `hand` that must be inserted
        into `board` so the whole board clears (runs of 3 or more equal
        colors vanish, cascading), or -1 if it cannot be cleared.

        Fix vs. the original: `range(len(line) / 3)` used true division,
        which is a float in Python 3 and makes range() raise TypeError;
        it now uses integer division.

        :type board: str
        :type hand: str
        :rtype: int
        """
        def dfs(line, balls, visited):
            # `line`: current board after reductions; `balls`: sorted
            # remaining hand. Memoized on the (line, balls) pair.
            line = reduceLine(line)
            if (line, balls) in visited:
                return visited[line, balls]
            if len(line) == 0:
                return len(hand) - len(balls)  # number of balls used so far
            if len(balls) == 0:
                return float("inf")
            res = float("inf")
            for i in range(len(balls)):
                for j in range(len(line) + 1):
                    # Prune: only try insertions adjacent to a same-color ball.
                    if j == 0 and line[0] != balls[i]:
                        continue
                    elif j == len(line) and line[-1] != balls[i]:
                        continue
                    elif 0 < j < len(line) and balls[i] != line[j - 1] and balls[i] != line[j]:
                        continue
                    res = min(res, dfs(line[:j] + balls[i] + line[j:], balls[:i] + balls[i + 1:], visited))
            visited[line, balls] = res
            return res

        def reduceLine(line):
            # Repeatedly collapse runs of 3+ equal characters until stable.
            def reducer(line):
                if len(line) < 3:
                    return line
                ret = []
                dp = [1] * len(line)  # dp[i]: length of the equal-char run starting at i
                pre = line[-1]
                count = 1
                for i in reversed(range(len(line) - 1)):
                    if line[i] == pre:
                        count += 1
                    else:
                        pre = line[i]
                        count = 1
                    dp[i] = count
                i = 0
                while i < len(line):
                    if dp[i] >= 3:
                        i += dp[i]  # drop the whole run
                    else:
                        ret.extend(line[i:i + dp[i]])
                        i += dp[i]
                return "".join(ret)

            if len(line) < 3:
                return line
            ans = line
            # BUG FIX: integer division — each reducer pass removes at least
            # 3 characters, so len(line) // 3 passes suffice.
            for _ in range(len(line) // 3):
                ans = reducer(ans)
            return ans

        visited = {}
        ret = dfs(board, "".join(sorted(hand)), visited)
        return ret if ret != float("inf") else -1
3479623 | <filename>cpf/python/training/thread_pool.py
import time
import concurrent.futures
def func1():
    """Print "func1" once per second, forever (never returns)."""
    while True:
        print("func1")
        time.sleep(1)
def func2():
    """Print "func2" once per second, forever (never returns)."""
    while True:
        print("func2")
        time.sleep(1)
if __name__ == "__main__":
    # Demo: run both printers concurrently. The workers never return, so the
    # pool's (non-daemon) threads keep the process alive indefinitely; a
    # `with` block around the executor would simply hang on shutdown.
    executor = concurrent.futures.ThreadPoolExecutor(max_workers=2)
    executor.submit(func1)
    executor.submit(func2)
| StarcoderdataPython |
3433303 | <filename>appskel/signup/modname/utils.py
# coding: utf-8
#
$license
import OpenSSL
import cyclone.escape
import cyclone.web
import httplib
import re
import uuid
from twisted.internet import defer
from $modname.storage import DatabaseMixin
class TemplateFields(dict):
    """dict with attribute-style access that yields None for missing keys,
    so a template never fails on an absent field."""

    def __getattr__(self, name):
        # Only called when regular attribute lookup fails; dict.get already
        # returns None for absent keys, matching the original try/except.
        return self.get(name)

    def __setattr__(self, name, value):
        self[name] = value
class BaseHandler(cyclone.web.RequestHandler):
    """Common request handler: email validation, cookie-based login state,
    per-user locale selection and custom HTTP error pages."""

    # NOTE: this file is a string.Template source, so `$$` renders to a
    # single `$` end-of-string anchor.
    # FIX: the dot before the TLD is now escaped — previously a bare `.`
    # matched any character (e.g. "a@b!com" validated).
    _email = re.compile(r"^[a-zA-Z0-9._%-]+@[a-zA-Z0-9._%-]+\.[a-zA-Z]{2,8}$$")

    def valid_email(self, email):
        """Return a match object if *email* looks like an address, else None."""
        return self._email.match(email)

    def set_current_user(self, expires_days=1, **kwargs):
        """Store the user's data (as JSON) in a signed cookie."""
        self.set_secure_cookie("user", cyclone.escape.json_encode(kwargs),
                               expires_days=expires_days)

    def get_current_user(self):
        """Return the logged-in user's data dict, or None when logged out."""
        user_json = self.get_secure_cookie("user", max_age_days=1)
        if user_json:
            return cyclone.escape.json_decode(user_json)

    def clear_current_user(self):
        """Log the user out by dropping the login cookie."""
        self.clear_cookie("user")

    def get_user_locale(self):
        """Pick the locale from the "lang" cookie, if present."""
        lang = self.get_secure_cookie("lang")
        if lang:
            return cyclone.locale.get(lang)

    # custom http error pages
    def write_error(self, status_code, **kwargs):
        """Render error_<status>.html, falling back to error_all.html."""
        kwargs["code"] = status_code
        kwargs["message"] = httplib.responses[status_code]
        try:
            self.render("error_%d.html" % status_code, fields=kwargs)
        except IOError:
            self.render("error_all.html", fields=kwargs)
class SessionMixin(DatabaseMixin):
    # Redis-backed sessions: a random 128-bit token is stored in a signed
    # cookie and keys a Redis hash ("$modname:s:<token>") holding the
    # session fields. All methods return Twisted Deferreds.

    session_cookie_name = "session"
    session_redis_prefix = "$modname:s:"

    @property
    def session_redis_key(self):
        # Redis key for the current session, or None when no (valid) session
        # cookie is present — callers use that None to no-op.
        token = self.get_secure_cookie(self.session_cookie_name)
        if token:
            return "%s%s" % (self.session_redis_prefix, token)

    @defer.inlineCallbacks
    def session_create(self, expires_days=1, **kwargs):
        """Create a session hash with the given fields and set the cookie.

        Both the Redis key and the cookie expire after `expires_days`.
        Returns the new session token.
        """
        if not kwargs:
            raise ValueError("session_create requires one or more key=val")
        token = uuid.UUID(bytes=OpenSSL.rand.bytes(16)).hex
        k = "%s%s" % (self.session_redis_prefix, token)
        yield self.redis.hmset(k, kwargs)
        yield self.redis.expire(k, expires_days * 86400)
        self.set_secure_cookie(self.session_cookie_name, token,
                               expires_days=expires_days)
        defer.returnValue(token)

    @defer.inlineCallbacks
    def session_exists(self):
        # True/False when a session cookie is present; None when it is not.
        k = self.session_redis_key
        if k:
            defer.returnValue((yield self.redis.exists(k)))

    @defer.inlineCallbacks
    def session_set(self, **kwargs):
        """Update one or more session fields; returns True on success.

        NOTE(review): does not refresh the key's TTL — confirm that is
        intended (the session still expires relative to its creation).
        """
        if not kwargs:
            raise ValueError("session_set requires one or more key=val")
        k = self.session_redis_key
        if k:
            yield self.redis.hmset(k, kwargs)
            defer.returnValue(True)

    @defer.inlineCallbacks
    def session_get(self, *args):
        # Fetch fields: returns a scalar for a single key, a list otherwise;
        # None when there is no session.
        if not args:
            raise ValueError("session_get requires one or more key names")
        k = self.session_redis_key
        if k:
            r = yield self.redis.hmget(k, args)
            defer.returnValue(r[0] if len(args) == 1 else r)

    @defer.inlineCallbacks
    def session_getall(self):
        # Full session hash as a dict; None when there is no session.
        k = self.session_redis_key
        if k:
            defer.returnValue((yield self.redis.hgetall(k)))

    @defer.inlineCallbacks
    def session_destroy(self):
        # Delete the server-side session data (the cookie itself is not
        # cleared here); returns True on success, None with no session.
        k = self.session_redis_key
        if k:
            yield self.redis.delete(k)
            defer.returnValue(True)
| StarcoderdataPython |
6623503 | <filename>active_learning_lab/data/embeddings.py
import torch
import numpy as np
from pathlib import Path
def get_embedding_matrix(name, vocab, data_dir='.data/'):
    # Build an embedding matrix for `vocab`, caching the pretrained vectors
    # under <data_dir>/embeddings/<name>.bin after the first download.
    # NOTE(review): `name` only selects the cache filename — the download is
    # always 'word2vec-google-news-300'; confirm whether other embedding
    # names are meant to be supported.
    from gensim.models.word2vec import Word2VecKeyedVectors

    embedding_dir = Path(data_dir).joinpath('embeddings')
    embedding_dir.mkdir(parents=True, exist_ok=True)

    serialized_file = embedding_dir.joinpath(name + '.bin')
    if not serialized_file.exists():
        import gensim.downloader as api
        model = api.load('word2vec-google-news-300')
        model.save(str(serialized_file.resolve()))
        return _build_embedding_matrix_from_keyedvectors(model, vocab)
    else:
        # mmap='r' avoids loading the full vector file into memory.
        model = Word2VecKeyedVectors.load(str(serialized_file.resolve()), mmap='r')
        return _build_embedding_matrix_from_keyedvectors(model, vocab)
def _build_embedding_matrix_from_keyedvectors(pretrained_vectors, vocab, min_freq=1):
    # Build a (len(vocab), dim) tensor: row 0 is the all-zero <pad> vector,
    # the remaining rows follow vocab order.
    # NOTE(review): uses the gensim<4 KeyedVectors API (`.vocab` mapping) —
    # verify the gensim version if dependencies are upgraded.
    vectors = [
        np.zeros(pretrained_vectors.vectors.shape[1])  # <pad>
    ]
    num_special_vectors = len(vectors)
    # Words found in the pretrained vocabulary get their vector; everything
    # else starts as a zero vector.
    vectors += [
        pretrained_vectors.vectors[pretrained_vectors.vocab[vocab.itos[i]].index]
        if vocab.itos[i] in pretrained_vectors.vocab
        else np.zeros(pretrained_vectors.vectors.shape[1])
        for i in range(num_special_vectors, len(vocab))
    ]

    # Out-of-vocabulary words seen at least `min_freq` times are replaced by
    # small random vectors so they stay distinguishable/trainable.
    for i in range(num_special_vectors, len(vocab)):
        if vocab.itos[i] not in pretrained_vectors.vocab and vocab.freqs[vocab.itos[i]] >= min_freq:
            vectors[i] = np.random.uniform(-0.25, 0.25, pretrained_vectors.vectors.shape[1])

    return torch.as_tensor(np.stack(vectors))
| StarcoderdataPython |
8035103 | #! /usr/bin/env python
import os.path
from collections import defaultdict
single_test_input = [
"acedgfb cdfbe gcdfa fbcad dab cefabd cdfgeb eafb cagedb ab | cdfeb fcadb cdfeb cdbaf"
]
test_input = [
"be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe",
"edbfga begcd cbg gc gcadebf fbgde acbgfd abcde gfcbed gfec | fcgedb cgb dgebacf gc",
"fgaebd cg bdaec gdafb agbcfd gdcbef bgcad gfac gcb cdgabef | cg cg fdcagb cbg",
"fbegcd cbd adcefb dageb afcb bc aefdc ecdab fgdeca fcdbega | efabcd cedba gadfec cb",
"aecbfdg fbg gf bafeg dbefa fcge gcbea fcaegb dgceab fcbdga | gecf egdcabf bgf bfgea",
"fgeab ca afcebg bdacfeg cfaedg gcfdb baec bfadeg bafgc acf | gebdcfa ecba ca fadegcb",
"dbcfg fgd bdegcaf fgec aegbdf ecdfab fbedc dacgb gdcebf gf | cefg dcbef fcge gbcadfe",
"bdfegc cbegaf gecbf dfcage bdacg ed bedf ced adcbefg gebcd | ed bcgafe cdgba cbgef",
"egadfb cdbfeg cegd fecab cgb gbdefca cg fgcdab egfdb bfceg | gbdfcae bgc cg cgb",
"gcafb gcf dcaebfg ecagb gf abcdeg gaef cafbge fdbac fegbdc | fgae cfgab fg bagce"
]
digits_to_binary = {
# ABCDEFG
0: 0b1110111,
1: 0b0010010,
2: 0b1011101,
3: 0b1011011,
4: 0b0111010,
5: 0b1101011,
6: 0b1101111,
7: 0b1010010,
8: 0b1111111,
9: 0b1111011
}
# Canonical segment letters lit for each digit on a working display.
digits_to_chrs = {
    0: 'abcefg',
    1: 'cf',
    2: 'acdeg',
    3: 'acdfg',
    4: 'bcdf',
    5: 'abdfg',
    6: 'abdefg',
    7: 'acf',
    8: 'abcdefg',
    9: 'abcdfg'
}
# Segment count per digit, derived from digits_to_chrs so the two tables
# can never drift out of sync (the previous hand-written dict duplicated
# this information).
digits_to_len = {digit: len(segments) for digit, segments in digits_to_chrs.items()}
def parse_line(line):
    """Split a puzzle line into (signal patterns, output digits)."""
    patterns, outputs = line.split(' | ')
    return (patterns.split(' '), outputs.split(' '))
def part1(data):
    """Count output digits whose segment count uniquely identifies them.

    Digits 1, 4, 7 and 8 use 2, 4, 3 and 7 segments respectively -- the only
    segment counts that map to a single digit.
    """
    unique_lengths = {digits_to_len[digit] for digit in (1, 4, 7, 8)}
    total = 0
    for line in data:
        _, output = parse_line(line)
        total += sum(1 for pattern in output if len(pattern) in unique_lengths)
    return total
def part2(data):
    """Decode each line's scrambled seven-segment wiring (work in progress).

    Currently only seeds candidate wire mappings from the uniquely-sized
    digits (1, 4, 7, 8), reduces them to a fixpoint, and prints its partial
    progress; always returns None.
    """
    # Need to figure out decryption key...
    #
    # For (input | output):
    # acedgfb cdfbe gcdfa fbcad dab cefabd cdfgeb eafb cagedb ab | cdfeb fcadb cdfeb cdbaf
    #
    # Based on unique lengths we know:
    # 1, 0b0010010, 'cf' => 'ab'
    # 4, 0b0111010, 'bcdf' => 'eafb'
    # 7, 0b1011011, 'acf' => 'dab'
    # 8, 0b1010010, 'abcdefg' => 'acedgfb'
    #
    # Need to compute remaining:
    # 0, 'abcefg' => 'cagedb'
    # 2, 'acdeg' => 'gcdfa'
    # 3, 'acdfg' => 'fbcad'
    # 5, 'abdfg' => 'cdfbe'
    # 6, 'abdefg' => 'cdfgeb'
    # 9, 'abcdfg' => 'cefabd'
    #
    # Candidates based on known:
    # a - [d]
    # b - [e,f]
    # c - [a,b]
    # d - [e,f]
    # e - [c,g]
    # f - [a,b]
    # g - [c,g]
    #
    # Need to keep reducing/guessing further to get key:
    # 'abcdefg' -> 'deafgbc'
    known_lengths = [
        digits_to_len[1],
        digits_to_len[4],
        digits_to_len[7],
        digits_to_len[8],
    ]
    for line in data:
        # substitution from original to scrambled
        key = {}
        # possible candidates that character maps to see example above
        candidates = defaultdict(set)
        num_to_seq = {}
        (input, output) = parse_line(line)
        all_seqs = input.copy()
        all_seqs.extend(output)
        # first figure out initial candidates based on known lengths
        # starting from shortest to longest sequence
        all_seqs.sort(key=len)
        for seq in all_seqs:
            if len(seq) in known_lengths:
                # lengths 2/4/3/7 are pairwise distinct, so index() is unambiguous
                n = [1, 4, 7, 8][known_lengths.index(len(seq))]
                num_to_seq[n] = seq
                for c in digits_to_chrs[n]:
                    if len(candidates[c]) == 0:
                        #print(f"{n}) adding {seq} as candidate for {c}")
                        candidates[c] |= set(list(seq))
        # now figure out the rest!
        # - reduce
        # - once we have reduced, can we match any further sequences to a number?
        # - guess if can't reduce further?
        print("known:", num_to_seq)
        print(f"candidates: {candidates}")
        # iterate reduce() to a fixpoint (note: calls reduce twice per pass)
        while reduce(candidates) != candidates:
            candidates = reduce(candidates)
        print(f"reduced candidates: {candidates}")
        for k, v in candidates.items():
            if len(v) == 1:
                key[k] = v.pop()
        to_solve = set(list(range(0, 9+1))) - set(num_to_seq.keys())
        print("To solve:", to_solve)
        print(f"known key: {key}")
    return None
def reduce(candidates):
    """Prune candidate wire sets using already-pinned 1- and 2-element sets.

    If a larger candidate set contains a 1- or 2-element set that some other
    wire is already confined to, those letters are spoken for and can be
    removed from the larger set.  E.g.::

        'a': {'b', 'a', 'd'}
        'c': {'b', 'a'}
        'f': {'b', 'a'}

    'b' and 'a' are covered by 'c'/'f', so 'a' reduces to {'d'}.
    Returns a new defaultdict; the input mapping is not modified.
    """
    pinned_singles = []
    pinned_pairs = []
    for options in candidates.values():
        if len(options) == 1 and options not in pinned_singles:
            pinned_singles.append(options)
        elif len(options) == 2 and options not in pinned_pairs:
            pinned_pairs.append(options)

    reduced = defaultdict(set)
    for wire, options in candidates.items():
        if len(options) > 2:
            # singles are applied before pairs, matching the pinning order
            for taken in pinned_singles + pinned_pairs:
                if taken <= options:
                    options = options - taken
        reduced[wire] = options
    return reduced
# Determine the puzzle day from this file's name (e.g. "day08.py" -> "08").
day = os.path.basename(__file__).split('.')[0][-2:]
# Read the puzzle input with a context manager (the previous version leaked
# the file handle) and avoid shadowing the builtin ``input``.
with open(f"./inputs/day{day}") as input_file:
    puzzle_input = [line.strip() for line in input_file]

print(f"Day {day}")

# print("test part 1:", part1(test_input))
# print("part 1:", part1(puzzle_input))
print("test part 2:", part2(single_test_input))
#print("test part 2:", part2(test_input))
# print("part 2:", part2(puzzle_input))
| StarcoderdataPython |
1727958 | from soundrts.world import World
from soundrts.worldclient import DummyClient
from soundrts.worldplayercomputer import Computer
from soundrts.worldresource import Deposit
from soundrts.worldroom import Square
class Deposit(Deposit):  # type: ignore
    """Test double shadowing soundrts' Deposit: only carries a resource type."""
    def __init__(self, type_):
        self.resource_type = type_
class Warehouse:
    """Test double for a warehouse: just the resource types it can store."""
    def __init__(self, types):
        self.storable_resource_types = types
def test_is_ok_for_warehouse():
    """A square suits a warehouse for a resource iff it holds a matching
    deposit and no warehouse already stores that resource type there."""
    w = World([])
    c = Computer(w, DummyClient())
    a1 = Square(w, 0, 0, 12)
    assert a1.name == "a1"
    # no deposit of resource 0 yet
    assert not c.is_ok_for_warehouse(a1, 0)
    a1.objects.append(Deposit(0))
    assert c.is_ok_for_warehouse(a1, 0)
    # a warehouse for a *different* resource does not disqualify the square
    a1.objects.append(Warehouse([1]))
    assert c.is_ok_for_warehouse(a1, 0)
    # but one already storing resource 0 does
    a1.objects.append(Warehouse([0]))
    assert not c.is_ok_for_warehouse(a1, 0)
| StarcoderdataPython |
8023486 | <gh_stars>10-100
# -*- coding: utf-8 -*-
"""Utility functions for running optimizers."""
import time
def float2str(x):
    """Format ``x`` in scientific notation, trimming trailing mantissa zeros.

    E.g. ``0.01 -> '1.e-02'``; the exponent part is kept verbatim.
    """
    mantissa, _, exponent = "{:.10e}".format(x).partition("e")
    return mantissa.rstrip("0") + "e" + exponent
def make_run_name(weight_decay, batch_size, num_epochs, learning_rate,
                  lr_sched_epochs, lr_sched_factors, random_seed,
                  **optimizer_hyperparams):
    """Creates a name for the output file of an optimizer run.

    Args:
        weight_decay (float): The weight decay factor used (or ``None`` to
            signify the testproblem's default).
        batch_size (int): The mini-batch size used.
        num_epochs (int): The number of epochs trained.
        learning_rate (float): The learning rate used.
        lr_sched_epochs (list): A list of epoch numbers (positive integers)
            that mark learning rate changes.
        lr_sched_factors (list): A list of factors (floats) by which to change
            the learning rate.
        random_seed (int): Random seed used.

    Returns:
        run_folder_name: Name for the run folder consisting of num_epochs,
            batch_size, weight_decay, all the optimizer hyperparameters, and
            the learning rate (schedule).
        file_name: Name for the output file, consisting of random seed and a
            time stamp.
    """
    parts = [f"num_epochs__{num_epochs}__batch_size__{batch_size}__"]
    if weight_decay is not None:
        parts.append(f"weight_decay__{float2str(weight_decay)}__")

    # Add all hyperparameters to the name (sorted alphabetically).
    for hp_name, hp_value in sorted(optimizer_hyperparams.items()):
        hp_str = float2str(hp_value) if isinstance(hp_value, float) else str(hp_value)
        parts.append(f"{hp_name}__{hp_str}__")

    if lr_sched_epochs is None:
        parts.append(f"lr__{float2str(learning_rate)}")
    else:
        parts.append(f"lr_schedule__{0:d}_{float2str(learning_rate)}")
        for epoch, factor in zip(lr_sched_epochs, lr_sched_factors):
            parts.append(f"_{epoch:d}_{float2str(factor * learning_rate)}")
    run_folder_name = "".join(parts)

    file_name = f"random_seed__{random_seed:d}__" + time.strftime("%Y-%m-%d-%H-%M-%S")
    return run_folder_name, file_name
def make_lr_schedule(lr_base, lr_sched_epochs, lr_sched_factors):
    """Creates a learning rate schedule in the form of a dictionary.

    After ``lr_sched_epochs[i]`` epochs of training, the learning rate is set
    to ``lr_sched_factors[i] * lr_base``.  The schedule maps epoch number to
    learning rate; epoch 0 (``lr_base`` itself) is always included.

    Examples:
      - ``make_lr_schedule(0.3, [50, 100], [0.1, 0.01])`` yields
        ``{0: 0.3, 50: 0.03, 100: 0.003}``.
      - ``make_lr_schedule(0.3, None, None)`` yields ``{0: 0.3}``.
      - ``make_lr_schedule(0.3, [], [])`` yields ``{0: 0.3}``.

    Args:
        lr_base: A base learning rate (float).
        lr_sched_epochs: A (possibly empty) list of integers, specifying
            epochs at which to decrease the learning rate.
        lr_sched_factors: A (possibly empty) list of floats, specifying
            factors by which to decrease the learning rate.

    Returns:
        A dictionary mapping epoch numbers to learning rates.

    Raises:
        TypeError: If only one of the two schedule arguments is given.
        ValueError: If the schedule arguments are not equal-length lists.
    """
    # No schedule requested at all: just the base learning rate.
    if lr_sched_epochs is None and lr_sched_factors is None:
        return {0: lr_base}

    # Make sure learning rate schedule has been properly specified.
    if lr_sched_epochs is None or lr_sched_factors is None:
        raise TypeError(
            """Specifiy *both* lr_sched_epochs and lr_sched_factors.""")
    well_formed = (isinstance(lr_sched_epochs, list)
                   and isinstance(lr_sched_factors, list)
                   and len(lr_sched_epochs) == len(lr_sched_factors))
    if not well_formed:
        raise ValueError(
            """lr_sched_epochs and lr_sched_factors must be lists of
            the same length.""")

    # Build the schedule; epoch 0 is set last so it always holds lr_base,
    # even when 0 appears in lr_sched_epochs.
    sched = {}
    for epoch, factor in zip(lr_sched_epochs, lr_sched_factors):
        sched[epoch] = factor * lr_base
    sched[0] = lr_base
    return sched
| StarcoderdataPython |
3590028 | import numpy as np
import math
from numba import jit, cuda, prange
from pythonabm import cuda_magnitude
@cuda.jit
def jkr_forces_gpu(jkr_edges, delete_edges, locations, radii, jkr_forces, poisson, youngs, adhesion_const):
    """ This just-in-time compiled CUDA kernel performs the actual
        calculations for the jkr_forces() method.

        One thread handles one JKR edge: it evaluates the Johnson-Kendall-
        Roberts adhesion force between the edge's two cells and accumulates
        it into ``jkr_forces``; edges whose cells have separated past the
        adhesion cutoff are flagged in ``delete_edges``.
    """
    # get the index in the edges array
    edge_index = cuda.grid(1)

    # double check that index is within the array
    if edge_index < jkr_edges.shape[0]:
        # get the cell indices of the edge
        cell_1 = jkr_edges[edge_index][0]
        cell_2 = jkr_edges[edge_index][1]

        # get the locations of the two cells
        location_1 = locations[cell_1]
        location_2 = locations[cell_2]

        # get the magnitude of the distance between the cells and the overlap of the cells
        mag = cuda_magnitude(location_1, location_2)
        overlap = (radii[cell_1] + radii[cell_2] - mag) / 1e6    # convert radii from um to m

        # get two values used for JKR calculation
        e_hat = (((1 - poisson[0] ** 2) / youngs[0]) + ((1 - poisson[0] ** 2) / youngs[0])) ** -1
        r_hat = (1e6 * ((1 / radii[cell_1]) + (1 / radii[cell_2]))) ** -1    # convert radii from um to m

        # value used to calculate the max adhesive distance after bond has been already formed
        overlap_ = (((math.pi * adhesion_const[0]) / e_hat) ** (2 / 3)) * (r_hat ** (1 / 3))

        # get the nondimensionalized overlap
        d = overlap / overlap_

        # check to see if the cells will have a force interaction based on the nondimensionalized distance
        if d > -0.360562:
            # plug the value of d into polynomial approximation for nondimensionalized force
            f = (-0.0204 * d ** 3) + (0.4942 * d ** 2) + (1.0801 * d) - 1.324

            # convert from the nondimensionalized force to find the JKR force
            jkr_force = f * math.pi * adhesion_const[0] * r_hat

            # loops over all directions of space
            for i in range(3):
                # get the vector by axis between the two cells
                vector_part = location_1[i] - location_2[i]

                # if the magnitude is 0 use the zero vector, otherwise find the normalized vector for each axis
                if mag != 0:
                    normal = vector_part / mag
                else:
                    normal = 0

                # adds the adhesive force as a vector in opposite directions to each cell's force
                # holder. Bug fix: use atomic adds -- several edges can share a cell, so the
                # previous plain +=/-= read-modify-write from concurrent threads could lose updates.
                cuda.atomic.add(jkr_forces, (cell_1, i), jkr_force * normal)
                cuda.atomic.add(jkr_forces, (cell_2, i), -(jkr_force * normal))

        # remove the edge if it fails to meet the criteria for distance, simulating that the bond is broken
        else:
            delete_edges[edge_index] = 1
@jit(nopython=True, parallel=True, cache=True)
def jkr_forces_cpu(number_edges, jkr_edges, delete_edges, locations, radii, jkr_forces, poisson, youngs,
                   adhesion_const):
    """ This just-in-time compiled method performs the actual
        calculations for the jkr_forces() method.

        CPU twin of jkr_forces_gpu(): for each JKR edge, compute the
        Johnson-Kendall-Roberts adhesion force between its two cells,
        accumulate it into ``jkr_forces`` and flag over-stretched bonds
        in ``delete_edges``.

        NOTE(review): with parallel=True, two prange iterations whose edges
        share a cell both read-modify-write jkr_forces[cell] -- this looks
        like a data race; confirm edges never share cells or serialize.
    """
    # go through the edges array
    for edge_index in prange(number_edges):
        # get the cell indices of the edge
        cell_1 = jkr_edges[edge_index][0]
        cell_2 = jkr_edges[edge_index][1]
        # get the vector between the centers of the cells, the magnitude of this vector and the overlap of the cells
        vector = locations[cell_1] - locations[cell_2]
        mag = np.linalg.norm(vector)
        overlap = (radii[cell_1] + radii[cell_2] - mag) / 1e6    # convert radii from um to m
        # get two values used for JKR calculation
        e_hat = (((1 - poisson ** 2) / youngs) + ((1 - poisson ** 2) / youngs)) ** -1
        r_hat = (1e6 * ((1 / radii[cell_1]) + (1 / radii[cell_2]))) ** -1    # convert radii from um to m
        # value used to calculate the max adhesive distance after bond has been already formed
        overlap_ = (((math.pi * adhesion_const) / e_hat) ** (2 / 3)) * (r_hat ** (1 / 3))
        # get the nondimensionalized overlap
        d = overlap / overlap_
        # check to see if the cells will have a force interaction based on the nondimensionalized distance
        if d > -0.360562:
            # plug the value of d into polynomial approximation for nondimensionalized force
            f = (-0.0204 * d ** 3) + (0.4942 * d ** 2) + (1.0801 * d) - 1.324
            # convert from the nondimensionalized force to find the JKR force
            jkr_force = f * math.pi * adhesion_const * r_hat
            # if the magnitude is 0 use the zero vector, otherwise find the normalized vector for each axis. numba's
            # jit prefers a reduction instead of generating a new normalized array
            normal = np.array([0.0, 0.0, 0.0])
            if mag != 0:
                normal += vector / mag
            # adds the adhesive force as a vector in opposite directions to each cell's force holder
            jkr_forces[cell_1] += jkr_force * normal
            jkr_forces[cell_2] -= jkr_force * normal
        # remove the edge if it fails to meet the criteria for distance, simulating that the bond is broken
        else:
            delete_edges[edge_index] = 1
    return jkr_forces, delete_edges
@cuda.jit
def apply_forces_gpu(jkr_force, motility_force, locations, radii, stokes, size, move_dt):
    """ This just-in-time compiled CUDA kernel performs the actual
        calculations for the apply_forces() method.

        One thread per cell: convert the summed JKR and motility forces to a
        velocity via Stokes drag, advance the cell one move_dt step, and
        clamp the new position to the simulation space bounds.
    """
    # get the index in the array
    index = cuda.grid(1)
    # double check that the index is within bounds
    if index < locations.shape[0]:
        # stokes law for velocity based on force and fluid viscosity, convert radii to m from um
        stokes_friction = 6 * math.pi * stokes[0] * (radii[index] / 1e6)
        # loop over all directions of space
        for i in range(3):
            # update the velocity of the cell based on stokes
            velocity = (jkr_force[index][i] + motility_force[index][i]) / stokes_friction
            # set the new location, convert velocity from m/s to um/s
            new_location = locations[index][i] + move_dt[0] * (velocity * 1e6)
            # check if new location is in the space, clamping to the boundary otherwise
            if new_location > size[i]:
                locations[index][i] = size[i]
            elif new_location < 0:
                locations[index][i] = 0
            else:
                locations[index][i] = new_location
@jit(nopython=True, parallel=True, cache=True)
def apply_forces_cpu(number_agents, jkr_force, motility_force, locations, radii, stokes, size, move_dt):
    """ This just-in-time compiled method performs the actual
        calculations for the apply_forces() method.

        CPU twin of apply_forces_gpu(): converts each cell's total force to a
        velocity with Stokes drag, steps the position by move_dt, and clamps
        it to the simulation space.  ``locations`` is updated in place and
        also returned.
    """
    for index in prange(number_agents):
        # stokes law for velocity based on force and fluid viscosity, convert radii to m from um
        stokes_friction = 6 * math.pi * stokes * (radii[index] / 1e6)
        # update the velocity of the cell based on stokes
        velocity = (motility_force[index] + jkr_force[index]) / stokes_friction
        # set the new location, convert velocity from m/s to um/s
        new_location = locations[index] + move_dt * (velocity * 1e6)
        # loop over all directions of space and check if new location is in the space
        for i in range(0, 3):
            if new_location[i] > size[i]:
                locations[index][i] = size[i]
            elif new_location[i] < 0:
                locations[index][i] = 0
            else:
                locations[index][i] = new_location[i]
    return locations
@jit(nopython=True, cache=True)
def update_diffusion_jit(BMP_base, NOG_base, BMP_add):
    """ This just-in-time compiled method performs the actual
        calculations for the update_diffusion() method.

        Explicit (forward Euler) finite-difference integration of a coupled
        BMP4/NOGGIN reaction-diffusion system on a 2D grid.  Both input
        arrays are updated in place and also returned.
    """
    # constants
    dt = 0.25
    dx = 10
    k_1 = 0.01  # inhibition of BMP signaling mediated by NOGGIN
    k_2 = 0.003  # inhibition of BMP signaling mediated by relocalized receptors
    k_3 = 0.003  # Degradation rate of BMP4
    k_4 = 0.008  # Production rate of NOGGIN mediated by BMP signaling
    k_5 = 0.009  # Degradation rate of NOGGIN
    D_bmp = 10  # Diffusivity of BMP4, 10 um^2/s
    D_nog = 50  # Diffusivity of NOG, 50 um^2/s

    # calculate the number of steps (1800 s of simulated time)
    steps = int(1800 / dt)

    # finite difference method to solve laplacian diffusion equation, currently 2D
    for i in range(steps):
        # zero the NOG boundary each step (a zero/Dirichlet boundary -- the
        # previous "reflecting" comment did not match the `* 0` code)
        NOG_base[:, 0] = NOG_base[:, 1] * 0
        NOG_base[:, -1] = NOG_base[:, -2] * 0
        NOG_base[0, :] = NOG_base[1, :] * 0
        NOG_base[-1, :] = NOG_base[-2, :] * 0

        # add BMP morphogen
        BMP_base += BMP_add / steps

        # calculate the Laplacian of BMP and NOG
        delta_BMP = (BMP_base[2:, 1:-1] + BMP_base[:-2, 1:-1] + BMP_base[1:-1, 2:] + BMP_base[1:-1, :-2]
                     - 4 * BMP_base[1:-1, 1:-1]) / dx ** 2
        delta_NOG = (NOG_base[2:, 1:-1] + NOG_base[:-2, 1:-1] + NOG_base[1:-1, 2:] + NOG_base[1:-1, :-2]
                     - 4 * NOG_base[1:-1, 1:-1]) / dx ** 2

        # get the center values
        BMP_center = BMP_base[1:-1, 1:-1]
        NOG_center = NOG_base[1:-1, 1:-1]

        # reaction terms for BMP (f) and NOG (g)
        f = - k_1 * NOG_center - k_2
        g = + k_4 * BMP_center

        # clamp the reaction terms elementwise to [0, 1] and [0, 5].  Bug fix:
        # the previous np.argwhere() loops indexed the 2D arrays with
        # 2-element index arrays, which assigns to whole *rows* (and can go
        # out of bounds on non-square grids) instead of single elements.
        f = np.minimum(np.maximum(f, 0.0), 1.0)
        g = np.minimum(np.maximum(g, 0.0), 5.0)

        # forward Euler update of the interior
        BMP_base[1:-1, 1:-1] = BMP_center + dt * (D_bmp * delta_BMP - k_3 * BMP_center + f)
        # bug fix: the NOGGIN update previously added the BMP reaction term
        # ``f`` instead of NOGGIN's own production term ``g``, which was
        # computed (and clamped) but never used
        NOG_base[1:-1, 1:-1] = NOG_center + dt * (D_nog * delta_NOG - k_5 * NOG_center + g)

    return BMP_base, NOG_base
| StarcoderdataPython |
1954158 | <filename>tests/grids/test_rectilinear.py
#! /usr/bin/env python
import unittest
import numpy as np
from pymt.grids import Rectilinear, RectilinearPoints
from ..grids.test_utils import NumpyArrayMixIn
class TestRectilinearGrid(unittest.TestCase):
    """Construction and geometry checks for Rectilinear grids."""

    def assert_point_count(self, grid, expected):
        self.assertEqual(expected, grid.get_point_count())

    def assert_cell_count(self, grid, expected):
        self.assertEqual(expected, grid.get_cell_count())

    def assert_shape(self, grid, expected):
        self.assertListEqual(list(grid.get_shape()), list(expected))

    def assert_spacing(self, grid, expected):
        self.assertListEqual(list(grid.get_spacing()), list(expected))

    def assert_origin(self, grid, expected):
        self.assertListEqual(list(grid.get_origin()), list(expected))

    def assert_x(self, grid, expected):
        self.assertListEqual(list(expected), list(grid.get_x()))

    def assert_y(self, grid, expected):
        self.assertListEqual(list(expected), list(grid.get_y()))

    def assert_z(self, grid, expected):
        self.assertListEqual(list(expected), list(grid.get_z()))

    def assert_offset(self, grid, expected):
        self.assertListEqual(list(expected), list(grid.get_offset()))

    def assert_connectivity(self, grid, expected):
        self.assertListEqual(list(expected), list(grid.get_connectivity()))

    @unittest.skip("xy indexing is deprecated")
    def test_xy_indexing(self):
        grid = Rectilinear([1.0, 2.0, 4.0, 8.0], [1.0, 2.0, 3.0])
        self.assert_point_count(grid, 12)
        self.assert_cell_count(grid, 6)
        self.assert_shape(grid, (3, 4))
        # x repeats per row, y is constant along each row
        self.assert_x(grid, [1.0, 2.0, 4.0, 8.0] * 3)
        self.assert_y(grid, [y for y in (1.0, 2.0, 3.0) for _ in range(4)])

    def test_ij_indexing(self):
        grid = Rectilinear([1.0, 2.0, 4.0, 8.0], [1.0, 2.0, 3.0], indexing="ij")
        self.assert_point_count(grid, 12)
        self.assert_cell_count(grid, 6)
        self.assert_shape(grid, (4, 3))
        self.assert_x(grid, [1.0, 2.0, 3.0] * 4)
        self.assert_y(grid, [y for y in (1.0, 2.0, 4.0, 8.0) for _ in range(3)])
        self.assert_connectivity(
            grid,
            [0, 1, 4, 3, 1, 2, 5, 4, 3, 4, 7, 6, 4, 5, 8, 7, 6, 7, 10, 9, 7, 8, 11, 10],
        )

    def test_grid_of_points(self):
        grid = RectilinearPoints(
            [1.0, 2.0, 4.0, 8.0], [1.0, 2.0, 3.0], indexing="ij", set_connectivity=True
        )
        self.assert_point_count(grid, 12)
        # a point grid has no cells
        self.assert_cell_count(grid, 0)
        self.assert_shape(grid, (4, 3))
        self.assert_x(grid, [1.0, 2.0, 3.0] * 4)
        self.assert_y(grid, [y for y in (1.0, 2.0, 4.0, 8.0) for _ in range(3)])
        self.assert_connectivity(grid, np.arange(grid.get_point_count()))
        self.assert_offset(grid, np.arange(1, grid.get_point_count() + 1))

    def test_1d_grid(self):
        grid = Rectilinear([1, 3, 4, 5, 6], set_connectivity=True)
        self.assert_point_count(grid, 5)
        self.assert_cell_count(grid, 4)
        self.assert_shape(grid, (5,))
        self.assert_x(grid, [1.0, 3.0, 4.0, 5.0, 6.0])
        # self.assert_y (grid, [0., 0., 0., 0., 0.])
        self.assert_connectivity(grid, [0, 1, 1, 2, 2, 3, 3, 4])
        self.assert_offset(grid, [2, 4, 6, 8])

    @unittest.skip("xy indexing is deprecated")
    def test_3d_xy_indexing(self):
        grid = Rectilinear(
            [0, 1, 2, 3], [4, 5, 6], [7, 8], set_connectivity=True, indexing="xy"
        )
        self.assert_point_count(grid, 24)
        self.assert_cell_count(grid, 6)
        self.assert_shape(grid, (2, 3, 4))
        self.assert_x(grid, [0.0, 1.0, 2.0, 3.0] * 6)
        self.assert_y(grid, [y for y in (4.0, 5.0, 6.0) for _ in range(4)] * 2)
        self.assert_z(grid, [7.0] * 12 + [8.0] * 12)
        self.assert_offset(grid, 8.0 * np.arange(1, grid.get_cell_count() + 1))

    def test_3d_ij_indexing(self):
        grid = Rectilinear(
            [0, 1, 2, 3], [4, 5, 6], [7, 8], set_connectivity=True, indexing="ij"
        )
        self.assert_point_count(grid, 24)
        self.assert_cell_count(grid, 6)
        self.assert_shape(grid, (4, 3, 2))
        self.assert_x(grid, [7.0, 8.0] * 12)
        self.assert_y(grid, [y for y in (4.0, 5.0, 6.0) for _ in range(2)] * 4)
        self.assert_z(grid, [z for z in (0.0, 1.0, 2.0, 3.0) for _ in range(6)])
        self.assert_offset(grid, 8.0 * np.arange(1, grid.get_cell_count() + 1))
class TestRectilinearGridUnits(unittest.TestCase):
    """Checks that axis units end up attached to the right coordinates."""

    @staticmethod
    def _make_grid(units, indexing="ij"):
        return Rectilinear(
            [1.0, 2.0, 4.0, 8.0], [1.0, 2.0, 3.0], indexing=indexing, units=units
        )

    def test_2d_units_ij_indexing_by_name(self):
        grid = self._make_grid(["y_units", "x_units"])
        self.assertEqual("x_units", grid.get_x_units())
        self.assertEqual("y_units", grid.get_y_units())

    @unittest.skip("xy indexing is deprecated")
    def test_2d_units_xy_indexing_by_name(self):
        grid = self._make_grid(["x_units", "y_units"], indexing="xy")
        self.assertEqual("x_units", grid.get_x_units())
        self.assertEqual("y_units", grid.get_y_units())

    def test_2d_units_ij_indexing_by_coordinate(self):
        grid = self._make_grid(["y_units", "x_units"])
        self.assertEqual("y_units", grid.get_coordinate_units(0))
        self.assertEqual("x_units", grid.get_coordinate_units(1))

    @unittest.skip("xy indexing is deprecated")
    def test_2d_units_xy_indexing_by_coordinate(self):
        grid = self._make_grid(["y_units", "x_units"])
        self.assertEqual("x_units", grid.get_coordinate_units(0))
        self.assertEqual("y_units", grid.get_coordinate_units(1))
class TestRectilinearGridCoordinateNames(unittest.TestCase):
    """Checks default and user-supplied coordinate names."""

    @staticmethod
    def _make_grid(indexing="ij", **kwds):
        return Rectilinear(
            [1.0, 2.0, 4.0, 8.0], [1.0, 2.0, 3.0], indexing=indexing, **kwds
        )

    def test_2d_coordinate_name_ij_default(self):
        grid = self._make_grid()
        self.assertEqual("y", grid.get_coordinate_name(0))
        self.assertEqual("x", grid.get_coordinate_name(1))

    @unittest.skip("xy indexing is deprecated")
    def test_2d_coordinate_name_xy_default(self):
        grid = self._make_grid(indexing="xy")
        self.assertEqual("x", grid.get_coordinate_name(0))
        self.assertEqual("y", grid.get_coordinate_name(1))

    def test_2d_coordinate_name_ij_indexing(self):
        grid = self._make_grid(coordinate_names=["longitude", "latitude"])
        self.assertEqual("longitude", grid.get_coordinate_name(0))
        self.assertEqual("latitude", grid.get_coordinate_name(1))

    @unittest.skip("xy indexing is deprecated")
    def test_2d_coordinate_name_xy_indexing(self):
        grid = self._make_grid(
            indexing="xy", coordinate_names=["latitude", "longitude"]
        )
        self.assertEqual("latitude", grid.get_coordinate_name(0))
        self.assertEqual("longitude", grid.get_coordinate_name(1))

    def test_2d_coordinate_name_ij_indexing_with_kwds(self):
        grid = self._make_grid(coordinate_names=["latitude", "longitude"])
        self.assertEqual("latitude", grid.get_coordinate_name(0))
        self.assertEqual("longitude", grid.get_coordinate_name(1))
        # self.assertEqual (grid.get_coordinate_name (0, indexing='xy'), 'longitude')
        # self.assertEqual (grid.get_coordinate_name (1, indexing='xy'), 'latitude')
class TestRectilinearGridCoordinates(unittest.TestCase, NumpyArrayMixIn):
    """Checks the per-axis point-coordinate arrays."""

    def test_2d_coordinates_ij_default(self):
        grid = Rectilinear([1.0, 2.0, 4.0, 8.0], [1.0, 2.0, 3.0], indexing="ij")
        rows = np.array([r for r in (1.0, 2.0, 4.0, 8.0) for _ in range(3)])
        cols = np.array([1.0, 2.0, 3.0] * 4)
        self.assertArrayEqual(grid.get_point_coordinates(axis=0), rows)
        self.assertArrayEqual(grid.get_point_coordinates(axis=1), cols)

    @unittest.skip("xy indexing is deprecated")
    def test_2d_coordinates_xy_default(self):
        grid = Rectilinear([1.0, 2.0, 4.0, 8.0], [1.0, 2.0, 3.0], indexing="xy")
        expected_x = np.array([1.0, 2.0, 4.0, 8.0] * 3)
        expected_y = np.tile([1.0, 2.0, 3.0], (4, 1)).T.flat
        self.assertTrue(np.allclose(grid.get_point_coordinates(axis=0), expected_x))
        self.assertTrue(np.allclose(grid.get_point_coordinates(axis=1), expected_y))

    def test_2d_coordinates_ij_indexing_with_kwds(self):
        grid = Rectilinear([1.0, 2.0, 4.0, 8.0], [1.0, 2.0, 3.0], indexing="ij")
        expected_axis0 = np.tile([1.0, 2.0, 4.0, 8.0], (3, 1)).T.flat
        expected_axis1 = np.array([1.0, 2.0, 3.0] * 4)
        self.assertTrue(
            np.allclose(grid.get_point_coordinates(axis=0, indexing="ij"), expected_axis0)
        )
        self.assertTrue(
            np.allclose(grid.get_point_coordinates(axis=1, indexing="ij"), expected_axis1)
        )
def suite():
    """Collect the rectilinear grid tests into a unittest TestSuite."""
    return unittest.TestLoader().loadTestsFromTestCase(TestRectilinearGrid)


if __name__ == "__main__":
    unittest.main()
| StarcoderdataPython |
12823984 | from django.dispatch import Signal
from django.template.loader import select_template, TemplateDoesNotExist
from django.contrib.staticfiles import finders
from debug_toolbar.panels import DebugPanel
from widgy.models import Content
template_hierarchy_called = Signal(providing_args=['cls', 'kwargs', 'templates', 'used'])
def unique_list(lst):
    """
    Removes duplicates from a list while preserving order
    """
    # Bug fix: the previous implementation compared each item only with its
    # successor, so it dropped *consecutive* duplicates but kept repeats
    # separated by other entries.  Track everything already seen instead.
    seen = set()
    res = []
    for item in lst:
        if item not in seen:
            seen.add(item)
            res.append(item)
    return res
def monkey_patch():
    """
    Monkey patch in our own, instrumented, get_templates_hierarchy. Make
    sure to keep it a classmethod.
    """
    # im_func unwraps the classmethod to its plain function (Python 2 API).
    old_get_templates_hierarchy_unbound = Content.get_templates_hierarchy.im_func
    def new_get_templates_hierarchy(cls, **kwargs):
        # Same result as the original lookup, minus duplicate template names.
        res = old_get_templates_hierarchy_unbound(cls, **kwargs)
        res = unique_list(res)
        try:
            # the template the loader would actually pick
            name = select_template(res).name
        except TemplateDoesNotExist:
            # fall back to whichever candidates exist as static files
            name = [i for i in res if finders.find(i)]
        # broadcast the call so the debug-toolbar panel can record it
        template_hierarchy_called.send(sender=cls, cls=cls, kwargs=kwargs, templates=res, used=name)
        return res
    Content.get_templates_hierarchy = classmethod(new_get_templates_hierarchy)
class TemplatePanel(DebugPanel):
    """Debug-toolbar panel listing every widgy template-hierarchy lookup.

    Instantiating the panel installs ``monkey_patch()``, which re-broadcasts
    each ``Content.get_templates_hierarchy`` call through the
    ``template_hierarchy_called`` signal; this panel records those calls.
    """
    name = 'Widgy templates'
    template = 'widgy/debugtoolbar/templates.html'
    def __init__(self, *args, **kwargs):
        super(TemplatePanel, self).__init__(*args, **kwargs)
        # NOTE(review): monkey_patch() runs once per panel instantiation and
        # re-wraps the classmethod each time -- presumably harmless; confirm.
        monkey_patch()
        template_hierarchy_called.connect(self._store_info)
        # one dict of call info per get_templates_hierarchy() invocation
        self.calls = []
    def nav_title(self):
        return 'Widgy templates'
    def _store_info(self, **kwargs):
        # A class can't be outputted directly in a template, because it's
        # callable. `_meta` can't be accessed because it starts with an
        # underscore.
        kwargs['cls_name'] = '%s.%s' % (kwargs['cls']._meta.app_label,
                                        kwargs['cls'].__name__)
        self.calls.append(kwargs)
    def process_response(self, request, response):
        self.record_stats({
            'calls': self.calls,
        })
    def url(self):
        return ''
    def title(self):
        return '%s widgets rendered' % len(self.calls)
    @property
    def has_content(self):
        # only show the panel when at least one lookup was recorded
        return bool(self.calls)
| StarcoderdataPython |
4822508 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 0.5.0.5149 on 2015-07-06.
# 2015, SMART Health IT.
import os
import io
import unittest
import json
from . import observation
from .fhirdate import FHIRDate
class ObservationTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("Observation", js["resourceType"])
return observation.Observation(js)
    def testObservation1(self):
        """Round-trip obs-genetics-example1-somatic.json through the model.

        Checks the parsed instance, serializes it back to JSON, re-parses,
        and re-checks so as_json() is verified to be lossless.
        """
        inst = self.instantiate_from("obs-genetics-example1-somatic.json")
        self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
        self.implObservation1(inst)
        js = inst.as_json()
        self.assertEqual("Observation", js["resourceType"])
        inst2 = observation.Observation(js)
        self.implObservation1(inst2)
    def implObservation1(self, inst):
        """Field-level checks for the somatic EGFR genetics example."""
        self.assertEqual(inst.code.coding[0].code, "21665-5")
        self.assertEqual(inst.code.coding[0].display, "EGFR gene mutation analysis")
        self.assertEqual(inst.code.coding[0].system, "http://loinc.org")
        self.assertEqual(inst.comments, "The EGFR p.L858R mutation has been associated with response to anti-EGFR therapy")
        self.assertEqual(inst.extension[0].url, "http://hl7.org/fhir/StructureDefinition/geneticsGenomeBuild")
        self.assertEqual(inst.extension[0].valueString, "GRCh 37")
        self.assertEqual(inst.extension[1].url, "http://hl7.org/fhir/StructureDefinition/geneticsChromosome")
        self.assertEqual(inst.extension[1].valueString, "7")
        self.assertEqual(inst.extension[2].url, "http://hl7.org/fhir/StructureDefinition/geneticsGenomicReferenceSequenceId")
        self.assertEqual(inst.extension[2].valueCode, "NC_000007.13")
        self.assertEqual(inst.extension[3].url, "http://hl7.org/fhir/StructureDefinition/geneticsGenomicStart")
        self.assertEqual(inst.extension[3].valueInteger, 55259515)
        self.assertEqual(inst.extension[4].url, "http://hl7.org/fhir/StructureDefinition/geneticsGenomicStop")
        self.assertEqual(inst.extension[4].valueInteger, 55259515)
        self.assertEqual(inst.extension[5].url, "http://hl7.org/fhir/StructureDefinition/geneticsReferenceAllele")
        self.assertEqual(inst.extension[5].valueString, "T")
        self.assertEqual(inst.extension[6].url, "http://hl7.org/fhir/StructureDefinition/geneticsObservedAllele")
        self.assertEqual(inst.extension[6].valueString, "G")
        self.assertEqual(inst.extension[7].url, "http://hl7.org/fhir/StructureDefinition/geneticsGeneId")
        self.assertEqual(inst.extension[7].valueCodeableConcept.coding[0].code, "3236")
        self.assertEqual(inst.extension[7].valueCodeableConcept.coding[0].display, "EGFR")
        self.assertEqual(inst.extension[7].valueCodeableConcept.coding[0].system, "http://www.genenames.org")
        self.assertEqual(inst.extension[8].url, "http://hl7.org/fhir/StructureDefinition/geneticsTranscriptReferenceSequenseId")
        self.assertEqual(inst.extension[8].valueCode, "ENSESTT00000085772.1")
        self.assertEqual(inst.extension[9].url, "http://hl7.org/fhir/StructureDefinition/geneticsDNASequenceVariation")
        self.assertEqual(inst.extension[9].valueString, "c.2573T>G")
        self.assertEqual(inst.id, "genetics-example1-somatic")
        self.assertEqual(inst.status, "final")
        self.assertEqual(inst.text.status, "generated")
def testObservation2(self):
inst = self.instantiate_from("obs-genetics-example2-germline.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation2(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation2(inst2)
    def implObservation2(self, inst):
        """Assert the expected field values of the germline BRCA1 genetics
        example (fixture: obs-genetics-example2-germline.json)."""
        self.assertEqual(inst.code.coding[0].code, "21636-6")
        self.assertEqual(inst.code.coding[0].display, "BRCA1 gene mutation analysis")
        self.assertEqual(inst.code.coding[0].system, "http://loinc.org")
        self.assertEqual(inst.extension[0].url, "http://hl7.org/fhir/StructureDefinition/geneticsGenomeBuild")
        self.assertEqual(inst.extension[0].valueString, "GRCh 37")
        self.assertEqual(inst.extension[1].url, "http://hl7.org/fhir/StructureDefinition/geneticsChromosome")
        self.assertEqual(inst.extension[1].valueString, "17")
        self.assertEqual(inst.extension[2].url, "http://hl7.org/fhir/StructureDefinition/geneticsGenomicReferenceSequenceId")
        self.assertEqual(inst.extension[2].valueCode, "NC_000017.10")
        self.assertEqual(inst.extension[3].url, "http://hl7.org/fhir/StructureDefinition/geneticsGenomicStart")
        self.assertEqual(inst.extension[3].valueInteger, 41258504)
        self.assertEqual(inst.extension[4].url, "http://hl7.org/fhir/StructureDefinition/geneticsGenomicStop")
        self.assertEqual(inst.extension[4].valueInteger, 41258504)
        self.assertEqual(inst.extension[5].url, "http://hl7.org/fhir/StructureDefinition/geneticsReferenceAllele")
        self.assertEqual(inst.extension[5].valueString, "A")
        self.assertEqual(inst.extension[6].url, "http://hl7.org/fhir/StructureDefinition/geneticsObservedAllele")
        self.assertEqual(inst.extension[6].valueString, "C")
        self.assertEqual(inst.extension[7].url, "http://hl7.org/fhir/StructureDefinition/geneticsGeneId")
        self.assertEqual(inst.extension[7].valueCodeableConcept.coding[0].code, "1100")
        self.assertEqual(inst.extension[7].valueCodeableConcept.coding[0].display, "BRCA1")
        self.assertEqual(inst.extension[7].valueCodeableConcept.coding[0].system, "http://www.genenames.org")
        self.assertEqual(inst.extension[8].url, "http://hl7.org/fhir/StructureDefinition/geneticsTranscriptReferenceSequenseId")
        self.assertEqual(inst.extension[8].valueCode, "NM_007294.3")
        self.assertEqual(inst.extension[9].url, "http://hl7.org/fhir/StructureDefinition/geneticsDNASequenceVariation")
        self.assertEqual(inst.extension[9].valueString, "c.181T>G")
        self.assertEqual(inst.id, "genetics-example2-germline")
        self.assertEqual(inst.status, "final")
        self.assertEqual(inst.text.status, "generated")
def testObservation3(self):
inst = self.instantiate_from("obs-uslab-example1.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation3(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation3(inst2)
    def implObservation3(self, inst):
        """Assert the expected field values of the capillary blood lead
        screen example (fixture: obs-uslab-example1.json)."""
        self.assertEqual(inst.appliesDateTime.date, FHIRDate("2014-12-03").date)
        self.assertEqual(inst.appliesDateTime.as_json(), "2014-12-03")
        self.assertEqual(inst.code.coding[0].code, "10368-9")
        self.assertEqual(inst.code.coding[0].system, "http://loinc.org")
        self.assertEqual(inst.code.coding[1].code, "CAPLD")
        self.assertEqual(inst.code.coding[1].display, "Blood Lead, Capillary")
        self.assertEqual(inst.code.coding[1].system, "urn:oid:2.16.840.1.113883.172.16.58.3")
        self.assertEqual(inst.code.text, "Capillary Blood Lead Screen")
        self.assertEqual(inst.extension[0].url, "http://hl7.org/fhir/StructureDefinition/uslabobservationkind")
        self.assertEqual(inst.extension[0].valueCode, "result")
        self.assertEqual(inst.id, "uslab-example1")
        self.assertEqual(inst.identifier[0].system, "http://lis.acmelabs.org/identifiers/labtestresult")
        self.assertEqual(inst.identifier[0].type.text, "lab test result ID")
        self.assertEqual(inst.identifier[0].use, "official")
        self.assertEqual(inst.identifier[0].value, "1234")
        self.assertEqual(inst.interpretation.coding[0].code, "H")
        self.assertEqual(inst.interpretation.coding[0].system, "http://hl7.org/fhir/v2/0078")
        self.assertEqual(inst.issued.date, FHIRDate("2014-12-04T15:42:15-08:00").date)
        self.assertEqual(inst.issued.as_json(), "2014-12-04T15:42:15-08:00")
        self.assertEqual(inst.referenceRange[0].high.code, "ug/dL")
        self.assertEqual(inst.referenceRange[0].high.system, "http://unitsofmeasure.org")
        self.assertEqual(inst.referenceRange[0].high.units, "microgram per deciliter")
        self.assertEqual(inst.referenceRange[0].high.value, 5.0)
        self.assertEqual(inst.status, "final")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.valueQuantity.code, "ug/dL")
        self.assertEqual(inst.valueQuantity.system, "http://unitsofmeasure.org")
        self.assertEqual(inst.valueQuantity.units, "microgram per deciliter")
        self.assertEqual(inst.valueQuantity.value, 65)
def testObservation4(self):
inst = self.instantiate_from("obs-uslab-example2.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation4(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation4(inst2)
    def implObservation4(self, inst):
        """Assert the expected field values of the Neisseria gonorrhoeae PCR
        example (fixture: obs-uslab-example2.json)."""
        self.assertEqual(inst.appliesDateTime.date, FHIRDate("2014-12-05").date)
        self.assertEqual(inst.appliesDateTime.as_json(), "2014-12-05")
        self.assertEqual(inst.code.coding[0].code, "43305-2")
        self.assertEqual(inst.code.coding[0].display, "Neisseria gonorrhoeae rRNA [Presence] in Unspecified specimen by Probe and target amplification method")
        self.assertEqual(inst.code.coding[0].system, "http://loinc.org")
        self.assertEqual(inst.code.coding[1].code, "GCPCR")
        self.assertEqual(inst.code.coding[1].display, "APTIMA GC")
        self.assertEqual(inst.code.coding[1].system, "urn:oid:2.16.840.1.113883.172.16.58.3")
        self.assertEqual(inst.code.text, "Neisseria gonorrhoeae by PCR")
        self.assertEqual(inst.extension[0].url, "http://hl7.org/fhir/StructureDefinition/uslabobservationkind")
        self.assertEqual(inst.extension[0].valueCode, "result")
        self.assertEqual(inst.id, "uslab-example2")
        self.assertEqual(inst.identifier[0].system, "http://lis.acmelabs.org/identifiers/labtestresult")
        self.assertEqual(inst.identifier[0].type.text, "lab test result ID")
        self.assertEqual(inst.identifier[0].use, "official")
        self.assertEqual(inst.identifier[0].value, "2345")
        self.assertEqual(inst.interpretation.coding[0].code, "A")
        self.assertEqual(inst.interpretation.coding[0].system, "http://hl7.org/fhir/v2/0078")
        self.assertEqual(inst.issued.date, FHIRDate("2014-12-06T15:42:15-08:00").date)
        self.assertEqual(inst.issued.as_json(), "2014-12-06T15:42:15-08:00")
        self.assertEqual(inst.status, "final")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.valueCodeableConcept.coding[0].code, "10828004")
        self.assertEqual(inst.valueCodeableConcept.coding[0].display, "Positive")
        self.assertEqual(inst.valueCodeableConcept.coding[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.valueCodeableConcept.coding[1].code, "POS")
        self.assertEqual(inst.valueCodeableConcept.coding[1].display, "Positive")
        self.assertEqual(inst.valueCodeableConcept.coding[1].system, "urn:oid:2.16.840.1.113883.172.16.58.3")
        self.assertEqual(inst.valueCodeableConcept.text, "Positive for Neisseira gonorrheoea rRNA")
def testObservation5(self):
inst = self.instantiate_from("obs-uslab-example3.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation5(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation5(inst2)
    def implObservation5(self, inst):
        """Assert the expected field values of the miscellaneous culture
        example (fixture: obs-uslab-example3.json)."""
        self.assertEqual(inst.appliesDateTime.date, FHIRDate("2014-12-05").date)
        self.assertEqual(inst.appliesDateTime.as_json(), "2014-12-05")
        self.assertEqual(inst.code.coding[0].code, "6463-4")
        self.assertEqual(inst.code.coding[0].display, "Bacteria identified in Unspecified specimen by Culture")
        self.assertEqual(inst.code.coding[0].system, "http://loinc.org")
        self.assertEqual(inst.code.coding[1].code, "MSCBACT")
        self.assertEqual(inst.code.coding[1].display, "Misc Culture")
        self.assertEqual(inst.code.coding[1].system, "urn:oid:2.16.840.1.113883.172.16.58.3")
        self.assertEqual(inst.code.text, "Miscellaneous Culture")
        self.assertEqual(inst.extension[0].url, "http://hl7.org/fhir/StructureDefinition/uslabobservationkind")
        self.assertEqual(inst.extension[0].valueCode, "result")
        self.assertEqual(inst.id, "uslab-example3")
        self.assertEqual(inst.identifier[0].system, "http://lis.acmelabs.org/identifiers/labtestresult")
        self.assertEqual(inst.identifier[0].type.text, "lab test result ID")
        self.assertEqual(inst.identifier[0].use, "official")
        self.assertEqual(inst.identifier[0].value, "3456")
        self.assertEqual(inst.interpretation.coding[0].code, "A")
        self.assertEqual(inst.interpretation.coding[0].system, "http://hl7.org/fhir/v2/0078")
        self.assertEqual(inst.issued.date, FHIRDate("2014-12-06T15:42:15-08:00").date)
        self.assertEqual(inst.issued.as_json(), "2014-12-06T15:42:15-08:00")
        self.assertEqual(inst.status, "final")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.valueString, "Moderate Growth of gram-positive cocci bacteria")
def testObservation6(self):
inst = self.instantiate_from("obs-uslab-example4.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation6(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation6(inst2)
    def implObservation6(self, inst):
        """Assert the expected field values of the Dengue IgM titer example
        (fixture: obs-uslab-example4.json)."""
        self.assertEqual(inst.appliesDateTime.date, FHIRDate("2014-12-05").date)
        self.assertEqual(inst.appliesDateTime.as_json(), "2014-12-05")
        self.assertEqual(inst.code.coding[0].code, "6812-2")
        self.assertEqual(inst.code.coding[0].display, "Dengue virus IgM Ab [Titer] in Serum")
        self.assertEqual(inst.code.coding[0].system, "http://loinc.org")
        self.assertEqual(inst.code.coding[1].code, "DGIGM")
        self.assertEqual(inst.code.coding[1].display, "Dengue IgM QN")
        self.assertEqual(inst.code.coding[1].system, "urn:oid:2.16.840.1.113883.172.16.58.3")
        self.assertEqual(inst.code.text, "Dengue IgM Titer")
        self.assertEqual(inst.extension[0].url, "http://hl7.org/fhir/StructureDefinition/uslabobservationkind")
        self.assertEqual(inst.extension[0].valueCode, "result")
        self.assertEqual(inst.id, "uslab-example4")
        self.assertEqual(inst.identifier[0].system, "http://lis.acmelabs.org/identifiers/labtestresult")
        self.assertEqual(inst.identifier[0].type.text, "lab test result ID")
        self.assertEqual(inst.identifier[0].use, "official")
        self.assertEqual(inst.identifier[0].value, "4567")
        self.assertEqual(inst.interpretation.coding[0].code, "H")
        self.assertEqual(inst.interpretation.coding[0].system, "http://hl7.org/fhir/v2/0078")
        self.assertEqual(inst.issued.date, FHIRDate("2014-12-06T15:42:15-08:00").date)
        self.assertEqual(inst.issued.as_json(), "2014-12-06T15:42:15-08:00")
        self.assertEqual(inst.referenceRange[0].text, "<1:64")
        self.assertEqual(inst.status, "final")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.valueRatio.denominator.value, 256)
        self.assertEqual(inst.valueRatio.numerator.value, 1)
def testObservation7(self):
inst = self.instantiate_from("obs-uslab-example5.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation7(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation7(inst2)
    def implObservation7(self, inst):
        """Assert the expected field values of the venous blood lead example
        (fixture: obs-uslab-example5.json)."""
        self.assertEqual(inst.appliesDateTime.date, FHIRDate("2014-12-05").date)
        self.assertEqual(inst.appliesDateTime.as_json(), "2014-12-05")
        self.assertEqual(inst.code.coding[0].code, "5671-3")
        self.assertEqual(inst.code.coding[0].display, "Lead [Mass/volume] in Blood")
        self.assertEqual(inst.code.coding[0].system, "http://loinc.org")
        self.assertEqual(inst.code.coding[1].code, "BLDLD")
        self.assertEqual(inst.code.coding[1].display, "Blood Lead")
        self.assertEqual(inst.code.coding[1].system, "urn:oid:2.16.840.1.113883.172.16.58.3")
        self.assertEqual(inst.code.text, "Blood Lead")
        self.assertEqual(inst.extension[0].url, "http://hl7.org/fhir/StructureDefinition/uslabobservationkind")
        self.assertEqual(inst.extension[0].valueCode, "result")
        self.assertEqual(inst.id, "uslab-example5")
        self.assertEqual(inst.identifier[0].system, "http://lis.acmelabs.org/identifiers/labtestresult")
        self.assertEqual(inst.identifier[0].type.text, "lab test result ID")
        self.assertEqual(inst.identifier[0].use, "official")
        self.assertEqual(inst.identifier[0].value, "1234")
        self.assertEqual(inst.interpretation.coding[0].code, "H")
        self.assertEqual(inst.interpretation.coding[0].system, "http://hl7.org/fhir/v2/0078")
        self.assertEqual(inst.issued.date, FHIRDate("2014-12-06T15:42:15-08:00").date)
        self.assertEqual(inst.issued.as_json(), "2014-12-06T15:42:15-08:00")
        self.assertEqual(inst.referenceRange[0].high.code, "ug/dL")
        self.assertEqual(inst.referenceRange[0].high.system, "http://unitsofmeasure.org")
        self.assertEqual(inst.referenceRange[0].high.units, "microgram per deciliter")
        self.assertEqual(inst.referenceRange[0].high.value, 5.0)
        self.assertEqual(inst.status, "final")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.valueQuantity.code, "ug/dL")
        self.assertEqual(inst.valueQuantity.system, "http://unitsofmeasure.org")
        self.assertEqual(inst.valueQuantity.units, "microgram per deciliter")
        self.assertEqual(inst.valueQuantity.value, 65)
def testObservation8(self):
inst = self.instantiate_from("obs-uslab-example6.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation8(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation8(inst2)
    def implObservation8(self, inst):
        """Assert the expected field values of the GC/CT RNA panel example
        (fixture: obs-uslab-example6.json)."""
        self.assertEqual(inst.appliesDateTime.date, FHIRDate("2014-12-05").date)
        self.assertEqual(inst.appliesDateTime.as_json(), "2014-12-05")
        self.assertEqual(inst.code.coding[0].code, "64017-7")
        self.assertEqual(inst.code.coding[0].display, "Chlamydia trachomatis and Neisseria gonorrhoeae rRNA panel - Unspecified specimen by Probe and target amplification method")
        self.assertEqual(inst.code.coding[0].system, "http://loinc.org")
        self.assertEqual(inst.code.coding[1].code, "GCCTPCR")
        self.assertEqual(inst.code.coding[1].display, "GC CT PCR")
        self.assertEqual(inst.code.coding[1].system, "urn:oid:2.16.840.1.113883.172.16.58.3")
        self.assertEqual(inst.code.text, "GC CT RNA Panel")
        self.assertEqual(inst.extension[0].url, "http://hl7.org/fhir/StructureDefinition/uslabobservationkind")
        self.assertEqual(inst.extension[0].valueCode, "result")
        self.assertEqual(inst.id, "uslab-example6")
        self.assertEqual(inst.identifier[0].system, "http://lis.acmelabs.org/identifiers/labtestresult")
        self.assertEqual(inst.identifier[0].type.text, "lab test result ID")
        self.assertEqual(inst.identifier[0].use, "official")
        self.assertEqual(inst.identifier[0].value, "8901")
        self.assertEqual(inst.issued.date, FHIRDate("2014-12-06T15:42:15-08:00").date)
        self.assertEqual(inst.issued.as_json(), "2014-12-06T15:42:15-08:00")
        self.assertEqual(inst.related[0].type, "has-component")
        self.assertEqual(inst.related[1].type, "has-component")
        self.assertEqual(inst.status, "final")
        self.assertEqual(inst.text.status, "generated")
def testObservation9(self):
inst = self.instantiate_from("obs-uslab-example7.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation9(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation9(inst2)
    def implObservation9(self, inst):
        """Assert the expected field values of the Chlamydia trachomatis PCR
        example (fixture: obs-uslab-example7.json)."""
        self.assertEqual(inst.appliesDateTime.date, FHIRDate("2014-12-05").date)
        self.assertEqual(inst.appliesDateTime.as_json(), "2014-12-05")
        self.assertEqual(inst.code.coding[0].code, "43304-5")
        self.assertEqual(inst.code.coding[0].display, "Chlamydia trachomatis rRNA [Presence] in Unspecified specimen by Probe and target amplification method")
        self.assertEqual(inst.code.coding[0].system, "http://loinc.org")
        self.assertEqual(inst.code.coding[1].code, "CTPCR")
        self.assertEqual(inst.code.coding[1].display, "APTIMA CT")
        self.assertEqual(inst.code.coding[1].system, "urn:oid:2.16.840.1.113883.172.16.58.3")
        self.assertEqual(inst.code.text, "Chlamydia trachomatis by PCR")
        self.assertEqual(inst.extension[0].url, "http://hl7.org/fhir/StructureDefinition/uslabobservationkind")
        self.assertEqual(inst.extension[0].valueCode, "result")
        self.assertEqual(inst.id, "uslab-example7")
        self.assertEqual(inst.identifier[0].system, "http://lis.acmelabs.org/identifiers/labtestresult")
        self.assertEqual(inst.identifier[0].type.text, "lab test result ID")
        self.assertEqual(inst.identifier[0].use, "official")
        self.assertEqual(inst.identifier[0].value, "9012")
        self.assertEqual(inst.interpretation.coding[0].code, "N")
        self.assertEqual(inst.interpretation.coding[0].system, "http://hl7.org/fhir/v2/0078")
        self.assertEqual(inst.issued.date, FHIRDate("2014-12-06T15:42:15-08:00").date)
        self.assertEqual(inst.issued.as_json(), "2014-12-06T15:42:15-08:00")
        self.assertEqual(inst.status, "final")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.valueCodeableConcept.coding[0].code, "260385009")
        self.assertEqual(inst.valueCodeableConcept.coding[0].display, "Negative")
        self.assertEqual(inst.valueCodeableConcept.coding[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.valueCodeableConcept.coding[1].code, "NEG")
        self.assertEqual(inst.valueCodeableConcept.coding[1].display, "Negative")
        self.assertEqual(inst.valueCodeableConcept.coding[1].system, "urn:oid:2.16.840.1.113883.172.16.58.3")
        self.assertEqual(inst.valueCodeableConcept.text, "Negative for Chlamydia Trachomatis rRNA")
def testObservation10(self):
inst = self.instantiate_from("obs-uslab-example8.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation10(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation10(inst2)
    def implObservation10(self, inst):
        """Assert the expected field values of the urine WBC example
        (fixture: obs-uslab-example8.json)."""
        self.assertEqual(inst.appliesDateTime.date, FHIRDate("2014-12-03").date)
        self.assertEqual(inst.appliesDateTime.as_json(), "2014-12-03")
        self.assertEqual(inst.code.coding[0].code, "5821-4")
        self.assertEqual(inst.code.coding[0].system, "http://loinc.org")
        self.assertEqual(inst.code.coding[1].code, "WBCFLD")
        self.assertEqual(inst.code.coding[1].display, "White blood cells (WBC) in urine ")
        self.assertEqual(inst.code.coding[1].system, "urn:oid:2.16.840.1.113883.172.16.58.3")
        self.assertEqual(inst.code.text, "White blood cells (WBC) in urine")
        self.assertEqual(inst.extension[0].url, "http://hl7.org/fhir/StructureDefinition/uslabobservationkind")
        self.assertEqual(inst.extension[0].valueCode, "result")
        self.assertEqual(inst.id, "uslab-example8")
        self.assertEqual(inst.identifier[0].system, "http://lis.acmelabs.org/identifiers/labtestresult")
        self.assertEqual(inst.identifier[0].type.text, "lab test result ID")
        self.assertEqual(inst.identifier[0].use, "official")
        self.assertEqual(inst.identifier[0].value, "9099")
        self.assertEqual(inst.interpretation.coding[0].code, "H")
        self.assertEqual(inst.interpretation.coding[0].system, "http://hl7.org/fhir/v2/0078")
        self.assertEqual(inst.issued.date, FHIRDate("2014-12-04T15:42:15-08:00").date)
        self.assertEqual(inst.issued.as_json(), "2014-12-04T15:42:15-08:00")
        self.assertEqual(inst.referenceRange[0].high.code, "/[HPF]")
        self.assertEqual(inst.referenceRange[0].high.system, "http://unitsofmeasure.org")
        self.assertEqual(inst.referenceRange[0].high.units, "WBC/HPF")
        self.assertEqual(inst.referenceRange[0].high.value, 5)
        self.assertEqual(inst.referenceRange[0].low.code, "/[HPF]")
        self.assertEqual(inst.referenceRange[0].low.system, "http://unitsofmeasure.org")
        self.assertEqual(inst.referenceRange[0].low.units, "WBC/HPF")
        self.assertEqual(inst.referenceRange[0].low.value, 0)
        self.assertEqual(inst.status, "final")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.valueRange.high.units, "WBC/HPF")
        self.assertEqual(inst.valueRange.high.value, 20)
        self.assertEqual(inst.valueRange.low.units, "WBC/HPF")
        self.assertEqual(inst.valueRange.low.value, 10)
| StarcoderdataPython |
3218914 | <reponame>seukjung/sentry-custom
from __future__ import absolute_import
import mock
from sentry.testutils import TestCase
from sentry.utils.retries import TimedRetryPolicy, RetryException
class TimedRetryPolicyTestCase(TestCase):
    """Exercise TimedRetryPolicy's retry loop against a mocked clock."""
    def test_policy_success(self):
        """A target that fails once, then succeeds while time remains,
        should have its result returned (after exactly two calls)."""
        bomb = Exception('Boom!')
        # Renamed from `callable`, which shadowed the builtin of the same name.
        # Fails on the first invocation, succeeds on the second.
        func = mock.MagicMock(side_effect=[bomb, mock.sentinel.OK])
        retry = TimedRetryPolicy(30, delay=lambda i: 10)
        retry.clock = mock.Mock()
        retry.clock.sleep = mock.MagicMock()
        retry.clock.time = mock.MagicMock(side_effect=[0, 15])
        assert retry(func) is mock.sentinel.OK
        assert func.call_count == 2
    def test_policy_failure(self):
        """A target that keeps failing should raise RetryException wrapping
        the last underlying error once the 30-unit budget runs out."""
        bomb = Exception('Boom!')
        func = mock.MagicMock(side_effect=bomb)
        retry = TimedRetryPolicy(30, delay=lambda i: 10)
        retry.clock = mock.Mock()
        retry.clock.sleep = mock.MagicMock()
        # Clock readings 0, 15, 25 — presumably the policy stops retrying
        # before a third call exceeds the budget; confirm against
        # sentry.utils.retries if the implementation changes.
        retry.clock.time = mock.MagicMock(side_effect=[0, 15, 25])
        try:
            retry(func)
        except RetryException as exception:
            assert exception.exception is bomb
        else:
            self.fail('Expected {!r}!'.format(RetryException))
        assert func.call_count == 2
| StarcoderdataPython |
330643 | <filename>deeppulsarnet/model/model_output.py
import torch.nn as nn
from torch.nn.utils import weight_norm
from torch.nn import functional as F
import torch
from model.TemporalBlock import TemporalBlock
class OutputLayer(nn.Module):
def __init__(self, input_channels, intermediate, final_nonlin,
dropout=0, kernel=19, residual=True, output_channels=1):
super().__init__()
layers = []
layers_2 = []
self.input_channels = input_channels
self.output_channels = output_channels
if intermediate > 1 and kernel != 0:
layers += [nn.Dropout2d(dropout),
TemporalBlock(input_channels, intermediate, kernel, stride=1, dilation=1,
norm_groups=1,conv_groups=1, residual=residual, final_norm=False),
nn.Conv1d(intermediate, output_channels, 1)]
elif intermediate > 1:
layers += [nn.Dropout2d(dropout),
nn.Conv1d(input_channels, intermediate, 1),
nn.LeakyReLU(),
nn.Conv1d(intermediate, output_channels, 1)]
else:
layers += [nn.Dropout2d(dropout),
nn.Conv1d(input_channels, output_channels, 1)]
layers_2 += [nn.LeakyReLU(),
# nn.Conv1d(1, 1, 1, bias=False)
]
if final_nonlin:
layers_2 += [nn.Tanh()]
self.network_1 = nn.Sequential(*layers)
self.network_2 = nn.Sequential(*layers_2)
def forward(self, x):
out_1 = self.network_1(x)
out_2 = self.network_2(out_1)
return out_2
| StarcoderdataPython |
209839 | from time import sleep
# Count down from 10 to 0, one tick per second, then announce the blast.
for seconds_left in reversed(range(11)):
    sleep(1)
    print(seconds_left)
print('Boom! Boom! Pow!')
| StarcoderdataPython |
6613513 | <reponame>robotlightsyou/pfb-resources<filename>sessions/003 session-numbers/exercises/cash_register_video.py
#! /usr/bin/env python3
'''
Write a function that asks the user for
an amount of money as input, then
returns the minimum number of coins
needed to make that amount.
'''
# Coin denominations in cents, largest first — the greedy loop in
# get_change() relies on this descending order.
user_coins = [25, 10, 5, 1]
# print return of get_change
def main():
    """Prompt for an amount via get_change() and display the coin count."""
    coin_count = get_change()
    print(coin_count)
def get_change():
    """Prompt the user for a dollar amount and return the minimum number
    of coins, using a greedy pass over the module-level `user_coins`
    denominations (largest first).

    Returns:
        int: total number of coins needed.
    """
    print("How much change is owed?")
    amount = input("> ")
    # Convert dollars to whole cents. round() instead of int(): plain
    # truncation is wrong for many inputs, e.g. int(0.29 * 100) == 28
    # because 0.29 * 100 == 28.999999999999996 in binary floating point.
    cents = round(float(amount) * 100)
    # Greedy: take as many of each coin as possible, largest denomination
    # first (optimal for the US coin system).
    total_coins = 0
    for coin in user_coins:
        total_coins += cents // coin
        cents %= coin
    return total_coins
# Run the prompt only when executed as a script, not on import.
if __name__ == '__main__':
    main()
'''
Note: greedy change-making is not optimal for every coin set.
Example: $0.31 with coins [25, 10, 1]
    greedy : 1 x 25 + 6 x 1  -> 7 coins
    optimal: 3 x 10 + 1 x 1  -> 4 coins
''' | StarcoderdataPython |
3279231 | """
Functional tests for the web service using Selenium
"""
from sys import platform
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from django.test import LiveServerTestCase
from pyvirtualdisplay import Display
from contextlib import contextmanager
class LoginTest(LiveServerTestCase):
    """
    Selenium tests for the web service
    """
    # Needed to overwrite LiveServerTestCase default port (i.e. 8001)
    port = 8000
    # Wait for a new window to load
    @contextmanager
    def wait_for_new_window(self, driver, timeout=10):
        """Context manager: perform the wrapped action, then block until the
        number of browser window handles changes (a window opened or closed)
        or `timeout` seconds elapse."""
        handles_before = self.driver.window_handles
        yield
        WebDriverWait(driver, timeout).until(
            lambda driver: len(handles_before) != len(driver.window_handles))
    def setUp(self):
        """Start a Chrome session appropriate for the host OS and open the
        app under test."""
        # Create chrome sessions depending on OS
        if platform == "win32":
            path = "C:\\DRIVERS\\chromedriver_win32\\chromedriver.exe"
            self.driver = webdriver.Chrome(executable_path=path)
        elif platform == "darwin":
            path = "/usr/local/bin/chromedriver"
            self.driver = webdriver.Chrome(executable_path=path)
        else:
            # Assure no visibility display for Ubuntu CLI to render without UI
            # Assure large enough screen to prevent responsive look which hides some buttons
            display = Display(visible=0, size=(1920, 1080))
            display.start()
            path = "/usr/bin/chromedriver"
            self.driver = webdriver.Chrome(executable_path=path)
        self.driver.implicitly_wait(3)
        self.driver.maximize_window()
        self.driver.get(self.live_server_url)
        super(LoginTest, self).setUp()
    def tearDown(self):
        """Shut down the browser session."""
        self.driver.quit()
        super(LoginTest, self).tearDown()
    def test_work_process(self):
        """
        End to end test of the recommendation workflow
        """
        """
         ___ _        _   _      ___
        / __| |_ __ _| |_(_)__  | _ \__ _ __ _ ___ ___
        \__ \  _/ _` |  _| / _| |  _/ _` / _` / -_|_-<
        |___/\__\__,_|\__|_\__| |_| \__,_\__, \___/__/
                                         |___/
        """
        # get the login buttons
        login_button = self.driver.find_element_by_xpath(
            "//*[@id='navbarBasicExample']/div[2]/div/div")
        login_button.click()
        # Check if the user redirected back to the main page
        self.assertEqual(f'http://localhost:{LoginTest.port}/', self.driver.current_url)
        # About button
        about_ele = self.driver.find_element_by_xpath(
            "//*[@id='navbarBasicExample']/div[1]/a[2]")
        about_ele.click()
        # Check if the user at about page
        self.assertEqual(f'http://localhost:{LoginTest.port}/about', self.driver.current_url)
        """
             _ ___   ___                   _ _
         _  | / __| | _ \___ _ __  ___ __(_) |_ ___ _ _ _  _
        | |_| \__ \ |   / -_) '_ \/ _ (_-< |  _/ _ \ '_| || |
         \__/|___/ |_|_\___| .__/\___/__/_|\__\___/_|  \_, |
                           |_|                         |__/
        """
        # My repositories button
        reps_ele_path = "//*[@id='navbarBasicExample']/div[1]/a[4]"
        reps_ele = self.driver.find_element_by_xpath(reps_ele_path)
        reps_ele.click()
        # Check if the user at my repositories page
        self.assertEqual(f'http://localhost:{LoginTest.port}/repositories', self.driver.current_url)
        # The express element from the repos list
        first_repo_ele = self.driver.find_element_by_xpath(
            "//tbody//a[text()='pkgpkr1/express']")
        first_repo_ele.click()
        # Wait until the loading animation is disappeared
        loading_state = self.driver.find_element_by_xpath("//*[@class='pageloader']")
        WebDriverWait(self.driver, 30).until(EC.invisibility_of_element(loading_state))
        # Check if the user at recommendations page
        self.assertEqual("Recommendations", self.driver.title)
        showing_text = self.driver.find_element_by_xpath("//*[@id='recommend-table_info']")
        self.assertIn('Showing 1', showing_text.get_attribute('textContent'))
        # Check if text in branch selector is `master`
        branch_span = self.driver.find_element_by_xpath("//*[@class='dropdown-trigger']/button/span")
        self.assertEqual("master", branch_span.get_attribute('textContent'))
        # Click on dropdown and another branch in the Dropdown
        branch_to_click = self.driver.find_element_by_xpath("//*[@class='dropdown-menu']/div/a[@href='?branch=test']")
        branch_dropdown = self.driver.find_element_by_xpath("//*[@class='dropdown-trigger']/button")
        branch_dropdown.click()
        branch_to_click.click()
        # Wait until the loading animation is disappeared
        loading_state = self.driver.find_element_by_xpath("//*[@class='pageloader']")
        WebDriverWait(self.driver, 30).until(EC.invisibility_of_element(loading_state))
        # Check if the user at recommendations page with different branch
        self.assertEqual("Recommendations", self.driver.title)
        showing_text = self.driver.find_element_by_xpath("//*[@id='recommend-table_info']")
        self.assertIn('Showing 1', showing_text.get_attribute('textContent'))
        # Assure that we are looking at another branch
        branch_span = self.driver.find_element_by_xpath("//*[@class='dropdown-trigger']/button/span")
        self.assertEqual("test", branch_span.get_attribute('textContent'))
        # Category border
        category_order_ele = self.driver.find_element_by_xpath(
            "//*[@id='recommend-table']/thead/tr/th[4]")
        # Click it twice to make sure the first recommendation has at least one category
        category_order_ele.click()
        category_order_ele.click()
        # The first category
        first_category_ele = self.driver.find_element_by_xpath(
            "//*[@id='recommend-table']/tbody/tr[1]/td[3]/div[1]/button")
        first_category_ele.click()
        # Clear button
        clear_ele = self.driver.find_element_by_xpath("//*[@id='category-clear']")
        clear_ele.click()
        # Filter text inputs
        search_ele_path = "//*[@id='recommendationFilter']"
        search_ele = self.driver.find_element_by_xpath(search_ele_path)
        search_ele.send_keys("te")
        search_ele.clear()
        # The first element from the recommendation list
        with self.wait_for_new_window(self.driver):
            first_recommendation_ele = self.driver.find_element_by_xpath(
                "//*[@id='recommend-table']/tbody/tr[1]/td[1]/div/div[1]")
            first_recommendation_ele.click()
        # Ensure that the package detail window opened as expected
        self.driver.switch_to_window("package_details")
        self.driver.find_element_by_xpath("//*[@id='app']")
        # Close the npm page
        self.driver.close()
        window_before = self.driver.window_handles[0]
        self.driver.switch_to_window(window_before)
        """     _ ___   ___
         _  | / __| |   \ ___ _ __  ___
        | |_| \__ \ | |) / -_) '  \/ _ \
         \__/|___/ |___/\___|_|_|_\___/
        """
        # Go back to the home page
        home_ele = self.driver.find_element_by_xpath(
            "//*[@id='navbarBasicExample']/div[1]/a[1]")
        home_ele.click()
        # Check if the user is on the home page
        self.assertEqual(f'http://localhost:{LoginTest.port}/', self.driver.current_url)
        # Try the recommendations demo
        demo_ele = self.driver.find_element_by_xpath(
            "//*[@id='recommendation-button']")
        demo_ele.click()
        # Wait until the loading animation is disappeared
        loading_state = self.driver.find_element_by_xpath("//*[@class='pageloader']")
        WebDriverWait(self.driver, 30).until(EC.invisibility_of_element(loading_state))
        # Check if the user at recommendations page
        self.assertEqual("Recommendations", self.driver.title)
        showing_text = self.driver.find_element_by_xpath("//*[@id='recommend-table_info']")
        self.assertIn('Showing 1', showing_text.get_attribute('textContent'))
        # Click the PkgPkr Score button
        score_ele = self.driver.find_element_by_xpath("//*[@id='recommend-table']/tbody/tr[1]/td[2]/img")
        score_ele.click()
        # Make sure the modal opens
        close_ele = self.driver.find_element_by_xpath("//*[@id='modal-close']")
        close_ele.click()
        """
         ___      _   _              ___
        | _ \_  _| |_| |_  ___ _ _  |   \ ___ _ __  ___
        |  _/ || |  _| ' \/ _ \ ' \ | |) / -_) '  \/ _ \
        |_|  \_, |\__|_||_\___/_||_| |___/\___|_|_|_\___/
             |__/
        """
        # Go back to the home page
        home_ele = self.driver.find_element_by_xpath(
            "//*[@id='navbarBasicExample']/div[1]/a[1]")
        home_ele.click()
        # Check if the user is on the home page
        self.assertEqual(f'http://localhost:{LoginTest.port}/', self.driver.current_url)
        # Choose the Python from demo dropdown and click recommendation
        select = self.driver.find_element_by_xpath('//*[@id="lang-select"]')
        select.click()
        python_option = self.driver.find_element_by_xpath('//*[@id="lang-select"]/option[text()="Python"]')
        python_option.click()
        text_area = self.driver.find_element_by_xpath('//*[@id="manual-input"]')
        self.assertIn('Django', text_area.get_attribute('value'))
        recommend_btn = self.driver.find_element_by_xpath('//*[@id="recommendation-button"]')
        recommend_btn.click()
        # # Wait until the loading animation is disappeared
        loading_state = self.driver.find_element_by_xpath("//*[@class='pageloader']")
        WebDriverWait(self.driver, 30).until(EC.invisibility_of_element(loading_state))
        # Assure it is showing Python on recommendation page and have results
        title = self.driver.find_element_by_xpath('/html/body/section[2]//h2')
        self.assertIn('Python Package', title.get_attribute('textContent'))
        #showing_text = self.driver.find_element_by_xpath("//*[@id='recommend-table_info']")
        #self.assertIn('Showing 1', showing_text.get_attribute('textContent'))
        # Logout button
        logout_ele = self.driver.find_element_by_xpath(
            "//*[@id='navbarBasicExample']/div[2]/div/div")
        logout_ele.click()
        # Check if the user redirected back to the main page
        self.assertEqual("Package Picker", self.driver.title)
| StarcoderdataPython |
5183632 | import errno
import os
import sys
from zmq.eventloop import ioloop
class RedirectorHandler(object):
    """IO-loop callback that drains a redirected pipe.

    Whenever the watched fd becomes readable, the available bytes are read
    and forwarded (together with process metadata) to the owning
    Redirector's ``redirect`` callable.
    """

    def __init__(self, redirector, name, process, pipe):
        self.redirector = redirector  # owning Redirector instance
        self.name = name              # logical stream name (e.g. 'stdout')
        self.process = process        # process whose output is redirected
        self.pipe = pipe              # file-like object wrapping the fd

    def __call__(self, fd, events):
        # Only READ events carry data; on ERROR the redirection is dropped,
        # anything else is ignored.
        if not (events & ioloop.IOLoop.READ):
            if events == ioloop.IOLoop.ERROR:
                self.redirector.remove_redirection(self.pipe)
            return
        try:
            data = os.read(fd, self.redirector.buffer)
            if not data:
                # Zero-byte read means EOF: the writer closed its end.
                self.redirector.remove_redirection(self.pipe)
            else:
                payload = {'data': data, 'pid': self.process.pid,
                           'name': self.name}
                payload.update(self.redirector.extra_info)
                self.redirector.redirect(payload)
        except IOError as ex:
            # EAGAIN just means "nothing to read right now".
            if ex.args[0] != errno.EAGAIN:
                raise
            try:
                sys.exc_clear()  # Python 2 only; absent (and unneeded) on 3
            except Exception:
                pass
class Redirector(object):
    """Multiplexes child-process pipes onto an IO loop.

    Each registered pipe gets a RedirectorHandler installed on the loop;
    data read from the pipes is handed to the ``redirect`` callable together
    with process metadata and ``extra_info``.
    """

    def __init__(self, redirect, extra_info=None,
                 buffer=4096, loop=None):
        self.running = False   # True once start() has been called
        self.pipes = {}        # fd -> (name, process, pipe) registrations
        self._active = {}      # fd -> RedirectorHandler currently installed
        self.redirect = redirect  # callable receiving the data mapping
        # Fix: the original assigned extra_info (possibly None) first and
        # then re-assigned it after the None check; normalize in one step.
        self.extra_info = {} if extra_info is None else extra_info
        self.buffer = buffer   # max bytes per os.read() call
        self.loop = loop or ioloop.IOLoop.instance()

    def _start_one(self, name, process, pipe):
        # Install a read handler for this pipe unless already watched.
        fd = pipe.fileno()
        if fd not in self._active:
            handler = RedirectorHandler(self, name, process, pipe)
            self.loop.add_handler(fd, handler, ioloop.IOLoop.READ)
            self._active[fd] = handler

    def start(self):
        """Begin watching every registered pipe."""
        for name, process, pipe in self.pipes.values():
            self._start_one(name, process, pipe)
        self.running = True

    def _stop_one(self, fd):
        if fd in self._active:
            self.loop.remove_handler(fd)
            del self._active[fd]

    def stop(self):
        """Stop watching all pipes (registrations are kept for restart)."""
        for fd in list(self._active.keys()):
            self._stop_one(fd)
        self.running = False

    def add_redirection(self, name, process, pipe):
        """Register a pipe; starts watching immediately when running."""
        fd = pipe.fileno()
        self._stop_one(fd)
        self.pipes[fd] = name, process, pipe
        if self.running:
            self._start_one(name, process, pipe)

    def remove_redirection(self, pipe):
        """Unregister a pipe; tolerates already-closed file objects."""
        try:
            fd = pipe.fileno()
        except ValueError:  # pipe already closed, nothing to unregister
            return
        self._stop_one(fd)
        if fd in self.pipes:
            del self.pipes[fd]
| StarcoderdataPython |
8122226 | <filename>test/integration/test_env_vars.py
import os.path
import re
from six import assertRegex
from . import *
class TestEnvVars(IntegrationTest):
    """Integration tests for the 'env_vars' example project."""

    def __init__(self, *args, **kwargs):
        IntegrationTest.__init__(self, 'env_vars', *args, **kwargs)

    @skip_if_backend('msbuild')
    def test_test(self):
        # msbuild backend cannot run the 'test' target, hence the skip.
        self.build('test')

    def test_command(self):
        # Fix: use a raw string for the regex -- '\s' inside a plain string
        # literal is an invalid escape sequence (DeprecationWarning today,
        # a SyntaxError in future Python versions).
        assertRegex(self, self.build('script'),
                    re.compile(r'^\s*hello script$', re.MULTILINE))
        self.assertExists(output_file('file'))
| StarcoderdataPython |
5029649 | <gh_stars>10-100
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds a 'show_ungrouped' flag to the people plugin so templates can
    # optionally render people that do not belong to any group.

    dependencies = [
        ('aldryn_people', '0012_auto_20150728_1114'),
    ]

    operations = [
        migrations.AddField(
            model_name='peopleplugin',
            name='show_ungrouped',
            # Defaults to False so existing plugin instances keep behaviour.
            field=models.BooleanField(default=False, help_text='when using "group by group", show ungrouped people too.', verbose_name='show ungrouped'),
            preserve_default=True,
        ),
    ]
| StarcoderdataPython |
1957649 | <reponame>radiome-flow/radiome
# -*- coding: utf-8 -*-
"""Unit test package for radiome."""
| StarcoderdataPython |
3325337 | <filename>t_9_classes/t_9_8_iterators/main.py<gh_stars>0
# Demonstration: the for statement works on any iterable -- lists, tuples,
# dicts (iterates over keys), strings, and file objects (iterates lines).
for element in [1, 2, 3]:
    print(element)
for element in (1, 2, 3):
    print(element)
for key in {'one': 1, 'two': 2}:
    print(key)
for char in '123':
    print(char)
# NOTE(review): assumes 'myfile.txt' exists in the working directory; the
# handle is never closed explicitly (a 'with' block would be safer).
for line in open("myfile.txt"):
    print(line, end='')
fruits = {'1': 'apple', '2': 'lemon', '3': 'pine'}
for key in fruits:
    print(key, fruits.get(key))
# Under the hood, for calls iter() to obtain an iterator and next() to
# advance it until StopIteration is raised.
print("Confirmation of Iterator object.")
s = 'abc'
it = iter(s)
print(it)
print(next(it))
print(next(it))
print(next(it))
# print(next(it)) This will be StopIteration.
class Reverse:
    """Iterator that walks a sequence from its last element to its first."""

    def __init__(self, data):
        self.data = data
        self.index = len(data)  # one past the element returned next

    def __iter__(self):
        # An iterator is its own iterable.
        return self

    def __next__(self):
        if self.index == 0:
            raise StopIteration
        self.index -= 1
        return self.data[self.index]
# Drive the custom iterator; iter() on an iterator returns the object itself.
rev = Reverse("spam")
iter(rev)
for char in rev:
    print(char)
| StarcoderdataPython |
6543985 | <reponame>nkowdley/CarND-Behavioral-Cloning-P3<gh_stars>0
#!/usr/bin/env python
"""
A python script used for Term 1, Project 3 Behavioral Cloning
This script does data ingestion and training
"""
import csv
import cv2
import matplotlib.image as mpimg
import numpy as np
from sklearn.utils import shuffle
from sklearn.model_selection import train_test_split
from keras.models import Model, Sequential
from keras.layers import Flatten, Dense, Lambda, Convolution2D, MaxPooling2D, Dropout, SpatialDropout2D
from keras.layers.convolutional import Cropping2D
from keras.optimizers import Adam
import matplotlib.pyplot as plt
from pprint import pprint
# Globals and Hyperparameters for Training/Testing
EPOCHS = 5
CORRECTION_FACTOR = .05
BATCH_SIZE = 128
STRAIGHT_KEEP_PROB = .8
STRAIGHT_THRESHOLD = .1
LEARNING_RATE = 0.001
def get_filename(path):
    """Return the last '/'-separated component of *path* (the file name).

    A path with no '/' is returned unchanged, matching the behaviour of
    splitting on '/' and taking the final piece.
    """
    return path.rpartition('/')[2]
def left_steering(measurement):
    """Steering value for the left camera: add the correction factor and
    clamp the result to the valid range [-1, 1]."""
    adjusted = measurement + CORRECTION_FACTOR
    return max(-1, min(1, adjusted))
def right_steering(measurement):
    """Steering value for the right camera: subtract the correction factor
    and clamp the result to the valid range [-1, 1]."""
    adjusted = measurement - CORRECTION_FACTOR
    return max(-1, min(1, adjusted))
def remove_straights(samples, drop_prob=STRAIGHT_KEEP_PROB, threshold=STRAIGHT_THRESHOLD):
    """Randomly drop near-zero-steering samples to avoid overfitting straights.

    Every sample after the first row (kept unconditionally, as in the
    original index-1 start) whose absolute steering angle is below
    *threshold* is removed with probability *drop_prob*.  The list is
    filtered in place (callers rely on the mutation) and also returned.

    Fix: the original deleted elements while walking the list with a manual
    index, which is O(n^2) and error prone; a single filtering pass with
    slice assignment keeps the in-place semantics.
    """
    samples[1:] = [
        s for s in samples[1:]
        if not (abs(float(s[3])) < threshold and np.random.rand() < drop_prob)
    ]
    return samples
def generator(samples, batch_size=BATCH_SIZE):
    """Yield shuffled (images, steering) batches without loading everything.

    For every CSV row, the center/left/right camera images are loaded,
    steering corrections applied for the side cameras, and each image is
    also flipped horizontally with negated steering -- 6 points per row.

    Bug fix: the inner loop previously read from the module-level ``line``
    variable (left over from the CSV parsing loop) instead of
    ``batch_sample``, so every batch was built from the same single row.
    """
    num_samples = len(samples)
    while 1:  # Loop forever so the generator never terminates
        shuffle(samples)
        for offset in range(0, num_samples, batch_size):
            batch_samples = samples[offset:offset + batch_size]
            augmented_images, augmented_measurements = [], []
            for batch_sample in batch_samples:
                # Load the three camera images for THIS row.
                path = './data/IMG/'  # current location of the training data
                #path = './edata/IMG/'
                center_image = mpimg.imread(path + get_filename(batch_sample[0]))
                left_image = mpimg.imread(path + get_filename(batch_sample[1]))
                right_image = mpimg.imread(path + get_filename(batch_sample[2]))
                # Steering angle plus side-camera corrections.
                measurement = float(batch_sample[3])
                left_measurement = left_steering(measurement)
                right_measurement = right_steering(measurement)
                augmented_images.extend([center_image, left_image, right_image])
                augmented_measurements.extend([measurement, left_measurement, right_measurement])
                # Horizontally flipped copies double the data for free; a
                # flipped image steers the opposite way, so negate and swap
                # the side-camera corrections.
                augmented_images.extend([np.fliplr(center_image), np.fliplr(left_image), np.fliplr(right_image)])
                measurement = measurement * -1.0
                left_measurement = right_steering(measurement)
                right_measurement = left_steering(measurement)
                augmented_measurements.extend([measurement, left_measurement, right_measurement])
            # numpy arrays so keras can consume the batch directly
            X_train = np.array(augmented_images)
            y_train = np.array(augmented_measurements)
            yield shuffle(X_train, y_train)
# Read the driving log; each row holds image paths and the steering angle.
lines = []
with open('./data/driving_log.csv') as csvfile:
    #with open('./edata/driving_log.csv') as csvfile:
    reader = csv.reader(csvfile)
    for line in reader:
        lines.append(line)
# Strip out some of the data
# (drop a share of the near-zero-steering rows to balance the dataset)
remove_straights(lines)
# 80/20 split, then wrap both halves in batch generators.
train_samples, validation_samples = train_test_split(lines, test_size=.20)
train_generator = generator(train_samples, batch_size=BATCH_SIZE)
validation_generator = generator(validation_samples, batch_size=BATCH_SIZE)
#Instantiate the model
model = Sequential()
#Normalize the data
model.add(Lambda(lambda x: x / 255.0 - 0.5, input_shape=(160, 320, 3)))  #normalize the data and give it a mean of 0
# Crop the data: top 50 rows (sky) and bottom 25 rows (car hood)
model.add(Cropping2D(cropping=((50,25),(0,0))))
# Nvidia model taken from: https://devblogs.nvidia.com/deep-learning-self-driving-cars/
model.add(Convolution2D(24, 5, 5, subsample=(2,2), activation='relu'))
#model.add(SpatialDropout2D(.2))
model.add(Convolution2D(36, 5, 5, subsample=(2,2), activation='relu'))
#model.add(SpatialDropout2D(.2))
model.add(Convolution2D(48, 5, 5, subsample=(2,2), activation='relu'))
#model.add(SpatialDropout2D(.2))
model.add(Convolution2D(64, 3, 3, activation='relu'))
model.add(Convolution2D(64, 3, 3, activation='relu'))
model.add(Flatten())
model.add(Dense(1164, activation='relu'))
model.add(Dense(100, activation='relu'))
#model.add(Dropout(.5))
model.add(Dense(50, activation='relu'))
model.add(Dense(10, activation='relu'))
# NOTE(review): 'relu' on the single-output regression layer clips negative
# steering angles to zero -- a linear activation is the usual choice; confirm.
model.add(Dense(1, activation='relu'))
# compile the model (mean absolute error, Adam with the configured LR)
model.compile(loss='mae', optimizer=Adam(lr = LEARNING_RATE))
# train the model
#model.fit(X_train, y_train, validation_split=.2, shuffle=True, nb_epoch=EPOCHS)
print(len(train_samples))
print(len(lines))
# The generator yields 6 augmented samples per CSV row, hence the *6 factors.
model.fit_generator(train_generator,
                    samples_per_epoch= len(train_samples)*6,
                    validation_data=validation_generator,
                    nb_val_samples=len(validation_samples)*6,
                    nb_epoch=EPOCHS)
model.save('model.h5')
# print the keys contained in the history object
#print(history_object.history.keys())
# plot the training and validation loss for each epoch
#plt.plot(history_object.history['loss'])
#plt.plot(history_object.history['val_loss'])
#plt.title('model mean squared error loss')
#plt.ylabel('mean squared error loss')
#plt.xlabel('epoch')
#plt.legend(['training set', 'validation set'], loc='upper right')
#plt.show()
| StarcoderdataPython |
4965159 | <reponame>GYosifov88/Python-Fundamentals
import re
# Read a line of text and print, comma separated, every identifier that
# starts with a single underscore (the underscore itself is excluded).
text = input()
found = [
    m.group('variable')
    for m in re.finditer(r"\b(_{1})(?P<variable>[A-Za-z\d]+)\b", text)
]
print(','.join(found))
| StarcoderdataPython |
3278207 | <gh_stars>0
import json
import os
import sys
import time
import requests
folder = "wallets"  # directory holding the *.json wallet files
page_num_path = folder + "/page_num.txt"  # progress file, one JSON object per line
_timeout = 20  # per-request timeout in seconds
time_gap = 5  # delay between requests; grows on failure, shrinks on success
count_quick_response = 0  # NOTE(review): never updated anywhere visible here
count_success_time = 0  # consecutive successful fetches; used to shrink time_gap
def thread(wallet_name):
    """Fetch the address count for *wallet_name* and append it to page_num_path.

    Retries up to 5 times, backing off by growing the module-level
    ``time_gap``; streaks of successes shrink the gap again.

    Bug fix: when every retry failed, the original fell through the loop and
    crashed with UnboundLocalError reading ``r_js``; it now logs and returns.
    Bare ``except:`` clauses were narrowed to ``except Exception`` so
    KeyboardInterrupt still aborts the run.
    """
    global _timeout, count_quick_response, time_gap, count_success_time
    url = "<to be fill>" + wallet_name + "<to be fill>"
    print("Getting: " + wallet_name)
    r_js = None
    try_times = 5
    while try_times > 0:
        try_times -= 1
        try:
            print("Timeout:" + str(_timeout) + "\tTime Gap:" + str(time_gap))
            r = requests.get(url, timeout=_timeout).text
        except Exception:
            # Network error / timeout: back off and retry.
            print(sys.exc_info())
            time_gap += 2
            time.sleep(time_gap)
            continue
        try:
            r_js = json.loads(r)
            break
        except Exception:
            # Non-JSON body, typically a "Too many requests" response.
            print(sys.exc_info())
            print(r)
            time_gap += 2
            time.sleep(time_gap)
            continue
    if r_js is None:
        # All retries exhausted; skip this wallet instead of crashing.
        print("Failed to get: " + wallet_name)
        return
    count_success_time += 1
    if count_success_time > 5:
        if time_gap > 2:
            time_gap -= 1
        count_success_time = 0
    print("Get")
    all_addr = r_js["addresses_count"]
    with open(page_num_path, "a") as f:
        f.write(json.dumps({wallet_name: all_addr}) + "\n")
    time.sleep(time_gap)
def main():
    """Load wallet URLs from every JSON file in ``folder``, skip wallets
    already recorded in the progress file, and fetch the rest sequentially."""
    service_to_urls = {}
    for name in os.listdir(folder):
        if name.endswith(".json"):
            print(name)
            with open(folder + "/" + name) as wallet_f:
                service_to_urls.update(json.loads(wallet_f.read()))
    print(len(service_to_urls))
    # Collect wallet names that were already fetched in a previous run.
    wallet_name_set = set()
    if os.path.isfile(page_num_path):
        with open(page_num_path) as f:
            while True:
                _l = f.readline()
                if _l == "":
                    break
                wallet_name_set.update(json.loads(_l).keys())
    for service in service_to_urls:
        for url in service_to_urls[service]:
            # The wallet name is the last path component of the URL.
            if url[url.rindex("/") + 1:] in wallet_name_set:
                continue
            thread(url[url.rindex("/") + 1:])


if __name__ == '__main__':
    main()
| StarcoderdataPython |
82481 | <filename>python/testData/intentions/PyInvertIfConditionIntentionTest/conditionAssignmentMultiple.py
def get_value():
return 1
<caret>if not (value := get_value()) or value <= 1:
print("Less or equal")
else:
print("Greater") | StarcoderdataPython |
# Compute a service bonus: years of service times the per-year amount.
years_of_service = int(input("Anos de Serviço: "))
amount_per_year = float(input("Valor por ano: "))
service_bonus = years_of_service * amount_per_year
print("Bônus de R$ %5.2f" % service_bonus)
5157806 | <filename>src/geocurrency/rates/permissions.py
"""
Permissions for Rate APIs
"""
from rest_framework import permissions
class RateObjectPermission(permissions.BasePermission):
    """
    Permissions for /rates APIs
    """

    def has_object_permission(self, request, view, obj):
        """
        Read access for everyone; writes require authentication: anyone
        logged in may create, but only the rate's owner may modify/delete.
        """
        # Read-only requests (GET, HEAD, OPTIONS) are always allowed.
        if request.method in permissions.SAFE_METHODS:
            return True
        # Everything below requires an authenticated user.
        if not (request.user and request.user.is_authenticated):
            return False
        if request.method == 'POST':
            return True
        # PUT/PATCH/DELETE are owner-only operations.
        return (request.method.lower() in ['put', 'patch', 'delete']
                and request.user == obj.user)
| StarcoderdataPython |
4801808 | from sklearn import ensemble
#0.75091  (best observed validation score with this setup)
# Registry of candidate ensemble classifiers, keyed by model name.
MODELS = {
    "randomforest": ensemble.RandomForestClassifier(n_estimators=200, n_jobs=-1, verbose=2),
    "extratrees": ensemble.ExtraTreesClassifier(n_estimators=200, n_jobs=-1, verbose=2),
}
6702905 | from oscar.apps.basket import config
class BasketConfig(config.BasketConfig):
    # Fork of Oscar's basket app; only the app label/path is overridden.
    name = 'forked_apps.basket'
5143306 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from utopia import signals
from utopia.client import ProtocolClient
from utopia.plugins.handshake import HandshakePlugin
from utopia.plugins.protocol import ProtocolPlugin
from utopia.plugins.util import LogPlugin
from test.util import unique_identity
def test_unicode_privmsg():
    """
    Ensure the client can send unicode and also receives unicode.

    Two clients connect to a local IRC server; client1 sends TEST_STRING to
    #test and client2 verifies that it arrives intact.
    """
    # Mix of latin-1 supplement, umlauts, arrows and katakana characters.
    TEST_STRING = u'± äöü @ o ↑↑↓↓←→←→BA コナミコマンド'
    client1 = ProtocolClient(
        unique_identity(), 'localhost', plugins=[
            HandshakePlugin,
            LogPlugin(),
            ProtocolPlugin()
        ]
    )
    client2 = ProtocolClient(
        unique_identity(), 'localhost', plugins=[
            HandshakePlugin,
            LogPlugin(),
            ProtocolPlugin()
        ]
    )

    # Mutable flag shared with the nested handlers (avoids 'nonlocal').
    class Container(object):
        pass

    got_message = Container()
    got_message.value = False

    def on_376(client, prefix, target, args):
        # 376 = end of MOTD: handshake finished, safe to join the channel.
        client.join_channel('#test')

    def on_join(client, prefix, target, args):
        client.privmsg('#test', TEST_STRING)

    def on_privmsg(client, prefix, target, args):
        # True only if the payload survived the round-trip unchanged.
        got_message.value = (args[0] == TEST_STRING)
        client1.terminate()
        client2.terminate()

    signals.m.on_376.connect(on_376, sender=client1)
    signals.m.on_376.connect(on_376, sender=client2)
    signals.m.on_JOIN.connect(on_join, sender=client1)
    signals.m.on_PRIVMSG.connect(on_privmsg, sender=client2)

    assert(client1.connect().get() is True)
    assert(client2.connect().get() is True)

    # Wait (bounded) for the IO workers; on_privmsg terminates both clients.
    client1._io_workers.join(timeout=5)
    client2._io_workers.join(timeout=5)
    assert(got_message.value)
| StarcoderdataPython |
3288228 | <gh_stars>0
# -*- coding: utf-8 -*-
"""
Copyright (c) Microsoft Corporation.
Licensed under the MIT License.
"""
import os
from setuptools import setup
# Load version in dapr package.
# Executes dapr/version.py in an isolated namespace to read __version__
# without importing the (possibly not-yet-installable) package itself.
version_info = {}
with open('dapr/version.py') as fp:
    exec(fp.read(), version_info)
__version__ = version_info['__version__']
def is_release():
    """Return True when __version__ carries no '.dev' development marker."""
    return __version__.find('.dev') < 0
# Release metadata; dev builds override these below.
name = 'dapr'
version = __version__
description = 'The official release of Dapr Python SDK.'
long_description = '''
Dapr is a portable, serverless, event-driven runtime that makes it easy for developers to
build resilient, stateless and stateful microservices that run on the cloud and edge and
embraces the diversity of languages and developer frameworks.
Dapr codifies the best practices for building microservice applications into open,
independent, building blocks that enable you to build portable applications with the language
and framework of your choice. Each building block is independent and you can use one, some,
or all of them in your application.
'''.lstrip()

# Get build number from GITHUB_RUN_NUMBER environment variable
build_number = os.environ.get('GITHUB_RUN_NUMBER', '0')

# Development builds ship under 'dapr-dev' with the CI run number appended.
if not is_release():
    name += '-dev'
    version = f'{__version__}{build_number}'
    description = 'The developmental release for Dapr Python SDK.'
    long_description = 'This is the developmental release for Dapr Python SDK.'

print(f'package name: {name}, version: {version}', flush=True)

setup(
    name=name,
    version=version,
    description=description,
    long_description=long_description,
)
| StarcoderdataPython |
1839775 | import hashlib
import traceback
from flask import Flask, request, make_response
from StockAnalysisSystem.core.config import Config
from StockAnalysisSystem.wechatservice.route import dispatch_wechat_message
app = Flask(__name__)
WECHAT_TOKEN = "xxxxxxx"
def load_config():
    """Populate the module-level WECHAT_TOKEN from the application config."""
    global WECHAT_TOKEN
    config = Config()
    config.load_config()
    WECHAT_TOKEN = config.get('wechat_token')
    # NOTE(review): if the key is missing, Config.get may return None rather
    # than '', in which case this warning is silently skipped -- confirm.
    if WECHAT_TOKEN == '':
        print('Warning: Wechat token is empty.')
@app.route('/weixin', methods=['GET', 'POST'])
def wechat_entry():
    """Verify WeChat server signatures and dispatch forwarded messages.

    GET requests are WeChat's endpoint-validation handshake (the challenge
    ``echostr`` is echoed back); POST requests carry forwarded user messages.
    Requests whose signature does not verify get a 403.
    """
    args = request.args
    signature = args.get('signature')
    timestamp = args.get('timestamp')
    nonce = args.get('nonce')
    echostr = args.get('echostr')

    # 1. Sort token, timestamp and nonce lexicographically.
    sign_arr = [WECHAT_TOKEN, timestamp, nonce]
    sign_arr.sort()
    sign_str = ''.join(sign_arr)

    # 2. Concatenate and SHA1-hash the result.
    # Fix: hashlib.sha1 requires bytes on Python 3; hashing the str raised
    # TypeError, so every request failed before verification.
    sign_dig = hashlib.sha1(sign_str.encode('utf-8')).hexdigest()

    # 3. Compare our digest with the signature WeChat sent.
    if sign_dig == signature:
        # GET: server-validation handshake -- echo the challenge string.
        if request.method == 'GET':
            return echostr
        # POST: a user message forwarded by the WeChat server.
        elif request.method == 'POST':
            return dispatch_wechat_message(request)
    else:
        return 'errno', 403
def main():
    # Load the token before serving; dev server bound to all interfaces.
    load_config()
    app.run(host='0.0.0.0', port=8000, debug=True)


if __name__ == '__main__':
    try:
        # NOTE(review): this runs app.run directly instead of main(), so
        # load_config() is never called on this path and WECHAT_TOKEN keeps
        # its placeholder value -- confirm whether main() was intended.
        app.run(host='0.0.0.0', port=8000, debug=True)
    except Exception as e:
        print('Error =>', e)
        print('Error =>', traceback.format_exc())
        exit()
    finally:
        pass
| StarcoderdataPython |
5118920 | <reponame>bram-rongen/contentful-management.py
from unittest import TestCase
from contentful_management.editor_interfaces_proxy import EditorInterfacesProxy
from .test_helper import CLIENT, PLAYGROUND_SPACE
class EditorInterfacesProxyTest(TestCase):
    """Unit tests for EditorInterfacesProxy construction and API surface."""

    def test_editor_interfaces_proxy(self):
        # The repr should identify space, environment and content type.
        proxy = EditorInterfacesProxy(CLIENT, PLAYGROUND_SPACE, 'master', 'foo')
        self.assertEqual(str(proxy), "<EditorInterfacesProxy space_id='{0}' environment_id='master' content_type_id='foo'>".format(PLAYGROUND_SPACE))

    def test_editor_interfaces_proxy_not_supported_methods(self):
        # Editor interfaces can only be fetched/updated, never created/deleted.
        proxy = EditorInterfacesProxy(CLIENT, PLAYGROUND_SPACE)
        with self.assertRaises(Exception):
            proxy.create()
        with self.assertRaises(Exception):
            proxy.delete()
| StarcoderdataPython |
4849037 | <filename>src/loqet/file_utils.py
"""
Copyright (c) 2021, <NAME>
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
"""
import os
import shutil
import time
from typing import Union
from loqet.loqet_configs import SAFE_MODE
def backup_file(filename: str) -> None:
    """
    Create a timestamped backup copy of a file.

    The copy is written next to the original as <filename>.bak.<timestamp>.
    In safe mode, a .gitignore covering backup artifacts is ensured first.

    Fix: the backup name and the log message previously used the literal
    string "(unknown)" instead of the actual file name, so every backup
    overwrote the same file and never referenced its source.

    :param filename: File path to back up
    :return: n/a
    """
    if SAFE_MODE:
        update_gitignore(filename)
    backup_filename = f"{filename}.bak.{int(time.time())}"
    shutil.copyfile(filename, backup_filename)
    print(f"Backed up old {filename} to {backup_filename}")
def update_gitignore(filename: str) -> None:
    """
    Updates .gitignore at either directory of filename or
    at LOQET_GITIGNORE with .open/.bak extensions

    :param filename: Target file to place .gitignore next to
    :return: n/a
    """
    # Patterns covering decrypted (*.open*) and backup (*.bak.*) artifacts.
    gitignore_entries = [
        "*.open*",
        "*.bak.*"
    ]
    # Resolve the directory the .gitignore should live in.
    if os.path.isdir(filename):
        target_dir = os.path.realpath(filename)
    else:
        target_dir = os.path.dirname(os.path.realpath(filename))
    default_gitignore_file = os.path.join(target_dir, ".gitignore")
    gitignore_file = os.environ.get("LOQET_GITIGNORE", default_gitignore_file)
    # The override must itself be a .gitignore path; otherwise fall back.
    gitignore_file = (
        gitignore_file
        if gitignore_file.endswith(".gitignore")
        else default_gitignore_file
    )
    # 'a+' creates the file if missing; seek(0) lets us read what is already
    # there, then append only the patterns not yet present.
    with open(gitignore_file, "a+") as f:
        f.seek(0)
        contents = f.read()
        for entry in gitignore_entries:
            if entry not in contents:
                if len(contents) > 0:
                    f.write("\n")
                f.write(f"{entry}\n")
def read_file(filename: str) -> str:
    """Read contents of file as string"""
    with open(filename, "r") as handle:
        return handle.read()
def write_file(contents: Union[str, bytes], filename: str) -> None:
    """Write string or bytes to file"""
    # Pick binary mode when the payload is bytes, text mode otherwise.
    mode = "wb" if isinstance(contents, bytes) else "w"
    with open(filename, mode) as handle:
        handle.write(contents)
| StarcoderdataPython |
326165 | from __future__ import unicode_literals
from smartmin.views import *
from sigtrac.carriers.models import Carrier
from sigtrac.reports.models import Report
from django.db.models import Avg
from django.utils import timezone
from django.shortcuts import get_object_or_404
from datetime import timedelta
import json
class IndexView(SmartTemplateView):
    template_name = 'public/public_index.html'

    def get_context_data(self, **kwargs):
        """Build a per-carrier time series of hourly average download speed
        over the last 120 hours, for the public landing-page chart."""
        carriers = Carrier.objects.all()
        data = []
        for carrier in carriers:
            carrier_data = dict()
            carrier_data['carrier'] = carrier.slug
            carrier_data['color'] = "#" + carrier.color
            # Window: start of the next full hour, looking back 120 hours.
            end = timezone.now() + timedelta(hours=1)
            end = end.replace(minute=0, second=0, microsecond=0)
            start = end - timedelta(hours=120)
            series = []
            while start <= end:
                # Hourly bucket average, ignoring negative (invalid) speeds.
                reports = Report.objects.filter(carrier=carrier, download_speed__gte=0, created_on__range=[start, start+timedelta(hours=1)]).order_by('created_on').aggregate(download_speed=Avg('download_speed'))
                if reports['download_speed'] is not None:
                    # NOTE(review): '-0200' timezone suffix is hard-coded;
                    # confirm it matches the deployment's timezone.
                    series.append([start.strftime('%Y-%m-%dT%H:%M:%S.%f-0200'), reports['download_speed']])
                start += timedelta(hours=1)
            carrier_data['series'] = series
            data.append(carrier_data)
        return dict(time_data=data)
class Series(View):
    def get(self, request, *args, **kwargs):
        """
        Public view, 1mo time series, hourly ave bandwidth per carrier
        """
        time_series = {}
        # d is the length of the x axis, interval is the distance between points
        deltas = [{'name': 'hour', 'd': timedelta(hours=1), 'interval': timedelta(minutes=5)},
                  {'name': 'day', 'd': timedelta(days=1), 'interval': timedelta(hours=1)},
                  {'name': 'week', 'd': timedelta(days=7), 'interval': timedelta(hours=3)}]
        for delta in deltas:
            time_series[delta['name']] = {}
            for carrier in Carrier.objects.all():
                time_series[delta['name']][carrier.slug] = self.get_time_series(carrier, delta)
        return HttpResponse(json.dumps(time_series), content_type="application/json")

    def get_time_series(self, carrier, delta):
        """Average download speed per 'interval' bucket over the window 'd',
        ending at the top of the next hour; empty buckets are omitted."""
        end = timezone.now() + timedelta(hours=1)
        end = end.replace(minute=0, second=0, microsecond=0)
        start = end - delta['d']
        series = []
        while start <= end:
            reports = carrier.report_set.filter(carrier=carrier, download_speed__gte=0, created_on__range=[start, start+delta['interval']]).order_by('created_on').aggregate(download_speed=Avg('download_speed'))
            if reports['download_speed'] is not None:
                # NOTE(review): hard-coded '-0200' timezone suffix -- confirm.
                series.append([start.strftime('%Y-%m-%dT%H:%M:%S.%f-0200'), reports['download_speed']])
            start += delta['interval']
        return series
| StarcoderdataPython |
1844256 | <gh_stars>1-10
class Solution:
    def merge(self, nums1, m, nums2, n):
        """
        Merge nums2 (n elements) into nums1 (m valid elements plus enough
        trailing slots), in place, keeping ascending order.

        :type nums1: List[int]
        :type m: int
        :type nums2: List[int]
        :type n: int
        :rtype: void Do not return anything, modify nums1 in-place instead.
        """
        # Fill from the back so no unprocessed value gets overwritten. O(m+n)
        write = m + n - 1
        i, j = m - 1, n - 1
        while i >= 0 and j >= 0:
            if nums1[i] > nums2[j]:
                nums1[write] = nums1[i]
                i -= 1
            else:
                nums1[write] = nums2[j]
                j -= 1
            write -= 1
        # Leftovers from nums2 go to the front; nums1 leftovers are already
        # in position.
        if j >= 0:
            nums1[:j + 1] = nums2[:j + 1]
        return nums1

    def use_sort_solution(self, nums1, m, nums2, n):
        """Copy nums2 into nums1's tail and sort -- O((m+n) log(m+n))."""
        nums1[m:] = nums2
        nums1.sort()
        return nums1
if __name__ == "__main__":
    # Quick manual check of both merge strategies.
    print(Solution().use_sort_solution([4, 5, 6, 0, 0], 2, [1, 6, 7], 3))
    print(Solution().merge([4, 5, 6, 0, 0], 2, [1, 6, 7], 3))
8029936 | import logging
from slack_sdk.webhook import WebhookClient
from gkentn.core.handler import State
class Notifier:
    """Sends GKE node-termination notices to a Slack incoming webhook."""

    def __init__(self, slack_webhook_url: str, logger: logging.Logger) -> None:
        self.logger = logger
        self.slack_webhook_url = slack_webhook_url

    def notify(self, state: State) -> None:
        """Post a warning attachment describing the terminating instance.

        :param state: handler State with instance metadata and termination time
        :raises ValueError: if state.termination_time is not set
        """
        if state.termination_time is None:
            raise ValueError("termination time is None")
        self.logger.info("Notifying instance state to slack")
        webhook = WebhookClient(self.slack_webhook_url)
        # Single warning-colored attachment; 'short' fields render two-up.
        webhook.send(
            attachments=[
                {
                    "color": "warning",
                    "title": ":warning: Node Termination",
                    "fields": [
                        {
                            "title": "InstanceName",
                            "value": state.instance_name,
                            "short": False,
                        },
                        {
                            "title": "MachineType",
                            "value": state.machine_type,
                            "short": False,
                        },
                        {
                            "title": "Zone",
                            "value": state.zone,
                            "short": True,
                        },
                        {
                            "title": "ProjectID",
                            "value": state.project_id,
                            "short": True,
                        },
                        {
                            "title": "Termination Time",
                            "value": state.termination_time.strftime("%Y-%m-%d %H:%M:%S"),
                            "short": True,
                        },
                        {
                            "title": "Is Preemptible Node",
                            "value": str(state.is_preemptible_node).lower(),
                            "short": True,
                        },
                    ],
                }
            ]
        )
| StarcoderdataPython |
5073094 |
import os

import yaml

# Config directory: $INTAKE_CONF_DIR, defaulting to ~/.intake
confdir = os.getenv('INTAKE_CONF_DIR',
                    os.path.join(os.path.expanduser('~'), '.intake'))
# Optional explicit config-file override.
conffile = os.getenv('INTAKE_CONF_FILE', None)
# Built-in defaults applied by reset_conf().
defaults = {'auth': {'class': 'intake.auth.base.BaseAuth'},
            'port': 5000}
# Live configuration mapping; mutated in place so references stay valid.
conf = {}
def reset_conf():
    """Set conf values back to defaults"""
    # Mutate in place (clear + update) so modules holding a reference to
    # ``conf`` observe the reset.
    conf.clear()
    conf.update(defaults)
def load_conf(fn=None):
    """Update global config from YAML file

    If fn is None, looks in global config directory, which is either defined
    by the INTAKE_CONF_DIR env-var or is ~/.intake/ .
    """
    if fn is None:
        fn = os.path.join(confdir, 'conf.yaml')
    if os.path.isfile(fn):
        with open(fn) as f:
            # Fix: yaml.load without an explicit Loader is deprecated and can
            # construct arbitrary Python objects from tagged input;
            # safe_load only builds plain types, which is all a config needs.
            conf.update(yaml.safe_load(f))
# Initialize on import: apply defaults, then overlay the user's config file.
reset_conf()
load_conf(conffile)
| StarcoderdataPython |
1902257 | <reponame>burdettadam/token-plugin
import json
import pytest
from base58 import b58decode
from sovtoken.constants import UTXO_CACHE_LABEL
from sovtokenfees.serializers import txn_root_serializer
from indy_node.test.request_handlers.helper import get_fake_ledger
from sovtoken import TOKEN_LEDGER_ID
from sovtoken.utxo_cache import UTXOCache
from indy.payment import create_payment_address
from indy.wallet import create_wallet, open_wallet, close_wallet, delete_wallet
from common.serializers import serialization
from plenum.common.constants import KeyValueStorageType, BLS_LABEL
from plenum.common.txn_util import append_txn_metadata
from plenum.common.util import randomString
from plenum.server.database_manager import DatabaseManager
from plenum.test.testing_utils import FakeSomething
from state.pruning_state import PruningState
from storage.helper import initKeyValueStorage
# Placeholder root hashes (base58 "1" decoded) used by the fake ledgers.
FAKE_UNCOMMITTED_ROOT_HASH = b58decode("1".encode())
FAKE_COMMITTED_ROOT_HASH = b58decode("1".encode())
@pytest.fixture(scope="module")
def bls_store(db_manager):
    """Register a stub BLS store whose every lookup yields one multi-sig."""
    multi_sigs = FakeSomething()
    multi_sigs.as_dict = lambda: {"a": "b"}
    bls = FakeSomething()
    bls.get = lambda _: multi_sigs
    db_manager.register_new_store(BLS_LABEL, bls)
    return bls
@pytest.fixture(scope="module")
def db_manager(tconf):
    """DatabaseManager with an in-memory token ledger faked for tests.

    The fake ledger pretends to hold one uncommitted transaction ("1");
    commitTxns simply promotes the uncommitted root hash to committed.
    """
    _db_manager = DatabaseManager()
    storage = initKeyValueStorage(KeyValueStorageType.Memory,
                                  None,
                                  "tokenInMemoryStore",
                                  txn_serializer=serialization.multi_sig_store_serializer)
    ledger = get_fake_ledger()

    def commit_txns(count):
        # Committing just adopts the uncommitted root; one fake seq_no.
        ledger.committed_root_hash = ledger.uncommitted_root_hash
        return None, [1]

    ledger.commitTxns = commit_txns
    ledger.root_hash = txn_root_serializer.serialize("1")
    ledger.uncommitted_root_hash = "1"
    ledger.uncommitted_size = 1
    ledger.size = 0
    ledger.discardTxns = lambda x: None
    ledger.committed_root_hash = "-1"
    # Stamp fixed metadata (seq_no=2, txn_time) onto appended transactions.
    ledger.append_txns_metadata = lambda txns, txn_time: [append_txn_metadata(txn, 2, txn_time, 2) for txn in txns]
    ledger.appendTxns = lambda x: (None, x)
    _db_manager.register_new_database(TOKEN_LEDGER_ID, ledger, PruningState(storage))
    return _db_manager
@pytest.yield_fixture(scope="module")
def utxo_cache(db_manager):
    """In-memory UTXO cache registered on the db manager.

    Any batch left uncommitted by a test is rolled back on teardown.
    """
    cache = UTXOCache(initKeyValueStorage(
        KeyValueStorageType.Memory, None, "utxoInMemoryStore"))
    db_manager.register_new_store(UTXO_CACHE_LABEL, cache)
    yield cache
    if cache.un_committed:
        cache.reject_batch()
@pytest.fixture(scope="module")
def payment_address(libsovtoken, looper, wallet):
    """First 'sov' payment address created in the shared test wallet."""
    payment_address_future = create_payment_address(wallet, "sov", "{}")
    payment_address = looper.loop.run_until_complete(payment_address_future)
    return payment_address
@pytest.fixture(scope="module")
def payment_address_2(libsovtoken, looper, wallet):
    """Second, distinct 'sov' payment address in the shared test wallet."""
    payment_address_future = create_payment_address(wallet, "sov", "{}")
    payment_address = looper.loop.run_until_complete(payment_address_future)
    return payment_address
@pytest.yield_fixture(scope="module")
def wallet(looper):
    """Open a throwaway indy wallet for the module; close and delete after."""
    wallet_name = randomString()
    create_wallet_future = create_wallet(json.dumps({"id": wallet_name}), json.dumps({"key": "1"}))
    looper.loop.run_until_complete(create_wallet_future)
    open_wallet_future = open_wallet(json.dumps({"id": wallet_name}), json.dumps({"key": "1"}))
    wallet_handle = looper.loop.run_until_complete(open_wallet_future)
    yield wallet_handle
    # Teardown: close, then delete the wallet created above.
    close_wallet_future = close_wallet(wallet_handle)
    looper.loop.run_until_complete(close_wallet_future)
    delete_wallet_future = delete_wallet(json.dumps({"id": wallet_name}), json.dumps({"key": "1"}))
    looper.loop.run_until_complete(delete_wallet_future)
| StarcoderdataPython |
1687092 | """[15 - Classe Bichinho Virtual++: Melhore o programa do bichinho virtual, permitindo que o usuário especifique quanto de comida ele fornece ao bichinho e por quanto tempo ele brinca com o bichinho. Faça com que estes valores afetem quão rapidamente os níveis de fome e tédio caem.]
"""
class bichinho():
"""[Classe bixinho, essa classe possui 6 metodos e um __init__]
"""
def __init__(self, nome, fome = 10, saude = 100, idade = 1):
"""[Inicia a classe]
Args:
nome ([str]): [Nome do bichinho]
fome (int, optional): [Porcentagem da fome do bichinho]. Defaults to 10.
saude (int, optional): [Porcentagem da saude do bichinho]. Defaults to 100.
idade (int, optional): [Idade do bichinho]. Defaults to 1.
"""
self.nome = nome
self.fome = fome
self.saude = saude
self.idade = idade
def status(self):
"""[Descreve o status do bichinho]
Returns:
[str]: [Retorna o estado do bichinho]
"""
print (f"O nome do bichinho é {self.nome}\nA fome de {self.nome} está em {self.fome}%\nA saúde de {self.nome} está em {self.saude}%\nA idade de {self.nome} é {self.idade} anos")
bichinho.novo_humor()
return (f"O nome do bichinho é {self.nome}\nA fome de {self.nome} está em {self.fome}%\nA saúde de {self.nome} está em {self.saude}%\nA idade de {self.nome} é {self.idade} anos")
def alterar_nome(self):
"""[função para alterar o nome do bichinho]
Returns:
[str]: [Retorna o novo nome do bichinho]
"""
self.nome = str(input("Qual o novo nome do bichinho? "))
print (f"O novo nome do bichinho é {self.nome}")
bichinho.novo_humor()
return (f"O novo nome do bichinho é {self.nome}")
def alterar_fome(self):
"""[função para alterar a fome do bichinho]
Returns:
[str]: [Retorna a porcentagem da fome do bichinho]
"""
self.fome = abs(int(input(f"Qual a nova porcentagem de fome de {self.nome}? ")))
print (f"A fome de {self.nome} está em {self.fome}%")
bichinho.novo_humor()
return (f"A fome de {self.nome} está em {self.fome}%")
def alterar_saude(self):
"""[função para alterar a saúde do bichinho]
Returns:
[str]: [Retorna a porcentagem da saúde do bichinho]
"""
self.saude = abs(int(input(f"Qual a nova porcentagem de saude de {self.nome}? ")))
print (f"A saúde de {self.nome} está em {self.saude}%")
bichinho.novo_humor()
return (f"A saúde de {self.nome} está em {self.saude}%")
def alterar_idade(self):
"""[Função para alterar a idade do bichinho]
Returns:
[str]: [Idade do bichinho]
"""
self.idade = abs(int(input(f"Qual a nova idade de {self.nome}? ")))
print (f"A idade de {self.nome} é {self.idade} anos")
bichinho.novo_humor()
return (f"A idade de {self.nome} é {self.idade} anos")
def novo_humor(self):
"""
[Serve para calcular o humor do bichinho, baseado na sua fome e saúde]
"""
if self.fome > 75 or self.saude < 25 or self.brincar < 2:
self.humor = print(f'{self.nome} está Irritado')
elif self.fome > 50 or self.saude < 50 or self.brincar < 5:
self.humor = print(f'{self.nome} está Triste')
elif self.fome > 25 or self.saude < 75 or self.brincar < 10:
self.humor = print(f'{self.nome} está Feliz')
else:
self.humor = print(f'{self.nome} está Muito feliz')
def brincar(self):
    """Ask how many minutes to play with the pet and record that time.

    NOTE(review): the assignment below rebinds ``self.brincar`` from the
    bound method to a float, so this method can only be called once per
    instance; storing the time under a separately named attribute would
    avoid the shadowing (``novo_humor`` also reads ``self.brincar``).

    Returns:
        float: the (non-negative) number of minutes played.
    """
    tempo = abs(float(input(f"Por quanto minutos deseja brincar com {self.nome}? ")))
    self.brincar = tempo
    return self.brincar
# Demo driver. Removed a dataset-concatenation artifact that made the last
# line a syntax error. NOTE(review): this rebinding shadows the class
# 'bichinho', so no further instances can be created after this point.
bichinho = bichinho("Midas")
bichinho.brincar()
bichinho.status()
3404037 | from flask import Flask, render_template
import random
import yaml
from sqlalchemy import create_engine
# Flask application serving the recommendation pages.
app = Flask(__name__)

#path_steam_user_id = './data/steam_user_id.txt'

# Database credentials come from a YAML config file.
# NOTE(review): the file handle passed to safe_load is never closed; a
# 'with' block would be safer.
config = yaml.safe_load(open('./src/config.yaml'))
db_username = config['mysql']['username']
db_password = config['mysql']['password']
db_endpoint = config['mysql']['endpoint']
db_database = config['mysql']['database']

# SQLAlchemy engine for the MySQL backend (utf8mb4 so non-ASCII game
# titles round-trip correctly).
engine = create_engine('mysql+pymysql://{}:{}@{}/{}?charset=utf8mb4'.format(db_username, db_password, db_endpoint, db_database))

# All known user ids; one is sampled per page load by the view below.
lst_user_id = [i[0] for i in engine.execute('select user_id from game_steam_user').fetchall()]

# Top-5 currently popular games (released, with a known price), computed
# once at import time — it does not refresh while the server runs.
lst_popular_games = engine.execute('''
SELECT
game_steam_app.app_id,
game_steam_app.name,
game_steam_app.initial_price,
game_steam_app.header_image
FROM game_steam_app
JOIN recommended_games_popularity_based
ON game_steam_app.app_id = recommended_games_popularity_based.app_id
AND game_steam_app.type = "game"
AND game_steam_app.release_date <= CURDATE()
AND game_steam_app.initial_price IS NOT NULL
ORDER BY recommended_games_popularity_based.peak_today DESC
LIMIT 5''').fetchall()
def _fetch_games(app_ids):
    """Resolve app ids to (app_id, name, initial_price, header_image) rows.

    Only released games with a known price are returned; an empty id list
    short-circuits to [] without touching the database.
    """
    if not app_ids:
        return []
    # NOTE(review): the ids originate from our own tables (integers), so
    # inlining them is low-risk, but parameterized queries would be safer.
    return engine.execute('''
SELECT app_id, name, initial_price, header_image
FROM game_steam_app
WHERE type = "game"
AND release_date <= CURDATE()
AND initial_price IS NOT NULL
AND app_id IN ({})'''.format(','.join(str(i) for i in app_ids))).fetchall()


def _top3_ids(table, key_column, key_value):
    """Return the precomputed top-3 ids stored in columns `0`..`2` of *table*.

    Returns [] when no row exists for *key_value* — the previous inline code
    crashed with a TypeError (joining None) in that case.
    """
    row = engine.execute('SELECT `0`,`1`,`2` FROM {} WHERE {} = {}'.format(
        table, key_column, key_value)).first()
    return [] if row is None else list(row)


@app.route('/')
def recommender():
    """Render the recommendation page for a randomly picked known user.

    Shows the user's three most-played games plus content-based, item-based
    and ALS-based recommendations; users without any recorded playtime only
    get the global popularity list.
    """
    user_id = random.choice(lst_user_id)
    # user_id = 76561197960323774 # no purchase info
    lst_most_played_games = engine.execute('''
SELECT
game_steam_app.app_id,
game_steam_app.name,
game_steam_app.initial_price,
game_steam_app.header_image
FROM game_steam_app
JOIN game_steam_user
ON game_steam_app.app_id = game_steam_user.app_id
WHERE game_steam_user.user_id = {}
AND game_steam_user.playtime_forever > 0
AND game_steam_app.type = "game"
AND game_steam_app.release_date <= CURDATE()
AND game_steam_app.initial_price IS NOT NULL
ORDER BY game_steam_user.playtime_forever DESC
LIMIT 3'''.format(user_id)).fetchall()
    if lst_most_played_games:
        # The single most-played game seeds the per-item recommenders.
        favorite_app_id = lst_most_played_games[0][0]
        lst_content_recommended = _fetch_games(
            _top3_ids('recommended_games_content_based', 'app_id', favorite_app_id))
        lst_item_recommended = _fetch_games(
            _top3_ids('recommended_games_item_based', 'app_id', favorite_app_id))
        lst_als_recommended = _fetch_games(
            _top3_ids('recommended_games_als_based', 'user_id', user_id))
    else:
        lst_content_recommended = []
        lst_item_recommended = []
        lst_als_recommended = []
    return render_template('recommendation.html',
                           user_id=user_id,
                           lst_most_played_games=lst_most_played_games,
                           lst_content_recommended=lst_content_recommended,
                           lst_item_recommended=lst_item_recommended,
                           lst_als_recommended=lst_als_recommended,
                           lst_popular_games=lst_popular_games)
if __name__ == '__main__':
    # Development server only; debug=True enables the interactive debugger
    # and must not be used in production.
    app.run(debug=True)
| StarcoderdataPython |
5111086 | <gh_stars>1-10
import sys
from os.path import dirname, abspath
from src.fleets.electric_vehicles_fleet.electric_vehicles_fleet import ElectricVehiclesFleet
#from src.services.reg_service.test import fleet_name
sys.path.insert(0, dirname(dirname(dirname(abspath(__file__)))))
from dateutil import parser
from datetime import datetime, timedelta
from fleet_config import FleetConfig
from fleets.battery_inverter_fleet.battery_inverter_fleet import BatteryInverterFleet
from services.peak_managment_service.peak_management_service import PeakManagementService
from grid_info import GridInfo
if __name__ == '__main__':
    # Time stamp to start the simulation
    #ts = datetime(2018, 9, 20, 5, 0, 00, 000000)

    # Parameters of the grid (price/condition data shared with the fleet).
    grid = GridInfo('Grid_Info_DATA_2.csv')

    # One simulated hour per service request step.
    fleet_sim_step = timedelta(minutes=60)

    # Instantiate a peak management service
    pms = PeakManagementService(sim_step=fleet_sim_step)

    # Get start time for the simulation from drive cycle file
    start_time = pms.drive_cycle["dt"][0]

    # Instantiate a fleet for this test; it follows direct power requests
    # (P-priority) rather than responding autonomously.
    fleet = ElectricVehiclesFleet(grid, start_time)
    fleet.is_autonomous = False
    fleet.is_P_priority = True
    pms.fleet = fleet

    # Set up the fleet (if necessary?)
    # fleet_config = FleetConfig(is_P_priority=True, is_autonomous=False, autonomous_threshold=None)
    # NOTE(review): fleet_config is built but never passed to the fleet or
    # service — confirm whether it should be applied.
    fleet_config = FleetConfig(is_P_priority=True, is_autonomous=False, FW_Param=[], v_thresholds=[])

    # Instantiate a peak management service, connected to the previous fleet
    #pms = PeakManagementService(fleet=fleet)

    # Run the service request loop over the drive cycle.
    pms.request_loop(start_time,fleet_name='ElectricVehicle')
| StarcoderdataPython |
from functools import partial
from glob import glob
from multiprocessing import Pool
from pprint import pprint
from typing import Dict, List
import os
import pickle as pkl

from Bio import SeqIO
import tensorflow as tf

from .remote_homology_serializer import serialize_remote_homology_sequence
from .vocabs import PFAM_VOCAB
# Map 3-bit one-hot secondary-structure encodings (as emitted by the DeepSF
# feature files) to dense integer class labels 0-2.
ss_tuple_to_int = {
    (1, 0, 0): 0,
    (0, 1, 0): 1,
    (0, 0, 1): 2
}

# Map 2-bit one-hot solvent-accessibility encodings to class labels 0-1.
sa_tuple_to_int = {
    (1, 0): 0,
    (0, 1): 1
}
def get_scop_labels_from_string(scop_label):
    """Expand a 4-part SCOP label into its hierarchy prefixes.

    >>> get_scop_labels_from_string('a.1.1.1')
    ('a', 'a.1', 'a.1.1', 'a.1.1.1')

    Raises ValueError if the label does not have exactly four dot-separated
    fields.
    """
    class_part, fold_part, superfam_part, fam_part = scop_label.split('.')
    fold = f"{class_part}.{fold_part}"
    superfam = f"{fold}.{superfam_part}"
    fam = f"{superfam}.{fam_part}"
    return class_part, fold, superfam, fam
def get_pssm(data_dir):
    """Parse every ``*.pssm_fea`` file under *data_dir* in parallel.

    Returns a dict mapping SCOP id to its parsed PSSM matrix.
    """
    pssm_files = glob(os.path.join(data_dir, '*.pssm_fea'))
    with Pool() as pool:
        parsed = pool.map(get_pssm_for_file, pssm_files)
    # Each worker returns a (scop_id, pssm) pair.
    return dict(parsed)
def get_pssm_for_file(filename):
    """Parse one ``.pssm_fea`` file.

    The file is whitespace-delimited: two header tokens followed by
    ``idx:score`` tokens, 20 mutation scores per residue position.

    Returns:
        (scop_id, pssm) where scop_id is taken from the file name and pssm
        is a list of 20-score lists, one per position.
    """
    scop_id = filename.split('/')[-1].split('.')[0]
    with open(filename, 'r') as handle:
        tokens = handle.read().split()
    scores = [int(tok.split(':')[1]) for tok in tokens[2:]]
    if scores:
        pssm = [scores[i:i + 20] for i in range(0, len(scores), 20)]
    else:
        # A file with no score tokens still yields one (empty) row.
        pssm = [[]]
    return scop_id, pssm
def get_sequence_and_secondary_structure_and_solvent_accessibility(data_dir):
    """Parse every ``*.fea_aa_ss_sa`` file under *data_dir* in parallel.

    Returns three dicts keyed by SCOP id: amino-acid one-hot sequences,
    secondary-structure class lists, and solvent-accessibility class lists.
    """
    feature_files = glob(os.path.join(data_dir, '*.fea_aa_ss_sa'))
    with Pool() as pool:
        parsed = pool.map(get_seq_ss_and_sa_for_file, feature_files)
    sequences = {scop_id: seq for scop_id, seq, _, _ in parsed}
    secondary = {scop_id: ss for scop_id, _, ss, _ in parsed}
    solvent = {scop_id: sa for scop_id, _, _, sa in parsed}
    return sequences, secondary, solvent
def get_seq_ss_and_sa_for_file(filename):
    """Parse one ``.fea_aa_ss_sa`` file.

    After two header tokens, the file holds ``idx:bit`` tokens in groups of
    25 per residue: a 20-bit amino-acid one-hot, a 3-bit secondary-structure
    one-hot and a 2-bit solvent-accessibility one-hot.

    Returns:
        (scop_id, sequence, secondary_structure, solvent_accessibility)
        where sequence holds the raw one-hot tuples and the other two hold
        integer class labels (via the module-level lookup dicts).
    """
    scop_id = filename.split('/')[-1].split('.')[0]
    with open(filename, 'r') as handle:
        tokens = handle.read().split()[2:]
    bits = [int(tok.split(':')[1]) for tok in tokens]
    sequence = []
    secondary_structure = []
    solvent_accessibility = []
    # max(..., 1) keeps the final-flush behavior: an empty token list still
    # produces one (empty) residue record.
    for start in range(0, max(len(bits), 1), 25):
        residue = bits[start:start + 25]
        sequence.append(tuple(residue[:20]))
        secondary_structure.append(ss_tuple_to_int[tuple(residue[20:23])])
        solvent_accessibility.append(sa_tuple_to_int[tuple(residue[23:25])])
    return scop_id, sequence, secondary_structure, solvent_accessibility
def get_scope_id_to_label(filename):
    """Read a whitespace-delimited list file into ``{scop_id: label}``.

    Column 0 is the SCOP id and column 2 the SCOP label string; any other
    columns are ignored.
    """
    mapping = {}
    with open(filename, 'r') as handle:
        for line in handle:
            fields = line.split()
            mapping[fields[0]] = fields[2]
    return mapping
def convert_deepsf_data_to_tfrecords(filenames: List[str],
                                     outfilenames: List[str],
                                     feature_dir: str,
                                     pssm_dir: str,
                                     fasta: str,
                                     vocab: Dict[str, int]):
    """Serialize DeepSF splits into TFRecord files for remote-homology tasks.

    For each (input list, output path) pair: look up every SCOP id's
    sequence (from the FASTA), PSSM, secondary structure and solvent
    accessibility; map its SCOP string to integer class/fold/superfamily/
    family labels; and write one serialized example per id. Label maps are
    pickled to the working directory as a side effect.

    Args:
        filenames: DeepSF split list files (one SCOP id per line).
        outfilenames: TFRecord output paths, parallel to ``filenames``.
        feature_dir: directory of ``*.fea_aa_ss_sa`` feature files.
        pssm_dir: directory of ``*.pssm_fea`` PSSM files.
        fasta: FASTA file holding the trusted sequences and SCOP strings.
        vocab: amino-acid token vocabulary used by the serializer.
    """
    serialize_with_vocab = partial(serialize_remote_homology_sequence, vocab=vocab)
    # need to construct dictionaries with label string:int maps
    class_to_int_label = {}
    fold_to_int_label = {}
    superfamily_to_int_label = {}
    family_to_int_label = {}
    seq_dict, ss_dict, sa_dict = get_sequence_and_secondary_structure_and_solvent_accessibility(feature_dir)
    pssm_dict = get_pssm(pssm_dir)
    # I don't trust the sequences in the deepsf feature files, so we will look them up in the fasta
    scop_id_to_scop_string = {record.name: record.description.split()[1] for record in SeqIO.parse(fasta, 'fasta')}
    scop_id_to_seq = {record.name: str(record.seq).upper() for record in SeqIO.parse(fasta, 'fasta')}
    skipped_ids = []
    incongruent_ids = []
    for filename, outfilename in zip(filenames, outfilenames):
        to_tfrecord = []
        scop_id_to_label = get_scope_id_to_label(filename)
        for scop_id, _ in scop_id_to_label.items():
            use_alt = False
            # Some list files use '_' where the FASTA ids use '.'.
            alt = scop_id.replace('_', '.')
            if scop_id not in scop_id_to_seq:
                if alt not in scop_id_to_seq:
                    skipped_ids.append(scop_id)
                    continue
                else:
                    use_alt = True
            if use_alt:
                seq = scop_id_to_seq[alt]
                scop_string = scop_id_to_scop_string[alt]
            else:
                seq = scop_id_to_seq[scop_id]
                scop_string = scop_id_to_scop_string[scop_id]
            seq_id = scop_id
            class_, fold, superfam, fam = get_scop_labels_from_string(scop_string)
            # setdefault hands out the next dense integer id on first sight.
            class_label = class_to_int_label.setdefault(class_, len(class_to_int_label))
            fold_label = fold_to_int_label.setdefault(fold, len(fold_to_int_label))
            superfamily_label = superfamily_to_int_label.setdefault(superfam, len(superfamily_to_int_label))
            family_label = family_to_int_label.setdefault(fam, len(family_to_int_label))
            pssm = pssm_dict[seq_id]
            ss = ss_dict[seq_id]
            sa = sa_dict[seq_id]
            # Drop entries whose per-residue feature lengths disagree with
            # the FASTA sequence length rather than failing the conversion.
            if not all([len(pssm) == len(ss), len(ss) == len(sa), len(sa) == len(seq)]):
                print('id {} pssm {} ss {} sa {} parsed aa {} fasta {}'.format(seq_id,
                                                                               len(pssm),
                                                                               len(ss),
                                                                               len(sa),
                                                                               len(seq_dict[seq_id]),
                                                                               len(seq)))
                incongruent_ids.append(scop_id)
                continue
            # assert(len(pssm) == len(ss) == len(sa) == len(seq))
            to_tfrecord.append([seq, seq_id, class_label, fold_label, superfamily_label, family_label, pssm, ss, sa])
        print('Serializing {} examples...'.format(len(to_tfrecord)))
        with Pool() as p:
            serialized_examples = p.starmap(serialize_with_vocab, to_tfrecord)
        with tf.python_io.TFRecordWriter(outfilename) as writer:
            print('Creating TFrecords...')
            for serialized_example in serialized_examples:
                writer.write(serialized_example)
    # Summary + label-map pickles. NOTE(review): the label maps accumulate
    # across all splits, so the pickled maps reflect the full id space.
    print('Incongruent {}'.format(len(incongruent_ids)))
    print('Skipped {}'.format(len(skipped_ids)))
    pprint(incongruent_ids)
    pprint(skipped_ids)
    print('Num classes {}'.format(len(class_to_int_label)))
    pprint(class_to_int_label)
    with open('class_to_int_label.pkl', 'wb') as f:
        pkl.dump(class_to_int_label, f)
    print('Num folds {}'.format(len(fold_to_int_label)))
    pprint(fold_to_int_label)
    with open('fold_to_int_label.pkl', 'wb') as f:
        pkl.dump(fold_to_int_label, f)
    print('Num superfams {}'.format(len(superfamily_to_int_label)))
    pprint(superfamily_to_int_label)
    with open('superfamily_to_int_label.pkl', 'wb') as f:
        pkl.dump(superfamily_to_int_label, f)
    print('Num fams {}'.format(len(family_to_int_label)))
    pprint(family_to_int_label)
    with open('family_to_int_label.pkl', 'wb') as f:
        pkl.dump(family_to_int_label, f)
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='convert protein sequences to tfrecords')
    parser.add_argument('--listdir', required=True, help='DeepSF train split list directory: each list contains scop ids')
    parser.add_argument('--outprefix', required=True, help='prefix for output files')
    parser.add_argument('--featuredir', required=True)
    parser.add_argument('--pssmdir', required=True)
    parser.add_argument('--fasta', required=True)
    args = parser.parse_args()
    vocab = PFAM_VOCAB
    # NOTE(review): this glob result is immediately overwritten below and
    # appears to be unused.
    filenames = glob(os.path.join(args.listdir, '*.list*'))
    # Fixed split order; the output names below are parallel to these.
    infilenames = ['test_dataset.list_family', 'test_dataset.list_superfamily', 'test_dataset.list_fold', 'validation.list', 'Traindata.list']
    infilenames = [os.path.join(args.listdir, name) for name in infilenames]
    outfilenames = ['remote_homology_test_family_holdout.tfrecords',
                    'remote_homology_test_superfamily_holdout.tfrecords',
                    'remote_homology_test_fold_holdout.tfrecords',
                    'remote_homology_valid.tfrecords',
                    'remote_homology_train.tfrecords']
    outfilenames = [os.path.join(args.outprefix, name) for name in outfilenames]
    convert_deepsf_data_to_tfrecords(infilenames,
                                     outfilenames,
                                     args.featuredir,
                                     args.pssmdir,
                                     args.fasta,
                                     vocab)
# all deepsf data can be downloaded from
# http://iris.rnet.missouri.edu/DeepSF/download.html
# fasta downloaded from
# http://iris.rnet.missouri.edu/DeepSF/download/PDB_SCOP95_seq_scop1.75.txt
# DONT USE!!
# astral scop was downloaded from
# https://scop.berkeley.edu/downloads/scopseq-1.75/astral-scopdom-seqres-all-1.75.fa
# python -m tape.data_utils.convert_deepsf_to_remote_homology \
# --listdir ~/git/DeepSF/datasets/D2_Three_levels_dataset/ \
# --outprefix data/remote_homology_tfrecord/ \
# --featuredir ~/git/DeepSF/datasets/features/Feature_aa_ss_sa/ \
# --pssmdir ~/git/DeepSF/datasets/features/PSSM_Fea/ \
# --fasta ~/git/DeepSF/PDB_SCOP95_seq_scop1.75.txt > out.txt
| StarcoderdataPython |
1601458 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@Title : 模块
@File : __init__.py
@Author : vincent
@Time : 2020/8/28 4:53 下午
@Version : 1.0
'''
import json
import logging
import os
from flask import Blueprint, render_template, request, make_response, redirect
# Template/static dirs are resolved relative to the current working
# directory, so the app must be started from the project root.
base_path = os.getcwd()
template_path = os.path.join(base_path, "app/main/web/templates")
static_path = os.path.join(base_path, "app/main/web/static")

# Feature-specific blueprints are registered here.
idcard_app = Blueprint('idcard', __name__, url_prefix="/idcard")
case_app = Blueprint('case', __name__, url_prefix="/case")

# The shared pages are served from a blueprint as well.
app = Blueprint('app', __name__,
                url_prefix="",
                template_folder=template_path,
                static_folder=static_path,
                static_url_path="/main/static")

logger = logging.getLogger(__name__)

# Lazily-loaded {username: password} store; populated on the first
# authentication check (see isAuthenticated).
userdata = None
@app.route("/sandbox")
def index():
    """Render the sandbox home page with the current version string."""
    return render_template('index.html', version="1.0")
@app.route('/', methods=["GET"])
@app.route('/demo', methods=["GET"])
def demo_index():
    """Render the demo showcase page (also the site root)."""
    return render_template("index_demo.html")
@app.route('/<regex(".*.html"):url>')
def html_request(url):
    """Serve any ``*.html`` template directly by its request path.

    NOTE(review): authentication is bypassed here; a ``user_auth(url)``
    helper exists in this module but is not applied.
    """
    # Removed a stray debug print that duplicated this log line.
    logger.info("请求页面:%r", url)
    return render_template(url)
@app.route('/login', methods=['POST'])
def login():
    """Validate posted credentials and reply with a JSON redirect target.

    On success the username/password/language are stored in cookies (see
    the security note below) and the client is sent to './'; on failure it
    is pointed at '/browser/error'.
    """
    username = request.form.get('username', None, str)
    password = request.form.get('password', None, str)
    language = request.form.get('language', None, str)
    logger.info("login attempt for user: %r", username)
    if isAuthenticated(username, password):
        responseJson = {
            "ok": './',
        }
        response = make_response(json.dumps(responseJson))
        # The third positional argument of set_cookie is max_age in seconds
        # (7200 s = 2 hours). The original code had an unfilled '<PASSWORD>'
        # placeholder here, which was a syntax error.
        # SECURITY(review): storing the plaintext password in a cookie is
        # unsafe; a server-side session should replace this.
        response.set_cookie('username', username, 7200)
        response.set_cookie('password', password, 7200)
        response.set_cookie('language', language, 7200)
        return response
    else:
        responseJson = {
            "ok": '/browser/error',
        }
        return make_response(json.dumps(responseJson))
def user_auth(url):
    """Render *url* only for authenticated users; otherwise redirect to login.

    Credentials are read from the request cookies set by ``login``.
    """
    cookies = request.cookies
    if isAuthenticated(cookies.get('username'), cookies.get('password')):
        return render_template(url)
    return redirect("login")
def isAuthenticated(username, password):
    """Check credentials against the lazily-loaded on-disk user store.

    Returns True only on an exact username/password match; unknown users
    (and mismatched passwords) yield False.
    """
    # TODO: use server-side sessions instead of credential cookies.
    global userdata
    if userdata is None:  # fixed: was '== None'
        # SECURITY(review): eval() on the config file executes arbitrary
        # code; prefer json or ast.literal_eval. Kept for compatibility
        # with the existing config format.
        with open("app/browser/cfg/userdata.conf") as config:
            userdata = eval(config.read())
    try:
        # Explicit False on mismatch (the original fell through to None).
        return userdata[username] == password
    except KeyError:
        # Unknown username; other errors now propagate instead of being
        # silently swallowed by a broad 'except Exception'.
        return False
def get_ip_address():
    """Return the client IP, preferring the X-Real-Ip header when present.

    The header is typically set by a reverse proxy in front of the app;
    without it, fall back to the direct peer address.
    """
    return request.headers.get("X-Real-Ip") or request.remote_addr
# TODO: views must be imported by the startup code for their routes to register;
# modules are only scanned when actually imported ('import *' is not reliable here).
from app.main.app.views import idcard_controller
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.