code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
'''
How to run: in an Anaconda prompt, change into the folder containing main.py
and enter the command below:
    python glowpick.py

Collected fields:
    1. Category
    2. Brand_Name
    3. Product_Name
    4. volume
    5. price
    6. Sales Rank
    7. rate
    8. nb_reviews
    9. product_number
'''
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
import pandas as pd
import os
import time
class GlowPick:
    """Selenium crawler for product listings on glowpick.com.

    Iterates over the site's 17 top-level categories and each of their
    sub-categories, scrolls every result list to the bottom, scrapes the
    product rows, and appends them to ../dataset/glowpick_products.csv.
    The CSV doubles as a checkpoint: sub-categories already present in it
    are skipped, so an interrupted crawl can be resumed.
    """
    def __init__(self):
        # Landing page used as the starting point of every crawl.
        self.url = 'https://www.glowpick.com/'
        # NOTE(review): the find_element_by_* calls used throughout require
        # Selenium < 4; Selenium 4 removed them in favour of
        # find_element(By.XPATH, ...). Confirm the pinned selenium version.
        self.driver = webdriver.Chrome()
        # Sub-category <li> WebElements of the currently opened top-level
        # category; (re)populated by get_main().
        self.li_lst = None
    def run(self):
        """Crawl all categories and persist the results to the CSV file."""
        # Accumulates every scraped product row across categories.
        total_df = pd.DataFrame()
        # Resume support: load previously saved rows, if any.
        if os.path.isfile('../dataset/glowpick_products.csv'):
            df = pd.read_csv('../dataset/glowpick_products.csv')
            total_df = pd.concat([total_df, df], axis=0)
            print('total_df.shape: ',total_df.shape)
        titla_nb = 17 # 17 top-level categories on the site
        for cat in range(1,titla_nb+1):
            # Open the main page with the cat-th category menu expanded.
            self.get_main(cat=cat)
            # Top-level category label; stored in the 'title' CSV column.
            title = self.driver.find_element_by_xpath(f'//*[@id="gp-home"]/section[1]/div[1]/form/fieldset[1]/div/div/ul/li[{cat}]/div[1]/span[1]').text
            print('title: ',title)
            print('len(self.li_lst): ',len(self.li_lst))
            for i in range(len(self.li_lst)):
                # Re-open the menu: the previous iteration navigated away,
                # so the cached <li> elements are stale.
                self.get_main(cat=cat)
                self.driver.implicitly_wait(5)
                category = self.li_lst[i].text
                print()
                print(f'{i} category: ',category)
                # Skip sub-categories that were already crawled (resume).
                if total_df.shape[0] > 0:
                    title_df = total_df[total_df['title'] == title]
                    if ('category' in title_df.columns) and (self.li_lst[i].text in title_df.category.unique()):
                        continue
                # Select the sub-category via a JS click.
                self.driver.execute_script("arguments[0].click();", self.li_lst[i])
                self.driver.implicitly_wait(5)
                # Trigger the search with an ENTER keypress on the button.
                element = self.driver.find_element_by_xpath('//*[@id="gp-home"]/section[1]/div[1]/form/div/button').send_keys(Keys.ENTER)
                self.driver.implicitly_wait(5)
                # Force the lazily-loaded result list to load completely.
                self.scroll_down()
                # Scrape the visible product rows into a DataFrame.
                df = self.crawling()
                df['category'] = category
                df['title'] = title
                total_df = pd.concat([total_df, df], axis=0)
                # Checkpoint after every sub-category.
                total_df.to_csv('../dataset/glowpick_products.csv', index=False)
                print(total_df.tail())
        print()
        print('Complete')
        self.driver.quit()
    def get_main(self, cat=17):
        '''Open the glowpick main page and expand one top-level category.

        Side effect: stores the category's sub-category <li> elements in
        ``self.li_lst``.

        args:
            - cat : 1-based category index. default 17: men's cosmetics
        '''
        # glowpick main page
        self.driver.get(self.url)
        self.driver.implicitly_wait(5)
        # Open the category drop-down.
        self.driver.find_element_by_xpath('//*[@id="gp-home"]/section[1]/div[1]/form/fieldset[1]/div/button').send_keys(Keys.ENTER)
        time.sleep(1)
        # Expand the cat-th top-level category entry.
        self.driver.find_element_by_xpath(f'//*[@id="gp-home"]/section[1]/div[1]/form/fieldset[1]/div/div/ul/li[{cat}]').click()
        self.driver.implicitly_wait(5)
        time.sleep(1)
        li_ul = self.driver.find_element_by_xpath(f'//*[@id="gp-home"]/section[1]/div[1]/form/fieldset[1]/div/div/ul/li[{cat}]/div[2]/ul')
        self.li_lst = li_ul.find_elements_by_css_selector('.list-item')
    def scroll_down(self):
        """Scroll the product list until its height stops growing.

        Repeatedly drags between an element near the top of the page and one
        at the bottom so the lazy loader keeps fetching rows; stops when a
        full cycle no longer increases the list height.
        """
        ul = self.driver.find_element_by_xpath('//*[@id="gp-list"]/div/section[2]/ul')
        start_height = ul.size['height']
        target = self.driver.find_element_by_link_text('사업자정보 확인')
        start = self.driver.find_element_by_link_text('브랜드')
        loop = True
        actions = ActionChains(self.driver)
        while loop:
            actions.move_to_element(start)
            actions.perform()
            actions.move_to_element(target)
            actions.perform()
            time.sleep(0.5)
            # Re-measure the list; stop once the height is unchanged.
            ul = self.driver.find_element_by_xpath('//*[@id="gp-list"]/div/section[2]/ul')
            current_height = ul.size['height']
            if start_height == current_height:
                loop = False
            else:
                start_height = current_height
    def crawling(self):
        """Scrape all visible product rows into a pandas DataFrame.

        Each row's text is assumed to split into at least six lines:
        brand, product, vol_price, rate, nb_reviews, sales_rank — an
        IndexError here means the site markup changed (TODO confirm layout).
        """
        brand_lst = []
        product_lst = []
        vol_price_lst = []
        sales_rank_lst = []
        rate_lst = []
        nb_reviews_lst = []
        product_url_lst = []
        divs = self.driver.find_elements_by_xpath('//*[@id="gp-list"]/div/section[2]/ul/li')
        for div in divs:
            lst = div.text.split('\n') # exclude a last element '-'
            product_url = div.find_element_by_css_selector('div > div > div').get_attribute('data-url')
            product_url_lst.append(product_url)
            print(lst, end=" ")
            print(product_url)
            brand_lst.append(lst[0])
            product_lst.append(lst[1])
            vol_price_lst.append(lst[2])
            rate_lst.append(lst[3])
            nb_reviews_lst.append(lst[4])
            sales_rank_lst.append(lst[5])
        # Assemble the scraped columns into one DataFrame.
        df = pd.DataFrame({'brand':brand_lst,
                           'product':product_lst,
                           'vol_price':vol_price_lst,
                           'rate':rate_lst,
                           'nb_reviews':nb_reviews_lst,
                           'sales_rank':sales_rank_lst,
                           'product_url':product_url_lst})
        return df
if __name__ == '__main__':
    # Script entry point: build the crawler and start the full scrape.
    GlowPick().run()
"pandas.read_csv",
"selenium.webdriver.Chrome",
"time.sleep",
"os.path.isfile",
"selenium.webdriver.common.action_chains.ActionChains",
"pandas.DataFrame",
"pandas.concat"
] | [((577, 595), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (593, 595), False, 'from selenium import webdriver\n'), ((698, 712), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (710, 712), True, 'import pandas as pd\n'), ((781, 831), 'os.path.isfile', 'os.path.isfile', (['"""../dataset/glowpick_products.csv"""'], {}), "('../dataset/glowpick_products.csv')\n", (795, 831), False, 'import os\n'), ((3334, 3347), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3344, 3347), False, 'import time\n'), ((3533, 3546), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3543, 3546), False, 'import time\n'), ((4125, 4150), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['self.driver'], {}), '(self.driver)\n', (4137, 4150), False, 'from selenium.webdriver.common.action_chains import ActionChains\n'), ((5638, 5842), 'pandas.DataFrame', 'pd.DataFrame', (["{'brand': brand_lst, 'product': product_lst, 'vol_price': vol_price_lst,\n 'rate': rate_lst, 'nb_reviews': nb_reviews_lst, 'sales_rank':\n sales_rank_lst, 'product_url': product_url_lst}"], {}), "({'brand': brand_lst, 'product': product_lst, 'vol_price':\n vol_price_lst, 'rate': rate_lst, 'nb_reviews': nb_reviews_lst,\n 'sales_rank': sales_rank_lst, 'product_url': product_url_lst})\n", (5650, 5842), True, 'import pandas as pd\n'), ((851, 898), 'pandas.read_csv', 'pd.read_csv', (['"""../dataset/glowpick_products.csv"""'], {}), "('../dataset/glowpick_products.csv')\n", (862, 898), True, 'import pandas as pd\n'), ((923, 956), 'pandas.concat', 'pd.concat', (['[total_df, df]'], {'axis': '(0)'}), '([total_df, df], axis=0)\n', (932, 956), True, 'import pandas as pd\n'), ((4338, 4353), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (4348, 4353), False, 'import time\n'), ((2653, 2686), 'pandas.concat', 'pd.concat', (['[total_df, df]'], {'axis': '(0)'}), '([total_df, df], axis=0)\n', (2662, 2686), True, 'import pandas as pd\n')] |
"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
import kodi
import dom_parser2
import log_utils # @UnusedImport
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import VIDEO_TYPES
import scraper
# Default scrape endpoint; Scraper.__init__ overrides it with the
# user-configured kodi setting '<name>-base_url'.
BASE_URL = 'http://allrls.me'
class Scraper(scraper.Scraper):
    """Scraper for RLSSource.net release-blog posts.

    Searches the blog for movie/episode posts and extracts hoster links
    from a matched post page, using the post title as a quality hint.
    """
    base_url = BASE_URL

    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout = timeout
        # The base url is user-configurable through the addon settings.
        self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))

    @classmethod
    def provides(cls):
        """Return the set of video types this scraper can handle."""
        return frozenset([VIDEO_TYPES.MOVIE, VIDEO_TYPES.EPISODE])

    @classmethod
    def get_name(cls):
        """Return the display name of this scraper."""
        return 'RLSSource.net'

    def get_sources(self, video):
        """Scrape hoster links for *video* from its matched blog post.

        :param video: video object previously matched via get_url()
        :return: list of hoster dicts (possibly empty)
        """
        hosters = []
        source_url = self.get_url(video)
        if not source_url or source_url == FORCE_NO_MATCH: return hosters
        url = scraper_utils.urljoin(self.base_url, source_url)
        html = self._http_get(url, require_debrid=False, cache_limit=.5)
        q_str = ''
        # The post title is used as the quality hint for all hoster links.
        match = re.search('class="entry-title">([^<]+)', html)
        if match:
            q_str = match.group(1)
        # Bug fix: raw string — '\s' and '\|' in a plain literal are invalid
        # escape sequences (DeprecationWarning, future SyntaxError).
        pattern = r'href="?([^" ]+)(?:[^>]+>){2}\s+\|'
        for match in re.finditer(pattern, html, re.DOTALL):
            url = match.group(1)
            if 'adf.ly' in url:
                # Skip ad-shortener links.
                continue
            hoster = {'multi-part': False, 'class': self, 'views': None, 'url': url, 'rating': None, 'quality': None, 'direct': False}
            hoster['host'] = urlparse.urlsplit(url).hostname
            hoster['quality'] = scraper_utils.blog_get_quality(video, q_str, hoster['host'])
            hosters.append(hoster)
        return hosters

    def get_url(self, video):
        """Return the source url for *video* via the shared blog matcher."""
        return self._blog_get_url(video, delim=' ')

    @classmethod
    def get_settings(cls):
        """Return the addon settings XML snippets for this scraper."""
        settings = super(cls, cls).get_settings()
        settings = scraper_utils.disable_sub_check(settings)
        name = cls.get_name()
        settings.append('         <setting id="%s-filter" type="slider" range="0,180" option="int" label="    Filter results older than (0=No Filter) (days)" default="30" visible="eq(-3,true)"/>' % (name))
        settings.append('         <setting id="%s-select" type="enum" label="    Automatically Select" values="Most Recent|Highest Quality" default="0" visible="eq(-4,true)"/>' % (name))
        return settings

    def search(self, video_type, title, year, season=''):  # @UnusedVariable
        """Search the blog and return candidate posts for *title*/*year*."""
        html = self._http_get(self.base_url, params={'s': title, 'go': 'Search'}, require_debrid=False, cache_limit=1)
        posts = []
        # Bug fix: raw string for the '\d' escape (same reason as above).
        for post in dom_parser2.parse_dom(html, 'div', {'id': re.compile(r'post-\d+')}):
            match = dom_parser2.parse_dom(post.content, 'a', req='href')
            if not match: continue
            match_url = match[0].attrs['href']
            match_title = match[0].content
            match_date = dom_parser2.parse_dom(post, 'span', {'class': 'entry-date'})
            posts.append('<url>%s</url><title>%s</title><date>%s</date>' % (match_url, match_title, match_date))
        pattern = '<url>(?P<url>.*?)</url><title>(?P<post_title>.*?)</title><date>(?P<post_date>.*?)</date>'
        date_format = '%B %d, %Y'
        return self._blog_proc_results('\n'.join(posts), pattern, date_format, video_type, title, year)
| [
"salts_lib.scraper_utils.urljoin",
"salts_lib.scraper_utils.blog_get_quality",
"dom_parser2.parse_dom",
"re.compile",
"salts_lib.scraper_utils.disable_sub_check",
"re.finditer",
"urlparse.urlsplit",
"re.search"
] | [((1565, 1613), 'salts_lib.scraper_utils.urljoin', 'scraper_utils.urljoin', (['self.base_url', 'source_url'], {}), '(self.base_url, source_url)\n', (1586, 1613), False, 'from salts_lib import scraper_utils\n'), ((1731, 1777), 're.search', 're.search', (['"""class="entry-title">([^<]+)"""', 'html'], {}), '(\'class="entry-title">([^<]+)\', html)\n', (1740, 1777), False, 'import re\n'), ((1907, 1944), 're.finditer', 're.finditer', (['pattern', 'html', 're.DOTALL'], {}), '(pattern, html, re.DOTALL)\n', (1918, 1944), False, 'import re\n'), ((2594, 2635), 'salts_lib.scraper_utils.disable_sub_check', 'scraper_utils.disable_sub_check', (['settings'], {}), '(settings)\n', (2625, 2635), False, 'from salts_lib import scraper_utils\n'), ((2277, 2337), 'salts_lib.scraper_utils.blog_get_quality', 'scraper_utils.blog_get_quality', (['video', 'q_str', "hoster['host']"], {}), "(video, q_str, hoster['host'])\n", (2307, 2337), False, 'from salts_lib import scraper_utils\n'), ((3409, 3461), 'dom_parser2.parse_dom', 'dom_parser2.parse_dom', (['post.content', '"""a"""'], {'req': '"""href"""'}), "(post.content, 'a', req='href')\n", (3430, 3461), False, 'import dom_parser2\n'), ((3612, 3672), 'dom_parser2.parse_dom', 'dom_parser2.parse_dom', (['post', '"""span"""', "{'class': 'entry-date'}"], {}), "(post, 'span', {'class': 'entry-date'})\n", (3633, 3672), False, 'import dom_parser2\n'), ((2213, 2235), 'urlparse.urlsplit', 'urlparse.urlsplit', (['url'], {}), '(url)\n', (2230, 2235), False, 'import urlparse\n'), ((3363, 3386), 're.compile', 're.compile', (['"""post-\\\\d+"""'], {}), "('post-\\\\d+')\n", (3373, 3386), False, 'import re\n')] |
#!/usr/bin/env python
# from google.cloud import speech
from google.cloud import speech_v1p1beta1 as speech
from google.cloud.speech_v1p1beta1 import enums
from google.cloud.speech_v1p1beta1 import types
from google.api_core.exceptions import InvalidArgument, OutOfRange
import pyaudio
import Queue
import rospy
import rospkg
import signal
import yaml
from std_msgs.msg import String
class GspeechClient:
    """Streams microphone audio to Google Cloud Speech-to-Text and publishes
    each final transcript as a std_msgs/String on /google_client/text.

    Audio is captured by a PyAudio callback into a queue, yielded to the
    streaming API by generator(), and parsed in _listen_print_loop().
    """
    def __init__(self):
        # Audio stream input setup
        FORMAT = pyaudio.paInt16
        CHANNELS = 1
        RATE = 44100
        self.CHUNK = 4096
        self.audio = pyaudio.PyAudio()
        self.stream = self.audio.open(format=FORMAT, channels=CHANNELS,
                                      rate=RATE, input=True,
                                      frames_per_buffer=self.CHUNK,
                                      stream_callback=self.get_data)
        self._buff = Queue.Queue()  # Buffer to hold audio data
        self.closed = False
        # ROS Text Publisher
        self.text_pub = rospy.Publisher('/google_client/text', String, queue_size=10)
        # Context clues (speech hint phrases) loaded from a yaml file
        rospack = rospkg.RosPack()
        yamlFileDir = rospack.get_path('dialogflow_ros') + '/config/context.yaml'
        with open(yamlFileDir, 'r') as f:
            # Bug fix: yaml.load() without an explicit Loader is deprecated
            # and can execute arbitrary code; the context file is plain data,
            # so safe_load is both sufficient and safe.
            self.context = yaml.safe_load(f)

    def get_data(self, in_data, frame_count, time_info, status):
        """PyAudio callback to continuously get audio data from the server and put it in a buffer.
        """
        self._buff.put(in_data)
        return None, pyaudio.paContinue

    def generator(self):
        """Generator function that continuously yields audio chunks from the buffer.
        Used to stream data to the Google Speech API Asynchronously.
        """
        while not self.closed:
            # Block for the first chunk of each batch; None signals shutdown.
            chunk = self._buff.get()
            if chunk is None:
                return
            data = [chunk]
            # Drain whatever else is queued with a non-blocking get().
            while True:
                try:
                    chunk = self._buff.get(block=False)
                    if chunk is None:
                        return
                    data.append(chunk)
                except Queue.Empty:
                    break
            yield b''.join(data)

    def _listen_print_loop(self, responses):
        """Iterates through server responses and publishes final transcripts.

        The responses passed is a generator that will block until a response
        is provided by the server. Each response may contain multiple results,
        and each result may contain multiple alternatives; for details, see
        https://goo.gl/tjCPAU. Only the transcription of the top alternative
        of the top result is used.
        """
        try:
            for response in responses:
                # If not a valid response, move on to next potential one
                if not response.results:
                    continue
                # For streaming, only the first result matters: once it is
                # `is_final`, the API moves on to the next utterance.
                result = response.results[0]
                if not result.alternatives:
                    continue
                # Transcription of the top alternative.
                transcript = result.alternatives[0].transcript
                # Parse the final utterance
                if result.is_final:
                    rospy.logdebug("Google Speech result: {}".format(transcript))
                    # Received data is Unicode, convert it to string
                    transcript = transcript.encode('utf-8')
                    # Strip the initial space if any
                    if transcript.startswith(' '):
                        transcript = transcript[1:]
                    # Exit if needed
                    if transcript.lower() == 'exit' or rospy.is_shutdown():
                        self.shutdown()
                    # Bug fix: the original published ``result[1]``, but a
                    # StreamingRecognitionResult is not indexable and the
                    # parsed ``transcript`` was never used; publish it instead.
                    self.text_pub.publish(transcript)
        except InvalidArgument as e:
            # Restart the stream on recoverable API errors.
            rospy.logwarn("{} caught in Mic. Client".format(e))
            self.gspeech_client()
        except OutOfRange as e:
            rospy.logwarn("{} caught in Mic. Client".format(e))
            self.gspeech_client()

    def gspeech_client(self):
        """Creates the Google Speech API client, configures it, and sends/gets
        audio/text data for parsing.
        """
        language_code = 'en-US'
        # Hint phrases for the API
        context = types.SpeechContext(phrases=self.context)
        client = speech.SpeechClient()
        # Create metadata object, helps processing
        metadata = types.RecognitionMetadata()
        # Interaction Type:
        #   VOICE_SEARCH: Transcribe spoken questions and queries into text.
        #   VOICE_COMMAND: Transcribe voice commands, such as for controlling a device.
        metadata.interaction_type = (enums.RecognitionMetadata.InteractionType.VOICE_COMMAND)
        # Microphone Distance:
        #   NEARFIELD: The audio was captured from a closely placed microphone.
        #   MIDFIELD: The speaker is within 3 meters of the microphone.
        #   FARFIELD: The speaker is more than 3 meters away from the microphone.
        metadata.microphone_distance = (enums.RecognitionMetadata.MicrophoneDistance.MIDFIELD)
        # Device Type:
        #   PC: Speech was recorded using a personal computer or tablet.
        #   VEHICLE: Speech was recorded in a vehicle.
        #   OTHER_OUTDOOR_DEVICE: Speech was recorded outdoors.
        #   OTHER_INDOOR_DEVICE: Speech was recorded indoors.
        metadata.recording_device_type = (enums.RecognitionMetadata.RecordingDeviceType.PC)
        # Media Type:
        #   AUDIO: The speech data is an audio recording.
        #   VIDEO: The speech data originally recorded on a video.
        metadata.original_media_type = (enums.RecognitionMetadata.OriginalMediaType.AUDIO)
        config = types.RecognitionConfig(
            encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,
            sample_rate_hertz=44100,
            language_code=language_code,
            speech_contexts=[context],
            use_enhanced=True,
            model='command_and_search',
            metadata=metadata)
        streaming_config = types.StreamingRecognitionConfig(
            config=config,
            single_utterance=False,
            interim_results=False)
        # Hack from Google Speech Python docs, very pythonic c:
        requests = (types.StreamingRecognizeRequest(audio_content=content) for content in self.generator())
        responses = client.streaming_recognize(streaming_config, requests)
        self._listen_print_loop(responses)

    def shutdown(self):
        """Shut down as cleanly as possible.

        Bug fix: the 'exit' voice command path in _listen_print_loop() called
        ``self.shutdown()``, which previously did not exist (AttributeError).
        """
        rospy.loginfo("Google STT shutting down")
        self.closed = True
        self._buff.put(None)  # unblock generator()
        self.stream.close()
        self.audio.terminate()
        exit()

    def __del__(self):
        """Destructor: delegate cleanup to shutdown()."""
        self.shutdown()

    def start_client(self):
        """Entry function to start the client"""
        try:
            rospy.loginfo("Google STT started")
            self.gspeech_client()
        except KeyboardInterrupt:
            self.__del__()
def signal_handler(sig, frame):
    """SIGINT handler: shut down the ROS node and exit.

    Fix: the first parameter was named ``signal``, shadowing the stdlib
    ``signal`` module; renamed to ``sig``. The handler is always invoked
    positionally by the interpreter, so the rename is backward-compatible.
    """
    rospy.signal_shutdown("Order 66 Received")
    exit("Order 66 Received")
if __name__ == '__main__':
    # Entry point: register the ROS node and SIGINT handler, then stream audio.
    # rospy.init_node('frasier_mic_client', log_level=rospy.DEBUG)
    rospy.init_node('google_client')
    signal.signal(signal.SIGINT, signal_handler)
    GspeechClient().start_client()
| [
"google.cloud.speech_v1p1beta1.types.StreamingRecognitionConfig",
"signal.signal",
"rospy.Publisher",
"rospy.signal_shutdown",
"google.cloud.speech_v1p1beta1.types.SpeechContext",
"rospy.is_shutdown",
"rospy.init_node",
"google.cloud.speech_v1p1beta1.types.RecognitionMetadata",
"yaml.load",
"googl... | [((7413, 7455), 'rospy.signal_shutdown', 'rospy.signal_shutdown', (['"""Order 66 Received"""'], {}), "('Order 66 Received')\n", (7434, 7455), False, 'import rospy\n'), ((7586, 7618), 'rospy.init_node', 'rospy.init_node', (['"""google_client"""'], {}), "('google_client')\n", (7601, 7618), False, 'import rospy\n'), ((7623, 7667), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal_handler'], {}), '(signal.SIGINT, signal_handler)\n', (7636, 7667), False, 'import signal\n'), ((589, 606), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ([], {}), '()\n', (604, 606), False, 'import pyaudio\n'), ((898, 911), 'Queue.Queue', 'Queue.Queue', ([], {}), '()\n', (909, 911), False, 'import Queue\n'), ((1023, 1084), 'rospy.Publisher', 'rospy.Publisher', (['"""/google_client/text"""', 'String'], {'queue_size': '(10)'}), "('/google_client/text', String, queue_size=10)\n", (1038, 1084), False, 'import rospy\n'), ((1141, 1157), 'rospkg.RosPack', 'rospkg.RosPack', ([], {}), '()\n', (1155, 1157), False, 'import rospkg\n'), ((4697, 4738), 'google.cloud.speech_v1p1beta1.types.SpeechContext', 'types.SpeechContext', ([], {'phrases': 'self.context'}), '(phrases=self.context)\n', (4716, 4738), False, 'from google.cloud.speech_v1p1beta1 import types\n'), ((4756, 4777), 'google.cloud.speech_v1p1beta1.SpeechClient', 'speech.SpeechClient', ([], {}), '()\n', (4775, 4777), True, 'from google.cloud import speech_v1p1beta1 as speech\n'), ((4848, 4875), 'google.cloud.speech_v1p1beta1.types.RecognitionMetadata', 'types.RecognitionMetadata', ([], {}), '()\n', (4873, 4875), False, 'from google.cloud.speech_v1p1beta1 import types\n'), ((6125, 6366), 'google.cloud.speech_v1p1beta1.types.RecognitionConfig', 'types.RecognitionConfig', ([], {'encoding': 'enums.RecognitionConfig.AudioEncoding.LINEAR16', 'sample_rate_hertz': '(44100)', 'language_code': 'language_code', 'speech_contexts': '[context]', 'use_enhanced': '(True)', 'model': '"""command_and_search"""', 'metadata': 'metadata'}), 
"(encoding=enums.RecognitionConfig.AudioEncoding.\n LINEAR16, sample_rate_hertz=44100, language_code=language_code,\n speech_contexts=[context], use_enhanced=True, model=\n 'command_and_search', metadata=metadata)\n", (6148, 6366), False, 'from google.cloud.speech_v1p1beta1 import types\n'), ((6465, 6563), 'google.cloud.speech_v1p1beta1.types.StreamingRecognitionConfig', 'types.StreamingRecognitionConfig', ([], {'config': 'config', 'single_utterance': '(False)', 'interim_results': '(False)'}), '(config=config, single_utterance=False,\n interim_results=False)\n', (6497, 6563), False, 'from google.cloud.speech_v1p1beta1 import types\n'), ((6966, 7007), 'rospy.loginfo', 'rospy.loginfo', (['"""Google STT shutting down"""'], {}), "('Google STT shutting down')\n", (6979, 7007), False, 'import rospy\n'), ((1309, 1321), 'yaml.load', 'yaml.load', (['f'], {}), '(f)\n', (1318, 1321), False, 'import yaml\n'), ((6681, 6735), 'google.cloud.speech_v1p1beta1.types.StreamingRecognizeRequest', 'types.StreamingRecognizeRequest', ([], {'audio_content': 'content'}), '(audio_content=content)\n', (6712, 6735), False, 'from google.cloud.speech_v1p1beta1 import types\n'), ((7241, 7276), 'rospy.loginfo', 'rospy.loginfo', (['"""Google STT started"""'], {}), "('Google STT started')\n", (7254, 7276), False, 'import rospy\n'), ((4019, 4038), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (4036, 4038), False, 'import rospy\n')] |
# coding: utf-8
import pprint
import re
import six
class CallbackLifeCycleHookOption:
    """Request model for the Auto Scaling lifecycle-hook callback API.

    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """

    sensitive_list = []

    openapi_types = {
        'lifecycle_action_key': 'str',
        'instance_id': 'str',
        'lifecycle_hook_name': 'str',
        'lifecycle_action_result': 'str'
    }

    attribute_map = {
        'lifecycle_action_key': 'lifecycle_action_key',
        'instance_id': 'instance_id',
        'lifecycle_hook_name': 'lifecycle_hook_name',
        'lifecycle_action_result': 'lifecycle_action_result'
    }

    def __init__(self, lifecycle_action_key=None, instance_id=None, lifecycle_hook_name=None, lifecycle_action_result=None):
        """CallbackLifeCycleHookOption - a model defined in huaweicloud sdk"""
        self._lifecycle_action_key = None
        self._instance_id = None
        self._lifecycle_hook_name = None
        self._lifecycle_action_result = None
        self.discriminator = None
        if lifecycle_action_key is not None:
            self.lifecycle_action_key = lifecycle_action_key
        if instance_id is not None:
            self.instance_id = instance_id
        if lifecycle_hook_name is not None:
            self.lifecycle_hook_name = lifecycle_hook_name
        self.lifecycle_action_result = lifecycle_action_result

    @property
    def lifecycle_action_key(self):
        """Gets the lifecycle_action_key of this CallbackLifeCycleHookOption.

        Lifecycle action token, obtained from the API that queries a scaling
        instance's suspension info. Identifies the callback target; required
        when instance_id is not supplied. If both are supplied, this field
        takes precedence.

        :return: The lifecycle_action_key of this CallbackLifeCycleHookOption.
        :rtype: str
        """
        return self._lifecycle_action_key

    @lifecycle_action_key.setter
    def lifecycle_action_key(self, lifecycle_action_key):
        """Sets the lifecycle_action_key of this CallbackLifeCycleHookOption.

        :param lifecycle_action_key: The lifecycle_action_key of this CallbackLifeCycleHookOption.
        :type: str
        """
        self._lifecycle_action_key = lifecycle_action_key

    @property
    def instance_id(self):
        """Gets the instance_id of this CallbackLifeCycleHookOption.

        Instance ID. Identifies the callback target; required when
        lifecycle_action_key is not supplied.

        :return: The instance_id of this CallbackLifeCycleHookOption.
        :rtype: str
        """
        return self._instance_id

    @instance_id.setter
    def instance_id(self, instance_id):
        """Sets the instance_id of this CallbackLifeCycleHookOption.

        :param instance_id: The instance_id of this CallbackLifeCycleHookOption.
        :type: str
        """
        self._instance_id = instance_id

    @property
    def lifecycle_hook_name(self):
        """Gets the lifecycle_hook_name of this CallbackLifeCycleHookOption.

        Lifecycle hook name. Required when lifecycle_action_key is not supplied.

        :return: The lifecycle_hook_name of this CallbackLifeCycleHookOption.
        :rtype: str
        """
        return self._lifecycle_hook_name

    @lifecycle_hook_name.setter
    def lifecycle_hook_name(self, lifecycle_hook_name):
        """Sets the lifecycle_hook_name of this CallbackLifeCycleHookOption.

        :param lifecycle_hook_name: The lifecycle_hook_name of this CallbackLifeCycleHookOption.
        :type: str
        """
        self._lifecycle_hook_name = lifecycle_hook_name

    @property
    def lifecycle_action_result(self):
        """Gets the lifecycle_action_result of this CallbackLifeCycleHookOption.

        Lifecycle callback action. ABANDON: terminate. CONTINUE: continue.
        EXTEND: extend the timeout, one hour per call.

        :return: The lifecycle_action_result of this CallbackLifeCycleHookOption.
        :rtype: str
        """
        return self._lifecycle_action_result

    @lifecycle_action_result.setter
    def lifecycle_action_result(self, lifecycle_action_result):
        """Sets the lifecycle_action_result of this CallbackLifeCycleHookOption.

        :param lifecycle_action_result: The lifecycle_action_result of this CallbackLifeCycleHookOption.
        :type: str
        """
        self._lifecycle_action_result = lifecycle_action_result

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # dict.items() works on both py2 and py3; the six.iteritems shim
        # the original used is obsolete and was the only six dependency here.
        for attr, _ in self.openapi_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                if attr in self.sensitive_list:
                    # Mask sensitive fields in serialized output.
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, CallbackLifeCycleHookOption):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"six.iteritems"
] | [((4775, 4808), 'six.iteritems', 'six.iteritems', (['self.openapi_types'], {}), '(self.openapi_types)\n', (4788, 4808), False, 'import six\n')] |
from collections import OrderedDict
from vp_suite.model_blocks import ConvLSTM
from vp_suite.models.precipitation_nowcasting.ef_blocks import Encoder_Forecaster
class EF_ConvLSTM(Encoder_Forecaster):
    r"""Encoder-Forecaster video prediction model built on ConvLSTM cells.

    Reimplementation of the model from "Convolutional LSTM Network: A Machine
    Learning Approach for Precipitation Nowcasting" by Shi et al.
    (https://arxiv.org/abs/1506.04214), following the PyTorch code at
    https://github.com/Hzzone/Precipitation-Nowcasting, which realizes the
    encoder-forecaster layout from "Deep Learning for Precipitation
    Nowcasting: A Benchmark and A New Model" by Shi et al.
    (https://arxiv.org/abs/1706.03458). Convolutional up-/down-sampling and
    recurrent layers are stacked so that multiple spatial scales are covered.

    Note:
        The default hyperparameters target input frames of size (64, 64);
        adjust the architecture for substantially different image sizes.
    """
    # model-specific constants
    NAME = "EF-ConvLSTM (Shi et al.)"
    PAPER_REFERENCE = "https://arxiv.org/abs/1506.04214"
    CODE_REFERENCE = "https://github.com/Hzzone/Precipitation-Nowcasting"
    MATCHES_REFERENCE = "Yes"

    # hyperparameters (c=channels, k=kernel size, s=stride, p=padding)
    num_layers = 3  #: number of recurrent cell layers
    enc_c = [16, 64, 64, 96, 96, 96]  #: conv/rnn channels; length 2*num_layers
    dec_c = [96, 96, 96, 96, 64, 16]  #: conv/rnn channels; length 2*num_layers
    # conv blocks
    enc_conv_names = ["conv1_leaky_1", "conv2_leaky_1", "conv3_leaky_1"]  #: encoder conv layer names (internal init)
    enc_conv_k = [3, 3, 3]  #: encoder conv kernel sizes per layer
    enc_conv_s = [1, 2, 2]  #: encoder conv strides per layer
    enc_conv_p = [1, 1, 1]  #: encoder conv paddings per layer
    dec_conv_names = ["deconv1_leaky_1", "deconv2_leaky_1", "deconv3_leaky_1"]  #: decoder conv layer names (internal init)
    dec_conv_k = [4, 4, 3]  #: decoder conv kernel sizes per layer
    dec_conv_s = [2, 2, 1]  #: decoder conv strides per layer
    dec_conv_p = [1, 1, 1]  #: decoder conv paddings per layer
    # recurrent blocks
    enc_rnn_k = [3, 3, 3]  #: encoder rnn kernel sizes per layer
    enc_rnn_s = [1, 1, 1]  #: encoder rnn strides per layer
    enc_rnn_p = [1, 1, 1]  #: encoder rnn paddings per layer
    dec_rnn_k = [3, 3, 3]  #: decoder rnn kernel sizes per layer
    dec_rnn_s = [1, 1, 1]  #: decoder rnn strides per layer
    dec_rnn_p = [1, 1, 1]  #: decoder rnn paddings per layer
    # final convs
    final_conv_1_name = "identity"  #: final conv block 1 name
    final_conv_1_c = 16  #: final conv block 1 out channels
    final_conv_1_k = 3  #: final conv block 1 kernel size
    final_conv_1_s = 1  #: final conv block 1 stride
    final_conv_1_p = 1  #: final conv block 1 padding
    final_conv_2_name = "conv3_3"  #: final conv block 2 name
    final_conv_2_k = 1  #: final conv block 2 kernel size
    final_conv_2_s = 1  #: final conv block 2 stride
    final_conv_2_p = 0  #: final conv block 2 padding

    def __init__(self, device, **model_kwargs):
        super(EF_ConvLSTM, self).__init__(device, **model_kwargs)

    def _build_encoder_decoder(self):
        """Assemble conv specs and ConvLSTM cells for encoder and decoder.

        Returns a tuple (enc_convs, enc_rnns, dec_convs, dec_rnns) with one
        entry per layer; each conv spec is an OrderedDict mapping
        layer name -> [in_c, out_c, kernel, stride, padding].
        """
        in_ch = self.img_c
        enc_convs, enc_rnns = [], []
        for idx in range(self.num_layers):
            mid_ch = self.enc_c[2 * idx]
            out_ch = self.enc_c[2 * idx + 1]
            # downsampling conv followed by a recurrent cell
            enc_convs.append(OrderedDict({
                self.enc_conv_names[idx]: [in_ch, mid_ch, self.enc_conv_k[idx],
                                           self.enc_conv_s[idx], self.enc_conv_p[idx]]
            }))
            enc_rnns.append(ConvLSTM(device=self.device, in_channels=mid_ch, enc_channels=out_ch,
                                     state_h=self.enc_rnn_state_h[idx], state_w=self.enc_rnn_state_w[idx],
                                     kernel_size=self.enc_rnn_k[idx], stride=self.enc_rnn_s[idx],
                                     padding=self.enc_rnn_p[idx]))
            in_ch = out_ch
        dec_convs, dec_rnns = [], []
        for idx in range(self.num_layers):
            mid_ch = self.dec_c[2 * idx]
            out_ch = self.dec_c[2 * idx + 1]
            # recurrent cell followed by an upsampling conv
            dec_rnns.append(ConvLSTM(device=self.device, in_channels=in_ch, enc_channels=mid_ch,
                                     state_h=self.dec_rnn_state_h[idx], state_w=self.dec_rnn_state_w[idx],
                                     kernel_size=self.dec_rnn_k[idx], stride=self.dec_rnn_s[idx],
                                     padding=self.dec_rnn_p[idx]))
            spec = {
                self.dec_conv_names[idx]: [mid_ch, out_ch, self.dec_conv_k[idx],
                                           self.dec_conv_s[idx], self.dec_conv_p[idx]]
            }
            if idx == self.num_layers - 1:
                # the last decoder layer appends the two output convs
                spec[self.final_conv_1_name] = [out_ch, self.final_conv_1_c, self.final_conv_1_k,
                                                self.final_conv_1_s, self.final_conv_1_p]
                spec[self.final_conv_2_name] = [self.final_conv_1_c, self.img_c, self.final_conv_2_k,
                                                self.final_conv_2_s, self.final_conv_2_p]
            dec_convs.append(OrderedDict(spec))
            in_ch = out_ch
        return enc_convs, enc_rnns, dec_convs, dec_rnns
| [
"collections.OrderedDict",
"vp_suite.model_blocks.ConvLSTM"
] | [((3836, 3965), 'collections.OrderedDict', 'OrderedDict', (['{self.enc_conv_names[n]: [layer_in_c, layer_mid_c, self.enc_conv_k[n], self\n .enc_conv_s[n], self.enc_conv_p[n]]}'], {}), '({self.enc_conv_names[n]: [layer_in_c, layer_mid_c, self.\n enc_conv_k[n], self.enc_conv_s[n], self.enc_conv_p[n]]})\n', (3847, 3965), False, 'from collections import OrderedDict\n'), ((4062, 4306), 'vp_suite.model_blocks.ConvLSTM', 'ConvLSTM', ([], {'device': 'self.device', 'in_channels': 'layer_mid_c', 'enc_channels': 'layer_out_c', 'state_h': 'self.enc_rnn_state_h[n]', 'state_w': 'self.enc_rnn_state_w[n]', 'kernel_size': 'self.enc_rnn_k[n]', 'stride': 'self.enc_rnn_s[n]', 'padding': 'self.enc_rnn_p[n]'}), '(device=self.device, in_channels=layer_mid_c, enc_channels=\n layer_out_c, state_h=self.enc_rnn_state_h[n], state_w=self.\n enc_rnn_state_w[n], kernel_size=self.enc_rnn_k[n], stride=self.\n enc_rnn_s[n], padding=self.enc_rnn_p[n])\n', (4070, 4306), False, 'from vp_suite.model_blocks import ConvLSTM\n'), ((4702, 4945), 'vp_suite.model_blocks.ConvLSTM', 'ConvLSTM', ([], {'device': 'self.device', 'in_channels': 'layer_in_c', 'enc_channels': 'layer_mid_c', 'state_h': 'self.dec_rnn_state_h[n]', 'state_w': 'self.dec_rnn_state_w[n]', 'kernel_size': 'self.dec_rnn_k[n]', 'stride': 'self.dec_rnn_s[n]', 'padding': 'self.dec_rnn_p[n]'}), '(device=self.device, in_channels=layer_in_c, enc_channels=\n layer_mid_c, state_h=self.dec_rnn_state_h[n], state_w=self.\n dec_rnn_state_w[n], kernel_size=self.dec_rnn_k[n], stride=self.\n dec_rnn_s[n], padding=self.dec_rnn_p[n])\n', (4710, 4945), False, 'from vp_suite.model_blocks import ConvLSTM\n'), ((5746, 5772), 'collections.OrderedDict', 'OrderedDict', (['dec_conv_dict'], {}), '(dec_conv_dict)\n', (5757, 5772), False, 'from collections import OrderedDict\n')] |
from openpyxl import load_workbook
import mysql.connector
# Excel
# Load the workbook and read every row below the header (row 1) as a plain
# tuple of cell values. Each tuple is assumed to match the
# (title, price, is_necessary) column order of the INSERT below — TODO confirm.
workbook = load_workbook('imported.xlsx')
sheet = workbook.active
values = []
for row in sheet.iter_rows(min_row=2, values_only=True):
    print(row)
    values.append(row)
# Database
# NOTE(review): '<PASSWORD>' is a placeholder — real credentials should come
# from configuration, not source code.
db = mysql.connector.connect(
    host='localhost',
    port=3306,
    user='root',
    password='<PASSWORD>',
    database='Plug_want_to_buy'
)
curses = db.cursor()
# Parameterized bulk insert; values are escaped by the driver.
sql = '''
insert into products (title, price, is_necessary)
values (%s, %s, %s);
'''
curses.executemany(sql, values)
db.commit()
print('เพิ่มข้อมูลจำนวน ' + str(curses.rowcount) + ' แถว')
curses.close()
db.close()
"openpyxl.load_workbook"
] | [((78, 108), 'openpyxl.load_workbook', 'load_workbook', (['"""imported.xlsx"""'], {}), "('imported.xlsx')\n", (91, 108), False, 'from openpyxl import load_workbook\n')] |
# mypy: ignore-errors
import inspect
from typing import Tuple
import warnings
import torch
class LazyInitializationMixin:
    """A mixin for modules that lazily initialize buffers and parameters.
    Unlike regular modules, subclasses of this module can initialize
    buffers and parameters outside of the constructor (``__init__``).
    This allows you to, for example, initialize parameters in ``forward``
    method to determine the shape of the weight based on the initial input.
    Be sure to run "dummy" forward once to initialize all parameters that
    should be trained, before passing ``module.parameters()`` to an optimizer;
    otherwise weights initialized after ``module.parameters()`` (e.g., in
    ``forward`` function) will never be trained.
    Note that lazy modules cannot validate if the shape is correct during
    deserialization.  Also note that the initial weights may become different
    from the original (non-lazy) module even if the random seed is manually
    configured, as the order of initialization is different from the original
    one; especially, ``module.cuda()`` may cause the initialization to run on
    a GPU.
    The default value of lazy buffers and parameters are ``torch.Tensor([])``
    and ``UninitializedParameter()``, respectively.
    """
    # Subclasses must override these fields and list names of all buffers /
    # parameters that will be initialized lazily.
    lazy_buffer_names: Tuple[str, ...] = ()
    lazy_parameter_names: Tuple[str, ...] = ()
    def __init__(self, *args, **kwargs):
        # ``_lazy_ready`` stays False during construction so that
        # ``lazy_parmeters_determined`` short-circuits until all
        # placeholders below have been registered.
        self._lazy_ready = False
        super().__init__(*args, **kwargs)
        # Register placeholder defaults: an empty tensor for each lazy
        # buffer and the UninitializedParameter sentinel for each lazy
        # parameter; real values are materialized later (e.g. in forward).
        for name in self.lazy_buffer_names:
            self.register_buffer(name, torch.Tensor([]))
        for name in self.lazy_parameter_names:
            self.register_parameter(name, UninitializedParameter())
        self._register_load_state_dict_pre_hook(self._lazy_load_hook)
        self._lazy_ready = True
    @property
    def lazy_parmeters_determined(self):
        """Returns if all lazy parameters are determined.
        Subclasses can perform parameters initialization after all lazy
        parameters are determined.  Note that this may be called during
        ``__init__``.
        """
        # NOTE(review): "parmeters" is a typo in the public name; kept
        # as-is because callers depend on it.
        return self._lazy_ready and all([
            not isinstance(getattr(self, x), UninitializedParameter)
            for x in self.lazy_parameter_names])
    def state_dict(self, *args, **kwargs):
        """Returns a dictionary containing a whole state of the module.
        This function overrides the default behavior to exclude uninitialized
        parameter from serialization. This is needed because we need to
        discriminate lazy parameters (``UninitializedParameter()`) and
        initialized empty parameters (``torch.nn.Parameter(torch.Tensor())``)
        during deserialization.
        See comments of ``_lazy_load_hook`` for details.
        """
        destination = super().state_dict(*args, **kwargs)
        # Drop still-uninitialized lazy parameters so that deserialization
        # can tell "never initialized" apart from "initialized but empty".
        for name in self.lazy_parameter_names:
            if isinstance(getattr(self, name), UninitializedParameter):
                del destination[name]
        return destination
    def _lazy_load_hook(
            self, state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs):
        """load_state_dict pre-hook function for lazy buffers and parameters.
        The purpose of this hook is to check the current state and/or
        ``state_dict`` being loaded and ensure that both are states
        are properly initialized.
        See comment in ``torch.nn.Module._register_load_state_dict_pre_hook``
        for the details of the hook specification.
        """
        for name in self.lazy_buffer_names:
            key = prefix + name
            # A lazy buffer is "uninitialized" while it still has the
            # placeholder shape (0,) registered in ``__init__``.
            module_initialized = getattr(self, name).shape != (0,)
            state_initialized = state_dict[key].shape != (0,)
            if module_initialized and not state_initialized:
                raise RuntimeError(
                    'Can\'t load non-initialized buffers in already '
                    'initialized modules')
            elif not module_initialized and state_initialized:
                # Here we need to avoid a tensor size mismatch
                # this is a regular tensor without a materialize
                # method, so we can just resize for the load logic to copy
                # the contents later to the correct device the module
                # was moved to
                getattr(self, name).resize_(state_dict[key].size())
        for name in self.lazy_parameter_names:
            # The parameter does not exist in the loaded ``state_dict`` if the
            # original module was serialized before initializing lazy
            # parameters (see comments of ``state_dict``).
            key = prefix + name
            module_initialized = not isinstance(
                getattr(self, name), UninitializedParameter)
            state_initialized = key in state_dict
            if module_initialized and not state_initialized:
                raise RuntimeError(
                    'Can\'t load uninitialized parameters in already '
                    'initialized modules')
            elif not module_initialized and state_initialized:
                # Allocate storage matching the serialized shape so the
                # regular load logic can copy the values in.
                getattr(self, name).materialize(state_dict[key].shape)
            elif key not in state_dict and not module_initialized:
                # Neither side is initialized: inject the sentinel so the
                # strict-key check of load_state_dict does not complain.
                param = UninitializedParameter()
                state_dict[key] = param
class UninitializedParameter(torch.nn.Parameter):
    # Sentinel parameter type used by LazyInitializationMixin for weights
    # whose shape is not yet known.
    def __repr__(self):
        return 'Uninitialized lazy parameter'
    def share_memory_(self):
        # Sharing memory requires real storage; the sentinel has none yet.
        raise RuntimeError(
            'Can\'t share memory on an unitialized parameter. '
            'Run forward to initialize the network before calling '
            '`module.share_memory()`.')
    @property
    def is_leaf(self):
        # Hacky workaround to detect use of uninitialized lazy parameters.
        # This overrides ``is_leaf`` attribute which should always be ``True``
        # for parameters; optimizers check for this attribute and raise an
        # error if non-leaf tensors are detected.
        # The caller's frame is inspected so the warning fires only when an
        # optimizer (torch.optim) touches this parameter.
        frame = inspect.currentframe()
        if frame.f_back.f_globals['__package__'].startswith('torch.optim'):
            warnings.warn('''
    Use of uninitialized lazy parameter in Optimizer has been detected.
    Maybe you forgot to run forward before passing `module.parameters()` to the optimizer?''')  # NOQA
        return True
    def materialize(self, shape, device=None, dtype=None):
        r"""Create a Parameter with the same properties of the uninitialized
        one. Given a shape, it materializes a parameter in the same device
        and with the same `dtype` as the current one or the specified ones in
        the arguments.
        Args:
            shape : (tuple): the shape for the materialized tensor.
            device (:class:`torch.device`): the desired device of the
                parameters
            and buffers in this module. Optional.
            dtype (:class:`torch.dtype`): the desired floating point type of
            the floating point parameters and buffers in this module.
            Optional.
        """
        if device is None:
            device = self.data.device
        if dtype is None:
            dtype = self.data.dtype
        # NOTE: the new storage is uninitialized (torch.empty); the load
        # logic copies real values in afterwards.
        self.data = torch.empty(shape, device=device, dtype=dtype)
        # Demote to a plain Parameter so is_leaf/repr behave normally again.
        self.__class__ = torch.nn.Parameter
| [
"warnings.warn",
"inspect.currentframe",
"torch.empty",
"torch.Tensor"
] | [((6194, 6216), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (6214, 6216), False, 'import inspect\n'), ((7401, 7447), 'torch.empty', 'torch.empty', (['shape'], {'device': 'device', 'dtype': 'dtype'}), '(shape, device=device, dtype=dtype)\n', (7412, 7447), False, 'import torch\n'), ((6305, 6499), 'warnings.warn', 'warnings.warn', (['"""\n Use of uninitialized lazy parameter in Optimizer has been detected.\n Maybe you forgot to run forward before passing `module.parameters()` to the optimizer?"""'], {}), '(\n """\n Use of uninitialized lazy parameter in Optimizer has been detected.\n Maybe you forgot to run forward before passing `module.parameters()` to the optimizer?"""\n )\n', (6318, 6499), False, 'import warnings\n'), ((1723, 1739), 'torch.Tensor', 'torch.Tensor', (['[]'], {}), '([])\n', (1735, 1739), False, 'import torch\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from copy import deepcopy
from functools import wraps
from flask import request
from src.misc.render import json_detail_render
from config.settings import YML_JSON, logger
import datetime,json
def transfer(column):
    """Decorator factory: map a list of ORM-style row objects to dicts.

    ``column`` is a ``|``-separated spec; each entry is a one-character
    type prefix followed by an attribute name:

        ``?`` key column (string default ``""``)
        ``!`` string  (default ``""``)
        ``@`` list    (default ``[]``)
        ``#`` dict    (default ``{}``)
        ``$`` bool    (default ``False``)

    The wrapped function must return a list of objects; each object's
    attributes are read with ``getattr`` and merged into a fresh copy of
    the per-type default template.

    Raises:
        TypeError: if the wrapped function does not return a list.
    """
    def dec(func):
        @wraps(func)
        def _(*args, **kwargs):
            tmap = {
                '?': "",
                '!': "",
                '@': [],
                '#': {},
                '$': False,
            }
            result = func(*args, **kwargs)
            if not isinstance(result, list):
                # BUG FIX: the original did ``raise('should be a list')``,
                # which raises a plain string — itself a TypeError
                # ("exceptions must derive from BaseException").
                raise TypeError('should be a list')
            cols = [i.strip() for i in column.split('|')]
            # Attribute names without the one-char type prefix.
            # (Ported from Python-2 ``map``/indexable ``filter``; the
            # unused ``key_col`` lookup was dropped.)
            pure_cols = [c[1:] for c in cols]
            template = {col[1:]: tmap.get(col[0]) for col in cols}
            tdata = [{item: getattr(res, item) for item in pure_cols}
                     for res in result]
            data = []
            for d in tdata:
                tpl = deepcopy(template)
                for k, v in d.items():
                    if isinstance(tpl[k], str) and v:
                        tpl[k] = v
                    elif isinstance(tpl[k], list) and v:
                        tlist = deepcopy(tpl[k])
                        tlist.append(v)
                        tpl[k] = tlist
                    elif isinstance(tpl[k], dict) and v:
                        tdict = deepcopy(tpl[k])
                        tdict.update(v)
                        tpl[k] = tdict
                    elif isinstance(tpl[k], bool):
                        tpl[k] = bool(v)
                data.append(tpl)
            return data
        return _
    return dec
def transfer2json(column):
    """
    ? : key
    ! : string
    @ : list
    # : dict
    $ : bool
    & : tuple
    """
    # NOTE(review): this function is Python-2 only — it uses
    # ``dict.iteritems``, ``basestring`` and an indexable ``filter``
    # result; under Python 3 it fails at call time. Rows returned by the
    # wrapped function are merged by the ``?``-prefixed key column.
    def dec(func):
        @wraps(func)
        def _(*args, **kwargs):
            # Per-prefix default values; '~' marks a JSON-encoded list
            # column that is decoded with json.loads.
            tmap = {
                '?': "",
                '!': "",
                '@': [],
                '#': {},
                '$': False,
                '&': (),
                '~': ['~'],
            }
            result = func(*args, **kwargs)
            if not isinstance(result, list):
                # NOTE(review): raising a plain string is itself a
                # TypeError at runtime; should be ``raise TypeError(...)``.
                raise('should be a list')
            cols = [i.strip() for i in column.split('|')]
            # Attribute names without the one-char type prefix.
            pure_cols = map(lambda x : x[1:], cols)
            # Template dict: attribute name -> per-type default value.
            template = {col[1:]: tmap.get(col[0]) for col in cols}
            key_col = filter(lambda x: '?' in x, cols)[0][1:]
            # One dict per row, populated from the database objects.
            tdata = [{item: getattr(res, item) for item in pure_cols} for res in result]
            data = []
            for d in tdata:
                # Look for an already-emitted record with the same key.
                fu = [i for i in data if i.get(key_col) == d.get(key_col)]
                if len(fu) == 0:
                    # First occurrence of this key: fill a fresh template.
                    tpl = deepcopy(template)
                    for k,v in d.iteritems():
                        if isinstance(tpl[k], basestring) and v!=None:
                            tpl[k] = v
                        elif tpl[k]==['~']:
                            tjlist = json.loads(v) if v else []
                            tpl[k] = tjlist
                        elif isinstance(tpl[k], list) and v:
                            tlist = deepcopy(tpl[k])
                            tlist.append(v)
                            tpl[k] = tlist
                        elif isinstance(tpl[k], dict) and v:
                            tdict = deepcopy(tpl[k])
                            tdict.update(v)
                            tpl[k] = tdict
                        elif isinstance(tpl[k], bool):
                            t = deepcopy(tpl[k])
                            t = bool(v)
                            tpl[k] = t
                        elif isinstance(tpl[k], tuple) and v:
                            tlist = deepcopy(tpl[k])
                            tmp = []
                            tmp.append(v)
                            tlist += tuple(tmp)
                            tpl[k] = tlist
                    data.append(tpl)
                else:
                    # Duplicate key: merge this row into the existing record.
                    fu = fu[0]
                    for k,v in d.iteritems():
                        if isinstance(fu[k], basestring) and v:
                            fu[k] = v
                        elif isinstance(fu[k], list) and v:
                            tlist = deepcopy(fu[k])
                            tlist.append(v)
                            # De-duplicate while preserving first-seen order.
                            fu[k] = list(set(tlist))
                            fu[k].sort(key=tlist.index)
                        elif isinstance(fu[k], dict) and v:
                            tdict = deepcopy(fu[k])
                            tdict.update(v)
                            fu[k] = tdict
                        elif isinstance(tpl[k], bool):
                            # NOTE(review): this branch reads/writes ``tpl``
                            # (leftover from the previous loop iteration)
                            # instead of ``fu`` — looks like a latent bug.
                            t = deepcopy(tpl[k])
                            t = bool(v)
                            tpl[k] = t
                        elif isinstance(fu[k], tuple) and v:
                            tlist = deepcopy(fu[k])
                            tmp = []
                            tmp.append(v)
                            tlist += tuple(tmp)
                            fu[k] = tuple(tlist)
            return data
        return _
    return dec
def transfer2jsonwithoutset(column, ispagination=False):
    """Decorator factory: convert ORM rows to dicts, merging rows that
    share the same key column (no de-duplication of list entries).

    Prefix codes in the ``|``-separated ``column`` spec:
        ``?`` key  ``!`` string  ``@`` list  ``#`` dict  ``$`` bool
        ``&`` tuple  ``~`` JSON-encoded list (decoded via json.loads)

    When ``ispagination`` is True the wrapped function must return a
    ``(rows, count)`` tuple and the decorated function returns
    ``(data, count)``; otherwise just ``data``.

    Raises:
        TypeError: if the wrapped function does not return a list.
    """
    def dec(func):
        @wraps(func)
        def _(*args, **kwargs):
            t_map = {
                '?': "key",
                '!': "",
                '@': [],
                '#': {},
                '$': False,
                '&': (),
                '~': ['~'],
            }
            count = 0
            if ispagination:
                results, count = func(*args, **kwargs)
            else:
                results = func(*args, **kwargs)
            if not isinstance(results, list):
                raise TypeError('should be a list')
            # Raw spec entries (with prefix) and name -> default template.
            origin_keys = [key.strip() for key in column.split('|')]
            result_map = {
                key.strip()[1:]: t_map.get(key.strip()[0])
                for key in column.split('|')
            }
            # The '?'-prefixed column acts as the merge key.
            key_of_data = list(filter(lambda x: '?' in x, origin_keys))[0][1:]
            results_dict = [{item: getattr(result, item) for item in result_map.keys()} for result in results]
            data = {}
            for result in results_dict:
                if result.get(key_of_data) in data.keys():
                    # Merge this row into the existing record with the
                    # same key value.
                    temp = data.get(result.get(key_of_data))
                    for key, value in result_map.items():
                        data_value = result.get(key)
                        if isinstance(value, str) and data_value:
                            temp[key] = data_value
                        elif isinstance(value, list) and data_value:
                            t_list = deepcopy(temp[key])
                            t_list.append(data_value)
                            temp[key] = list(t_list)
                        elif isinstance(value, dict) and data_value:
                            tdict = deepcopy(temp[key])
                            tdict.update(data_value)
                            temp[key] = tdict
                        elif isinstance(value, bool):
                            temp[key] = bool(data_value)
                        elif isinstance(value, tuple) and data_value:
                            t_list = deepcopy(temp[key])
                            t_list += tuple([data_value])
                            temp[key] = tuple(t_list)
                else:
                    # First occurrence of this key: fill a fresh template.
                    temp = deepcopy(result_map)
                    for key, value in result_map.items():
                        data_value = result.get(key)
                        if isinstance(value, str) and data_value is not None:
                            temp[key] = data_value
                        elif value == ['~']:
                            temp[key] = json.loads(data_value) if data_value else []
                        elif isinstance(value, list) and data_value:
                            temp[key] = [data_value]
                        elif isinstance(value, dict) and data_value:
                            # BUG FIX: the original wrote ``{data_value}`` —
                            # a *set* literal — which raises TypeError for
                            # dict values (unhashable) and breaks the later
                            # ``.update()`` merge; store a dict copy instead.
                            temp[key] = dict(data_value)
                        elif isinstance(value, bool):
                            temp[key] = bool(data_value)
                        elif isinstance(value, tuple) and data_value:
                            temp[key] = tuple([data_value])
                    data.update({result.get(key_of_data): temp})
            if ispagination:
                return list(data.values()), count
            else:
                return list(data.values())
        return _
    return dec
def slicejson(settings):
    """Decorator factory that zips pairs of parallel list columns into a
    single list-of-dicts column.

    Each entry of ``settings`` is ``"new_key|k1|k2|src1|src2"``: the two
    source lists ``src1``/``src2`` are zipped element-wise into dicts
    ``{k1: a, k2: b}``, the source keys are removed, and the merged list
    is stored under ``new_key`` in every returned record.
    """
    parsed = [setting.split('|') for setting in settings]
    def _slicejson(records):
        for new_key, k1, k2, src1, src2 in parsed:
            for record in records:
                merged = [
                    {k1: a, k2: b}
                    for a, b in zip(record.get(src1), record.get(src2))
                ]
                del record[src1]
                del record[src2]
                record[new_key] = merged
        return records
    def wrapper(func):
        @wraps(func)
        def _(*args, **kwargs):
            return _slicejson(func(*args, **kwargs))
        return _
    return wrapper
def validation(validate_name = None):
    # Decorator factory that validates the JSON body of the current Flask
    # request against rules loaded from YML_JSON.
    # ``validate_name`` has the form "<HTTP-METHOD>:<rule-name>"; the rule
    # maps field names to constraint dicts handled by the helpers below.
    # On the first failing constraint the decorated view is skipped and a
    # json_detail_render error response is returned instead.
    def validate_required(key, value):
        # Code 201 when *key* is missing from the JSON body.
        request_value = request.json.get(key)
        expect_value = value  # NOTE(review): unused in this helper
        if request_value is None:
            return False, json_detail_render(201, [], "{} is required".format(key))
        return True, 1
    def validate_min_length(key, value):
        # Code 202 when the value is present but shorter than *value*.
        request_value = request.json.get(key)
        expect_value = value
        if request_value is not None and len(request_value) < expect_value:
            return False, json_detail_render(202, [], "{} min length is {}".format(key, value))
        return True, 1
    def validate_max_length(key, value):
        # Code 202 when the value is present but longer than *value*.
        request_value = request.json.get(key)
        expect_value = value
        if request_value is not None and len(request_value) > expect_value:
            return False, json_detail_render(202, [], "{} max length is {}".format(key, value))
        return True, 1
    def validate_type(key, value):
        # Code 203 when the value is present but not of the named type.
        # NOTE(review): ``basestring`` exists only on Python 2; on
        # Python 3 referencing it here raises NameError at call time.
        ttype_dict = {
            'list': list,
            'basestring': basestring,
            'dict': dict,
            'int': int,
            'bool': bool,
        }
        request_value = request.json.get(key)
        expect_value = value
        if request_value is not None and not isinstance(request_value, ttype_dict.get(value)):
            return False, json_detail_render(203, [], "{} should be a {}".format(key, value))
        return True, 1
    # Maps constraint names in the YAML rules to validator helpers.
    KEY_FUNC_MAP = {
        'required': validate_required,
        'min_length': validate_min_length,
        'max_length': validate_max_length,
        'type': validate_type,
    }
    def wrapper(func):
        @wraps(func)
        def _(*args, **kwargs):
            protocol, vname = validate_name.split(':')
            # Only validate when the request method matches the spec.
            if request.method == protocol:
                all_json = YML_JSON
                validate_json = deepcopy(all_json.get(vname))
                # Presumably 'returnvalue' describes the response rather
                # than an input field — verify against the YML_JSON schema.
                del validate_json['returnvalue']
                for item, settings in validate_json.items():
                    for key, value in settings.items():
                        f = KEY_FUNC_MAP.get(key)
                        ret = f(item, value)
                        if not ret[0]:
                            return ret[1]
            return func(*args, **kwargs)
        return _
    return wrapper
| [
"flask.request.json.get",
"json.loads",
"functools.wraps",
"copy.deepcopy"
] | [((299, 310), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (304, 310), False, 'from functools import wraps\n'), ((2046, 2057), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (2051, 2057), False, 'from functools import wraps\n'), ((5740, 5751), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (5745, 5751), False, 'from functools import wraps\n'), ((9864, 9875), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (9869, 9875), False, 'from functools import wraps\n'), ((10134, 10155), 'flask.request.json.get', 'request.json.get', (['key'], {}), '(key)\n', (10150, 10155), False, 'from flask import request\n'), ((10401, 10422), 'flask.request.json.get', 'request.json.get', (['key'], {}), '(key)\n', (10417, 10422), False, 'from flask import request\n'), ((10722, 10743), 'flask.request.json.get', 'request.json.get', (['key'], {}), '(key)\n', (10738, 10743), False, 'from flask import request\n'), ((11215, 11236), 'flask.request.json.get', 'request.json.get', (['key'], {}), '(key)\n', (11231, 11236), False, 'from flask import request\n'), ((11709, 11720), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (11714, 11720), False, 'from functools import wraps\n'), ((1073, 1091), 'copy.deepcopy', 'deepcopy', (['template'], {}), '(template)\n', (1081, 1091), False, 'from copy import deepcopy\n'), ((3064, 3082), 'copy.deepcopy', 'deepcopy', (['template'], {}), '(template)\n', (3072, 3082), False, 'from copy import deepcopy\n'), ((8180, 8200), 'copy.deepcopy', 'deepcopy', (['result_map'], {}), '(result_map)\n', (8188, 8200), False, 'from copy import deepcopy\n'), ((1325, 1341), 'copy.deepcopy', 'deepcopy', (['tpl[k]'], {}), '(tpl[k])\n', (1333, 1341), False, 'from copy import deepcopy\n'), ((1514, 1530), 'copy.deepcopy', 'deepcopy', (['tpl[k]'], {}), '(tpl[k])\n', (1522, 1530), False, 'from copy import deepcopy\n'), ((4629, 4644), 'copy.deepcopy', 'deepcopy', (['fu[k]'], {}), '(fu[k])\n', (4637, 4644), False, 'from copy import deepcopy\n'), 
((7342, 7361), 'copy.deepcopy', 'deepcopy', (['temp[key]'], {}), '(temp[key])\n', (7350, 7361), False, 'from copy import deepcopy\n'), ((1693, 1709), 'copy.deepcopy', 'deepcopy', (['tpl[k]'], {}), '(tpl[k])\n', (1701, 1709), False, 'from copy import deepcopy\n'), ((3325, 3338), 'json.loads', 'json.loads', (['v'], {}), '(v)\n', (3335, 3338), False, 'import datetime, json\n'), ((3496, 3512), 'copy.deepcopy', 'deepcopy', (['tpl[k]'], {}), '(tpl[k])\n', (3504, 3512), False, 'from copy import deepcopy\n'), ((4899, 4914), 'copy.deepcopy', 'deepcopy', (['fu[k]'], {}), '(fu[k])\n', (4907, 4914), False, 'from copy import deepcopy\n'), ((7578, 7597), 'copy.deepcopy', 'deepcopy', (['temp[key]'], {}), '(temp[key])\n', (7586, 7597), False, 'from copy import deepcopy\n'), ((8529, 8551), 'json.loads', 'json.loads', (['data_value'], {}), '(data_value)\n', (8539, 8551), False, 'import datetime, json\n'), ((3701, 3717), 'copy.deepcopy', 'deepcopy', (['tpl[k]'], {}), '(tpl[k])\n', (3709, 3717), False, 'from copy import deepcopy\n'), ((5092, 5108), 'copy.deepcopy', 'deepcopy', (['tpl[k]'], {}), '(tpl[k])\n', (5100, 5108), False, 'from copy import deepcopy\n'), ((3896, 3912), 'copy.deepcopy', 'deepcopy', (['tpl[k]'], {}), '(tpl[k])\n', (3904, 3912), False, 'from copy import deepcopy\n'), ((5289, 5304), 'copy.deepcopy', 'deepcopy', (['fu[k]'], {}), '(fu[k])\n', (5297, 5304), False, 'from copy import deepcopy\n'), ((7956, 7975), 'copy.deepcopy', 'deepcopy', (['temp[key]'], {}), '(temp[key])\n', (7964, 7975), False, 'from copy import deepcopy\n'), ((4094, 4110), 'copy.deepcopy', 'deepcopy', (['tpl[k]'], {}), '(tpl[k])\n', (4102, 4110), False, 'from copy import deepcopy\n')] |
#!/usr/bin/env python3
###############################################################################
# Copyright 2018 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# @file to run it, change the modules/common/configs/config_gflags.cc to use sunnyvale_with_two_offices
from open_space_roi_interface import *
import matplotlib.pyplot as plt
# initialize object
# NOTE(review): this rebinds the imported name ``open_space_roi`` (the
# class/factory from the star import) to its instance, shadowing the class.
open_space_roi = open_space_roi()
lane_id = "11564dup1_1_-1"
parking_id = "11543"
num_output_buffer = 50
# ctypes double arrays used as output buffers for the native ROITest call
# (``c_double`` comes from the star import above — presumably ctypes).
unrotated_roi_boundary_x = (c_double * num_output_buffer)()
roi_boundary_x = (c_double * num_output_buffer)()
parking_spot_x = (c_double * num_output_buffer)()
unrotated_roi_boundary_y = (c_double * num_output_buffer)()
roi_boundary_y = (c_double * num_output_buffer)()
parking_spot_y = (c_double * num_output_buffer)()
end_pose = (c_double * num_output_buffer)()
xy_boundary = (c_double * num_output_buffer)()
origin_pose = (c_double * num_output_buffer)()
# Run the ROI test; all arrays are filled in place by the native code.
if not open_space_roi.ROITest(lane_id, parking_id,
                              unrotated_roi_boundary_x, unrotated_roi_boundary_y, roi_boundary_x, roi_boundary_y,
                              parking_spot_x, parking_spot_y, end_pose,
                              xy_boundary, origin_pose):
    print("open_space_roi fail")
result_unrotated_roi_boundary_x = []
result_unrotated_roi_boundary_y = []
result_roi_boundary_x = []
result_roi_boundary_y = []
result_parking_spot_x = []
result_parking_spot_y = []
result_end_pose = []
result_xy_boundary = []
result_origin_pose = []
# Copy the first 10 boundary points out of the C buffers and print them.
print("vertices of obstacles")
for i in range(0, 10):
    result_unrotated_roi_boundary_x.append(float(unrotated_roi_boundary_x[i]))
    result_unrotated_roi_boundary_y.append(float(unrotated_roi_boundary_y[i]))
    result_roi_boundary_x.append(float(roi_boundary_x[i]))
    result_roi_boundary_y.append(float(roi_boundary_y[i]))
    print(str(float(roi_boundary_x[i])))
    print(str(float(roi_boundary_y[i])))
print("parking spot")
for i in range(0, 4):
    result_parking_spot_x.append(float(parking_spot_x[i]))
    result_parking_spot_y.append(float(parking_spot_y[i]))
print("end_pose in x,y,phi,v")
for i in range(0, 4):
    print(str(float(end_pose[i])))
print("xy_boundary in xmin xmax ymin ymax")
for i in range(0, 4):
    print(str(float(xy_boundary[i])))
print("origin_pose")
for i in range(0, 2):
    print(str(float(origin_pose[i])))
# Plot the unrotated boundary + parking spot (top) and the rotated ROI
# boundary (bottom).
fig = plt.figure()
ax1 = fig.add_subplot(211)
ax1.scatter(result_unrotated_roi_boundary_x, result_unrotated_roi_boundary_y)
ax1.scatter(result_parking_spot_x, result_parking_spot_y)
ax2 = fig.add_subplot(212)
ax2.scatter(result_roi_boundary_x, result_roi_boundary_y)
plt.gca().set_aspect('equal', adjustable='box')
plt.show()
| [
"matplotlib.pyplot.gca",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.show"
] | [((2983, 2995), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2993, 2995), True, 'import matplotlib.pyplot as plt\n'), ((3292, 3302), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3300, 3302), True, 'import matplotlib.pyplot as plt\n'), ((3244, 3253), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3251, 3253), True, 'import matplotlib.pyplot as plt\n')] |
import sys
sys.path.append('./train_model')
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
import torchvision.transforms as transforms
import numpy as np
import os
import argparse
parser = argparse.ArgumentParser(description='Adaptive Network Slimming')
parser.add_argument('-net', type=str, help='pretrained pkl file')
parser.add_argument('--nonuniform', action='store_true', help='set non-uniform pruning rate')
args = parser.parse_args()
# from models import *
# CIFAR-10 test pipeline with the standard per-channel normalization stats.
transform_test = transforms.Compose(
    [transforms.ToTensor(),
     transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))])
testset = torchvision.datasets.CIFAR10(root='./cifar10',train=False,download=True,transform=transform_test)
testloader = torch.utils.data.DataLoader(testset,batch_size=128,shuffle=False)
classes = ('plane','car','bird','cat','deer','dog','frog','horse','ship','truck')
input_shape = (3,32,32)
# Per-architecture pruning-rate range [START, END) and the number of
# prunable layers (netnum - 1 conv layers + final FC).
if args.net == "resnet18":
    START = 0.2
    END = 0.81
    netnum = 18
elif args.net == "resnet34":
    START = 0.2
    END = 0.81
    netnum = 34
elif args.net == "resnet50":
    START = 0.2
    END = 0.8
    netnum = 50
elif args.net == "resnet101":
    START = 0.2
    END = 0.8
    netnum = 101
elif args.net == "resnet152":
    START = 0.21
    END = 0.79
    netnum = 152
if args.nonuniform:
    # One linearly increasing rate per layer; the FC layer gets END.
    PRUNE_RATE = np.arange(START,END,(END-START)/(netnum-1))
    FC_PRUNE_RATE = END
    Model_Name = "ResNet" + str(netnum) + " (Non-uniform Pruning Rate)"
else:
    # Flat 50% everywhere.
    PRUNE_RATE = np.zeros([netnum-1,1]) + 0.5
    FC_PRUNE_RATE = 0.5
    Model_Name = "ResNet" + str(netnum) + " (Uniform Pruning Rate)"
# -------------- Load Pretrained Model---------------
File_Name = "./model_pkl/" + args.net + ".pkl"
net = torch.load(File_Name, map_location= "cpu")
def RunData():
    # Evaluate the module-level ``net`` on the CIFAR-10 test loader and
    # return top-1 accuracy in percent. The model is moved to GPU for
    # inference and back to CPU afterwards.
    correct = 0
    total = 0
    with torch.no_grad():
        net.eval()
        net.cuda()
        for (x,y) in testloader:
            xa = x.cuda()
            ya = y.cuda()  # NOTE(review): unused; comparison below uses CPU tensors
            out = net(xa)
            _,predicted = torch.max(out.data,1)
            total += y.size(0)
            correct += (predicted.cpu() == y).sum()
        net.cpu()
    Accuracy = 100*correct.cpu().numpy()/total
    return Accuracy
def RunData2():
    # Same evaluation as RunData(), but first disables running-stat
    # tracking on every BatchNorm2d layer before scoring the pruned net.
    # NOTE(review): with track_running_stats=False BatchNorm presumably
    # switches to batch statistics during eval — confirm this is intended.
    correct = 0
    total = 0
    for _,layer in net.named_modules():
        if isinstance(layer,nn.BatchNorm2d):
            layer.track_running_stats=False
    with torch.no_grad():
        net.eval()
        net.cuda()
        for (x,y) in testloader:
            xa = x.cuda()
            ya = y.cuda()  # NOTE(review): unused; comparison below uses CPU tensors
            out = net(xa)
            _,predicted = torch.max(out.data,1)
            total += y.size(0)
            correct += (predicted.cpu() == y).sum()
        net.cpu()
    Accuracy = 100*correct.cpu().numpy()/total
    return Accuracy
def prune_filter(layer, PRUNE_RATE):
    """Return a 0/1 mask over a conv weight tensor that zeroes out the
    filters whose L1 norm is below the PRUNE_RATE quantile of all
    filter norms (norms are taken over every axis except the first).
    """
    l1_norms = np.sum(np.abs(layer), axis=(1, 2, 3))
    cutoff = np.sort(l1_norms)[int(np.floor(PRUNE_RATE * len(l1_norms)))]
    mask = np.ones(layer.shape)
    for idx, norm in enumerate(l1_norms):
        if norm < cutoff:
            mask[idx, :] = 0
    return mask
def prune_weight(layer, PRUNE_RATE):
    """Return a 0/1 mask over a 2-D weight matrix that zeroes the
    entries whose absolute value is below the PRUNE_RATE quantile of
    all magnitudes.
    """
    magnitudes = np.sort(np.abs(layer).flatten())
    cutoff = magnitudes[int(np.floor(PRUNE_RATE * len(magnitudes)))]
    mask = np.ones(layer.shape)
    for row in range(layer.shape[0]):
        for col in range(layer.shape[1]):
            if abs(layer[row][col]) < cutoff:
                mask[row][col] = 0
    return mask
def Calculate_flop():
    # Estimate the FLOP count of one forward pass of the module-level
    # ``net``, excluding filters/weights that are exactly zero (i.e.
    # already pruned) and skipping shortcut convolutions.
    FLOP = 0
    # Spatial size of the current feature map; starts at the input H/W.
    shape = input_shape[1]
    for name,layer in net.named_modules():
        if isinstance(layer,nn.Conv2d) and 'shortcut' not in name:
            filter_data = layer.weight.data.numpy()
            # Number of fully-zeroed (pruned) filters in this layer.
            skip = sum(np.sum(abs(filter_data),axis = (1,2,3)) == 0)
            filter_shape = layer.weight.data.numpy().shape
            padding = layer.padding
            stride = layer.stride
            n = filter_shape[1] * filter_shape[2] * filter_shape[3] # vector length
            fpn = n + (n - 1) # n multiplication, n-1 addition
            # Output spatial size from the standard conv size formula.
            step_x = np.floor(((shape - filter_shape[2] + 2 * padding[0]) / stride[0]) + 1)
            shape = step_x
            step = step_x**2
            fpf = fpn*step
            FLOP += fpf*(filter_shape[0] - skip)
        elif isinstance(layer,nn.Linear):
            filter_data = layer.weight.data.numpy()
            # Count individually-zeroed weights in the FC layer.
            skip = sum(sum(filter_data == 0))
            filter_shape = layer.weight.data.numpy().shape
            FLOP += 2 * (filter_shape[0] * filter_shape[1] - skip)
    return FLOP
# Baseline accuracy and FLOPs before pruning.
ACC_before = RunData()
print("Model Name: " + Model_Name)
print("Accuracy : " + str(ACC_before) + "%")
FLOP_before = Calculate_flop()
if FLOP_before / 1e9 > 1: # for Giga Flops
    print("FLOP : %4.2f GFLOP" % (FLOP_before / 1e9))
else:
    print("FLOP : %4.2f MFLOP" % (FLOP_before / 1e6))
print(" ")
print(" Start Pruning ")
print("---------------------------------------------------")
print("|Layer| FLOP |#Filter or #Weight|Pruning |Filter|")
print("| No. | Save | before/after | Type | Size |")
print("|-----|--------|------------------|--------|------|")
# pruning
# Apply filter pruning to every non-shortcut conv layer (per-layer rate
# from PRUNE_RATE) and weight pruning to linear layers (FC_PRUNE_RATE),
# writing the masked weights back into the model in place.
TOTAL_WEIGHT = 0
PRUNE_WEIGHT = 0
i = 0
for parname,layer in net.named_modules():
    if isinstance(layer,nn.Conv2d) and 'shortcut' not in parname:
        par = layer.weight.data.numpy()
        par_size = par.shape
        mask = prune_filter(par,PRUNE_RATE[i])
        par = (par * mask)
        # NOTE(review): ``par_size[2]*par_size[2]`` below looks like it was
        # meant to be ``par_size[2]*par_size[3]``; equivalent only for
        # square kernels — confirm.
        print("| %3i" % (i+1), "|"+
              " %5.2f" % float((1-(np.count_nonzero(mask)/mask.size)) * 100) + "% |"+
              " %4i" % int((mask.size-np.count_nonzero(mask))/(par_size[1]*par_size[2]*par_size[2])),"/",
              "%4i" % int(mask.size/(par_size[1]*par_size[2]*par_size[2])) + " | Filter |"+
              " %1ix%1i |" % (par_size[2], par_size[3]))
        TOTAL_WEIGHT = TOTAL_WEIGHT + (mask.size/(par_size[1]))
        PRUNE_WEIGHT = PRUNE_WEIGHT + ((mask.size-np.count_nonzero(mask))/(par_size[1]))
        i = i + 1
        layer.weight.data = torch.from_numpy(par).type(torch.FloatTensor)
    elif isinstance(layer,nn.Linear):
        par = layer.weight.data.numpy()
        par_size = par.shape
        mask = prune_weight(par,FC_PRUNE_RATE)
        par = (par * mask)
        print("| %3i" % (i+1), "|"+
              " %5.2f" % float((1-(np.count_nonzero(mask)/mask.size)) * 100) + "% |"+
              " %5i" % int(mask.size-np.count_nonzero(mask)),"/",
              "%5i" % int(mask.size) + " | Weight |" + " none |")
        TOTAL_WEIGHT = TOTAL_WEIGHT + (mask.size)
        PRUNE_WEIGHT = PRUNE_WEIGHT + (mask.size-np.count_nonzero(mask))
        i = i + 1
        layer.weight.data = torch.from_numpy(par).type(torch.FloatTensor)
print("---------------------------------------------------")
# Re-evaluate accuracy/FLOPs on the pruned model and report the savings.
ACC_after = RunData2()
FLOP_after = Calculate_flop()
print(" ")
print(" After Pruning ")
print("Accuracy : " + str(ACC_before) + "% -> " + str(ACC_after) + "%")
if FLOP_after / 1e9 > 1: # for Giga Flops
    if FLOP_before / 1e9 > 1: # for Giga Flops
        print("FLOP : %4.2f GFLOP" % (FLOP_before / 1e9) + " -> %4.2f GFLOP" % (FLOP_after / 1e9))
    else:
        print("FLOP : %4.2f MFLOP" % (FLOP_before / 1e6) + " -> %4.2f GFLOP" % (FLOP_after / 1e9))
else:
    if FLOP_before / 1e9 > 1: # for Giga Flops
        print("FLOP : %4.2f GFLOP" % (FLOP_before / 1e9) + " -> %4.2f MFLOP" % (FLOP_after / 1e6))
    else:
        print("FLOP : %4.2f MFLOP" % (FLOP_before / 1e6) + " -> %4.2f MFLOP" % (FLOP_after / 1e6))
print("FLOP save: %5.2f" % (100*(FLOP_before - FLOP_after)/FLOP_before),"%")
"numpy.ones",
"argparse.ArgumentParser",
"torch.load",
"numpy.sort",
"torch.max",
"numpy.floor",
"torch.from_numpy",
"numpy.count_nonzero",
"torchvision.datasets.CIFAR10",
"numpy.zeros",
"torchvision.transforms.Normalize",
"torch.utils.data.DataLoader",
"torch.no_grad",
"torchvision.transf... | [((12, 44), 'sys.path.append', 'sys.path.append', (['"""./train_model"""'], {}), "('./train_model')\n", (27, 44), False, 'import sys\n'), ((232, 296), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Adaptive Network Slimming"""'}), "(description='Adaptive Network Slimming')\n", (255, 296), False, 'import argparse\n'), ((664, 768), 'torchvision.datasets.CIFAR10', 'torchvision.datasets.CIFAR10', ([], {'root': '"""./cifar10"""', 'train': '(False)', 'download': '(True)', 'transform': 'transform_test'}), "(root='./cifar10', train=False, download=True,\n transform=transform_test)\n", (692, 768), False, 'import torchvision\n'), ((775, 842), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['testset'], {'batch_size': '(128)', 'shuffle': '(False)'}), '(testset, batch_size=128, shuffle=False)\n', (802, 842), False, 'import torch\n'), ((1761, 1802), 'torch.load', 'torch.load', (['File_Name'], {'map_location': '"""cpu"""'}), "(File_Name, map_location='cpu')\n", (1771, 1802), False, 'import torch\n'), ((1369, 1420), 'numpy.arange', 'np.arange', (['START', 'END', '((END - START) / (netnum - 1))'], {}), '(START, END, (END - START) / (netnum - 1))\n', (1378, 1420), True, 'import numpy as np\n'), ((2909, 2923), 'numpy.sort', 'np.sort', (['prune'], {}), '(prune)\n', (2916, 2923), True, 'import numpy as np\n'), ((2935, 2955), 'numpy.ones', 'np.ones', (['layer.shape'], {}), '(layer.shape)\n', (2942, 2955), True, 'import numpy as np\n'), ((3205, 3225), 'numpy.ones', 'np.ones', (['layer.shape'], {}), '(layer.shape)\n', (3212, 3225), True, 'import numpy as np\n'), ((551, 572), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (570, 572), True, 'import torchvision.transforms as transforms\n'), ((578, 649), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.4914, 0.4822, 0.4465)', '(0.2023, 0.1994, 0.201)'], {}), '((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.201))\n', (598, 649), 
True, 'import torchvision.transforms as transforms\n'), ((1532, 1557), 'numpy.zeros', 'np.zeros', (['[netnum - 1, 1]'], {}), '([netnum - 1, 1])\n', (1540, 1557), True, 'import numpy as np\n'), ((1859, 1874), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1872, 1874), False, 'import torch\n'), ((2423, 2438), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2436, 2438), False, 'import torch\n'), ((2051, 2073), 'torch.max', 'torch.max', (['out.data', '(1)'], {}), '(out.data, 1)\n', (2060, 2073), False, 'import torch\n'), ((2615, 2637), 'torch.max', 'torch.max', (['out.data', '(1)'], {}), '(out.data, 1)\n', (2624, 2637), False, 'import torch\n'), ((4049, 4117), 'numpy.floor', 'np.floor', (['((shape - filter_shape[2] + 2 * padding[0]) / stride[0] + 1)'], {}), '((shape - filter_shape[2] + 2 * padding[0]) / stride[0] + 1)\n', (4057, 4117), True, 'import numpy as np\n'), ((6100, 6121), 'torch.from_numpy', 'torch.from_numpy', (['par'], {}), '(par)\n', (6116, 6121), False, 'import torch\n'), ((6015, 6037), 'numpy.count_nonzero', 'np.count_nonzero', (['mask'], {}), '(mask)\n', (6031, 6037), True, 'import numpy as np\n'), ((6685, 6707), 'numpy.count_nonzero', 'np.count_nonzero', (['mask'], {}), '(mask)\n', (6701, 6707), True, 'import numpy as np\n'), ((6755, 6776), 'torch.from_numpy', 'torch.from_numpy', (['par'], {}), '(par)\n', (6771, 6776), False, 'import torch\n'), ((5681, 5703), 'numpy.count_nonzero', 'np.count_nonzero', (['mask'], {}), '(mask)\n', (5697, 5703), True, 'import numpy as np\n'), ((6490, 6512), 'numpy.count_nonzero', 'np.count_nonzero', (['mask'], {}), '(mask)\n', (6506, 6512), True, 'import numpy as np\n'), ((5589, 5611), 'numpy.count_nonzero', 'np.count_nonzero', (['mask'], {}), '(mask)\n', (5605, 5611), True, 'import numpy as np\n'), ((6400, 6422), 'numpy.count_nonzero', 'np.count_nonzero', (['mask'], {}), '(mask)\n', (6416, 6422), True, 'import numpy as np\n')] |
from flask import request
from ..utils.mcexceptions import AccessNotAllowedException
from . import apikeydb
from ..database.DB import DbConnection
_user_access_matrix = {}
_admins = []
def check(user, owner, project_id="Unknown"):
    """Raise AccessNotAllowedException unless *user* may access *project_id*."""
    if allowed(user, owner, project_id):
        return
    raise AccessNotAllowedException(project_id)
def reset():
    """Discard all cached access data (admin list and per-project user cache)."""
    global _admins
    _admins = []
    _user_access_matrix.clear()
def _user_in_owner_group(user, project_id):
    """Return True when *user* is an admin or belongs to the owner group
    of *project_id*.

    BUG FIX: the previous version initialised the cache entry and then
    recursed, discarding the recursive call's return value.  The cache
    is now refreshed directly via _load_user (which itself resets the
    per-project list), with no recursion; the observable result is the
    same, without the redundant extra pass.
    """
    if is_administrator(user):
        return True
    # Refresh the cache when the project is unknown or the user is not
    # (yet) listed for it; _load_user reloads the list from the database.
    if user not in _user_access_matrix.get(project_id, ()):
        _load_user(project_id)
    return _access_allowed(user, project_id)
def is_administrator(user):
    """Return True when *user* is in the (lazily loaded) admin list."""
    if not _admins:
        load_admins()
    return user in _admins
def get_admins():
    """Return the cached admin id list, loading it from the DB on first use."""
    if not _admins:
        load_admins()
    return _admins
def load_admins():
    """Populate the module-level admin list from the users table."""
    global _admins
    conn = DbConnection().connection()
    r = DbConnection().interface()
    # Users flagged as admins are indexed under 'admin' == True.
    rows = list(r.table('users').get_all(True, index='admin').run(conn))
    if rows:
        for row in rows:
            _admins.append(row['id'])
    else:
        _admins = []
def _load_user(project_id):
    """Refresh the cached user-id list for *project_id* from the access table."""
    conn = DbConnection().connection()
    r = DbConnection().interface()
    query = (r.table('access')
             .get_all(project_id, index='project_id')
             .pluck('user_id'))
    _user_access_matrix[project_id] = [row['user_id'] for row in query.run(conn)]
def _access_allowed(user, project_id):
    """Return True when *user* is in the cached list for *project_id*."""
    return user in _user_access_matrix[project_id]
def remove_user(user, project_id):
    """Drop *user* from the cached access list for *project_id* (no-op if absent).

    BUG FIX: the cached value is a list (built with ``append`` in
    ``_load_user``), so the previous dict-style ``.pop(user, None)``
    raised ``TypeError: pop expected at most 1 argument``; remove the
    entry by value instead.
    """
    if user in _user_access_matrix[project_id]:
        _user_access_matrix[project_id].remove(user)
def get_apiuser():
    """Resolve the requesting user from the ``apikey`` query parameter."""
    return apikeydb.apikey_user(request.args.get('apikey'))
def get_user():
    """Return the user making the current request (resolved via API key)."""
    return get_apiuser()
def allowed(user, owner, project_id):
    """Return True when *user* owns the project or is in its owner group."""
    return user == owner or _user_in_owner_group(user, project_id)
| [
"flask.request.args.get"
] | [((2056, 2082), 'flask.request.args.get', 'request.args.get', (['"""apikey"""'], {}), "('apikey')\n", (2072, 2082), False, 'from flask import request\n')] |
"""Represent AWS config settings"""
import json
from typing import Iterable, Mapping
import boto3
from datetime import datetime, timedelta
from data_model import OriginAndCallingPointNames
from .config import ConfigSettings
class AwsAppConfigSettings(ConfigSettings):
    """Represent a collection of config settings on AWS.

    Wraps three AppConfig sessions (origins/calling points, station CRS
    codes, Darwin) plus the Darwin token fetched from Secrets Manager.
    """
    def __init__(self):
        """Create an instance of `AwsAppConfigSettings`"""
        # Each session re-polls its AppConfig profile at most every 1800 s.
        self._origins_and_calling_points_config_session =\
            AwsAppConfigSession('stations_and_services_scraper', 'PROD',
                                'origins_and_calling_points', 1800)
        self._stations_crs_codes_config_session =\
            AwsAppConfigSession('stations_and_services_scraper', 'PROD',
                                'station_name_crs_code_mapping', 1800)
        self._darwin_config_session =\
            AwsAppConfigSession('stations_and_services_scraper', 'PROD',
                                'darwin', 1800)
        # The Darwin access token lives in Secrets Manager, not AppConfig;
        # fetched once at construction time.
        self._darwin_token =\
            json.loads(
                get_secret('darwin/token', 'eu-west-2')['SecretString']
            )['darwin_token']
    def get_services_origin_and_calling_point_names(self)\
            -> Iterable[OriginAndCallingPointNames]:
        """Get a collection of services' origin and calling point names
        :return: A collection of instances of `OriginAndCallingPointNames`
        """
        # Config payload is a JSON list of keyword dicts for the dataclass.
        setting = json.loads(
            self._origins_and_calling_points_config_session.get_config())
        return [OriginAndCallingPointNames(**s) for s in setting]
    def get_data_access_config(self) -> Mapping:
        """Get config for setting up data access
        :return: Data access configuration
        """
        return {
            'station_name_crs_code_mapping':
                self._stations_crs_codes_config_session.get_config(),
            'wsdl':
                json.loads(self._darwin_config_session.get_config())['wsdl'],
            'token': self._darwin_token
        }
    def get_data_publisher_config(self) -> Mapping:
        """Get config for setting up data publish
        :return: Data publish configuration
        """
        # No publisher-specific configuration is needed at present.
        return {}
class AwsAppConfigSession:
    """Represent an AWS app config session"""
    def __init__(self, application_identifier: str, environment: str,
                 profile_identifier: str, poll_interval_seconds: int):
        """Create an instance of `AwsAppConfigSession`
        :param application_identifier: Identifier of application
        related to the config
        :param environment: Environment of the config
        :param profile_identifier: Identifier of the config
        :param poll_interval_seconds: interval in seconds to pull config
        """
        self._application_identifier = application_identifier
        self._environment = environment
        self._profile_identifier = profile_identifier
        self._poll_interval_seconds = poll_interval_seconds
        # Lazily initialised on the first get_config() call.
        self._config = None
        self._config_token = None
    def get_config(self) -> str:
        """Get and return config

        Starts a session on first use; afterwards only re-polls AppConfig
        once the poll interval has elapsed, otherwise returns the cached
        configuration string.
        """
        if self._config is None or self._config_token is None:
            self._start_session_and_get_latest_config()
        elif self._is_poll_interval_passed():
            self._get_latest_config()
        return self._config
    def _start_session_and_get_latest_config(self):
        # Open an AppConfigData session, then fetch immediately.
        self._start_session()
        self._get_latest_config()
    def _start_session(self):
        self._client = boto3.client('appconfigdata')
        response = self._client.start_configuration_session(
            ApplicationIdentifier=self._application_identifier,
            EnvironmentIdentifier=self._environment,
            ConfigurationProfileIdentifier=self._profile_identifier,
            RequiredMinimumPollIntervalInSeconds=self._poll_interval_seconds
        )
        self._config_token = response['InitialConfigurationToken']
    def _get_latest_config(self):
        response = self._client.get_latest_configuration(
            ConfigurationToken=self._config_token)
        # Tokens are single-use: keep the next token and the (possibly
        # server-adjusted) poll interval for the following request.
        self._config_token = response['NextPollConfigurationToken']
        self._poll_interval_seconds =\
            int(response['NextPollIntervalInSeconds'])
        self._last_get_latest_config_time = datetime.utcnow()
        self._update_config(response)
    def _update_config(self, response):
        config_bytes = response['Configuration'].read()
        # An empty payload means "unchanged since last poll"; keep the
        # previously cached configuration in that case.
        if config_bytes != b'':
            self._config = config_bytes.decode('utf-8')
    def _is_poll_interval_passed(self):
        # True once poll_interval_seconds have elapsed since the last fetch.
        config_expiry_time = self._last_get_latest_config_time\
            + timedelta(seconds=self._poll_interval_seconds)
        return config_expiry_time < datetime.utcnow()
def get_secret(secret_name: str, region_name: str) -> dict:
    """Fetch a secret value response from AWS Secrets Manager.

    :param secret_name: Name/id of the secret to retrieve
    :param region_name: AWS region hosting the secret
    :return: The raw ``get_secret_value`` response dict
    """
    client = boto3.session.Session().client(
        service_name='secretsmanager',
        region_name=region_name)
    return client.get_secret_value(SecretId=secret_name)
| [
"boto3.client",
"boto3.session.Session",
"datetime.datetime.utcnow",
"datetime.timedelta",
"data_model.OriginAndCallingPointNames"
] | [((4863, 4886), 'boto3.session.Session', 'boto3.session.Session', ([], {}), '()\n', (4884, 4886), False, 'import boto3\n'), ((3528, 3557), 'boto3.client', 'boto3.client', (['"""appconfigdata"""'], {}), "('appconfigdata')\n", (3540, 3557), False, 'import boto3\n'), ((4309, 4326), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4324, 4326), False, 'from datetime import datetime, timedelta\n'), ((1518, 1549), 'data_model.OriginAndCallingPointNames', 'OriginAndCallingPointNames', ([], {}), '(**s)\n', (1544, 1549), False, 'from data_model import OriginAndCallingPointNames\n'), ((4686, 4732), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'self._poll_interval_seconds'}), '(seconds=self._poll_interval_seconds)\n', (4695, 4732), False, 'from datetime import datetime, timedelta\n'), ((4769, 4786), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4784, 4786), False, 'from datetime import datetime, timedelta\n')] |
import logging
logging.basicConfig(level=logging.DEBUG)
def log(*a):
    """Write all arguments, converted to str and joined by spaces, at INFO level."""
    parts = [str(item) for item in a]
    logging.info(' '.join(parts))
warn = logging.warn
| [
"logging.basicConfig"
] | [((20, 60), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (39, 60), False, 'import logging\n')] |
#!/usr/bin/env python3
"""Named tuple example."""
from collections import namedtuple
# Build the record type; a list of field names is equivalent to the
# space-separated string form.
Car = namedtuple('Car', ['color', 'mileage'])

# Our new "Car" class works as expected:
MY_CAR = Car(color='red', mileage=3812.4)
print(MY_CAR.color)
print(MY_CAR.mileage)

# We get a nice string repr for free:
print(MY_CAR)

# Named tuples are immutable, so assigning to a field raises AttributeError.
try:
    MY_CAR.color = 'blue'
except AttributeError as inst:
    print(type(inst))  # the exception instance
    print(inst.args)  # arguments stored in .args
    print(inst)
finally:
    print("Into finally")
| [
"collections.namedtuple"
] | [((92, 126), 'collections.namedtuple', 'namedtuple', (['"""Car"""', '"""color mileage"""'], {}), "('Car', 'color mileage')\n", (102, 126), False, 'from collections import namedtuple\n')] |
"""Test Open Peer Power config flow for BleBox devices."""
from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch
import blebox_uniapi
import pytest
from openpeerpower import config_entries, data_entry_flow
from openpeerpower.components.blebox import config_flow
from openpeerpower.setup import async_setup_component
from .conftest import mock_config, mock_only_feature, setup_product_mock
def create_valid_feature_mock(path="openpeerpower.components.blebox.Products"):
    """Return a valid, complete BleBox feature mock.

    *path* selects which ``Products`` reference gets patched by the
    product mock helper.
    """
    feature = mock_only_feature(
        blebox_uniapi.cover.Cover,
        unique_id="BleBox-gateBox-1afe34db9437-0.position",
        full_name="gateBox-0.position",
        device_class="gate",
        state=0,
        async_update=AsyncMock(),
        current=None,
    )
    product = setup_product_mock("covers", [feature], path)
    # Device metadata exposed as read-only properties on the product mock.
    metadata = {
        "name": "My gate controller",
        "model": "gateController",
        "type": "gateBox",
        "brand": "BleBox",
        "firmware_version": "1.23",
        "unique_id": "abcd0123ef5678",
    }
    for attribute, value in metadata.items():
        setattr(type(product), attribute, PropertyMock(return_value=value))
    return feature
@pytest.fixture(name="valid_feature_mock")
def valid_feature_mock_fixture():
    """Return a valid, complete BleBox feature mock."""
    # Uses the default patch path targeting the integration's Products.
    return create_valid_feature_mock()
@pytest.fixture(name="flow_feature_mock")
def flow_feature_mock_fixture():
    """Return a mocked user flow feature."""
    # Same mock as valid_feature_mock, but patched at the config-flow
    # module's Products reference instead of the integration's.
    return create_valid_feature_mock(
        "openpeerpower.components.blebox.config_flow.Products"
    )
async def test_flow_works(opp, valid_feature_mock, flow_feature_mock):
    """Test that config flow works."""
    # Step 1: starting the flow must show the user form.
    result = await opp.config_entries.flow.async_init(
        config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["step_id"] == "user"
    # Step 2: submitting host/port must create an entry titled with the
    # mocked product name.
    result = await opp.config_entries.flow.async_init(
        config_flow.DOMAIN,
        context={"source": config_entries.SOURCE_USER},
        data={config_flow.CONF_HOST: "192.168.127.12", config_flow.CONF_PORT: 80},
    )
    assert result["type"] == "create_entry"
    assert result["title"] == "My gate controller"
    assert result["data"] == {
        config_flow.CONF_HOST: "192.168.127.12",
        config_flow.CONF_PORT: 80,
    }
@pytest.fixture(name="product_class_mock")
def product_class_mock_fixture():
    """Return a mocked feature."""
    path = "openpeerpower.components.blebox.config_flow.Products"
    # Positional args map to patch(target, new=DEFAULT, spec=Products,
    # create=True, spec_set=True); yielded unstarted so each test enters
    # it itself via ``with``.
    patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True)
    yield patcher
async def test_flow_with_connection_failure(opp, product_class_mock):
    """Test that an unreachable device maps to the ``cannot_connect`` error."""
    with product_class_mock as products_class:
        # Simulate the device being unreachable during product detection.
        products_class.async_from_host = AsyncMock(
            side_effect=blebox_uniapi.error.ConnectionError
        )
        result = await opp.config_entries.flow.async_init(
            config_flow.DOMAIN,
            context={"source": config_entries.SOURCE_USER},
            data={config_flow.CONF_HOST: "192.168.127.12", config_flow.CONF_PORT: 80},
        )
    assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_with_api_failure(opp, product_class_mock):
    """Test that a generic BleBox API error maps to ``cannot_connect``."""
    with product_class_mock as products_class:
        products_class.async_from_host = AsyncMock(
            side_effect=blebox_uniapi.error.Error
        )
        result = await opp.config_entries.flow.async_init(
            config_flow.DOMAIN,
            context={"source": config_entries.SOURCE_USER},
            data={config_flow.CONF_HOST: "192.168.127.12", config_flow.CONF_PORT: 80},
        )
    assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_with_unknown_failure(opp, product_class_mock):
    """Test that an unexpected exception maps to the ``unknown`` error."""
    with product_class_mock as products_class:
        products_class.async_from_host = AsyncMock(side_effect=RuntimeError)
        result = await opp.config_entries.flow.async_init(
            config_flow.DOMAIN,
            context={"source": config_entries.SOURCE_USER},
            data={config_flow.CONF_HOST: "192.168.127.12", config_flow.CONF_PORT: 80},
        )
    assert result["errors"] == {"base": "unknown"}
async def test_flow_with_unsupported_version(opp, product_class_mock):
    """Test that unsupported firmware maps to the ``unsupported_version`` error."""
    with product_class_mock as products_class:
        products_class.async_from_host = AsyncMock(
            side_effect=blebox_uniapi.error.UnsupportedBoxVersion
        )
        result = await opp.config_entries.flow.async_init(
            config_flow.DOMAIN,
            context={"source": config_entries.SOURCE_USER},
            data={config_flow.CONF_HOST: "192.168.127.12", config_flow.CONF_PORT: 80},
        )
    assert result["errors"] == {"base": "unsupported_version"}
async def test_async_setup(opp):
    """Test async_setup (for coverage)."""
    # Setting up the component via async_setup_component must report success.
    assert await async_setup_component(opp, "blebox", {"host": "192.168.127.12"})
    await opp.async_block_till_done()
async def test_already_configured(opp, valid_feature_mock):
    """Test that same device cannot be added twice."""
    config = mock_config("192.168.127.12")
    config.add_to_opp(opp)
    await opp.config_entries.async_setup(config.entry_id)
    await opp.async_block_till_done()
    # Starting a second flow for the same host must abort.
    result = await opp.config_entries.flow.async_init(
        config_flow.DOMAIN,
        context={"source": config_entries.SOURCE_USER},
        data={config_flow.CONF_HOST: "192.168.127.12", config_flow.CONF_PORT: 80},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "address_already_configured"
async def test_async_setup_entry(opp, valid_feature_mock):
    """Test async_setup_entry (for coverage)."""
    config = mock_config()
    config.add_to_opp(opp)
    assert await opp.config_entries.async_setup(config.entry_id)
    await opp.async_block_till_done()
    # A freshly set-up entry must be registered and reach the LOADED state.
    assert opp.config_entries.async_entries() == [config]
    assert config.state is config_entries.ConfigEntryState.LOADED
async def test_async_remove_entry(opp, valid_feature_mock):
    """Test removing a config entry (for coverage)."""
    config = mock_config()
    config.add_to_opp(opp)
    assert await opp.config_entries.async_setup(config.entry_id)
    await opp.async_block_till_done()
    assert await opp.config_entries.async_remove(config.entry_id)
    await opp.async_block_till_done()
    # After removal no entries remain and the entry is unloaded.
    assert opp.config_entries.async_entries() == []
    assert config.state is config_entries.ConfigEntryState.NOT_LOADED
| [
"openpeerpower.setup.async_setup_component",
"unittest.mock.AsyncMock",
"unittest.mock.PropertyMock",
"pytest.fixture",
"unittest.mock.patch"
] | [((1316, 1357), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""valid_feature_mock"""'}), "(name='valid_feature_mock')\n", (1330, 1357), False, 'import pytest\n'), ((1490, 1530), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""flow_feature_mock"""'}), "(name='flow_feature_mock')\n", (1504, 1530), False, 'import pytest\n'), ((2490, 2531), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""product_class_mock"""'}), "(name='product_class_mock')\n", (2504, 2531), False, 'import pytest\n'), ((906, 953), 'unittest.mock.PropertyMock', 'PropertyMock', ([], {'return_value': '"""My gate controller"""'}), "(return_value='My gate controller')\n", (918, 953), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((980, 1023), 'unittest.mock.PropertyMock', 'PropertyMock', ([], {'return_value': '"""gateController"""'}), "(return_value='gateController')\n", (992, 1023), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((1049, 1085), 'unittest.mock.PropertyMock', 'PropertyMock', ([], {'return_value': '"""gateBox"""'}), "(return_value='gateBox')\n", (1061, 1085), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((1112, 1147), 'unittest.mock.PropertyMock', 'PropertyMock', ([], {'return_value': '"""BleBox"""'}), "(return_value='BleBox')\n", (1124, 1147), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((1185, 1218), 'unittest.mock.PropertyMock', 'PropertyMock', ([], {'return_value': '"""1.23"""'}), "(return_value='1.23')\n", (1197, 1218), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((1249, 1292), 'unittest.mock.PropertyMock', 'PropertyMock', ([], {'return_value': '"""abcd0123ef5678"""'}), "(return_value='abcd0123ef5678')\n", (1261, 1292), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((2681, 2746), 'unittest.mock.patch', 'patch', (['path', 'DEFAULT', 'blebox_uniapi.products.Products', 
'(True)', '(True)'], {}), '(path, DEFAULT, blebox_uniapi.products.Products, True, True)\n', (2686, 2746), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((2964, 3022), 'unittest.mock.AsyncMock', 'AsyncMock', ([], {'side_effect': 'blebox_uniapi.error.ConnectionError'}), '(side_effect=blebox_uniapi.error.ConnectionError)\n', (2973, 3022), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((3548, 3596), 'unittest.mock.AsyncMock', 'AsyncMock', ([], {'side_effect': 'blebox_uniapi.error.Error'}), '(side_effect=blebox_uniapi.error.Error)\n', (3557, 3596), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((4126, 4161), 'unittest.mock.AsyncMock', 'AsyncMock', ([], {'side_effect': 'RuntimeError'}), '(side_effect=RuntimeError)\n', (4135, 4161), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((4665, 4729), 'unittest.mock.AsyncMock', 'AsyncMock', ([], {'side_effect': 'blebox_uniapi.error.UnsupportedBoxVersion'}), '(side_effect=blebox_uniapi.error.UnsupportedBoxVersion)\n', (4674, 4729), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n'), ((5163, 5227), 'openpeerpower.setup.async_setup_component', 'async_setup_component', (['opp', '"""blebox"""', "{'host': '192.168.127.12'}"], {}), "(opp, 'blebox', {'host': '192.168.127.12'})\n", (5184, 5227), False, 'from openpeerpower.setup import async_setup_component\n'), ((778, 789), 'unittest.mock.AsyncMock', 'AsyncMock', ([], {}), '()\n', (787, 789), False, 'from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch\n')] |
## @class IntraCodec
# Module designed for encoding and decoding YUV videos using the intra-frame method
# That is considering adjacent pixels in the same frame and encoding their errors
# @author <NAME> 89005
# @author <NAME> 89262
import numpy as np
import math
from Golomb import *
from Bitstream import *
class IntraCodec:
    ## Initialization function
    # @param[in] filename Path of the file to read
    # @param[in] encoded A flag used to indicate if the video in the given path was encoded by this same class
    # @param[in] limitFrames Optional parameter to limit the number of frames to considered
    # Initializing and setting up some useful parameters and flags
    def __init__(self, filename, encoded=False, limitFrames=None):
        self.vid = filename
        self.encoding='utf-8'
        # Array of arrays containing each frame's components
        self.frameY=[]
        self.frameV=[]
        self.frameU=[]
        self.encoded=False
        self.quantizationStep=None
        self.colorSpace=None
        # Silence numpy overflow warnings: uint8 wrap-around is expected
        # when summing predictor output and decoded error values.
        np.seterr(over='ignore')
        #calls read video on initialization
        if not encoded:
            self.read_video()
        else:
            self.encoded=True
            self.read_encoded_video(limitFrames=limitFrames)
    ## read_video function
    # Reads YUV video information from file, storing all its data in our structures, calculating different components lengths and shapes
    def read_video(self):
        f=open(self.vid,"rb")
        c=1
        for line in f:
            # Processing header
            if c==1:
                line=line.decode(self.encoding)
                self.header=line.strip()
                self.handleHeader()
            # Rest of the video
            if c>=2:
                frameY=f.read(self.yLength)
                frameU=f.read(self.uLength)
                frameV=f.read(self.vLength)
                y=np.frombuffer(frameY, dtype=np.uint8)
                u=np.frombuffer(frameU, dtype=np.uint8)
                v=np.frombuffer(frameV, dtype=np.uint8)
                y=y.reshape(self.shape)
                u=u.reshape(self.other_shape)
                v=v.reshape(self.other_shape)
                self.frameY+=[y]
                self.frameU+=[u]
                self.frameV+=[v]
            c+=1
        self.TotalFrames=len(self.frameY)
        f.close()
    ## read_encoded_video function
    # @param[in] limitFrames Optional parameter to limit the number of frames to be decoded
    # Reads video information (encoded by this class) from file
    # Starts by decoding and interpreting the header, followed by decoding of all the pixel errors and recreating the original pixel based on the predictor that was used
    def read_encoded_video(self,limitFrames=None):
        bs=BitStream(self.vid,'READ')
        headerlen=bs.read_n_bits(8)
        chars=[]
        for i in range(0,headerlen*8):
            chars.append(str(bs._readbit()))
        res=''.join(chars)
        self.header=self.decode_binary_string(res)
        #handle header
        self.handleHeader()
        g=Golomb(self.golombParam)
        # Number of remainder bits per Golomb codeword (M assumed a power of 2).
        bitsResto=int(math.log(self.golombParam,2))
        if limitFrames==None:
            l=self.TotalFrames
        else:
            l=limitFrames
        #
        self.frameY=[None]*l
        self.frameU=[None]*l
        self.frameV=[None]*l
        #
        for frame in range(0,l):
            print('decoding frame',frame)
            y=np.zeros(shape=self.shape,dtype=np.uint8)
            u=np.zeros(shape=self.other_shape,dtype=np.uint8)
            v=np.zeros(shape=self.other_shape,dtype=np.uint8)
            for line in range(0, self.height):
                for column in range(0,self.width):
                    pixel=self.decodeWithBitstream(3,bs,g,bitsResto)
                    # Same JPEG-LS predictor as the encoder, fed with the
                    # already-decoded neighbours a, c, b.
                    a=self.getYUVPixel(frame,line,column-1, resized=False)
                    c=self.getYUVPixel(frame,line-1,column-1, resized=False)
                    b=self.getYUVPixel(frame,line-1,column, resized=False)
                    x=self.predict(a,c,b)
                    pixel=self.sum(x,pixel)
                    pixel=tuple(pixel)
                    # NOTE(review): this rebinding shadows the loop bound
                    # ``l`` computed above (harmless here, but confusing).
                    l,c=self.adjustCoord(line,column)
                    y[line,column]=pixel[0]
                    u[l,c]=pixel[1]
                    v[l,c]=pixel[2]
            #
            self.frameY[frame]=y
            self.frameU[frame]=u
            self.frameV[frame]=v
            # for each frame
            # NOTE(review): each frame is stored twice - once into the
            # preallocated slot above and once appended here - so the
            # frame lists end up with 2*l entries. Looks like a bug;
            # confirm before relying on TotalFrames/len(frameY).
            self.frameY+=[y]
            self.frameU+=[u]
            self.frameV+=[v]
        #
        bs.close()
    ## handleHeader function
    # Interpreting the header of the file, containing width, height, frames per second and color space, assigning them to class variables
    # This header can also contain other parameters added while encoding, such as the parameter for Golomb and the quantization steps used for lossy coding
    def handleHeader(self):
        print(self.header)
        fields=self.header.split(" ")
        for field in fields:
            # Each header field is dispatched on its first character.
            c=field[0]
            if c=='W':
                self.width=int(field[1:])
            elif c=='H':
                self.height=int(field[1:])
            elif c=='F':
                self.fps=int(field[1:3])
            elif c=='C':
                self.colorSpace=int(field[1:])
            elif c=='G':
                self.golombParam=int(field[-1:])
                self.encoded=True
            elif c=='z':
                self.TotalFrames=int(field[1:])
            elif c=='q':
                qlist=field[1:]
                qsteps=qlist.split(':')
                self.quantizationStep=[int(qsteps[0]),int(qsteps[1]),int(qsteps[2])]
        self.computeShape()
        print('width=',self.width, 'height=',self.height, self.fps, self.colorSpace, self.frameLength)
        if self.encoded:
            print('g=',self.golombParam, 'totalframes=',self.TotalFrames)
        if self.quantizationStep!=None:
            print('q=',self.quantizationStep)
    ## adjustCoord function
    # @param[in] line Line where the pixel is located
    # @param[in] column Column where the pixel is located
    # @param[out] line Adjusted line number
    # @param[out] column Adjusted column number
    # Adjusts given line and column considering the different array shapes in different color spaces
    # Useful when assigning new values to a certain pixel position
    def adjustCoord(self,line,column):
        if self.colorSpace=='4:2:2':
            c=math.floor((column/2))
            return line,c
        elif self.colorSpace=='4:2:0':
            c=math.floor((column/2))
            l=math.floor((line/2))
            return l,c
        else:
            return line,column
    ## computeShape function
    # Calculating array shapes for YUV components based on the color space
    def computeShape(self):
        if self.colorSpace==444:
            self.colorSpace='4:4:4'
            self.frameLength=int(self.width*self.height*3)
            self.yLength=self.uLength=self.vLength=int(self.frameLength/3)
            self.shape = (int(self.height), self.width)
            self.other_shape = (int(self.height), self.width)
        elif self.colorSpace==422:
            self.colorSpace='4:2:2'
            self.frameLength=int(self.width*self.height*2)
            self.yLength=int(self.frameLength/2)
            self.vLength=self.uLength=int(self.frameLength/4)
            self.shape = (int(self.height), self.width)
            self.other_shape = (int(self.height), int(self.width/2))
        else:
            self.colorSpace='4:2:0'
            self.frameLength=int(self.width*self.height*3/2)
            self.yLength=int(self.frameLength*(2/3))
            self.uLength=self.vLength=int(self.frameLength*(1/6))
            self.shape = (int(self.height), self.width)
            self.other_shape = (int(self.height/2), int(self.width/2))
    ## getYUVPixel function
    # @param[in] frame Number of the frame from which to read the pixel from
    # @param[in] line Line in which the pixel is located
    # @param[in] column Column in which the pixel is located
    # @param[in] resized A flag used to indicate if the arrays have been resized or not
    # @param[out] p The pixel tuple in YUV format
    # Returns 0,0,0 for non existent pixels, useful for the Codecs
    # Adjust line and column numbers based on the color space (and array shapes)
    def getYUVPixel(self, frame, line, column, resized):
        yf=self.frameY[frame]
        uf=self.frameU[frame]
        vf=self.frameV[frame]
        if resized==False:
            if self.colorSpace=='4:2:2':
                c=math.floor((column/2))
                if line<0 or column<0 or c<0:
                    return 0,0,0
                p=yf[line,column], uf[line,c], vf[line,c]
            elif self.colorSpace=='4:2:0':
                c=math.floor((column/2))
                l=math.floor((line/2))
                if line<0 or column<0 or c<0 or l<0:
                    return 0,0,0
                p=yf[line,column], uf[l,c], vf[l,c]
            else:
                if line<0 or column<0:
                    return 0,0,0
                p=yf[line,column], uf[line,column], vf[line,column]
        else:
            if line<0 or column<0:
                return 0,0,0
            p=yf[line,column], uf[line,column], vf[line,column]
        return p
    ## updateYUVPixel function
    # @param[in] compNumb Number of the pixel component to be changed (0=Y,1=U,2=V)
    # @param[in] frame Number of the frame where the pixel is located
    # @param[in] line Line in which the pixel is located
    # @param[in] column Column in which the pixel is located
    # @param[in] value New value of the pixel's component
    # Used for avoiding error propagation in lossy coding
    def updateYUVPixel(self,compNumb,frame,line,column,value):
        l,c=self.adjustCoord(line,column)
        if compNumb==0:
            rf=self.frameY[frame]
            rf.setflags(write=1)
            rf[line,column]=value
        elif compNumb==1:
            rf=self.frameU[frame]
            rf.setflags(write=1)
            rf[l,c]=value
        else:
            rf=self.frameV[frame]
            rf.setflags(write=1)
            rf[l,c]=value
    ## encode_video function
    # @param[in] filename Path of file to write with the encoded video information
    # @param[in] golombparam Golomb's parameter M (factor)
    # @param[in] q Optional parameter for specifying each components quantization steps for lossy coding
    # @param[in] limitFrames Optional parameter for limiting number of frames to encode
    # Starts by encoding the header, passing additional parameters such as the Golomb factor
    # Proceeds to encode each pixel, by calculating each component's error according to the predictor function
    def encode_video(self, filename, golombparam, q=None, limitFrames=None):
        if limitFrames==None:
            l=self.TotalFrames
        else:
            l=limitFrames
        g=Golomb(golombparam)
        bs=BitStream(filename,'WRITE')
        header='ENCODED '+self.header+' Golomb'+str(golombparam)+' z'+str(self.TotalFrames)
        if q!=None:
            header+=' q'+str(q[0])+':'+str(q[1])+':'+str(q[2])
            self.quantizationStep=q
        headerlen=len(header)
        bs.write_n_bits(headerlen,8)
        bs.writeTxt(header)
        for frame in range(0,l):
            print('encoding frame',frame)
            for line in range(0,self.height):
                for column in range(0,self.width):
                    p=self.getYUVPixel(frame,line,column, resized=False)
                    a=self.getYUVPixel(frame,line,column-1, resized=False)
                    c=self.getYUVPixel(frame,line-1,column-1, resized=False)
                    b=self.getYUVPixel(frame,line-1,column, resized=False)
                    x=self.predict(a,c,b)
                    erro=self.diff(p,x)
                    self.encodeWithBitstream(erro,bs,g,pixel=p,frame=frame,line=line,column=column)
        bs.close()
    ## predict function
    # @param[in] a Adjacent pixel in position (line,col-1)
    # @param[in] c Adjacent pixel in position (line-1,col-1)
    # @param[in] b Adjacent pixel in position (line-1,col)
    # @param[out] ret Most similar pixel
    # The returned pixel is calculated using the JPEG-LS non-linear predictor formula
    def predict(self,a,c,b):
        y=[int(a[0]),int(c[0]),int(b[0])]
        u=[int(a[1]),int(c[1]),int(b[1])]
        v=[int(a[2]),int(c[2]),int(b[2])]
        l=[y]+[u]+[v]
        ret=[]
        for component in l:
            # component == [a, c, b] for one colour plane.
            if component[1]>=max(component[0],component[2]):
                x=min(component[0],component[2])
            elif component[1]<=min(component[0],component[2]):
                x=max(component[0],component[2])
            else:
                x=component[0]+component[2]-component[1]
            ret.append(x)
        return ret
    ## diff function
    # @param[in] p First pixel
    # @param[in] x Second pixel
    # @param[out] r Pixel result of the difference between the two pixels
    # Calculates the result pixel by calculating the difference between each yuv component
    def diff(self,p,x):
        ey=int(p[0])-int(x[0])
        eu=int(p[1])-int(x[1])
        ev=int(p[2])-int(x[2])
        return(ey,eu,ev)
    ## sum function
    # @param[in] p First pixel
    # @param[in] x Second pixel
    # @param[out] r Pixel result of the sum between the two pixels
    # Calculates the result pixel by calculating the sum between each yuv component
    def sum(self,p,x):
        ey=p[0]+x[0]
        eu=p[1]+x[1]
        ev=p[2]+x[2]
        return(ey,eu,ev)
    ## printPixels function
    # Function for printing pixels, useful during development
    def printPixels(self):
        l=self.TotalFrames
        l=1
        h=self.height
        #h=20
        w=self.width
        #w=20
        for frame in range(0,l):
            #print('processing frame',frame)
            for line in range(0,h):
                for column in range(0,w):
                    if line==0 and w-10<=column<w:
                        p=self.getYUVPixel(frame,line,column, resized=False)
                        print(p, end=';')
            #print('')
    ## decode_binary_string function
    # @param[in] s String
    # @param[out] r Decoded binary string
    # Additional function to decode binary strings
    def decode_binary_string(self,s):
        return ''.join(chr(int(s[i*8:i*8+8],2)) for i in range(len(s)//8))
    ## getFrames function
    # @param[out] frames The data structures with all the frames of each component
    # Useful to check data integrity
    def getFrames(self):
        return self.frameY, self.frameU,self.frameV
    ## encodeWithBitStream function
    # @param[in] value Value to be encoded
    # @param[in] bs Bitstream class object
    # @param[in] g Golomb class object
    # @param[in] pixel Current pixel values being encoded, used for lossy coding
    # @param[in] frame Frame where the pixel being encoded is located
    # @param[in] line Line where the pixel being encoded is located
    # @param[in] column Column where the pixel being encoded is located
    # Switches the value to be encoded to positive, writing a 1 or 0 according to the original value
    # If using lossy coding functionality, divides color component by quantization step and updates pixel value
    # Proceeds to write the encoded value by Golomb with the Bitstream
    def encodeWithBitstream(self, value,bs,g, pixel=None, frame=None, line=None, column=None):
        for i in range(0,len(value)):
            # One sign bit per component: 1 for negative, 0 otherwise.
            if value[i]<0:
                n=value[i]*-1
                bs.writebits(1,1)
            else:
                bs.writebits(0,1)
                n=value[i]
            if self.quantizationStep!=None and self.quantizationStep[i]!=0:
                #newValue=pixel[i]+(n)
                n=math.floor(n/self.quantizationStep[i])
                #if line!=0 and column!=0:
                    #self.updateYUVPixel(i,frame,line,column,newValue)
            n=g.encode(n)
            bs.writebits(int(n,2),len(n))
    ## decodeWithBitStream function
    # @param[in] len Number of values to read
    # @param[in] bs Bitstream class object
    # @param[in] g Golomb class object
    # @param[in] bitsResto Number of bits of the remainder = log(factor,2)
    # @param[out] pixel Decoded value
    # Starts by reading one bit 0 or 1, determing if number was negative
    # Reads the bits from the Bitstream and decodes them with Golomb
    # Multiplies by quantization step if using lossy coding
    # NOTE(review): the parameter name ``len`` shadows the builtin len().
    def decodeWithBitstream(self, len,bs,g,bitsResto):
        pixel=[]
        for i in range(0,len):
            ay=bs.read_n_bits(1)
            seq=''
            # Unary quotient part: read until the terminating '0'.
            while True:
                r=str(bs.read_n_bits(1))
                seq+=r
                if r=='0':
                    break
            seq+=str(bs.readbits(bitsResto))
            comp=g.decode(seq)
            if ay==1:
                comp=comp*-1
            if self.quantizationStep!=None and self.quantizationStep[i]!=0:
                comp=comp*self.quantizationStep[i]
            pixel.append(comp)
        return pixel
    ## verifyData function
    # @param[in] video Class containing video for comparison
    # @param[in] numberoframes Limits number of frames to check
    # Compares data between two videos
    def verifyData(self,video,numberoframes):
        m1,m2,m3=self.getFrames()
        m4,m5,m6=video.getFrames()
        for i in range(0,numberoframes):
            if (np.array_equal(m1[i],m4[i])):
                print('Y-',i,'correct')
        for i in range(0,numberoframes):
            if (np.array_equal(m2[i],m5[i])):
                print('U-',i,'correct')
        for i in range(0,numberoframes):
            if (np.array_equal(m3[i],m6[i])):
                print('V-',i,'correct')
| [
"math.floor",
"math.log",
"numpy.zeros",
"numpy.array_equal",
"numpy.frombuffer",
"numpy.seterr"
] | [((1042, 1066), 'numpy.seterr', 'np.seterr', ([], {'over': '"""ignore"""'}), "(over='ignore')\n", (1051, 1066), True, 'import numpy as np\n'), ((3174, 3203), 'math.log', 'math.log', (['self.golombParam', '(2)'], {}), '(self.golombParam, 2)\n', (3182, 3203), False, 'import math\n'), ((3503, 3545), 'numpy.zeros', 'np.zeros', ([], {'shape': 'self.shape', 'dtype': 'np.uint8'}), '(shape=self.shape, dtype=np.uint8)\n', (3511, 3545), True, 'import numpy as np\n'), ((3559, 3607), 'numpy.zeros', 'np.zeros', ([], {'shape': 'self.other_shape', 'dtype': 'np.uint8'}), '(shape=self.other_shape, dtype=np.uint8)\n', (3567, 3607), True, 'import numpy as np\n'), ((3621, 3669), 'numpy.zeros', 'np.zeros', ([], {'shape': 'self.other_shape', 'dtype': 'np.uint8'}), '(shape=self.other_shape, dtype=np.uint8)\n', (3629, 3669), True, 'import numpy as np\n'), ((6625, 6647), 'math.floor', 'math.floor', (['(column / 2)'], {}), '(column / 2)\n', (6635, 6647), False, 'import math\n'), ((17791, 17819), 'numpy.array_equal', 'np.array_equal', (['m1[i]', 'm4[i]'], {}), '(m1[i], m4[i])\n', (17805, 17819), True, 'import numpy as np\n'), ((17918, 17946), 'numpy.array_equal', 'np.array_equal', (['m2[i]', 'm5[i]'], {}), '(m2[i], m5[i])\n', (17932, 17946), True, 'import numpy as np\n'), ((18045, 18073), 'numpy.array_equal', 'np.array_equal', (['m3[i]', 'm6[i]'], {}), '(m3[i], m6[i])\n', (18059, 18073), True, 'import numpy as np\n'), ((1925, 1962), 'numpy.frombuffer', 'np.frombuffer', (['frameY'], {'dtype': 'np.uint8'}), '(frameY, dtype=np.uint8)\n', (1938, 1962), True, 'import numpy as np\n'), ((1981, 2018), 'numpy.frombuffer', 'np.frombuffer', (['frameU'], {'dtype': 'np.uint8'}), '(frameU, dtype=np.uint8)\n', (1994, 2018), True, 'import numpy as np\n'), ((2037, 2074), 'numpy.frombuffer', 'np.frombuffer', (['frameV'], {'dtype': 'np.uint8'}), '(frameV, dtype=np.uint8)\n', (2050, 2074), True, 'import numpy as np\n'), ((6727, 6749), 'math.floor', 'math.floor', (['(column / 2)'], {}), '(column / 2)\n', 
(6737, 6749), False, 'import math\n'), ((6764, 6784), 'math.floor', 'math.floor', (['(line / 2)'], {}), '(line / 2)\n', (6774, 6784), False, 'import math\n'), ((8782, 8804), 'math.floor', 'math.floor', (['(column / 2)'], {}), '(column / 2)\n', (8792, 8804), False, 'import math\n'), ((16107, 16147), 'math.floor', 'math.floor', (['(n / self.quantizationStep[i])'], {}), '(n / self.quantizationStep[i])\n', (16117, 16147), False, 'import math\n'), ((9003, 9025), 'math.floor', 'math.floor', (['(column / 2)'], {}), '(column / 2)\n', (9013, 9025), False, 'import math\n'), ((9044, 9064), 'math.floor', 'math.floor', (['(line / 2)'], {}), '(line / 2)\n', (9054, 9064), False, 'import math\n')] |
# -*- coding: utf-8 -*-
# Copyright 2019 The GraphicsFuzz Project Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test utility module.
A test directory contains a Test proto stored in "source/test.json", the reference and variant shader jobs, and various
other files, including results.
This module is used to read Test proto files and get various paths that exist in test directories.
"""
from pathlib import Path
from gfauto import proto_util, util
from gfauto.test_pb2 import Test
TEST_METADATA = "test.json"
REFERENCE_DIR = "reference"
VARIANT_DIR = "variant"
SHADER_JOB = "shader.json"
SHADER_JOB_RESULT = "shader.info.json"
def get_source_dir(test_dir: Path) -> Path:
    """Return the "source" subdirectory of *test_dir*."""
    return test_dir.joinpath("source")


def get_metadata_path(test_dir: Path) -> Path:
    """Return the path of the Test metadata file inside *test_dir*."""
    source_dir = get_source_dir(test_dir)
    return get_metadata_path_from_source_dir(source_dir)


def get_metadata_path_from_source_dir(source_dir: Path) -> Path:
    """Return the path of the Test metadata file inside *source_dir*."""
    return source_dir.joinpath(TEST_METADATA)
def metadata_write(metadata: Test, test_dir: Path) -> Path:
    """Write *metadata* into *test_dir*'s metadata file; return *test_dir*."""
    metadata_write_to_path(metadata, get_metadata_path(test_dir))
    return test_dir


def metadata_read(test_dir: Path) -> Test:
    """Load the Test proto stored under *test_dir*."""
    metadata_path = get_metadata_path(test_dir)
    return metadata_read_from_path(metadata_path)


def metadata_read_from_source_dir(source_dir: Path) -> Test:
    """Load the Test proto stored under *source_dir*."""
    metadata_path = get_metadata_path_from_source_dir(source_dir)
    return metadata_read_from_path(metadata_path)
def metadata_read_from_path(test_metadata_path: Path) -> Test:
    """Deserialize a Test proto from the JSON file at *test_metadata_path*."""
    result = Test()
    json_text = util.file_read_text(test_metadata_path)
    proto_util.json_to_message(json_text, result)
    return result


def metadata_write_to_path(metadata: Test, test_metadata_path: Path) -> Path:
    """Serialize *metadata* as JSON into *test_metadata_path*; return the path."""
    serialized = proto_util.message_to_json(metadata)
    util.file_write_text(test_metadata_path, serialized)
    return test_metadata_path
def get_shader_job_path(test_dir: Path, shader_name: str) -> Path:
    """Path of the shader job JSON for *shader_name* under *test_dir*."""
    return test_dir.joinpath("source", shader_name, SHADER_JOB)


def get_device_directory(test_dir: Path, device_name: str) -> Path:
    """Per-device results directory under *test_dir*."""
    return test_dir.joinpath("results", device_name)


def get_results_directory(test_dir: Path, device_name: str) -> Path:
    """Directory holding the run result for *device_name*."""
    return get_device_directory(test_dir, device_name).joinpath("result")


def get_reductions_dir(test_dir: Path, device_name: str) -> Path:
    """Directory holding all reductions for *device_name*."""
    return get_device_directory(test_dir, device_name).joinpath("reductions")


def get_reduced_test_dir(test_dir: Path, device_name: str, reduction_name: str) -> Path:
    """Directory of one named reduction for *device_name*."""
    return get_reductions_dir(test_dir, device_name).joinpath(reduction_name)


def get_reduction_work_directory(reduced_test_dir: Path, name_of_shader: str) -> Path:
    """Scratch directory used while reducing *name_of_shader*."""
    return reduced_test_dir.joinpath("reduction_work", name_of_shader)
| [
"gfauto.util.file_read_text",
"gfauto.proto_util.message_to_json",
"gfauto.test_pb2.Test",
"gfauto.util.file_write_text",
"gfauto.proto_util.json_to_message"
] | [((1926, 1965), 'gfauto.util.file_read_text', 'util.file_read_text', (['test_metadata_path'], {}), '(test_metadata_path)\n', (1945, 1965), False, 'from gfauto import proto_util, util\n'), ((1979, 1985), 'gfauto.test_pb2.Test', 'Test', ([], {}), '()\n', (1983, 1985), False, 'from gfauto.test_pb2 import Test\n'), ((1990, 2030), 'gfauto.proto_util.json_to_message', 'proto_util.json_to_message', (['text', 'result'], {}), '(text, result)\n', (2016, 2030), False, 'from gfauto import proto_util, util\n'), ((2140, 2176), 'gfauto.proto_util.message_to_json', 'proto_util.message_to_json', (['metadata'], {}), '(metadata)\n', (2166, 2176), False, 'from gfauto import proto_util, util\n'), ((2181, 2227), 'gfauto.util.file_write_text', 'util.file_write_text', (['test_metadata_path', 'text'], {}), '(test_metadata_path, text)\n', (2201, 2227), False, 'from gfauto import proto_util, util\n')] |
import socket
class DNSQuery:
    """Minimal parser/responder for a single DNS query datagram.

    Fix: the original indexed and concatenated the datagram as ``str``
    (Python 2 semantics). On Python 3 ``socket.recvfrom`` returns ``bytes``,
    so ``ord(data[2])`` raised TypeError and the response could not be sent.
    This version operates on ``bytes`` throughout and ``respuesta`` returns
    ``bytes`` suitable for ``socket.sendto``.
    """

    def __init__(self, data):
        """Parse the raw query datagram.

        data -- bytes of the UDP packet as received from the socket.
        Sets self.dominio to the dotted query name (with trailing dot) for
        standard queries, '' otherwise.
        """
        self.data = data
        self.dominio = ''

        tipo = (data[2] >> 3) & 15  # Opcode bits
        if tipo == 0:  # Standard query
            ini = 12  # QNAME starts right after the 12-byte header
            lon = data[ini]
            while lon != 0:
                # Each label: 1 length byte followed by that many characters.
                self.dominio += data[ini + 1:ini + lon + 1].decode() + '.'
                ini += lon + 1
                lon = data[ini]

    def respuesta(self, ip):
        """Build the answer datagram (bytes) resolving the query to *ip*.

        Returns b'' when no domain was parsed from the query.
        """
        packet = b''
        if self.dominio:
            packet += self.data[:2] + b"\x81\x80"  # ID + standard response flags
            # Questions and Answers Counts (copy QDCOUNT into ANCOUNT; NS/AR zero)
            packet += self.data[4:6] + self.data[4:6] + b'\x00\x00\x00\x00'
            packet += self.data[12:]  # Original Domain Name Question
            packet += b'\xc0\x0c'  # Pointer to domain name
            # Response type A, class IN, TTL 60s, resource data length -> 4 bytes
            packet += b'\x00\x01\x00\x01\x00\x00\x00\x3c\x00\x04'
            packet += bytes(int(x) for x in ip.split('.'))  # 4 bytes of IP
        return packet
if __name__ == '__main__':
    # Every query is answered with this fixed address.
    ip='192.168.1.1'
    print('pyminifakeDNS:: dom.query. 60 IN A %s' % ip)
    # Listen for DNS queries on UDP port 53 (binding <1024 needs privileges).
    udps = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    udps.bind(('',53))
    try:
        while 1:
            # NOTE(review): on Python 3, recvfrom() returns bytes -- DNSQuery
            # must accept bytes and respuesta() must return bytes for sendto().
            data, addr = udps.recvfrom(1024)
            p=DNSQuery(data)
            udps.sendto(p.respuesta(ip), addr)
            print('Respuesta: %s -> %s' % (p.dominio, ip))
    except KeyboardInterrupt:
        # Ctrl-C: close the socket and exit cleanly.
        print('Finalizando')
        udps.close()
| [
"socket.socket"
] | [((1095, 1143), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (1108, 1143), False, 'import socket\n')] |
from math import ceil
from numpy_financial import nper, pmt, rate
from typing import List, Tuple
from .calculator import Calculator
# noinspection PyTypeChecker
class LoanCalculator(Calculator):
    """Amortized-loan calculator supporting a regular payment plus optional
    recurring extra payments, with schedule truncation when the loan is
    repaid early.

    Attributes self.rate, self.freq, self.periods, self.get_float/get_int
    etc. are provided by the Calculator base class (not visible here).
    """
    def __init__(self, **kwargs):
        super(LoanCalculator, self).__init__(**kwargs)
        # Principal borrowed.
        self.loan = self.get_float(kwargs.get("loan", 0))
        # Regular payment per period.
        self.reg_pmt = self.get_float(kwargs.get("reg_pmt", 0))
        # Extra payment amount (0 disables extra payments).
        self.extra_pmt = self.get_float(kwargs.get("extra_pmt", 0))
        # Period number at which extra payments begin.
        self.extra_pmt_start = self.get_int(kwargs.get("extra_pmt_start", 0))
        # Extra-payment frequency (times per year); 0 means a one-off payment.
        self.extra_pmt_f = self.get_int(kwargs.get("extra_pmt_f", 0))
        # numpy-financial `when`: 1 = payments at period start, 0 = at end.
        self.pmt_when = self.get_int(kwargs.get("pmt_when", 0))
        self.payments = []    # total payment per period (regular + extra)
        self.payments_e = []  # extra-payment component per period
        self.payments_r = []  # regular-payment component per period
        self.payments_p = []
    def get_balances_loans(self) -> List[float]:
        """Outstanding balance after each period.

        Mutates self.payments / self.payments_r / self.payments_e in place
        when the final payment would overshoot zero, clipping that payment so
        the balance lands exactly on 0, then truncates the schedule.
        """
        balances = []
        for x in self.periods:
            bal = self.loan - sum(self.payments[:x]) + sum(self.interests[:x])
            if bal < 0:
                # Overshoot fits inside the regular payment: clip it.
                if self.reg_pmt + bal >= 0:
                    self.payments_r[x - 1] = self.reg_pmt + bal
                    self.payments[x - 1] = (
                        self.payments_r[x - 1] + self.payments_e[x - 1]
                    )
                    balances.append(0)
                    # NOTE(review): this branch does not return, so the loop
                    # keeps iterating over the pre-truncation period list --
                    # confirm that is intended.
                    self.trunc_periods(x)
                else:
                    # Overshoot exceeds the regular payment: drop the regular
                    # payment and clip the extra payment instead.
                    self.payments_r[x - 1] = 0
                    self.payments_e[x - 1] = (
                        bal + self.payments_e[x - 1] + self.reg_pmt
                    )
                    self.payments[x - 1] = self.payments_e[x - 1]
                    balances.append(0)
                    self.trunc_periods(x)
                    return balances
            else:
                balances.append(
                    self.loan
                    - sum(self.payments[:x])
                    + sum(self.interests[:x])
                )
        return balances
    def get_interests_loans(self) -> List[float]:
        """Interest accrued in each period, rounded to 4 decimals.

        With pmt_when=1 (annuity-due) the period's payment is subtracted
        before interest accrues; otherwise interest is charged on the balance
        carried into the period. Negative interim values are clamped to 0.
        """
        _rate = self.rate / (100 * self.freq)  # periodic rate
        interests = [
            round(
                (self.loan - self.payments[0]) * _rate
                if self.pmt_when
                else self.loan * _rate,
                4,
            )
        ]
        for x in self.periods[1:]:
            if self.pmt_when:
                interest = round(
                    (self.loan - sum(self.payments[:x]) + sum(interests[:x]))
                    * _rate,
                    4,
                )
            else:
                interest = round(
                    (
                        self.loan
                        - sum(self.payments[: x - 1])
                        + sum(interests[:x])
                    )
                    * _rate,
                    4,
                )
            if interest < 0:
                interests.append(0)
            else:
                interests.append(interest)
        return interests
    def get_nper_loans(self) -> int:
        """Number of periods needed to repay the loan at reg_pmt.

        Side effects: updates self.num_of_years and refreshes the
        period-index lists derived from it.
        """
        _nper = ceil(
            nper(
                self.rate / (100 * self.freq),
                -self.reg_pmt,
                self.loan,
                when=self.pmt_when,
            )
        )
        self.num_of_years = round(_nper / self.freq, 2)
        self.periods = self.get_periods()
        self.periods_a = self.get_periods_a()
        self.periods_m = self.get_periods_m()
        return _nper
    def get_payments(self) -> Tuple[List[float]]:
        """Build the full payment schedule.

        Returns (total payments, extra payments, regular payments); also
        computes self.interests and self.balances as side effects.
        """
        self.payments_r = self.get_payments_r()
        self.payments_e = self.get_payments_e()
        self.payments = [
            round(self.payments_r[x - 1] + self.payments_e[x - 1], 4)
            for x in self.periods
        ]
        self.interests = self.get_interests_loans()
        # get_balances_loans may clip/truncate the payment lists in place.
        self.balances = self.get_balances_loans()
        return self.payments, self.payments_e, self.payments_r
    def get_payments_e(self) -> List[float]:
        """Per-period extra payments: extra_pmt at the start period and then
        every 12/extra_pmt_f periods, 0 elsewhere."""
        extra_pmt_p = []
        if self.extra_pmt:
            extra_pmt_p.append(self.extra_pmt_start)
            if self.extra_pmt_f:
                for x in self.periods[self.extra_pmt_start :]:
                    if not (x - self.extra_pmt_start) % (
                        12 / self.extra_pmt_f
                    ):
                        extra_pmt_p.append(x)
        return [
            self.extra_pmt if x in extra_pmt_p else 0 for x in self.periods
        ]
    def get_payments_r(self) -> List[float]:
        """Constant regular payment for every period."""
        return [self.reg_pmt for _ in self.periods]
    def get_rate_loans(self):
        """Annual interest rate (percent) implied by loan/reg_pmt/term."""
        return (
            rate(
                self.freq * self.num_of_years,
                -self.reg_pmt,
                self.loan,
                0,
                self.pmt_when,
            )
            * self.freq
            * 100
        )
    def get_reg_pmt(self) -> float:
        """Regular payment implied by loan/rate/term; stores and returns it."""
        self.reg_pmt = round(
            -pmt(
                self.rate / (100 * self.freq),
                self.freq * self.num_of_years,
                self.loan,
                when=self.pmt_when,
            ),
            4,
        )
        return self.reg_pmt
    def trunc_periods(self, p: int) -> None:
        """Truncate the schedule to *p* periods (loan repaid early) and record
        the truncated term in nper_t / num_of_years_t."""
        self.periods = self.periods[:p]
        self.payments_r = self.payments_r[:p]
        self.payments_e = self.payments_e[:p]
        self.payments = self.payments[:p]
        self.interests = self.interests[:p]
        self.nper_t = p
        self.num_of_years_t = p / self.freq
| [
"numpy_financial.pmt",
"numpy_financial.nper",
"numpy_financial.rate"
] | [((3114, 3200), 'numpy_financial.nper', 'nper', (['(self.rate / (100 * self.freq))', '(-self.reg_pmt)', 'self.loan'], {'when': 'self.pmt_when'}), '(self.rate / (100 * self.freq), -self.reg_pmt, self.loan, when=self.\n pmt_when)\n', (3118, 3200), False, 'from numpy_financial import nper, pmt, rate\n'), ((4686, 4765), 'numpy_financial.rate', 'rate', (['(self.freq * self.num_of_years)', '(-self.reg_pmt)', 'self.loan', '(0)', 'self.pmt_when'], {}), '(self.freq * self.num_of_years, -self.reg_pmt, self.loan, 0, self.pmt_when)\n', (4690, 4765), False, 'from numpy_financial import nper, pmt, rate\n'), ((5006, 5106), 'numpy_financial.pmt', 'pmt', (['(self.rate / (100 * self.freq))', '(self.freq * self.num_of_years)', 'self.loan'], {'when': 'self.pmt_when'}), '(self.rate / (100 * self.freq), self.freq * self.num_of_years, self.loan,\n when=self.pmt_when)\n', (5009, 5106), False, 'from numpy_financial import nper, pmt, rate\n')] |
import unittest
from pydundas import Api
class TestProject(unittest.TestCase):
    """Smoke check that the pydundas Api can be constructed and queried."""

    def test_no_syntax_error(self):
        api = Api(None)
        self.assertIsNotNone(api.project())
| [
"pydundas.Api"
] | [((147, 156), 'pydundas.Api', 'Api', (['None'], {}), '(None)\n', (150, 156), False, 'from pydundas import Api\n')] |
import time
from bs4 import BeautifulSoup
from base import *
from db_info import *
# Map article URL -> partially filled Article; REUTERSpider.parse completes
# and removes each entry after fetching the article page.
_url2atc = dict()
# English month abbreviation -> month number, used when parsing listing dates.
month = dict({'Jan':1, 'Feb':2, 'Mar':3, 'Apr':4, 'May':5, 'Jun':6,
             'Jul':7, 'Aug':8, 'Sep':9, 'Oct':10, 'Nov':11, 'Dec':12})
class REUTERURLManager(BaseURLManager):
    """Yields article URLs from the Reuters "china-news" archive listing pages."""
    def __init__(self, start_page=1, end_page=-1):
        super().__init__(start_page, end_page)
    def parse(self, page_cnt) -> list:
        """Fetch listing page *page_cnt* and return the article URLs on it.

        Side effect: caches a partially filled Article per URL in the
        module-level _url2atc map; REUTERSpider.parse fills the rest later.
        """
        # Build the listing-page URL, e.g.
        # https://www.reuters.com/news/archive/china-news?view=page&page=1&pageSize=10
        dir_url = f'https://www.reuters.com/news/archive/china-news?view=page&page={page_cnt}&pageSize=10'
        # Download and parse the listing-page HTML.
        html = get_html(dir_url)
        soup = BeautifulSoup(html, features="html.parser")
        articles = soup.find(attrs={'class':'news-headline-list'}).find_all(attrs={'class':'story'})
        urls = []
        for i in articles:
            # href attribute of the anchor inside the story block
            try:
                url = 'https://www.reuters.com' + i.find('div', attrs={'story-content'}).find('a')['href']
            except:
                url = None
            try:
                pic_url = i.find('div', attrs={'story-photo lazy-photo'}).find('img')['org-src']
            except:
                pic_url = None
            try:
                title = i.find('div', attrs={'story-content'}).find('h3').text.strip()
            except:
                title = None
            try:
                date = i.find('div', attrs={'story-content'}).find('time').find('span').text
                # NOTE(review): a digit-leading date is replaced with today's
                # date -- presumably a relative timestamp like "6min ago".
                # Otherwise the text is parsed as "Mon DD YYYY".
                if date[0].isdigit():
                    date = time.strftime("%Y-%m-%d", time.localtime())
                else:
                    temp = date.split()
                    date = temp[2] + '-' + str(month[temp[0]]) + '-' + temp[1]
            except:
                date = None
            try:
                abstract = i.find('div', attrs={'story-content'}).find('p').text.strip()
            except:
                abstract = None
            act = Article(
                publisher='REUTER',
                url=url,
                title=title,
                date=date,
                authors=None, # filled in later by REUTERSpider
                content=None, # filled in later by REUTERSpider
                abstract=abstract,
                location=None, # filled in later by REUTERSpider
                section=None,
                category=None, # filled in later by REUTERSpider
                pic_url=pic_url,
                type='passage'
            )
            urls.append(url)
            print(url)
            _url2atc[url] = act
        return urls
class REUTERSpider(BaseSpider):
    """Fetches each article page and fills in authors/location/content/category
    on the Article cached by REUTERURLManager."""
    def __init__(self, server: str, database: str, url_manager: BaseURLManager, maximum=-1):
        super().__init__(server, database, url_manager, maximum)
    def parse(self, url) -> Article:
        """Download *url*, extract the remaining fields and return the
        completed Article (entry is removed from _url2atc)."""
        html = get_html(url)
        # Build the parser.
        soup = BeautifulSoup(html, features="html.parser")
        # Extract the byline authors.
        # NOTE(review): attrs={'class', '...'} and attrs={'clss', '...'} pass
        # *sets* (and 'clss' looks like a typo) to BeautifulSoup -- confirm
        # these selectors actually match as intended.
        try:
            authors_text = soup.find('div', attrs={'class', 'TwoColumnsLayout-body-86gsE ArticlePage-body-container-10RhS'}).find('div', attrs={'clss', 'Attribution-attribution-Y5JpY'}).text
            # Drop the trailing ";..." part and the leading two words
            # (presumably "Reporting by" -- verify against live pages).
            authors_text = authors_text.split(';')[0].split(' ')[2:]
            authors = []
            location = ""
            name = ""
            # Accumulate words into a name until 'and' (next author) or 'in'
            # (location follows, parsing stops) is seen.
            for i in authors_text:
                if i == 'and':
                    authors.append(name.strip())
                    name = ""
                elif i == 'in':
                    authors.append(name.strip())
                    break
                    # l = authors_text.index(i)
                    # location = " ".join(authors_text[l+1:])
                    # break
                elif authors_text.index(i) == (len(authors_text) - 1):
                    name = name + i + " "
                    authors.append(name.strip())
                else:
                    name = name + i + " "
            # if location == "":
            #     location = None
        except:
            authors = None
        try:
            raw_text = soup.find('div', attrs={'class', 'TwoColumnsLayout-body-86gsE ArticlePage-body-container-10RhS'}).\
                find('div', attrs={'clss', 'ArticleBodyWrapper'}).\
                find_all('p', attrs={'class', 'Paragraph-paragraph-2Bgue ArticleBody-para-TD_9x'})
            try:
                # Dateline: the words before the '-' token in the first
                # paragraph are taken as the location.
                location = raw_text[0].text.split(' ')
                loc_index = location.index('-')
                if loc_index == 1:
                    location = None
                else:
                    location = " ".join(location[:(loc_index-1)])
            except:
                location = None
            raw_text = raw_text[1:] # drop the abstract paragraph
            text = ''
            for i in raw_text:
                text += f'{i.text}\n'
        except:
            text = None
        try:
            category = soup.find('div', attrs={'class', 'TwoColumnsLayout-hero-3H8pu'}).\
                find('div', attrs={'clss', 'ArticleHeader-info-container-3-6YG'}).\
                find('a').text
        except:
            category = None
        # Complete the cached Article and drop it from the map.
        atc = _url2atc[url]
        atc.authors = str(authors)
        atc.location = location
        atc.content = text
        atc.category = category
        del _url2atc[url]
        return atc
if __name__ == '__main__':
    # Crawl the Reuters China-news archive and persist the articles via the
    # spider's database connection (SERVER/DATABASE come from db_info).
    um = REUTERURLManager()
    spider = REUTERSpider(
        server=SERVER,
        database=DATABASE,
        url_manager=um,
    )
    spider.run()
"bs4.BeautifulSoup",
"time.localtime"
] | [((747, 790), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html'], {'features': '"""html.parser"""'}), "(html, features='html.parser')\n", (760, 790), False, 'from bs4 import BeautifulSoup\n'), ((3079, 3122), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html'], {'features': '"""html.parser"""'}), "(html, features='html.parser')\n", (3092, 3122), False, 'from bs4 import BeautifulSoup\n'), ((1716, 1732), 'time.localtime', 'time.localtime', ([], {}), '()\n', (1730, 1732), False, 'import time\n')] |
import djclick as click
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from .forms import AddOrganizerForm
from .slack_client import slack
# "Get organizers info" functions used in 'new_event' and 'copy_event' management commands.
def get_main_organizer():
    """
    Prompt for the main organizer's name and e-mail address and return a
    single-element list of {'name': ..., 'email': ...} dicts.
    """
    click.echo(_("Let's talk about the team. First the main organizer:"))
    main_name = click.prompt(click.style(
        "First and last name", bold=True, fg='yellow'
    ))
    main_email = click.prompt(click.style(
        "E-mail address", bold=True, fg='yellow'
    ))
    click.echo("All right, the main organizer is {0} ({1})".format(main_name, main_email))
    return [{'name': main_name, 'email': main_email}]
def get_team(team):
    """
    Interactively extend *team* (the list started by get_main_organizer)
    with additional members until the user declines; returns the list.
    """
    member_count = 1
    while click.confirm(click.style(
        "Do you want to add additional team members?", bold=True, fg='yellow'
    ), default=False):
        member_count += 1
        name = click.prompt(click.style(
            f"First and last name of #{member_count} member", bold=True, fg='yellow'
        ))
        email = click.prompt(click.style(
            f"E-mail address of #{member_count} member", bold=True, fg='yellow'
        ))
        if len(name) > 0:
            team.append({'name': name, 'email': email})
        click.echo(
            f"All right, the #{member_count} team member of Django Girls is {name} ({email})"
        )
    return team
def create_users(team, event):
    """
    Create or get a User object for every member dict in *team*,
    attaching each to *event*; returns the saved users.
    """
    users = []
    for entry in team:
        entry['event'] = event.pk
        saved_user = AddOrganizerForm(entry).save()
        users.append(saved_user)
    return users
def brag_on_slack_bang(city, country, team):
    """
    This is posting a message about Django Girls new event to #general channel on Slack!
    No-op unless settings.ENABLE_SLACK_NOTIFICATIONS is truthy.
    """
    if settings.ENABLE_SLACK_NOTIFICATIONS:
        # Announcement text lists every organizer by "first last" name.
        text = f":django_pony: :zap: Woohoo! :tada: New Django Girls alert! " \
               f"Welcome Django Girls {city}, {country}. " \
               f"Congrats {', '.join(['{} {}'.format(x.first_name, x.last_name) for x in team])}!"
        slack.chat.post_message(
            channel='#general',
            text=text,
            username='Django Girls',
            icon_emoji=':django_heart:'
        )
| [
"djclick.style",
"django.utils.translation.gettext_lazy",
"djclick.echo"
] | [((446, 503), 'django.utils.translation.gettext_lazy', '_', (['"""Let\'s talk about the team. First the main organizer:"""'], {}), '("Let\'s talk about the team. First the main organizer:")\n', (447, 503), True, 'from django.utils.translation import gettext_lazy as _\n'), ((534, 592), 'djclick.style', 'click.style', (['"""First and last name"""'], {'bold': '(True)', 'fg': '"""yellow"""'}), "('First and last name', bold=True, fg='yellow')\n", (545, 592), True, 'import djclick as click\n'), ((638, 691), 'djclick.style', 'click.style', (['"""E-mail address"""'], {'bold': '(True)', 'fg': '"""yellow"""'}), "('E-mail address', bold=True, fg='yellow')\n", (649, 691), True, 'import djclick as click\n'), ((1080, 1167), 'djclick.style', 'click.style', (['"""Do you want to add additional team members?"""'], {'bold': '(True)', 'fg': '"""yellow"""'}), "('Do you want to add additional team members?', bold=True, fg=\n 'yellow')\n", (1091, 1167), True, 'import djclick as click\n'), ((1266, 1340), 'djclick.style', 'click.style', (['f"""First and last name of #{i} member"""'], {'bold': '(True)', 'fg': '"""yellow"""'}), "(f'First and last name of #{i} member', bold=True, fg='yellow')\n", (1277, 1340), True, 'import djclick as click\n'), ((1393, 1462), 'djclick.style', 'click.style', (['f"""E-mail address of #{i} member"""'], {'bold': '(True)', 'fg': '"""yellow"""'}), "(f'E-mail address of #{i} member', bold=True, fg='yellow')\n", (1404, 1462), True, 'import djclick as click\n'), ((1580, 1667), 'djclick.echo', 'click.echo', (['f"""All right, the #{i} team member of Django Girls is {name} ({email})"""'], {}), "(\n f'All right, the #{i} team member of Django Girls is {name} ({email})')\n", (1590, 1667), True, 'import djclick as click\n'), ((1726, 1813), 'djclick.style', 'click.style', (['"""Do you want to add additional team members?"""'], {'bold': '(True)', 'fg': '"""yellow"""'}), "('Do you want to add additional team members?', bold=True, fg=\n 'yellow')\n", (1737, 1813), True, 
'import djclick as click\n')] |
import datetime
import json
import os
import secrets
from importlib import import_module
# PATH = 'C:\\Users\\Zadigo\\Documents\\Apps\\zemailer\\app\\core\\settings.json'
PATH = os.path.join(os.getcwd(), 'app', 'core', 'conf', 'settings.json')
def deserialize(func):
    """A decorator for Settings.get that post-processes values read from the
    settings file, resolving ``parent__child`` soft links and reviving
    serialized dates.
    """
    def get(self, name):
        class_name = self.__class__.__name__
        # Check if there is a softlink of the form "parent__child"
        try:
            parent, child = name.split('__', 1)
        except ValueError:
            # We have a simple string
            searched_item = func(self, name)
        else:
            # BUG: TypeError: string indices must be integers
            # We have a parent__child link
            searched_item = func(self, parent)[child]
        # Can only post-process containers; note the `is not None` test is
        # redundant once isinstance() has passed.
        if isinstance(searched_item, (dict, list)) \
            and searched_item is not None:
            # Convert the stored timestamp back into a datetime.date,
            # attaching it under 'access_date_class'.
            if '__class__' in searched_item:
                tag = searched_item['__class__']
                if tag == 'datetime':
                    datetime_class = datetime.date.fromtimestamp(searched_item['access_date'])
                    searched_item.update({'access_date_class': datetime_class})
            # return "%s([%s])" % (class_name, searched_item)
            return searched_item
        # NOTE(review): non-container values fall through and return None --
        # confirm that is intended.
    return get
class Settings:
    """Dictionary-backed view over the application's JSON settings file.

    Loads the file on construction, keeps a working copy in ``self.cache``
    and populates the file with default values when it has no ``_id`` yet.
    """
    email_class = None
    def __init__(self, name_or_path=None, **kwargs):
        if not name_or_path:
            # If no custom path has been provided,
            # default to the local settings file
            name_or_path = 'settings.json'
        self.name_or_path = name_or_path
        settings_file = self.handler()
        settings_dict = json.load(settings_file)
        # If the file does not have the
        # base structure {_id, settings:{}},
        # raise an error
        if not settings_dict:
            raise ValueError('The file is not valid.')
        # Work on a cached copy of the
        # different items in the settings file
        self.cache = settings_dict.copy()
        # Checks that the file has an _id and proceeds
        # to populate the settings section of the
        # file with the default values
        self.check_file_id(settings_dict['_id'], settings_file)
    def __repr__(self):
        return f'{self.__class__.__name__}([{self.cache}])'
    def handler(self):
        """A handler for opening the settings file.

        NOTE(review): opening 'r+' on a missing file raises
        FileNotFoundError, not FileExistsError -- this except clause
        likely never matches. Verify.
        """
        try:
            settings_file = open(self.name_or_path, 'r+', encoding='utf-8')
        except FileExistsError:
            raise
        return settings_file
    def check_file_id(self, file_id, handle=None, **kwargs):
        """Ensure the settings file carries an ``_id``; populate defaults
        through *handle* when it does not."""
        if not file_id:
            # Assume the file is a new version and proceed
            # to populate all the required elements
            self.cache['_id'] = secrets.token_hex(nbytes=25)
            if handle:
                # We need the file handle to proceed to
                # the next section of populating the settings
                populated_settings = self.populate(handle)
                return populated_settings
            else:
                # If none, lets just simply change
                # the file _id and return
                return
        else:
            # The file has already been created and
            # populated so no need to pursue
            # NOTE(review): handle may be None here, which would raise
            # AttributeError on close() -- confirm callers always pass it.
            handle.close()
            return
    def populate(self, handle):
        """Populates a settings file with the base parameters
        for running the application; closes *handle* and returns the cache.
        """
        base_dir = str(os.getcwd())
        base = {
            'base_path': base_dir,
            'data_path': os.path.join(base_dir, 'app\\data'),
            'email_class': 'zemailer.app.core.sender',
            'settings_class': 'zemailer.app.core.settings'
        }
        self.cache['settings'] = base
        self.cache['last_updated'] = self.serialize_date()
        # Empty the file. For whatever
        # reason, json does not do so
        # beforehand resulting
        # in an erroneous file
        handle.writelines('')
        # Populate the settings file
        # with the extra settings
        json.dump(self.cache, handle, indent=4)
        handle.close()
        return self.cache
    def serialize_date(self):
        """Serializes the current date in order to be stored
        in the backup file (revived later by the deserialize decorator).
        """
        return {'__class__': datetime.__name__, 'access_date': self.current_timestamp()}
    @staticmethod
    def current_timestamp():
        """Get the current date as a POSIX timestamp.
        """
        return datetime.datetime.now().timestamp()
    @deserialize
    def get(self, name):
        """Return the cached value stored under *name* (None when missing);
        the deserialize decorator resolves soft links and revives dates."""
        try:
            result = self.cache[name]
        except KeyError:
            result = None
        return result
    @staticmethod
    def load_module(dotted_path=None):
        """This loads some required modules in order
        to complete the file settings.

        NOTE(review): dotted_path is accepted but ignored, and `senders`
        is built but never returned -- confirm the intended behaviour.
        """
        module = import_module('zemailer.app.core.sender')
        senders = []
        for klass, value in module.__dict__.items():
            if isinstance(value, type):
                items = {klass: value}
                senders.append(items)
# Module-level singleton: settings loaded from the default PATH at import
# time, for use by the rest of the application.
initialized_settings = Settings(name_or_path=PATH)
| [
"secrets.token_hex",
"importlib.import_module",
"os.path.join",
"os.getcwd",
"datetime.datetime.now",
"datetime.date.fromtimestamp",
"json.load",
"json.dump"
] | [((193, 204), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (202, 204), False, 'import os\n'), ((1954, 1978), 'json.load', 'json.load', (['settings_file'], {}), '(settings_file)\n', (1963, 1978), False, 'import json\n'), ((4415, 4454), 'json.dump', 'json.dump', (['self.cache', 'handle'], {'indent': '(4)'}), '(self.cache, handle, indent=4)\n', (4424, 4454), False, 'import json\n'), ((5231, 5272), 'importlib.import_module', 'import_module', (['"""zemailer.app.core.sender"""'], {}), "('zemailer.app.core.sender')\n", (5244, 5272), False, 'from importlib import import_module\n'), ((3086, 3114), 'secrets.token_hex', 'secrets.token_hex', ([], {'nbytes': '(25)'}), '(nbytes=25)\n', (3103, 3114), False, 'import secrets\n'), ((3819, 3830), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3828, 3830), False, 'import os\n'), ((3909, 3944), 'os.path.join', 'os.path.join', (['base_dir', '"""app\\\\data"""'], {}), "(base_dir, 'app\\\\data')\n", (3921, 3944), False, 'import os\n'), ((4850, 4873), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4871, 4873), False, 'import datetime\n'), ((1232, 1289), 'datetime.date.fromtimestamp', 'datetime.date.fromtimestamp', (["searched_item['access_date']"], {}), "(searched_item['access_date'])\n", (1259, 1289), False, 'import datetime\n')] |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Install tensorflow_model_optimization."""
import datetime
import os
import sys
from setuptools import find_packages
from setuptools import setup
from setuptools.command.install import install as InstallCommandBase
from setuptools.dist import Distribution
# To enable importing version.py directly, we add its path to sys.path.
version_path = os.path.join(
os.path.dirname(__file__), 'tensorflow_model_optimization', 'python/core')
sys.path.append(version_path)
from version import __version__ # pylint: disable=g-import-not-at-top
# TODO(alanchiao): add explicit Tensorflow requirement once Tensorflow
# moves from a tf and tf-gpu packaging approach (where a user installs
# one of the two) to one where a user installs the tf package and then
# also installs the gpu package if they need gpu support. The latter allows
# us (and our dependents) to maintain a single package instead of two.
REQUIRED_PACKAGES = [
'numpy~=1.14',
'six~=1.10',
'enum34~=1.1;python_version<"3.4"',
'dm-tree~=0.1.1',
]
# The custom '--release' flag selects a stable build; strip it so setuptools
# does not see an unknown argument.
if '--release' in sys.argv:
  release = True
  sys.argv.remove('--release')
else:
  # Build a nightly package by default.
  release = False
if release:
  project_name = 'vai-q-tensorflow2'
else:
  # Nightly releases use date-based versioning of the form
  # '0.0.1.dev20180305'
  project_name = 'vai-q-tensorflow2-nightly'
  datestring = datetime.datetime.now().strftime('%Y%m%d')
  __version__ += datestring
class BinaryDistribution(Distribution):
  """This class is needed in order to create OS specific wheels."""

  def has_ext_modules(self):
    # NOTE(review): returning False produces a pure-Python ("any") wheel;
    # forcing a platform-specific wheel normally requires True -- confirm
    # this matches the packaging intent.
    return False
setup(
name=project_name,
version=__version__,
description='Xilinx Vitis AI Quantizer for Tensorflow 2.x. '
'This is customized based on tensorflow-model-optimization('
'https://github.com/tensorflow/model-optimization)'
'A suite of tools that users, both novice and advanced'
' can use to optimize machine learning models for deployment'
' and execution.',
author='<NAME>',
author_email='<EMAIL>',
license='Apache 2.0',
packages=find_packages(),
install_requires=REQUIRED_PACKAGES,
# Add in any packaged data.
include_package_data=True,
package_data={'': ['*.so', '*.json']},
exclude_package_data={'': ['BUILD', '*.h', '*.cc']},
zip_safe=False,
distclass=BinaryDistribution,
cmdclass={
'pip_pkg': InstallCommandBase,
},
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow model optimization machine learning',
)
| [
"setuptools.find_packages",
"sys.argv.remove",
"os.path.dirname",
"datetime.datetime.now",
"sys.path.append"
] | [((1129, 1158), 'sys.path.append', 'sys.path.append', (['version_path'], {}), '(version_path)\n', (1144, 1158), False, 'import sys\n'), ((1054, 1079), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1069, 1079), False, 'import os\n'), ((1761, 1789), 'sys.argv.remove', 'sys.argv.remove', (['"""--release"""'], {}), "('--release')\n", (1776, 1789), False, 'import sys\n'), ((2760, 2775), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (2773, 2775), False, 'from setuptools import find_packages\n'), ((2053, 2076), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2074, 2076), False, 'import datetime\n')] |
from collections import deque
class RecentCounter:
def __init__(self):
self.buffer = deque()
def ping(self, t: int) -> int:
while self.buffer and self.buffer[-1]<t-3000:
self.buffer.pop()
self.buffer.appendleft(t)
return len(self.buffer)
#Your RecentCounter object will be instantiated and called as such:
# Your RecentCounter object will be instantiated and called as such:
# obj = RecentCounter()
# param_1 = obj.ping(t) | [
"collections.deque"
] | [((97, 104), 'collections.deque', 'deque', ([], {}), '()\n', (102, 104), False, 'from collections import deque\n')] |
from unittest import TestCase
from necrypt import Necrypt
import os
class TestNecrypt(TestCase):
def test_unique_encryption(self):
n = Necrypt(1024)
plain = 'Text'
self.assertNotEqual(n.encrypt(plain), n.encrypt(plain))
def test_encrypt_decrypt(self):
n = Necrypt(1024)
plain = 'Text'
self.assertEqual(plain, n.decrypt(n.encrypt(plain)))
def test_sign_verify(self):
n = Necrypt(1024)
plain = 'Text'
signature = n.sign(plain + 's')
with self.assertRaises(ValueError) as context:
n.verify(plain, signature)
self.assertEqual('Invalid signature', str(context.exception))
def test_file_encryption_decryption(self):
plain_file_data = b'plain'
with open('plain_file', 'wb') as plain_file:
plain_file.write(plain_file_data)
n = Necrypt(1024)
n.encrypt_file('plain_file', 'cipher_file')
n.decrypt_file('cipher_file', 'decrypted_file')
with open('decrypted_file') as decrypted_file:
decrypted_file_data = decrypted_file.read()
files_to_remove = ['plain_file', 'cipher_file', 'decrypted_file']
for filename in files_to_remove:
if os.path.isfile(filename):
os.remove(filename)
self.assertEqual(plain_file_data, decrypted_file_data.encode())
def test_import_export_key(self):
n = Necrypt(1024)
plain = 'plain'
cipher = n.encrypt(plain)
n.export_key('key_file')
n.import_key('key_file')
decrypted_cipher = n.decrypt(cipher)
self.assertEqual(plain, decrypted_cipher)
| [
"os.path.isfile",
"necrypt.Necrypt",
"os.remove"
] | [((150, 163), 'necrypt.Necrypt', 'Necrypt', (['(1024)'], {}), '(1024)\n', (157, 163), False, 'from necrypt import Necrypt\n'), ((300, 313), 'necrypt.Necrypt', 'Necrypt', (['(1024)'], {}), '(1024)\n', (307, 313), False, 'from necrypt import Necrypt\n'), ((443, 456), 'necrypt.Necrypt', 'Necrypt', (['(1024)'], {}), '(1024)\n', (450, 456), False, 'from necrypt import Necrypt\n'), ((882, 895), 'necrypt.Necrypt', 'Necrypt', (['(1024)'], {}), '(1024)\n', (889, 895), False, 'from necrypt import Necrypt\n'), ((1435, 1448), 'necrypt.Necrypt', 'Necrypt', (['(1024)'], {}), '(1024)\n', (1442, 1448), False, 'from necrypt import Necrypt\n'), ((1249, 1273), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (1263, 1273), False, 'import os\n'), ((1291, 1310), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (1300, 1310), False, 'import os\n')] |
import os
import numpy as np
import pytest
from spectrum_overload import Spectrum
from mingle.utilities.spectrum_utils import load_spectrum, select_observation
@pytest.mark.parametrize("fname", ["HD30501-1-mixavg-tellcorr_1.fits", "HD30501-1-mixavg-h2otellcorr_1.fits"])
def test_load_spectrum(fname):
fname = os.path.join("tests", "testdata", "handy_spectra", fname)
results = load_spectrum(fname)
assert isinstance(results, Spectrum)
assert results.header["OBJECT"].upper() == "HD30501"
assert np.all(results.xaxis > 2110) # nm
assert np.all(results.xaxis < 2130) # nm
assert np.all(results.flux < 2)
assert np.all(results.flux >= 0)
def test_load_no_filename_fits():
"""Not a valid file."""
with pytest.raises(ValueError):
load_spectrum("")
@pytest.mark.parametrize("chip", [0, None, 5, 42])
def test_select_observation_with_bad_chip(chip):
with pytest.raises(ValueError):
select_observation("HD30501", "1", chip)
@pytest.mark.xfail()
def test_spectrum_plotter(spectra, label=None, show=False):
"""Plot a Spectrum object."""
assert False
@pytest.mark.xfail()
def test_plot_spectra(obs, model):
"""Plot two spectra."""
assert False
| [
"pytest.mark.xfail",
"mingle.utilities.spectrum_utils.select_observation",
"os.path.join",
"mingle.utilities.spectrum_utils.load_spectrum",
"pytest.mark.parametrize",
"pytest.raises",
"numpy.all"
] | [((165, 278), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""fname"""', "['HD30501-1-mixavg-tellcorr_1.fits', 'HD30501-1-mixavg-h2otellcorr_1.fits']"], {}), "('fname', ['HD30501-1-mixavg-tellcorr_1.fits',\n 'HD30501-1-mixavg-h2otellcorr_1.fits'])\n", (188, 278), False, 'import pytest\n'), ((803, 852), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""chip"""', '[0, None, 5, 42]'], {}), "('chip', [0, None, 5, 42])\n", (826, 852), False, 'import pytest\n'), ((990, 1009), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {}), '()\n', (1007, 1009), False, 'import pytest\n'), ((1124, 1143), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {}), '()\n', (1141, 1143), False, 'import pytest\n'), ((318, 375), 'os.path.join', 'os.path.join', (['"""tests"""', '"""testdata"""', '"""handy_spectra"""', 'fname'], {}), "('tests', 'testdata', 'handy_spectra', fname)\n", (330, 375), False, 'import os\n'), ((390, 410), 'mingle.utilities.spectrum_utils.load_spectrum', 'load_spectrum', (['fname'], {}), '(fname)\n', (403, 410), False, 'from mingle.utilities.spectrum_utils import load_spectrum, select_observation\n'), ((520, 548), 'numpy.all', 'np.all', (['(results.xaxis > 2110)'], {}), '(results.xaxis > 2110)\n', (526, 548), True, 'import numpy as np\n'), ((566, 594), 'numpy.all', 'np.all', (['(results.xaxis < 2130)'], {}), '(results.xaxis < 2130)\n', (572, 594), True, 'import numpy as np\n'), ((612, 636), 'numpy.all', 'np.all', (['(results.flux < 2)'], {}), '(results.flux < 2)\n', (618, 636), True, 'import numpy as np\n'), ((648, 673), 'numpy.all', 'np.all', (['(results.flux >= 0)'], {}), '(results.flux >= 0)\n', (654, 673), True, 'import numpy as np\n'), ((747, 772), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (760, 772), False, 'import pytest\n'), ((782, 799), 'mingle.utilities.spectrum_utils.load_spectrum', 'load_spectrum', (['""""""'], {}), "('')\n", (795, 799), False, 'from mingle.utilities.spectrum_utils import load_spectrum, 
select_observation\n'), ((911, 936), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (924, 936), False, 'import pytest\n'), ((946, 986), 'mingle.utilities.spectrum_utils.select_observation', 'select_observation', (['"""HD30501"""', '"""1"""', 'chip'], {}), "('HD30501', '1', chip)\n", (964, 986), False, 'from mingle.utilities.spectrum_utils import load_spectrum, select_observation\n')] |
# 开源项目 https://github.com/jones2000/HQChart
import sys
import codecs
import webbrowser
from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE
from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem
from umychart_webtemplate import *
from umychart_complier_pandas_help import JSComplierPandasHelper
class TestCase :
def __init__(self, code, option=SymbolOption()) :
self.Code=code
self.Option=option
def Run(self):
testCode=''
for item in self.Code:
testCode+=item
testCode+='\n'
result=JSComplier.Execute(testCode,self.Option)
return True if result else False
def Test_Tokenize():
code1='VARHIGH:=IF(VAR1<=REF(HH,-1),REF(H,BARSLAST(VAR1>=REF(HH,1))),DRAWNULL),COLORYELLOW;'
code2='VAR1=((SMA(MAX((CLOSE - LC),0),3,1) / SMA(ABS((CLOSE - LC)),3,1)) * 100);'
tokens=JSComplier.Tokenize(code1+code2)
return True if tokens else False
def Test_Parse():
code1='VARHIGH:=IF(VAR1<=REF(HH,-1),REF(H,BARSLAST(VAR1>=REF(HH,1))),DRAWNULL),COLORYELLOW;'
code2='VAR1=((SMA(MAX((CLOSE - LC),0),3,1) / SMA(ABS((CLOSE - LC)),3,1)) * 100);'
ast=JSComplier.Parse(code1+code2)
return True if ast else False
def Test_REF():
result=JSComplier.Execute('VAR2:C-REF(O,1)')
return True if result else False
def Test_Add() :
result=JSComplier.Execute('VAR2:C+100')
return True if result else False
def Test_Multiply():
code=[
'VAR2:C*O;',
"VAR3:100*100;"
]
result=JSComplier.Execute(code[0]+code[1])
return True if result else False
def Test_MAX_MIN():
code=[
'VAR2:MAX(C,O);',
"VAR3:MAX(C,100);",
"VAR4:MAX(100,C);",
'VAR5:MIN(C,O);',
'VAR5:MIN(C,4);'
]
result=JSComplier.Execute(code[0]+code[1]+code[2]+code[4]+code[3])
return True if result else False
def Test_MA() :
code=[
'VAR2:MA(C,5);',
'VAR3:MA(C,10);',
'VAR4:MA(C,15);',
'VAR4:MA(C,30);',
'VAR4:MA(C,33);',
]
result=JSComplier.Execute(code[0]+code[1]+code[2]+code[3])
return True if result else False
def Test_EMA():
code=[
'VAR2:EMA(C,5);',
'VAR3:EMA(C,10);',
'VAR4:EMA(C,15);',
'VAR4:EMA(C,30);',
]
result=JSComplier.Execute(code[0]+code[1]+code[2]+code[3])
return True if result else False
def Test_SMA():
code=[
'VAR2:SMA(C,5,10);',
'VAR3:SMA(C,10,10);',
'VAR4:SMA(C,15,10);',
'VAR4:SMA(C,30,10);',
]
result=JSComplier.Execute(code[0]+code[1]+code[2]+code[3])
return True if result else False
def Test_DMA():
code=[
'VAR3:C;',
'VAR2:DMA(C,O/C);',
]
result=JSComplier.Execute(code[0]+code[1])
return True if result else False
def Test_WMA() :
code=[
'VAR3:C;',
'VAR2:WMA(C,20);',
]
result=JSComplier.Execute(code[0]+code[1])
return True if result else False
def Test_SUMBARS() :
code=[
'VAR3:SUMBARS(C,O)',
'VAR2:C;',
]
option=SymbolOption()
option.Symbol='000001.sz'
option.HQDataType=HQ_DATA_TYPE.MINUTE_ID
result=JSComplier.Execute(code[0]+code[1],option)
return True if result else False
def Test_INDEX():
code=[
'VAR3:INDEXA;',
'VAR2:INDEXC;',
'VAR2:INDEXO;',
]
option=SymbolOption()
option.Period=5
result=JSComplier.Execute(code[0]+code[1]+code[2],option)
return True if result else False
def Test_COUNT():
code=[
'VAR3:COUNT(C,5);',
'VAR2:COUNT(O,10);',
'VAR2:COUNT(H,20);',
]
option=SymbolOption()
result=JSComplier.Execute(code[0]+code[1]+code[2],option)
return True if result else False
def Test_HHV_HHL() :
case =TestCase(
code=[
'VAR3:HHV(C,5);',
'VAR2:HHV(O,10);',
'VAR2:HHV(H,20);',
'VAR3:LLV(H,5);',
'VAR4:LLV(H,10);',
])
result=case.Run()
return result
def Test_STD():
case =TestCase(
code=[
'VAR3:STD(C,5);',
'VAR2:STD(O,10);',
'VAR2:STD(H,20);',
'VAR3:STD(H,15);',
'VAR4:STD(H,0);',
])
result=case.Run()
return result
def Test_AVEDEV():
case =TestCase(
code=[
'VAR3:AVEDEV(C,5);',
'VAR2:AVEDEV(O,10);',
'VAR2:AVEDEV(H,20);',
'VAR3:AVEDEV(H,15);',
'VAR4:AVEDEV(H,0);',
])
result=case.Run()
return result
def Test_CROSS() :
case =TestCase(
code=[
'VAR3:CROSS(C,O);',
'VAR2:CROSS(O,10);',
'VAR2:CROSS(O,C);',
])
result=case.Run()
return result
def Test_MULAR() :
case =TestCase(
code=[
'VAR3:MULAR(C,5);',
'VAR2:MULAR(O,10);',
'VAR2:MULAR(O,30);',
])
result=case.Run()
return result
def Test_SUM() :
case =TestCase(
code=[
'VAR3:SUM(C,5);',
'VAR2:SUM(O,0);',
'VAR2:BARSCOUNT(O);',
])
result=case.Run()
return result
def Test_DEVSQ():
case =TestCase(
code=[
'VAR3:DEVSQ(C,5);',
'VAR2:DEVSQ(O,0);',
'VAR2:DEVSQ(O,5);',
])
result=case.Run()
return result
def Test_FINANCE(): # 财务数据测试
case =TestCase(
code=[
'DRAWLINE(HIGH>=HHV(HIGH,20),HIGH,LOW<=LLV(LOW,20),LOW,1);'
'VAR4:CAPITAL;',
'VAR3:FINANCE(32);',
'VAR2:FINANCE(1);',
'VAR2:MA(FINANCE(33),5);',
"DRAWTEXT(C<=O,O,'xxxx');",
'STICKLINE(CLOSE>OPEN, CLOSE, OPEN, 0.8, 1);',
'DRAWNUMBER(CLOSE/OPEN>1.0001,LOW,C);',
'DRAWNUMBER(CLOSE/OPEN>1.0001,LOW,33);',
'DRAWICON(CLOSE>OPEN,LOW,1);',
'PLOYLINE(HIGH>=HHV(HIGH,20),HIGH);',
'CYW: SUM(VAR4,10)/10000, COLORSTICK;',
"DRAWCHANNEL(C>O,C,O,'rgb(20,20,20)',1,'3,4','rgb(40,40,40)');",
'SAR(10,2,20);',
'BACKSET(CLOSE>OPEN,2);',
'TT:DYNAINFO(13);',
'T2:MARGIN(1);',
'T5:MARGIN(6);',
"上涨家数:UPCOUNT('CNA.CI'),COLORRED;",
"下跌家数:DOWNCOUNT('CNA.CI'),COLORGREEN;",
"TTTT:NEWS(2)+NEWS(4);",
"TTT2:NEWS(1);",
'TT4:WINNER(CLOSE);',
'TT5:COST(10);',
])
result=case.Run()
return result
def Test_ScriptIndexConsole():
# 创建脚本, 及参数
scpritInfo=ScriptIndexItem(name='我的MA指标', id=888888,
script='MA1:MA(CLOSE,M1);\n' # 指标脚本代码
'MA2:MA(CLOSE,M2);\n'
'MA3:MA(CLOSE,M3);',
args=[ ArgumentItem(name='M1', value=5), ArgumentItem(name='M2', value=10), ArgumentItem(name='M3', value=20) ] # 参数
)
indexConsole = ScriptIndexConsole(scpritInfo)
option = SymbolOption(
symbol='000001.sz',
right=1, # 复权 0 不复权 1 前复权 2 后复权
period=0, # 周期 0=日线 1=周线 2=月线 3=年线 4=1分钟 5=5分钟 6=15分钟 7=30分钟 8=60分钟
request=RequestOption(maxDataCount=500,maxMinuteDayCount=3)
)
result=indexConsole.ExecuteScript(option)
if result.Error :
return
print('run successfully.')
JSComplierPandasHelper.ToDateTimeSeries(result) # 转化为pandas Series 数据格式
JSComplierPandasHelper.ToDataFrame(result) # 转化为pandas DataFrame 数据格式
jsonData=result.ToJson()
varName='jsonData' # 数据变量名字
HQChartOption= """g_KLineOption={
Symbol:'%(symbol)s', //股票代码
Right:%(right)d, //复权
Period:%(period)d, //周期
Windows:
[
{ Modify:false,Change:false,
Local:
{
Data:%(varName)s, //py执行以后的json数据
Type:'LocalJsonDataIndex' ,
Name:'%(name)s', //指标名字
Args:[ //指标参数
{ Name: '%(arg1)s', Value: %(argvalue1)d },
{ Name: '%(arg2)s', Value: %(argvalue2)d },
{ Name: '%(arg3)s', Value: %(argvalue3)d }]
}
},
//{Index:"VOL", Modify:false,Change:false},
]
}
""" %{"symbol":option.Symbol,'right':option.Right, 'period':option.Period, 'varName':varName, 'name':scpritInfo.Name,
'arg1':scpritInfo.Arguments[0].Name, 'argvalue1': scpritInfo.Arguments[0].Value,
'arg2':scpritInfo.Arguments[1].Name, 'argvalue2': scpritInfo.Arguments[1].Value,
'arg3':scpritInfo.Arguments[2].Name, 'argvalue3': scpritInfo.Arguments[2].Value }
localJsonData= varName + '=' + jsonData + '\n'
filePath='data.html'
# 生成图形化页面
with codecs.open(filePath,'w',"utf-8") as file:
file.write(HTML_PART1)
file.write(localJsonData)
file.write(HQChartOption)
file.write(HTML_PART_END)
file.close()
webbrowser.open(filePath,new = 1)
#Test_Add()
#Test_Multiply()
#Test_MAX_MIN()
#Test_FINANCE()
Test_ScriptIndexConsole() | [
"umychart_complier_jscomplier.JSComplier.Tokenize",
"umychart_complier_jscomplier.ScriptIndexConsole",
"webbrowser.open",
"umychart_complier_jscomplier.ArgumentItem",
"umychart_complier_jscomplier.JSComplier.Parse",
"umychart_complier_pandas_help.JSComplierPandasHelper.ToDateTimeSeries",
"umychart_compl... | [((999, 1033), 'umychart_complier_jscomplier.JSComplier.Tokenize', 'JSComplier.Tokenize', (['(code1 + code2)'], {}), '(code1 + code2)\n', (1018, 1033), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((1285, 1316), 'umychart_complier_jscomplier.JSComplier.Parse', 'JSComplier.Parse', (['(code1 + code2)'], {}), '(code1 + code2)\n', (1301, 1316), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((1381, 1418), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['"""VAR2:C-REF(O,1)"""'], {}), "('VAR2:C-REF(O,1)')\n", (1399, 1418), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((1489, 1521), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['"""VAR2:C+100"""'], {}), "('VAR2:C+100')\n", (1507, 1521), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((1668, 1705), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['(code[0] + code[1])'], {}), '(code[0] + code[1])\n', (1686, 1705), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((1942, 2009), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['(code[0] + code[1] + code[2] + code[4] + code[3])'], {}), '(code[0] + code[1] + code[2] + code[4] + code[3])\n', (1960, 2009), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((2234, 2291), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['(code[0] + code[1] + code[2] + code[3])'], {}), '(code[0] + code[1] + code[2] + code[3])\n', (2252, 2291), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((2492, 2549), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['(code[0] + code[1] + code[2] + code[3])'], {}), 
'(code[0] + code[1] + code[2] + code[3])\n', (2510, 2549), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((2762, 2819), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['(code[0] + code[1] + code[2] + code[3])'], {}), '(code[0] + code[1] + code[2] + code[3])\n', (2780, 2819), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((2957, 2994), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['(code[0] + code[1])'], {}), '(code[0] + code[1])\n', (2975, 2994), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((3136, 3173), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['(code[0] + code[1])'], {}), '(code[0] + code[1])\n', (3154, 3173), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((3323, 3337), 'umychart_complier_jscomplier.SymbolOption', 'SymbolOption', ([], {}), '()\n', (3335, 3337), False, 'from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem\n'), ((3427, 3472), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['(code[0] + code[1])', 'option'], {}), '(code[0] + code[1], option)\n', (3445, 3472), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((3644, 3658), 'umychart_complier_jscomplier.SymbolOption', 'SymbolOption', ([], {}), '()\n', (3656, 3658), False, 'from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem\n'), ((3694, 3749), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['(code[0] + code[1] + code[2])', 'option'], {}), '(code[0] + code[1] + code[2], option)\n', (3712, 3749), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, 
HQ_DATA_TYPE\n'), ((3933, 3947), 'umychart_complier_jscomplier.SymbolOption', 'SymbolOption', ([], {}), '()\n', (3945, 3947), False, 'from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem\n'), ((3962, 4017), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['(code[0] + code[1] + code[2])', 'option'], {}), '(code[0] + code[1] + code[2], option)\n', (3980, 4017), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE\n'), ((7110, 7140), 'umychart_complier_jscomplier.ScriptIndexConsole', 'ScriptIndexConsole', (['scpritInfo'], {}), '(scpritInfo)\n', (7128, 7140), False, 'from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem\n'), ((7525, 7572), 'umychart_complier_pandas_help.JSComplierPandasHelper.ToDateTimeSeries', 'JSComplierPandasHelper.ToDateTimeSeries', (['result'], {}), '(result)\n', (7564, 7572), False, 'from umychart_complier_pandas_help import JSComplierPandasHelper\n'), ((7602, 7644), 'umychart_complier_pandas_help.JSComplierPandasHelper.ToDataFrame', 'JSComplierPandasHelper.ToDataFrame', (['result'], {}), '(result)\n', (7636, 7644), False, 'from umychart_complier_pandas_help import JSComplierPandasHelper\n'), ((9349, 9381), 'webbrowser.open', 'webbrowser.open', (['filePath'], {'new': '(1)'}), '(filePath, new=1)\n', (9364, 9381), False, 'import webbrowser\n'), ((467, 481), 'umychart_complier_jscomplier.SymbolOption', 'SymbolOption', ([], {}), '()\n', (479, 481), False, 'from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem\n'), ((684, 725), 'umychart_complier_jscomplier.JSComplier.Execute', 'JSComplier.Execute', (['testCode', 'self.Option'], {}), '(testCode, self.Option)\n', (702, 725), False, 'from umychart_complier_jscomplier import JSComplier, SymbolOption, 
HQ_DATA_TYPE\n'), ((9140, 9175), 'codecs.open', 'codecs.open', (['filePath', '"""w"""', '"""utf-8"""'], {}), "(filePath, 'w', 'utf-8')\n", (9151, 9175), False, 'import codecs\n'), ((7335, 7387), 'umychart_complier_jscomplier.RequestOption', 'RequestOption', ([], {'maxDataCount': '(500)', 'maxMinuteDayCount': '(3)'}), '(maxDataCount=500, maxMinuteDayCount=3)\n', (7348, 7387), False, 'from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem\n'), ((6967, 6999), 'umychart_complier_jscomplier.ArgumentItem', 'ArgumentItem', ([], {'name': '"""M1"""', 'value': '(5)'}), "(name='M1', value=5)\n", (6979, 6999), False, 'from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem\n'), ((7001, 7034), 'umychart_complier_jscomplier.ArgumentItem', 'ArgumentItem', ([], {'name': '"""M2"""', 'value': '(10)'}), "(name='M2', value=10)\n", (7013, 7034), False, 'from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem\n'), ((7036, 7069), 'umychart_complier_jscomplier.ArgumentItem', 'ArgumentItem', ([], {'name': '"""M3"""', 'value': '(20)'}), "(name='M3', value=20)\n", (7048, 7069), False, 'from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
some helper funcs
"""
import json
import logging
import os
import site
import subprocess
import sys
import tempfile
import exifread
from PIL import Image
PACKAGE_NAME = "einguteswerkzeug"
# --- configure logging
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
if log.hasHandlers():
log.andlers.clear()
handler = logging.StreamHandler() # console-handler
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
log.propagate=False
# ---
def get_resource_file(basefile, PACKAGE_NAME = PACKAGE_NAME):
"""
gets the fully qualified name of a resource file
"""
fqn = os.path.join(os.path.dirname(os.path.realpath(__file__)), basefile)
if not os.path.isfile(fqn):
# when installed via pip the package_data (see MANIFEST.in, setup.py)
# should be located somewhere in site-packages path of the (virtual-)env
for dir in site.getsitepackages():
fqn = dir + "/" + PACKAGE_NAME + "/" + basefile
if os.path.isfile(fqn):
return fqn
break
return fqn
def show_error(msg):
"""
Show an error message and exit
"""
log.critical("Error: %s" % msg)
sys.exit(1)
def editor(text=None, default_editor="vi"):
"""
Open an editor (environment variable EDITOR).
Allows to edit (given) text like the functionality
that happens when doing 'git commit' on the console.
args:
text (string): the text to edit
optional args:
default_editor (string) : editor to use if environment varibale
EDITOR is not set
returns:
string: the edited text
"""
fd, fname = tempfile.mkstemp()
with os.fdopen(fd, 'w') as f:
f.write(text)
cmd = os.environ.get('EDITOR', default_editor) + ' ' + fname
subprocess.call(cmd, shell=True)
with open(fname, 'r') as f:
res = f.read()
os.unlink(fname)
return res
def confirm_prompt(question: str, yes_by_default = True) -> bool:
"""
Shows a yes/no question.
example:
>>> reply = confirm_prompt("Are you sure?")
>>> print(reply)
args:
yes_by_default (bool) : if True just hitting return equals yes.
returns:
bool : True if ansered with yes. False if answerded with no.
"""
replies_prompt = "y/n"
replies_values = ["y", "n"]
yes = ("y")
if yes_by_default:
replies_values.append("")
replies_prompt = "Y/n"
yes = ("", "y")
reply = None
while reply not in ("", "y", "n"):
reply = input(f"{question} ({replies_prompt}): ").lower()
return (reply in yes)
| [
"logging.getLogger",
"logging.StreamHandler",
"logging.Formatter",
"os.environ.get",
"os.path.isfile",
"os.path.realpath",
"subprocess.call",
"os.unlink",
"sys.exit",
"os.fdopen",
"site.getsitepackages",
"tempfile.mkstemp"
] | [((272, 299), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (289, 299), False, 'import logging\n'), ((383, 406), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (404, 406), False, 'import logging\n'), ((437, 510), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (454, 510), False, 'import logging\n'), ((1310, 1321), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1318, 1321), False, 'import sys\n'), ((1804, 1822), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (1820, 1822), False, 'import tempfile\n'), ((1948, 1980), 'subprocess.call', 'subprocess.call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (1963, 1980), False, 'import subprocess\n'), ((2040, 2056), 'os.unlink', 'os.unlink', (['fname'], {}), '(fname)\n', (2049, 2056), False, 'import os\n'), ((814, 833), 'os.path.isfile', 'os.path.isfile', (['fqn'], {}), '(fqn)\n', (828, 833), False, 'import os\n'), ((1013, 1035), 'site.getsitepackages', 'site.getsitepackages', ([], {}), '()\n', (1033, 1035), False, 'import site\n'), ((1832, 1850), 'os.fdopen', 'os.fdopen', (['fd', '"""w"""'], {}), "(fd, 'w')\n", (1841, 1850), False, 'import os\n'), ((764, 790), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (780, 790), False, 'import os\n'), ((1112, 1131), 'os.path.isfile', 'os.path.isfile', (['fqn'], {}), '(fqn)\n', (1126, 1131), False, 'import os\n'), ((1889, 1929), 'os.environ.get', 'os.environ.get', (['"""EDITOR"""', 'default_editor'], {}), "('EDITOR', default_editor)\n", (1903, 1929), False, 'import os\n')] |
__author__ = 'User'
from pytest_bdd import given, when, then
from model.contact import Contact
import random
@given('a contact list')
def contact_list(db):
return db.get_contact_list()
@given('a contact with <firstname>, <lastname>, <address> and <mobilephone>')
def new_contact(firstname, lastname, address, mobilephone):
return Contact(firstname=firstname, lastname=lastname, address=address, mobilephone=mobilephone)
@when('I add the contact to the list')
def add_new_contact(app, new_contact):
app.contact.add_new(new_contact)
@then('the new contact list is equal to the old contact list with the added contact')
def verify_contact_added(db, contact_list, new_contact, app, check_ui):
app.contact.check_add_new_success(db, new_contact, contact_list, check_ui)
@given('a non-empty contact list')
def non_empty_contact_list(app, db):
if len(db.get_contact_list()) < 0:
app.group.create(Contact(firstname='some firstname'))
return db.get_contact_list()
@given('a random contact from the list')
def random_contact(non_empty_contact_list):
return random.choice(non_empty_contact_list)
@when('I delete the contact from the list')
def delete_contact(app, random_contact):
app.contact.delete_contact_by_id(random_contact.id)
@then('the new contact list is equal to the old contact list without the contact')
def verify_contact_deleted(db, non_empty_contact_list, random_contact, app, check_ui):
app.contact.check_delete_success(db, random_contact, non_empty_contact_list, check_ui)
@when('I modify the contact from the list')
def modify_contact(app, new_contact, random_contact):
new_contact.id = random_contact.id
app.contact.modify_contact_by_id(new_contact)
@then('the new contact list is equal to the old contact list with the modified contact')
def verify_contact_deleted(db, non_empty_contact_list, new_contact, random_contact ,app, check_ui):
non_empty_contact_list.remove(random_contact)
random_contact.firstname = new_contact.firstname
random_contact.lastname = new_contact.lastname
random_contact.address = new_contact.address
random_contact.mobilephone =new_contact.mobilephone
non_empty_contact_list.append(random_contact)
app.contact.check_modify_contact_success(db, non_empty_contact_list, check_ui)
| [
"random.choice",
"pytest_bdd.then",
"pytest_bdd.when",
"pytest_bdd.given",
"model.contact.Contact"
] | [((112, 135), 'pytest_bdd.given', 'given', (['"""a contact list"""'], {}), "('a contact list')\n", (117, 135), False, 'from pytest_bdd import given, when, then\n'), ((193, 269), 'pytest_bdd.given', 'given', (['"""a contact with <firstname>, <lastname>, <address> and <mobilephone>"""'], {}), "('a contact with <firstname>, <lastname>, <address> and <mobilephone>')\n", (198, 269), False, 'from pytest_bdd import given, when, then\n'), ((433, 470), 'pytest_bdd.when', 'when', (['"""I add the contact to the list"""'], {}), "('I add the contact to the list')\n", (437, 470), False, 'from pytest_bdd import given, when, then\n'), ((549, 643), 'pytest_bdd.then', 'then', (['"""the new contact list is equal to the old contact list with the added contact"""'], {}), "(\n 'the new contact list is equal to the old contact list with the added contact'\n )\n", (553, 643), False, 'from pytest_bdd import given, when, then\n'), ((787, 820), 'pytest_bdd.given', 'given', (['"""a non-empty contact list"""'], {}), "('a non-empty contact list')\n", (792, 820), False, 'from pytest_bdd import given, when, then\n'), ((994, 1033), 'pytest_bdd.given', 'given', (['"""a random contact from the list"""'], {}), "('a random contact from the list')\n", (999, 1033), False, 'from pytest_bdd import given, when, then\n'), ((1129, 1171), 'pytest_bdd.when', 'when', (['"""I delete the contact from the list"""'], {}), "('I delete the contact from the list')\n", (1133, 1171), False, 'from pytest_bdd import given, when, then\n'), ((1271, 1362), 'pytest_bdd.then', 'then', (['"""the new contact list is equal to the old contact list without the contact"""'], {}), "(\n 'the new contact list is equal to the old contact list without the contact'\n )\n", (1275, 1362), False, 'from pytest_bdd import given, when, then\n'), ((1533, 1575), 'pytest_bdd.when', 'when', (['"""I modify the contact from the list"""'], {}), "('I modify the contact from the list')\n", (1537, 1575), False, 'from pytest_bdd import given, when, 
then\n'), ((1721, 1818), 'pytest_bdd.then', 'then', (['"""the new contact list is equal to the old contact list with the modified contact"""'], {}), "(\n 'the new contact list is equal to the old contact list with the modified contact'\n )\n", (1725, 1818), False, 'from pytest_bdd import given, when, then\n'), ((341, 434), 'model.contact.Contact', 'Contact', ([], {'firstname': 'firstname', 'lastname': 'lastname', 'address': 'address', 'mobilephone': 'mobilephone'}), '(firstname=firstname, lastname=lastname, address=address,\n mobilephone=mobilephone)\n', (348, 434), False, 'from model.contact import Contact\n'), ((1089, 1126), 'random.choice', 'random.choice', (['non_empty_contact_list'], {}), '(non_empty_contact_list)\n', (1102, 1126), False, 'import random\n'), ((922, 957), 'model.contact.Contact', 'Contact', ([], {'firstname': '"""some firstname"""'}), "(firstname='some firstname')\n", (929, 957), False, 'from model.contact import Contact\n')] |
from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir
from shminspector.util.cmd import try_capture_output, is_command
def collect_docker_files(user_home_dir_path, target_dir_path, ctx):
if is_command("docker"):
ctx.logger.info("Collecting Docker information...")
mkdir(target_dir_path)
_collect_version(target_dir_path, ctx)
_collect_info(target_dir_path, ctx)
if ctx.snapshot.docker_configured():
ctx.logger.info("Collecting Docker files...")
_collect_config_files(target_dir_path, user_home_dir_path, ctx)
else:
ctx.logger.warn("'docker' not installed")
def _collect_config_files(target_dir_path, user_home_dir_path, ctx):
ctx.logger.progress("Collecting configuration files...")
source_settings_file_path = file_path(user_home_dir_path, "Library/Group Containers/group.com.docker/settings.json")
target_settings_file_path = file_path(target_dir_path, "settings.json")
try_copyfile(source_settings_file_path, target_settings_file_path)
source_docker_config_file_path = file_path(user_home_dir_path, ".docker/config.json")
target_docker_config_file_path = file_path(target_dir_path, "config.json")
try_copyfile(source_docker_config_file_path, target_docker_config_file_path)
source_docker_daemon_file_path = file_path(user_home_dir_path, ".docker/daemon.json")
target_docker_daemon_file_path = file_path(target_dir_path, "daemon.json")
try_copyfile(source_docker_daemon_file_path, target_docker_daemon_file_path)
def _collect_version(target_dir, ctx):
ctx.logger.progress("Collecting version information...")
return try_capture_output(cmd=["docker", "version"],
target_dir_path=target_dir,
file_name="docker_version.txt",
logger=ctx.logger)
def _collect_info(target_dir, ctx):
ctx.logger.progress("Collecting docker information...")
return try_capture_output(cmd=["docker", "info"],
target_dir_path=target_dir,
file_name="docker_info.txt",
logger=ctx.logger)
| [
"dumpshmamp.collectors.files.try_copyfile",
"shminspector.util.cmd.is_command",
"dumpshmamp.collectors.files.file_path",
"shminspector.util.cmd.try_capture_output",
"dumpshmamp.collectors.files.mkdir"
] | [((213, 233), 'shminspector.util.cmd.is_command', 'is_command', (['"""docker"""'], {}), "('docker')\n", (223, 233), False, 'from shminspector.util.cmd import try_capture_output, is_command\n'), ((824, 916), 'dumpshmamp.collectors.files.file_path', 'file_path', (['user_home_dir_path', '"""Library/Group Containers/group.com.docker/settings.json"""'], {}), "(user_home_dir_path,\n 'Library/Group Containers/group.com.docker/settings.json')\n", (833, 916), False, 'from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir\n'), ((945, 988), 'dumpshmamp.collectors.files.file_path', 'file_path', (['target_dir_path', '"""settings.json"""'], {}), "(target_dir_path, 'settings.json')\n", (954, 988), False, 'from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir\n'), ((993, 1059), 'dumpshmamp.collectors.files.try_copyfile', 'try_copyfile', (['source_settings_file_path', 'target_settings_file_path'], {}), '(source_settings_file_path, target_settings_file_path)\n', (1005, 1059), False, 'from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir\n'), ((1098, 1150), 'dumpshmamp.collectors.files.file_path', 'file_path', (['user_home_dir_path', '""".docker/config.json"""'], {}), "(user_home_dir_path, '.docker/config.json')\n", (1107, 1150), False, 'from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir\n'), ((1188, 1229), 'dumpshmamp.collectors.files.file_path', 'file_path', (['target_dir_path', '"""config.json"""'], {}), "(target_dir_path, 'config.json')\n", (1197, 1229), False, 'from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir\n'), ((1234, 1310), 'dumpshmamp.collectors.files.try_copyfile', 'try_copyfile', (['source_docker_config_file_path', 'target_docker_config_file_path'], {}), '(source_docker_config_file_path, target_docker_config_file_path)\n', (1246, 1310), False, 'from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir\n'), ((1349, 1401), 'dumpshmamp.collectors.files.file_path', 
'file_path', (['user_home_dir_path', '""".docker/daemon.json"""'], {}), "(user_home_dir_path, '.docker/daemon.json')\n", (1358, 1401), False, 'from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir\n'), ((1439, 1480), 'dumpshmamp.collectors.files.file_path', 'file_path', (['target_dir_path', '"""daemon.json"""'], {}), "(target_dir_path, 'daemon.json')\n", (1448, 1480), False, 'from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir\n'), ((1485, 1561), 'dumpshmamp.collectors.files.try_copyfile', 'try_copyfile', (['source_docker_daemon_file_path', 'target_docker_daemon_file_path'], {}), '(source_docker_daemon_file_path, target_docker_daemon_file_path)\n', (1497, 1561), False, 'from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir\n'), ((1675, 1803), 'shminspector.util.cmd.try_capture_output', 'try_capture_output', ([], {'cmd': "['docker', 'version']", 'target_dir_path': 'target_dir', 'file_name': '"""docker_version.txt"""', 'logger': 'ctx.logger'}), "(cmd=['docker', 'version'], target_dir_path=target_dir,\n file_name='docker_version.txt', logger=ctx.logger)\n", (1693, 1803), False, 'from shminspector.util.cmd import try_capture_output, is_command\n'), ((1999, 2121), 'shminspector.util.cmd.try_capture_output', 'try_capture_output', ([], {'cmd': "['docker', 'info']", 'target_dir_path': 'target_dir', 'file_name': '"""docker_info.txt"""', 'logger': 'ctx.logger'}), "(cmd=['docker', 'info'], target_dir_path=target_dir,\n file_name='docker_info.txt', logger=ctx.logger)\n", (2017, 2121), False, 'from shminspector.util.cmd import try_capture_output, is_command\n'), ((304, 326), 'dumpshmamp.collectors.files.mkdir', 'mkdir', (['target_dir_path'], {}), '(target_dir_path)\n', (309, 326), False, 'from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir\n')] |
import cv2
import numpy as np
# Normal routines
img = cv2.imread('image3.png')
scale_percent = 30 # percent of original size
width = int(img.shape[1] * scale_percent / 100)
height = int(img.shape[0] * scale_percent / 100)
dim = (width, height)
# resize image
img = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
ret,thresh = cv2.threshold(gray,50,255,1)
# Remove some small noise if any.
dilate = cv2.dilate(thresh,None)
erode = cv2.erode(dilate,None)
# Find contours with cv2.RETR_CCOMP
contours,hierarchy = cv2.findContours(erode,cv2.RETR_CCOMP,cv2.CHAIN_APPROX_SIMPLE)
for i,cnt in enumerate(contours):
# Check if it is an external contour and its area is more than 100
if hierarchy[0,i,3] == -1 and cv2.contourArea(cnt)>100:
x,y,w,h = cv2.boundingRect(cnt)
if w > 65 and w < 150 and h > 65 and h < 150:
cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,0),2)
# m = cv2.moments(cnt)
# cx,cy = m['m10']/m['m00'],m['m01']/m['m00']
# cv2.circle(img,(int(cx),int(cy)),3,255,-1)
cv2.imshow('img',img)
# cv2.imwrite('sofsqure.png',img)
cv2.waitKey(0)
cv2.destroyAllWindows() | [
"cv2.rectangle",
"cv2.resize",
"cv2.threshold",
"cv2.erode",
"cv2.imshow",
"cv2.contourArea",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.cvtColor",
"cv2.findContours",
"cv2.dilate",
"cv2.imread",
"cv2.boundingRect"
] | [((55, 79), 'cv2.imread', 'cv2.imread', (['"""image3.png"""'], {}), "('image3.png')\n", (65, 79), False, 'import cv2\n'), ((269, 319), 'cv2.resize', 'cv2.resize', (['img', 'dim'], {'interpolation': 'cv2.INTER_AREA'}), '(img, dim, interpolation=cv2.INTER_AREA)\n', (279, 319), False, 'import cv2\n'), ((331, 368), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (343, 368), False, 'import cv2\n'), ((382, 413), 'cv2.threshold', 'cv2.threshold', (['gray', '(50)', '(255)', '(1)'], {}), '(gray, 50, 255, 1)\n', (395, 413), False, 'import cv2\n'), ((455, 479), 'cv2.dilate', 'cv2.dilate', (['thresh', 'None'], {}), '(thresh, None)\n', (465, 479), False, 'import cv2\n'), ((487, 510), 'cv2.erode', 'cv2.erode', (['dilate', 'None'], {}), '(dilate, None)\n', (496, 510), False, 'import cv2\n'), ((568, 632), 'cv2.findContours', 'cv2.findContours', (['erode', 'cv2.RETR_CCOMP', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(erode, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)\n', (584, 632), False, 'import cv2\n'), ((1091, 1113), 'cv2.imshow', 'cv2.imshow', (['"""img"""', 'img'], {}), "('img', img)\n", (1101, 1113), False, 'import cv2\n'), ((1147, 1161), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1158, 1161), False, 'import cv2\n'), ((1162, 1185), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1183, 1185), False, 'import cv2\n'), ((815, 836), 'cv2.boundingRect', 'cv2.boundingRect', (['cnt'], {}), '(cnt)\n', (831, 836), False, 'import cv2\n'), ((771, 791), 'cv2.contourArea', 'cv2.contourArea', (['cnt'], {}), '(cnt)\n', (786, 791), False, 'import cv2\n'), ((904, 962), 'cv2.rectangle', 'cv2.rectangle', (['img', '(x, y)', '(x + w, y + h)', '(0, 255, 0)', '(2)'], {}), '(img, (x, y), (x + w, y + h), (0, 255, 0), 2)\n', (917, 962), False, 'import cv2\n')] |
import os
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from scpca_portal.config.common import Common
class Production(Common):
INSTALLED_APPS = Common.INSTALLED_APPS
SECRET_KEY = os.getenv("DJANGO_SECRET_KEY")
# Site
# https://docs.djangoproject.com/en/2.0/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
INSTALLED_APPS += ("gunicorn",)
UPDATE_IMPORTED_DATA = True
# AWS
AWS_REGION = os.getenv("AWS_REGION")
# AWS S3
AWS_S3_BUCKET_NAME = os.getenv("AWS_S3_BUCKET_NAME")
# https://developers.google.com/web/fundamentals/performance/optimizing-content-efficiency/http-caching#cache-control
# Response can be cached by browser and any intermediary caches (i.e. it is "public") for up to 1 day
# 86400 = (60 seconds x 60 minutes x 24 hours)
AWS_HEADERS = {
"Cache-Control": "max-age=86400, s-maxage=86400, must-revalidate",
}
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": os.getenv("DATABASE_NAME"),
"USER": os.getenv("DATABASE_USER"),
"PASSWORD": os.getenv("DATABASE_PASSWORD"),
"HOST": os.getenv("DATABASE_HOST"),
"PORT": os.getenv("DATABASE_PORT"),
}
}
sentry_sdk.init(
dsn=os.getenv("SENTRY_IO_URL"),
integrations=[DjangoIntegration()],
traces_sample_rate=1.0,
environment=os.getenv("SENTRY_ENV"),
# If you wish to associate users to errors (assuming you are using
# django.contrib.auth) you may enable sending PII data.
send_default_pii=True,
)
| [
"sentry_sdk.integrations.django.DjangoIntegration",
"os.getenv"
] | [((225, 255), 'os.getenv', 'os.getenv', (['"""DJANGO_SECRET_KEY"""'], {}), "('DJANGO_SECRET_KEY')\n", (234, 255), False, 'import os\n'), ((462, 485), 'os.getenv', 'os.getenv', (['"""AWS_REGION"""'], {}), "('AWS_REGION')\n", (471, 485), False, 'import os\n'), ((525, 556), 'os.getenv', 'os.getenv', (['"""AWS_S3_BUCKET_NAME"""'], {}), "('AWS_S3_BUCKET_NAME')\n", (534, 556), False, 'import os\n'), ((1062, 1088), 'os.getenv', 'os.getenv', (['"""DATABASE_NAME"""'], {}), "('DATABASE_NAME')\n", (1071, 1088), False, 'import os\n'), ((1110, 1136), 'os.getenv', 'os.getenv', (['"""DATABASE_USER"""'], {}), "('DATABASE_USER')\n", (1119, 1136), False, 'import os\n'), ((1162, 1192), 'os.getenv', 'os.getenv', (['"""DATABASE_PASSWORD"""'], {}), "('DATABASE_PASSWORD')\n", (1171, 1192), False, 'import os\n'), ((1214, 1240), 'os.getenv', 'os.getenv', (['"""DATABASE_HOST"""'], {}), "('DATABASE_HOST')\n", (1223, 1240), False, 'import os\n'), ((1262, 1288), 'os.getenv', 'os.getenv', (['"""DATABASE_PORT"""'], {}), "('DATABASE_PORT')\n", (1271, 1288), False, 'import os\n'), ((1340, 1366), 'os.getenv', 'os.getenv', (['"""SENTRY_IO_URL"""'], {}), "('SENTRY_IO_URL')\n", (1349, 1366), False, 'import os\n'), ((1464, 1487), 'os.getenv', 'os.getenv', (['"""SENTRY_ENV"""'], {}), "('SENTRY_ENV')\n", (1473, 1487), False, 'import os\n'), ((1390, 1409), 'sentry_sdk.integrations.django.DjangoIntegration', 'DjangoIntegration', ([], {}), '()\n', (1407, 1409), False, 'from sentry_sdk.integrations.django import DjangoIntegration\n')] |
import gc
import math
import numpy as np
import torch
import torch.nn.functional as F
import timeit
import time
from gck_layer import GCK3x3Layer
kernel_dim = 3
def tensors_equal(a,b):
b = torch.allclose(a, b, atol=0.01)
if (b):
print('same: True')
else:
print('Same: False (diff:', ((a-b).max()), ')')
def compareResults(batch_size: int, in_channels: int, out_channels: int, input_dim: int):
input = torch.randint(0,100, (batch_size, in_channels, input_dim, input_dim), dtype=torch.float32)
kernels = torch.ones(out_channels, in_channels, kernel_dim, kernel_dim, dtype=torch.float32)
expected = F.conv2d(input, kernels)
layer = GCK3x3Layer(in_channels, out_channels, 3, False, input_dim - 2, kernels)
result = layer.forward(input)
tensors_equal(expected, result)
lst = [
(1,1,8,1024),
(1,1,16,256),
(1,1,64,512),
(1,1,128,512),
(1,8,16,128)
]
for batch_size, in_channels, out_channels, input_dim in lst:
compareResults(batch_size, in_channels, out_channels, input_dim) | [
"torch.nn.functional.conv2d",
"torch.randint",
"gck_layer.GCK3x3Layer",
"torch.allclose",
"torch.ones"
] | [((195, 226), 'torch.allclose', 'torch.allclose', (['a', 'b'], {'atol': '(0.01)'}), '(a, b, atol=0.01)\n', (209, 226), False, 'import torch\n'), ((436, 531), 'torch.randint', 'torch.randint', (['(0)', '(100)', '(batch_size, in_channels, input_dim, input_dim)'], {'dtype': 'torch.float32'}), '(0, 100, (batch_size, in_channels, input_dim, input_dim),\n dtype=torch.float32)\n', (449, 531), False, 'import torch\n'), ((541, 628), 'torch.ones', 'torch.ones', (['out_channels', 'in_channels', 'kernel_dim', 'kernel_dim'], {'dtype': 'torch.float32'}), '(out_channels, in_channels, kernel_dim, kernel_dim, dtype=torch.\n float32)\n', (551, 628), False, 'import torch\n'), ((639, 663), 'torch.nn.functional.conv2d', 'F.conv2d', (['input', 'kernels'], {}), '(input, kernels)\n', (647, 663), True, 'import torch.nn.functional as F\n'), ((677, 749), 'gck_layer.GCK3x3Layer', 'GCK3x3Layer', (['in_channels', 'out_channels', '(3)', '(False)', '(input_dim - 2)', 'kernels'], {}), '(in_channels, out_channels, 3, False, input_dim - 2, kernels)\n', (688, 749), False, 'from gck_layer import GCK3x3Layer\n')] |
"""
Prepare some stats from timelines
"""
# https://docs.python.org/3.7/library/collections.html#collections.Counter
from collections import Counter
def get_timeline_stats(timeline):
"""
:type timeline list[now_playing_graph.timeline.TimelineEntry]
:rtype: dict
"""
top_artists = Counter()
top_songs = Counter()
longest_songs = dict()
longest_artists = Counter() # artist whose songs are played for the longest time
for entry in timeline:
top_artists.update((entry.artist_name,))
top_songs.update((entry.song_title,))
if entry.song_title not in longest_songs:
longest_songs[entry.song_title] = entry.duration
longest_artists += Counter({entry.artist_name: entry.duration})
return dict(
top_artists=top_artists.most_common(10),
top_songs=top_songs.most_common(10),
longest_songs=Counter(longest_songs).most_common(10),
longest_artists=Counter(longest_artists).most_common(10),
)
| [
"collections.Counter"
] | [((302, 311), 'collections.Counter', 'Counter', ([], {}), '()\n', (309, 311), False, 'from collections import Counter\n'), ((328, 337), 'collections.Counter', 'Counter', ([], {}), '()\n', (335, 337), False, 'from collections import Counter\n'), ((387, 396), 'collections.Counter', 'Counter', ([], {}), '()\n', (394, 396), False, 'from collections import Counter\n'), ((718, 762), 'collections.Counter', 'Counter', (['{entry.artist_name: entry.duration}'], {}), '({entry.artist_name: entry.duration})\n', (725, 762), False, 'from collections import Counter\n'), ((897, 919), 'collections.Counter', 'Counter', (['longest_songs'], {}), '(longest_songs)\n', (904, 919), False, 'from collections import Counter\n'), ((961, 985), 'collections.Counter', 'Counter', (['longest_artists'], {}), '(longest_artists)\n', (968, 985), False, 'from collections import Counter\n')] |
import functools
from django.contrib import messages
from django.urls import reverse
from django.shortcuts import redirect
def full_profile_required(func):
@functools.wraps(func)
def wrapper(request, *args, **kwargs):
if (request.user
and request.user.id # FIXME test mocks mess with the above object so we have to check the id
and (not request.user.attendeeprofile or not request.user.attendeeprofile.gender)):
messages.warning(
request,
"Please update your profile to continue using the EuroPython website."
)
return redirect(reverse('user_panel:profile_settings'))
return func(request, *args, **kwargs)
return wrapper
| [
"django.contrib.messages.warning",
"functools.wraps",
"django.urls.reverse"
] | [((164, 185), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (179, 185), False, 'import functools\n'), ((477, 578), 'django.contrib.messages.warning', 'messages.warning', (['request', '"""Please update your profile to continue using the EuroPython website."""'], {}), "(request,\n 'Please update your profile to continue using the EuroPython website.')\n", (493, 578), False, 'from django.contrib import messages\n'), ((650, 688), 'django.urls.reverse', 'reverse', (['"""user_panel:profile_settings"""'], {}), "('user_panel:profile_settings')\n", (657, 688), False, 'from django.urls import reverse\n')] |
import numpy as np
from mldftdat.pyscf_utils import *
from mldftdat.workflow_utils import safe_mem_cap_mb
from pyscf.dft.numint import eval_ao, make_mask
from mldftdat.density import LDA_FACTOR,\
contract21_deriv, contract21, GG_AMIN
def dtauw(rho_data):
return - get_gradient_magnitude(rho_data)**2 / (8 * rho_data[0,:]**2 + 1e-16),\
1 / (8 * rho_data[0,:] + 1e-16)
def dsdp(s):
return 1 / (2 * s)
def dasinhsdp(s):
return arcsinh_deriv(s) / (2 * s + 1e-10)
def ds2(rho_data):
# s = |nabla n| / (b * n)
rho = rho_data[0,:]
b = 2 * (3 * np.pi * np.pi)**(1.0/3)
s = get_gradient_magnitude(rho_data) / (b * rho**(4.0/3) + 1e-16)
s2 = s**2
return -8.0 * s2 / (3 * rho + 1e-16),\
1 / (b * rho**(4.0/3) + 1e-16)**2
def dalpha(rho_data):
rho = rho_data[0,:]
tau = rho_data[5,:]
tau0 = get_uniform_tau(rho) + 1e-16
mag_grad = get_gradient_magnitude(rho_data)
tauw = get_single_orbital_tau(rho, mag_grad)
dwdn, dwds = dtauw(rho_data)
return 5.0 * (tauw - tau) / (3 * tau0 * rho + 1e-16) - dwdn / tau0,\
- dwds / tau0,\
1 / tau0
LDA_FACTOR = - 3.0 / 4.0 * (3.0 / np.pi)**(1.0/3)
def v_semilocal(rho_data, F, dfdp, dfdalpha):
# 0 - n, 1 - p, 2 - nabla^2, 3 - alpha
v = np.zeros((4, rho_data.shape[1]))
rho = rho_data[0,:]
elda = LDA_FACTOR * rho**(4.0/3)
# dE/dn line 1
v[0] = 4.0 / 3 * LDA_FACTOR * rho**(1.0/3) * F
# dE/dp line 1
v[1] = elda * dfdp
# dE/dalpha line 1
v[3] = elda * dfdalpha
return v
def v_basis_transform(rho_data, v_npalpha):
"""
Transforms the basis of the exchange potential from
density, reduced gradient, and alpha to
density, contracted gradient, and kinetic energy.
v_npalpha is a 3xN array:
0 - Functional derivative of the exchange energy
explicitly with respect to the density, i.e.
not accounting for derivatives of the XEF features
wrt density
1 - Functional derivative wrt the square of the reduced
gradient p
2 - ZERO (Functional derivative wrt normalized laplacian)
3 - Functional derivative wrt the isoorbital indicator
alpha
Returns a 3xN array:
0 - Full functional derivative of the exchange energy
wrt the density, accounting for dp/dn and dalpha/dn
1 - Derivative wrt sigma, the contracted gradient |nabla n|^2
2 - ZERO (Derivative wrt the laplacian fo the density)
3 - Derivative wrt tau, the kinetic energy density
"""
v_nst = np.zeros(v_npalpha.shape)
# dE/dn lines 1-3
v_nst[0] = v_npalpha[0]
dpdn, dpdsigma = ds2(rho_data)
# dE/dn line 4 term 1
v_nst[0] += v_npalpha[1] * dpdn
# dE/dsigma term 1
v_nst[1] += v_npalpha[1] * dpdsigma
dadn, dadsigma, dadtau = dalpha(rho_data)
# dE/dn line 4 term 2
v_nst[0] += v_npalpha[3] * dadn
# dE/dsigma term 2
v_nst[1] += v_npalpha[3] * dadsigma
# dE/dtau
v_nst[3] = v_npalpha[3] * dadtau
return v_nst
def v_nonlocal_general(rho_data, grid, dedg, density, auxmol,
g, gr2, ovlp, l = 0, mul = 1.0):
# g should have shape (2l+1, N)
N = grid.weights.shape[0]
lc = get_dft_input2(rho_data)[:3]
if l == 0:
dedb = dedg.reshape(1, -1)
elif l == 1:
#dedb = 2 * elda * g * dfdg
dedb = 2 * dedg * g #/ (np.linalg.norm(g, axis=0) + 1e-10)
elif l == 2:
dedb = 2 * dedg * g / np.sqrt(5)
elif l == -2:
dedb = dedg
l = 2
elif l == -1:
dedb = dedg
l = 1
else:
raise ValueError('angular momentum code l=%d unknown' % l)
rho, s, alpha = lc
a = np.pi * (mul * rho / 2 + 1e-16)**(2.0 / 3)
scale = 1
fac = (6 * np.pi**2)**(2.0/3) / (16 * np.pi)
scale += GG_SMUL * fac * s**2
scale += GG_AMUL * 0.6 * fac * (alpha - 1)
a = a * scale
cond = a < GG_AMIN
da = np.exp(a[cond] / GG_AMIN - 1)
a[cond] = GG_AMIN * np.exp(a[cond] / GG_AMIN - 1)
# (ngrid * (2l+1), naux)
dedb[:,rho<1e-8] = 0
dedaux = np.dot((dedb * grid.weights).T.flatten(), ovlp)
dgda = l / (2 * a) * g - gr2
#print(dgda.shape, gr2.shape)
dgda[:,rho<1e-8] = 0
dadn = mul * a / (3 * (mul * rho / 2 + 1e-16))
dadp = GG_SMUL * np.pi * fac * (mul * rho / 2 + 1e-16)**(2.0/3)
dadalpha = GG_AMUL * 0.6 * np.pi * fac * (mul * rho / 2 + 1e-16)**(2.0/3)
dadn[cond] *= da
dadp[cond] *= da
dadalpha[cond] *= da
# add in line 3 of dE/dn, line 2 of dE/dp and dE/dalpha
v_npa = np.zeros((4, N))
deda = np.einsum('mi,mi->i', dedb, dgda)
v_npa[0] = deda * dadn
v_npa[1] = deda * dadp
v_npa[3] = deda * dadalpha
return v_npa, dedaux
def v_nonlocal(rho_data, grid, dedg, density, auxmol,
g, gr2, ovlp, l=0, a0=8.0, fac_mul=0.25,
amin=GG_AMIN, l_add=0, **kwargs):
#print(l, l_add, a0, fac_mul, amin)
# g should have shape (2l+1, N)
N = grid.weights.shape[0]
lc = get_dft_input2(rho_data)[:3]
if l == 0:
dedb = dedg.reshape(1, -1)
elif l == 1:
dedb = 2 * dedg * g
elif l == 2:
dedb = 2 * dedg * g / np.sqrt(5)
elif l == -2:
dedb = dedg
l = 2
elif l == -1:
dedb = dedg
l = 1
else:
raise ValueError('angular momentum code l=%d unknown' % l)
rho, s, alpha = lc
ratio = alpha + 5./3 * s**2
fac = fac_mul * 1.2 * (6 * np.pi**2)**(2.0/3) / np.pi
a = np.pi * (rho / 2 + 1e-16)**(2.0 / 3)
scale = a0 + (ratio-1) * fac
a = a * scale
cond = a < amin
da = np.exp(a[cond] / amin - 1)
a[cond] = amin * np.exp(a[cond] / amin - 1)
# (ngrid * (2l+1), naux)
dedb[:,rho<1e-8] = 0
dedaux = np.dot((dedb * grid.weights).T.flatten(), ovlp)
dgda = (l + l_add) / (2 * a) * g - gr2
dgda[:,rho<1e-8] = 0
dadn = 2 * a / (3 * rho + 1e-16)
dadalpha = np.pi * fac * (rho / 2 + 1e-16)**(2.0/3)
dadp = 5./3 * dadalpha
dadn[cond] *= da
dadp[cond] *= da
dadalpha[cond] *= da
# add in line 3 of dE/dn, line 2 of dE/dp and dE/dalpha
v_npa = np.zeros((4, N))
deda = np.einsum('mi,mi->i', dedb, dgda)
v_npa[0] = deda * dadn
v_npa[1] = deda * dadp
v_npa[3] = deda * dadalpha
return v_npa, dedaux
def functional_derivative_loop(mol, mlfunc, dEddesc,
raw_desc, raw_desc_r2,
rho_data, density, ovlps, grid):
"""
Core functional derivative loop for the CIDER features,
called by NLNumInt.
Args:
mol (pyscf.gto.Mole): molecule object
mlfunc (MLFunctional): Exchange functional
dEddesc (np.ndarray): ngrid x ndesc array of energy derivatives
with respect to the descriptors.
raw_desc (np.ndarray): raw CIDER descriptor vectors
raw_desc_r2 (np.ndarray): raw CIDER descriptor vectors <r^2>
for use in functional derivative with respect to the Gaussian
exponents
rho_data (np.ndarray): 6 x ngrid
density (np.ndarray): density in DF basis space
ovlps (np.ndarray): Overlaps of the CIDER descriptor functions with
the DF basis
grid: contains coords and weights of the real-space grid
"""
gg_dict = {
'a0': mlfunc.a0,
'amin': mlfunc.amin,
'fac_mul': mlfunc.fac_mul
}
N = grid.weights.shape[0]
naux = mol.auxmol.nao_nr()
sprefac = 2 * (3 * np.pi * np.pi)**(1.0/3)
n43 = rho_data[0]**(4.0/3)
svec = rho_data[1:4] / (sprefac * n43 + 1e-20)
v_npa = np.zeros((4, N))
v_aniso = np.zeros((3, N))
v_aux = np.zeros(naux)
for i, d in enumerate(mlfunc.desc_order):
if d == 0:
v_npa[0] += dEddesc[:,i]
elif d == 1:
v_npa[1] += dEddesc[:,i]
elif d == 2:
v_npa[3] += dEddesc[:,i]
else:
gg_kwargs = gg_dict
l_add = 0
if d in [3, 10, 11]:
if d == 3:
g = raw_desc[6]
ovlp = ovlps[0]
gr2 = raw_desc_r2[6:7]
elif d == 10:
g = raw_desc[15]
ovlp = ovlps[3]
gr2 = raw_desc_r2[15:16]
if mlfunc.desc_version == 'c':
l_add = 2
mul = 1.0
else:
mul = 0.25**(2./3)
gg_kwargs = {
'a0': mlfunc.a0 * mul,
'fac_mul': mlfunc.fac_mul * mul,
'amin': mlfunc.amin * mul
}
else:
g = raw_desc[16]
ovlp = ovlps[4]
gr2 = raw_desc_r2[16:17]
if mlfunc.desc_version == 'c':
mul = 2.0
else:
mul = 4**(2./3)
gg_kwargs = {
'a0': mlfunc.a0 * mul,
'fac_mul': mlfunc.fac_mul * mul,
'amin': mlfunc.amin * mul
}
l = 0
elif d == 4:
g = raw_desc[7:10]
gr2 = raw_desc_r2[7:10]
ovlp = ovlps[1]
l = 1
elif d == 6:
g = raw_desc[10:15]
gr2 = raw_desc_r2[10:15]
ovlp = ovlps[2]
l = 2
elif d == 5:
g = raw_desc[7:10]
gr2 = raw_desc_r2[7:10]
ovlp = ovlps[1]
dfmul = svec
v_aniso += dEddesc[:,i] * g
l = -1
elif d == 7:
l = -2
g = raw_desc[10:15]
gr2 = raw_desc_r2[10:15]
ovlp = ovlps[2]
dfmul = contract21_deriv(svec)
ddesc_dsvec = contract21(g, svec)
v_aniso += dEddesc[:,i] * 2 * ddesc_dsvec
elif d == 8:
g2 = raw_desc[10:15]
g2r2 = raw_desc_r2[10:15]
ovlp2 = ovlps[2]
g1 = raw_desc[7:10]
g1r2 = raw_desc_r2[7:10]
ovlp1 = ovlps[1]
dfmul = contract21_deriv(svec, g1)
ddesc_dsvec = contract21(g2, g1)
ddesc_dg1 = contract21(g2, svec)
v_aniso += dEddesc[:,i] * ddesc_dsvec
vtmp1, dedaux1 = v_nonlocal(rho_data, grid,
dEddesc[:,i] * ddesc_dg1,
density, mol.auxmol, g1,
g1r2, ovlp1, l=-1, **gg_kwargs)
vtmp2, dedaux2 = v_nonlocal(rho_data, grid,
dEddesc[:,i] * dfmul,
density, mol.auxmol, g2,
g2r2, ovlp2, l=-2, **gg_kwargs)
vtmp = vtmp1 + vtmp2
dedaux = dedaux1 + dedaux2
elif d == 9:
g2 = raw_desc[10:15]
g2r2 = raw_desc_r2[10:15]
ovlp2 = ovlps[2]
g1 = raw_desc[7:10]
g1r2 = raw_desc_r2[7:10]
ovlp1 = ovlps[1]
dfmul = contract21_deriv(g1)
ddesc_dg1 = 2 * contract21(g2, g1)
vtmp1, dedaux1 = v_nonlocal(rho_data, grid,
dEddesc[:,i] * ddesc_dg1,
density, mol.auxmol, g1,
g1r2, ovlp1, l=-1, **gg_kwargs)
vtmp2, dedaux2 = v_nonlocal(rho_data, grid,
dEddesc[:,i] * dfmul,
density, mol.auxmol, g2,
g2r2, ovlp2, l=-2, **gg_kwargs)
vtmp = vtmp1 + vtmp2
dedaux = dedaux1 + dedaux2
else:
raise NotImplementedError('Cannot take derivative for code %d' % d)
if d in [5, 7]:
vtmp, dedaux = v_nonlocal(rho_data, grid,
dEddesc[:,i] * dfmul,
density, mol.auxmol, g,
gr2, ovlp, l=l, **gg_kwargs)
elif d in [8, 9]:
pass
else:
vtmp, dedaux = v_nonlocal(rho_data, grid,
dEddesc[:,i],
density, mol.auxmol, g,
gr2, ovlp, l=l, l_add=l_add,
**gg_kwargs)
v_npa += vtmp
v_aux += dedaux
vtmp = None
dedaux = None
vmol = np.einsum('a,aij->ij', v_aux, mol.ao_to_aux)
v_nst = v_basis_transform(rho_data, v_npa)
v_nst[0] += np.einsum('ap,ap->p', -4.0 * svec / (3 * rho_data[0] + 1e-20), v_aniso)
v_grad = v_aniso / (sprefac * n43 + 1e-20)
return v_nst, v_grad, vmol
def get_density_in_basis(ao_to_aux, rdm1):
return np.einsum('npq,pq->n', ao_to_aux, rdm1)
def arcsinh_deriv(x):
return 1 / np.sqrt(x * x + 1)
def get_chi(alpha):
return 1 / (1 + alpha**2)
def chi_deriv(alpha):
return -2 * alpha / (1 + alpha**2)**2
| [
"numpy.sqrt",
"mldftdat.density.contract21_deriv",
"numpy.exp",
"numpy.zeros",
"numpy.einsum",
"mldftdat.density.contract21"
] | [((1313, 1345), 'numpy.zeros', 'np.zeros', (['(4, rho_data.shape[1])'], {}), '((4, rho_data.shape[1]))\n', (1321, 1345), True, 'import numpy as np\n'), ((2617, 2642), 'numpy.zeros', 'np.zeros', (['v_npalpha.shape'], {}), '(v_npalpha.shape)\n', (2625, 2642), True, 'import numpy as np\n'), ((3994, 4023), 'numpy.exp', 'np.exp', (['(a[cond] / GG_AMIN - 1)'], {}), '(a[cond] / GG_AMIN - 1)\n', (4000, 4023), True, 'import numpy as np\n'), ((4624, 4640), 'numpy.zeros', 'np.zeros', (['(4, N)'], {}), '((4, N))\n', (4632, 4640), True, 'import numpy as np\n'), ((4652, 4685), 'numpy.einsum', 'np.einsum', (['"""mi,mi->i"""', 'dedb', 'dgda'], {}), "('mi,mi->i', dedb, dgda)\n", (4661, 4685), True, 'import numpy as np\n'), ((5673, 5699), 'numpy.exp', 'np.exp', (['(a[cond] / amin - 1)'], {}), '(a[cond] / amin - 1)\n', (5679, 5699), True, 'import numpy as np\n'), ((6193, 6209), 'numpy.zeros', 'np.zeros', (['(4, N)'], {}), '((4, N))\n', (6201, 6209), True, 'import numpy as np\n'), ((6221, 6254), 'numpy.einsum', 'np.einsum', (['"""mi,mi->i"""', 'dedb', 'dgda'], {}), "('mi,mi->i', dedb, dgda)\n", (6230, 6254), True, 'import numpy as np\n'), ((7667, 7683), 'numpy.zeros', 'np.zeros', (['(4, N)'], {}), '((4, N))\n', (7675, 7683), True, 'import numpy as np\n'), ((7698, 7714), 'numpy.zeros', 'np.zeros', (['(3, N)'], {}), '((3, N))\n', (7706, 7714), True, 'import numpy as np\n'), ((7727, 7741), 'numpy.zeros', 'np.zeros', (['naux'], {}), '(naux)\n', (7735, 7741), True, 'import numpy as np\n'), ((13019, 13063), 'numpy.einsum', 'np.einsum', (['"""a,aij->ij"""', 'v_aux', 'mol.ao_to_aux'], {}), "('a,aij->ij', v_aux, mol.ao_to_aux)\n", (13028, 13063), True, 'import numpy as np\n'), ((13127, 13198), 'numpy.einsum', 'np.einsum', (['"""ap,ap->p"""', '(-4.0 * svec / (3 * rho_data[0] + 1e-20))', 'v_aniso'], {}), "('ap,ap->p', -4.0 * svec / (3 * rho_data[0] + 1e-20), v_aniso)\n", (13136, 13198), True, 'import numpy as np\n'), ((13338, 13377), 'numpy.einsum', 'np.einsum', (['"""npq,pq->n"""', 
'ao_to_aux', 'rdm1'], {}), "('npq,pq->n', ao_to_aux, rdm1)\n", (13347, 13377), True, 'import numpy as np\n'), ((4048, 4077), 'numpy.exp', 'np.exp', (['(a[cond] / GG_AMIN - 1)'], {}), '(a[cond] / GG_AMIN - 1)\n', (4054, 4077), True, 'import numpy as np\n'), ((5721, 5747), 'numpy.exp', 'np.exp', (['(a[cond] / amin - 1)'], {}), '(a[cond] / amin - 1)\n', (5727, 5747), True, 'import numpy as np\n'), ((13416, 13434), 'numpy.sqrt', 'np.sqrt', (['(x * x + 1)'], {}), '(x * x + 1)\n', (13423, 13434), True, 'import numpy as np\n'), ((3533, 3543), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (3540, 3543), True, 'import numpy as np\n'), ((5242, 5252), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (5249, 5252), True, 'import numpy as np\n'), ((9992, 10014), 'mldftdat.density.contract21_deriv', 'contract21_deriv', (['svec'], {}), '(svec)\n', (10008, 10014), False, 'from mldftdat.density import LDA_FACTOR, contract21_deriv, contract21, GG_AMIN\n'), ((10045, 10064), 'mldftdat.density.contract21', 'contract21', (['g', 'svec'], {}), '(g, svec)\n', (10055, 10064), False, 'from mldftdat.density import LDA_FACTOR, contract21_deriv, contract21, GG_AMIN\n'), ((10394, 10420), 'mldftdat.density.contract21_deriv', 'contract21_deriv', (['svec', 'g1'], {}), '(svec, g1)\n', (10410, 10420), False, 'from mldftdat.density import LDA_FACTOR, contract21_deriv, contract21, GG_AMIN\n'), ((10451, 10469), 'mldftdat.density.contract21', 'contract21', (['g2', 'g1'], {}), '(g2, g1)\n', (10461, 10469), False, 'from mldftdat.density import LDA_FACTOR, contract21_deriv, contract21, GG_AMIN\n'), ((10498, 10518), 'mldftdat.density.contract21', 'contract21', (['g2', 'svec'], {}), '(g2, svec)\n', (10508, 10518), False, 'from mldftdat.density import LDA_FACTOR, contract21_deriv, contract21, GG_AMIN\n'), ((11452, 11472), 'mldftdat.density.contract21_deriv', 'contract21_deriv', (['g1'], {}), '(g1)\n', (11468, 11472), False, 'from mldftdat.density import LDA_FACTOR, contract21_deriv, contract21, GG_AMIN\n'), 
((11505, 11523), 'mldftdat.density.contract21', 'contract21', (['g2', 'g1'], {}), '(g2, g1)\n', (11515, 11523), False, 'from mldftdat.density import LDA_FACTOR, contract21_deriv, contract21, GG_AMIN\n')] |
import webbrowser
class Movie():
""" This class provides a way to store movie related information """
# Class Variable: These are the Movies Ratings
# G: General Audiences. All ages admitted.
# PG: Parental Guidance Suggested. Some material may not be suitable for children.
# PG-13: Parents Strongly Cautioned. Some material may be inappropriate for children under 13.
# R: Restricted. Under 17 requires accompanying parent or adult guardian.
# NC-17: No Children. No one 17 and under admitted.
VALID_RATINGS = ["G", "PG","PG-13","R","NC-17"]
def __init__(self, movie_title, poster_image, trailer_youku, rating):
self.title = movie_title
self.poster_image_url = poster_image
self.trailer_url = trailer_youku
self.valid_rating = rating
def show_trailer(self):
webbrowser.open(self.trailer_url) | [
"webbrowser.open"
] | [((786, 819), 'webbrowser.open', 'webbrowser.open', (['self.trailer_url'], {}), '(self.trailer_url)\n', (801, 819), False, 'import webbrowser\n')] |
import os
from geofeather import to_geofeather, from_geofeather
from pandas.testing import assert_frame_equal
import pytest
def test_points_geofeather(tmpdir, points_wgs84):
    """Points should survive a write / read round trip unchanged."""
    out = tmpdir / "points_wgs84.feather"

    to_geofeather(points_wgs84, out)

    assert os.path.exists(out)

    result = from_geofeather(out)
    assert_frame_equal(result, points_wgs84)
    assert result.crs == points_wgs84.crs
def test_points_geofeather_proj4(tmpdir, points_albers_conus_proj4):
    """Round trip should preserve a proj4-defined CRS."""
    out = tmpdir / "points_albers_conus.feather"

    to_geofeather(points_albers_conus_proj4, out)
    result = from_geofeather(out)
    assert_frame_equal(result, points_albers_conus_proj4)

    # CRS objects constructed from proj4 strings may not compare equal
    # directly even when equivalent, so compare proj4 representations
    # when the CRS object supports it.
    if hasattr(result.crs, "to_proj4"):
        assert result.crs.to_proj4() == points_albers_conus_proj4.crs.to_proj4()
    else:
        assert result.crs == points_albers_conus_proj4.crs
def test_points_geofeather_wkt(tmpdir, points_albers_conus_wkt):
    """Round trip should preserve a WKT-defined CRS."""
    out = tmpdir / "points_albers_conus.feather"

    to_geofeather(points_albers_conus_wkt, out)
    result = from_geofeather(out)

    assert_frame_equal(result, points_albers_conus_wkt)
    assert result.crs == points_albers_conus_wkt.crs
def test_missing_crs_warning(tmpdir, points_wgs84):
    """Reading without the sidecar .crs file warns and yields crs=None."""
    out = tmpdir / "points_wgs84.feather"
    to_geofeather(points_wgs84, out)

    # Simulate a feather file written without CRS metadata.
    os.remove("{}.crs".format(out))

    with pytest.warns(UserWarning) as warning:
        result = from_geofeather(out)

    assert (
        "coordinate reference system file is missing" in warning[0].message.args[0]
    )
    assert result.crs is None
def test_lines_geofeather(tmpdir, lines_wgs84):
    """Lines should survive a write / read round trip unchanged."""
    out = tmpdir / "lines_wgs84.feather"

    to_geofeather(lines_wgs84, out)

    assert os.path.exists(out)

    result = from_geofeather(out)
    assert_frame_equal(result, lines_wgs84)
    assert result.crs == lines_wgs84.crs
def test_polygons_geofeather(tmpdir, polygons_wgs84):
    """Polygons should survive a write / read round trip unchanged."""
    out = tmpdir / "polygons_wgs84.feather"

    to_geofeather(polygons_wgs84, out)

    assert os.path.exists(out)

    result = from_geofeather(out)
    assert_frame_equal(result, polygons_wgs84)
    assert result.crs == polygons_wgs84.crs
| [
"os.path.exists",
"geofeather.to_geofeather",
"geofeather.from_geofeather",
"pandas.testing.assert_frame_equal",
"pytest.warns"
] | [((300, 337), 'geofeather.to_geofeather', 'to_geofeather', (['points_wgs84', 'filename'], {}), '(points_wgs84, filename)\n', (313, 337), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((350, 374), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (364, 374), False, 'import os\n'), ((385, 410), 'geofeather.from_geofeather', 'from_geofeather', (['filename'], {}), '(filename)\n', (400, 410), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((415, 451), 'pandas.testing.assert_frame_equal', 'assert_frame_equal', (['df', 'points_wgs84'], {}), '(df, points_wgs84)\n', (433, 451), False, 'from pandas.testing import assert_frame_equal\n'), ((716, 766), 'geofeather.to_geofeather', 'to_geofeather', (['points_albers_conus_proj4', 'filename'], {}), '(points_albers_conus_proj4, filename)\n', (729, 766), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((777, 802), 'geofeather.from_geofeather', 'from_geofeather', (['filename'], {}), '(filename)\n', (792, 802), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((807, 856), 'pandas.testing.assert_frame_equal', 'assert_frame_equal', (['df', 'points_albers_conus_proj4'], {}), '(df, points_albers_conus_proj4)\n', (825, 856), False, 'from pandas.testing import assert_frame_equal\n'), ((1364, 1412), 'geofeather.to_geofeather', 'to_geofeather', (['points_albers_conus_wkt', 'filename'], {}), '(points_albers_conus_wkt, filename)\n', (1377, 1412), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((1423, 1448), 'geofeather.from_geofeather', 'from_geofeather', (['filename'], {}), '(filename)\n', (1438, 1448), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((1453, 1500), 'pandas.testing.assert_frame_equal', 'assert_frame_equal', (['df', 'points_albers_conus_wkt'], {}), '(df, points_albers_conus_wkt)\n', (1471, 1500), False, 'from pandas.testing import assert_frame_equal\n'), ((1726, 1763), 
'geofeather.to_geofeather', 'to_geofeather', (['points_wgs84', 'filename'], {}), '(points_wgs84, filename)\n', (1739, 1763), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((2210, 2246), 'geofeather.to_geofeather', 'to_geofeather', (['lines_wgs84', 'filename'], {}), '(lines_wgs84, filename)\n', (2223, 2246), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((2259, 2283), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (2273, 2283), False, 'import os\n'), ((2294, 2319), 'geofeather.from_geofeather', 'from_geofeather', (['filename'], {}), '(filename)\n', (2309, 2319), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((2324, 2359), 'pandas.testing.assert_frame_equal', 'assert_frame_equal', (['df', 'lines_wgs84'], {}), '(df, lines_wgs84)\n', (2342, 2359), False, 'from pandas.testing import assert_frame_equal\n'), ((2580, 2619), 'geofeather.to_geofeather', 'to_geofeather', (['polygons_wgs84', 'filename'], {}), '(polygons_wgs84, filename)\n', (2593, 2619), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((2632, 2656), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (2646, 2656), False, 'import os\n'), ((2667, 2692), 'geofeather.from_geofeather', 'from_geofeather', (['filename'], {}), '(filename)\n', (2682, 2692), False, 'from geofeather import to_geofeather, from_geofeather\n'), ((2697, 2735), 'pandas.testing.assert_frame_equal', 'assert_frame_equal', (['df', 'polygons_wgs84'], {}), '(df, polygons_wgs84)\n', (2715, 2735), False, 'from pandas.testing import assert_frame_equal\n'), ((1816, 1841), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (1828, 1841), False, 'import pytest\n'), ((1867, 1892), 'geofeather.from_geofeather', 'from_geofeather', (['filename'], {}), '(filename)\n', (1882, 1892), False, 'from geofeather import to_geofeather, from_geofeather\n')] |
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
if not os.environ.get('DISABLE_EVENTLET_PATCHING'):
import eventlet
eventlet.monkey_patch()
from neutron.common import config as common_config
from neutron.db import models_v2
from neutron.db import securitygroups_db as sg_db
from neutron.plugins.ml2.common import exceptions as ml2_exc
from neutron_lib.api.definitions import portbindings
from neutron_lib import constants as n_const
from neutron_lib import constants as p_const
from neutron_lib.context import get_admin_context
from neutron_lib.db import api as db_api
from neutron_lib.plugins.ml2 import api
from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import loopingcall
from networking_arista._i18n import _, _LI, _LE
from networking_arista.common import constants
from networking_arista.common import db
from networking_arista.common import db_lib
from networking_arista.common import exceptions as arista_exc
from networking_arista.common import util
from networking_arista.ml2 import arista_sync
from networking_arista.ml2.rpc.arista_eapi import AristaRPCWrapperEapi
from networking_arista.ml2.rpc import get_rpc_wrapper
from networking_arista.ml2 import sec_group_callback
LOG = logging.getLogger(__name__)
cfg.CONF.import_group('ml2_arista', 'networking_arista.common.config')
def pretty_log(tag, obj):
    """Debug helper, intentionally disabled; kept as a no-op.

    The original implementation dumped *obj* as indented JSON to the debug
    log for every callback; it was commented out, presumably to avoid the
    serialization cost on hot paths. To re-enable, log
    ``json.dumps(obj, sort_keys=True, indent=4)`` under *tag* here.

    :param tag: short label describing the object being logged
    :param obj: JSON-serializable structure (typically a port/network dict)
    :returns: None
    """
    pass
class AristaDriver(api.MechanismDriver):
"""Ml2 Mechanism driver for Arista networking hardware.
Remembers all networks and VMs that are provisioned on Arista Hardware.
Does not send network provisioning request if the network has already been
provisioned before for the given port.
"""
    def __init__(self, rpc=None):
        """Wire up DB access, RPC clients and the periodic worker loops.

        :param rpc: optional pre-built RPC wrapper (e.g. from tests); when
                    omitted, one is built from the ml2_arista config group.
        """
        # Read access to Neutron's own DB tables.
        self.ndb = db_lib.NeutronNets()
        confg = cfg.CONF.ml2_arista
        self.segmentation_type = db_lib.VLAN_SEGMENTATION
        # Periodic loop driving _synchronization_thread; started in
        # initialize(), not here.
        self.timer = loopingcall.FixedIntervalLoopingCall(
            self._synchronization_thread)
        self.sync_timeout = confg['sync_interval']
        # NOTE(review): _config_save_loop and save_config_interval only
        # exist when save_config_interval > 0 -- callers (initialize())
        # must not assume these attributes are present.
        if confg.save_config_interval > 0:
            self._config_save_loop = loopingcall.FixedIntervalLoopingCall(
                self._save_switch_configs_thread)
            self.save_config_interval = confg.save_config_interval
        self.managed_physnets = confg['managed_physnets']
        self.eapi = None
        if rpc:
            LOG.info("Using passed in parameter for RPC")
            self.rpc = rpc
            self.eapi = rpc
        else:
            http_session = util.make_http_session()
            api_type = confg['api_type'].upper()
            self.rpc = get_rpc_wrapper(confg)(self.ndb,
                                                   http_session=http_session)
            # In 'NOCVX' mode the RPC wrapper doubles as the eAPI client;
            # otherwise a dedicated eAPI wrapper is created.
            if api_type == 'NOCVX':
                self.eapi = self.rpc
            else:
                self.eapi = AristaRPCWrapperEapi(self.ndb)
        self.sync_service = arista_sync.SyncService(self.rpc, self.ndb)
        self.rpc.sync_service = self.sync_service
        # Security-group handler is attached later, in initialize().
        self.sg_handler = None
def initialize(self):
if self.rpc.check_cvx_availability():
self.rpc.register_with_eos()
self.rpc.check_supported_features()
context = get_admin_context()
self._cleanup_db(context)
# Registering with EOS updates self.rpc.region_updated_time. Clear it
# to force an initial sync
self.rpc.clear_region_updated_time()
self.sg_handler = sec_group_callback.AristaSecurityGroupHandler(self)
self.timer.start(self.sync_timeout, stop_on_exception=False)
self._config_save_loop.start(self.save_config_interval,
stop_on_exception=False)
def create_network_precommit(self, context):
"""Remember the tenant, and network information."""
network = context.current
segments = context.network_segments
if not self.rpc.hpb_supported():
# Hierarchical port binding is not supported by CVX, only
# allow VLAN network type.
if segments[0][api.NETWORK_TYPE] != p_const.TYPE_VLAN:
return
network_id = network['id']
tenant_id = network['tenant_id'] or constants.INTERNAL_TENANT_ID
plugin_context = context._plugin_context
db_lib.remember_tenant(plugin_context, tenant_id)
for segment in segments:
db_lib.remember_network_segment(plugin_context,
tenant_id,
network_id,
segment.get('segmentation_id'),
segment.get('id'))
    def create_network_postcommit(self, context):
        """Provision the network on the Arista hardware.

        Only networks previously recorded in the local DB by
        create_network_precommit are pushed; RPC failures are logged,
        not raised (presumably reconciled later by the sync service --
        TODO confirm).
        """
        network = context.current
        network_id = network['id']
        network_name = network['name']
        tenant_id = network['tenant_id'] or constants.INTERNAL_TENANT_ID
        segments = context.network_segments
        shared_net = network['shared']
        plugin_context = context._plugin_context
        if db_lib.is_network_provisioned(plugin_context, tenant_id,
                                         network_id):
            try:
                network_dict = {
                    'network_id': network_id,
                    'segments': segments,
                    'network_name': network_name,
                    'shared': shared_net}
                self.rpc.create_network(tenant_id, network_dict)
            except arista_exc.AristaRpcError as err:
                LOG.error(_LE("create_network_postcommit: Did not create "
                              "network %(name)s. Reason: %(err)s"),
                          {'name': network_name, 'err': err})
        else:
            LOG.info(_LI('Network %s is not created as it is not found in '
                         'Arista DB'), network_id)
def update_network_precommit(self, context):
"""At the moment we only support network name change
Any other change in network is not supported at this time.
We do not store the network names, therefore, no DB store
action is performed here.
"""
new_network = context.current
orig_network = context.original
if new_network['name'] != orig_network['name']:
LOG.info(_LI('Network name changed to %s'), new_network['name'])
    def update_network_postcommit(self, context):
        """Push a network name/shared-flag change to the Arista hardware.

        A changed network is re-sent as a create request (create is how
        updates are conveyed to EOS); unchanged networks and networks not
        present in the local DB are skipped.
        """
        new_network = context.current
        orig_network = context.original
        plugin_context = context._plugin_context
        if (new_network['name'] != orig_network['name'] or
                new_network['shared'] != orig_network['shared']):
            network_id = new_network['id']
            network_name = new_network['name']
            tenant_id = (new_network['tenant_id'] or
                         constants.INTERNAL_TENANT_ID)
            shared_net = new_network['shared']
            if db_lib.is_network_provisioned(plugin_context,
                                             tenant_id, network_id):
                try:
                    network_dict = {
                        'network_id': network_id,
                        'segments': context.network_segments,
                        'network_name': network_name,
                        'shared': shared_net}
                    # Re-create conveys the updated name/shared flag.
                    self.rpc.create_network(tenant_id, network_dict)
                except arista_exc.AristaRpcError as err:
                    LOG.error(_LE('update_network_postcommit: Did not '
                                  'update network %(name)s. '
                                  'Reason: %(err)s'),
                              {'name': network_name, 'err': err})
            else:
                LOG.info(_LI('Network %s is not updated as it is not found'
                             ' in Arista DB'), network_id)
def delete_network_precommit(self, context):
"""Delete the network information from the DB."""
network = context.current
network_id = network['id']
tenant_id = network['tenant_id'] or constants.INTERNAL_TENANT_ID
plugin_context = context._plugin_context
if db_lib.is_network_provisioned(plugin_context, tenant_id,
network_id):
if db_lib.are_ports_attached_to_network(plugin_context,
network_id):
LOG.info(_LI('Network %s can not be deleted as it '
'has ports attached to it'), network_id)
raise ml2_exc.MechanismDriverError(
method='delete_network_precommit')
else:
db_lib.forget_network_segment(plugin_context,
tenant_id, network_id)
    def delete_network_postcommit(self, context):
        """Send the network delete request to the Arista hardware.

        Errors from EOS are swallowed (logged) so the Neutron delete
        still succeeds; the sync thread reconciles EOS state later.
        """
        network = context.current
        segments = context.network_segments
        if not self.rpc.hpb_supported():
            # Hierarchical port binding is not supported by CVX, only
            # send the request if network type is VLAN.
            if segments[0][api.NETWORK_TYPE] != p_const.TYPE_VLAN:
                # If network type is not VLAN, do nothing
                return
        network_id = network['id']
        tenant_id = network['tenant_id'] or constants.INTERNAL_TENANT_ID
        # Succeed deleting network in case EOS is not accessible.
        # EOS state will be updated by sync thread once EOS gets
        # alive.
        try:
            self.rpc.delete_network(tenant_id, network_id, segments)
            # if necessary, delete tenant as well.
            self.delete_tenant(context, tenant_id)
        except arista_exc.AristaRpcError as err:
            LOG.error(_LE('delete_network_postcommit: Did not delete '
                          'network %(network_id)s. Reason: %(err)s'),
                      {'network_id': network_id, 'err': err})
def create_port_precommit(self, context):
"""Remember the information about a VM and its ports
A VM information, along with the physical host information
is saved.
"""
# Returning from here, since the update_port_precommit is performing
# same operation, and also need of port binding information to decide
# whether to react to a port create event which is not available when
# this method is called.
return
def _get_physnet_from_link_info(self, port, physnet_info):
binding_profile = port.get(portbindings.PROFILE)
if not binding_profile:
return
link_info = binding_profile.get('local_link_information')
if not link_info:
return
mac_to_hostname = physnet_info.get('mac_to_hostname', {})
for link in link_info:
if link.get('switch_id') in mac_to_hostname:
physnet = mac_to_hostname.get(link.get('switch_id'))
return self.rpc.mlag_pairs.get(physnet, physnet)
def _bind_port_to_baremetal(self, context, segment):
port = context.current
vnic_type = port.get('binding:vnic_type')
if vnic_type != portbindings.VNIC_BAREMETAL:
# We are only interested in binding baremetal ports.
return
binding_profile = port.get(portbindings.PROFILE)
if not binding_profile:
return
link_info = binding_profile.get('local_link_information')
if not link_info:
return
switch_list = []
for link in link_info:
switch_list.append(link.get('switch_id'))
if not switch_list:
return
vif_details = {
portbindings.VIF_DETAILS_VLAN: str(
segment[api.SEGMENTATION_ID])
}
context.set_binding(segment[api.ID],
portbindings.VIF_TYPE_OTHER,
vif_details,
p_const.ACTIVE)
LOG.debug("AristaDriver: bound port info- port ID %(id)s "
"on network %(network)s",
{'id': port['id'],
'network': context.network.current['id']})
    def bind_port(self, context):
        """Bind the port to a network segment.

        The actual provisioning request to the Arista hardware happens on
        port create/update; this hook only tells the ML2 plugin which
        segment the port binds to. For VXLAN segments a dynamic VLAN
        segment is allocated (HPB); for VLAN segments baremetal binding
        is attempted.
        """
        host_id = context.host
        port = context.current
        physnet_info = {}
        for segment in context.segments_to_bind:
            physnet = segment.get(api.PHYSICAL_NETWORK)
            if not self._is_in_managed_physnets(physnet):
                LOG.debug("bind_port for port %(port)s: physical_network "
                          "%(physnet)s is not managed by Arista "
                          "mechanism driver", {'port': port.get('id'),
                                               'physnet': physnet})
                continue
            # If physnet is not set, we need to look it up using hostname
            # and topology info
            if not physnet:
                if not physnet_info:
                    # We only need to get physnet_info once
                    physnet_info = self.eapi.get_physical_network(host_id)
                if (port.get('binding:vnic_type') ==
                        portbindings.VNIC_BAREMETAL):
                    # Find physnet using link_information in baremetal case
                    physnet = self._get_physnet_from_link_info(port,
                                                               physnet_info)
                else:
                    physnet = physnet_info.get('physnet')
            # If physnet was not found, we cannot bind this port
            if not physnet:
                LOG.debug("bind_port for port %(port)s: no physical_network "
                          "found", {'port': port.get('id')})
                continue
            if segment[api.NETWORK_TYPE] == p_const.TYPE_VXLAN:
                # Check if CVX supports HPB
                if not self.rpc.hpb_supported():
                    LOG.debug("bind_port: HPB is not supported")
                    return
                # The physical network is connected to arista switches,
                # allocate dynamic segmentation id to bind the port to
                # the network that the port belongs to.
                try:
                    next_segment = context.allocate_dynamic_segment(
                        {'id': context.network.current['id'],
                         'network_type': p_const.TYPE_VLAN,
                         'physical_network': physnet})
                except Exception as exc:
                    LOG.error(_LE("bind_port for port %(port)s: Failed to "
                                  "allocate dynamic segment for physnet "
                                  "%(physnet)s. %(exc)s"),
                              {'port': port.get('id'), 'physnet': physnet,
                               'exc': exc})
                    return
                LOG.debug("bind_port for port %(port)s: "
                          "current_segment=%(current_seg)s, "
                          "next_segment=%(next_seg)s",
                          {'port': port.get('id'), 'current_seg': segment,
                           'next_seg': next_segment})
                # Hand the dynamic VLAN segment down to the next ML2 level.
                context.continue_binding(segment['id'], [next_segment])
            elif port.get('binding:vnic_type') == portbindings.VNIC_BAREMETAL:
                # The network_type is vlan, try binding process for baremetal.
                self._bind_port_to_baremetal(context, segment)
            else:
                continue
def create_port_postcommit(self, context):
"""Plug a physical host into a network.
Send provisioning request to Arista Hardware to plug a host
into appropriate network.
"""
# Returning from here, since the update_port_postcommit is performing
# same operation, and also need of port binding information to decide
# whether to react to a port create event which is not available when
# this method is called.
return
def _network_owner_tenant(self, context, network_id, tenant_id):
tid = tenant_id
if network_id and tenant_id:
plugin_context = context._plugin_context
network_owner = self.ndb.get_network_from_net_id(
plugin_context,
network_id
)
if network_owner and network_owner[0]['tenant_id'] != tenant_id:
tid = network_owner[0]['tenant_id'] or tenant_id
return tid
def _is_in_managed_physnets(self, physnet):
if not self.managed_physnets:
# If managed physnet is empty, accept all.
return True
# managed physnet is not empty, find for matching physnet
return any(pn == physnet for pn in self.managed_physnets)
def _bound_segments(self, context):
"""Check if a given port is managed by the mechanism driver.
It returns bound segment dictionary, if physical network in the bound
segment is included in the managed physical network list.
"""
if not self.managed_physnets:
return [binding_level.get(api.BOUND_SEGMENT) for
binding_level in context.binding_levels or []]
bound_segments = []
for binding_level in (context.binding_levels or []):
bound_segment = binding_level.get(api.BOUND_SEGMENT)
if (bound_segment and
self._is_in_managed_physnets(
bound_segment.get(api.PHYSICAL_NETWORK))):
bound_segments.append(bound_segment)
return bound_segments
    def _handle_port_migration_precommit(self, context):
        """Handle port migration in precommit: update the port's host.

        Migration is detected as host change + ACTIVE->DOWN status
        transition. Returns True when migration was handled, None
        otherwise (callers rely on its truthiness).
        """
        orig_port = context.original
        orig_host = context.original_host
        orig_status = context.original_status
        new_status = context.status
        new_host = context.host
        port_id = orig_port['id']
        if (new_host != orig_host and
                orig_status == n_const.PORT_STATUS_ACTIVE and
                new_status == n_const.PORT_STATUS_DOWN):
            LOG.debug("Handling port migration for: %s " % orig_port)
            network_id = orig_port['network_id']
            tenant_id = orig_port['tenant_id'] or constants.INTERNAL_TENANT_ID
            # Ensure that we use tenant Id for the network owner
            tenant_id = self._network_owner_tenant(context, network_id,
                                                   tenant_id)
            device_id = orig_port['device_id']
            plugin_context = context._plugin_context
            port_provisioned = db_lib.is_port_provisioned(plugin_context,
                                                          port_id,
                                                          orig_host)
            if port_provisioned:
                # Point the existing DB record at the new host.
                db_lib.update_port(plugin_context,
                                   device_id, new_host, port_id,
                                   network_id, tenant_id)
            return True
    def _handle_port_migration_postcommit(self, context):
        """Handle port migration in postcommit.

        On migration (host change + ACTIVE->DOWN), the port is removed
        from the original host and any dynamic segment left without
        bound ports is released and deleted from EOS. Returns True when
        migration was handled, None otherwise.
        """
        orig_port = context.original
        orig_host = context.original_host
        orig_status = context.original_status
        new_status = context.status
        new_host = context.host
        if (new_host != orig_host and
                orig_status == n_const.PORT_STATUS_ACTIVE and
                new_status == n_const.PORT_STATUS_DOWN):
            self._try_to_release_dynamic_segment(context, migration=True)
            # Handling migration case.
            # 1. The port should be unplugged from network
            # 2. If segment_id is provisioned and it not bound to any port it
            #    should be removed from EOS.
            network_id = orig_port['network_id']
            tenant_id = orig_port['tenant_id'] or constants.INTERNAL_TENANT_ID
            # Ensure that we use tenant Id for the network owner
            tenant_id = self._network_owner_tenant(context, network_id,
                                                   tenant_id)
            for binding_level in context._original_binding_levels:
                if self._network_provisioned(
                        context, tenant_id, network_id,
                        segment_id=binding_level.segment_id):
                    # Removing the port from the original host
                    self._delete_port(context, orig_port, orig_host, tenant_id,
                                      segments=[binding_level])
                    # If segment id is not bound to any port, then
                    # remove it from EOS
                    plugin_context = context._plugin_context
                    segment = self.ndb.get_segment_by_id(
                        plugin_context,
                        binding_level.segment_id)
                    if not segment:
                        try:
                            segment_info = [{
                                'id': binding_level.segment_id,
                                'network_id': network_id,
                            }]
                            LOG.debug("migration_postcommit:"
                                      "deleting segment %s", segment_info)
                            self.rpc.delete_network_segments(tenant_id,
                                                             segment_info)
                            # Remove the segment from the provisioned
                            # network DB.
                            db_lib.forget_network_segment(
                                plugin_context, tenant_id, network_id,
                                binding_level.segment_id)
                        except arista_exc.AristaRpcError:
                            # Best effort: the sync thread reconciles EOS
                            # once it is reachable again.
                            LOG.info(constants.EOS_UNREACHABLE_MSG)
            return True
    def update_port_precommit(self, context):
        """Record port/host bookkeeping in the local DB on port update.

        Handles: name-change logging, migration (delegated), remembering
        newly-seen segments, and creating/updating/forgetting the
        <port, host> record depending on binding and status. Port names
        themselves are not stored.
        """
        new_port = context.current
        orig_port = context.original
        if new_port['name'] != orig_port['name']:
            LOG.info(_LI('Port name changed to %s'), new_port['name'])
        device_id = new_port['device_id']
        host = context.host
        pretty_log("update_port_precommit: new", new_port)
        pretty_log("update_port_precommit: orig", orig_port)
        if new_port['device_owner'] == 'compute:probe':
            return
        # Check if the port is part of managed physical network
        seg_info = self._bound_segments(context)
        if not seg_info:
            # Ignoring the update as the port is not managed by
            # arista mechanism driver.
            return
        # Check if it is port migration case
        if self._handle_port_migration_precommit(context):
            return
        # device_id and device_owner are set on VM boot
        port_id = new_port['id']
        network_id = new_port['network_id']
        tenant_id = new_port['tenant_id'] or constants.INTERNAL_TENANT_ID
        # Ensure that we use tenant Id for the network owner
        tenant_id = self._network_owner_tenant(context, network_id, tenant_id)
        plugin_context = context._plugin_context
        # Remember any bound segment not yet in the provisioned-network DB
        # (e.g. dynamically allocated HPB segments).
        for seg in seg_info:
            if not self._network_provisioned(context, tenant_id, network_id,
                                             seg[api.SEGMENTATION_ID],
                                             seg[api.ID]):
                LOG.info(
                    _LI("Adding %s to provisioned network database"), seg)
                db_lib.remember_tenant(plugin_context, tenant_id)
                db_lib.remember_network_segment(
                    plugin_context, tenant_id, network_id,
                    seg[api.SEGMENTATION_ID], seg[api.ID])
        port_down = False
        if (new_port['device_owner'] ==
                n_const.DEVICE_OWNER_DVR_INTERFACE):
            # We care about port status only for DVR ports because
            # for DVR, a single port exists on multiple hosts. If a port
            # is no longer needed on a host then the driver gets a
            # port_update notification for that <port, host> with the
            # port status as PORT_STATUS_DOWN.
            port_down = context.status == n_const.PORT_STATUS_DOWN
        if host and not port_down:
            port_host_filter = None
            if (new_port['device_owner'] ==
                    n_const.DEVICE_OWNER_DVR_INTERFACE):
                # <port, host> uniquely identifies a DVR port. Other
                # ports are identified by just the port id
                port_host_filter = host
            port_provisioned = db_lib.is_port_provisioned(
                plugin_context,
                port_id, port_host_filter)
            if not port_provisioned:
                LOG.info("Remembering the port")
                # Create a new port in the DB
                db_lib.remember_tenant(plugin_context, tenant_id)
                db_lib.remember_vm(plugin_context,
                                   device_id, host, port_id,
                                   network_id, tenant_id)
            else:
                if (new_port['device_id'] != orig_port['device_id'] or
                        context.host != context.original_host or
                        new_port['network_id'] != orig_port['network_id'] or
                        new_port['tenant_id'] != orig_port['tenant_id']):
                    LOG.info("Updating the port")
                    # Port exists in the DB. Update it
                    db_lib.update_port(plugin_context, device_id, host,
                                       port_id,
                                       network_id, tenant_id)
        else:  # Unbound or down port does not concern us
            orig_host = context.original_host
            LOG.info("Forgetting the port on %s" % str(orig_host))
            db_lib.forget_port(plugin_context, port_id, orig_host)
def _port_updated(self, context):
"""Returns true if any port parameters have changed."""
new_port = context.current
orig_port = context.original
return (new_port['device_id'] != orig_port['device_id'] or
context.host != context.original_host or
new_port['network_id'] != orig_port['network_id'] or
new_port['tenant_id'] != orig_port['tenant_id'])
    def update_port_postcommit(self, context):
        """Push port changes to EOS: unplug old host, plug new one.

        Handles migration (delegated), creates network segments on EOS
        for HPB, removes the port from its previous host when it moved
        or went down, and plugs it into the network when the port, the
        network and a host are all known and the port is up.
        """
        port = context.current
        orig_port = context.original
        device_id = port['device_id']
        device_owner = port['device_owner']
        host = context.host
        is_vm_boot = device_id and device_owner
        vnic_type = port['binding:vnic_type']
        binding_profile = port['binding:profile']
        bindings = []
        vlan_type = 'native' if vnic_type == 'baremetal' else 'allowed'
        if binding_profile:
            bindings = binding_profile.get('local_link_information', bindings)
            vlan_type = binding_profile.get('vlan_type', vlan_type)
        port_id = port['id']
        port_name = port['name']
        network_id = port['network_id']
        tenant_id = port['tenant_id'] or constants.INTERNAL_TENANT_ID
        # Ensure that we use tenant Id for the network owner
        tenant_id = self._network_owner_tenant(context, network_id, tenant_id)
        sg = port['security_groups']
        orig_sg = orig_port['security_groups']
        pretty_log("update_port_postcommit: new", port)
        pretty_log("update_port_postcommit: orig", orig_port)
        seg_info = self._bound_segments(context)
        if not seg_info:
            LOG.debug("Ignoring the update as the port %s is not managed by "
                      "Arista switches.", port_id)
            return
        # Check if it is port migration case
        if self._handle_port_migration_postcommit(context):
            # Return from here as port migration is already handled.
            return
        hostname = self._host_name(host)
        port_host_filter = None
        if port['device_owner'] == n_const.DEVICE_OWNER_DVR_INTERFACE:
            # <port, host> uniquely identifies a DVR port. Other
            # ports are identified by just the port id
            port_host_filter = host
        plugin_context = context._plugin_context
        port_provisioned = db_lib.is_port_provisioned(plugin_context, port_id,
                                                      port_host_filter)
        # If network does not exist under this tenant,
        # it may be a shared network. Get shared network owner Id
        net_provisioned = self._network_provisioned(context,
                                                    tenant_id, network_id)
        # The network counts as provisioned only if every bound segment
        # is known to the local DB.
        for seg in seg_info:
            if not self._network_provisioned(context, tenant_id, network_id,
                                             segmentation_id=seg[
                                                 api.SEGMENTATION_ID]):
                net_provisioned = False
                break
        segments = []
        if net_provisioned:
            if self.rpc.hpb_supported():
                segments = seg_info
                all_segments = self.ndb.get_all_network_segments(
                    plugin_context, network_id)
                try:
                    self.rpc.create_network_segments(
                        tenant_id, network_id,
                        context.network.current['name'], all_segments)
                except arista_exc.AristaRpcError:
                    LOG.error(_LE("Failed to create network segments"))
                    raise ml2_exc.MechanismDriverError()
            else:
                # For non HPB cases, the port is bound to the static
                # segment
                segments = self.ndb.get_network_segments(plugin_context,
                                                         network_id)
        try:
            orig_host = context.original_host
            port_down = False
            if port['device_owner'] == n_const.DEVICE_OWNER_DVR_INTERFACE:
                # We care about port status only for DVR ports
                port_down = context.status == n_const.PORT_STATUS_DOWN
            if orig_host and (port_down or host != orig_host):
                try:
                    LOG.info("Deleting the port %s" % str(orig_port))
                    # The port moved to a different host or the VM
                    # connected to the port was deleted or its in DOWN
                    # state. So delete the old port on the old host.
                    self._delete_port(context, orig_port, orig_host, tenant_id,
                                      segments=segments)
                except ml2_exc.MechanismDriverError:
                    # If deleting a port fails, then not much can be done
                    # about it. Log a warning and move on.
                    LOG.warning(constants.UNABLE_TO_DELETE_PORT_MSG)
            if port_provisioned and net_provisioned and hostname and \
                    is_vm_boot and not port_down:
                LOG.info(_LI("Port plugged into network"))
                # Plug port into the network only if it exists in the db
                # and is bound to a host and the port is up.
                self.rpc.plug_port_into_network(device_id,
                                                hostname,
                                                port_id,
                                                network_id,
                                                tenant_id,
                                                port_name,
                                                device_owner,
                                                sg, orig_sg,
                                                vnic_type,
                                                segments=segments,
                                                switch_bindings=bindings,
                                                vlan_type=vlan_type)
            else:
                LOG.info(_LI("Port not plugged into network"))
        except arista_exc.AristaRpcError as err:
            LOG.error(_LE('update_port_postcommit: Did not update '
                          'port %(port_id)s. Reason: %(err)s'),
                      {'port_id': port_id, 'err': err})
def delete_port_precommit(self, context):
"""Delete information about a VM and host from the DB."""
# Check if the port is part of managed physical network
seg_info = self._bound_segments(context)
if not seg_info:
# Ignoring the update as the port is not managed by
# arista mechanism driver.
return
port = context.current
pretty_log("delete_port_precommit:", port)
port_id = port['id']
host_id = context.host
if host_id:
plugin_context = context._plugin_context
db_lib.forget_port(plugin_context, port_id, host_id)
    def delete_port_postcommit(self, context):
        """Unplug the host from the network on the Arista hardware.

        Also releases the dynamic segment (if this port was the last
        user) and cleans up the network segment record. RPC failures are
        logged and swallowed.
        """
        # Check if the port is part of managed physical network
        seg_info = self._bound_segments(context)
        if not seg_info:
            # Ignoring the update as the port is not managed by
            # arista mechanism driver.
            return
        port = context.current
        host = context.host
        network_id = port['network_id']
        tenant_id = port['tenant_id'] or constants.INTERNAL_TENANT_ID
        # Ensure that we use tenant Id for the network owner
        tenant_id = self._network_owner_tenant(context, network_id, tenant_id)
        pretty_log("delete_port_postcommit:", port)
        # If this port is the last one using dynamic segmentation id,
        # and the segmentation id was allocated by this driver, it needs
        # to be released.
        self._try_to_release_dynamic_segment(context)
        try:
            self._delete_port(context, port, host, tenant_id,
                              segments=seg_info)
            self._delete_segment(context, tenant_id)
        except ml2_exc.MechanismDriverError:
            # Can't do much if deleting a port failed.
            # Log a warning and continue.
            LOG.warning(constants.UNABLE_TO_DELETE_PORT_MSG)
def _delete_port(self, context, port, host, tenant_id, segments=None):
    """Deletes the port from EOS.

    param port: Port which is to be deleted
    param host: The host on which the port existed
    param tenant_id: The tenant to which the port belongs to. Some times
                     the tenant id in the port dict is not present (as in
                     the case of HA router).
    """
    device_id = port['device_id']
    port_id = port['id']
    network_id = port['network_id']
    device_owner = port['device_owner']
    vnic_type = port['binding:vnic_type']
    binding_profile = port['binding:profile']
    switch_bindings = []
    if binding_profile:
        # Baremetal local-link info describes the switch/port the host
        # is wired to; empty list when absent.
        switch_bindings = binding_profile.get('local_link_information', [])
    sg = port['security_groups']
    if not device_id or not host:
        # Without a device or host there is nothing to unplug on EOS.
        LOG.warning(constants.UNABLE_TO_DELETE_DEVICE_MSG)
        return
    # sometimes segments are snapshot objects, let's resolve that here
    # (mutated in place so the caller sees the resolved dicts too).
    if segments:
        plugin_context = context._plugin_context
        for n, segment in enumerate(segments):
            if not isinstance(segment, dict) and \
                    not hasattr(segment, 'segmentation_id') and \
                    hasattr(segment, 'segment_id'):
                segments[n] = db_lib.get_segmentation_id_by_segment_id(
                    plugin_context,
                    segment.segment_id
                )
    try:
        device_ports = db_lib.get_bm_ports_for_device(
            context._plugin_context, device_id)
        # If any OTHER port of the same device is still on this network,
        # the network must not be deprovisioned from the switch.
        port_net_in_use = False
        for device_port in device_ports:
            if device_port.id != port_id and \
                    device_port.network_id == network_id:
                LOG.warning("Will not deprovision network %s on port %s "
                            "as port %s is still on this network",
                            network_id, port_id, device_port.id)
                port_net_in_use = True
        if not cfg.CONF.ml2_arista.skip_unplug and not port_net_in_use:
            hostname = self._host_name(host)
            self.rpc.unplug_port_from_network(
                device_id, device_owner, hostname, port_id, network_id,
                tenant_id, sg, vnic_type, switch_bindings=switch_bindings,
                segments=segments)
            if not cfg.CONF.ml2_arista.sec_group_background_only:
                self.rpc.remove_security_group(sg, switch_bindings)
            # if necessary, delete tenant as well.
            self.delete_tenant(context, tenant_id)
    except arista_exc.AristaRpcError:
        # EOS unreachable: log only; a later sync is expected to
        # reconcile the state.
        LOG.info(constants.EOS_UNREACHABLE_MSG)
def _delete_segment(self, context, tenant_id):
    """Deletes a dynamic network segment from EOS.

    param context: The port context
    param tenant_id: The tenant which the port belongs to
    """
    if not self.rpc.hpb_supported():
        # Returning as HPB not supported by CVX
        return
    port = context.current
    network_id = port.get('network_id')
    if not context._binding_levels:
        return
    plugin_context = context._plugin_context
    for binding_level in context._binding_levels:
        LOG.debug("deleting segment %s", binding_level.segment_id)
        # Only segments this driver provisioned are candidates.
        if self._network_provisioned(context, tenant_id, network_id,
                                     segment_id=binding_level.segment_id):
            segment = self.ndb.get_segment_by_id(
                plugin_context, binding_level.segment_id)
            if not segment:
                # The segment is already released. Delete it from EOS
                LOG.debug("Deleting segment %s", binding_level.segment_id)
                try:
                    segment_info = {
                        'id': binding_level.segment_id,
                        'network_id': network_id,
                    }
                    self.rpc.delete_network_segments(tenant_id,
                                                     [segment_info])
                    # Remove the segment from the provisioned network DB.
                    db_lib.forget_network_segment(plugin_context,
                                                  tenant_id, network_id,
                                                  binding_level.segment_id)
                except arista_exc.AristaRpcError:
                    # EOS unreachable; leave the DB entry for a later sync.
                    LOG.info(constants.EOS_UNREACHABLE_MSG)
            else:
                # Segment still exists in Neutron; keep it on EOS.
                LOG.debug("Cannot delete segment_id %(segid)s "
                          "segment is %(seg)s",
                          {'segid': binding_level.segment_id,
                           'seg': segment})
def _try_to_release_dynamic_segment(self, context, migration=False):
    """Release dynamic segment allocated by the driver

    If this port is the last port using the segmentation id allocated
    by the driver, it should be released

    param migration: when True, look at the port's original host
                     instead of the current one (port move case).
    """
    if migration:
        host = context.original_host
    else:
        host = context.host
    physnet_info = self.eapi.get_physical_network(host, context=context)
    physnet = physnet_info.get('physnet')
    if not physnet:
        # Host is not attached to a physnet we know; nothing to release.
        return
    binding_levels = context.binding_levels
    LOG.debug("_try_release_dynamic_segment: "
              "binding_levels=%(bl)s", {'bl': binding_levels})
    if not binding_levels:
        return
    # Find the first VLAN segment bound on our physnet; remember every
    # driver seen on the way so we can check who bound it.
    segment_id = None
    bound_drivers = []
    for binding_level in binding_levels:
        bound_segment = binding_level.get(api.BOUND_SEGMENT)
        driver = binding_level.get(api.BOUND_DRIVER)
        bound_drivers.append(driver)
        if (bound_segment and
                bound_segment.get('physical_network') == physnet and
                bound_segment.get('network_type') == p_const.TYPE_VLAN):
            segment_id = bound_segment.get('id')
            break
    plugin_context = context._plugin_context
    # If the segment id is found and it is bound by this driver, and also
    # the segment id is not bound to any other port, release the segment.
    # When Arista driver participate in port binding by allocating dynamic
    # segment and then calling continue_binding, the driver should the
    # second last driver in the bound drivers list.
    if (segment_id and bound_drivers[-2:-1] ==
            [constants.MECHANISM_DRV_NAME]):
        filters = {'segment_id': segment_id}
        result = db_lib.get_port_binding_level(plugin_context, filters)
        LOG.debug("Looking for entry with filters=%(filters)s "
                  "result=%(result)s ", {'filters': filters,
                                         'result': result})
        if not result:
            # The requested segment_id does not exist in the port binding
            # database. Release the dynamic segment.
            context.release_dynamic_segment(segment_id)
            LOG.debug("Released dynamic segment %(seg)s allocated "
                      "by %(drv)s", {'seg': segment_id,
                                     'drv': bound_drivers[-2]})
def delete_tenant(self, context, tenant_id):
    """Drop a tenant from the DB and from EOS once it is empty.

    The tenant is removed only when no network and no VM remain
    provisioned for it.

    Raises MechanismDriverError when EOS cannot be reached.
    """
    pctx = context._plugin_context
    nets = db_lib.num_nets_provisioned(pctx, tenant_id)
    vms = db_lib.num_vms_provisioned(pctx, tenant_id)
    if nets + vms:
        # Tenant still owns resources; keep it.
        return
    db_lib.forget_tenant(pctx, tenant_id)
    try:
        self.rpc.delete_tenant(tenant_id)
    except arista_exc.AristaRpcError:
        LOG.info(constants.EOS_UNREACHABLE_MSG)
        raise ml2_exc.MechanismDriverError(method='delete_tenant')
def _host_name(self, hostname):
    """Return the host identifier to report to EOS.

    When ``use_fqdn`` is configured the full name is kept; otherwise
    only the short name (portion before the first dot) is used.
    """
    if cfg.CONF.ml2_arista['use_fqdn']:
        return hostname
    return hostname.split('.')[0]
def _save_switch_configs_thread(self):
if not self.sync_service.is_member_id_valid():
LOG.info("Switch config save thread was started unnecessarily "
"in this process, stopping it")
self._config_save_loop.stop()
return
self.sync_service.save_switch_configs()
def _synchronization_thread(self):
if not self.sync_service.is_member_id_valid():
LOG.info("Synchronization thread was started unnecessarily "
"in this process, stopping it")
self.timer.stop()
return
self.sync_service.do_synchronize()
def stop_synchronization_thread(self):
    """Stop the periodic synchronization timer, if one is running."""
    timer = self.timer
    if timer:
        timer.stop()
        self.timer = None
@db_api.retry_db_errors
def _cleanup_db(self, context):
    """Clean up any unnecessary entries in our DB.

    Bulk-deletes provisioned-VM and provisioned-network rows whose
    backing Neutron port/network no longer exists. Retried on DB
    errors via the ``retry_db_errors`` decorator.
    """
    session = context.session
    with session.begin(subtransactions=True):
        arista_vms = db.AristaProvisionedVms
        arista_nets = db.AristaProvisionedNets
        # DELETE FROM arista_provisioned_vms
        # WHERE arista_provisioned_vms.port_id NOT IN (
        #     SELECT ports.id FROM ports)
        all_ports = session.query(models_v2.Port.id)
        session.query(arista_vms). \
            filter(arista_vms.port_id.notin_(all_ports.subquery())). \
            delete(synchronize_session=False)
        # DELETE FROM arista_provisioned_nets
        # WHERE arista_provisioned_nets.network_id NOT IN (
        #     SELECT networks.id FROM networks)
        all_networks = session.query(models_v2.Network.id)
        session.query(arista_nets). \
            filter(arista_nets.network_id.notin_(all_networks.subquery())). \
            delete(synchronize_session=False)
def _network_provisioned(self, context, tenant_id, network_id,
                         segmentation_id=None, segment_id=None):
    """Check whether the network is known to this driver.

    A network missing under this tenant may still be a shared network
    owned by another tenant, so fall back to the shared-network owner
    lookup.
    """
    pctx = context._plugin_context
    provisioned = db_lib.is_network_provisioned(
        pctx, tenant_id, network_id, segmentation_id, segment_id)
    return provisioned or self.ndb.get_shared_network_owner_id(
        pctx, network_id)
def create_security_group(self, context, sg):
    # Intentional no-op: creating a security group needs no EOS
    # provisioning here; ACLs are pushed when rules are added or the
    # group is updated (see create_security_group_rule /
    # update_security_group).
    pass
def delete_security_group(self, context, sg):
    # Intentional no-op: nothing is removed on EOS for the group
    # itself; rule removal is handled by delete_security_group_rule.
    pass
def update_security_group(self, context, sg):
    """Push an updated security group to EOS as an ACL.

    Skipped when security groups are synced in the background or when
    no port bound by this driver uses the group.

    Raises AristaSecurityGroupError if the ACL cannot be created.
    """
    if cfg.CONF.ml2_arista.sec_group_background_only:
        return
    if not self._is_security_group_used(context, sg['id']):
        return
    try:
        self.rpc.create_acl(context, sg)
    except Exception:
        msg = _('Failed to create ACL on EOS %s') % sg
        LOG.exception(msg)
        raise arista_exc.AristaSecurityGroupError(msg=msg)
def create_security_group_rule(self, context, sgr):
    """Push a new security group rule to EOS as an ACL rule.

    Skipped when security groups are synced in the background or when
    no port bound by this driver uses the rule's group.

    Raises AristaSecurityGroupError if the ACL rule cannot be created.
    """
    if cfg.CONF.ml2_arista.sec_group_background_only:
        return
    if not self._is_security_group_used(context,
                                        sgr['security_group_id']):
        return
    try:
        self.rpc.create_acl_rule(context, sgr)
    except Exception:
        msg = _('Failed to create ACL rule on EOS %s') % sgr
        LOG.exception(msg)
        raise arista_exc.AristaSecurityGroupError(msg=msg)
def delete_security_group_rule(self, context, sgr_id):
    """Remove a security group rule's ACL entry from EOS.

    Skipped when security groups are synced in the background, when no
    rule id is given, when the rule no longer exists, or when no port
    bound by this driver uses the rule's group.

    Raises AristaSecurityGroupError if the ACL rule cannot be deleted.
    """
    if cfg.CONF.ml2_arista.sec_group_background_only or not sgr_id:
        return
    rule = self.ndb.get_security_group_rule(context, sgr_id)
    if not rule:
        return
    if not self._is_security_group_used(context,
                                        rule['security_group_id']):
        return
    try:
        self.rpc.delete_acl_rule(rule)
    except Exception:
        msg = _('Failed to delete ACL rule on EOS %s') % rule
        LOG.exception(msg)
        raise arista_exc.AristaSecurityGroupError(msg=msg)
@staticmethod
def _is_security_group_used(context, security_group_id):
    """Return True if any arista-provisioned port binds this group."""
    binding = sg_db.SecurityGroupPortBinding
    vms = db.AristaProvisionedVms
    query = (context.session.query(binding.security_group_id)
             .filter(binding.security_group_id == security_group_id)
             .join(vms, vms.port_id == binding.port_id))
    return query.first() is not None
def cli():
    """Command-line entry point that replays port provisioning to EOS.

    Reads ``--port_id`` / ``--all_ports`` from the command line, loads
    every port bound by this mechanism driver from the Neutron DB, and
    re-issues ``plug_port_into_network`` for each, fanned out per device
    over a green-thread pool.
    """
    import json
    import six
    import sys
    from collections import defaultdict
    from neutron.db.models_v2 import Port
    from neutron.plugins.ml2.models import NetworkSegment
    from neutron.plugins.ml2.models import PortBindingLevel
    from oslo_config import cfg
    from sqlalchemy.orm import contains_eager, joinedload, relationship
    cfg.CONF.register_cli_opts([
        cfg.MultiStrOpt('port_id',
                        short='p',
                        default=[],
                        help=''),
        cfg.BoolOpt('all_ports',
                    default=False,
                    help='Should we sync all ports'),
    ])
    common_config.init(sys.argv[1:])
    if not cfg.CONF.all_ports and not cfg.CONF.port_id:
        LOG.error("Nothing to do, specify either port_id or all_ports")
        return
    context = get_admin_context()
    ndb = db_lib.NeutronNets()
    confg = cfg.CONF.ml2_arista
    confg.http_pool_block = True
    rpc = get_rpc_wrapper(confg)(ndb)
    # Patch relationships onto the ORM models so ports can be loaded
    # together with their binding levels and segments.
    Port.port_binding_levels = relationship(PortBindingLevel)
    PortBindingLevel.segment = relationship(NetworkSegment,
                                            lazy='subquery')
    # device_id -> list of per-port argument tuples to replay.
    items = defaultdict(list)
    with context.session.begin():
        session = context.session
        # All ports bound by this driver, with security groups and
        # binding levels eagerly loaded.
        ports = session.query(Port). \
            join(Port.port_binding). \
            join(Port.port_binding_levels). \
            options(joinedload(Port.security_groups)). \
            filter(PortBindingLevel.driver == constants.MECHANISM_DRV_NAME). \
            options(contains_eager(Port.port_binding_levels))
        if cfg.CONF.port_id:
            ports = ports.filter(Port.id.in_(cfg.CONF.port_id))
        for port in ports:
            port_id = port.id
            device_id = port.device_id
            network_id = port.network_id
            port_name = port.name
            device_owner = port.device_owner
            binding = port.port_binding
            hostname = binding.host
            vnic_type = binding.vnic_type
            orig_sg = None
            tenant_id = port.tenant_id
            sg = [sg.security_group_id for sg in port.security_groups]
            binding_profile = json.loads(binding.profile)
            bindings = binding_profile.get('local_link_information', [])
            vlan_type = binding_profile.get('vlan_type', 'native')
            segments = [{'id': level.segment_id, 'level': level.level,
                         'physical_network': level.segment.physical_network,
                         'segmentation_id': level.segment.segmentation_id,
                         'network_type': level.segment.network_type,
                         'is_dynamic': level.segment.is_dynamic,
                         }
                        for level in port.port_binding_levels
                        if level.driver == 'arista'
                        ]
            items[device_id].append((hostname, port_id, network_id, tenant_id,
                                     port_name, device_owner, sg, orig_sg,
                                     vnic_type,
                                     segments, bindings, vlan_type))
    from eventlet.greenpool import GreenPool as Pool

    def plug(device_ports):
        # Replay all ports of one device.
        device_id, ports = device_ports
        # Plug the ports, first the native, then allowed
        for hostname, port_id, network_id, tenant_id, \
                port_name, device_owner, sg, orig_sg, vnic_type, \
                segments, bindings, vlan_type in \
                sorted(ports, key=lambda x: x[-1] == 'allowed'):
            print('Node: {}: Port {} {}'
                  .format(device_id, port_id, vlan_type))
            rpc.plug_port_into_network(
                device_id, hostname, port_id, network_id, tenant_id, port_name,
                device_owner, sg, orig_sg, vnic_type,
                segments=segments,
                switch_bindings=bindings,
                vlan_type=vlan_type)
    p = Pool(8)
    # Drain the pool; imap is lazy, so iterate to force execution.
    for item in p.imap(plug, six.iteritems(items)):
        pass
| [
"networking_arista.common.db_lib.forget_network_segment",
"networking_arista.ml2.rpc.get_rpc_wrapper",
"networking_arista.common.db_lib.forget_port",
"networking_arista._i18n._",
"eventlet.greenpool.GreenPool",
"eventlet.monkey_patch",
"networking_arista.common.db_lib.remember_vm",
"networking_arista.... | [((1789, 1816), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1806, 1816), True, 'from oslo_log import log as logging\n'), ((1817, 1887), 'oslo_config.cfg.CONF.import_group', 'cfg.CONF.import_group', (['"""ml2_arista"""', '"""networking_arista.common.config"""'], {}), "('ml2_arista', 'networking_arista.common.config')\n", (1838, 1887), False, 'from oslo_config import cfg\n'), ((607, 650), 'os.environ.get', 'os.environ.get', (['"""DISABLE_EVENTLET_PATCHING"""'], {}), "('DISABLE_EVENTLET_PATCHING')\n", (621, 650), False, 'import os\n'), ((677, 700), 'eventlet.monkey_patch', 'eventlet.monkey_patch', ([], {}), '()\n', (698, 700), False, 'import eventlet\n'), ((49933, 49965), 'neutron.common.config.init', 'common_config.init', (['sys.argv[1:]'], {}), '(sys.argv[1:])\n', (49951, 49965), True, 'from neutron.common import config as common_config\n'), ((50125, 50144), 'neutron_lib.context.get_admin_context', 'get_admin_context', ([], {}), '()\n', (50142, 50144), False, 'from neutron_lib.context import get_admin_context\n'), ((50155, 50175), 'networking_arista.common.db_lib.NeutronNets', 'db_lib.NeutronNets', ([], {}), '()\n', (50173, 50175), False, 'from networking_arista.common import db_lib\n'), ((50312, 50342), 'sqlalchemy.orm.relationship', 'relationship', (['PortBindingLevel'], {}), '(PortBindingLevel)\n', (50324, 50342), False, 'from sqlalchemy.orm import contains_eager, joinedload, relationship\n'), ((50374, 50419), 'sqlalchemy.orm.relationship', 'relationship', (['NetworkSegment'], {'lazy': '"""subquery"""'}), "(NetworkSegment, lazy='subquery')\n", (50386, 50419), False, 'from sqlalchemy.orm import contains_eager, joinedload, relationship\n'), ((50477, 50494), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (50488, 50494), False, 'from collections import defaultdict\n'), ((53203, 53210), 'eventlet.greenpool.GreenPool', 'Pool', (['(8)'], {}), '(8)\n', (53207, 53210), True, 'from 
eventlet.greenpool import GreenPool as Pool\n'), ((2413, 2433), 'networking_arista.common.db_lib.NeutronNets', 'db_lib.NeutronNets', ([], {}), '()\n', (2431, 2433), False, 'from networking_arista.common import db_lib\n'), ((2549, 2615), 'oslo_service.loopingcall.FixedIntervalLoopingCall', 'loopingcall.FixedIntervalLoopingCall', (['self._synchronization_thread'], {}), '(self._synchronization_thread)\n', (2585, 2615), False, 'from oslo_service import loopingcall\n'), ((3551, 3594), 'networking_arista.ml2.arista_sync.SyncService', 'arista_sync.SyncService', (['self.rpc', 'self.ndb'], {}), '(self.rpc, self.ndb)\n', (3574, 3594), False, 'from networking_arista.ml2 import arista_sync\n'), ((3857, 3876), 'neutron_lib.context.get_admin_context', 'get_admin_context', ([], {}), '()\n', (3874, 3876), False, 'from neutron_lib.context import get_admin_context\n'), ((4095, 4146), 'networking_arista.ml2.sec_group_callback.AristaSecurityGroupHandler', 'sec_group_callback.AristaSecurityGroupHandler', (['self'], {}), '(self)\n', (4140, 4146), False, 'from networking_arista.ml2 import sec_group_callback\n'), ((4938, 4987), 'networking_arista.common.db_lib.remember_tenant', 'db_lib.remember_tenant', (['plugin_context', 'tenant_id'], {}), '(plugin_context, tenant_id)\n', (4960, 4987), False, 'from networking_arista.common import db_lib\n'), ((5768, 5836), 'networking_arista.common.db_lib.is_network_provisioned', 'db_lib.is_network_provisioned', (['plugin_context', 'tenant_id', 'network_id'], {}), '(plugin_context, tenant_id, network_id)\n', (5797, 5836), False, 'from networking_arista.common import db_lib\n'), ((9045, 9113), 'networking_arista.common.db_lib.is_network_provisioned', 'db_lib.is_network_provisioned', (['plugin_context', 'tenant_id', 'network_id'], {}), '(plugin_context, tenant_id, network_id)\n', (9074, 9113), False, 'from networking_arista.common import db_lib\n'), ((30208, 30277), 'networking_arista.common.db_lib.is_port_provisioned', 'db_lib.is_port_provisioned', 
(['plugin_context', 'port_id', 'port_host_filter'], {}), '(plugin_context, port_id, port_host_filter)\n', (30234, 30277), False, 'from networking_arista.common import db_lib\n'), ((50252, 50274), 'networking_arista.ml2.rpc.get_rpc_wrapper', 'get_rpc_wrapper', (['confg'], {}), '(confg)\n', (50267, 50274), False, 'from networking_arista.ml2.rpc import get_rpc_wrapper\n'), ((53240, 53260), 'six.iteritems', 'six.iteritems', (['items'], {}), '(items)\n', (53253, 53260), False, 'import six\n'), ((2760, 2830), 'oslo_service.loopingcall.FixedIntervalLoopingCall', 'loopingcall.FixedIntervalLoopingCall', (['self._save_switch_configs_thread'], {}), '(self._save_switch_configs_thread)\n', (2796, 2830), False, 'from oslo_service import loopingcall\n'), ((3170, 3194), 'networking_arista.common.util.make_http_session', 'util.make_http_session', ([], {}), '()\n', (3192, 3194), False, 'from networking_arista.common import util\n'), ((7817, 7885), 'networking_arista.common.db_lib.is_network_provisioned', 'db_lib.is_network_provisioned', (['plugin_context', 'tenant_id', 'network_id'], {}), '(plugin_context, tenant_id, network_id)\n', (7846, 7885), False, 'from networking_arista.common import db_lib\n'), ((9171, 9235), 'networking_arista.common.db_lib.are_ports_attached_to_network', 'db_lib.are_ports_attached_to_network', (['plugin_context', 'network_id'], {}), '(plugin_context, network_id)\n', (9207, 9235), False, 'from networking_arista.common import db_lib\n'), ((19862, 19924), 'networking_arista.common.db_lib.is_port_provisioned', 'db_lib.is_port_provisioned', (['plugin_context', 'port_id', 'orig_host'], {}), '(plugin_context, port_id, orig_host)\n', (19888, 19924), False, 'from networking_arista.common import db_lib\n'), ((26339, 26408), 'networking_arista.common.db_lib.is_port_provisioned', 'db_lib.is_port_provisioned', (['plugin_context', 'port_id', 'port_host_filter'], {}), '(plugin_context, port_id, port_host_filter)\n', (26365, 26408), False, 'from networking_arista.common 
import db_lib\n'), ((27586, 27640), 'networking_arista.common.db_lib.forget_port', 'db_lib.forget_port', (['plugin_context', 'port_id', 'orig_host'], {}), '(plugin_context, port_id, orig_host)\n', (27604, 27640), False, 'from networking_arista.common import db_lib\n'), ((34790, 34842), 'networking_arista.common.db_lib.forget_port', 'db_lib.forget_port', (['plugin_context', 'port_id', 'host_id'], {}), '(plugin_context, port_id, host_id)\n', (34808, 34842), False, 'from networking_arista.common import db_lib\n'), ((37899, 37965), 'networking_arista.common.db_lib.get_bm_ports_for_device', 'db_lib.get_bm_ports_for_device', (['context._plugin_context', 'device_id'], {}), '(context._plugin_context, device_id)\n', (37929, 37965), False, 'from networking_arista.common import db_lib\n'), ((43138, 43192), 'networking_arista.common.db_lib.get_port_binding_level', 'db_lib.get_port_binding_level', (['plugin_context', 'filters'], {}), '(plugin_context, filters)\n', (43167, 43192), False, 'from networking_arista.common import db_lib\n'), ((44112, 44166), 'networking_arista.common.db_lib.num_nets_provisioned', 'db_lib.num_nets_provisioned', (['plugin_context', 'tenant_id'], {}), '(plugin_context, tenant_id)\n', (44139, 44166), False, 'from networking_arista.common import db_lib\n'), ((44181, 44234), 'networking_arista.common.db_lib.num_vms_provisioned', 'db_lib.num_vms_provisioned', (['plugin_context', 'tenant_id'], {}), '(plugin_context, tenant_id)\n', (44207, 44234), False, 'from networking_arista.common import db_lib\n'), ((44292, 44339), 'networking_arista.common.db_lib.forget_tenant', 'db_lib.forget_tenant', (['plugin_context', 'tenant_id'], {}), '(plugin_context, tenant_id)\n', (44312, 44339), False, 'from networking_arista.common import db_lib\n'), ((46903, 47004), 'networking_arista.common.db_lib.is_network_provisioned', 'db_lib.is_network_provisioned', (['plugin_context', 'tenant_id', 'network_id', 'segmentation_id', 'segment_id'], {}), '(plugin_context, tenant_id, 
network_id,\n segmentation_id, segment_id)\n', (46932, 47004), False, 'from networking_arista.common import db_lib\n'), ((49667, 49725), 'oslo_config.cfg.MultiStrOpt', 'cfg.MultiStrOpt', (['"""port_id"""'], {'short': '"""p"""', 'default': '[]', 'help': '""""""'}), "('port_id', short='p', default=[], help='')\n", (49682, 49725), False, 'from oslo_config import cfg\n'), ((49807, 49879), 'oslo_config.cfg.BoolOpt', 'cfg.BoolOpt', (['"""all_ports"""'], {'default': '(False)', 'help': '"""Should we sync all ports"""'}), "('all_ports', default=False, help='Should we sync all ports')\n", (49818, 49879), False, 'from oslo_config import cfg\n'), ((50843, 50883), 'sqlalchemy.orm.contains_eager', 'contains_eager', (['Port.port_binding_levels'], {}), '(Port.port_binding_levels)\n', (50857, 50883), False, 'from sqlalchemy.orm import contains_eager, joinedload, relationship\n'), ((51481, 51508), 'json.loads', 'json.loads', (['binding.profile'], {}), '(binding.profile)\n', (51491, 51508), False, 'import json\n'), ((3267, 3289), 'networking_arista.ml2.rpc.get_rpc_wrapper', 'get_rpc_wrapper', (['confg'], {}), '(confg)\n', (3282, 3289), False, 'from networking_arista.ml2.rpc import get_rpc_wrapper\n'), ((3492, 3522), 'networking_arista.ml2.rpc.arista_eapi.AristaRPCWrapperEapi', 'AristaRPCWrapperEapi', (['self.ndb'], {}), '(self.ndb)\n', (3512, 3522), False, 'from networking_arista.ml2.rpc.arista_eapi import AristaRPCWrapperEapi\n'), ((6467, 6531), 'networking_arista._i18n._LI', '_LI', (['"""Network %s is not created as it is not found in Arista DB"""'], {}), "('Network %s is not created as it is not found in Arista DB')\n", (6470, 6531), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((7019, 7052), 'networking_arista._i18n._LI', '_LI', (['"""Network name changed to %s"""'], {}), "('Network name changed to %s')\n", (7022, 7052), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((9449, 9512), 'neutron.plugins.ml2.common.exceptions.MechanismDriverError', 
'ml2_exc.MechanismDriverError', ([], {'method': '"""delete_network_precommit"""'}), "(method='delete_network_precommit')\n", (9477, 9512), True, 'from neutron.plugins.ml2.common import exceptions as ml2_exc\n'), ((9568, 9636), 'networking_arista.common.db_lib.forget_network_segment', 'db_lib.forget_network_segment', (['plugin_context', 'tenant_id', 'network_id'], {}), '(plugin_context, tenant_id, network_id)\n', (9597, 9636), False, 'from networking_arista.common import db_lib\n'), ((20090, 20181), 'networking_arista.common.db_lib.update_port', 'db_lib.update_port', (['plugin_context', 'device_id', 'new_host', 'port_id', 'network_id', 'tenant_id'], {}), '(plugin_context, device_id, new_host, port_id, network_id,\n tenant_id)\n', (20108, 20181), False, 'from networking_arista.common import db_lib\n'), ((23785, 23815), 'networking_arista._i18n._LI', '_LI', (['"""Port name changed to %s"""'], {}), "('Port name changed to %s')\n", (23788, 23815), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((25238, 25287), 'networking_arista.common.db_lib.remember_tenant', 'db_lib.remember_tenant', (['plugin_context', 'tenant_id'], {}), '(plugin_context, tenant_id)\n', (25260, 25287), False, 'from networking_arista.common import db_lib\n'), ((25304, 25418), 'networking_arista.common.db_lib.remember_network_segment', 'db_lib.remember_network_segment', (['plugin_context', 'tenant_id', 'network_id', 'seg[api.SEGMENTATION_ID]', 'seg[api.ID]'], {}), '(plugin_context, tenant_id, network_id, seg[\n api.SEGMENTATION_ID], seg[api.ID])\n', (25335, 25418), False, 'from networking_arista.common import db_lib\n'), ((26591, 26640), 'networking_arista.common.db_lib.remember_tenant', 'db_lib.remember_tenant', (['plugin_context', 'tenant_id'], {}), '(plugin_context, tenant_id)\n', (26613, 26640), False, 'from networking_arista.common import db_lib\n'), ((26657, 26744), 'networking_arista.common.db_lib.remember_vm', 'db_lib.remember_vm', (['plugin_context', 'device_id', 'host', 
'port_id', 'network_id', 'tenant_id'], {}), '(plugin_context, device_id, host, port_id, network_id,\n tenant_id)\n', (26675, 26744), False, 'from networking_arista.common import db_lib\n'), ((47632, 47676), 'networking_arista.common.exceptions.AristaSecurityGroupError', 'arista_exc.AristaSecurityGroupError', ([], {'msg': 'msg'}), '(msg=msg)\n', (47667, 47676), True, 'from networking_arista.common import exceptions as arista_exc\n'), ((48156, 48200), 'networking_arista.common.exceptions.AristaSecurityGroupError', 'arista_exc.AristaSecurityGroupError', ([], {'msg': 'msg'}), '(msg=msg)\n', (48191, 48200), True, 'from networking_arista.common import exceptions as arista_exc\n'), ((48781, 48825), 'networking_arista.common.exceptions.AristaSecurityGroupError', 'arista_exc.AristaSecurityGroupError', ([], {'msg': 'msg'}), '(msg=msg)\n', (48816, 48825), True, 'from networking_arista.common import exceptions as arista_exc\n'), ((50948, 50977), 'neutron.db.models_v2.Port.id.in_', 'Port.id.in_', (['cfg.CONF.port_id'], {}), '(cfg.CONF.port_id)\n', (50959, 50977), False, 'from neutron.db.models_v2 import Port\n'), ((8625, 8689), 'networking_arista._i18n._LI', '_LI', (['"""Network %s is not updated as it is not found in Arista DB"""'], {}), "('Network %s is not updated as it is not found in Arista DB')\n", (8628, 8689), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((9314, 9381), 'networking_arista._i18n._LI', '_LI', (['"""Network %s can not be deleted as it has ports attached to it"""'], {}), "('Network %s can not be deleted as it has ports attached to it')\n", (9317, 9381), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((10695, 10788), 'networking_arista._i18n._LE', '_LE', (['"""delete_network_postcommit: Did not delete network %(network_id)s. Reason: %(err)s"""'], {}), "('delete_network_postcommit: Did not delete network %(network_id)s. 
Reason: %(err)s'\n )\n", (10698, 10788), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((25167, 25215), 'networking_arista._i18n._LI', '_LI', (['"""Adding %s to provisioned network database"""'], {}), "('Adding %s to provisioned network database')\n", (25170, 25215), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((27241, 27328), 'networking_arista.common.db_lib.update_port', 'db_lib.update_port', (['plugin_context', 'device_id', 'host', 'port_id', 'network_id', 'tenant_id'], {}), '(plugin_context, device_id, host, port_id, network_id,\n tenant_id)\n', (27259, 27328), False, 'from networking_arista.common import db_lib\n'), ((32958, 32990), 'networking_arista._i18n._LI', '_LI', (['"""Port plugged into network"""'], {}), "('Port plugged into network')\n", (32961, 32990), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((33913, 33949), 'networking_arista._i18n._LI', '_LI', (['"""Port not plugged into network"""'], {}), "('Port not plugged into network')\n", (33916, 33949), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((34022, 34101), 'networking_arista._i18n._LE', '_LE', (['"""update_port_postcommit: Did not update port %(port_id)s. Reason: %(err)s"""'], {}), "('update_port_postcommit: Did not update port %(port_id)s. 
Reason: %(err)s')\n", (34025, 34101), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((37679, 37755), 'networking_arista.common.db_lib.get_segmentation_id_by_segment_id', 'db_lib.get_segmentation_id_by_segment_id', (['plugin_context', 'segment.segment_id'], {}), '(plugin_context, segment.segment_id)\n', (37719, 37755), False, 'from networking_arista.common import db_lib\n'), ((44531, 44583), 'neutron.plugins.ml2.common.exceptions.MechanismDriverError', 'ml2_exc.MechanismDriverError', ([], {'method': '"""delete_tenant"""'}), "(method='delete_tenant')\n", (44559, 44583), True, 'from neutron.plugins.ml2.common import exceptions as ml2_exc\n'), ((47541, 47576), 'networking_arista._i18n._', '_', (['"""Failed to create ACL on EOS %s"""'], {}), "('Failed to create ACL on EOS %s')\n", (47542, 47576), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((48059, 48099), 'networking_arista._i18n._', '_', (['"""Failed to create ACL rule on EOS %s"""'], {}), "('Failed to create ACL rule on EOS %s')\n", (48060, 48099), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((48684, 48724), 'networking_arista._i18n._', '_', (['"""Failed to delete ACL rule on EOS %s"""'], {}), "('Failed to delete ACL rule on EOS %s')\n", (48685, 48724), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((6253, 6340), 'networking_arista._i18n._LE', '_LE', (['"""create_network_postcommit: Did not create network %(name)s. Reason: %(err)s"""'], {}), "('create_network_postcommit: Did not create network %(name)s. 
Reason: %(err)s'\n )\n", (6256, 6340), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((31477, 31507), 'neutron.plugins.ml2.common.exceptions.MechanismDriverError', 'ml2_exc.MechanismDriverError', ([], {}), '()\n', (31505, 31507), True, 'from neutron.plugins.ml2.common import exceptions as ml2_exc\n'), ((40695, 40793), 'networking_arista.common.db_lib.forget_network_segment', 'db_lib.forget_network_segment', (['plugin_context', 'tenant_id', 'network_id', 'binding_level.segment_id'], {}), '(plugin_context, tenant_id, network_id,\n binding_level.segment_id)\n', (40724, 40793), False, 'from networking_arista.common import db_lib\n'), ((8358, 8445), 'networking_arista._i18n._LE', '_LE', (['"""update_network_postcommit: Did not update network %(name)s. Reason: %(err)s"""'], {}), "('update_network_postcommit: Did not update network %(name)s. Reason: %(err)s'\n )\n", (8361, 8445), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((15729, 15837), 'networking_arista._i18n._LE', '_LE', (['"""bind_port for port %(port)s: Failed to allocate dynamic segment for physnet %(physnet)s. %(exc)s"""'], {}), "('bind_port for port %(port)s: Failed to allocate dynamic segment for physnet %(physnet)s. 
%(exc)s'\n )\n", (15732, 15837), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((23010, 23108), 'networking_arista.common.db_lib.forget_network_segment', 'db_lib.forget_network_segment', (['plugin_context', 'tenant_id', 'network_id', 'binding_level.segment_id'], {}), '(plugin_context, tenant_id, network_id,\n binding_level.segment_id)\n', (23039, 23108), False, 'from networking_arista.common import db_lib\n'), ((31409, 31449), 'networking_arista._i18n._LE', '_LE', (['"""Failed to create network segments"""'], {}), "('Failed to create network segments')\n", (31412, 31449), False, 'from networking_arista._i18n import _, _LI, _LE\n'), ((50707, 50739), 'sqlalchemy.orm.joinedload', 'joinedload', (['Port.security_groups'], {}), '(Port.security_groups)\n', (50717, 50739), False, 'from sqlalchemy.orm import contains_eager, joinedload, relationship\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import os.path
import uuid
from yamlfred.utils import remove_default, merge_dicts
from yamlfred.utils import Include
# Per-object-type defaults for Alfred workflow objects. merge_dicts() fills
# these into loaded objects, and remove_default() strips values that still
# match them when dumping, keeping the serialized YAML minimal.
defaults = {
    'alfred.workflow.output.notification': {
        'config': {'removeextension': False, 'output': 0, 'lastpathcomponent': False, 'onlyshowifquerypopulated': False, 'sticky': False},
        'version': 0,
    },
    'alfred.workflow.trigger.hotkey': {
        'config': {'leftcursor': False, 'argument': 0, 'relatedAppsMode': 0, 'action': 0, 'hotkey': 0, 'hotstring': '', 'hotmod': 0, 'modsmode': 0},
        'version': 1, },
    'alfred.workflow.action.openfile': {
        'config': {},
        'version': 1,
    },
    'alfred.workflow.input.keyword': {
        'config': {'argumenttype': 0, 'withspace': True},
        'version': 0,
    },
    'alfred.workflow.trigger.external': {
        'config': {},
        'version': 0,
    },
    'alfred.workflow.output.largetype': {
        'version': 0,
    },
    'alfred.workflow.action.revealfile': {
        'version': 0,
    },
    'alfred.workflow.input.filefilter': {
        'config': {'scopes': [], 'includesystem': False, 'withspace': True, 'anchorfields': True, 'daterange': 0, 'types': []},
        'version': 0,
    },
    'alfred.workflow.input.scriptfilter': {
        'config': {'withspace': True, 'escaping': 102, 'script': '', 'argumenttype': 0, 'type': 0,
                   'queuedelaycustom': 3, 'queuedelayimmediatelyinitially': True, 'queuedelaymode': 0, 'queuemode': 1},
        'version': 0,
    },
    'alfred.workflow.action.browseinalfred': {
        'config': {},
        'version': 0,
    },
    'alfred.workflow.trigger.action': {
        'config': {'filetypes': [], 'acceptsmulti': False},
        'version': 0,
    },
    'alfred.workflow.output.clipboard': {
        'config': {'clipboardtext': '', 'autopaste': False},
        'version': 0, },
    'alfred.workflow.output.script': {
        'config': {'escaping': 102, 'type': 0, 'script': '', 'concurrently': False},
        'version': 0, },
    'alfred.workflow.action.launchfiles': {
        'config': {'paths': [], 'toggle': False},
        'version': 0,
    },
    'alfred.workflow.trigger.contact': {
        'config': {},
        'version': 0,
    },
    'alfred.workflow.action.systemwebsearch': {
        'config': {},
        'version': 0,
    },
    'alfred.workflow.trigger.fallback': {
        'config': {},
        'version': 0,
    },
    'alfred.workflow.action.openurl': {
        'config': {'utf8': True, 'plusspaces': False},
        'version': 0,
    },
    'alfred.workflow.action.systemcommand': {
        'config': {'command': 0, 'confirm': False},
        'version': 1,
    },
    'alfred.workflow.action.itunescommand': {
        'config': {'command': 0},
        'version': 0,
    },
    'alfred.workflow.action.script': {
        'config': {'escaping': 102, 'type': 0, 'script': '', 'concurrently': False},
        'version': 0,
    },
    'alfred.workflow.action.applescript': {
        'config': {'cachescript': False, 'applescript': ''},
        'version': 0,
    },
    'alfred.workflow.action.terminalcommand': {
        'config': {'escaping': 0},
        'version': 0,
    },
    'alfred.workflow.trigger.remote': {
        'config': {'argumenttype': 0, 'workflowonly': False},
        'version': 0,
    },
}
class AlfredObject(object):
    """One node of an Alfred workflow (trigger, input, action or output).

    Wraps the raw property dict, merges per-type defaults into it and,
    for script-carrying object types, externalises the script body to a
    separate file on dump().
    """

    def __init__(self, dic):
        self.type = dic['type']
        default = defaults[self.type] if self.type in defaults else {}
        self.prop = merge_dicts(default, dic)
        if 'uid' not in self.prop:
            # BUG FIX: uuid.uuid4() returns a UUID object, but dump()
            # passes the uid to os.path.join(), which requires a str and
            # raised TypeError before. Store the string form instead.
            self.prop['uid'] = str(uuid.uuid4())
        # Name of the config key that holds an inline script body, if any.
        self.script_type = None
        if self.type == 'alfred.workflow.action.applescript':
            self.script_type = 'applescript'
        elif self.type in ['alfred.workflow.input.scriptfilter',
                           'alfred.workflow.output.script',
                           'alfred.workflow.action.script']:
            self.script_type = 'script'

    def dump(self, script_dir='.'):
        """Return the properties with default values stripped.

        If this object carries a script, the script body is written to
        ``<script_dir>/<uid>`` and replaced by an Include reference.
        """
        default = defaults[self.type] if self.type in defaults else {}
        prop = remove_default(self.prop, default)
        if self.script_type:
            path = os.path.join(script_dir, self.prop['uid'])
            with open(path, 'w') as f:
                # NOTE(review): assumes the script key is always present;
                # a missing key would make write(None) raise -- confirm.
                script = self.prop['config'].get(self.script_type)
                f.write(script)
            prop['config'][self.script_type] = Include(path)
        return prop
| [
"yamlfred.utils.remove_default",
"yamlfred.utils.merge_dicts",
"yamlfred.utils.Include",
"uuid.uuid4"
] | [((3571, 3596), 'yamlfred.utils.merge_dicts', 'merge_dicts', (['default', 'dic'], {}), '(default, dic)\n', (3582, 3596), False, 'from yamlfred.utils import remove_default, merge_dicts\n'), ((4179, 4213), 'yamlfred.utils.remove_default', 'remove_default', (['self.prop', 'default'], {}), '(self.prop, default)\n', (4193, 4213), False, 'from yamlfred.utils import remove_default, merge_dicts\n'), ((3663, 3675), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (3673, 3675), False, 'import uuid\n'), ((4490, 4503), 'yamlfred.utils.Include', 'Include', (['path'], {}), '(path)\n', (4497, 4503), False, 'from yamlfred.utils import Include\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import deepnlpf.log as log
from deepnlpf.core.util import Util
class PluginManager:
    """Downloads, installs, loads and removes DeepNLPF plugins.

    Plugins live under ``~/deepnlpf_data/plugins/<name>`` and are fetched
    as zip archives from the ``deepnlpf`` GitHub organisation.
    """

    def __init__(self):
        self.HOME = os.environ["HOME"]
        # Base URL of the GitHub organisation hosting the plugin repos.
        self.PLUGIN_SERVER = "https://github.com/deepnlpf/"
        # Local directory where installed plugins are unpacked.
        # NOTE: already HOME-prefixed -- do not prepend HOME again.
        self.PLUGIN_PATH = self.HOME + "/deepnlpf_data/plugins/"
        self.EXTENSION = ".zip"

    def load_plugin(self, plugin_name):
        """Import and return the ``plugin_<name>`` module of an installed plugin.

        sys.path is temporarily extended with the plugin directory and
        restored afterwards, even if the import fails.
        """
        directory, module_name = os.path.split(plugin_name)
        module_name = os.path.splitext(module_name)[0]

        path = list(sys.path)
        sys.path.insert(0, self.PLUGIN_PATH + plugin_name)

        try:
            module = __import__("plugin_%s" % module_name)
        finally:
            sys.path[:] = path  # restore.

        return module

    def load_manifest(self):
        """Return the parsed ``manifest.json`` of every installed plugin."""
        file_manifest = "manifest"
        plugins = []

        for plugin in os.listdir(self.PLUGIN_PATH):
            location = os.path.join(self.PLUGIN_PATH, plugin)
            # Skip anything that is not a plugin directory with a manifest.
            if not os.path.isdir(location) or not file_manifest + ".json" in os.listdir(
                location
            ):
                continue
            path = self.PLUGIN_PATH + "/" + plugin + "/" + file_manifest + ".json"
            plugins.append(Util().openfile_json(path))

        return plugins

    def call_plugin_nlp(self, plugin_name, document, pipeline):
        """Run an NLP plugin over *document* with the given *pipeline*."""
        plugin = self.load_plugin(plugin_name)
        return plugin.Plugin(document, pipeline).run()

    def call_plugin_db(
        self, plugin_name, operation, collection, document=None, key=None
    ):
        """Dispatch a database *operation* to the given storage plugin.

        Supported operations: insert, select_one, select_all,
        select_all_key, update, delete.
        """
        plugin = self.load_plugin(plugin_name)
        log.logger.info("Plugin call: {}".format(plugin_name))

        if operation == "insert":
            result = plugin.Plugin().insert(collection, document)
        elif operation == "select_one":
            result = plugin.Plugin().select_one(collection, key)
        elif operation == "select_all":
            result = plugin.Plugin().select_all(collection)
        elif operation == "select_all_key":
            result = plugin.Plugin().select_all_key(collection, key)
        elif operation == "update":
            result = plugin.Plugin().update(collection, key, document)
        elif operation == "delete":
            result = plugin.Plugin().delete(collection, key)
        else:
            # Previously an unknown operation raised an opaque
            # UnboundLocalError; fail with an explicit message instead.
            raise ValueError("Unknown database operation: %s" % operation)

        return result

    def install(self, plugin_name):
        """Download, unpack and set up a plugin from the plugin server.

        Exits the interpreter (sys.exit) on both success and failure,
        matching the original CLI-style behaviour.
        """
        import zipfile

        from homura import download  # gestor fast download file.

        # URL for download of plugin.
        # https://github.com/deepnlpf/plugin_stanza/archive/master.zip
        URL = (
            self.PLUGIN_SERVER
            + "plugin_"
            + plugin_name
            + "/archive/master"
            + self.EXTENSION
        )

        # Path for save plugin.
        PATH_DOWNLOAD_PLUGIN = (
            self.PLUGIN_PATH + "plugin_" + plugin_name + "-master" + self.EXTENSION
        )

        # check folder plugin exist.
        if not os.path.exists(self.PLUGIN_PATH):
            os.makedirs(self.PLUGIN_PATH)

        # Download plugin.
        try:
            print("Downloading plugin", plugin_name, "..")
            # check url exists.
            download(url=URL, path=PATH_DOWNLOAD_PLUGIN)
        except Exception as err:
            print("❗️Plugin no found!")
            log.logger.error(err)
            sys.exit(0)

        # Extracting files plugin.
        try:
            fantasy_zip = zipfile.ZipFile(PATH_DOWNLOAD_PLUGIN)
            fantasy_zip.extractall(self.PLUGIN_PATH)
            fantasy_zip.close()
        except Exception as err:
            print("❗️Error extracting files!")
            log.logger.error(err)
            sys.exit(0)

        # Config dir name plugin: strip GitHub's "-master" suffix.
        try:
            os.rename(
                self.PLUGIN_PATH + "plugin_" + plugin_name + "-master",
                self.PLUGIN_PATH + plugin_name,
            )
        except Exception as err:
            print("❗️Error config directory plugin!")
            log.logger.error(err)
            sys.exit(0)

        # Install requirements.
        try:
            # Check in plugin file requirements.sh exist.
            if os.path.isfile(self.PLUGIN_PATH + plugin_name + "/requeriments.sh"):
                print("Install requirements..")
                os.system(
                    "cd "
                    + str(
                        self.PLUGIN_PATH
                        + plugin_name
                        + " && chmod 777 requeriments.sh && ./requeriments.sh"
                    )
                )
        except Exception as err:
            print("❗Error when executing the requeriments.sh plugin file!")
            log.logger.error(err)
            sys.exit(0)

        os.remove(PATH_DOWNLOAD_PLUGIN)  # clear file zip.

        print("🎉 Plugin", plugin_name, "installed!")
        log.logger.info("Plugin installed: {}".format(plugin_name))
        print("Path of installed plugins:", self.PLUGIN_PATH)
        sys.exit(0)

    def uninstall(self, plugin_name):
        """Remove an installed plugin's directory."""
        import shutil

        # BUG FIX: the old code prefixed HOME twice (PLUGIN_PATH already
        # starts with HOME) and called os.remove(), which cannot delete
        # the extracted plugin *directory* -- uninstall always failed.
        plugin_dir = self.PLUGIN_PATH + plugin_name

        try:
            print("Uninstall plugin", plugin_name, "..")
            shutil.rmtree(plugin_dir)
            print("Plugin", plugin_name, "unistalled!")
            log.logger.info("Plugin unistalled: {}".format(plugin_name))
        except Exception as err:
            log.logger.error(err)
            print("Plugin not found!")

    def listplugins(self):
        """List installed plugins (not implemented yet)."""
        # TODO: enumerate directories under self.PLUGIN_PATH.
        pass
| [
"os.path.exists",
"homura.download",
"os.listdir",
"sys.path.insert",
"os.makedirs",
"zipfile.ZipFile",
"os.rename",
"os.path.splitext",
"os.path.join",
"os.path.split",
"os.path.isfile",
"deepnlpf.core.util.Util",
"os.path.isdir",
"sys.exit",
"deepnlpf.log.logger.error",
"os.remove"
] | [((449, 475), 'os.path.split', 'os.path.split', (['plugin_name'], {}), '(plugin_name)\n', (462, 475), False, 'import os\n'), ((570, 620), 'sys.path.insert', 'sys.path.insert', (['(0)', '(self.PLUGIN_PATH + plugin_name)'], {}), '(0, self.PLUGIN_PATH + plugin_name)\n', (585, 620), False, 'import sys\n'), ((885, 913), 'os.listdir', 'os.listdir', (['self.PLUGIN_PATH'], {}), '(self.PLUGIN_PATH)\n', (895, 913), False, 'import os\n'), ((4711, 4742), 'os.remove', 'os.remove', (['PATH_DOWNLOAD_PLUGIN'], {}), '(PATH_DOWNLOAD_PLUGIN)\n', (4720, 4742), False, 'import os\n'), ((4953, 4964), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4961, 4964), False, 'import sys\n'), ((498, 527), 'os.path.splitext', 'os.path.splitext', (['module_name'], {}), '(module_name)\n', (514, 527), False, 'import os\n'), ((938, 976), 'os.path.join', 'os.path.join', (['self.PLUGIN_PATH', 'plugin'], {}), '(self.PLUGIN_PATH, plugin)\n', (950, 976), False, 'import os\n'), ((2939, 2971), 'os.path.exists', 'os.path.exists', (['self.PLUGIN_PATH'], {}), '(self.PLUGIN_PATH)\n', (2953, 2971), False, 'import os\n'), ((2985, 3014), 'os.makedirs', 'os.makedirs', (['self.PLUGIN_PATH'], {}), '(self.PLUGIN_PATH)\n', (2996, 3014), False, 'import os\n'), ((3159, 3203), 'homura.download', 'download', ([], {'url': 'URL', 'path': 'PATH_DOWNLOAD_PLUGIN'}), '(url=URL, path=PATH_DOWNLOAD_PLUGIN)\n', (3167, 3203), False, 'from homura import download\n'), ((3410, 3447), 'zipfile.ZipFile', 'zipfile.ZipFile', (['PATH_DOWNLOAD_PLUGIN'], {}), '(PATH_DOWNLOAD_PLUGIN)\n', (3425, 3447), False, 'import zipfile\n'), ((3731, 3833), 'os.rename', 'os.rename', (["(self.PLUGIN_PATH + 'plugin_' + plugin_name + '-master')", '(self.PLUGIN_PATH + plugin_name)'], {}), "(self.PLUGIN_PATH + 'plugin_' + plugin_name + '-master', self.\n PLUGIN_PATH + plugin_name)\n", (3740, 3833), False, 'import os\n'), ((4140, 4207), 'os.path.isfile', 'os.path.isfile', (["(self.PLUGIN_PATH + plugin_name + '/requeriments.sh')"], {}), "(self.PLUGIN_PATH + 
plugin_name + '/requeriments.sh')\n", (4154, 4207), False, 'import os\n'), ((5193, 5224), 'os.remove', 'os.remove', (['PATH_DOWNLOAD_PLUGIN'], {}), '(PATH_DOWNLOAD_PLUGIN)\n', (5202, 5224), False, 'import os\n'), ((3289, 3310), 'deepnlpf.log.logger.error', 'log.logger.error', (['err'], {}), '(err)\n', (3305, 3310), True, 'import deepnlpf.log as log\n'), ((3323, 3334), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3331, 3334), False, 'import sys\n'), ((3625, 3646), 'deepnlpf.log.logger.error', 'log.logger.error', (['err'], {}), '(err)\n', (3641, 3646), True, 'import deepnlpf.log as log\n'), ((3659, 3670), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3667, 3670), False, 'import sys\n'), ((3975, 3996), 'deepnlpf.log.logger.error', 'log.logger.error', (['err'], {}), '(err)\n', (3991, 3996), True, 'import deepnlpf.log as log\n'), ((4009, 4020), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4017, 4020), False, 'import sys\n'), ((4656, 4677), 'deepnlpf.log.logger.error', 'log.logger.error', (['err'], {}), '(err)\n', (4672, 4677), True, 'import deepnlpf.log as log\n'), ((4690, 4701), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4698, 4701), False, 'import sys\n'), ((5399, 5420), 'deepnlpf.log.logger.error', 'log.logger.error', (['err'], {}), '(err)\n', (5415, 5420), True, 'import deepnlpf.log as log\n'), ((996, 1019), 'os.path.isdir', 'os.path.isdir', (['location'], {}), '(location)\n', (1009, 1019), False, 'import os\n'), ((1054, 1074), 'os.listdir', 'os.listdir', (['location'], {}), '(location)\n', (1064, 1074), False, 'import os\n'), ((1241, 1247), 'deepnlpf.core.util.Util', 'Util', ([], {}), '()\n', (1245, 1247), False, 'from deepnlpf.core.util import Util\n')] |
#
# Copyright 2017 Wooga GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from datetime import datetime
def get_conf(context):
    """Return the configuration attached to *context*.

    On first access an empty skeleton with the three top-level sections
    ('targets', 'aggregations', 'context') is created and stored.
    """
    if 'conf' not in context:
        context.conf = {section: {} for section in ('targets', 'aggregations', 'context')}
    return context.conf
def get_model_conf(context, model):
    """Look up the config section for *model*, keyed by its plural name."""
    return get_conf(context).get(model + 's')
def get_aggregation_conf(context):
    """Shortcut for the 'aggregations' section of the configuration."""
    return get_model_conf(context, 'aggregation')
def get_target_conf(context):
    """Shortcut for the 'targets' section of the configuration."""
    return get_model_conf(context, 'target')
def get_context_conf(context):
    """Return the free-form 'context' section of the configuration."""
    return get_conf(context).get('context')
def min_config():
    """Build a minimal valid configuration fixture.

    It wires one target ('test') to one aggregation ('test_agg') and
    leaves the context section empty.
    """
    target = {
        'start_date': datetime.now(),
        'schema': 'test',
        'key_columns': [
            'key_column',
        ],
        'aggregated_columns': {
            'test_agg': {
                'test_val': None,
            },
        },
    }
    aggregation = {
        'query': "SELECT 'key' AS key_column, 'test_val' AS test_val FROM DUAL",
        'time_key': 'time_key',
    }
    return {
        'targets': {'test': target},
        'aggregations': {'test_agg': aggregation},
        'context': {},
    }
def min_dependency_config(dep_type):
    """Return a minimal dependency config for *dep_type*.

    Supported types: 'tracking', 'delta', 'task', 'target'. The returned
    dict always carries a 'type' key naming the dependency kind; an
    unknown *dep_type* raises KeyError.
    """
    templates = {
        'tracking': {'schema': 'test', 'table': 'test'},
        'delta': {'delta': 0},
        'task': {'dag_id': 'test', 'task_id': 'test'},
        'target': {'target': 'test'},
    }
    return dict(templates[dep_type], type=dep_type)
| [
"datetime.datetime.now"
] | [((1783, 1797), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1795, 1797), False, 'from datetime import datetime\n')] |
from django import test
from hexa.user_management.models import Membership, Team, User
from ..models import Database, DatabasePermission, Table
class PermissionTest(test.TestCase):
    """Permission filtering must deduplicate results.

    A user who belongs to two teams that both grant access to the same
    database must still see that database (and its tables) exactly once,
    while a superuser sees every database.
    """

    @classmethod
    def setUpTestData(cls):
        cls.DB1 = Database.objects.create(
            hostname="host", username="user", password="<PASSWORD>", database="db1"
        )
        cls.DB2 = Database.objects.create(
            hostname="host", username="user", password="<PASSWORD>", database="db2"
        )
        cls.TEAM1 = Team.objects.create(name="Test Team1")
        cls.TEAM2 = Team.objects.create(name="Test Team2")
        # Both teams are granted access to DB1 only.
        for team in (cls.TEAM1, cls.TEAM2):
            DatabasePermission.objects.create(database=cls.DB1, team=team)
        cls.USER_REGULAR = User.objects.create_user(
            "<EMAIL>",
            "regular",
        )
        # The regular user is a member of both teams (the dedup scenario).
        Membership.objects.create(team=cls.TEAM1, user=cls.USER_REGULAR)
        Membership.objects.create(team=cls.TEAM2, user=cls.USER_REGULAR)
        cls.USER_SUPER = User.objects.create_user(
            "<EMAIL>",
            "super",
            is_superuser=True,
        )
        # Two tables per database: table-db1-0/1 and table-db2-0/1.
        for db in (cls.DB1, cls.DB2):
            for suffix in (0, 1):
                Table.objects.create(name=f"table-{db.database}-{suffix}", database=db)

    def test_instance_dedup(self):
        """
        - user super see 2 db (all of them)
        - user regular see only test db 1, one time
        """
        regular_dbs = (
            Database.objects.filter_for_user(self.USER_REGULAR)
            .order_by("database")
            .values("database")
        )
        self.assertEqual(list(regular_dbs), [{"database": "db1"}])

        super_dbs = (
            Database.objects.filter_for_user(self.USER_SUPER)
            .order_by("database")
            .values("database")
        )
        self.assertEqual(
            list(super_dbs), [{"database": "db1"}, {"database": "db2"}]
        )

    def test_table_dedup(self):
        """
        regular user can see 2 tables
        super user can see 4 tables
        """
        regular_tables = (
            Table.objects.filter_for_user(self.USER_REGULAR)
            .order_by("name")
            .values("name")
        )
        self.assertEqual(
            list(regular_tables),
            [{"name": "table-db1-0"}, {"name": "table-db1-1"}],
        )

        super_tables = (
            Table.objects.filter_for_user(self.USER_SUPER)
            .order_by("name")
            .values("name")
        )
        self.assertEqual(
            list(super_tables),
            [
                {"name": "table-db1-0"},
                {"name": "table-db1-1"},
                {"name": "table-db2-0"},
                {"name": "table-db2-1"},
            ],
        )
| [
"hexa.user_management.models.Team.objects.create",
"hexa.user_management.models.Membership.objects.create",
"hexa.user_management.models.User.objects.create_user"
] | [((524, 562), 'hexa.user_management.models.Team.objects.create', 'Team.objects.create', ([], {'name': '"""Test Team1"""'}), "(name='Test Team1')\n", (543, 562), False, 'from hexa.user_management.models import Membership, Team, User\n'), ((583, 621), 'hexa.user_management.models.Team.objects.create', 'Team.objects.create', ([], {'name': '"""Test Team2"""'}), "(name='Test Team2')\n", (602, 621), False, 'from hexa.user_management.models import Membership, Team, User\n'), ((801, 847), 'hexa.user_management.models.User.objects.create_user', 'User.objects.create_user', (['"""<EMAIL>"""', '"""regular"""'], {}), "('<EMAIL>', 'regular')\n", (825, 847), False, 'from hexa.user_management.models import Membership, Team, User\n'), ((891, 955), 'hexa.user_management.models.Membership.objects.create', 'Membership.objects.create', ([], {'team': 'cls.TEAM1', 'user': 'cls.USER_REGULAR'}), '(team=cls.TEAM1, user=cls.USER_REGULAR)\n', (916, 955), False, 'from hexa.user_management.models import Membership, Team, User\n'), ((964, 1028), 'hexa.user_management.models.Membership.objects.create', 'Membership.objects.create', ([], {'team': 'cls.TEAM2', 'user': 'cls.USER_REGULAR'}), '(team=cls.TEAM2, user=cls.USER_REGULAR)\n', (989, 1028), False, 'from hexa.user_management.models import Membership, Team, User\n'), ((1054, 1117), 'hexa.user_management.models.User.objects.create_user', 'User.objects.create_user', (['"""<EMAIL>"""', '"""super"""'], {'is_superuser': '(True)'}), "('<EMAIL>', 'super', is_superuser=True)\n", (1078, 1117), False, 'from hexa.user_management.models import Membership, Team, User\n')] |
from django.urls import path
from . import views
# URL routing table for this app: maps each URL path to its view callable.
urlpatterns = [
    path('', views.index),  # app root
    path('hello', views.hello),
    path('world', views.world),
    path('users', views.users),  # full user list
    path('user/<int:user_id>', views.user),  # single user by primary key
]
| [
"django.urls.path"
] | [((71, 92), 'django.urls.path', 'path', (['""""""', 'views.index'], {}), "('', views.index)\n", (75, 92), False, 'from django.urls import path\n'), ((98, 124), 'django.urls.path', 'path', (['"""hello"""', 'views.hello'], {}), "('hello', views.hello)\n", (102, 124), False, 'from django.urls import path\n'), ((130, 156), 'django.urls.path', 'path', (['"""world"""', 'views.world'], {}), "('world', views.world)\n", (134, 156), False, 'from django.urls import path\n'), ((162, 188), 'django.urls.path', 'path', (['"""users"""', 'views.users'], {}), "('users', views.users)\n", (166, 188), False, 'from django.urls import path\n'), ((194, 232), 'django.urls.path', 'path', (['"""user/<int:user_id>"""', 'views.user'], {}), "('user/<int:user_id>', views.user)\n", (198, 232), False, 'from django.urls import path\n')] |
import unittest
from pinq.transforms import identity
class predicate_true_tests(unittest.TestCase):
    """identity() must hand back its argument unchanged for several types."""

    def test_identity_int(self):
        value = 123
        self.assertEqual(identity(value), value)

    def test_identity_str(self):
        value = "apple"
        self.assertEqual(identity(value), value)

    def test_identity_list(self):
        value = ["great", 100029]
        self.assertEqual(identity(value), value)
| [
"pinq.transforms.identity"
] | [((161, 174), 'pinq.transforms.identity', 'identity', (['(123)'], {}), '(123)\n', (169, 174), False, 'from pinq.transforms import identity\n'), ((240, 257), 'pinq.transforms.identity', 'identity', (['"""apple"""'], {}), "('apple')\n", (248, 257), False, 'from pinq.transforms import identity\n'), ((328, 355), 'pinq.transforms.identity', 'identity', (["['great', 100029]"], {}), "(['great', 100029])\n", (336, 355), False, 'from pinq.transforms import identity\n')] |
import slog
# Demo of slog's quiet/unquiet filtering.
slog.quiet("debbug")  # suppress 'debbug'-level messages from here on
slog.debbug("Hi!")  # filtered out
slog.warning("Debbug?")  # other levels still pass through
slog.unquiet()  # lift the suppression again
slog.debbug("Aaaaaaaaa")  # visible again
# quiet() also works as a context manager: suppression is scoped to the block.
with slog.quiet("debbug"):
    slog.debbug("Hello!")  # filtered inside the block
    slog.info("Hi!")
slog.info("Debbug?")
slog.debbug("Hey.")  # visible: the context manager restored the level
| [
"slog.quiet",
"slog.unquiet",
"slog.warning",
"slog.info",
"slog.debbug"
] | [((13, 33), 'slog.quiet', 'slog.quiet', (['"""debbug"""'], {}), "('debbug')\n", (23, 33), False, 'import slog\n'), ((35, 53), 'slog.debbug', 'slog.debbug', (['"""Hi!"""'], {}), "('Hi!')\n", (46, 53), False, 'import slog\n'), ((54, 77), 'slog.warning', 'slog.warning', (['"""Debbug?"""'], {}), "('Debbug?')\n", (66, 77), False, 'import slog\n'), ((79, 93), 'slog.unquiet', 'slog.unquiet', ([], {}), '()\n', (91, 93), False, 'import slog\n'), ((95, 119), 'slog.debbug', 'slog.debbug', (['"""Aaaaaaaaa"""'], {}), "('Aaaaaaaaa')\n", (106, 119), False, 'import slog\n'), ((197, 217), 'slog.info', 'slog.info', (['"""Debbug?"""'], {}), "('Debbug?')\n", (206, 217), False, 'import slog\n'), ((218, 237), 'slog.debbug', 'slog.debbug', (['"""Hey."""'], {}), "('Hey.')\n", (229, 237), False, 'import slog\n'), ((127, 147), 'slog.quiet', 'slog.quiet', (['"""debbug"""'], {}), "('debbug')\n", (137, 147), False, 'import slog\n'), ((153, 174), 'slog.debbug', 'slog.debbug', (['"""Hello!"""'], {}), "('Hello!')\n", (164, 174), False, 'import slog\n'), ((179, 195), 'slog.info', 'slog.info', (['"""Hi!"""'], {}), "('Hi!')\n", (188, 195), False, 'import slog\n')] |
import setuptools
import os
# Use the README as the long description shown on PyPI.
with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="supportr",
    version="0.1",
    author="<NAME> and <NAME>",
    author_email="<EMAIL>",
    description="Supportr",
    long_description=long_description,
    long_description_content_type="text/markdown",
    python_requires=">=3.6",
    # BUG FIX: "csv" was removed from install_requires -- it is part of the
    # standard library and there is no installable PyPI package of that
    # name, so installing this project with pip always failed.
    # "sklearn" was replaced by its real distribution name "scikit-learn"
    # (the "sklearn" meta-package on PyPI is deprecated/blocked).
    install_requires=["numpy", "datrie", "nltk", "gensim",
                      "pandas", "spacy", "scikit-learn"],
    url="https://github.com/davidjurgens/support",
    include_package_data=True,
    packages=setuptools.find_packages()
)
| [
"setuptools.find_packages"
] | [((577, 603), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (601, 603), False, 'import setuptools\n')] |
import logging
import os
from zipfile import ZipFile
import yaml
from pathlib import Path
# Global logging setup: a file handler writing to Logs/running.log.
# NOTE(review): assumes the "Logs" directory already exists -- confirm.
logging.basicConfig(
    filename=os.path.join("Logs", "running.log"),
    format="[%(asctime)s: %(module)s: %(levelname)s]: %(message)s",
    level=logging.INFO,
    filemode="a"  # append so earlier runs are preserved
)
def read_yaml(config_path):
    """Parse the YAML file at *config_path* and return its contents."""
    with open(config_path, "r") as config_file:
        parsed = yaml.safe_load(config_file)
        logging.info(f"config file {config_path} loaded")
        return parsed
def create_directory(dir_path: list) -> None:
    """Create the nested directory described by the parts in *dir_path*.

    Example: ``create_directory(["a", "b"])`` creates ``a/b``.
    Errors are logged with a traceback and then re-raised.
    """
    try:
        full_dir_path = ""
        for part in dir_path:
            full_dir_path = os.path.join(full_dir_path, part)
        # os.makedirs already creates every missing intermediate directory,
        # so a single call replaces the redundant per-component calls the
        # old loop made for each path prefix.
        os.makedirs(full_dir_path, exist_ok=True)
        logging.info(f"directory {dir_path} created")
    except Exception as e:
        logging.exception(e)
        # Bare `raise` preserves the original traceback exactly.
        raise
def unzip_File(source_dir:str, destination_dir:str):
    """Extract the zip archive at *source_dir* into *destination_dir*.

    Errors are logged with a traceback and then re-raised.
    """
    try:
        # Note: the local name 'zipfile' shadows the usual module name;
        # here it is the opened ZipFile handle.
        with ZipFile(source_dir, 'r') as zipfile:
            zipfile.extractall(destination_dir)
        logging.info(f"file {source_dir} unzipped to {destination_dir}")
    except Exception as e:
        logging.exception(e)
        raise e | [
"os.makedirs",
"zipfile.ZipFile",
"os.path.join",
"logging.exception",
"yaml.safe_load",
"logging.info"
] | [((379, 428), 'logging.info', 'logging.info', (['f"""config file {config_path} loaded"""'], {}), "(f'config file {config_path} loaded')\n", (391, 428), False, 'import logging\n'), ((125, 160), 'os.path.join', 'os.path.join', (['"""Logs"""', '"""running.log"""'], {}), "('Logs', 'running.log')\n", (137, 160), False, 'import os\n'), ((357, 374), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (371, 374), False, 'import yaml\n'), ((622, 663), 'os.makedirs', 'os.makedirs', (['full_dir_path'], {'exist_ok': '(True)'}), '(full_dir_path, exist_ok=True)\n', (633, 663), False, 'import os\n'), ((672, 717), 'logging.info', 'logging.info', (['f"""directory {dir_path} created"""'], {}), "(f'directory {dir_path} created')\n", (684, 717), False, 'import logging\n'), ((959, 1023), 'logging.info', 'logging.info', (['f"""file {source_dir} unzipped to {destination_dir}"""'], {}), "(f'file {source_dir} unzipped to {destination_dir}')\n", (971, 1023), False, 'import logging\n'), ((580, 613), 'os.path.join', 'os.path.join', (['full_dir_path', 'path'], {}), '(full_dir_path, path)\n', (592, 613), False, 'import os\n'), ((753, 773), 'logging.exception', 'logging.exception', (['e'], {}), '(e)\n', (770, 773), False, 'import logging\n'), ((866, 890), 'zipfile.ZipFile', 'ZipFile', (['source_dir', '"""r"""'], {}), "(source_dir, 'r')\n", (873, 890), False, 'from zipfile import ZipFile\n'), ((1059, 1079), 'logging.exception', 'logging.exception', (['e'], {}), '(e)\n', (1076, 1079), False, 'import logging\n')] |
#!/usr/bin/env python
##### convert values to ranks, tie breaker as average #####
from __future__ import division
from sys import argv, stdin, stdout
from signal import signal, SIGPIPE, SIG_DFL
import argparse
# Restore default SIGPIPE handling so the script exits quietly when its
# output pipe is closed early (e.g. piped into `head`).
signal(SIGPIPE, SIG_DFL)
# parse args
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--inVals", help="input values")
parser.add_argument("-d", "--delimiter", default="\n",
                    help="used to split and join input and output")
parser.add_argument("-n", "--normalize", action='store_true',
                    default=False, help="normalize over length of input to [0,1]")
parser.add_argument("-r", "--reverse", action='store_true',
                    default=False, help="high intial value leads to low ranks")
args = parser.parse_args()
# Delimiter used both to split the input and to join the output ranks.
sep = args.delimiter
### functions ###
def rank_simple(vector, reverse):
    """Return the indices of *vector* ordered by their values.

    With reverse=True the largest value comes first.
    """
    return sorted(range(len(vector)), key=lambda idx: vector[idx], reverse=reverse)
def rankdata(a, sep, normalize, reverse):
    """Convert the values in *a* to 1-based ranks and join them with *sep*.

    Ties receive the average of the ranks they span. With normalize=True
    every rank is divided by len(a), mapping the output into (0, 1].
    """
    n = len(a)
    order = rank_simple(a, reverse)
    ordered_vals = [a[idx] for idx in order]

    ranks = [0] * n
    rank_sum = 0
    tie_count = 0
    for pos in range(n):
        rank_sum += pos
        tie_count += 1
        # A tie run ends at the last element or when the next value differs.
        if pos == n - 1 or ordered_vals[pos] != ordered_vals[pos + 1]:
            average_rank = rank_sum / float(tie_count) + 1
            for tied_pos in range(pos - tie_count + 1, pos + 1):
                value = average_rank / n if normalize else average_rank
                ranks[order[tied_pos]] = str(value)
            rank_sum = 0
            tie_count = 0
    return sep.join(ranks)
# Read one numeric value per stdin line, rank them, and write the joined
# ranks to stdout (no trailing newline).
inData = [float(i) for i in list(stdin.readlines())]
stdout.write(rankdata(inData, sep=sep, normalize=args.normalize, reverse=args.reverse))
| [
"sys.stdin.readlines",
"signal.signal",
"argparse.ArgumentParser"
] | [((212, 236), 'signal.signal', 'signal', (['SIGPIPE', 'SIG_DFL'], {}), '(SIGPIPE, SIG_DFL)\n', (218, 236), False, 'from signal import signal, SIGPIPE, SIG_DFL\n'), ((260, 285), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (283, 285), False, 'import argparse\n'), ((1606, 1623), 'sys.stdin.readlines', 'stdin.readlines', ([], {}), '()\n', (1621, 1623), False, 'from sys import argv, stdin, stdout\n')] |
# Generated by Django 2.0.6 on 2019-05-29 14:31
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: redefines User.avator as a CharField with
    default 'author_avator/default.jpg' and max length 255."""

    dependencies = [
        ('SAcore', '0008_user_avator'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='avator',
            field=models.CharField(default='author_avator/default.jpg', max_length=255),
        ),
    ]
| [
"django.db.models.CharField"
] | [((326, 395), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""author_avator/default.jpg"""', 'max_length': '(255)'}), "(default='author_avator/default.jpg', max_length=255)\n", (342, 395), False, 'from django.db import migrations, models\n')] |
"""test_CLI_system_stats.py: tests expected behavior for CLI application"""
from os import path
import pytest
from plumbum import local
import pandas as pd
import navitron_crons.exceptions as exceptions
import navitron_crons._version as _version
import navitron_crons.navitron_system_stats as navitron_system_stats
import helpers
def test_get_system_jumps():
    """get_system_jumps() must return a DataFrame whose columns are exactly
    ship_jumps and system_id; extra columns only xfail the test."""
    frame = navitron_system_stats.get_system_jumps(
        config=helpers.TEST_CONFIG,
        logger=helpers.LOGGER
    )
    assert isinstance(frame, pd.DataFrame)

    surplus_cols, missing_cols = helpers.find_uniques(
        frame.columns.values,
        ['ship_jumps', 'system_id']
    )
    assert missing_cols == []
    if surplus_cols:
        pytest.xfail(
            'Unexpected values from get_system_jumps(): {}'.format(surplus_cols))
def test_get_system_kills():
    """get_system_kills() must return a DataFrame whose columns are exactly
    npc_kills, pod_kills, ship_kills and system_id; extras only xfail."""
    frame = navitron_system_stats.get_system_kills(
        config=helpers.TEST_CONFIG,
        logger=helpers.LOGGER
    )
    assert isinstance(frame, pd.DataFrame)

    surplus_cols, missing_cols = helpers.find_uniques(
        frame.columns.values,
        ['npc_kills', 'pod_kills', 'ship_kills', 'system_id']
    )
    assert missing_cols == []
    if surplus_cols:
        pytest.xfail(
            'Unexpected values from get_system_kills(): {}'.format(surplus_cols))
class TestCLI:
    """validate cli launches and works as users expect"""
    app_command = local['navitron_system_stats']

    def test_help(self):
        """-h must run without raising"""
        self.app_command('-h')

    def test_version(self):
        """--version must report the expected app name/version"""
        expected = '{app_name} {version}\n'.format(
            app_name=navitron_system_stats.__app_name__,
            version=navitron_system_stats.__app_version__
        )
        assert self.app_command('--version') == expected
| [
"navitron_crons.navitron_system_stats.get_system_jumps",
"navitron_crons.navitron_system_stats.get_system_kills",
"helpers.find_uniques"
] | [((457, 551), 'navitron_crons.navitron_system_stats.get_system_jumps', 'navitron_system_stats.get_system_jumps', ([], {'config': 'helpers.TEST_CONFIG', 'logger': 'helpers.LOGGER'}), '(config=helpers.TEST_CONFIG, logger=\n helpers.LOGGER)\n', (495, 551), True, 'import navitron_crons.navitron_system_stats as navitron_system_stats\n'), ((702, 761), 'helpers.find_uniques', 'helpers.find_uniques', (['data.columns.values', 'expected_headers'], {}), '(data.columns.values, expected_headers)\n', (722, 761), False, 'import helpers\n'), ((1069, 1163), 'navitron_crons.navitron_system_stats.get_system_kills', 'navitron_system_stats.get_system_kills', ([], {'config': 'helpers.TEST_CONFIG', 'logger': 'helpers.LOGGER'}), '(config=helpers.TEST_CONFIG, logger=\n helpers.LOGGER)\n', (1107, 1163), True, 'import navitron_crons.navitron_system_stats as navitron_system_stats\n'), ((1340, 1399), 'helpers.find_uniques', 'helpers.find_uniques', (['data.columns.values', 'expected_headers'], {}), '(data.columns.values, expected_headers)\n', (1360, 1399), False, 'import helpers\n')] |
import numpy as np
import WDRT.ESSC as ESSC
import copy
import matplotlib.pyplot as plt
# Create buoy object, in this case for Station #46022
buoy46022 = ESSC.Buoy('46022', 'NDBC')
# Read data from ndbc.noaa.gov
#buoy46022.fetchFromWeb()
#buoy46022.saveAsTxt(savePath = "./Data")
#buoy46022.saveAsH5('NDBC46022.h5')
# Load data from .txt file if avilable
#buoy46022.loadFromTxt(r'C:\full\filepath\to\WDRT\examples\data\NDBC46022')
# Load data from .h5 file if available
buoy46022.loadFromH5(r'data\NDBC46022.h5')
# Declare required parameters
Time_SS = 1. # Sea state duration (hrs)
Time_R = 100 # Return periods (yrs) of interest
# Create PCA EA object for the buoy
pca46022 = ESSC.PCA(buoy46022)
# Calculate contour using PCA method
pca_Hs_Return, pca_T_Return = pca46022.getContours(Time_SS, Time_R)
# Show a plot of the data
pca46022.plotData()
# Sample Generation Example
num_contour_points = 20 # Number of points to be sampled for each
# contour interval.
contour_returns = np.array([0.001, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, 100])
# Probabilities defining sampling contour bounds.
random_seed = 2 # Random seed for sample generation
# Get samples for a full sea state long term analysis
Hs_sampleFSS, T_sampleFSS, Weight_sampleFSS = pca46022.getSamples(num_contour_points,
contour_returns, random_seed)
# Get samples for a contour approach long term analysis
T_sampleCA = np.arange(12, 26, 2)
Hs_sampleCA = pca46022.getContourPoints(T_sampleCA)
# Save data in h5 file
#pca46022.saveContour(r'C:\full\filepath\to\WDRT\examples\NDBC%s' % (pca46022.buoy.buoyNum))
#pca46022.saveContour(r'testNDBC%s' % (pca46022.buoy.buoyNum))
pca46022.saveContour(r'data\NDBC%s' % (pca46022.buoy.buoyNum))
import ipdb; ipdb.set_trace()
# Create EA objects for remaining contour methods
Gauss46022 = ESSC.GaussianCopula(buoy46022)
Gumbel46022 = ESSC.GumbelCopula(buoy46022)
Clayton46022 = ESSC.ClaytonCopula(buoy46022)
rosen46022 = ESSC.Rosenblatt(buoy46022)
NonParaGauss46022 = ESSC.NonParaGaussianCopula(buoy46022)
NonParaClay46022 = ESSC.NonParaClaytonCopula(buoy46022)
NonParaGum46022 = ESSC.NonParaGumbelCopula(buoy46022)
BivariateKDE46022 = ESSC.BivariateKDE(buoy46022, bw = [0.23, 0.23], logTransform = False)
BivariateLogKDE46022 = ESSC.BivariateKDE(buoy46022, bw = [0.02, 0.11], logTransform = True)
# Calculate contours for all remaining contour methods
Gauss_Hs_Return, Gauss_T_Return = Gauss46022.getContours(Time_SS, Time_R)
Gumbel_Hs_Return, Gumbel_T_Return = Gumbel46022.getContours(Time_SS, Time_R)
Clayton_Hs_Return, Clayton_T_Return = Clayton46022.getContours(Time_SS, Time_R)
rosen_Hs_Return, rosen_T_Return = rosen46022.getContours(Time_SS, Time_R)
NonParaGau_Hs_Return, NonParaGau_T_Return = NonParaGauss46022.getContours(Time_SS, Time_R)
NonParaClay_Hs_Return, NonParaClay_T_Return = NonParaClay46022.getContours(Time_SS, Time_R)
NonParaGum_Hs_Return, NonParaGum_T_Return = NonParaGum46022.getContours(Time_SS, Time_R)
KDE_Hs_Return, KDE_T_Return = BivariateKDE46022.getContours(Time_SS, Time_R)
logKDE_Hs_Return, logKDE_T_Return = BivariateLogKDE46022.getContours(Time_SS, Time_R)
# Plot all contour results for comparison
f = plt.figure()
f.canvas.set_window_title('NDBC%s, %i-year contours' % (buoy46022.buoyNum, Time_R))
plt.plot(buoy46022.T, buoy46022.Hs, 'bo', alpha=0.1, label='Data')
plt.plot(pca_T_Return, pca_Hs_Return, '-', label='PCA')
plt.plot(Gauss_T_Return, Gauss_Hs_Return, '-', label='Gaussian')
plt.plot(Gumbel_T_Return, Gumbel_Hs_Return, '-', label='Gumbel')
plt.plot(Clayton_T_Return, Clayton_Hs_Return, '-', label='Clayton')
plt.plot(rosen_T_Return, rosen_Hs_Return, '-', label='Rosenblatt')
plt.plot(NonParaGau_T_Return, NonParaGau_Hs_Return, 'g--', label='Non-Parametric Gaussian')
plt.plot(NonParaGum_T_Return, NonParaGum_Hs_Return, 'r--', label='Non-Parametric Gumbel')
plt.plot(NonParaClay_T_Return, NonParaClay_Hs_Return, 'c--', label='Non-Parametric Clayton')
plt.plot(KDE_T_Return, KDE_Hs_Return, 'm--', label = 'Bivariate KDE')
plt.plot(logKDE_T_Return, logKDE_Hs_Return, 'b--', label = 'Bivariate KDE (log)')
plt.xlabel('Energy period, $T_e$ [s]')
plt.ylabel('Sig. wave height, $H_s$ [m]')
plt.grid(True)
plt.legend(loc='center right', bbox_to_anchor=(1.4,0.5),fontsize=10, fancybox=True)
plt.show()
# Modify contour by steepness curve if they intersect
# Declare required parameters
depth = 391.4 # Depth at measurement point (m)
SteepMax = 0.07 # Optional: enter estimate of breaking steepness
T_vals = np.arange(0.1, np.amax(buoy46022.T), 0.1)
#Note, if depth is not inputted manually, it will automatically be retrieved from NDBC's website
SteepH = pca46022.steepness(SteepMax, T_vals,depth = depth)
SteepH_Return = pca46022.steepness(SteepMax, pca46022.T_ReturnContours, depth = depth)
Steep_correction = np.where(SteepH_Return < pca46022.Hs_ReturnContours)
Hs_Return_Steep = copy.deepcopy(pca46022.Hs_ReturnContours)
Hs_Return_Steep[Steep_correction] = SteepH_Return[Steep_correction]
pca46022.plotSampleData()
# Take a subset of 10 years of data and calculate a 20-year contour using the subset
Time_R = 20
subsetBuoy = buoy46022.createSubsetBuoy(10)
subsetPCA = ESSC.PCA(subsetBuoy)
Subset_Hs_Return, Subset_T_Return = subsetPCA.getContours(Time_SS, Time_R)
# Plot contour and subsetted data
f = plt.figure()
f.canvas.set_window_title('NDBC%s, %i-year contours' % (subsetBuoy.buoyNum, Time_R))
plt.plot(subsetBuoy.T, subsetBuoy.Hs, 'bo', alpha=0.1, label='Data')
plt.plot(Subset_T_Return, Subset_Hs_Return, '-', label = 'PCA')
plt.xlabel('Energy period, $T_e$ [s]')
plt.ylabel('Sig. wave height, $H_s$ [m]')
plt.grid(True)
plt.legend(loc='center right', bbox_to_anchor=(1.4,0.5),fontsize=10, fancybox=True)
plt.show()
# Determine which buoy observations are outside of the contour
outsideT, outsideHs = subsetPCA.outsidePoints()
# Determine the area of the contour
subsetPCAArea = subsetPCA.contourIntegrator()
# Calculate bootstrap confidence intervals, commented out due to long run time
# Note that stable bootstrap confidence intervals require large sample sizes
# pca46022.bootStrap(boot_size=10)
# Gauss46022.bootStrap(boot_size=10)
# Gumbel46022.bootStrap(boot_size=10)
# cc46022.bootStrap(boot_size=10)
# rosen46022.bootStrap(boot_size=10)
# NonParaGauss46022.bootStrap(boot_size=10)
# NonParaGauss46022.bootStrap(boot_size=10)
# NonParaGauss46022.bootStrap(boot_size=10)
| [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.ylabel",
"WDRT.ESSC.ClaytonCopula",
"numpy.array",
"WDRT.ESSC.BivariateKDE",
"copy.deepcopy",
"WDRT.ESSC.NonParaGumbelCopula",
"numpy.arange",
"numpy.where",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"WDRT.ESSC.Buoy",
"WDRT.ESSC.Gumbel... | [((155, 181), 'WDRT.ESSC.Buoy', 'ESSC.Buoy', (['"""46022"""', '"""NDBC"""'], {}), "('46022', 'NDBC')\n", (164, 181), True, 'import WDRT.ESSC as ESSC\n'), ((686, 705), 'WDRT.ESSC.PCA', 'ESSC.PCA', (['buoy46022'], {}), '(buoy46022)\n', (694, 705), True, 'import WDRT.ESSC as ESSC\n'), ((993, 1051), 'numpy.array', 'np.array', (['[0.001, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, 100]'], {}), '([0.001, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, 100])\n', (1001, 1051), True, 'import numpy as np\n'), ((1448, 1468), 'numpy.arange', 'np.arange', (['(12)', '(26)', '(2)'], {}), '(12, 26, 2)\n', (1457, 1468), True, 'import numpy as np\n'), ((1777, 1793), 'ipdb.set_trace', 'ipdb.set_trace', ([], {}), '()\n', (1791, 1793), False, 'import ipdb\n'), ((1857, 1887), 'WDRT.ESSC.GaussianCopula', 'ESSC.GaussianCopula', (['buoy46022'], {}), '(buoy46022)\n', (1876, 1887), True, 'import WDRT.ESSC as ESSC\n'), ((1902, 1930), 'WDRT.ESSC.GumbelCopula', 'ESSC.GumbelCopula', (['buoy46022'], {}), '(buoy46022)\n', (1919, 1930), True, 'import WDRT.ESSC as ESSC\n'), ((1946, 1975), 'WDRT.ESSC.ClaytonCopula', 'ESSC.ClaytonCopula', (['buoy46022'], {}), '(buoy46022)\n', (1964, 1975), True, 'import WDRT.ESSC as ESSC\n'), ((1989, 2015), 'WDRT.ESSC.Rosenblatt', 'ESSC.Rosenblatt', (['buoy46022'], {}), '(buoy46022)\n', (2004, 2015), True, 'import WDRT.ESSC as ESSC\n'), ((2036, 2073), 'WDRT.ESSC.NonParaGaussianCopula', 'ESSC.NonParaGaussianCopula', (['buoy46022'], {}), '(buoy46022)\n', (2062, 2073), True, 'import WDRT.ESSC as ESSC\n'), ((2093, 2129), 'WDRT.ESSC.NonParaClaytonCopula', 'ESSC.NonParaClaytonCopula', (['buoy46022'], {}), '(buoy46022)\n', (2118, 2129), True, 'import WDRT.ESSC as ESSC\n'), ((2148, 2183), 'WDRT.ESSC.NonParaGumbelCopula', 'ESSC.NonParaGumbelCopula', (['buoy46022'], {}), '(buoy46022)\n', (2172, 2183), True, 'import WDRT.ESSC as ESSC\n'), ((2204, 2269), 'WDRT.ESSC.BivariateKDE', 'ESSC.BivariateKDE', (['buoy46022'], {'bw': '[0.23, 0.23]', 'logTransform': '(False)'}), 
'(buoy46022, bw=[0.23, 0.23], logTransform=False)\n', (2221, 2269), True, 'import WDRT.ESSC as ESSC\n'), ((2297, 2361), 'WDRT.ESSC.BivariateKDE', 'ESSC.BivariateKDE', (['buoy46022'], {'bw': '[0.02, 0.11]', 'logTransform': '(True)'}), '(buoy46022, bw=[0.02, 0.11], logTransform=True)\n', (2314, 2361), True, 'import WDRT.ESSC as ESSC\n'), ((3209, 3221), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3219, 3221), True, 'import matplotlib.pyplot as plt\n'), ((3306, 3372), 'matplotlib.pyplot.plot', 'plt.plot', (['buoy46022.T', 'buoy46022.Hs', '"""bo"""'], {'alpha': '(0.1)', 'label': '"""Data"""'}), "(buoy46022.T, buoy46022.Hs, 'bo', alpha=0.1, label='Data')\n", (3314, 3372), True, 'import matplotlib.pyplot as plt\n'), ((3373, 3428), 'matplotlib.pyplot.plot', 'plt.plot', (['pca_T_Return', 'pca_Hs_Return', '"""-"""'], {'label': '"""PCA"""'}), "(pca_T_Return, pca_Hs_Return, '-', label='PCA')\n", (3381, 3428), True, 'import matplotlib.pyplot as plt\n'), ((3429, 3493), 'matplotlib.pyplot.plot', 'plt.plot', (['Gauss_T_Return', 'Gauss_Hs_Return', '"""-"""'], {'label': '"""Gaussian"""'}), "(Gauss_T_Return, Gauss_Hs_Return, '-', label='Gaussian')\n", (3437, 3493), True, 'import matplotlib.pyplot as plt\n'), ((3494, 3558), 'matplotlib.pyplot.plot', 'plt.plot', (['Gumbel_T_Return', 'Gumbel_Hs_Return', '"""-"""'], {'label': '"""Gumbel"""'}), "(Gumbel_T_Return, Gumbel_Hs_Return, '-', label='Gumbel')\n", (3502, 3558), True, 'import matplotlib.pyplot as plt\n'), ((3559, 3626), 'matplotlib.pyplot.plot', 'plt.plot', (['Clayton_T_Return', 'Clayton_Hs_Return', '"""-"""'], {'label': '"""Clayton"""'}), "(Clayton_T_Return, Clayton_Hs_Return, '-', label='Clayton')\n", (3567, 3626), True, 'import matplotlib.pyplot as plt\n'), ((3627, 3693), 'matplotlib.pyplot.plot', 'plt.plot', (['rosen_T_Return', 'rosen_Hs_Return', '"""-"""'], {'label': '"""Rosenblatt"""'}), "(rosen_T_Return, rosen_Hs_Return, '-', label='Rosenblatt')\n", (3635, 3693), True, 'import matplotlib.pyplot as plt\n'), 
((3694, 3790), 'matplotlib.pyplot.plot', 'plt.plot', (['NonParaGau_T_Return', 'NonParaGau_Hs_Return', '"""g--"""'], {'label': '"""Non-Parametric Gaussian"""'}), "(NonParaGau_T_Return, NonParaGau_Hs_Return, 'g--', label=\n 'Non-Parametric Gaussian')\n", (3702, 3790), True, 'import matplotlib.pyplot as plt\n'), ((3786, 3880), 'matplotlib.pyplot.plot', 'plt.plot', (['NonParaGum_T_Return', 'NonParaGum_Hs_Return', '"""r--"""'], {'label': '"""Non-Parametric Gumbel"""'}), "(NonParaGum_T_Return, NonParaGum_Hs_Return, 'r--', label=\n 'Non-Parametric Gumbel')\n", (3794, 3880), True, 'import matplotlib.pyplot as plt\n'), ((3876, 3973), 'matplotlib.pyplot.plot', 'plt.plot', (['NonParaClay_T_Return', 'NonParaClay_Hs_Return', '"""c--"""'], {'label': '"""Non-Parametric Clayton"""'}), "(NonParaClay_T_Return, NonParaClay_Hs_Return, 'c--', label=\n 'Non-Parametric Clayton')\n", (3884, 3973), True, 'import matplotlib.pyplot as plt\n'), ((3969, 4036), 'matplotlib.pyplot.plot', 'plt.plot', (['KDE_T_Return', 'KDE_Hs_Return', '"""m--"""'], {'label': '"""Bivariate KDE"""'}), "(KDE_T_Return, KDE_Hs_Return, 'm--', label='Bivariate KDE')\n", (3977, 4036), True, 'import matplotlib.pyplot as plt\n'), ((4039, 4118), 'matplotlib.pyplot.plot', 'plt.plot', (['logKDE_T_Return', 'logKDE_Hs_Return', '"""b--"""'], {'label': '"""Bivariate KDE (log)"""'}), "(logKDE_T_Return, logKDE_Hs_Return, 'b--', label='Bivariate KDE (log)')\n", (4047, 4118), True, 'import matplotlib.pyplot as plt\n'), ((4121, 4159), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Energy period, $T_e$ [s]"""'], {}), "('Energy period, $T_e$ [s]')\n", (4131, 4159), True, 'import matplotlib.pyplot as plt\n'), ((4160, 4201), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Sig. wave height, $H_s$ [m]"""'], {}), "('Sig. 
wave height, $H_s$ [m]')\n", (4170, 4201), True, 'import matplotlib.pyplot as plt\n'), ((4202, 4216), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (4210, 4216), True, 'import matplotlib.pyplot as plt\n'), ((4217, 4306), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""center right"""', 'bbox_to_anchor': '(1.4, 0.5)', 'fontsize': '(10)', 'fancybox': '(True)'}), "(loc='center right', bbox_to_anchor=(1.4, 0.5), fontsize=10,\n fancybox=True)\n", (4227, 4306), True, 'import matplotlib.pyplot as plt\n'), ((4301, 4311), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4309, 4311), True, 'import matplotlib.pyplot as plt\n'), ((4828, 4880), 'numpy.where', 'np.where', (['(SteepH_Return < pca46022.Hs_ReturnContours)'], {}), '(SteepH_Return < pca46022.Hs_ReturnContours)\n', (4836, 4880), True, 'import numpy as np\n'), ((4899, 4940), 'copy.deepcopy', 'copy.deepcopy', (['pca46022.Hs_ReturnContours'], {}), '(pca46022.Hs_ReturnContours)\n', (4912, 4940), False, 'import copy\n'), ((5190, 5210), 'WDRT.ESSC.PCA', 'ESSC.PCA', (['subsetBuoy'], {}), '(subsetBuoy)\n', (5198, 5210), True, 'import WDRT.ESSC as ESSC\n'), ((5325, 5337), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5335, 5337), True, 'import matplotlib.pyplot as plt\n'), ((5423, 5491), 'matplotlib.pyplot.plot', 'plt.plot', (['subsetBuoy.T', 'subsetBuoy.Hs', '"""bo"""'], {'alpha': '(0.1)', 'label': '"""Data"""'}), "(subsetBuoy.T, subsetBuoy.Hs, 'bo', alpha=0.1, label='Data')\n", (5431, 5491), True, 'import matplotlib.pyplot as plt\n'), ((5492, 5553), 'matplotlib.pyplot.plot', 'plt.plot', (['Subset_T_Return', 'Subset_Hs_Return', '"""-"""'], {'label': '"""PCA"""'}), "(Subset_T_Return, Subset_Hs_Return, '-', label='PCA')\n", (5500, 5553), True, 'import matplotlib.pyplot as plt\n'), ((5556, 5594), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Energy period, $T_e$ [s]"""'], {}), "('Energy period, $T_e$ [s]')\n", (5566, 5594), True, 'import matplotlib.pyplot as plt\n'), 
((5595, 5636), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Sig. wave height, $H_s$ [m]"""'], {}), "('Sig. wave height, $H_s$ [m]')\n", (5605, 5636), True, 'import matplotlib.pyplot as plt\n'), ((5637, 5651), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (5645, 5651), True, 'import matplotlib.pyplot as plt\n'), ((5652, 5741), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""center right"""', 'bbox_to_anchor': '(1.4, 0.5)', 'fontsize': '(10)', 'fancybox': '(True)'}), "(loc='center right', bbox_to_anchor=(1.4, 0.5), fontsize=10,\n fancybox=True)\n", (5662, 5741), True, 'import matplotlib.pyplot as plt\n'), ((5736, 5746), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5744, 5746), True, 'import matplotlib.pyplot as plt\n'), ((4536, 4556), 'numpy.amax', 'np.amax', (['buoy46022.T'], {}), '(buoy46022.T)\n', (4543, 4556), True, 'import numpy as np\n')] |
import pickle
import struct
from contextlib import ExitStack
from tempfile import NamedTemporaryFile
from .forking import fork, get_id
def skip(iterable, skip, shift):
for idx, item in enumerate(iterable):
if idx % skip != shift:
continue
yield item
_HEADER_FORMAT = '>Q'
_HEADER_SIZE = struct.calcsize(_HEADER_FORMAT)
_HEADER_PLACEHOLDER = b'\x00' * _HEADER_SIZE
def _run(func, iterable, number_processes, fp):
fp.write(_HEADER_PLACEHOLDER)
count = 0
for item in skip(iterable, number_processes, get_id()):
pickle.dump(func(item), fp)
count += 1
fp.seek(0)
fp.write(struct.pack(_HEADER_FORMAT, count))
def fork_map(func, iterable, workers: int = 10, tmp_dir=None):
with ExitStack() as stack:
result_files = {
fid: stack.enter_context(NamedTemporaryFile(
delete=False, dir=tmp_dir
)) for fid in range(workers)
}
fork(workers, lambda: _run(
func,
iterable,
workers,
result_files[get_id()].file,
))
for fp in result_files.values():
fp.delete = True
fp.seek(0)
sizes = {fid: struct.unpack(_HEADER_FORMAT, fp.read(_HEADER_SIZE))[0]
for fid, fp in result_files.items()}
while any(sizes.values()):
for fid in range(workers):
if not sizes[fid]:
continue
yield pickle.load(result_files[fid])
sizes[fid] -= 1
| [
"struct.calcsize",
"pickle.load",
"struct.pack",
"contextlib.ExitStack",
"tempfile.NamedTemporaryFile"
] | [((323, 354), 'struct.calcsize', 'struct.calcsize', (['_HEADER_FORMAT'], {}), '(_HEADER_FORMAT)\n', (338, 354), False, 'import struct\n'), ((643, 677), 'struct.pack', 'struct.pack', (['_HEADER_FORMAT', 'count'], {}), '(_HEADER_FORMAT, count)\n', (654, 677), False, 'import struct\n'), ((753, 764), 'contextlib.ExitStack', 'ExitStack', ([], {}), '()\n', (762, 764), False, 'from contextlib import ExitStack\n'), ((837, 882), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'delete': '(False)', 'dir': 'tmp_dir'}), '(delete=False, dir=tmp_dir)\n', (855, 882), False, 'from tempfile import NamedTemporaryFile\n'), ((1490, 1520), 'pickle.load', 'pickle.load', (['result_files[fid]'], {}), '(result_files[fid])\n', (1501, 1520), False, 'import pickle\n')] |
#!/usr/bin/env python
"""
move-virtualenv
~~~~~~~~~~~~~~~
A helper script that moves virtualenvs to a new location.
It only supports POSIX based virtualenvs and at the moment.
:copyright: (c) 2012 by Fireteam Ltd.
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function
import argparse
import collections
import marshal
import os.path
import re
import shutil
import sys
from types import CodeType
ACTIVATION_SCRIPTS = [
'activate',
'activate.csh',
'activate.fish',
'activate.xsh',
]
_pybin_match = re.compile(r'^python\d+\.\d+$')
_pypy_match = re.compile(r'^\d+.\d+$')
_activation_path_re = re.compile(
r'^(?:set -gx |setenv |)VIRTUAL_ENV[ =][\'"](.*?)[\'"]\s*$',
)
VERBOSE = False
MAGIC_LENGTH = 4 + 4 # magic length + 4 byte timestamp
# In python3.3, a 4 byte "size" hint was added to pyc files
if sys.version_info >= (3, 3): # pragma: no cover (PY33+)
MAGIC_LENGTH += 4
# PEP 552 (implemented in python 3.7) extends this by another word
if sys.version_info >= (3, 7): # pragma: no cover (PY37+)
MAGIC_LENGTH += 4
def debug(msg):
if VERBOSE:
print(msg)
def update_activation_script(script_filename, new_path):
"""Updates the paths for the activate shell scripts."""
with open(script_filename) as f:
lines = list(f)
def _handle_sub(match):
text = match.group()
start, end = match.span()
g_start, g_end = match.span(1)
return text[:(g_start - start)] + new_path + text[(g_end - end):]
changed = False
for idx, line in enumerate(lines):
new_line = _activation_path_re.sub(_handle_sub, line)
if line != new_line:
lines[idx] = new_line
changed = True
if changed:
debug('A %s' % script_filename)
with open(script_filename, 'w') as f:
f.writelines(lines)
def path_is_within(path, within):
relpath = os.path.relpath(path, within)
return not relpath.startswith(b'.')
def update_script(script_filename, old_path, new_path):
"""Updates shebang lines for actual scripts."""
filesystem_encoding = sys.getfilesystemencoding()
old_path = old_path.encode(filesystem_encoding)
new_path = new_path.encode(filesystem_encoding)
with open(script_filename, 'rb') as f:
if f.read(2) != b'#!':
return
f.seek(0)
lines = list(f)
args = lines[0][2:].strip().split()
if not args:
return
if path_is_within(args[0], old_path):
new_bin = os.path.join(new_path, os.path.relpath(args[0], old_path))
else:
return
args[0] = new_bin
lines[0] = b'#!' + b' '.join(args) + b'\n'
debug('S %s' % script_filename)
with open(script_filename, 'wb') as f:
f.writelines(lines)
def update_scripts(bin_dir, orig_path, new_path, activation=False):
"""Updates all scripts in the bin folder."""
for fname in os.listdir(bin_dir):
path = os.path.join(bin_dir, fname)
if fname in ACTIVATION_SCRIPTS and activation:
update_activation_script(path, new_path)
elif os.path.isfile(path):
update_script(path, orig_path, new_path)
def update_pyc(filename, new_path):
"""Updates the filenames stored in pyc files."""
with open(filename, 'rb') as f:
magic = f.read(MAGIC_LENGTH)
try:
code = marshal.load(f)
except Exception:
print('Error in %s' % filename)
raise
def _make_code(code, filename, consts):
if sys.version_info[0] == 2: # pragma: no cover (PY2)
return CodeType(
code.co_argcount, code.co_nlocals, code.co_stacksize,
code.co_flags, code.co_code, tuple(consts), code.co_names,
code.co_varnames, filename, code.co_name, code.co_firstlineno,
code.co_lnotab, code.co_freevars, code.co_cellvars,
)
elif sys.version_info < (3, 8): # pragma: no cover (<py38)
return CodeType(
code.co_argcount, code.co_kwonlyargcount, code.co_nlocals,
code.co_stacksize, code.co_flags, code.co_code, tuple(consts),
code.co_names, code.co_varnames, filename, code.co_name,
code.co_firstlineno, code.co_lnotab, code.co_freevars,
code.co_cellvars,
)
else: # pragma: no cover (py38+)
return code.replace(co_consts=tuple(consts), co_filename=filename)
def _process(code):
consts = []
for const in code.co_consts:
if type(const) is CodeType:
const = _process(const)
consts.append(const)
if new_path != code.co_filename or consts != list(code.co_consts):
code = _make_code(code, new_path, consts)
return code
new_code = _process(code)
if new_code is not code:
debug('B %s' % filename)
with open(filename, 'wb') as f:
f.write(magic)
marshal.dump(new_code, f)
def update_pycs(lib_dir, new_path):
"""Walks over all pyc files and updates their paths."""
def get_new_path(filename):
filename = os.path.normpath(filename)
return os.path.join(new_path, filename[len(lib_dir) + 1:])
for dirname, dirnames, filenames in os.walk(lib_dir):
for filename in filenames:
if (
filename.endswith(('.pyc', '.pyo')) and
# python 2, virtualenv 20.x symlinks os.pyc
not os.path.islink(os.path.join(dirname, filename))
):
filename = os.path.join(dirname, filename)
local_path = get_new_path(filename)
update_pyc(filename, local_path)
def _update_pth_file(pth_filename, orig_path, is_pypy):
with open(pth_filename) as f:
lines = f.readlines()
changed = False
for i, line in enumerate(lines):
val = line.strip()
if val.startswith('import ') or not os.path.isabs(val):
continue
changed = True
relto_original = os.path.relpath(val, orig_path)
# If we are moving a pypy venv the site-packages directory
# is in a different location than if we are moving a cpython venv
relto_pth = os.path.join(
'..' if is_pypy # venv/site-packages
else '../../..', # venv/lib/pythonX.X/site-packages
relto_original
)
lines[i] = '{}\n'.format(relto_pth)
if changed:
with open(pth_filename, 'w') as f:
f.write(''.join(lines))
debug('P {}'.format(pth_filename))
def update_pth_files(site_packages, orig_path, is_pypy):
"""Converts /full/paths in pth files to relative relocatable paths."""
for filename in os.listdir(site_packages):
filename = os.path.join(site_packages, filename)
if filename.endswith('.pth') and os.path.isfile(filename):
_update_pth_file(filename, orig_path, is_pypy)
def remove_local(base):
"""On some systems virtualenv seems to have something like a local
directory with symlinks. This directory is safe to remove in modern
versions of virtualenv. Delete it.
"""
local_dir = os.path.join(base, 'local')
if os.path.exists(local_dir): # pragma: no cover (not all systems)
debug('D {}'.format(local_dir))
shutil.rmtree(local_dir)
def update_paths(venv, new_path):
"""Updates all paths in a virtualenv to a new one."""
update_scripts(venv.bin_dir, venv.orig_path, new_path)
for lib_dir in venv.lib_dirs:
update_pycs(lib_dir, new_path)
update_pth_files(venv.site_packages, venv.orig_path, venv.is_pypy)
remove_local(venv.path)
update_scripts(venv.bin_dir, venv.orig_path, new_path, activation=True)
def get_orig_path(venv_path):
"""This helps us know whether someone has tried to relocate the
virtualenv
"""
activate_path = os.path.join(venv_path, 'bin/activate')
with open(activate_path) as activate:
for line in activate:
# virtualenv 20 changes the position
for possible in ('VIRTUAL_ENV="', "VIRTUAL_ENV='"):
if line.startswith(possible):
return line.split(possible[-1], 2)[1]
else:
raise AssertionError(
'Could not find VIRTUAL_ENV= in activation script: %s' %
activate_path
)
class NotAVirtualenvError(OSError):
def __init__(self, *args):
self.args = args
def __str__(self):
return '{} is not a virtualenv: not a {}: {}'.format(*self.args)
Virtualenv = collections.namedtuple(
'Virtualenv', (
'path',
'bin_dir',
'lib_dirs',
'site_packages',
'orig_path',
'is_pypy'
),
)
def _get_original_state(path):
is_pypy = os.path.isdir(os.path.join(path, 'lib_pypy'))
bin_dir = os.path.join(path, 'bin')
base_lib_dir = os.path.join(path, 'lib-python' if is_pypy else 'lib')
activate_file = os.path.join(bin_dir, 'activate')
for dir_path in (bin_dir, base_lib_dir):
if not os.path.isdir(dir_path):
raise NotAVirtualenvError(path, 'directory', dir_path)
if not os.path.isfile(activate_file):
raise NotAVirtualenvError(path, 'file', activate_file)
matcher = _pypy_match if is_pypy else _pybin_match
lib_dirs = [
os.path.join(base_lib_dir, potential_lib_dir)
for potential_lib_dir in os.listdir(base_lib_dir)
if matcher.match(potential_lib_dir)
]
if len(lib_dirs) != 1:
raise NotAVirtualenvError(
path,
'directory',
os.path.join(base_lib_dir, '#.#' if is_pypy else 'python#.#'),
)
lib_dir, = lib_dirs
site_packages = os.path.join(path if is_pypy else lib_dir, 'site-packages')
if not os.path.isdir(site_packages):
raise NotAVirtualenvError(path, 'directory', site_packages)
lib_dirs = [lib_dir]
if is_pypy: # pragma: no cover (pypy only)
lib_dirs.append(os.path.join(path, 'lib_pypy'))
return Virtualenv(
path=path,
bin_dir=bin_dir,
lib_dirs=lib_dirs,
site_packages=site_packages,
orig_path=get_orig_path(path),
is_pypy=is_pypy
)
def main(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument(
'--update-path',
required=True,
help=(
'Update the path for all required executables and helper files '
'that are supported to the new python prefix. You can also set '
'this to "auto" for autodetection.'
),
)
parser.add_argument(
'--verbose', action='store_true', help='show a listing of changes',
)
parser.add_argument('path', default='.', nargs='?')
args = parser.parse_args(argv)
global VERBOSE
VERBOSE = args.verbose
if args.update_path == 'auto':
update_path = os.path.abspath(args.path)
else:
update_path = args.update_path
if not os.path.isabs(update_path):
print('--update-path must be absolute: {}'.format(update_path))
return 1
try:
venv = _get_original_state(path=args.path)
except NotAVirtualenvError as e:
print(e)
return 1
if venv.orig_path == update_path:
print('Already up-to-date: %s (%s)' % (venv.path, update_path))
return 0
update_paths(venv, update_path)
print('Updated: %s (%s -> %s)' % (venv.path, venv.orig_path, update_path))
return 0
if __name__ == '__main__':
exit(main())
| [
"collections.namedtuple",
"sys.getfilesystemencoding",
"argparse.ArgumentParser",
"re.compile",
"marshal.dump",
"marshal.load",
"shutil.rmtree"
] | [((575, 608), 're.compile', 're.compile', (['"""^python\\\\d+\\\\.\\\\d+$"""'], {}), "('^python\\\\d+\\\\.\\\\d+$')\n", (585, 608), False, 'import re\n'), ((621, 646), 're.compile', 're.compile', (['"""^\\\\d+.\\\\d+$"""'], {}), "('^\\\\d+.\\\\d+$')\n", (631, 646), False, 'import re\n'), ((668, 743), 're.compile', 're.compile', (['"""^(?:set -gx |setenv |)VIRTUAL_ENV[ =][\\\\\'"](.*?)[\\\\\'"]\\\\s*$"""'], {}), '(\'^(?:set -gx |setenv |)VIRTUAL_ENV[ =][\\\\\\\'"](.*?)[\\\\\\\'"]\\\\s*$\')\n', (678, 743), False, 'import re\n'), ((8682, 8796), 'collections.namedtuple', 'collections.namedtuple', (['"""Virtualenv"""', "('path', 'bin_dir', 'lib_dirs', 'site_packages', 'orig_path', 'is_pypy')"], {}), "('Virtualenv', ('path', 'bin_dir', 'lib_dirs',\n 'site_packages', 'orig_path', 'is_pypy'))\n", (8704, 8796), False, 'import collections\n'), ((2149, 2176), 'sys.getfilesystemencoding', 'sys.getfilesystemencoding', ([], {}), '()\n', (2174, 2176), False, 'import sys\n'), ((10378, 10403), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (10401, 10403), False, 'import argparse\n'), ((7412, 7436), 'shutil.rmtree', 'shutil.rmtree', (['local_dir'], {}), '(local_dir)\n', (7425, 7436), False, 'import shutil\n'), ((3404, 3419), 'marshal.load', 'marshal.load', (['f'], {}), '(f)\n', (3416, 3419), False, 'import marshal\n'), ((5032, 5057), 'marshal.dump', 'marshal.dump', (['new_code', 'f'], {}), '(new_code, f)\n', (5044, 5057), False, 'import marshal\n')] |
from __future__ import print_function
#! /usr/bin/env cmsRun
import sys
import FWCore.ParameterSet.Config as cms
from SimTracker.TrackerMaterialAnalysis.trackingMaterialVarParsing import options
process = cms.Process("MaterialAnalyser")
if options.geometry == 'run2':
process.load('Configuration.Geometry.GeometryExtended2016Reco_cff')
# Add our custom detector grouping to DDD
process.XMLIdealGeometryESSource.geomXMLFiles.extend(['SimTracker/TrackerMaterialAnalysis/data/trackingMaterialGroups.xml'])
elif options.geometry == 'Phase1':
process.load('Configuration.Geometry.GeometryExtended2017Reco_cff')
# Add our custom detector grouping to DDD
process.XMLIdealGeometryESSource.geomXMLFiles.extend(['SimTracker/TrackerMaterialAnalysis/data/trackingMaterialGroups_ForPhaseI.xml'])
elif options.geometry == 'Phase2':
process.load('Configuration.Geometry.GeometryExtended2026D41Reco_cff')
# Add our custom detector grouping to DDD
process.XMLIdealGeometryESSource.geomXMLFiles.extend(['SimTracker/TrackerMaterialAnalysis/data/trackingMaterialGroups_ForPhaseII.xml'])
else:
print("Unknow geometry, quitting.")
sys.exit(1)
process.load("Configuration.StandardSequences.MagneticField_cff")
process.source = cms.Source("EmptySource")
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.listGroups = cms.EDAnalyzer("ListGroups",
SaveSummaryPlot = cms.untracked.bool(True))
process.path = cms.Path(process.listGroups)
| [
"FWCore.ParameterSet.Config.Source",
"FWCore.ParameterSet.Config.untracked.int32",
"sys.exit",
"FWCore.ParameterSet.Config.Process",
"FWCore.ParameterSet.Config.untracked.bool",
"FWCore.ParameterSet.Config.Path"
] | [((208, 239), 'FWCore.ParameterSet.Config.Process', 'cms.Process', (['"""MaterialAnalyser"""'], {}), "('MaterialAnalyser')\n", (219, 239), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1255, 1280), 'FWCore.ParameterSet.Config.Source', 'cms.Source', (['"""EmptySource"""'], {}), "('EmptySource')\n", (1265, 1280), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1504, 1532), 'FWCore.ParameterSet.Config.Path', 'cms.Path', (['process.listGroups'], {}), '(process.listGroups)\n', (1512, 1532), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1333, 1355), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(1)'], {}), '(1)\n', (1352, 1355), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1463, 1487), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (1481, 1487), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1157, 1168), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1165, 1168), False, 'import sys\n')] |
"""
Copyright 2015, Institute for Systems Biology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
import views
urlpatterns = patterns('',
url(r'^saveviz$', views.save_viz, name='saveviz'),
url(r'^deleteviz$', views.delete_viz, name='deleteviz'),
url(r'^share_viz/$', views.share_viz, name='share_viz'),
url(r'^share_viz/(?P<id>\d+)/$', views.share_viz, name='share_viz_id'),
url(r'^clone_viz/(?P<id>\d+)/$', views.clone_viz, name='clone_viz_id'),
url(r'^add_plot/', views.add_plot, name='add_plot'),
url(r'^delete_plot/', views.delete_plot, name='delete_plot'),
url(r'^genericplot/$', views.genericplot, name='genericplot'),
url(r'^genericplot/(?P<id>\d+)/$', views.genericplot, name='genericplot_id'),
url(r'^stackviz/$', views.stackviz, name='stackviz'),
url(r'^stackviz/(?P<id>\d+)/$', views.stackviz, name='stackviz_id'),
url(r'^circviz/$', views.circviz, name='circviz'),
url(r'^circviz/(?P<id>\d+)/$', views.circviz, name='circviz_id'),
url(r'^save_comment/$', views.save_comment, name='save_comment')
) | [
"django.conf.urls.url"
] | [((671, 719), 'django.conf.urls.url', 'url', (['"""^saveviz$"""', 'views.save_viz'], {'name': '"""saveviz"""'}), "('^saveviz$', views.save_viz, name='saveviz')\n", (674, 719), False, 'from django.conf.urls import patterns, url\n'), ((726, 780), 'django.conf.urls.url', 'url', (['"""^deleteviz$"""', 'views.delete_viz'], {'name': '"""deleteviz"""'}), "('^deleteviz$', views.delete_viz, name='deleteviz')\n", (729, 780), False, 'from django.conf.urls import patterns, url\n'), ((787, 841), 'django.conf.urls.url', 'url', (['"""^share_viz/$"""', 'views.share_viz'], {'name': '"""share_viz"""'}), "('^share_viz/$', views.share_viz, name='share_viz')\n", (790, 841), False, 'from django.conf.urls import patterns, url\n'), ((848, 918), 'django.conf.urls.url', 'url', (['"""^share_viz/(?P<id>\\\\d+)/$"""', 'views.share_viz'], {'name': '"""share_viz_id"""'}), "('^share_viz/(?P<id>\\\\d+)/$', views.share_viz, name='share_viz_id')\n", (851, 918), False, 'from django.conf.urls import patterns, url\n'), ((924, 994), 'django.conf.urls.url', 'url', (['"""^clone_viz/(?P<id>\\\\d+)/$"""', 'views.clone_viz'], {'name': '"""clone_viz_id"""'}), "('^clone_viz/(?P<id>\\\\d+)/$', views.clone_viz, name='clone_viz_id')\n", (927, 994), False, 'from django.conf.urls import patterns, url\n'), ((1000, 1050), 'django.conf.urls.url', 'url', (['"""^add_plot/"""', 'views.add_plot'], {'name': '"""add_plot"""'}), "('^add_plot/', views.add_plot, name='add_plot')\n", (1003, 1050), False, 'from django.conf.urls import patterns, url\n'), ((1057, 1116), 'django.conf.urls.url', 'url', (['"""^delete_plot/"""', 'views.delete_plot'], {'name': '"""delete_plot"""'}), "('^delete_plot/', views.delete_plot, name='delete_plot')\n", (1060, 1116), False, 'from django.conf.urls import patterns, url\n'), ((1123, 1183), 'django.conf.urls.url', 'url', (['"""^genericplot/$"""', 'views.genericplot'], {'name': '"""genericplot"""'}), "('^genericplot/$', views.genericplot, name='genericplot')\n", (1126, 1183), False, 'from 
django.conf.urls import patterns, url\n'), ((1190, 1266), 'django.conf.urls.url', 'url', (['"""^genericplot/(?P<id>\\\\d+)/$"""', 'views.genericplot'], {'name': '"""genericplot_id"""'}), "('^genericplot/(?P<id>\\\\d+)/$', views.genericplot, name='genericplot_id')\n", (1193, 1266), False, 'from django.conf.urls import patterns, url\n'), ((1272, 1323), 'django.conf.urls.url', 'url', (['"""^stackviz/$"""', 'views.stackviz'], {'name': '"""stackviz"""'}), "('^stackviz/$', views.stackviz, name='stackviz')\n", (1275, 1323), False, 'from django.conf.urls import patterns, url\n'), ((1330, 1397), 'django.conf.urls.url', 'url', (['"""^stackviz/(?P<id>\\\\d+)/$"""', 'views.stackviz'], {'name': '"""stackviz_id"""'}), "('^stackviz/(?P<id>\\\\d+)/$', views.stackviz, name='stackviz_id')\n", (1333, 1397), False, 'from django.conf.urls import patterns, url\n'), ((1403, 1451), 'django.conf.urls.url', 'url', (['"""^circviz/$"""', 'views.circviz'], {'name': '"""circviz"""'}), "('^circviz/$', views.circviz, name='circviz')\n", (1406, 1451), False, 'from django.conf.urls import patterns, url\n'), ((1458, 1522), 'django.conf.urls.url', 'url', (['"""^circviz/(?P<id>\\\\d+)/$"""', 'views.circviz'], {'name': '"""circviz_id"""'}), "('^circviz/(?P<id>\\\\d+)/$', views.circviz, name='circviz_id')\n", (1461, 1522), False, 'from django.conf.urls import patterns, url\n'), ((1528, 1591), 'django.conf.urls.url', 'url', (['"""^save_comment/$"""', 'views.save_comment'], {'name': '"""save_comment"""'}), "('^save_comment/$', views.save_comment, name='save_comment')\n", (1531, 1591), False, 'from django.conf.urls import patterns, url\n')] |
#!/usr/bin/env python3
import subprocess
import os
import sys
sys.path.append("../")
sys.path.append("../../system/lib/")
sys.path.append("../array/")
import json_parser
import pos
import pos_util
import cli
import api
import json
import time
import CREATE_ARRAY_BASIC
ARRAYNAME = CREATE_ARRAY_BASIC.ARRAYNAME
def execute():
CREATE_ARRAY_BASIC.execute()
api.detach_ssd(CREATE_ARRAY_BASIC.ANY_DATA)
time.sleep(5)
out = cli.mount_array(CREATE_ARRAY_BASIC.ARRAYNAME)
timeout = 80000 #80s
if api.wait_situation(ARRAYNAME, "REBUILDING", timeout) == True:
return "pass"
return "fail"
if __name__ == "__main__":
if len(sys.argv) >= 2:
pos.set_addr(sys.argv[1])
api.clear_result(__file__)
result = execute()
ret = api.set_result_manually(cli.array_info(ARRAYNAME), result, __file__)
pos.flush_and_kill_pos()
exit(ret) | [
"api.clear_result",
"pos.flush_and_kill_pos",
"api.wait_situation",
"time.sleep",
"pos.set_addr",
"api.detach_ssd",
"cli.array_info",
"sys.path.append",
"CREATE_ARRAY_BASIC.execute",
"cli.mount_array"
] | [((62, 84), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (77, 84), False, 'import sys\n'), ((85, 121), 'sys.path.append', 'sys.path.append', (['"""../../system/lib/"""'], {}), "('../../system/lib/')\n", (100, 121), False, 'import sys\n'), ((122, 150), 'sys.path.append', 'sys.path.append', (['"""../array/"""'], {}), "('../array/')\n", (137, 150), False, 'import sys\n'), ((331, 359), 'CREATE_ARRAY_BASIC.execute', 'CREATE_ARRAY_BASIC.execute', ([], {}), '()\n', (357, 359), False, 'import CREATE_ARRAY_BASIC\n'), ((364, 407), 'api.detach_ssd', 'api.detach_ssd', (['CREATE_ARRAY_BASIC.ANY_DATA'], {}), '(CREATE_ARRAY_BASIC.ANY_DATA)\n', (378, 407), False, 'import api\n'), ((412, 425), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (422, 425), False, 'import time\n'), ((436, 481), 'cli.mount_array', 'cli.mount_array', (['CREATE_ARRAY_BASIC.ARRAYNAME'], {}), '(CREATE_ARRAY_BASIC.ARRAYNAME)\n', (451, 481), False, 'import cli\n'), ((709, 735), 'api.clear_result', 'api.clear_result', (['__file__'], {}), '(__file__)\n', (725, 735), False, 'import api\n'), ((842, 866), 'pos.flush_and_kill_pos', 'pos.flush_and_kill_pos', ([], {}), '()\n', (864, 866), False, 'import pos\n'), ((514, 566), 'api.wait_situation', 'api.wait_situation', (['ARRAYNAME', '"""REBUILDING"""', 'timeout'], {}), "(ARRAYNAME, 'REBUILDING', timeout)\n", (532, 566), False, 'import api\n'), ((679, 704), 'pos.set_addr', 'pos.set_addr', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (691, 704), False, 'import pos\n'), ((793, 818), 'cli.array_info', 'cli.array_info', (['ARRAYNAME'], {}), '(ARRAYNAME)\n', (807, 818), False, 'import cli\n')] |
import numpy as np
import matplotlib.pyplot as plt
# colors corresponding to initial flight, stance, second flight
colors = ['k', 'b', 'g']
### The attributes of sol are:
## sol.t : series of time-points at which the solution was calculated
## sol.y : simulation results, size 6 x times
## sol.t_events : list of the times of 7 events:
# - fall during flight
# - touchdown
# - fall during stance
# - lift-off
# - reversal during stance
# - apex during flight
# - fall during flight
### If the event did not occur than the array is empty.
def com_visualisation(sol, leg_visibility=0.5, colors=colors, size=100, Ground=False):
'''
This function plots failure events in red.
'''
times = sol.t
result = sol.y
t_events = sol.t_events
x_com = result[0]
y_com = result[1]
# plt.figure()
### Initial position
plt.scatter(x_com[0], y_com[0], color = colors[0], s = size)
foot_x = result[4,0]
foot_y = result[5,0]
plt.plot([foot_x,x_com[0]],[foot_y,y_com[0]], color = colors[0],
alpha = leg_visibility)
### First flight phase
if len(t_events[1]) == 0: # no touch-down
## Time of failure
if len(t_events[0]) == 0: # no fall during initial flight
print('No touch-down but no fall during flight')
else:
failure = t_events[0][0]
fail_index = np.argmax(times > failure)
plt.plot(x_com[:fail_index],y_com[:fail_index], color = colors[0])
plt.scatter(x_com[fail_index -1],y_com[fail_index-1],
color = 'r', s = size)
else:
touchdown = t_events[1][0]
index = np.argmax(times > touchdown)
foot_x = result[4,index]
plt.plot(x_com[:index],y_com[:index], color = colors[0])
plt.scatter(x_com[index-1],y_com[index-1], color = colors[1], s = size)
plt.plot([foot_x,x_com[index-1]],[0,y_com[index-1]], color = colors[1],
alpha = leg_visibility)
### Stance phase
if len(t_events[3]) == 0: # no lift-off
## Time of failure
failure = False
if len(t_events[2]) == 0: # no fall during initial flight
if len(t_events[4]) == 0: # no reversal during initial flight
print('No lift-off but no failure during stance')
else:
failure = t_events[4][0] # time of reversal
else:
failure = t_events[2][0] # time of fall
if failure:
fail_index = np.argmax(times > failure)
plt.plot(x_com[index:fail_index],y_com[index:fail_index],
color = colors[1])
plt.scatter(x_com[fail_index -1],y_com[fail_index-1],
color = 'r', s = size)
else:
liftoff = t_events[3][0]
lift_index = np.argmax(times > liftoff)
plt.plot(x_com[index-1:lift_index],y_com[index-1:lift_index],
color = colors[1])
plt.scatter(x_com[lift_index-1],y_com[lift_index-1],
color = colors[2], s = size)
plt.plot([foot_x,x_com[lift_index-1]],[0,y_com[lift_index-1]],
color = colors[2], alpha = leg_visibility)
### Flight phase
if len(t_events[5]) == 0: # no apex
## Time of failure
if len(t_events[6]) == 0: # no fall
print('No apex but no fall during flight')
else:
failure = t_events[6][0]
fail_index = np.argmax(times > failure)
plt.plot(x_com[lift_index-1:fail_index],y_com[lift_index-1:fail_index], color = colors[2])
plt.scatter(x_com[fail_index -1],y_com[fail_index-1], color = 'r', s = size)
else:
apex = t_events[5][0]
if times[-1] > apex:
apex_index = np.argmax(times > apex)
else:
apex_index = len(times)
plt.plot(x_com[lift_index-1:apex_index],
y_com[lift_index-1:apex_index], color = colors[2])
plt.scatter(x_com[apex_index-1],y_com[apex_index-1],
color = colors[0], s = size)
plt.plot([result[4,apex_index-1],x_com[apex_index-1]],
[result[5,apex_index-1],y_com[apex_index-1]],
color = colors[0], alpha = leg_visibility)
if Ground:
ground = result[-1]
plt.plot(x_com, ground, color = 'k')
else:
plt.axhline(y=0, color = 'k')
plt.xlabel('Horizontal position')
plt.ylabel('Vertical position')
def full_visualisation(sol, colors = colors, foot = False):
'''
This function only plots if there was no failure in the trial
'''
times = sol.t
result = sol.y
t_events = sol.t_events
labels = ['touchdown','liftoff','apex']
# If the trial was not a failure:
if len(t_events[1]) > 0 and len(t_events[3]) > 0 and len(t_events[5]) > 0:
events = [t_events[1][0],t_events[3][0],t_events[5][0]]
indices = [0]
for e in range(3):
indices.append(np.argmax(times >= events[e]))
if foot:
## Foot trajectory
foot_x = result[4]
foot_y = result[5]
plt.figure()
for e in range(3):
plt.subplot(221)
plt.plot(times[indices[e]:indices[e+1]], foot_x[indices[e]:indices[e+1]], color = colors[e])
plt.subplot(223)
plt.plot(times[indices[e]:indices[e+1]], foot_y[indices[e]:indices[e+1]], color = colors[e])
plt.subplot(122)
plt.plot(foot_x[indices[e]:indices[e+1]], foot_y[indices[e]:indices[e+1]], color = colors[e])
plt.scatter(foot_x[indices[e]], foot_y[indices[e]], color = colors[e])
## Indicate the events
for i in [3,1]:
plt.subplot(2,2,i)
plt.axvline(x = events[e], color = colors[e], label = labels[e])
## Legends and labels
plt.subplot(221)
plt.xticks([])
plt.ylabel('Horizontal position')
plt.subplot(223)
plt.ylabel('Vertical position')
plt.xlabel('Time')
plt.subplot(122)
plt.xlabel('Horizontal position')
plt.ylabel('Vertical position')
plt.title('Foot trajectory')
## CoM position
plt.figure()
for e in range(3):
for i in range(2):
for j in range(2):
plt.subplot(2,3,1+i+3*j)
plt.plot(times[indices[e]:indices[e+1]+1],
result[i+2*j,indices[e]:indices[e+1]+1],
color = colors[e])
plt.subplot(133)
plt.plot(result[0,indices[e]:indices[e+1]+1],
result[1,indices[e]:indices[e+1]+1], color = colors[e])
## Indicate the events
for i in range(2):
for j in range(2):
plt.subplot(2,3,1+i+3*j)
plt.axvline(x = events[e], color = colors[e],
label = labels[e])
plt.subplot(133)
index = np.argmax(times >= events[e])
plt.scatter(result[0,index], result[1,index], color = colors[e])
## Legends and labels
plt.subplot(231)
plt.legend(loc = 2)
plt.xticks([])
plt.ylabel('Horizontal position')
plt.subplot(232)
plt.xticks([])
plt.ylabel('Vertical position')
plt.subplot(234)
plt.xlabel('Time')
plt.ylabel('Horizontal speed')
plt.subplot(235)
plt.xlabel('Time')
plt.ylabel('Vertical speed')
plt.subplot(133)
plt.xlabel('Horizontal position')
plt.ylabel('Vertical position')
plt.title('CoM trajectory')
else:
print('The trial was a failure') | [
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.argmax",
"matplotlib.pyplot.axhline",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.title",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.axvline",... | [((835, 891), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x_com[0]', 'y_com[0]'], {'color': 'colors[0]', 's': 'size'}), '(x_com[0], y_com[0], color=colors[0], s=size)\n', (846, 891), True, 'import matplotlib.pyplot as plt\n'), ((941, 1033), 'matplotlib.pyplot.plot', 'plt.plot', (['[foot_x, x_com[0]]', '[foot_y, y_com[0]]'], {'color': 'colors[0]', 'alpha': 'leg_visibility'}), '([foot_x, x_com[0]], [foot_y, y_com[0]], color=colors[0], alpha=\n leg_visibility)\n', (949, 1033), True, 'import matplotlib.pyplot as plt\n'), ((3869, 3902), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Horizontal position"""'], {}), "('Horizontal position')\n", (3879, 3902), True, 'import matplotlib.pyplot as plt\n'), ((3904, 3935), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Vertical position"""'], {}), "('Vertical position')\n", (3914, 3935), True, 'import matplotlib.pyplot as plt\n'), ((1519, 1547), 'numpy.argmax', 'np.argmax', (['(times > touchdown)'], {}), '(times > touchdown)\n', (1528, 1547), True, 'import numpy as np\n'), ((1580, 1635), 'matplotlib.pyplot.plot', 'plt.plot', (['x_com[:index]', 'y_com[:index]'], {'color': 'colors[0]'}), '(x_com[:index], y_com[:index], color=colors[0])\n', (1588, 1635), True, 'import matplotlib.pyplot as plt\n'), ((1639, 1711), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x_com[index - 1]', 'y_com[index - 1]'], {'color': 'colors[1]', 's': 'size'}), '(x_com[index - 1], y_com[index - 1], color=colors[1], s=size)\n', (1650, 1711), True, 'import matplotlib.pyplot as plt\n'), ((1713, 1815), 'matplotlib.pyplot.plot', 'plt.plot', (['[foot_x, x_com[index - 1]]', '[0, y_com[index - 1]]'], {'color': 'colors[1]', 'alpha': 'leg_visibility'}), '([foot_x, x_com[index - 1]], [0, y_com[index - 1]], color=colors[1],\n alpha=leg_visibility)\n', (1721, 1815), True, 'import matplotlib.pyplot as plt\n'), ((3787, 3821), 'matplotlib.pyplot.plot', 'plt.plot', (['x_com', 'ground'], {'color': '"""k"""'}), "(x_com, ground, 
color='k')\n", (3795, 3821), True, 'import matplotlib.pyplot as plt\n'), ((3838, 3865), 'matplotlib.pyplot.axhline', 'plt.axhline', ([], {'y': '(0)', 'color': '"""k"""'}), "(y=0, color='k')\n", (3849, 3865), True, 'import matplotlib.pyplot as plt\n'), ((5441, 5453), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5451, 5453), True, 'import matplotlib.pyplot as plt\n'), ((6135, 6151), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(231)'], {}), '(231)\n', (6146, 6151), True, 'import matplotlib.pyplot as plt\n'), ((6154, 6171), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(2)'}), '(loc=2)\n', (6164, 6171), True, 'import matplotlib.pyplot as plt\n'), ((6176, 6190), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]'], {}), '([])\n', (6186, 6190), True, 'import matplotlib.pyplot as plt\n'), ((6193, 6226), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Horizontal position"""'], {}), "('Horizontal position')\n", (6203, 6226), True, 'import matplotlib.pyplot as plt\n'), ((6229, 6245), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(232)'], {}), '(232)\n', (6240, 6245), True, 'import matplotlib.pyplot as plt\n'), ((6248, 6262), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]'], {}), '([])\n', (6258, 6262), True, 'import matplotlib.pyplot as plt\n'), ((6265, 6296), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Vertical position"""'], {}), "('Vertical position')\n", (6275, 6296), True, 'import matplotlib.pyplot as plt\n'), ((6299, 6315), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(234)'], {}), '(234)\n', (6310, 6315), True, 'import matplotlib.pyplot as plt\n'), ((6318, 6336), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time"""'], {}), "('Time')\n", (6328, 6336), True, 'import matplotlib.pyplot as plt\n'), ((6339, 6369), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Horizontal speed"""'], {}), "('Horizontal speed')\n", (6349, 6369), True, 'import matplotlib.pyplot as plt\n'), ((6372, 6388), 'matplotlib.pyplot.subplot', 'plt.subplot', 
(['(235)'], {}), '(235)\n', (6383, 6388), True, 'import matplotlib.pyplot as plt\n'), ((6391, 6409), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time"""'], {}), "('Time')\n", (6401, 6409), True, 'import matplotlib.pyplot as plt\n'), ((6412, 6440), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Vertical speed"""'], {}), "('Vertical speed')\n", (6422, 6440), True, 'import matplotlib.pyplot as plt\n'), ((6443, 6459), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(133)'], {}), '(133)\n', (6454, 6459), True, 'import matplotlib.pyplot as plt\n'), ((6462, 6495), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Horizontal position"""'], {}), "('Horizontal position')\n", (6472, 6495), True, 'import matplotlib.pyplot as plt\n'), ((6498, 6529), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Vertical position"""'], {}), "('Vertical position')\n", (6508, 6529), True, 'import matplotlib.pyplot as plt\n'), ((6532, 6559), 'matplotlib.pyplot.title', 'plt.title', (['"""CoM trajectory"""'], {}), "('CoM trajectory')\n", (6541, 6559), True, 'import matplotlib.pyplot as plt\n'), ((1287, 1313), 'numpy.argmax', 'np.argmax', (['(times > failure)'], {}), '(times > failure)\n', (1296, 1313), True, 'import numpy as np\n'), ((1317, 1382), 'matplotlib.pyplot.plot', 'plt.plot', (['x_com[:fail_index]', 'y_com[:fail_index]'], {'color': 'colors[0]'}), '(x_com[:fail_index], y_com[:fail_index], color=colors[0])\n', (1325, 1382), True, 'import matplotlib.pyplot as plt\n'), ((1387, 1463), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x_com[fail_index - 1]', 'y_com[fail_index - 1]'], {'color': '"""r"""', 's': 'size'}), "(x_com[fail_index - 1], y_com[fail_index - 1], color='r', s=size)\n", (1398, 1463), True, 'import matplotlib.pyplot as plt\n'), ((2495, 2521), 'numpy.argmax', 'np.argmax', (['(times > liftoff)'], {}), '(times > liftoff)\n', (2504, 2521), True, 'import numpy as np\n'), ((2525, 2613), 'matplotlib.pyplot.plot', 'plt.plot', (['x_com[index - 1:lift_index]', 'y_com[index - 
1:lift_index]'], {'color': 'colors[1]'}), '(x_com[index - 1:lift_index], y_com[index - 1:lift_index], color=\n colors[1])\n', (2533, 2613), True, 'import matplotlib.pyplot as plt\n'), ((2614, 2700), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x_com[lift_index - 1]', 'y_com[lift_index - 1]'], {'color': 'colors[2]', 's': 'size'}), '(x_com[lift_index - 1], y_com[lift_index - 1], color=colors[2],\n s=size)\n', (2625, 2700), True, 'import matplotlib.pyplot as plt\n'), ((2704, 2817), 'matplotlib.pyplot.plot', 'plt.plot', (['[foot_x, x_com[lift_index - 1]]', '[0, y_com[lift_index - 1]]'], {'color': 'colors[2]', 'alpha': 'leg_visibility'}), '([foot_x, x_com[lift_index - 1]], [0, y_com[lift_index - 1]], color\n =colors[2], alpha=leg_visibility)\n', (2712, 2817), True, 'import matplotlib.pyplot as plt\n'), ((4513, 4525), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4523, 4525), True, 'import matplotlib.pyplot as plt\n'), ((5147, 5163), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(221)'], {}), '(221)\n', (5158, 5163), True, 'import matplotlib.pyplot as plt\n'), ((5167, 5181), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]'], {}), '([])\n', (5177, 5181), True, 'import matplotlib.pyplot as plt\n'), ((5185, 5218), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Horizontal position"""'], {}), "('Horizontal position')\n", (5195, 5218), True, 'import matplotlib.pyplot as plt\n'), ((5222, 5238), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(223)'], {}), '(223)\n', (5233, 5238), True, 'import matplotlib.pyplot as plt\n'), ((5242, 5273), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Vertical position"""'], {}), "('Vertical position')\n", (5252, 5273), True, 'import matplotlib.pyplot as plt\n'), ((5277, 5295), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time"""'], {}), "('Time')\n", (5287, 5295), True, 'import matplotlib.pyplot as plt\n'), ((5299, 5315), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(122)'], {}), '(122)\n', (5310, 5315), True, 'import 
matplotlib.pyplot as plt\n'), ((5319, 5352), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Horizontal position"""'], {}), "('Horizontal position')\n", (5329, 5352), True, 'import matplotlib.pyplot as plt\n'), ((5356, 5387), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Vertical position"""'], {}), "('Vertical position')\n", (5366, 5387), True, 'import matplotlib.pyplot as plt\n'), ((5391, 5419), 'matplotlib.pyplot.title', 'plt.title', (['"""Foot trajectory"""'], {}), "('Foot trajectory')\n", (5400, 5419), True, 'import matplotlib.pyplot as plt\n'), ((5675, 5691), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(133)'], {}), '(133)\n', (5686, 5691), True, 'import matplotlib.pyplot as plt\n'), ((5695, 5809), 'matplotlib.pyplot.plot', 'plt.plot', (['result[0, indices[e]:indices[e + 1] + 1]', 'result[1, indices[e]:indices[e + 1] + 1]'], {'color': 'colors[e]'}), '(result[0, indices[e]:indices[e + 1] + 1], result[1, indices[e]:\n indices[e + 1] + 1], color=colors[e])\n', (5703, 5809), True, 'import matplotlib.pyplot as plt\n'), ((5983, 5999), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(133)'], {}), '(133)\n', (5994, 5999), True, 'import matplotlib.pyplot as plt\n'), ((6011, 6040), 'numpy.argmax', 'np.argmax', (['(times >= events[e])'], {}), '(times >= events[e])\n', (6020, 6040), True, 'import numpy as np\n'), ((6044, 6108), 'matplotlib.pyplot.scatter', 'plt.scatter', (['result[0, index]', 'result[1, index]'], {'color': 'colors[e]'}), '(result[0, index], result[1, index], color=colors[e])\n', (6055, 6108), True, 'import matplotlib.pyplot as plt\n'), ((2242, 2268), 'numpy.argmax', 'np.argmax', (['(times > failure)'], {}), '(times > failure)\n', (2251, 2268), True, 'import numpy as np\n'), ((2273, 2348), 'matplotlib.pyplot.plot', 'plt.plot', (['x_com[index:fail_index]', 'y_com[index:fail_index]'], {'color': 'colors[1]'}), '(x_com[index:fail_index], y_com[index:fail_index], color=colors[1])\n', (2281, 2348), True, 'import matplotlib.pyplot as plt\n'), ((2360, 2436), 
'matplotlib.pyplot.scatter', 'plt.scatter', (['x_com[fail_index - 1]', 'y_com[fail_index - 1]'], {'color': '"""r"""', 's': 'size'}), "(x_com[fail_index - 1], y_com[fail_index - 1], color='r', s=size)\n", (2371, 2436), True, 'import matplotlib.pyplot as plt\n'), ((3394, 3491), 'matplotlib.pyplot.plot', 'plt.plot', (['x_com[lift_index - 1:apex_index]', 'y_com[lift_index - 1:apex_index]'], {'color': 'colors[2]'}), '(x_com[lift_index - 1:apex_index], y_com[lift_index - 1:apex_index],\n color=colors[2])\n', (3402, 3491), True, 'import matplotlib.pyplot as plt\n'), ((3496, 3582), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x_com[apex_index - 1]', 'y_com[apex_index - 1]'], {'color': 'colors[0]', 's': 'size'}), '(x_com[apex_index - 1], y_com[apex_index - 1], color=colors[0],\n s=size)\n', (3507, 3582), True, 'import matplotlib.pyplot as plt\n'), ((3588, 3749), 'matplotlib.pyplot.plot', 'plt.plot', (['[result[4, apex_index - 1], x_com[apex_index - 1]]', '[result[5, apex_index - 1], y_com[apex_index - 1]]'], {'color': 'colors[0]', 'alpha': 'leg_visibility'}), '([result[4, apex_index - 1], x_com[apex_index - 1]], [result[5, \n apex_index - 1], y_com[apex_index - 1]], color=colors[0], alpha=\n leg_visibility)\n', (3596, 3749), True, 'import matplotlib.pyplot as plt\n'), ((4401, 4430), 'numpy.argmax', 'np.argmax', (['(times >= events[e])'], {}), '(times >= events[e])\n', (4410, 4430), True, 'import numpy as np\n'), ((4552, 4568), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(221)'], {}), '(221)\n', (4563, 4568), True, 'import matplotlib.pyplot as plt\n'), ((4573, 4672), 'matplotlib.pyplot.plot', 'plt.plot', (['times[indices[e]:indices[e + 1]]', 'foot_x[indices[e]:indices[e + 1]]'], {'color': 'colors[e]'}), '(times[indices[e]:indices[e + 1]], foot_x[indices[e]:indices[e + 1]\n ], color=colors[e])\n', (4581, 4672), True, 'import matplotlib.pyplot as plt\n'), ((4670, 4686), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(223)'], {}), '(223)\n', (4681, 4686), True, 'import 
matplotlib.pyplot as plt\n'), ((4691, 4790), 'matplotlib.pyplot.plot', 'plt.plot', (['times[indices[e]:indices[e + 1]]', 'foot_y[indices[e]:indices[e + 1]]'], {'color': 'colors[e]'}), '(times[indices[e]:indices[e + 1]], foot_y[indices[e]:indices[e + 1]\n ], color=colors[e])\n', (4699, 4790), True, 'import matplotlib.pyplot as plt\n'), ((4788, 4804), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(122)'], {}), '(122)\n', (4799, 4804), True, 'import matplotlib.pyplot as plt\n'), ((4809, 4909), 'matplotlib.pyplot.plot', 'plt.plot', (['foot_x[indices[e]:indices[e + 1]]', 'foot_y[indices[e]:indices[e + 1]]'], {'color': 'colors[e]'}), '(foot_x[indices[e]:indices[e + 1]], foot_y[indices[e]:indices[e + 1\n ]], color=colors[e])\n', (4817, 4909), True, 'import matplotlib.pyplot as plt\n'), ((4907, 4975), 'matplotlib.pyplot.scatter', 'plt.scatter', (['foot_x[indices[e]]', 'foot_y[indices[e]]'], {'color': 'colors[e]'}), '(foot_x[indices[e]], foot_y[indices[e]], color=colors[e])\n', (4918, 4975), True, 'import matplotlib.pyplot as plt\n'), ((3044, 3070), 'numpy.argmax', 'np.argmax', (['(times > failure)'], {}), '(times > failure)\n', (3053, 3070), True, 'import numpy as np\n'), ((3076, 3173), 'matplotlib.pyplot.plot', 'plt.plot', (['x_com[lift_index - 1:fail_index]', 'y_com[lift_index - 1:fail_index]'], {'color': 'colors[2]'}), '(x_com[lift_index - 1:fail_index], y_com[lift_index - 1:fail_index],\n color=colors[2])\n', (3084, 3173), True, 'import matplotlib.pyplot as plt\n'), ((3172, 3248), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x_com[fail_index - 1]', 'y_com[fail_index - 1]'], {'color': '"""r"""', 's': 'size'}), "(x_com[fail_index - 1], y_com[fail_index - 1], color='r', s=size)\n", (3183, 3248), True, 'import matplotlib.pyplot as plt\n'), ((3327, 3350), 'numpy.argmax', 'np.argmax', (['(times > apex)'], {}), '(times > apex)\n', (3336, 3350), True, 'import numpy as np\n'), ((5030, 5050), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', 'i'], {}), '(2, 2, 
i)\n', (5041, 5050), True, 'import matplotlib.pyplot as plt\n'), ((5054, 5112), 'matplotlib.pyplot.axvline', 'plt.axvline', ([], {'x': 'events[e]', 'color': 'colors[e]', 'label': 'labels[e]'}), '(x=events[e], color=colors[e], label=labels[e])\n', (5065, 5112), True, 'import matplotlib.pyplot as plt\n'), ((5525, 5557), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(3)', '(1 + i + 3 * j)'], {}), '(2, 3, 1 + i + 3 * j)\n', (5536, 5557), True, 'import matplotlib.pyplot as plt\n'), ((5555, 5673), 'matplotlib.pyplot.plot', 'plt.plot', (['times[indices[e]:indices[e + 1] + 1]', 'result[i + 2 * j, indices[e]:indices[e + 1] + 1]'], {'color': 'colors[e]'}), '(times[indices[e]:indices[e + 1] + 1], result[i + 2 * j, indices[e]\n :indices[e + 1] + 1], color=colors[e])\n', (5563, 5673), True, 'import matplotlib.pyplot as plt\n'), ((5878, 5910), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(3)', '(1 + i + 3 * j)'], {}), '(2, 3, 1 + i + 3 * j)\n', (5889, 5910), True, 'import matplotlib.pyplot as plt\n'), ((5908, 5966), 'matplotlib.pyplot.axvline', 'plt.axvline', ([], {'x': 'events[e]', 'color': 'colors[e]', 'label': 'labels[e]'}), '(x=events[e], color=colors[e], label=labels[e])\n', (5919, 5966), True, 'import matplotlib.pyplot as plt\n')] |
import numpy as np
class perceptron(object):
#eta learning rata
#n_iter times
def __init__(self,eta,n_iter):
self.eta=eta
self.n_iter=n_iter
def fit(self,x,y):
'''
x=ndarray(n_samples,n_features),training data
y=ndarray(n_samples),labels
returns
self:object
w_:1darray,weights after fitting
errors=list,errors times
'''
#init
self.w_=np.zeros(np.shape(x)[1]+1)
self.errors_=[]
for _ in range(self.n_iter):
errors=0
for xi,yi in zip(x,y):
updata=self.eta*(self.predict(xi)-yi)
self.w_[1:]+=updata*xi
self.w_[0]+=updata
errors+=int(updata!=0.0)
self.errors_.append(errors)
print(self.errors_)
return self
def net_input(self,x):
'''
calculate net input
'''
return np.dot(x,self.w_[1:])+self.w_[0]
def predict(self,x):
'''
positive function
'''
return np.where(self.net_input(x)>=0.0,1,-1)
#painting
import matplotlib.pyplot as plt
#from perception import perceptron
#read data as DaraFrame
import pandas as pd
import numpy as np
import os
import pandas as pd
import numpy as np
import random
a=np.random.uniform(6.0,7.0,150)
b=np.random.uniform(2.0,4.0,150)
c=np.random.uniform(5.0,5.5,150)
d=np.random.uniform(1.5,2.5,150)
q=[]
for i in range(150):
e=np.random.choice(['a','b'])
q.append(e)
dic={'0':a,'1':b,'2':c,'3':d,'4':q}
df=pd.DataFrame(dic)
y=df.iloc[0:100,4].values
y=np.where(y=='b',-1,1)
x=df.iloc[0:100,[0,2]].values
plt.scatter(x[:50,0],x[:50,1],color='red',marker='o',label='setosa')
plt.scatter(x[50:100,0],x[50:100,1],color='green',marker='x',label='versicolor')
plt.xlabel('petal length')
plt.ylabel('sepal length')
plt.legend(loc='upper right')
plt.show()
ppn=perceptron(eta=1,n_iter=10000)
ppn.fit(x,y)
plt.plot(range(1,len(ppn.errors_)+1),ppn.errors_,marker='o',color='red')
plt.xlabel('epochs')
plt.ylabel('number of miscalssifications')
plt.show()
| [
"matplotlib.pyplot.ylabel",
"numpy.where",
"numpy.random.choice",
"matplotlib.pyplot.xlabel",
"numpy.dot",
"matplotlib.pyplot.scatter",
"numpy.random.uniform",
"pandas.DataFrame",
"numpy.shape",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((1308, 1340), 'numpy.random.uniform', 'np.random.uniform', (['(6.0)', '(7.0)', '(150)'], {}), '(6.0, 7.0, 150)\n', (1325, 1340), True, 'import numpy as np\n'), ((1341, 1373), 'numpy.random.uniform', 'np.random.uniform', (['(2.0)', '(4.0)', '(150)'], {}), '(2.0, 4.0, 150)\n', (1358, 1373), True, 'import numpy as np\n'), ((1374, 1406), 'numpy.random.uniform', 'np.random.uniform', (['(5.0)', '(5.5)', '(150)'], {}), '(5.0, 5.5, 150)\n', (1391, 1406), True, 'import numpy as np\n'), ((1407, 1439), 'numpy.random.uniform', 'np.random.uniform', (['(1.5)', '(2.5)', '(150)'], {}), '(1.5, 2.5, 150)\n', (1424, 1439), True, 'import numpy as np\n'), ((1553, 1570), 'pandas.DataFrame', 'pd.DataFrame', (['dic'], {}), '(dic)\n', (1565, 1570), True, 'import pandas as pd\n'), ((1599, 1624), 'numpy.where', 'np.where', (["(y == 'b')", '(-1)', '(1)'], {}), "(y == 'b', -1, 1)\n", (1607, 1624), True, 'import numpy as np\n'), ((1651, 1725), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x[:50, 0]', 'x[:50, 1]'], {'color': '"""red"""', 'marker': '"""o"""', 'label': '"""setosa"""'}), "(x[:50, 0], x[:50, 1], color='red', marker='o', label='setosa')\n", (1662, 1725), True, 'import matplotlib.pyplot as plt\n'), ((1720, 1811), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x[50:100, 0]', 'x[50:100, 1]'], {'color': '"""green"""', 'marker': '"""x"""', 'label': '"""versicolor"""'}), "(x[50:100, 0], x[50:100, 1], color='green', marker='x', label=\n 'versicolor')\n", (1731, 1811), True, 'import matplotlib.pyplot as plt\n'), ((1801, 1827), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""petal length"""'], {}), "('petal length')\n", (1811, 1827), True, 'import matplotlib.pyplot as plt\n'), ((1828, 1854), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""sepal length"""'], {}), "('sepal length')\n", (1838, 1854), True, 'import matplotlib.pyplot as plt\n'), ((1855, 1884), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper right"""'}), "(loc='upper right')\n", (1865, 1884), True, 'import 
matplotlib.pyplot as plt\n'), ((1885, 1895), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1893, 1895), True, 'import matplotlib.pyplot as plt\n'), ((2017, 2037), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epochs"""'], {}), "('epochs')\n", (2027, 2037), True, 'import matplotlib.pyplot as plt\n'), ((2038, 2080), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""number of miscalssifications"""'], {}), "('number of miscalssifications')\n", (2048, 2080), True, 'import matplotlib.pyplot as plt\n'), ((2081, 2091), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2089, 2091), True, 'import matplotlib.pyplot as plt\n'), ((1470, 1498), 'numpy.random.choice', 'np.random.choice', (["['a', 'b']"], {}), "(['a', 'b'])\n", (1486, 1498), True, 'import numpy as np\n'), ((942, 964), 'numpy.dot', 'np.dot', (['x', 'self.w_[1:]'], {}), '(x, self.w_[1:])\n', (948, 964), True, 'import numpy as np\n'), ((455, 466), 'numpy.shape', 'np.shape', (['x'], {}), '(x)\n', (463, 466), True, 'import numpy as np\n')] |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict
from pandas import DataFrame
from lib.cast import safe_int_cast
from lib.data_source import DataSource
from lib.time import datetime_isoformat
class SudanHumdataDataSource(DataSource):
def parse_dataframes(
self, dataframes: Dict[str, DataFrame], aux: Dict[str, DataFrame], **parse_opts
) -> DataFrame:
# Rename the appropriate columns
data = (
dataframes[0]
.rename(
columns={
"Report Date": "date",
"State": "match_string",
"Confirmed Cases": "total_confirmed",
}
)
.drop([0])
)
# The dates in the provided CSV are incorrect for one of the reports.
# Replace with report date taken from text of report.
data.loc[
data["Source"]
== "https://reliefweb.int/sites/reliefweb.int/files/resources/Situation%20Report%20-%20Sudan%20-%207%20May%202020.pdf",
"date",
] = "5/11/2020"
data = data.drop(axis=1, columns=["As of Date", "Source"])
# Remove Abyei PCA, a disputed region with no data shown.
data = data[data["match_string"] != "Abyei PCA"]
# Data source uses different spelling from src/data/iso_3166_2_codes.csv
data["match_string"].replace({"Gedaref": "Al Qadarif"}, inplace=True)
data.date = data.date.apply(lambda x: datetime_isoformat(x, "%m/%d/%Y"))
# Sudan data includes empty cells where there are no confirmed cases.
# These get read in as NaN. Replace them with zeroes so that the
# grouped_diff call to get new confirmed cases works for a state's first
# day with a case.
data["total_confirmed"] = data["total_confirmed"].fillna(0).apply(safe_int_cast)
# Make sure all records have the country code
data["country_code"] = "SD"
# Output the results
return data
| [
"lib.time.datetime_isoformat"
] | [((2025, 2058), 'lib.time.datetime_isoformat', 'datetime_isoformat', (['x', '"""%m/%d/%Y"""'], {}), "(x, '%m/%d/%Y')\n", (2043, 2058), False, 'from lib.time import datetime_isoformat\n')] |
import base64
import dataclasses
import gzip
import json
from collections import defaultdict
from typing import DefaultDict, Dict, Generator, List, Optional
from sentry_sdk.api import capture_exception, capture_message
from posthog.models import utils
Event = Dict
SnapshotData = Dict
@dataclasses.dataclass
class PaginatedSnapshotList:
has_next: bool
paginated_list: List[SnapshotData]
FULL_SNAPSHOT = 2
def preprocess_session_recording_events(events: List[Event]) -> List[Event]:
    """Compress and chunk raw snapshot events, passing other events through.

    Unchunked ``$snapshot`` events are grouped by (session id, window id) and
    each group is replaced by its compressed, chunked representation; all
    other events are returned unchanged.
    """
    passthrough = []
    grouped_snapshots = defaultdict(list)
    for item in events:
        if not is_unchunked_snapshot(item):
            passthrough.append(item)
            continue
        props = item["properties"]
        group_key = (props["$session_id"], props.get("$window_id"))
        grouped_snapshots[group_key].append(item)
    for group in grouped_snapshots.values():
        passthrough.extend(compress_and_chunk_snapshots(group))
    return passthrough
def compress_and_chunk_snapshots(events: List[Event], chunk_size=512 * 1024) -> Generator[Event, None, None]:
    """Gzip the combined snapshot payloads and emit one event per chunk.

    All ``$snapshot_data`` payloads from ``events`` are serialized together,
    compressed + base64-encoded, and split into ``chunk_size``-character
    pieces. Each piece is yielded wrapped in a copy of the first event's
    envelope, tagged with a shared chunk id and its index/count.
    """
    snapshot_payloads = [e["properties"]["$snapshot_data"] for e in events]
    first_props = events[0]["properties"]
    session_id = first_props["$session_id"]
    window_id = first_props.get("$window_id")
    has_full_snapshot = any(payload["type"] == FULL_SNAPSHOT for payload in snapshot_payloads)
    chunk_group_id = str(utils.UUIDT())
    chunks = chunk_string(compress_to_string(json.dumps(snapshot_payloads)), chunk_size)
    chunk_count = len(chunks)
    for index, chunk in enumerate(chunks):
        yield {
            **events[0],
            "properties": {
                **first_props,
                "$session_id": session_id,
                "$window_id": window_id,
                "$snapshot_data": {
                    "chunk_id": chunk_group_id,
                    "chunk_index": index,
                    "chunk_count": chunk_count,
                    "data": chunk,
                    "compression": "gzip-base64",
                    "has_full_snapshot": has_full_snapshot,
                },
            },
        }
def decompress_chunked_snapshot_data(
    team_id: int, session_recording_id: str, snapshot_list: List[SnapshotData]
) -> Generator[SnapshotData, None, None]:
    """Yield snapshot payloads, reassembling and decompressing chunked ones.

    Non-chunked entries are yielded unchanged. Chunked entries are grouped by
    ``chunk_id``; incomplete groups are reported to Sentry and skipped.
    """
    pending_chunks = defaultdict(list)
    for entry in snapshot_list:
        if "chunk_id" in entry:
            pending_chunks[entry["chunk_id"]].append(entry)
        else:
            yield entry
    for group in pending_chunks.values():
        if len(group) != group[0]["chunk_count"]:
            capture_message(
                "Did not find all session recording chunks! Team: {}, Session: {}".format(team_id, session_recording_id)
            )
            continue
        # Chunks must be rejoined in index order before decoding.
        ordered = sorted(group, key=lambda c: c["chunk_index"])
        payload = "".join(chunk["data"] for chunk in ordered)
        yield from json.loads(decompress(payload))
def chunk_string(string: str, chunk_length: int) -> List[str]:
    """Break ``string`` into consecutive pieces of at most ``chunk_length`` chars.

    ``''.join(chunks)`` reverses the operation.
    """
    pieces = []
    for start in range(0, len(string), chunk_length):
        pieces.append(string[start : start + chunk_length])
    return pieces
def is_unchunked_snapshot(event: Dict) -> bool:
    """Return True for a ``$snapshot`` event whose payload is not yet chunked.

    Raises:
        ValueError: if the event name or the snapshot payload is missing.
    """
    try:
        snapshot_event = event["event"] == "$snapshot"
    except KeyError:
        raise ValueError('All events must have the event name field "event"!')
    if not snapshot_event:
        return False
    try:
        return "chunk_id" not in event["properties"]["$snapshot_data"]
    except KeyError:
        capture_exception()
        raise ValueError('$snapshot events must contain property "$snapshot_data"!')
def compress_to_string(json_string: str) -> str:
    """Gzip ``json_string`` (UTF-16 encoded) and return it base64-encoded as text."""
    raw = json_string.encode("utf-16", "surrogatepass")
    return base64.b64encode(gzip.compress(raw)).decode("utf-8")
def decompress(base64data: str) -> str:
    """Reverse of ``compress_to_string``: base64-decode, gunzip, decode UTF-16."""
    raw = gzip.decompress(base64.b64decode(base64data))
    return raw.decode("utf-16", "surrogatepass")
def paginate_snapshot_list(list_to_paginate: List, limit: Optional[int], offset: int) -> PaginatedSnapshotList:
    """Slice ``list_to_paginate`` from ``offset`` and report whether more remain.

    A falsy ``limit`` means "everything from offset onward".
    """
    if limit and offset + limit < len(list_to_paginate):
        return PaginatedSnapshotList(has_next=True, paginated_list=list_to_paginate[offset : offset + limit])
    return PaginatedSnapshotList(has_next=False, paginated_list=list_to_paginate[offset:])
def paginate_chunk_decompression(
    team_id: int,
    session_recording_id: str,
    all_recording_snapshots: List[SnapshotData],
    limit: Optional[int] = None,
    offset: int = 0,
) -> PaginatedSnapshotList:
    """Return one page of decompressed snapshot data.

    Pagination happens at chunk-group granularity: chunked snapshots are first
    grouped by ``chunk_id``, the groups are paginated, and only the selected
    page is reassembled and decompressed. Incomplete groups are reported to
    Sentry and skipped.
    """
    if len(all_recording_snapshots) == 0:
        return PaginatedSnapshotList(has_next=False, paginated_list=[])
    # Simple case of unchunked and therefore uncompressed snapshots
    # (the first entry is treated as representative of the whole list).
    if "chunk_id" not in all_recording_snapshots[0]:
        return paginate_snapshot_list(all_recording_snapshots, limit, offset)
    chunks_collector: DefaultDict[str, List[SnapshotData]] = defaultdict(list)
    for snapshot in all_recording_snapshots:
        chunks_collector[snapshot["chunk_id"]].append(snapshot)
    # Paginate over chunk groups, then decompress only the requested page.
    chunks_list = paginate_snapshot_list(list(chunks_collector.values()), limit, offset)
    decompressed_data_list: List[SnapshotData] = []
    for chunks in chunks_list.paginated_list:
        if len(chunks) != chunks[0]["chunk_count"]:
            capture_message(
                "Did not find all session recording chunks! Team: {}, Session: {}, Chunk-id: {}. Found {} of {} chunks".format(
                    team_id, session_recording_id, chunks[0]["chunk_id"], len(chunks), chunks[0]["chunk_count"],
                )
            )
            continue
        # Chunks must be joined in index order before base64/gzip decoding.
        b64_compressed_data = "".join(chunk["data"] for chunk in sorted(chunks, key=lambda c: c["chunk_index"]))
        decompressed_data = json.loads(decompress(b64_compressed_data))
        decompressed_data_list.extend(decompressed_data)
    return PaginatedSnapshotList(has_next=chunks_list.has_next, paginated_list=decompressed_data_list)
def is_active_event(event: SnapshotData) -> bool:
    """Return True for user-generated ("active") rr-web events.

    Active events are incremental updates (type 3) whose source is one of:
    MouseMove=1, MouseInteraction=2, Scroll=3, ViewportResize=4, Input=5,
    TouchMove=6, MediaInteraction=7 or Drag=12. The remaining defined sources
    (Mutation=0, StyleSheetRule=8, CanvasMutation=9, Font=10, Log=11,
    StyleDeclaration=13) are not counted as user activity.
    """
    if event.get("type") != 3:
        return False
    source = event.get("data", {}).get("source")
    return source in {1, 2, 3, 4, 5, 6, 7, 12}
| [
"base64.b64encode",
"json.dumps",
"base64.b64decode",
"gzip.decompress",
"collections.defaultdict",
"sentry_sdk.api.capture_exception",
"posthog.models.utils.UUIDT"
] | [((556, 573), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (567, 573), False, 'from collections import defaultdict\n'), ((2376, 2393), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2387, 2393), False, 'from collections import defaultdict\n'), ((4073, 4101), 'base64.b64decode', 'base64.b64decode', (['base64data'], {}), '(base64data)\n', (4089, 4101), False, 'import base64\n'), ((5290, 5307), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5301, 5307), False, 'from collections import defaultdict\n'), ((1495, 1516), 'json.dumps', 'json.dumps', (['data_list'], {}), '(data_list)\n', (1505, 1516), False, 'import json\n'), ((1532, 1545), 'posthog.models.utils.UUIDT', 'utils.UUIDT', ([], {}), '()\n', (1543, 1545), False, 'from posthog.models import utils\n'), ((3708, 3727), 'sentry_sdk.api.capture_exception', 'capture_exception', ([], {}), '()\n', (3725, 3727), False, 'from sentry_sdk.api import capture_exception, capture_message\n'), ((3958, 3991), 'base64.b64encode', 'base64.b64encode', (['compressed_data'], {}), '(compressed_data)\n', (3974, 3991), False, 'import base64\n'), ((4113, 4146), 'gzip.decompress', 'gzip.decompress', (['compressed_bytes'], {}), '(compressed_bytes)\n', (4128, 4146), False, 'import gzip\n')] |
"""Django command for rebuilding cohort statistics after import."""
from django.core.management.base import BaseCommand
from django.db import transaction
from ...tasks import refresh_variants_smallvariantsummary
import variants.models as models
class Command(BaseCommand):
    """Implementation of rebuilding variant summary.
    """
    #: Help message displayed on the command line.
    help = "Rebuild the variants summary."
    def add_arguments(self, parser):
        """Add the command's argument to the ``parser``."""
        # NOTE(review): ``store_false`` means passing --async stores False and
        # the default (flag absent) is True — the inverse of the flag's name.
        # handle() below relies on this inversion; see the comment there.
        parser.add_argument("--async", help="Run the rebuild asynchronously.", action="store_false")
    @transaction.atomic
    def handle(self, *args, **options):
        """Perform rebuilding the statistics."""
        # Because of the inverted flag above, options["async"] is True exactly
        # when --async was NOT given: run the refresh directly (synchronously).
        if options["async"]:
            models.refresh_variants_smallvariantsummary()
            msg = "Done rebuilding variant summary."
        else:
            # --async was passed: dispatch the refresh as a background task.
            refresh_variants_smallvariantsummary.delay()
            msg = "Pushed rebuilding variant summary to background."
        self.stdout.write(self.style.SUCCESS(msg))
| [
"variants.models.refresh_variants_smallvariantsummary"
] | [((787, 832), 'variants.models.refresh_variants_smallvariantsummary', 'models.refresh_variants_smallvariantsummary', ([], {}), '()\n', (830, 832), True, 'import variants.models as models\n')] |
"""
BEHAVIOR demo batch analysis script
"""
import argparse
import json
import logging
import os
from pathlib import Path
import pandas as pd
import igibson
from igibson.examples.learning.demo_replaying_example import replay_demo
def replay_demo_batch(
    demo_dir,
    demo_manifest,
    out_dir,
    get_callbacks_callback,
    skip_existing=True,
    ignore_errors=True,
    save_frames=False,
    debug_display=False,
    image_size=(1280, 720),
    deactivate_logger=True,
):
    """
    Execute replay analysis functions (provided through callbacks) on a batch of BEHAVIOR demos.
    @param demo_dir: Directory containing the demo files listed in the manifests.
    @param demo_manifest: The manifest file containing list of BEHAVIOR demos to batch over.
    @param out_dir: Directory to store results in.
    @param get_callbacks_callback: A function that will be called for each demo that needs to return
        a four-tuple: (start_callbacks, step_callbacks, end_callbacks, data_callbacks). Each of the
        the first three callback function sets need to be compatible with the behavior_demo_replay
        API and will be used for this purpose for that particular demo. The data callbacks should
        take no arguments and return a dictionary to be included in the demo's replay data that will
        be saved in the end.
    @param ignore_errors: If an Error is raised, the batch will continue if this is True (with the error saved to the
        log file). If False, the error will be propagated.
    @param skip_existing: Whether demos with existing output logs should be skipped.
    @param save_frames: Whether the demo's frames should be saved alongside statistics.
    @param debug_display: Whether a debug display (the pybullet GUI) should be enabled.
    @param image_size: The image size that should be used by the renderer.
    @param deactivate_logger: If we deactivate the logger
    """
    if deactivate_logger:
        # Silence the root logger so the replay's own output dominates.
        logger = logging.getLogger()
        logger.disabled = True
    # The manifest is a CSV with a "demos" column of demo filenames.
    demo_list = pd.read_csv(demo_manifest)
    logging.info("Demos in manifest: {}".format(demo_list["demos"]))
    for idx, demo in enumerate(demo_list["demos"]):
        # Skip entries whose filename contains "replay" — presumably replay
        # artifacts rather than source demos; TODO confirm naming convention.
        if "replay" in demo:
            continue
        demo_name = os.path.splitext(demo)[0]
        demo_file = os.path.join(demo_dir, demo)
        replaying_log_file = os.path.join(out_dir, demo_name + "_replay_log.json")
        if skip_existing and os.path.exists(replaying_log_file):
            logging.info("Skipping existing demo: {}, {} out of {}".format(demo, idx, len(demo_list["demos"])))
            continue
        logging.info("Replaying demo: {}, {} out of {}".format(demo, idx, len(demo_list["demos"])))
        curr_frame_save_path = None
        if save_frames:
            curr_frame_save_path = os.path.join(out_dir, demo_name + ".mp4")
        try:
            if get_callbacks_callback is not None:
                start_callbacks, step_callbacks, end_callbacks, data_callbacks = get_callbacks_callback(
                    demo_name=demo_name, out_dir=out_dir
                )
            else:
                start_callbacks, step_callbacks, end_callbacks, data_callbacks = [], [], [], []
            demo_information = replay_demo(
                in_demo_file=demo_file,
                frame_save_dir=curr_frame_save_path,
                start_callbacks=start_callbacks,
                step_callbacks=step_callbacks,
                end_callbacks=end_callbacks,
                mode="headless",
                use_pb_gui=debug_display,
                verbose=False,
                image_size=image_size,
            )
            demo_information["failed"] = False
            demo_information["filename"] = Path(demo).name
            # Data callbacks contribute extra key/value pairs to the result.
            for callback in data_callbacks:
                demo_information.update(callback())
        except Exception as e:
            if ignore_errors:
                # Record the failure in the per-demo log and move on.
                logging.info("Demo failed with the error: {}".format(str(e)))
                demo_information = {"demo_id": Path(demo).name, "failed": True, "failure_reason": str(e)}
            else:
                raise
        # One JSON log is written per demo, whether it succeeded or failed.
        with open(replaying_log_file, "w") as file:
            json.dump(demo_information, file)
def parse_args(defaults=False):
    """Build the argument dict consumed by ``replay_demo_batch``.

    Args:
        defaults: If True, skip command-line parsing and return the test
            defaults pointing into the iGibson dataset.

    Returns:
        Dict with keys ``demo_dir``, ``demo_manifest``, ``out_dir`` and
        ``split``.
    """
    args_dict = dict()
    args_dict["demo_dir"] = os.path.join(igibson.ig_dataset_path, "tests")
    args_dict["demo_manifest"] = os.path.join(igibson.ig_dataset_path, "tests", "test_manifest.txt")
    args_dict["out_dir"] = os.path.join(igibson.ig_dataset_path, "tests")
    args_dict["split"] = 0
    if not defaults:
        parser = argparse.ArgumentParser(description="Replays a batch demos specified in a manifest file")
        parser.add_argument(
            "--demo_dir", type=str, required=True, help="Directory containing the demo files listed in the manifests."
        )
        parser.add_argument(
            "--demo_manifest",
            type=str,
            required=True,
            help="The manifest file containing list of BEHAVIOR demos to batch over.",
        )
        # BUG FIX: this option was previously registered as a second
        # "--demo_dir", which makes argparse raise ArgumentError at startup.
        parser.add_argument("--out_dir", type=str, required=True, help="Directory to store results in.")
        args = parser.parse_args()
        # BUG FIX: argparse stores values under the option names
        # (demo_dir/demo_manifest/out_dir); the old attributes
        # (demo_root/log_manifest) did not exist and raised AttributeError.
        args_dict["demo_dir"] = args.demo_dir
        args_dict["demo_manifest"] = args.demo_manifest
        args_dict["out_dir"] = args.out_dir
    return args_dict
def main(selection="user", headless=False, short_exec=False):
    """
    Replays a batch of demos specified in a manifest file
    """
    # NOTE: main.__doc__ is embedded in the logged banner below, so the
    # docstring text is part of the runtime output.
    logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
    # This specific argument combination marks an automated test run.
    testing = selection == "random" and headless and short_exec
    args_dict = parse_args(defaults=testing)
    get_callbacks_callback = None  # Add a function that generates callbacks here to call during the batch processing
    replay_demo_batch(
        args_dict["demo_dir"],
        args_dict["demo_manifest"],
        args_dict["out_dir"],
        get_callbacks_callback,
        deactivate_logger=False,
        skip_existing=not testing,  # Do not skip when testing
        ignore_errors=not testing,  # Do not ignore when testing
    )
RUN_AS_TEST = False  # Change to True to run this example in test mode
if __name__ == "__main__":
    if RUN_AS_TEST:
        # Deterministic, headless, short run used for automated testing.
        main(selection="random", headless=True, short_exec=True)
    else:
        main()
| [
"logging.getLogger",
"os.path.exists",
"argparse.ArgumentParser",
"pandas.read_csv",
"pathlib.Path",
"os.path.join",
"os.path.splitext",
"igibson.examples.learning.demo_replaying_example.replay_demo",
"logging.info",
"json.dump"
] | [((2037, 2063), 'pandas.read_csv', 'pd.read_csv', (['demo_manifest'], {}), '(demo_manifest)\n', (2048, 2063), True, 'import pandas as pd\n'), ((4323, 4369), 'os.path.join', 'os.path.join', (['igibson.ig_dataset_path', '"""tests"""'], {}), "(igibson.ig_dataset_path, 'tests')\n", (4335, 4369), False, 'import os\n'), ((4403, 4470), 'os.path.join', 'os.path.join', (['igibson.ig_dataset_path', '"""tests"""', '"""test_manifest.txt"""'], {}), "(igibson.ig_dataset_path, 'tests', 'test_manifest.txt')\n", (4415, 4470), False, 'import os\n'), ((4498, 4544), 'os.path.join', 'os.path.join', (['igibson.ig_dataset_path', '"""tests"""'], {}), "(igibson.ig_dataset_path, 'tests')\n", (4510, 4544), False, 'import os\n'), ((5514, 5581), 'logging.info', 'logging.info', (["('*' * 80 + '\\nDescription:' + main.__doc__ + '*' * 80)"], {}), "('*' * 80 + '\\nDescription:' + main.__doc__ + '*' * 80)\n", (5526, 5581), False, 'import logging\n'), ((1969, 1988), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (1986, 1988), False, 'import logging\n'), ((2304, 2332), 'os.path.join', 'os.path.join', (['demo_dir', 'demo'], {}), '(demo_dir, demo)\n', (2316, 2332), False, 'import os\n'), ((2362, 2415), 'os.path.join', 'os.path.join', (['out_dir', "(demo_name + '_replay_log.json')"], {}), "(out_dir, demo_name + '_replay_log.json')\n", (2374, 2415), False, 'import os\n'), ((4610, 4704), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Replays a batch demos specified in a manifest file"""'}), "(description=\n 'Replays a batch demos specified in a manifest file')\n", (4633, 4704), False, 'import argparse\n'), ((2258, 2280), 'os.path.splitext', 'os.path.splitext', (['demo'], {}), '(demo)\n', (2274, 2280), False, 'import os\n'), ((2446, 2480), 'os.path.exists', 'os.path.exists', (['replaying_log_file'], {}), '(replaying_log_file)\n', (2460, 2480), False, 'import os\n'), ((2812, 2853), 'os.path.join', 'os.path.join', (['out_dir', "(demo_name + '.mp4')"], {}), 
"(out_dir, demo_name + '.mp4')\n", (2824, 2853), False, 'import os\n'), ((3244, 3502), 'igibson.examples.learning.demo_replaying_example.replay_demo', 'replay_demo', ([], {'in_demo_file': 'demo_file', 'frame_save_dir': 'curr_frame_save_path', 'start_callbacks': 'start_callbacks', 'step_callbacks': 'step_callbacks', 'end_callbacks': 'end_callbacks', 'mode': '"""headless"""', 'use_pb_gui': 'debug_display', 'verbose': '(False)', 'image_size': 'image_size'}), "(in_demo_file=demo_file, frame_save_dir=curr_frame_save_path,\n start_callbacks=start_callbacks, step_callbacks=step_callbacks,\n end_callbacks=end_callbacks, mode='headless', use_pb_gui=debug_display,\n verbose=False, image_size=image_size)\n", (3255, 3502), False, 'from igibson.examples.learning.demo_replaying_example import replay_demo\n'), ((4204, 4237), 'json.dump', 'json.dump', (['demo_information', 'file'], {}), '(demo_information, file)\n', (4213, 4237), False, 'import json\n'), ((3740, 3750), 'pathlib.Path', 'Path', (['demo'], {}), '(demo)\n', (3744, 3750), False, 'from pathlib import Path\n'), ((4040, 4050), 'pathlib.Path', 'Path', (['demo'], {}), '(demo)\n', (4044, 4050), False, 'from pathlib import Path\n')] |
from celery.task import task
from time import sleep
@task()
def add(x, y):
    """Celery task: return the sum of ``x`` and ``y``."""
    return x + y
@task()
def status(delay):
    """Celery task that sleeps twice for ``delay`` seconds, reporting progress.

    ``status`` here presumably resolves to the bound celery Task object, so
    ``update_state`` publishes custom PROGRESS metadata — confirm against the
    celery version in use.
    """
    status.update_state(state='PROGRESS', meta={'description': 'starting timer'})
    sleep(delay)
    status.update_state(state='PROGRESS', meta={'description': 'after first sleep'})
    sleep(delay)
    return 'done'
| [
"celery.task.task",
"time.sleep"
] | [((54, 60), 'celery.task.task', 'task', ([], {}), '()\n', (58, 60), False, 'from celery.task import task\n'), ((96, 102), 'celery.task.task', 'task', ([], {}), '()\n', (100, 102), False, 'from celery.task import task\n'), ((208, 220), 'time.sleep', 'sleep', (['delay'], {}), '(delay)\n', (213, 220), False, 'from time import sleep\n'), ((310, 322), 'time.sleep', 'sleep', (['delay'], {}), '(delay)\n', (315, 322), False, 'from time import sleep\n')] |
from aioresponses import aioresponses
from asynctest import TestCase
from asyncworker.testing import HttpClientContext
from baas.api import app
class TransferAPITest(TestCase):
    """Integration tests for the transfer API's health endpoint."""

    async def test_health(self):
        """GET /health responds with the OK payload."""
        async with HttpClientContext(app) as http_client:
            response = await http_client.get("/health")
            payload = await response.json()
            self.assertEqual({"OK": True}, payload)
| [
"asyncworker.testing.HttpClientContext"
] | [((232, 254), 'asyncworker.testing.HttpClientContext', 'HttpClientContext', (['app'], {}), '(app)\n', (249, 254), False, 'from asyncworker.testing import HttpClientContext\n')] |
"""
Capstone Project. Code to run on the EV3 robot (NOT on a laptop).
Author: Your professors (for the framework)
and <NAME>.
Winter term, 2018-2019.
"""
import rosebot
import mqtt_remote_method_calls as com
import time
import shared_gui_delegate_on_robot as dingding
def main():
    """Entry point for the code running on the EV3 robot.

    Delegates to the robot loop, which both drives the robot and communicates
    via MQTT with the GUI code running on the laptop.
    """
    big_boy_robot_code()
def big_boy_robot_code():
    """Construct the robot and its MQTT delegate, then idle awaiting commands."""
    robot = rosebot.RoseBot()
    delegate_that_recieves = dingding.DelegateThatRecieves(robot)
    mqtt_reciever = com.MqttClient(delegate_that_recieves)
    mqtt_reciever.connect_to_pc()
    # Spin forever; the MQTT client invokes delegate methods as messages arrive.
    while True:
        time.sleep(0.01)
# -----------------------------------------------------------------------------
# Calls  main  to start the ball rolling.
# -----------------------------------------------------------------------------
# NOTE: no ``if __name__ == "__main__"`` guard — main() also runs on import.
main() | [
"rosebot.RoseBot",
"shared_gui_delegate_on_robot.DelegateThatRecieves",
"time.sleep",
"mqtt_remote_method_calls.MqttClient"
] | [((545, 562), 'rosebot.RoseBot', 'rosebot.RoseBot', ([], {}), '()\n', (560, 562), False, 'import rosebot\n'), ((592, 628), 'shared_gui_delegate_on_robot.DelegateThatRecieves', 'dingding.DelegateThatRecieves', (['robot'], {}), '(robot)\n', (621, 628), True, 'import shared_gui_delegate_on_robot as dingding\n'), ((649, 687), 'mqtt_remote_method_calls.MqttClient', 'com.MqttClient', (['delegate_that_recieves'], {}), '(delegate_that_recieves)\n', (663, 687), True, 'import mqtt_remote_method_calls as com\n'), ((747, 763), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (757, 763), False, 'import time\n')] |
import numpy as np
from typing import Any, Tuple, Dict
import logging
class NotDescentDirection(Exception):
    """Raised when the proposed direction has a positive gradient inner product."""
    pass
class ZeroDescentProduct(Exception):
    """Raised when the descent inner product is numerically zero (converged)."""
    pass
class ZeroUpdate(Exception):
    """Raised when a step's update falls below the convergence tolerance."""
    pass
class Newton:
    """Newton's method with a regularized inverse Hessian and Armijo line search."""

    def __init__(self,
        obj_func : Any,
        gradient_func : Any,
        reg_inv_hessian : Any
        ):
        """Store the objective, gradient and regularized-inverse-Hessian callables.

        Each callable takes the parameter vector and returns, respectively,
        a scalar, a gradient vector, and a matrix.
        """
        self.gradient_func = gradient_func
        self.obj_func = obj_func
        self.reg_inv_hessian = reg_inv_hessian
        # Logging
        handlerPrint = logging.StreamHandler()
        handlerPrint.setLevel(logging.DEBUG)
        # NOTE(review): logger is named "l-bfgs" although this class implements
        # Newton's method — possibly copied from a sibling implementation.
        self.log = logging.getLogger("l-bfgs")
        self.log.addHandler(handlerPrint)
        self.log.setLevel(logging.DEBUG)
        # Armijo line-search constants: sufficient-decrease factor c and
        # backtracking shrink factor tau.
        self.line_search_c = pow(10,-4)
        self.line_search_tau = 0.5
    def get_descent_inner_product(self,
        p : np.array,
        params : np.array
        ) -> float:
        """Return grad·p; raise if it is (numerically) zero or positive.

        Raises:
            ZeroDescentProduct: inner product within -1e-16 of zero (converged).
            NotDescentDirection: inner product positive (p is not a descent
                direction).
        """
        grads = self.gradient_func(params)
        inner_prod = np.dot(p, grads)
        if inner_prod > -1e-16 and inner_prod <= 0:
            raise ZeroDescentProduct()
        elif inner_prod > 0:
            self.log.error("ERROR: Positive inner product: %.16f" % inner_prod)
            raise NotDescentDirection()
        return inner_prod
    def run_line_search(self,
        p : np.array,
        params : np.array
        ) -> float:
        """Backtracking Armijo line search along direction ``p``.

        Shrinks alpha by ``line_search_tau`` until the sufficient-decrease
        condition f(x + alpha p) - f(x) <= alpha * c * (grad·p) holds.

        Returns:
            The accepted step length alpha.
        """
        # Check inputs
        assert self.line_search_tau < 1
        assert self.line_search_tau > 0
        assert self.line_search_c > 0
        assert self.line_search_c < 1
        inner_prod = self.get_descent_inner_product(p, params)
        alpha = 1.0
        fx = self.obj_func(params)
        fx_new = self.obj_func(params + alpha * p)
        rhs = alpha * self.line_search_c * inner_prod
        self.log.debug("   Line search armijo: obj func old: %f new: %f diff: %.16f rhs: %.16f" % (fx, fx_new, fx_new - fx, rhs))
        while fx_new - fx > rhs:
            alpha *= self.line_search_tau
            fx_new = self.obj_func(params + alpha * p)
            rhs = alpha * self.line_search_c * inner_prod
            self.log.debug("   Line search armijo: obj func old: %f new: %f diff: %.16f rhs: %.16f" % (fx, fx_new, fx_new - fx, rhs))
        return alpha
    def step(self,
        k : int,
        tol : float,
        params : np.array
        ) -> Tuple[bool,np.array,np.array,float]:
        """Perform one Newton iteration.

        Returns:
            (converged, new_params, update, alpha). Convergence exceptions
            from the line search / update check are caught and reported via
            the converged flag rather than propagated.
        """
        update = np.zeros(len(params))
        try:
            self.log.debug("Iteration: %d [start]" % k)
            # Get current grads
            gradients = self.gradient_func(params)
            # Get regularized inv hessian
            rih = self.reg_inv_hessian(params)
            # Calculate updates
            update = - np.dot(rih, gradients)
            # Line search
            alpha = self.run_line_search(update, params)
            update *= alpha
            self.log.debug("   Line search factor: %.16f" % alpha)
            # Commit update
            params_new = params + update
            self.log.debug("   Old params: %s" % params)
            self.log.debug("   New params: %s" % params_new)
            self.log.debug("Iteration: %d [finished]" % k)
            # Monitor convergence
            if np.max(abs(update)) < tol:
                raise ZeroUpdate()
            return (False, params_new, update, alpha)
        except ZeroUpdate:
            self.log.info("Converged because zero update")
            return (True, params, update, 1.0)
        except ZeroDescentProduct:
            self.log.info("Converged because zero descent inner product")
            return (True, params, update, 1.0)
    def run(self,
        no_steps : int,
        params_init : np.array,
        tol : float = 1e-8,
        store_traj : bool = False
        ) -> Tuple[bool, int, np.array, Dict[int, np.array], Dict[int, float]]:
        """Run up to ``no_steps`` Newton iterations from ``params_init``.

        Returns:
            (converged, steps_taken, last_update, trajectory, line_search_factors).
            The trajectory/line-search dicts are populated only when
            ``store_traj`` is True.
        """
        assert no_steps >= 1
        params = params_init.copy()
        traj = {}
        line_search = {}
        if store_traj:
            traj[0] = params.copy()
        update = np.zeros(len(params_init))
        for k in range(0,no_steps):
            converged, params, update, alpha = self.step(
                k=k,
                tol=tol,
                params=params
                )
            if store_traj:
                traj[k+1] = params.copy()
                line_search[k+1] = alpha
            if converged:
                return (True, k, update, traj, line_search)
        return (False, no_steps, update, traj, line_search) | [
"logging.getLogger",
"numpy.dot",
"logging.StreamHandler"
] | [((505, 528), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (526, 528), False, 'import logging\n'), ((593, 620), 'logging.getLogger', 'logging.getLogger', (['"""l-bfgs"""'], {}), "('l-bfgs')\n", (610, 620), False, 'import logging\n'), ((953, 969), 'numpy.dot', 'np.dot', (['p', 'grads'], {}), '(p, grads)\n', (959, 969), True, 'import numpy as np\n'), ((2727, 2749), 'numpy.dot', 'np.dot', (['rih', 'gradients'], {}), '(rih, gradients)\n', (2733, 2749), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
# Copyright © 2021 Helmholtz Centre Potsdam GFZ German Research Centre for Geosciences, Potsdam, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
"""
Test cases for the intensity classes.
"""
import unittest
import intensityprovider
import testimplementations
class TestIntensity(unittest.TestCase):
    """
    Unit test class for intensity related classes.
    """
    def test_always_the_same_intensity_provider(self):
        """
        Tests a test implementation which always
        returns the same values regardless of
        the coordinates.
        """
        intensity_provider = testimplementations.AlwaysTheSameIntensityProvider(
            kind="PGA", value=1.0, unit="g"
        )
        intensities, units = intensity_provider.get_nearest(1, 1)
        self.assertLess(0.9, intensities["PGA"])
        self.assertLess(intensities["PGA"], 1.1)
        self.assertEqual(units["PGA"], "g")
        # A distant coordinate must yield exactly the same result.
        intensities2, units2 = intensity_provider.get_nearest(180, 90)
        self.assertEqual(intensities, intensities2)
        self.assertEqual(units, units2)
    def test_alias_intensity_provider(self):
        """
        Test for aliases.
        """
        inner_intensity_provider = testimplementations.AlwaysTheSameIntensityProvider(
            kind="PGA", value=1.0, unit="g"
        )
        intensity_provider = intensityprovider.AliasIntensityProvider(
            inner_intensity_provider,
            aliases={
                "SA_01": ["PGA"],
                "SA_03": ["PGA"],
                "ID": ["mwh"],
            },
        )
        intensities, units = intensity_provider.get_nearest(1, 1)
        # Aliased kinds mirror the PGA value and unit.
        for kind in ["PGA", "SA_01", "SA_03"]:
            self.assertLess(0.9, intensities[kind])
            self.assertLess(intensities[kind], 1.1)
            self.assertEqual(units[kind], "g")
        # Aliases whose source kind is absent must not appear at all.
        self.assertNotIn("mwh", intensities.keys())
        self.assertNotIn("ID", intensities.keys())
    def test_conversion_intensity_provider(self):
        """
        Test for intensity conversion.
        """
        inner_intensity_provider = testimplementations.AlwaysTheSameIntensityProvider(
            kind="PGA", value=1.0, unit="g"
        )
        def pga_to_pga1000(old_intensity, old_unit):
            # Conversion function handed to the provider under test.
            return old_intensity / 1000, "g/1000"
        intensity_provider = intensityprovider.ConversionIntensityProvider(
            inner_intensity_provider,
            from_intensity="PGA",
            as_intensity="PGA/1000",
            fun=pga_to_pga1000,
        )
        intensities, units = intensity_provider.get_nearest(1, 1)
        # The original kind is preserved...
        self.assertLess(0.9, intensities["PGA"])
        self.assertLess(intensities["PGA"], 1.1)
        self.assertEqual(units["PGA"], "g")
        # ...and the converted kind is added alongside it.
        self.assertLess(0.0009, intensities["PGA/1000"])
        self.assertLess(intensities["PGA/1000"], 0.0011)
        self.assertEqual(units["PGA/1000"], "g/1000")
if __name__ == "__main__":
    # Run the unit tests when executed as a script.
    unittest.main()
| [
"unittest.main",
"intensityprovider.AliasIntensityProvider",
"testimplementations.AlwaysTheSameIntensityProvider",
"intensityprovider.ConversionIntensityProvider"
] | [((3462, 3477), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3475, 3477), False, 'import unittest\n'), ((1116, 1203), 'testimplementations.AlwaysTheSameIntensityProvider', 'testimplementations.AlwaysTheSameIntensityProvider', ([], {'kind': '"""PGA"""', 'value': '(1.0)', 'unit': '"""g"""'}), "(kind='PGA', value=1.0,\n unit='g')\n", (1166, 1203), False, 'import testimplementations\n'), ((1728, 1815), 'testimplementations.AlwaysTheSameIntensityProvider', 'testimplementations.AlwaysTheSameIntensityProvider', ([], {'kind': '"""PGA"""', 'value': '(1.0)', 'unit': '"""g"""'}), "(kind='PGA', value=1.0,\n unit='g')\n", (1778, 1815), False, 'import testimplementations\n'), ((1864, 1996), 'intensityprovider.AliasIntensityProvider', 'intensityprovider.AliasIntensityProvider', (['inner_intensity_provider'], {'aliases': "{'SA_01': ['PGA'], 'SA_03': ['PGA'], 'ID': ['mwh']}"}), "(inner_intensity_provider, aliases=\n {'SA_01': ['PGA'], 'SA_03': ['PGA'], 'ID': ['mwh']})\n", (1904, 1996), False, 'import intensityprovider\n'), ((2610, 2697), 'testimplementations.AlwaysTheSameIntensityProvider', 'testimplementations.AlwaysTheSameIntensityProvider', ([], {'kind': '"""PGA"""', 'value': '(1.0)', 'unit': '"""g"""'}), "(kind='PGA', value=1.0,\n unit='g')\n", (2660, 2697), False, 'import testimplementations\n'), ((2850, 2992), 'intensityprovider.ConversionIntensityProvider', 'intensityprovider.ConversionIntensityProvider', (['inner_intensity_provider'], {'from_intensity': '"""PGA"""', 'as_intensity': '"""PGA/1000"""', 'fun': 'pga_to_pga1000'}), "(inner_intensity_provider,\n from_intensity='PGA', as_intensity='PGA/1000', fun=pga_to_pga1000)\n", (2895, 2992), False, 'import intensityprovider\n')] |
import pytest
from sqlalchemy.orm.exc import NoResultFound
from content_store.api.api import create_app
from content_store.api.config import TestingConfig
from content_store.api.models import ArticlePart
from content_store.api.repositories import ArticlePartRepository
from content_store.api.database import DB
@pytest.fixture
def app():
    """Yield the application with a freshly dropped-and-recreated test database."""
    application = create_app(TestingConfig)
    with application.app_context():
        DB.drop_all()
        DB.create_all()
        yield application
@pytest.mark.usefixtures("app")
def test_add_get_parts():
    """Each stored article part can be retrieved by (article_id, version, name)."""
    article_parts = ArticlePartRepository(DB)
    # NOTE(review): the "002"/"body" content string says "front matter" —
    # possible copy-paste in the fixture data; harmless for this round-trip test.
    test_parts = (
        ArticlePart("001", 1, "front", "Article 001 front matter content v1"),
        ArticlePart("001", 1, "body", "Article 001 body content v1"),
        ArticlePart("002", 1, "front", "Article 002 front matter content v1"),
        ArticlePart("002", 1, "body", "Article 002 front matter content v1")
    )
    for part in test_parts:
        article_parts.add_article_part(part)
        assert part == article_parts.get_article_part(
            article_id=part.article_id,
            version=part.version,
            part_name=part.part_name
        )
@pytest.mark.usefixtures("app")
def test_delete_part():
    """A deleted article part can no longer be fetched (NoResultFound)."""
    article_parts = ArticlePartRepository(DB)
    test_part = ArticlePart("001", 1, "front", "Article 001 front matter content v1")
    article_parts.add_article_part(test_part)
    part = article_parts.get_article_part(
        test_part.article_id,
        test_part.version,
        test_part.part_name
    )
    assert part == test_part
    article_parts.delete_article_part(
        part.article_id,
        part.version,
        part.part_name
    )
    # Fetching the deleted part must now raise.
    with pytest.raises(NoResultFound):
        article_parts.get_article_part(
            test_part.article_id,
            test_part.version,
            test_part.part_name
        )
| [
"content_store.api.database.DB.create_all",
"content_store.api.repositories.ArticlePartRepository",
"pytest.raises",
"pytest.mark.usefixtures",
"content_store.api.models.ArticlePart",
"content_store.api.database.DB.drop_all",
"content_store.api.api.create_app"
] | [((496, 526), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""app"""'], {}), "('app')\n", (519, 526), False, 'import pytest\n'), ((1184, 1214), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""app"""'], {}), "('app')\n", (1207, 1214), False, 'import pytest\n'), ((359, 384), 'content_store.api.api.create_app', 'create_app', (['TestingConfig'], {}), '(TestingConfig)\n', (369, 384), False, 'from content_store.api.api import create_app\n'), ((574, 599), 'content_store.api.repositories.ArticlePartRepository', 'ArticlePartRepository', (['DB'], {}), '(DB)\n', (595, 599), False, 'from content_store.api.repositories import ArticlePartRepository\n'), ((1260, 1285), 'content_store.api.repositories.ArticlePartRepository', 'ArticlePartRepository', (['DB'], {}), '(DB)\n', (1281, 1285), False, 'from content_store.api.repositories import ArticlePartRepository\n'), ((1302, 1371), 'content_store.api.models.ArticlePart', 'ArticlePart', (['"""001"""', '(1)', '"""front"""', '"""Article 001 front matter content v1"""'], {}), "('001', 1, 'front', 'Article 001 front matter content v1')\n", (1313, 1371), False, 'from content_store.api.models import ArticlePart\n'), ((429, 442), 'content_store.api.database.DB.drop_all', 'DB.drop_all', ([], {}), '()\n', (440, 442), False, 'from content_store.api.database import DB\n'), ((451, 466), 'content_store.api.database.DB.create_all', 'DB.create_all', ([], {}), '()\n', (464, 466), False, 'from content_store.api.database import DB\n'), ((628, 697), 'content_store.api.models.ArticlePart', 'ArticlePart', (['"""001"""', '(1)', '"""front"""', '"""Article 001 front matter content v1"""'], {}), "('001', 1, 'front', 'Article 001 front matter content v1')\n", (639, 697), False, 'from content_store.api.models import ArticlePart\n'), ((707, 767), 'content_store.api.models.ArticlePart', 'ArticlePart', (['"""001"""', '(1)', '"""body"""', '"""Article 001 body content v1"""'], {}), "('001', 1, 'body', 'Article 001 body content v1')\n", 
(718, 767), False, 'from content_store.api.models import ArticlePart\n'), ((777, 846), 'content_store.api.models.ArticlePart', 'ArticlePart', (['"""002"""', '(1)', '"""front"""', '"""Article 002 front matter content v1"""'], {}), "('002', 1, 'front', 'Article 002 front matter content v1')\n", (788, 846), False, 'from content_store.api.models import ArticlePart\n'), ((856, 924), 'content_store.api.models.ArticlePart', 'ArticlePart', (['"""002"""', '(1)', '"""body"""', '"""Article 002 front matter content v1"""'], {}), "('002', 1, 'body', 'Article 002 front matter content v1')\n", (867, 924), False, 'from content_store.api.models import ArticlePart\n'), ((1708, 1736), 'pytest.raises', 'pytest.raises', (['NoResultFound'], {}), '(NoResultFound)\n', (1721, 1736), False, 'import pytest\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Schema migration 0002 for painindex_app.

    Adds the PainProfile model, links PainReport to it via a nullable
    foreign key, and constrains PainReport.intensity to the 1-10 range.
    """
    dependencies = [
        ('painindex_app', '0001_initial'),
    ]
    operations = [
        # PainProfile: created with only the implicit primary key; its
        # concrete fields are presumably added in a later migration.
        migrations.CreateModel(
            name='PainProfile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Optional link from a pain report to its profile.
        # NOTE(review): ForeignKey without on_delete is pre-Django-2.0
        # syntax (implicit CASCADE) — consistent with this migration's era.
        migrations.AddField(
            model_name='painreport',
            name='pain_profile',
            field=models.ForeignKey(blank=True, to='painindex_app.PainProfile', null=True),
            preserve_default=True,
        ),
        # Restrict intensity to the discrete choices 1 through 10.
        migrations.AlterField(
            model_name='painreport',
            name='intensity',
            field=models.IntegerField(choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10)]),
        ),
    ]
| [
"django.db.models.AutoField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey"
] | [((653, 725), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'to': '"""painindex_app.PainProfile"""', 'null': '(True)'}), "(blank=True, to='painindex_app.PainProfile', null=True)\n", (670, 725), False, 'from django.db import models, migrations\n'), ((889, 1004), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': '[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (\n 10, 10)]'}), '(choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6),\n (7, 7), (8, 8), (9, 9), (10, 10)])\n', (908, 1004), False, 'from django.db import models, migrations\n'), ((346, 439), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (362, 439), False, 'from django.db import models, migrations\n')] |
# Transfer functions and derivatives
# Note _all_ transfer functions and derivatives _must_ accept keyword arguments
# and handle the output keyword argument out=z correctly.
# <NAME>
import numpy as np
import scipy.special
#-------------------------------------------------------------------------------
# The triple-quoted block below is a dead pure-NumPy reference implementation
# of the logistic sigmoid (kept for documentation); the scipy version is used.
"""
def sigval(x, **kwds):
  # return 1./(1+exp(-x))
  # return 0.5 * np.tanh(0.5*x) + 0.5
  z = kwds["out"] if "out" in kwds else np.empty_like(x)
  np.multiply(x, 0.5, out=z)
  np.tanh(z, out=z)
  np.multiply(z, 0.5, out=z)
  np.add(z, 0.5, out=z)
  return z
"""
sigval = scipy.special.expit  # logistic sigmoid 1/(1+exp(-x)), elementwise
#-------------------------------------------------------------------------------
def sigder(x, **kwds):
  """Derivative of the logistic sigmoid, computed as val * (1 - val).

  Keyword arguments:
    out -- optional pre-allocated output array (written in place, returned)
    val -- optional precomputed sigval(x); avoids recomputing the sigmoid
  """
  if "out" in kwds:
    grad = kwds["out"]
  else:
    grad = np.empty_like(x)
  if "val" in kwds:
    sig = kwds["val"]
  else:
    sig = sigval(x)
  np.subtract(1., sig, out=grad)
  np.multiply(grad, sig, out=grad)
  return grad
#-------------------------------------------------------------------------------
def ReLU(x, **kwds):
  """Rectified linear unit: copy of x with negative entries set to 0.

  Keyword arguments:
    out -- optional pre-allocated output array (written in place, returned)
    ind -- optional precomputed boolean mask of entries to zero (default x < 0)
  """
  z = kwds["out"] if "out" in kwds else np.empty_like(x)
  y = kwds["ind"] if "ind" in kwds else x < 0
  np.copyto(z, x, casting='no')
  # BUGFIX: the original `z[y].fill(0.)` filled a *copy* — boolean advanced
  # indexing returns a new array — so z was returned unmodified. Masked
  # assignment writes into z itself.
  z[y] = 0.
  return z
#-------------------------------------------------------------------------------
def ReDU(x, **kwds):
  """Derivative of ReLU: 1 where x >= 0, 0 where x < 0.

  Keyword arguments:
    out -- optional pre-allocated output array (written in place, returned)
    ind -- optional precomputed boolean mask of entries to zero (default x < 0)
  """
  z = kwds["out"] if "out" in kwds else np.empty_like(x)
  y = kwds["ind"] if "ind" in kwds else x < 0
  z.fill(1.)
  # BUGFIX: the original `z[y].fill(0.)` filled a *copy* — boolean advanced
  # indexing returns a new array — so the derivative was 1 everywhere.
  z[y] = 0.
  return z
#-------------------------------------------------------------------------------
# Registry mapping a transfer-function name to its (function, derivative)
# pair; 'none' maps to (None, None).
TRANSFER_FUNCTION_DERIVATIVE = {'none': (None, None),
                                'sigm': (sigval, sigder),
                                'relu': (ReLU, ReDU)}
#-------------------------------------------------------------------------------
| [
"numpy.copyto",
"numpy.multiply",
"numpy.empty_like",
"numpy.subtract"
] | [((852, 878), 'numpy.subtract', 'np.subtract', (['(1.0)', 'y'], {'out': 'z'}), '(1.0, y, out=z)\n', (863, 878), True, 'import numpy as np\n'), ((880, 904), 'numpy.multiply', 'np.multiply', (['z', 'y'], {'out': 'z'}), '(z, y, out=z)\n', (891, 904), True, 'import numpy as np\n'), ((1125, 1154), 'numpy.copyto', 'np.copyto', (['z', 'x'], {'casting': '"""no"""'}), "(z, x, casting='no')\n", (1134, 1154), True, 'import numpy as np\n'), ((783, 799), 'numpy.empty_like', 'np.empty_like', (['x'], {}), '(x)\n', (796, 799), True, 'import numpy as np\n'), ((1060, 1076), 'numpy.empty_like', 'np.empty_like', (['x'], {}), '(x)\n', (1073, 1076), True, 'import numpy as np\n'), ((1325, 1341), 'numpy.empty_like', 'np.empty_like', (['x'], {}), '(x)\n', (1338, 1341), True, 'import numpy as np\n')] |
# Copyright (c) 2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from swift.common.utils import config_true_value
class Config(dict):
    """A dict whose entries are also readable/writable as attributes.

    Assigning to a key whose current value is a bool coerces the new value
    with config_true_value(); an int current value coerces with int().
    """

    def __init__(self, base=None):
        """Optionally seed the config from *base* (mapping or pair iterable)."""
        if base is not None:
            self.update(base)

    def __getattr__(self, name):
        # Attribute access falls through to item access.
        if name in self:
            return self[name]
        raise AttributeError("No attribute '%s'" % name)

    def __setattr__(self, name, value):
        self[name] = value

    def __delattr__(self, name):
        del self[name]

    def update(self, other):
        # Accept either a mapping or an iterable of (key, value) pairs;
        # route every assignment through __setitem__ so coercion applies.
        if hasattr(other, 'keys'):
            pairs = ((key, other[key]) for key in other.keys())
        else:
            pairs = other
        for key, value in pairs:
            self[key] = value

    def __setitem__(self, key, value):
        current = self.get(key)
        # bool is checked before int on purpose: bool subclasses int.
        if isinstance(current, bool):
            value = config_true_value(value)
        elif isinstance(current, int):
            value = int(value)
        dict.__setitem__(self, key, value)
# Global config dictionary. The default values can be defined here.
CONF = Config({
    # Because Config.__setitem__ coerces by the existing value's type,
    # these defaults also fix each key's type: a bool default routes later
    # assignments through config_true_value(), an int default through int().
    'allow_no_owner': False,
    'location': 'US',
    'max_bucket_listing': 1000,
    'max_parts_listing': 1000,
    'max_multi_delete_objects': 1000,
    's3_acl': False,
    'storage_domain': '',
    'auth_pipeline_check': True,
    'max_upload_part_num': 1000,
    'check_bucket_owner': False,
    'force_swift_request_proxy_log': False,
})
| [
"swift.common.utils.config_true_value"
] | [((1382, 1406), 'swift.common.utils.config_true_value', 'config_true_value', (['value'], {}), '(value)\n', (1399, 1406), False, 'from swift.common.utils import config_true_value\n')] |
import urwid
from console.app import app
from console.widgets.help import HelpDialog
class Pane(urwid.WidgetPlaceholder):
    """
    A widget which allows for easy display of dialogs.
    """
    def __init__(self, widget=None):
        # BUGFIX: the default used to be `widget=urwid.SolidFill(' ')`,
        # evaluated once at class-definition time, so every Pane created
        # without an explicit widget shared the same SolidFill instance
        # (mutable-default-argument pattern). Create a fresh one per Pane.
        if widget is None:
            widget = urwid.SolidFill(' ')
        urwid.WidgetPlaceholder.__init__(self, widget)
        self.widget = widget  # base widget restored when a dialog closes
        self.dialog = None    # currently displayed dialog, if any

    def show_dialog(self, dialog):
        """Overlay *dialog* (wrapped in a LineBox) on top of the base widget.

        Does nothing if a dialog is already showing. Placement honours the
        dialog's optional align/width/valign/height attributes, with
        sensible defaults.
        """
        if not self.dialog:
            self.dialog = dialog
            self.original_widget = urwid.Overlay(
                urwid.LineBox(dialog),
                self.original_widget,
                align=getattr(dialog, 'align', 'center'),
                width=getattr(dialog, 'width', ('relative', 99)),
                valign=getattr(dialog, 'valign', 'middle'),
                height=getattr(dialog, 'height', 'pack'),
            )
            app.draw_screen()

    def close_dialog(self):
        """Remove the current dialog (if any) and restore the base widget."""
        if self.dialog:
            self.original_widget = self.widget
            self.dialog = None
            app.draw_screen()

    def keypress(self, size, event):
        # Try to consume the event ourselves first; otherwise forward it
        # to the wrapped widget.
        if not self.handle_event(event):
            return self.original_widget.keypress(size, event)
        return super(Pane, self).keypress(size, event)

    def handle_event(self, event):
        """Consume events this pane understands; return unhandled events."""
        if event == 'close-dialog':
            self.close_dialog()
        else:
            return event

    def get_help_dialog(self):
        return HelpDialog()
| [
"console.app.app.draw_screen",
"console.widgets.help.HelpDialog",
"urwid.WidgetPlaceholder.__init__",
"urwid.SolidFill",
"urwid.LineBox"
] | [((227, 247), 'urwid.SolidFill', 'urwid.SolidFill', (['""" """'], {}), "(' ')\n", (242, 247), False, 'import urwid\n'), ((258, 304), 'urwid.WidgetPlaceholder.__init__', 'urwid.WidgetPlaceholder.__init__', (['self', 'widget'], {}), '(self, widget)\n', (290, 304), False, 'import urwid\n'), ((1418, 1430), 'console.widgets.help.HelpDialog', 'HelpDialog', ([], {}), '()\n', (1428, 1430), False, 'from console.widgets.help import HelpDialog\n'), ((853, 870), 'console.app.app.draw_screen', 'app.draw_screen', ([], {}), '()\n', (868, 870), False, 'from console.app import app\n'), ((1014, 1031), 'console.app.app.draw_screen', 'app.draw_screen', ([], {}), '()\n', (1029, 1031), False, 'from console.app import app\n'), ((524, 545), 'urwid.LineBox', 'urwid.LineBox', (['dialog'], {}), '(dialog)\n', (537, 545), False, 'import urwid\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-17 13:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the articles app.

    Creates Article, Author, Book, Carousel, Category, Chapter, Column,
    Nav and News, then wires up the Article/Book foreign keys.
    NOTE(review): several DateTimeFields use default=False, which is an odd
    default for a datetime — left exactly as generated; confirm intent.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        # Article: blog post with tags, counters, top/rank ordering and a
        # status/publication workflow.
        migrations.CreateModel(
            name='Article',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, verbose_name='\u6807\u9898')),
                ('en_title', models.CharField(max_length=100, verbose_name='\u82f1\u6587\u6807\u9898')),
                ('img', models.CharField(default='/static/img/article/default.jpg', max_length=200)),
                ('tags', models.CharField(blank=True, help_text='\u7528\u9017\u53f7\u5206\u9694', max_length=200, null=True, verbose_name='\u6807\u7b7e')),
                ('summary', models.TextField(verbose_name='\u6458\u8981')),
                ('content', models.TextField(verbose_name='\u6b63\u6587')),
                ('view_times', models.IntegerField(default=0)),
                ('zan_times', models.IntegerField(default=0)),
                ('is_top', models.BooleanField(default=False, verbose_name='\u7f6e\u9876')),
                ('rank', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')),
                ('status', models.IntegerField(choices=[(0, '\u6b63\u5e38'), (1, '\u8349\u7a3f'), (2, '\u5220\u9664')], default=0, verbose_name='\u72b6\u6001')),
                ('pub_time', models.DateTimeField(default=False, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='\u66f4\u65b0\u65f6\u95f4')),
            ],
            options={
                'ordering': ['rank', '-is_top', '-pub_time', '-create_time'],
                'verbose_name': '\u6587\u7ae0',
                'verbose_name_plural': '\u6587\u7ae0',
            },
        ),
        # Author: name only; referenced by Book and Article.
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, verbose_name='\u540d\u79f0')),
            ],
        ),
        # Book: mirrors Article's metadata fields (minus content) plus an
        # Author foreign key; its Category FK is added below.
        migrations.CreateModel(
            name='Book',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, verbose_name='\u6807\u9898')),
                ('en_title', models.CharField(max_length=100, verbose_name='\u82f1\u6587\u6807\u9898')),
                ('img', models.CharField(default='/static/img/article/default.jpg', max_length=200)),
                ('tags', models.CharField(blank=True, help_text='\u7528\u9017\u53f7\u5206\u9694', max_length=200, null=True, verbose_name='\u6807\u7b7e')),
                ('summary', models.TextField(verbose_name='\u6458\u8981')),
                ('view_times', models.IntegerField(default=0)),
                ('zan_times', models.IntegerField(default=0)),
                ('is_top', models.BooleanField(default=False, verbose_name='\u7f6e\u9876')),
                ('rank', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')),
                ('status', models.IntegerField(choices=[(0, '\u6b63\u5e38'), (1, '\u8349\u7a3f'), (2, '\u5220\u9664')], default=0, verbose_name='\u72b6\u6001')),
                ('pub_time', models.DateTimeField(default=False, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='\u66f4\u65b0\u65f6\u95f4')),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='articles.Author', verbose_name='\u4f5c\u8005')),
            ],
            options={
                'ordering': ['rank', '-is_top', '-pub_time', '-create_time'],
                'verbose_name': '\u4e66\u7c4d',
                'verbose_name_plural': '\u4e66\u7c4d',
            },
        ),
        # Carousel: home-page slide pointing at an Article.
        migrations.CreateModel(
            name='Carousel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, verbose_name='\u6807\u9898')),
                ('summary', models.TextField(blank=True, null=True, verbose_name='\u6458\u8981')),
                ('img', models.CharField(default='/static/img/carousel/default.jpg', max_length=200, verbose_name='\u8f6e\u64ad\u56fe\u7247')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')),
                ('article', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='articles.Article', verbose_name='\u6587\u7ae0')),
            ],
            options={
                'ordering': ['-create_time'],
                'verbose_name': '\u8f6e\u64ad',
                'verbose_name_plural': '\u8f6e\u64ad',
            },
        ),
        # Category: hierarchical via nullable self-referencing parent FK.
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=40, verbose_name='\u540d\u79f0')),
                ('rank', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')),
                ('status', models.IntegerField(choices=[(0, '\u6b63\u5e38'), (1, '\u8349\u7a3f'), (2, '\u5220\u9664')], default=0, verbose_name='\u72b6\u6001')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')),
                ('parent', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='articles.Category', verbose_name='\u4e0a\u7ea7\u5206\u7c7b')),
            ],
            options={
                'ordering': ['rank', '-create_time'],
                'verbose_name': '\u5206\u7c7b',
                'verbose_name_plural': '\u5206\u7c7b',
            },
        ),
        # Chapter: a content unit of a Book.
        migrations.CreateModel(
            name='Chapter',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, verbose_name='\u6807\u9898')),
                ('en_title', models.CharField(max_length=100, verbose_name='\u82f1\u6587\u6807\u9898')),
                ('img', models.CharField(default='/static/img/article/default.jpg', max_length=200)),
                ('tags', models.CharField(blank=True, help_text='\u7528\u9017\u53f7\u5206\u9694', max_length=200, null=True, verbose_name='\u6807\u7b7e')),
                ('summary', models.TextField(verbose_name='\u6458\u8981')),
                ('content', models.TextField(verbose_name='\u6b63\u6587')),
                ('view_times', models.IntegerField(default=0)),
                ('zan_times', models.IntegerField(default=0)),
                ('rank', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')),
                ('status', models.IntegerField(choices=[(0, '\u6b63\u5e38'), (1, '\u8349\u7a3f'), (2, '\u5220\u9664')], default=0, verbose_name='\u72b6\u6001')),
                ('pub_time', models.DateTimeField(default=False, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='\u66f4\u65b0\u65f6\u95f4')),
                ('book', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='articles.Book', verbose_name='\u4e66\u540d')),
            ],
            options={
                'ordering': ['rank', '-pub_time', '-create_time'],
                'verbose_name': '\u7ae0\u8282',
                'verbose_name_plural': '\u7ae0\u8282',
            },
        ),
        # Column: a curated many-to-many collection of Articles.
        migrations.CreateModel(
            name='Column',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=40, verbose_name='\u4e13\u680f\u5185\u5bb9')),
                ('summary', models.TextField(verbose_name='\u4e13\u680f\u6458\u8981')),
                ('status', models.IntegerField(choices=[(0, '\u6b63\u5e38'), (1, '\u8349\u7a3f'), (2, '\u5220\u9664')], default=0, verbose_name='\u72b6\u6001')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')),
                ('article', models.ManyToManyField(to='articles.Article', verbose_name='\u6587\u7ae0')),
            ],
            options={
                'ordering': ['-create_time'],
                'verbose_name': '\u4e13\u680f',
                'verbose_name_plural': '\u4e13\u680f',
            },
        ),
        # Nav: a navigation-bar entry with an optional target URL.
        migrations.CreateModel(
            name='Nav',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=40, verbose_name='\u5bfc\u822a\u6761\u5185\u5bb9')),
                ('url', models.CharField(blank=True, max_length=200, null=True, verbose_name='\u6307\u5411\u5730\u5740')),
                ('status', models.IntegerField(choices=[(0, '\u6b63\u5e38'), (1, '\u8349\u7a3f'), (2, '\u5220\u9664')], default=0, verbose_name='\u72b6\u6001')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')),
            ],
            options={
                'ordering': ['-create_time'],
                'verbose_name': '\u5bfc\u822a\u6761',
                'verbose_name_plural': '\u5bfc\u822a\u6761',
            },
        ),
        # News: aggregated item from an external source (see news_from
        # choices), linking out via url.
        migrations.CreateModel(
            name='News',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, verbose_name='\u6807\u9898')),
                ('summary', models.TextField(verbose_name='\u6458\u8981')),
                ('news_from', models.IntegerField(choices=[(0, 'oschina'), (1, 'chiphell'), (2, 'freebuf'), (3, 'cnBeta')], default=0, verbose_name='\u6765\u6e90')),
                ('url', models.CharField(max_length=200, verbose_name='\u6e90\u5730\u5740')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')),
                ('pub_time', models.DateTimeField(default=False, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
            ],
            options={
                'ordering': ['-title'],
                'verbose_name': '\u8d44\u8baf',
                'verbose_name_plural': '\u8d44\u8baf',
            },
        ),
        # Foreign keys added after all models exist to avoid forward
        # references within this migration.
        migrations.AddField(
            model_name='book',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='articles.Category', verbose_name='\u5206\u7c7b'),
        ),
        migrations.AddField(
            model_name='article',
            name='author',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='articles.Author', verbose_name='\u4f5c\u8005'),
        ),
        migrations.AddField(
            model_name='article',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='articles.Category', verbose_name='\u5206\u7c7b'),
        ),
    ]
| [
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((11368, 11478), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""articles.Category"""', 'verbose_name': '"""分类"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'articles.Category', verbose_name='分类')\n", (11385, 11478), False, 'from django.db import migrations, models\n'), ((11604, 11712), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""articles.Author"""', 'verbose_name': '"""作者"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'articles.Author', verbose_name='作者')\n", (11621, 11712), False, 'from django.db import migrations, models\n'), ((11840, 11950), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""articles.Category"""', 'verbose_name': '"""分类"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'articles.Category', verbose_name='分类')\n", (11857, 11950), False, 'from django.db import migrations, models\n'), ((400, 493), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (416, 493), False, 'from django.db import migrations, models\n'), ((518, 569), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""标题"""'}), "(max_length=100, verbose_name='标题')\n", (534, 569), False, 'from django.db import migrations, models\n'), ((611, 664), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""英文标题"""'}), "(max_length=100, verbose_name='英文标题')\n", (627, 664), False, 'from django.db import migrations, models\n'), ((711, 786), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""/static/img/article/default.jpg"""', 'max_length': '(200)'}), 
"(default='/static/img/article/default.jpg', max_length=200)\n", (727, 786), False, 'from django.db import migrations, models\n'), ((814, 911), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""用逗号分隔"""', 'max_length': '(200)', 'null': '(True)', 'verbose_name': '"""标签"""'}), "(blank=True, help_text='用逗号分隔', max_length=200, null=True,\n verbose_name='标签')\n", (830, 911), False, 'from django.db import migrations, models\n'), ((973, 1008), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""摘要"""'}), "(verbose_name='摘要')\n", (989, 1008), False, 'from django.db import migrations, models\n'), ((1049, 1084), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""正文"""'}), "(verbose_name='正文')\n", (1065, 1084), False, 'from django.db import migrations, models\n'), ((1128, 1158), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1147, 1158), False, 'from django.db import migrations, models\n'), ((1191, 1221), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1210, 1221), False, 'from django.db import migrations, models\n'), ((1251, 1304), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""置顶"""'}), "(default=False, verbose_name='置顶')\n", (1270, 1304), False, 'from django.db import migrations, models\n'), ((1342, 1391), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'verbose_name': '"""排序"""'}), "(default=0, verbose_name='排序')\n", (1361, 1391), False, 'from django.db import migrations, models\n'), ((1431, 1527), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(0, '正常'), (1, '草稿'), (2, '删除')]", 'default': '(0)', 'verbose_name': '"""状态"""'}), "(choices=[(0, '正常'), (1, '草稿'), (2, '删除')], default=0,\n verbose_name='状态')\n", (1450, 1527), False, 'from django.db import migrations, 
models\n'), ((1595, 1651), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': '(False)', 'verbose_name': '"""发布时间"""'}), "(default=False, verbose_name='发布时间')\n", (1615, 1651), False, 'from django.db import migrations, models\n'), ((1706, 1766), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (1726, 1766), False, 'from django.db import migrations, models\n'), ((1821, 1877), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""更新时间"""'}), "(auto_now=True, verbose_name='更新时间')\n", (1841, 1877), False, 'from django.db import migrations, models\n'), ((2247, 2340), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2263, 2340), False, 'from django.db import migrations, models\n'), ((2364, 2415), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""名称"""'}), "(max_length=100, verbose_name='名称')\n", (2380, 2415), False, 'from django.db import migrations, models\n'), ((2555, 2648), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2571, 2648), False, 'from django.db import migrations, models\n'), ((2673, 2724), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""标题"""'}), "(max_length=100, verbose_name='标题')\n", (2689, 2724), False, 'from django.db import migrations, models\n'), ((2766, 2819), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""英文标题"""'}), 
"(max_length=100, verbose_name='英文标题')\n", (2782, 2819), False, 'from django.db import migrations, models\n'), ((2866, 2941), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""/static/img/article/default.jpg"""', 'max_length': '(200)'}), "(default='/static/img/article/default.jpg', max_length=200)\n", (2882, 2941), False, 'from django.db import migrations, models\n'), ((2969, 3066), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""用逗号分隔"""', 'max_length': '(200)', 'null': '(True)', 'verbose_name': '"""标签"""'}), "(blank=True, help_text='用逗号分隔', max_length=200, null=True,\n verbose_name='标签')\n", (2985, 3066), False, 'from django.db import migrations, models\n'), ((3128, 3163), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""摘要"""'}), "(verbose_name='摘要')\n", (3144, 3163), False, 'from django.db import migrations, models\n'), ((3207, 3237), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (3226, 3237), False, 'from django.db import migrations, models\n'), ((3270, 3300), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (3289, 3300), False, 'from django.db import migrations, models\n'), ((3330, 3383), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""置顶"""'}), "(default=False, verbose_name='置顶')\n", (3349, 3383), False, 'from django.db import migrations, models\n'), ((3421, 3470), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'verbose_name': '"""排序"""'}), "(default=0, verbose_name='排序')\n", (3440, 3470), False, 'from django.db import migrations, models\n'), ((3510, 3606), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(0, '正常'), (1, '草稿'), (2, '删除')]", 'default': '(0)', 'verbose_name': '"""状态"""'}), "(choices=[(0, '正常'), (1, '草稿'), (2, '删除')], default=0,\n 
verbose_name='状态')\n", (3529, 3606), False, 'from django.db import migrations, models\n'), ((3674, 3730), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': '(False)', 'verbose_name': '"""发布时间"""'}), "(default=False, verbose_name='发布时间')\n", (3694, 3730), False, 'from django.db import migrations, models\n'), ((3785, 3845), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (3805, 3845), False, 'from django.db import migrations, models\n'), ((3900, 3956), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""更新时间"""'}), "(auto_now=True, verbose_name='更新时间')\n", (3920, 3956), False, 'from django.db import migrations, models\n'), ((4006, 4114), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""articles.Author"""', 'verbose_name': '"""作者"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'articles.Author', verbose_name='作者')\n", (4023, 4114), False, 'from django.db import migrations, models\n'), ((4471, 4564), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (4487, 4564), False, 'from django.db import migrations, models\n'), ((4589, 4640), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""标题"""'}), "(max_length=100, verbose_name='标题')\n", (4605, 4640), False, 'from django.db import migrations, models\n'), ((4681, 4739), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""摘要"""'}), "(blank=True, null=True, verbose_name='摘要')\n", (4697, 4739), False, 'from django.db import migrations, models\n'), ((4776, 
4877), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""/static/img/carousel/default.jpg"""', 'max_length': '(200)', 'verbose_name': '"""轮播图片"""'}), "(default='/static/img/carousel/default.jpg', max_length=200,\n verbose_name='轮播图片')\n", (4792, 4877), False, 'from django.db import migrations, models\n'), ((4928, 4988), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (4948, 4988), False, 'from django.db import migrations, models\n'), ((5039, 5148), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""articles.Article"""', 'verbose_name': '"""文章"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'articles.Article', verbose_name='文章')\n", (5056, 5148), False, 'from django.db import migrations, models\n'), ((5473, 5566), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (5489, 5566), False, 'from django.db import migrations, models\n'), ((5590, 5640), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'verbose_name': '"""名称"""'}), "(max_length=40, verbose_name='名称')\n", (5606, 5640), False, 'from django.db import migrations, models\n'), ((5678, 5727), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'verbose_name': '"""排序"""'}), "(default=0, verbose_name='排序')\n", (5697, 5727), False, 'from django.db import migrations, models\n'), ((5767, 5863), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(0, '正常'), (1, '草稿'), (2, '删除')]", 'default': '(0)', 'verbose_name': '"""状态"""'}), "(choices=[(0, '正常'), (1, '草稿'), (2, '删除')], default=0,\n verbose_name='状态')\n", (5786, 5863), False, 'from 
django.db import migrations, models\n'), ((5934, 5994), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (5954, 5994), False, 'from django.db import migrations, models\n'), ((6044, 6193), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'default': 'None', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""articles.Category"""', 'verbose_name': '"""上级分类"""'}), "(blank=True, default=None, null=True, on_delete=django.db.\n models.deletion.CASCADE, to='articles.Category', verbose_name='上级分类')\n", (6061, 6193), False, 'from django.db import migrations, models\n'), ((6535, 6628), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (6551, 6628), False, 'from django.db import migrations, models\n'), ((6653, 6704), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""标题"""'}), "(max_length=100, verbose_name='标题')\n", (6669, 6704), False, 'from django.db import migrations, models\n'), ((6746, 6799), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""英文标题"""'}), "(max_length=100, verbose_name='英文标题')\n", (6762, 6799), False, 'from django.db import migrations, models\n'), ((6846, 6921), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""/static/img/article/default.jpg"""', 'max_length': '(200)'}), "(default='/static/img/article/default.jpg', max_length=200)\n", (6862, 6921), False, 'from django.db import migrations, models\n'), ((6949, 7046), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""用逗号分隔"""', 'max_length': '(200)', 'null': '(True)', 'verbose_name': 
'"""标签"""'}), "(blank=True, help_text='用逗号分隔', max_length=200, null=True,\n verbose_name='标签')\n", (6965, 7046), False, 'from django.db import migrations, models\n'), ((7108, 7143), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""摘要"""'}), "(verbose_name='摘要')\n", (7124, 7143), False, 'from django.db import migrations, models\n'), ((7184, 7219), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""正文"""'}), "(verbose_name='正文')\n", (7200, 7219), False, 'from django.db import migrations, models\n'), ((7263, 7293), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (7282, 7293), False, 'from django.db import migrations, models\n'), ((7326, 7356), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (7345, 7356), False, 'from django.db import migrations, models\n'), ((7384, 7433), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'verbose_name': '"""排序"""'}), "(default=0, verbose_name='排序')\n", (7403, 7433), False, 'from django.db import migrations, models\n'), ((7473, 7569), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(0, '正常'), (1, '草稿'), (2, '删除')]", 'default': '(0)', 'verbose_name': '"""状态"""'}), "(choices=[(0, '正常'), (1, '草稿'), (2, '删除')], default=0,\n verbose_name='状态')\n", (7492, 7569), False, 'from django.db import migrations, models\n'), ((7637, 7693), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': '(False)', 'verbose_name': '"""发布时间"""'}), "(default=False, verbose_name='发布时间')\n", (7657, 7693), False, 'from django.db import migrations, models\n'), ((7748, 7808), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (7768, 7808), False, 'from django.db import migrations, models\n'), ((7863, 7919), 
'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""更新时间"""'}), "(auto_now=True, verbose_name='更新时间')\n", (7883, 7919), False, 'from django.db import migrations, models\n'), ((7967, 8073), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""articles.Book"""', 'verbose_name': '"""书名"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'articles.Book', verbose_name='书名')\n", (7984, 8073), False, 'from django.db import migrations, models\n'), ((8417, 8510), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (8433, 8510), False, 'from django.db import migrations, models\n'), ((8534, 8586), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'verbose_name': '"""专栏内容"""'}), "(max_length=40, verbose_name='专栏内容')\n", (8550, 8586), False, 'from django.db import migrations, models\n'), ((8637, 8674), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""专栏摘要"""'}), "(verbose_name='专栏摘要')\n", (8653, 8674), False, 'from django.db import migrations, models\n'), ((8724, 8820), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(0, '正常'), (1, '草稿'), (2, '删除')]", 'default': '(0)', 'verbose_name': '"""状态"""'}), "(choices=[(0, '正常'), (1, '草稿'), (2, '删除')], default=0,\n verbose_name='状态')\n", (8743, 8820), False, 'from django.db import migrations, models\n'), ((8891, 8951), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (8911, 8951), False, 'from django.db import migrations, models\n'), ((9002, 9066), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': 
'"""articles.Article"""', 'verbose_name': '"""文章"""'}), "(to='articles.Article', verbose_name='文章')\n", (9024, 9066), False, 'from django.db import migrations, models\n'), ((9391, 9484), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (9407, 9484), False, 'from django.db import migrations, models\n'), ((9508, 9561), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'verbose_name': '"""导航条内容"""'}), "(max_length=40, verbose_name='导航条内容')\n", (9524, 9561), False, 'from django.db import migrations, models\n'), ((9613, 9689), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(200)', 'null': '(True)', 'verbose_name': '"""指向地址"""'}), "(blank=True, max_length=200, null=True, verbose_name='指向地址')\n", (9629, 9689), False, 'from django.db import migrations, models\n'), ((9739, 9835), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(0, '正常'), (1, '草稿'), (2, '删除')]", 'default': '(0)', 'verbose_name': '"""状态"""'}), "(choices=[(0, '正常'), (1, '草稿'), (2, '删除')], default=0,\n verbose_name='状态')\n", (9758, 9835), False, 'from django.db import migrations, models\n'), ((9906, 9966), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (9926, 9966), False, 'from django.db import migrations, models\n'), ((10314, 10407), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (10330, 10407), False, 'from django.db import migrations, models\n'), ((10432, 10483), 'django.db.models.CharField', 'models.CharField', 
([], {'max_length': '(100)', 'verbose_name': '"""标题"""'}), "(max_length=100, verbose_name='标题')\n", (10448, 10483), False, 'from django.db import migrations, models\n'), ((10524, 10559), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""摘要"""'}), "(verbose_name='摘要')\n", (10540, 10559), False, 'from django.db import migrations, models\n'), ((10602, 10730), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(0, 'oschina'), (1, 'chiphell'), (2, 'freebuf'), (3, 'cnBeta')]", 'default': '(0)', 'verbose_name': '"""来源"""'}), "(choices=[(0, 'oschina'), (1, 'chiphell'), (2, 'freebuf'\n ), (3, 'cnBeta')], default=0, verbose_name='来源')\n", (10621, 10730), False, 'from django.db import migrations, models\n'), ((10762, 10814), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""源地址"""'}), "(max_length=200, verbose_name='源地址')\n", (10778, 10814), False, 'from django.db import migrations, models\n'), ((10864, 10924), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (10884, 10924), False, 'from django.db import migrations, models\n'), ((10976, 11032), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': '(False)', 'verbose_name': '"""发布时间"""'}), "(default=False, verbose_name='发布时间')\n", (10996, 11032), False, 'from django.db import migrations, models\n')] |
#------testing the trained model and ensemble weights on the test data to get the final accuracy
#importing required libraries and modules
import os
import sys
import cv2
import numpy as np
from preprocess import Preprocess
from data_split import Load
from conv_net import CNN
from ensemble import Ensemble
def load_numpy_data(arg, folder):
    """Load the test split (.npy files) for one feature.

    :param arg: Feature name (e.g. 'mono'), i.e. the sub-directory under
        ``bin/<folder>/`` that holds the saved arrays.
    :param folder: Data-size folder name (e.g. 'full').
    :return: Tuple ``(X_test, Y_test)``, where ``X_test`` is a list of
        128x431 input arrays and ``Y_test`` is a list of 15-element label
        vectors.
    """
    # Load and reshape directly; the former round-trip through
    # list -> np.array -> list was redundant.
    X_test = list(
        np.load('bin/' + folder + '/' + arg + '/X_test.npy').reshape(-1, 128, 431)
    )
    Y_test = list(
        np.load('bin/' + folder + '/' + arg + '/Y_test.npy').reshape(-1, 15)
    )
    return X_test, Y_test
def predict_test(arg, X_train, X_val, X_test, Y_train, Y_val, Y_test):
    """Restore the trained CNN for feature ``arg``, fit its per-classifier
    SVR ensemble member on the validation split, and evaluate on the test
    split.

    NOTE(review): X_train/Y_train are accepted for interface symmetry but
    are not used by this function.
    """
    network = CNN()
    conv_model = network.create_1ConvModel()
    # Restore the weights saved for the 'full'-data run of this feature.
    conv_model.load('DNN/full/' + arg + '.model')
    # Fit the SVR used to weight this classifier within the ensemble.
    Ensemble().regressor(arg, conv_model, X_val[0], Y_val[0])
    network.predict_test_data(arg, conv_model, X_test[0], Y_test[0])
if __name__ == '__main__':
    # All the features used in the architecture; extend this list to add more.
    feature = ['mono', 'left', 'right', 'mid', 'side', 'harmonic', 'percussive', 'mfcc']
    X_test = [0 for i in range(len(feature))]
    Y_test = [0 for i in range(len(feature))]
    # Iterate over every feature (was a hard-coded range(8), which would
    # silently skip any feature appended to the list above).
    for i in range(len(feature)):
        X_test[i], Y_test[i] = load_numpy_data(feature[i], 'full')
    en = Ensemble()
    # Uncomment whichever method you want to use in your ensemble
    # (SVR or majority voting).
    acc = en.result_SVR(X_test, Y_test)
    #acc = en.result_majority_voting(X_test, Y_test)
    print("Ensemble Test Accuracy =", acc, '%')
| [
"numpy.array",
"numpy.load",
"conv_net.CNN",
"ensemble.Ensemble"
] | [((865, 870), 'conv_net.CNN', 'CNN', ([], {}), '()\n', (868, 870), False, 'from conv_net import CNN\n'), ((1051, 1061), 'ensemble.Ensemble', 'Ensemble', ([], {}), '()\n', (1059, 1061), False, 'from ensemble import Ensemble\n'), ((1526, 1536), 'ensemble.Ensemble', 'Ensemble', ([], {}), '()\n', (1534, 1536), False, 'from ensemble import Ensemble\n'), ((444, 496), 'numpy.load', 'np.load', (["('bin/' + folder + '/' + arg + '/X_test.npy')"], {}), "('bin/' + folder + '/' + arg + '/X_test.npy')\n", (451, 496), True, 'import numpy as np\n'), ((506, 558), 'numpy.load', 'np.load', (["('bin/' + folder + '/' + arg + '/Y_test.npy')"], {}), "('bin/' + folder + '/' + arg + '/Y_test.npy')\n", (513, 558), True, 'import numpy as np\n'), ((570, 586), 'numpy.array', 'np.array', (['X_test'], {}), '(X_test)\n', (578, 586), True, 'import numpy as np\n'), ((626, 642), 'numpy.array', 'np.array', (['Y_test'], {}), '(Y_test)\n', (634, 642), True, 'import numpy as np\n')] |
from dynaconf import FlaskDynaconf
from flask import Flask
def create_app(**config):
    """Application factory: build a Flask app whose configuration is
    managed by Dynaconf (settings files + environment variables).

    :param config: Explicit settings that override anything Dynaconf loads.
    :return: Configured :class:`flask.Flask` instance.
    """
    flask_app = Flask(__name__)
    # Attach Dynaconf so settings files drive app.config.
    FlaskDynaconf(flask_app)
    # Initialise the extensions listed under EXTENSIONS in settings.toml.
    flask_app.config.load_extensions('EXTENSIONS')
    # Keyword overrides passed by the caller win over file-based settings.
    flask_app.config.update(config)
    return flask_app
def create_app_wsgi():
    """Zero-argument WSGI entry point.

    Flask's CLI cannot forward ``**config`` to a factory
    (https://github.com/pallets/flask/issues/4170), so this wrapper simply
    delegates to :func:`create_app` with no overrides.
    """
    return create_app()
| [
"dynaconf.FlaskDynaconf",
"flask.Flask"
] | [((97, 112), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (102, 112), False, 'from flask import Flask\n'), ((117, 135), 'dynaconf.FlaskDynaconf', 'FlaskDynaconf', (['app'], {}), '(app)\n', (130, 135), False, 'from dynaconf import FlaskDynaconf\n')] |
"""Plots GridRad domains.
Specifically, plots number of convective days with GridRad data at each grid
point.
"""
import os.path
import argparse
import numpy
import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot
from mpl_toolkits.basemap import Basemap
from gewittergefahr.gg_io import gridrad_io
from gewittergefahr.gg_utils import grids
from gewittergefahr.gg_utils import projections
from gewittergefahr.gg_utils import radar_utils
from gewittergefahr.gg_utils import time_conversion
from gewittergefahr.gg_utils import time_periods
from gewittergefahr.gg_utils import file_system_utils
from gewittergefahr.plotting import plotting_utils
TOLERANCE = 1e-6
SEPARATOR_STRING = '\n\n' + '*' * 50 + '\n\n'
# Time step (seconds) between consecutive GridRad files searched for.
TIME_INTERVAL_SEC = 300
# Lat/long bounding box (deg N / deg E) of the overall plotting grid.
OVERALL_MIN_LATITUDE_DEG = 20.
OVERALL_MAX_LATITUDE_DEG = 55.
OVERALL_MIN_LONGITUDE_DEG = 230.
OVERALL_MAX_LONGITUDE_DEG = 300.
# Proj.4 identifier expected for the grid projection (Lambert conformal conic).
LAMBERT_CONFORMAL_STRING = 'lcc'
# Map decoration: number of lat/long grid lines; Basemap boundary
# resolution ('l' = low); black borders (RGB all zeros).
NUM_PARALLELS = 8
NUM_MERIDIANS = 6
RESOLUTION_STRING = 'l'
BORDER_COLOUR = numpy.full(3, 0.)
# Output-figure settings.
FIGURE_WIDTH_INCHES = 15
FIGURE_HEIGHT_INCHES = 15
FIGURE_RESOLUTION_DPI = 300
# Names of command-line input arguments.
INPUT_DIR_ARG_NAME = 'input_gridrad_dir_name'
FIRST_DATE_ARG_NAME = 'first_spc_date_string'
LAST_DATE_ARG_NAME = 'last_spc_date_string'
COLOUR_MAP_ARG_NAME = 'colour_map_name'
GRID_SPACING_ARG_NAME = 'grid_spacing_metres'
OUTPUT_FILE_ARG_NAME = 'output_file_name'
INPUT_DIR_HELP_STRING = (
    'Name of top-level input directory. GridRad files therein will be found by'
    ' `gridrad_io.find_file` and read by '
    '`gridrad_io.read_field_from_full_grid_file`.')
SPC_DATE_HELP_STRING = (
    'SPC date or convective day (format "yyyymmdd"). This script will look for'
    ' GridRad files in the period `{0:s}`...`{1:s}`.'
).format(FIRST_DATE_ARG_NAME, LAST_DATE_ARG_NAME)
COLOUR_MAP_HELP_STRING = (
    'Name of colour scheme for gridded plot (must be accepted by '
    '`pyplot.get_cmap`).')
GRID_SPACING_HELP_STRING = 'Spacing (metres) of Lambert conformal grid.'
OUTPUT_FILE_HELP_STRING = 'Path to output file. Figure will be saved here.'
# Argument parser; the help strings above double as user documentation.
INPUT_ARG_PARSER = argparse.ArgumentParser()
INPUT_ARG_PARSER.add_argument(
    '--' + INPUT_DIR_ARG_NAME, type=str, required=True,
    help=INPUT_DIR_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
    '--' + FIRST_DATE_ARG_NAME, type=str, required=True,
    help=SPC_DATE_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
    '--' + LAST_DATE_ARG_NAME, type=str, required=True,
    help=SPC_DATE_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
    '--' + COLOUR_MAP_ARG_NAME, type=str, required=False, default='YlOrRd',
    help=COLOUR_MAP_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
    '--' + GRID_SPACING_ARG_NAME, type=float, required=False, default=1e5,
    help=GRID_SPACING_HELP_STRING)
INPUT_ARG_PARSER.add_argument(
    '--' + OUTPUT_FILE_ARG_NAME, type=str, required=True,
    help=OUTPUT_FILE_HELP_STRING)
def _get_domain_one_file(gridrad_file_name):
    """Returns spatial domain for one file.

    :param gridrad_file_name: Path to input file.
    :return: domain_limits_deg: length-4 numpy array with
        [min latitude, max latitude, min longitude, max longitude].
        Latitudes are in deg N, and longitudes are in deg E.
    """
    print('Reading metadata from: "{0:s}"...'.format(gridrad_file_name))
    this_metadata_dict = gridrad_io.read_metadata_from_full_grid_file(
        gridrad_file_name)

    # The GridRad header stores the north-west corner; derive the opposite
    # corner from grid size and spacing.
    top_latitude_deg = this_metadata_dict[radar_utils.NW_GRID_POINT_LAT_COLUMN]
    left_longitude_deg = this_metadata_dict[
        radar_utils.NW_GRID_POINT_LNG_COLUMN]

    bottom_latitude_deg = top_latitude_deg - (
        (this_metadata_dict[radar_utils.NUM_LAT_COLUMN] - 1) *
        this_metadata_dict[radar_utils.LAT_SPACING_COLUMN]
    )
    right_longitude_deg = left_longitude_deg + (
        (this_metadata_dict[radar_utils.NUM_LNG_COLUMN] - 1) *
        this_metadata_dict[radar_utils.LNG_SPACING_COLUMN]
    )

    return numpy.array([
        bottom_latitude_deg, top_latitude_deg, left_longitude_deg,
        right_longitude_deg
    ])
def _get_lcc_params(projection_object):
    """Finds parameters for LCC (Lambert conformal conic) projection.

    :param projection_object: Instance of `pyproj.Proj`.
    :return: standard_latitudes_deg: length-2 numpy array of standard latitudes
        (deg N).
    :return: central_longitude_deg: Central longitude (deg E).
    :raises: ValueError: if projection is not LCC.
    """
    # The srs string looks like "+proj=lcc +lat_1=... +lat_2=... +lon_0=...".
    # Strip the leading "+" from each key and build a lookup table.
    projection_dict = {}
    for token in projection_object.srs.split():
        pieces = token.split('=')
        projection_dict[pieces[0][1:]] = pieces[1]

    if projection_dict['proj'] != LAMBERT_CONFORMAL_STRING:
        error_string = 'Grid projection should be "{0:s}", not "{1:s}".'.format(
            LAMBERT_CONFORMAL_STRING, projection_dict['proj']
        )
        raise ValueError(error_string)

    central_longitude_deg = float(projection_dict['lon_0'])
    standard_latitudes_deg = numpy.array([
        float(projection_dict['lat_1']), float(projection_dict['lat_2'])
    ])

    return standard_latitudes_deg, central_longitude_deg
def _get_basemap(grid_metadata_dict):
    """Creates basemap.

    M = number of rows in grid
    N = number of columns in grid

    :param grid_metadata_dict: Dictionary created by
        `grids.create_equidistant_grid`.
    :return: basemap_object: Basemap handle (instance of
        `mpl_toolkits.basemap.Basemap`).
    :return: basemap_x_matrix_metres: M-by-N numpy array of x-coordinates under
        Basemap projection (different than pyproj projection).
    :return: basemap_y_matrix_metres: Same but for y-coordinates.
    """
    projection_object = grid_metadata_dict[grids.PROJECTION_KEY]
    standard_latitudes_deg, central_longitude_deg = _get_lcc_params(
        projection_object)

    x_matrix_metres, y_matrix_metres = grids.xy_vectors_to_matrices(
        x_unique_metres=grid_metadata_dict[grids.X_COORDS_KEY],
        y_unique_metres=grid_metadata_dict[grids.Y_COORDS_KEY]
    )
    latitude_matrix_deg, longitude_matrix_deg = (
        projections.project_xy_to_latlng(
            x_coords_metres=x_matrix_metres, y_coords_metres=y_matrix_metres,
            projection_object=projection_object)
    )

    # Basemap needs the projection spelled out explicitly, plus the
    # lower-left and upper-right corners of the grid.
    basemap_object = Basemap(
        projection='lcc', lat_1=standard_latitudes_deg[0],
        lat_2=standard_latitudes_deg[1], lon_0=central_longitude_deg,
        rsphere=projections.DEFAULT_EARTH_RADIUS_METRES,
        ellps=projections.SPHERE_NAME, resolution=RESOLUTION_STRING,
        llcrnrx=x_matrix_metres[0, 0], llcrnry=y_matrix_metres[0, 0],
        urcrnrx=x_matrix_metres[-1, -1], urcrnry=y_matrix_metres[-1, -1]
    )

    # Re-project the lat-long points into Basemap's own coordinate system.
    basemap_x_matrix_metres, basemap_y_matrix_metres = basemap_object(
        longitude_matrix_deg, latitude_matrix_deg)

    return basemap_object, basemap_x_matrix_metres, basemap_y_matrix_metres
def _plot_data(num_days_matrix, grid_metadata_dict, colour_map_object):
    """Plots data.
    M = number of rows in grid
    N = number of columns in grid
    :param num_days_matrix: M-by-N numpy array with number of convective days
        for which grid cell is in domain.
    :param grid_metadata_dict: Dictionary created by
        `grids.create_equidistant_grid`.
    :param colour_map_object: See documentation at top of file.
    :return: figure_object: Figure handle (instance of
        `matplotlib.figure.Figure`).
    :return: axes_object: Axes handle (instance of
        `matplotlib.axes._subplots.AxesSubplot`).
    """
    figure_object, axes_object = pyplot.subplots(
        1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES)
    )
    basemap_object, basemap_x_matrix_metres, basemap_y_matrix_metres = (
        _get_basemap(grid_metadata_dict)
    )
    num_grid_rows = num_days_matrix.shape[0]
    num_grid_columns = num_days_matrix.shape[1]
    # Grid spacing inferred from the Basemap-projected corner coordinates
    # (rows/columns are evenly spaced in the equidistant grid).
    x_spacing_metres = (
        (basemap_x_matrix_metres[0, -1] - basemap_x_matrix_metres[0, 0]) /
        (num_grid_columns - 1)
    )
    y_spacing_metres = (
        (basemap_y_matrix_metres[-1, 0] - basemap_y_matrix_metres[0, 0]) /
        (num_grid_rows - 1)
    )
    # pcolormesh needs cell *edges*, not centres; this expands the grid by
    # one row and one column.
    matrix_to_plot, edge_x_coords_metres, edge_y_coords_metres = (
        grids.xy_field_grid_points_to_edges(
            field_matrix=num_days_matrix,
            x_min_metres=basemap_x_matrix_metres[0, 0],
            y_min_metres=basemap_y_matrix_metres[0, 0],
            x_spacing_metres=x_spacing_metres,
            y_spacing_metres=y_spacing_metres)
    )
    # Mask cells never covered by any domain (count == 0) so they are left
    # unshaded.
    matrix_to_plot = numpy.ma.masked_where(matrix_to_plot == 0, matrix_to_plot)
    # Map background: coastlines, national/state borders, lat-long grid.
    plotting_utils.plot_coastlines(
        basemap_object=basemap_object, axes_object=axes_object,
        line_colour=BORDER_COLOUR)
    plotting_utils.plot_countries(
        basemap_object=basemap_object, axes_object=axes_object,
        line_colour=BORDER_COLOUR)
    plotting_utils.plot_states_and_provinces(
        basemap_object=basemap_object, axes_object=axes_object,
        line_colour=BORDER_COLOUR)
    plotting_utils.plot_parallels(
        basemap_object=basemap_object, axes_object=axes_object,
        num_parallels=NUM_PARALLELS)
    plotting_utils.plot_meridians(
        basemap_object=basemap_object, axes_object=axes_object,
        num_meridians=NUM_MERIDIANS)
    # zorder=-1e12 keeps the colour fill underneath the border/grid lines
    # drawn above.
    basemap_object.pcolormesh(
        edge_x_coords_metres, edge_y_coords_metres, matrix_to_plot,
        cmap=colour_map_object, vmin=1, vmax=numpy.max(num_days_matrix),
        shading='flat', edgecolors='None', axes=axes_object, zorder=-1e12)
    colour_bar_object = plotting_utils.plot_linear_colour_bar(
        axes_object_or_matrix=axes_object, data_matrix=num_days_matrix,
        colour_map_object=colour_map_object, min_value=1,
        max_value=numpy.max(num_days_matrix), orientation_string='horizontal',
        extend_min=False, extend_max=False, padding=0.05)
    # Re-label colour-bar ticks as integers (counts of days).
    tick_values = colour_bar_object.get_ticks()
    tick_strings = ['{0:d}'.format(int(numpy.round(v))) for v in tick_values]
    colour_bar_object.set_ticks(tick_values)
    colour_bar_object.set_ticklabels(tick_strings)
    axes_object.set_title('Number of convective days by grid cell')
    return figure_object, axes_object
def _run(top_gridrad_dir_name, first_spc_date_string, last_spc_date_string,
         colour_map_name, grid_spacing_metres, output_file_name):
    """Plots GridRad domains.
    This is effectively the main method.
    :param top_gridrad_dir_name: See documentation at top of file.
    :param first_spc_date_string: Same.
    :param last_spc_date_string: Same.
    :param colour_map_name: Same.
    :param grid_spacing_metres: Same.
    :param output_file_name: Same.
    """
    colour_map_object = pyplot.get_cmap(colour_map_name)
    file_system_utils.mkdir_recursive_if_necessary(file_name=output_file_name)
    # Enumerate all valid times (every TIME_INTERVAL_SEC seconds) in the
    # SPC-date range, plus the SPC date each time belongs to.
    first_time_unix_sec = time_conversion.get_start_of_spc_date(
        first_spc_date_string)
    last_time_unix_sec = time_conversion.get_end_of_spc_date(
        last_spc_date_string)
    valid_times_unix_sec = time_periods.range_and_interval_to_list(
        start_time_unix_sec=first_time_unix_sec,
        end_time_unix_sec=last_time_unix_sec,
        time_interval_sec=TIME_INTERVAL_SEC, include_endpoint=True)
    valid_spc_date_strings = [
        time_conversion.time_to_spc_date_string(t) for t in valid_times_unix_sec
    ]
    # One entry per unique (SPC date, domain) pair found below.
    domain_min_latitudes_deg = []
    domain_max_latitudes_deg = []
    domain_min_longitudes_deg = []
    domain_max_longitudes_deg = []
    # Sentinels guaranteeing the first file read is treated as a new domain.
    prev_domain_limits_deg = numpy.full(4, numpy.nan)
    prev_spc_date_string = 'foo'
    num_times = len(valid_times_unix_sec)
    for i in range(num_times):
        this_gridrad_file_name = gridrad_io.find_file(
            unix_time_sec=valid_times_unix_sec[i],
            top_directory_name=top_gridrad_dir_name,
            raise_error_if_missing=False)
        if not os.path.isfile(this_gridrad_file_name):
            continue
        these_domain_limits_deg = _get_domain_one_file(this_gridrad_file_name)
        # Skip this time step if the domain is unchanged within the same
        # convective day, so each (day, domain) pair is counted only once.
        same_domain = (
            valid_spc_date_strings[i] == prev_spc_date_string and
            numpy.allclose(
                these_domain_limits_deg, prev_domain_limits_deg, TOLERANCE
            )
        )
        if same_domain:
            continue
        # "+ 0." forces a copy, so later comparisons are not aliased to the
        # array appended below.
        prev_domain_limits_deg = these_domain_limits_deg + 0.
        prev_spc_date_string = valid_spc_date_strings[i]
        domain_min_latitudes_deg.append(these_domain_limits_deg[0])
        domain_max_latitudes_deg.append(these_domain_limits_deg[1])
        domain_min_longitudes_deg.append(these_domain_limits_deg[2])
        domain_max_longitudes_deg.append(these_domain_limits_deg[3])
    print(SEPARATOR_STRING)
    domain_min_latitudes_deg = numpy.array(domain_min_latitudes_deg)
    domain_max_latitudes_deg = numpy.array(domain_max_latitudes_deg)
    domain_min_longitudes_deg = numpy.array(domain_min_longitudes_deg)
    domain_max_longitudes_deg = numpy.array(domain_max_longitudes_deg)
    num_domains = len(domain_min_latitudes_deg)
    # Equidistant grid covering the overall lat/long bounding box; each cell
    # will count how many domains contain it.
    grid_metadata_dict = grids.create_equidistant_grid(
        min_latitude_deg=OVERALL_MIN_LATITUDE_DEG,
        max_latitude_deg=OVERALL_MAX_LATITUDE_DEG,
        min_longitude_deg=OVERALL_MIN_LONGITUDE_DEG,
        max_longitude_deg=OVERALL_MAX_LONGITUDE_DEG,
        x_spacing_metres=grid_spacing_metres,
        y_spacing_metres=grid_spacing_metres, azimuthal=False)
    unique_x_coords_metres = grid_metadata_dict[grids.X_COORDS_KEY]
    unique_y_coords_metres = grid_metadata_dict[grids.Y_COORDS_KEY]
    projection_object = grid_metadata_dict[grids.PROJECTION_KEY]
    # Convert grid-point x-y coordinates to lat-long, so each point can be
    # tested against the lat-long domain limits.
    x_coord_matrix_metres, y_coord_matrix_metres = grids.xy_vectors_to_matrices(
        x_unique_metres=unique_x_coords_metres,
        y_unique_metres=unique_y_coords_metres)
    latitude_matrix_deg, longitude_matrix_deg = (
        projections.project_xy_to_latlng(
            x_coords_metres=x_coord_matrix_metres,
            y_coords_metres=y_coord_matrix_metres,
            projection_object=projection_object)
    )
    num_grid_rows = latitude_matrix_deg.shape[0]
    num_grid_columns = latitude_matrix_deg.shape[1]
    num_days_matrix = numpy.full((num_grid_rows, num_grid_columns), 0)
    for i in range(num_domains):
        # Progress message every 10 domains.
        if numpy.mod(i, 10) == 0:
            print('Have found grid points in {0:d} of {1:d} domains...'.format(
                i, num_domains
            ))
        # Boolean masks of grid points inside this domain's lat/long box.
        this_lat_flag_matrix = numpy.logical_and(
            latitude_matrix_deg >= domain_min_latitudes_deg[i],
            latitude_matrix_deg <= domain_max_latitudes_deg[i]
        )
        this_lng_flag_matrix = numpy.logical_and(
            longitude_matrix_deg >= domain_min_longitudes_deg[i],
            longitude_matrix_deg <= domain_max_longitudes_deg[i]
        )
        num_days_matrix += numpy.logical_and(
            this_lat_flag_matrix, this_lng_flag_matrix
        ).astype(int)
    print(SEPARATOR_STRING)
    figure_object, axes_object = _plot_data(
        num_days_matrix=num_days_matrix, grid_metadata_dict=grid_metadata_dict,
        colour_map_object=colour_map_object)
    plotting_utils.label_axes(axes_object=axes_object, label_string='(c)')
    print('Saving figure to: "{0:s}"...'.format(output_file_name))
    figure_object.savefig(
        output_file_name, dpi=FIGURE_RESOLUTION_DPI, pad_inches=0,
        bbox_inches='tight')
    pyplot.close(figure_object)
if __name__ == '__main__':
    INPUT_ARG_OBJECT = INPUT_ARG_PARSER.parse_args()
    # Fetch each command-line argument by its declared name and hand off
    # to _run.
    _run(
        top_gridrad_dir_name=getattr(INPUT_ARG_OBJECT, INPUT_DIR_ARG_NAME),
        first_spc_date_string=getattr(INPUT_ARG_OBJECT, FIRST_DATE_ARG_NAME),
        last_spc_date_string=getattr(INPUT_ARG_OBJECT, LAST_DATE_ARG_NAME),
        colour_map_name=getattr(INPUT_ARG_OBJECT, COLOUR_MAP_ARG_NAME),
        grid_spacing_metres=getattr(INPUT_ARG_OBJECT, GRID_SPACING_ARG_NAME),
        output_file_name=getattr(INPUT_ARG_OBJECT, OUTPUT_FILE_ARG_NAME)
    )
| [
"gewittergefahr.gg_utils.time_periods.range_and_interval_to_list",
"numpy.array",
"gewittergefahr.gg_utils.grids.create_equidistant_grid",
"gewittergefahr.gg_utils.projections.project_xy_to_latlng",
"gewittergefahr.gg_utils.grids.xy_vectors_to_matrices",
"numpy.mod",
"gewittergefahr.gg_utils.time_conver... | [((178, 199), 'matplotlib.use', 'matplotlib.use', (['"""agg"""'], {}), "('agg')\n", (192, 199), False, 'import matplotlib\n'), ((988, 1006), 'numpy.full', 'numpy.full', (['(3)', '(0.0)'], {}), '(3, 0.0)\n', (998, 1006), False, 'import numpy\n'), ((2059, 2084), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2082, 2084), False, 'import argparse\n'), ((3281, 3344), 'gewittergefahr.gg_io.gridrad_io.read_metadata_from_full_grid_file', 'gridrad_io.read_metadata_from_full_grid_file', (['gridrad_file_name'], {}), '(gridrad_file_name)\n', (3325, 3344), False, 'from gewittergefahr.gg_io import gridrad_io\n'), ((3965, 4056), 'numpy.array', 'numpy.array', (['[min_latitude_deg, max_latitude_deg, min_longitude_deg, max_longitude_deg]'], {}), '([min_latitude_deg, max_latitude_deg, min_longitude_deg,\n max_longitude_deg])\n', (3976, 4056), False, 'import numpy\n'), ((5816, 5961), 'gewittergefahr.gg_utils.grids.xy_vectors_to_matrices', 'grids.xy_vectors_to_matrices', ([], {'x_unique_metres': 'grid_metadata_dict[grids.X_COORDS_KEY]', 'y_unique_metres': 'grid_metadata_dict[grids.Y_COORDS_KEY]'}), '(x_unique_metres=grid_metadata_dict[grids.\n X_COORDS_KEY], y_unique_metres=grid_metadata_dict[grids.Y_COORDS_KEY])\n', (5844, 5961), False, 'from gewittergefahr.gg_utils import grids\n'), ((6104, 6243), 'gewittergefahr.gg_utils.projections.project_xy_to_latlng', 'projections.project_xy_to_latlng', ([], {'x_coords_metres': 'x_matrix_metres', 'y_coords_metres': 'y_matrix_metres', 'projection_object': 'projection_object'}), '(x_coords_metres=x_matrix_metres,\n y_coords_metres=y_matrix_metres, projection_object=projection_object)\n', (6136, 6243), False, 'from gewittergefahr.gg_utils import projections\n'), ((6390, 6772), 'mpl_toolkits.basemap.Basemap', 'Basemap', ([], {'projection': '"""lcc"""', 'lat_1': 'standard_latitudes_deg[0]', 'lat_2': 'standard_latitudes_deg[1]', 'lon_0': 'central_longitude_deg', 'rsphere': 
'projections.DEFAULT_EARTH_RADIUS_METRES', 'ellps': 'projections.SPHERE_NAME', 'resolution': 'RESOLUTION_STRING', 'llcrnrx': 'x_matrix_metres[0, 0]', 'llcrnry': 'y_matrix_metres[0, 0]', 'urcrnrx': 'x_matrix_metres[-1, -1]', 'urcrnry': 'y_matrix_metres[-1, -1]'}), "(projection='lcc', lat_1=standard_latitudes_deg[0], lat_2=\n standard_latitudes_deg[1], lon_0=central_longitude_deg, rsphere=\n projections.DEFAULT_EARTH_RADIUS_METRES, ellps=projections.SPHERE_NAME,\n resolution=RESOLUTION_STRING, llcrnrx=x_matrix_metres[0, 0], llcrnry=\n y_matrix_metres[0, 0], urcrnrx=x_matrix_metres[-1, -1], urcrnry=\n y_matrix_metres[-1, -1])\n", (6397, 6772), False, 'from mpl_toolkits.basemap import Basemap\n'), ((7676, 7750), 'matplotlib.pyplot.subplots', 'pyplot.subplots', (['(1)', '(1)'], {'figsize': '(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES)'}), '(1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES))\n', (7691, 7750), False, 'from matplotlib import pyplot\n'), ((8327, 8563), 'gewittergefahr.gg_utils.grids.xy_field_grid_points_to_edges', 'grids.xy_field_grid_points_to_edges', ([], {'field_matrix': 'num_days_matrix', 'x_min_metres': 'basemap_x_matrix_metres[0, 0]', 'y_min_metres': 'basemap_y_matrix_metres[0, 0]', 'x_spacing_metres': 'x_spacing_metres', 'y_spacing_metres': 'y_spacing_metres'}), '(field_matrix=num_days_matrix,\n x_min_metres=basemap_x_matrix_metres[0, 0], y_min_metres=\n basemap_y_matrix_metres[0, 0], x_spacing_metres=x_spacing_metres,\n y_spacing_metres=y_spacing_metres)\n', (8362, 8563), False, 'from gewittergefahr.gg_utils import grids\n'), ((8640, 8698), 'numpy.ma.masked_where', 'numpy.ma.masked_where', (['(matrix_to_plot == 0)', 'matrix_to_plot'], {}), '(matrix_to_plot == 0, matrix_to_plot)\n', (8661, 8698), False, 'import numpy\n'), ((8704, 8822), 'gewittergefahr.plotting.plotting_utils.plot_coastlines', 'plotting_utils.plot_coastlines', ([], {'basemap_object': 'basemap_object', 'axes_object': 'axes_object', 'line_colour': 'BORDER_COLOUR'}), 
'(basemap_object=basemap_object, axes_object=\n axes_object, line_colour=BORDER_COLOUR)\n', (8734, 8822), False, 'from gewittergefahr.plotting import plotting_utils\n'), ((8840, 8957), 'gewittergefahr.plotting.plotting_utils.plot_countries', 'plotting_utils.plot_countries', ([], {'basemap_object': 'basemap_object', 'axes_object': 'axes_object', 'line_colour': 'BORDER_COLOUR'}), '(basemap_object=basemap_object, axes_object=\n axes_object, line_colour=BORDER_COLOUR)\n', (8869, 8957), False, 'from gewittergefahr.plotting import plotting_utils\n'), ((8975, 9102), 'gewittergefahr.plotting.plotting_utils.plot_states_and_provinces', 'plotting_utils.plot_states_and_provinces', ([], {'basemap_object': 'basemap_object', 'axes_object': 'axes_object', 'line_colour': 'BORDER_COLOUR'}), '(basemap_object=basemap_object,\n axes_object=axes_object, line_colour=BORDER_COLOUR)\n', (9015, 9102), False, 'from gewittergefahr.plotting import plotting_utils\n'), ((9121, 9240), 'gewittergefahr.plotting.plotting_utils.plot_parallels', 'plotting_utils.plot_parallels', ([], {'basemap_object': 'basemap_object', 'axes_object': 'axes_object', 'num_parallels': 'NUM_PARALLELS'}), '(basemap_object=basemap_object, axes_object=\n axes_object, num_parallels=NUM_PARALLELS)\n', (9150, 9240), False, 'from gewittergefahr.plotting import plotting_utils\n'), ((9258, 9377), 'gewittergefahr.plotting.plotting_utils.plot_meridians', 'plotting_utils.plot_meridians', ([], {'basemap_object': 'basemap_object', 'axes_object': 'axes_object', 'num_meridians': 'NUM_MERIDIANS'}), '(basemap_object=basemap_object, axes_object=\n axes_object, num_meridians=NUM_MERIDIANS)\n', (9287, 9377), False, 'from gewittergefahr.plotting import plotting_utils\n'), ((10803, 10835), 'matplotlib.pyplot.get_cmap', 'pyplot.get_cmap', (['colour_map_name'], {}), '(colour_map_name)\n', (10818, 10835), False, 'from matplotlib import pyplot\n'), ((10840, 10914), 'gewittergefahr.gg_utils.file_system_utils.mkdir_recursive_if_necessary', 
'file_system_utils.mkdir_recursive_if_necessary', ([], {'file_name': 'output_file_name'}), '(file_name=output_file_name)\n', (10886, 10914), False, 'from gewittergefahr.gg_utils import file_system_utils\n'), ((10942, 11002), 'gewittergefahr.gg_utils.time_conversion.get_start_of_spc_date', 'time_conversion.get_start_of_spc_date', (['first_spc_date_string'], {}), '(first_spc_date_string)\n', (10979, 11002), False, 'from gewittergefahr.gg_utils import time_conversion\n'), ((11037, 11094), 'gewittergefahr.gg_utils.time_conversion.get_end_of_spc_date', 'time_conversion.get_end_of_spc_date', (['last_spc_date_string'], {}), '(last_spc_date_string)\n', (11072, 11094), False, 'from gewittergefahr.gg_utils import time_conversion\n'), ((11132, 11319), 'gewittergefahr.gg_utils.time_periods.range_and_interval_to_list', 'time_periods.range_and_interval_to_list', ([], {'start_time_unix_sec': 'first_time_unix_sec', 'end_time_unix_sec': 'last_time_unix_sec', 'time_interval_sec': 'TIME_INTERVAL_SEC', 'include_endpoint': '(True)'}), '(start_time_unix_sec=\n first_time_unix_sec, end_time_unix_sec=last_time_unix_sec,\n time_interval_sec=TIME_INTERVAL_SEC, include_endpoint=True)\n', (11171, 11319), False, 'from gewittergefahr.gg_utils import time_periods\n'), ((11624, 11648), 'numpy.full', 'numpy.full', (['(4)', 'numpy.nan'], {}), '(4, numpy.nan)\n', (11634, 11648), False, 'import numpy\n'), ((12833, 12870), 'numpy.array', 'numpy.array', (['domain_min_latitudes_deg'], {}), '(domain_min_latitudes_deg)\n', (12844, 12870), False, 'import numpy\n'), ((12902, 12939), 'numpy.array', 'numpy.array', (['domain_max_latitudes_deg'], {}), '(domain_max_latitudes_deg)\n', (12913, 12939), False, 'import numpy\n'), ((12972, 13010), 'numpy.array', 'numpy.array', (['domain_min_longitudes_deg'], {}), '(domain_min_longitudes_deg)\n', (12983, 13010), False, 'import numpy\n'), ((13043, 13081), 'numpy.array', 'numpy.array', (['domain_max_longitudes_deg'], {}), '(domain_max_longitudes_deg)\n', (13054, 13081), 
False, 'import numpy\n'), ((13156, 13472), 'gewittergefahr.gg_utils.grids.create_equidistant_grid', 'grids.create_equidistant_grid', ([], {'min_latitude_deg': 'OVERALL_MIN_LATITUDE_DEG', 'max_latitude_deg': 'OVERALL_MAX_LATITUDE_DEG', 'min_longitude_deg': 'OVERALL_MIN_LONGITUDE_DEG', 'max_longitude_deg': 'OVERALL_MAX_LONGITUDE_DEG', 'x_spacing_metres': 'grid_spacing_metres', 'y_spacing_metres': 'grid_spacing_metres', 'azimuthal': '(False)'}), '(min_latitude_deg=OVERALL_MIN_LATITUDE_DEG,\n max_latitude_deg=OVERALL_MAX_LATITUDE_DEG, min_longitude_deg=\n OVERALL_MIN_LONGITUDE_DEG, max_longitude_deg=OVERALL_MAX_LONGITUDE_DEG,\n x_spacing_metres=grid_spacing_metres, y_spacing_metres=\n grid_spacing_metres, azimuthal=False)\n', (13185, 13472), False, 'from gewittergefahr.gg_utils import grids\n'), ((13758, 13870), 'gewittergefahr.gg_utils.grids.xy_vectors_to_matrices', 'grids.xy_vectors_to_matrices', ([], {'x_unique_metres': 'unique_x_coords_metres', 'y_unique_metres': 'unique_y_coords_metres'}), '(x_unique_metres=unique_x_coords_metres,\n y_unique_metres=unique_y_coords_metres)\n', (13786, 13870), False, 'from gewittergefahr.gg_utils import grids\n'), ((13943, 14094), 'gewittergefahr.gg_utils.projections.project_xy_to_latlng', 'projections.project_xy_to_latlng', ([], {'x_coords_metres': 'x_coord_matrix_metres', 'y_coords_metres': 'y_coord_matrix_metres', 'projection_object': 'projection_object'}), '(x_coords_metres=x_coord_matrix_metres,\n y_coords_metres=y_coord_matrix_metres, projection_object=projection_object)\n', (13975, 14094), False, 'from gewittergefahr.gg_utils import projections\n'), ((14258, 14306), 'numpy.full', 'numpy.full', (['(num_grid_rows, num_grid_columns)', '(0)'], {}), '((num_grid_rows, num_grid_columns), 0)\n', (14268, 14306), False, 'import numpy\n'), ((15209, 15279), 'gewittergefahr.plotting.plotting_utils.label_axes', 'plotting_utils.label_axes', ([], {'axes_object': 'axes_object', 'label_string': '"""(c)"""'}), "(axes_object=axes_object, 
label_string='(c)')\n", (15234, 15279), False, 'from gewittergefahr.plotting import plotting_utils\n'), ((15475, 15502), 'matplotlib.pyplot.close', 'pyplot.close', (['figure_object'], {}), '(figure_object)\n', (15487, 15502), False, 'from matplotlib import pyplot\n'), ((11376, 11418), 'gewittergefahr.gg_utils.time_conversion.time_to_spc_date_string', 'time_conversion.time_to_spc_date_string', (['t'], {}), '(t)\n', (11415, 11418), False, 'from gewittergefahr.gg_utils import time_conversion\n'), ((11789, 11923), 'gewittergefahr.gg_io.gridrad_io.find_file', 'gridrad_io.find_file', ([], {'unix_time_sec': 'valid_times_unix_sec[i]', 'top_directory_name': 'top_gridrad_dir_name', 'raise_error_if_missing': '(False)'}), '(unix_time_sec=valid_times_unix_sec[i],\n top_directory_name=top_gridrad_dir_name, raise_error_if_missing=False)\n', (11809, 11923), False, 'from gewittergefahr.gg_io import gridrad_io\n'), ((14533, 14659), 'numpy.logical_and', 'numpy.logical_and', (['(latitude_matrix_deg >= domain_min_latitudes_deg[i])', '(latitude_matrix_deg <= domain_max_latitudes_deg[i])'], {}), '(latitude_matrix_deg >= domain_min_latitudes_deg[i], \n latitude_matrix_deg <= domain_max_latitudes_deg[i])\n', (14550, 14659), False, 'import numpy\n'), ((14720, 14850), 'numpy.logical_and', 'numpy.logical_and', (['(longitude_matrix_deg >= domain_min_longitudes_deg[i])', '(longitude_matrix_deg <= domain_max_longitudes_deg[i])'], {}), '(longitude_matrix_deg >= domain_min_longitudes_deg[i], \n longitude_matrix_deg <= domain_max_longitudes_deg[i])\n', (14737, 14850), False, 'import numpy\n'), ((9535, 9561), 'numpy.max', 'numpy.max', (['num_days_matrix'], {}), '(num_days_matrix)\n', (9544, 9561), False, 'import numpy\n'), ((9850, 9876), 'numpy.max', 'numpy.max', (['num_days_matrix'], {}), '(num_days_matrix)\n', (9859, 9876), False, 'import numpy\n'), ((12216, 12290), 'numpy.allclose', 'numpy.allclose', (['these_domain_limits_deg', 'prev_domain_limits_deg', 'TOLERANCE'], {}), 
'(these_domain_limits_deg, prev_domain_limits_deg, TOLERANCE)\n', (12230, 12290), False, 'import numpy\n'), ((14352, 14368), 'numpy.mod', 'numpy.mod', (['i', '(10)'], {}), '(i, 10)\n', (14361, 14368), False, 'import numpy\n'), ((10057, 10071), 'numpy.round', 'numpy.round', (['v'], {}), '(v)\n', (10068, 10071), False, 'import numpy\n'), ((14908, 14969), 'numpy.logical_and', 'numpy.logical_and', (['this_lat_flag_matrix', 'this_lng_flag_matrix'], {}), '(this_lat_flag_matrix, this_lng_flag_matrix)\n', (14925, 14969), False, 'import numpy\n')] |
import random
import matplotlib.pyplot as plt
import gym
# from agents.actor_critic_agents.A2C import A2C
# from agents.actor_critic_agents.A3C import A3C
# from agents.actor_critic_agents.SAC import SAC
from agents.actor_critic_agents.SAC_Discrete import SAC_Discrete
# from agents.DQN_agents.DQN_HER import DQN_HER
# from agents.DQN_agents.DDQN import DDQN
# from agents.DQN_agents.DDQN_With_Prioritised_Experience_Replay import DDQN_With_Prioritised_Experience_Replay
# from agents.DQN_agents.DQN_With_Fixed_Q_Targets import DQN_With_Fixed_Q_Targets
# from agents.actor_critic_agents.DDPG import DDPG
from agents.actor_critic_agents.DDPG_HER import DDPG_HER
# from environments.Bit_Flipping_Environment import Bit_Flipping_Environment
from environments.Cache_server import Cache_server
# from agents.policy_gradient_agents.PPO import PPO
# from environments.Four_Rooms_Environment import Four_Rooms_Environment
# from agents.hierarchical_agents.SNN_HRL import SNN_HRL
# from agents.actor_critic_agents.TD3 import TD3
from agents.Trainer import Trainer
from utilities.data_structures.Config import Config
# from agents.DQN_agents.DQN import DQN
import numpy as np
import torch
# Fix all RNG sources so repeated runs are comparable
# (config.randomise_random_seed below is also disabled).
random.seed(1)
np.random.seed(1)
torch.manual_seed(1)
# Experiment configuration consumed by Trainer / the agent classes.
config = Config()
config.seed = 1
# config.environment = Bit_Flipping_Environment(4)
config.environment = Cache_server()
config.num_episodes_to_run = 100
config.file_to_save_data_results = None
config.file_to_save_results_graph = None
config.visualise_individual_results = False
config.visualise_overall_agent_results = False
config.randomise_random_seed = False
config.runs_per_agent = 1
config.use_GPU = False
# Hyperparameters shared by every actor-critic agent the Trainer runs;
# nested "Actor"/"Critic" dicts override per-network settings.
config.hyperparameters = {
    "Actor_Critic_Agents": {
        "learning_rate": 0.0005,
        "linear_hidden_units": [50, 30, 30, 30],
        "final_layer_activation": ["SOFTMAX", None],
        "gradient_clipping_norm": 25.0,
        "discount_rate": 1,
        "epsilon_decay_rate_denominator": 10.0,
        "normalise_rewards": False,
        "automatically_tune_entropy_hyperparameter": True,
        "add_extra_noise": False,
        "min_steps_before_learning": 1,
        "do_evaluation_iterations": True,
        "clip_rewards": False,
        "Actor": {
            "learning_rate": 0.001,
            # "linear_hidden_units": [20, 20],
            "linear_hidden_units": [50,100],
            # "final_layer_activation": "TANH",
            "final_layer_activation": "Softmax",
            "batch_norm": False,
            "tau": 0.005,
            "gradient_clipping_norm": 25
        },
        "Critic": {
            "learning_rate": 0.01,
            # "linear_hidden_units": [20, 20],
            "linear_hidden_units": [50,100],
            "final_layer_activation": "None",
            "batch_norm": False,
            "buffer_size": 100000,
            "tau": 0.005,
            "gradient_clipping_norm": 25
        },
        "batch_size": 3,
        "mu": 0.0, # for O-H noise
        "theta": 0.15, # for O-H noise
        "sigma": 0.25, # for O-H noise
        "action_noise_std": 0.2, # for TD3
        "action_noise_clipping_range": 0.5, # for TD3
        "update_every_n_steps": 20,
        "learning_updates_per_learning_session": 10,
        "HER_sample_proportion": 0.8,
        "exploration_worker_difference": 1.0
    },
}
# Run every agent in AGENTS against the configured environment and sanity-check
# the scores (inlined from what was once a pytest function, per the comment).
# def test_agent_solve_RL_cache():
AGENTS = [SAC_Discrete]
trainer = Trainer(config, AGENTS)
results = trainer.run_games_for_agents()
for agent in AGENTS:
    agent_results = results[agent.agent_name]
    # Best score after the first 50 episodes (warm-up excluded).
    # NOTE(review): `assert` is stripped under `python -O`; raise instead if
    # this check matters in production runs.
    agent_results = np.max(agent_results[0][1][50:])
    assert agent_results >= 0.0, "Failed for {} -- score {}".format(agent.agent_name, agent_results)
# Plot episode scores and rolling scores.
# NOTE(review): the "SAC" key assumes SAC_Discrete.agent_name == "SAC" --
# verify against the agent class, otherwise this raises KeyError.
plt.plot(results["SAC"][0][0])
plt.plot(results["SAC"][0][1])
plt.show()
# test_agent_solve_RL_cache()
# def test_agents_can_play_games_of_different_dimensions():
# config.num_episodes_to_run = 10
# config.hyperparameters["DQN_Agents"]["batch_size"] = 3
# AGENTS = [A2C, A3C, PPO, DDQN, DQN_With_Fixed_Q_Targets, DDQN_With_Prioritised_Experience_Replay, DQN]
# trainer = Trainer(config, AGENTS)
# config.environment = gym.make("CartPole-v0")
# results = trainer.run_games_for_agents()
# for agent in AGENTS:
# assert agent.agent_name in results.keys()
#
# AGENTS = [SAC, TD3, PPO, DDPG]
# config.environment = gym.make("MountainCarContinuous-v0")
# trainer = Trainer(config, AGENTS)
# results = trainer.run_games_for_agents()
# for agent in AGENTS:
# assert agent.agent_name in results.keys()
#
# AGENTS = [DDQN, SNN_HRL]
# config.environment = Four_Rooms_Environment(15, 15, stochastic_actions_probability=0.25,
# random_start_user_place=True, random_goal_place=False)
# trainer = Trainer(config, AGENTS)
# results = trainer.run_games_for_agents()
# for agent in AGENTS:
# assert agent.agent_name in results.keys()
| [
"utilities.data_structures.Config.Config",
"torch.manual_seed",
"environments.Cache_server.Cache_server",
"agents.Trainer.Trainer",
"matplotlib.pyplot.plot",
"random.seed",
"numpy.max",
"numpy.random.seed",
"matplotlib.pyplot.show"
] | [((1183, 1197), 'random.seed', 'random.seed', (['(1)'], {}), '(1)\n', (1194, 1197), False, 'import random\n'), ((1198, 1215), 'numpy.random.seed', 'np.random.seed', (['(1)'], {}), '(1)\n', (1212, 1215), True, 'import numpy as np\n'), ((1216, 1236), 'torch.manual_seed', 'torch.manual_seed', (['(1)'], {}), '(1)\n', (1233, 1236), False, 'import torch\n'), ((1247, 1255), 'utilities.data_structures.Config.Config', 'Config', ([], {}), '()\n', (1253, 1255), False, 'from utilities.data_structures.Config import Config\n'), ((1344, 1358), 'environments.Cache_server.Cache_server', 'Cache_server', ([], {}), '()\n', (1356, 1358), False, 'from environments.Cache_server import Cache_server\n'), ((3390, 3413), 'agents.Trainer.Trainer', 'Trainer', (['config', 'AGENTS'], {}), '(config, AGENTS)\n', (3397, 3413), False, 'from agents.Trainer import Trainer\n'), ((3676, 3706), 'matplotlib.pyplot.plot', 'plt.plot', (["results['SAC'][0][0]"], {}), "(results['SAC'][0][0])\n", (3684, 3706), True, 'import matplotlib.pyplot as plt\n'), ((3707, 3737), 'matplotlib.pyplot.plot', 'plt.plot', (["results['SAC'][0][1]"], {}), "(results['SAC'][0][1])\n", (3715, 3737), True, 'import matplotlib.pyplot as plt\n'), ((3738, 3748), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3746, 3748), True, 'import matplotlib.pyplot as plt\n'), ((3542, 3574), 'numpy.max', 'np.max', (['agent_results[0][1][50:]'], {}), '(agent_results[0][1][50:])\n', (3548, 3574), True, 'import numpy as np\n')] |
import os, argparse, torch, math, time, random
from os.path import join, isfile
import torch.nn.functional as F
from torch.optim import SGD
from torch.distributions import Beta
from tensorboardX import SummaryWriter
import torchvision
import modified_vgg
from dataloader import dataloader1
import dataloader
from utils import make_folder, AverageMeter, Logger, accuracy, save_checkpoint, compute_weight
from model import ConvLarge
# ---------------------------------------------------------------------------
# Command-line interface.
#
# BUG FIX: large parts of the script (make_folder(args.save_path),
# ConvLarge(num_classes=args.num_classes), dataloader1(..., bs=args.batch_size),
# `if args.gpu:`, the resume block, main()'s use of args.total_steps /
# args.mix_up / args.start_step, ...) read attributes whose add_argument calls
# had been commented out, so the script crashed with AttributeError at import
# time.  The values are restored below as post-parse assignments using the
# hard-coded defaults that replaced them.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser()
# Basic configuration
parser.add_argument('--dataset', type=str, default='mnist', choices=["mnist", "fmnist", "stl10"])
parser.add_argument('-a', '--architecture', type=str, default='convlarge', choices=['convlarge', 'vgg16'], help='Network architecture')
parser.add_argument('--weight', type=float, default=1., help='re-weighting scalar for the additional loss')
# Training setting
parser.add_argument('--epsilon', type=float, default=1e-2, help='epsilon for gradient estimation')
parser.add_argument('--lr', type=float, default=0.1, help='Maximum learning rate')
parser.add_argument('--warmup', type=int, default=4000, help='Warmup iterations')
parser.add_argument('--gamma', type=float, default=0.1, help='Learning rate annealing multiplier')
parser.add_argument('--milestones', type=eval, default=[300000, 350000], help='Learning rate annealing steps')
parser.add_argument('--weight-decay', type=float, default=1e-4, help='Weight decay')
parser.add_argument('--momentum', type=float, default=0.9, help='Momentum for SGD optimizer')
parser.add_argument('--seed', type=int, default=1234, help='Random seed for reproducibility')
parser.add_argument('--print-freq', type=int, default=100, help='Print and log frequency')
parser.add_argument('--test-freq', type=int, default=400, help='Test frequency')
args = parser.parse_args()

# Dataset-derived settings.
num_classes = dataloader.train_dset[args.dataset].num_classes
data_path = './data/' + args.dataset
result_path = './results/' + args.dataset
num_of_labeled_data = 4000
gamma = 1
batch_size = 100

# Attributes the rest of the script expects on `args` (former CLI flags).
args.num_classes = num_classes          # size of the classifier output layer
args.data_path = data_path              # dataset root
args.save_path = result_path            # checkpoints / logs / tfboard output
args.num_label = num_of_labeled_data    # number of labeled examples
args.batch_size = batch_size            # per-step batch size
args.num_workers = 4                    # dataloader workers (former default)
args.total_steps = 50000                # total training iterations
args.start_step = 0                     # overwritten when resuming
args.mix_up = False                     # mix-up augmentation off by default
args.alpha = 1.                         # Beta concentration for mix-up
args.resume = None                      # checkpoint path to resume from
args.gpu = torch.cuda.is_available()    # use CUDA when present
# Set random seed for python, torch and (optionally) every CUDA device.
random.seed(args.seed)
torch.manual_seed(args.seed)
if args.gpu:
    torch.cuda.manual_seed_all(args.seed)
os.environ['PYTHONHASHSEED'] = str(args.seed)
torch.backends.cudnn.deterministic = True
# NOTE(review): benchmark=True lets cuDNN auto-tune algorithms, which can
# undermine deterministic=True above -- confirm this combination is intended.
torch.backends.cudnn.benchmark = True
# Create directories if not exist
make_folder(args.save_path)
logger = Logger(join(args.save_path, 'log.txt'))
writer = SummaryWriter(log_dir=args.save_path)
logger.info('Called with args:')
logger.info(args)
# Define dataloader: returns infinite labeled/unlabeled train iterator + test loader.
logger.info("Loading data...")
train_loader, test_loader = dataloader1(
    dset = args.dataset,
    path = args.data_path,
    bs = args.batch_size,
    num_workers = args.num_workers,
    num_labels = args.num_label,
    num_iters = args.total_steps,
    return_unlabel = True,
    save_path = args.save_path
)
kwargs = {'num_classes': dataloader.train_dset[args.dataset].num_classes}
# Build model and optimizer
logger.info("Building model and optimizer...")
if args.architecture == "convlarge":
    model = ConvLarge(num_classes=args.num_classes)
elif args.architecture == "vgg16" and args.dataset == "stl10":
    # BUG FIX: this previously compared against "lst10", which is not a valid
    # --dataset choice, so the torchvision branch was unreachable and STL-10
    # always fell through to modified_vgg.  "stl10" is the intended dataset.
    model = torchvision.models.vgg11(pretrained = False, progress=True, **kwargs)
elif args.architecture == "vgg16":
    model = modified_vgg.vgg11(pretrained = False, progress=True, **kwargs)
if args.gpu:
    model.cuda()
optimizer = SGD(model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay)
logger.info("Model:\n%s\nOptimizer:\n%s" % (str(model), str(optimizer)))
# Optionally build beta distribution (used by the mix-up path in main()).
if args.mix_up:
    beta_distribution = Beta(torch.tensor([args.alpha]), torch.tensor([args.alpha]))
# Optionally resume from a checkpoint
if args.resume is not None:
    if isfile(args.resume):
        logger.info("=> loading checkpoint '{}'".format(args.resume))
        checkpoint = torch.load(args.resume)
        args.start_step = checkpoint['step']
        best_acc = checkpoint['best_acc']
        model.load_state_dict(checkpoint['model'])
        optimizer.load_state_dict(checkpoint['optimizer'])
        logger.info("=> loaded checkpoint '{}' (step {})".format(args.resume, checkpoint['step']))
    else:
        logger.info("=> no checkpoint found at '{}'".format(args.resume))
def compute_lr(step):
    """Learning rate at `step`: linear warmup up to args.warmup, then args.lr
    multiplied by args.gamma once for every milestone already passed."""
    if step < args.warmup:
        # Ramp linearly from 0 to args.lr over the warmup period.
        return args.lr * step / args.warmup
    rate = args.lr
    for boundary in args.milestones:
        if step > boundary:
            rate *= args.gamma
    return rate
def main():
    """Semi-supervised training loop.

    Each step: (1) estimate how the unlabeled pseudo-labels should move via a
    first-order finite-difference approximation of the meta-gradient, then
    (2) take one SGD step on label loss (+ optional mix-up) plus a weighted
    unlabeled consistency loss.  Relies on module-level `args`, `model`,
    `optimizer`, `train_loader`, `logger`, `writer` and helpers.
    """
    data_times, batch_times, label_losses, unlabel_losses, label_acc, unlabel_acc = [AverageMeter() for _ in range(6)]
    if args.mix_up: interp_losses = AverageMeter()
    # NOTE(review): this resets best_acc to 0 even when resuming from a
    # checkpoint (the resume block stores best_acc at module level, which is
    # shadowed here) -- confirm whether resumed runs should keep it.
    best_acc = 0.
    logger.info("Start training...")
    for step in range(args.start_step, args.total_steps):
        print(step)
        # Load data and distribute to devices
        data_start = time.time()
        label_img, label_gt, unlabel_img, unlabel_gt = next(train_loader)
        if args.gpu:
            label_img = label_img.cuda()
            label_gt = label_gt.cuda()
            unlabel_img = unlabel_img.cuda()
            unlabel_gt = unlabel_gt.cuda()
        # One-hot labels are needed for the mix-up interpolation below.
        _label_gt = F.one_hot(label_gt, num_classes=args.num_classes).float()
        data_end = time.time()
        # Compute learning rate and meta learning rate
        lr = compute_lr(step)
        for param_group in optimizer.param_groups:
            param_group['lr'] = lr
        weight = compute_weight(args.weight, step, args.warmup)
        print(lr, weight)
        ### First-order Approximation ###
        # Flattens a sequence of tensors into one 1-D vector (for the norm).
        _concat = lambda xs: torch.cat([x.view(-1) for x in xs])
        # Evaluation mode
        model.eval()
        # Forward label data and perform backward pass
        label_pred = model(label_img)
        label_loss = F.cross_entropy(label_pred, label_gt, reduction='mean')
        # Gradient of the label loss w.r.t. the model parameters (not applied;
        # used only as the perturbation direction for the finite difference).
        dtheta = torch.autograd.grad(label_loss, model.parameters(), only_inputs=True)
        print(label_pred, label_loss)
        with torch.no_grad():
            # Compute the unlabel pseudo-gt
            unlabel_pred = model(unlabel_img)
            unlabel_pseudo_gt = F.softmax(unlabel_pred, dim=1)
            # Compute step size for first-order approximation
            epsilon = args.epsilon / torch.norm(_concat(dtheta))
            # Forward finite difference: theta + eps * dtheta (in place)
            for p, g in zip(model.parameters(), dtheta):
                p.data.add_(g, alpha=epsilon)
            unlabel_pred_pos = model(unlabel_img)
            # Backward finite difference: theta - eps * dtheta (in place)
            for p, g in zip(model.parameters(), dtheta):
                p.data.sub_(g, alpha=2.*epsilon)
            unlabel_pred_neg = model(unlabel_img)
            # Resume original params (undo the perturbation exactly)
            for p, g in zip(model.parameters(), dtheta):
                p.data.add_(g, alpha=epsilon)
            # Compute (approximated) gradients w.r.t pseudo-gt of unlabel data
            unlabel_grad = F.softmax(unlabel_pred_pos, dim=1) - F.softmax(unlabel_pred_neg, dim=1)
            unlabel_grad.div_(epsilon)
            # Update and normalize pseudo-labels (clip negatives, renormalize
            # rows to sum to 1; all-zero rows are divided by 1 to avoid NaN).
            unlabel_pseudo_gt.sub_(unlabel_grad, alpha=lr)
            torch.relu_(unlabel_pseudo_gt)
            sums = torch.sum(unlabel_pseudo_gt, dim=1, keepdim=True)
            unlabel_pseudo_gt /= torch.where(sums == 0., torch.ones_like(sums), sums)
        # Training mode
        model.train()
        # First compute label loss
        if args.mix_up:
            # Adopt mix-up augmentation
            with torch.no_grad():
                alpha = beta_distribution.sample((args.batch_size,))
                if args.gpu:
                    alpha = alpha.cuda()
                _alpha = alpha.view(-1, 1, 1, 1)
                # print(alpha.shape)
                # print(label_img.shape)
                # print(unlabel_img.shape)
                interp_img = (label_img * _alpha + unlabel_img * (1. - _alpha)).detach()
                interp_pseudo_gt = (_label_gt * alpha + unlabel_pseudo_gt * (1. - alpha)).detach()
            interp_pred = model(interp_img)
            interp_loss = F.kl_div(F.log_softmax(interp_pred, dim=1), interp_pseudo_gt, reduction='batchmean')
        else:
            # Regular label loss (recomputed in train mode; the eval-mode
            # label_pred/label_loss above were only for the meta-gradient).
            label_pred = model(label_img)
            label_loss = F.cross_entropy(label_pred, label_gt, reduction='mean')
        # Then compute unlabel loss with `unlabel_pseudo_gt`
        unlabel_pred = model(unlabel_img)
        unlabel_loss = torch.norm(F.softmax(unlabel_pred, dim=1)-unlabel_pseudo_gt, p=2, dim=1).pow(2).mean()
        loss = interp_loss + weight * unlabel_loss if args.mix_up else label_loss + weight * unlabel_loss
        # One SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # Compute accuracy
        # NOTE(review): in the mix-up branch label_pred comes from the earlier
        # eval-mode pass, so logged label accuracy is pre-update -- confirm.
        label_top1, = accuracy(label_pred, label_gt, topk=(1,))
        unlabel_top1, = accuracy(unlabel_pred, unlabel_gt, topk=(1,))
        # Update AverageMeter stats
        data_times.update(data_end - data_start)
        batch_times.update(time.time() - data_end)
        label_losses.update(label_loss.item(), label_img.size(0))
        unlabel_losses.update(unlabel_loss.item(), unlabel_img.size(0))
        label_acc.update(label_top1.item(), label_img.size(0))
        unlabel_acc.update(unlabel_top1.item(), unlabel_img.size(0))
        if args.mix_up:
            interp_losses.update(interp_loss.item(), label_img.size(0))
        # Print and log
        if step % args.print_freq == 0:
            logger.info("Step {0:05d} Dtime: {dtimes.avg:.3f} Btime: {btimes.avg:.3f} "
                        "Lloss: {llosses.val:.3f} (avg {llosses.avg:.3f}) Uloss: {ulosses.val:.3f} (avg {ulosses.avg:.3f}) "
                        "Lacc: {label.val:.3f} (avg {label.avg:.3f}) Uacc: {unlabel.val:.3f} (avg {unlabel.avg:.3f}) "
                        "LR: {1:.4f}".format(step, lr, dtimes=data_times, btimes=batch_times, llosses=label_losses,
                                             ulosses=unlabel_losses, label=label_acc, unlabel=unlabel_acc))
        # Test and save model
        if (step + 1) % args.test_freq == 0 or step == args.total_steps - 1:
            acc = evaluate(test_loader, model)
            # Remember best accuracy and save checkpoint
            is_best = acc > best_acc
            if is_best:
                best_acc = acc
            logger.info("Best Accuracy: %.5f" % best_acc)
            save_checkpoint({
                'step': step + 1,
                'model': model.state_dict(),
                'best_acc': best_acc,
                'optimizer': optimizer.state_dict()
            }, is_best, path=args.save_path, filename="checkpoint.pth")
            # Write to the tfboard
            writer.add_scalar('train/label-acc', label_acc.avg, step)
            writer.add_scalar('train/unlabel-acc', unlabel_acc.avg, step)
            writer.add_scalar('train/label-loss', label_losses.avg, step)
            writer.add_scalar('train/unlabel-loss', unlabel_losses.avg, step)
            writer.add_scalar('train/lr', lr, step)
            writer.add_scalar('test/accuracy', acc, step)
            if args.mix_up:
                writer.add_scalar('train/interp-loss', interp_losses.avg, step)
            # Reset AverageMeters
            label_losses.reset()
            unlabel_losses.reset()
            label_acc.reset()
            unlabel_acc.reset()
            if args.mix_up:
                interp_losses.reset()
@torch.no_grad()
def evaluate(test_loader, model):
    """One pass over `test_loader`; returns the average top-1 accuracy."""
    batch_time = AverageMeter()
    losses = AverageMeter()
    acc = AverageMeter()
    # switch to evaluate mode
    model.eval()
    tic = time.time()
    for batch_idx, (data, target) in enumerate(test_loader):
        # Move the batch to the GPU when enabled.
        if args.gpu:
            data, target = data.cuda(), target.cuda()
        # Forward pass and per-batch metrics.
        output = model(data)
        batch_loss = F.cross_entropy(output, target, reduction='mean')
        top1, = accuracy(output, target, topk=(1,))
        losses.update(batch_loss.item(), data.size(0))
        acc.update(top1.item(), data.size(0))
        # Track elapsed wall-clock time per batch.
        batch_time.update(time.time() - tic)
        tic = time.time()
        if batch_idx % args.print_freq == 0:
            logger.info('Test: [{0}/{1}] Time {btime.val:.3f} (avg={btime.avg:.3f}) '
                        'Test Loss {loss.val:.3f} (avg={loss.avg:.3f}) '
                        'Acc {acc.val:.3f} (avg={acc.avg:.3f})' \
                        .format(batch_idx, len(test_loader), btime=batch_time, loss=losses, acc=acc))
    logger.info(' * Accuracy {acc.avg:.5f}'.format(acc=acc))
    return acc.avg
if __name__ == "__main__":
    # Train and evaluate the model
    torch.multiprocessing.freeze_support()
    main()
# NOTE(review): these close calls sit OUTSIDE the __main__ guard, so they also
# run when the module is merely imported -- confirm they shouldn't be indented.
writer.close()
logger.close()
| [
"utils.compute_weight",
"torch.multiprocessing.freeze_support",
"torch.sum",
"torchvision.models.vgg11",
"modified_vgg.vgg11",
"torch.nn.functional.softmax",
"utils.make_folder",
"tensorboardX.SummaryWriter",
"argparse.ArgumentParser",
"model.ConvLarge",
"torch.ones_like",
"utils.accuracy",
... | [((442, 467), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (465, 467), False, 'import os, argparse, torch, math, time, random\n'), ((2994, 3016), 'random.seed', 'random.seed', (['args.seed'], {}), '(args.seed)\n', (3005, 3016), False, 'import os, argparse, torch, math, time, random\n'), ((3017, 3045), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (3034, 3045), False, 'import os, argparse, torch, math, time, random\n'), ((3262, 3289), 'utils.make_folder', 'make_folder', (['args.save_path'], {}), '(args.save_path)\n', (3273, 3289), False, 'from utils import make_folder, AverageMeter, Logger, accuracy, save_checkpoint, compute_weight\n'), ((3348, 3385), 'tensorboardX.SummaryWriter', 'SummaryWriter', ([], {'log_dir': 'args.save_path'}), '(log_dir=args.save_path)\n', (3361, 3385), False, 'from tensorboardX import SummaryWriter\n'), ((3517, 3729), 'dataloader.dataloader1', 'dataloader1', ([], {'dset': 'args.dataset', 'path': 'args.data_path', 'bs': 'args.batch_size', 'num_workers': 'args.num_workers', 'num_labels': 'args.num_label', 'num_iters': 'args.total_steps', 'return_unlabel': '(True)', 'save_path': 'args.save_path'}), '(dset=args.dataset, path=args.data_path, bs=args.batch_size,\n num_workers=args.num_workers, num_labels=args.num_label, num_iters=args\n .total_steps, return_unlabel=True, save_path=args.save_path)\n', (3528, 3729), False, 'from dataloader import dataloader1\n'), ((12468, 12483), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (12481, 12483), False, 'import os, argparse, torch, math, time, random\n'), ((3063, 3100), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['args.seed'], {}), '(args.seed)\n', (3089, 3100), False, 'import os, argparse, torch, math, time, random\n'), ((3306, 3337), 'os.path.join', 'join', (['args.save_path', '"""log.txt"""'], {}), "(args.save_path, 'log.txt')\n", (3310, 3337), False, 'from os.path import join, isfile\n'), ((4003, 4042), 
'model.ConvLarge', 'ConvLarge', ([], {'num_classes': 'args.num_classes'}), '(num_classes=args.num_classes)\n', (4012, 4042), False, 'from model import ConvLarge\n'), ((4719, 4738), 'os.path.isfile', 'isfile', (['args.resume'], {}), '(args.resume)\n', (4725, 4738), False, 'from os.path import join, isfile\n'), ((12640, 12651), 'time.time', 'time.time', ([], {}), '()\n', (12649, 12651), False, 'import os, argparse, torch, math, time, random\n'), ((13724, 13762), 'torch.multiprocessing.freeze_support', 'torch.multiprocessing.freeze_support', ([], {}), '()\n', (13760, 13762), False, 'import os, argparse, torch, math, time, random\n'), ((4118, 4185), 'torchvision.models.vgg11', 'torchvision.models.vgg11', ([], {'pretrained': '(False)', 'progress': '(True)'}), '(pretrained=False, progress=True, **kwargs)\n', (4142, 4185), False, 'import torchvision\n'), ((4589, 4615), 'torch.tensor', 'torch.tensor', (['[args.alpha]'], {}), '([args.alpha])\n', (4601, 4615), False, 'import os, argparse, torch, math, time, random\n'), ((4617, 4643), 'torch.tensor', 'torch.tensor', (['[args.alpha]'], {}), '([args.alpha])\n', (4629, 4643), False, 'import os, argparse, torch, math, time, random\n'), ((4831, 4854), 'torch.load', 'torch.load', (['args.resume'], {}), '(args.resume)\n', (4841, 4854), False, 'import os, argparse, torch, math, time, random\n'), ((5578, 5592), 'utils.AverageMeter', 'AverageMeter', ([], {}), '()\n', (5590, 5592), False, 'from utils import make_folder, AverageMeter, Logger, accuracy, save_checkpoint, compute_weight\n'), ((5648, 5662), 'utils.AverageMeter', 'AverageMeter', ([], {}), '()\n', (5660, 5662), False, 'from utils import make_folder, AverageMeter, Logger, accuracy, save_checkpoint, compute_weight\n'), ((5863, 5874), 'time.time', 'time.time', ([], {}), '()\n', (5872, 5874), False, 'import os, argparse, torch, math, time, random\n'), ((6236, 6247), 'time.time', 'time.time', ([], {}), '()\n', (6245, 6247), False, 'import os, argparse, torch, math, time, 
random\n'), ((6437, 6483), 'utils.compute_weight', 'compute_weight', (['args.weight', 'step', 'args.warmup'], {}), '(args.weight, step, args.warmup)\n', (6451, 6483), False, 'from utils import make_folder, AverageMeter, Logger, accuracy, save_checkpoint, compute_weight\n'), ((6778, 6833), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['label_pred', 'label_gt'], {'reduction': '"""mean"""'}), "(label_pred, label_gt, reduction='mean')\n", (6793, 6833), True, 'import torch.nn.functional as F\n'), ((9800, 9841), 'utils.accuracy', 'accuracy', (['label_pred', 'label_gt'], {'topk': '(1,)'}), '(label_pred, label_gt, topk=(1,))\n', (9808, 9841), False, 'from utils import make_folder, AverageMeter, Logger, accuracy, save_checkpoint, compute_weight\n'), ((9866, 9911), 'utils.accuracy', 'accuracy', (['unlabel_pred', 'unlabel_gt'], {'topk': '(1,)'}), '(unlabel_pred, unlabel_gt, topk=(1,))\n', (9874, 9911), False, 'from utils import make_folder, AverageMeter, Logger, accuracy, save_checkpoint, compute_weight\n'), ((12549, 12563), 'utils.AverageMeter', 'AverageMeter', ([], {}), '()\n', (12561, 12563), False, 'from utils import make_folder, AverageMeter, Logger, accuracy, save_checkpoint, compute_weight\n'), ((12880, 12927), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['pred', 'target'], {'reduction': '"""mean"""'}), "(pred, target, reduction='mean')\n", (12895, 12927), True, 'import torch.nn.functional as F\n'), ((12988, 13021), 'utils.accuracy', 'accuracy', (['pred', 'target'], {'topk': '(1,)'}), '(pred, target, topk=(1,))\n', (12996, 13021), False, 'from utils import make_folder, AverageMeter, Logger, accuracy, save_checkpoint, compute_weight\n'), ((13207, 13218), 'time.time', 'time.time', ([], {}), '()\n', (13216, 13218), False, 'import os, argparse, torch, math, time, random\n'), ((4235, 4296), 'modified_vgg.vgg11', 'modified_vgg.vgg11', ([], {'pretrained': '(False)', 'progress': '(True)'}), '(pretrained=False, progress=True, **kwargs)\n', (4253, 4296), 
False, 'import modified_vgg\n'), ((6972, 6987), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6985, 6987), False, 'import os, argparse, torch, math, time, random\n'), ((7111, 7141), 'torch.nn.functional.softmax', 'F.softmax', (['unlabel_pred'], {'dim': '(1)'}), '(unlabel_pred, dim=1)\n', (7120, 7141), True, 'import torch.nn.functional as F\n'), ((8139, 8169), 'torch.relu_', 'torch.relu_', (['unlabel_pseudo_gt'], {}), '(unlabel_pseudo_gt)\n', (8150, 8169), False, 'import os, argparse, torch, math, time, random\n'), ((8189, 8238), 'torch.sum', 'torch.sum', (['unlabel_pseudo_gt'], {'dim': '(1)', 'keepdim': '(True)'}), '(unlabel_pseudo_gt, dim=1, keepdim=True)\n', (8198, 8238), False, 'import os, argparse, torch, math, time, random\n'), ((9271, 9326), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['label_pred', 'label_gt'], {'reduction': '"""mean"""'}), "(label_pred, label_gt, reduction='mean')\n", (9286, 9326), True, 'import torch.nn.functional as F\n'), ((6159, 6208), 'torch.nn.functional.one_hot', 'F.one_hot', (['label_gt'], {'num_classes': 'args.num_classes'}), '(label_gt, num_classes=args.num_classes)\n', (6168, 6208), True, 'import torch.nn.functional as F\n'), ((7907, 7941), 'torch.nn.functional.softmax', 'F.softmax', (['unlabel_pred_pos'], {'dim': '(1)'}), '(unlabel_pred_pos, dim=1)\n', (7916, 7941), True, 'import torch.nn.functional as F\n'), ((7944, 7978), 'torch.nn.functional.softmax', 'F.softmax', (['unlabel_pred_neg'], {'dim': '(1)'}), '(unlabel_pred_neg, dim=1)\n', (7953, 7978), True, 'import torch.nn.functional as F\n'), ((8296, 8317), 'torch.ones_like', 'torch.ones_like', (['sums'], {}), '(sums)\n', (8311, 8317), False, 'import os, argparse, torch, math, time, random\n'), ((8488, 8503), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (8501, 8503), False, 'import os, argparse, torch, math, time, random\n'), ((9081, 9114), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['interp_pred'], {'dim': '(1)'}), '(interp_pred, dim=1)\n', 
(9094, 9114), True, 'import torch.nn.functional as F\n'), ((10024, 10035), 'time.time', 'time.time', ([], {}), '()\n', (10033, 10035), False, 'import os, argparse, torch, math, time, random\n'), ((13174, 13185), 'time.time', 'time.time', ([], {}), '()\n', (13183, 13185), False, 'import os, argparse, torch, math, time, random\n'), ((9465, 9495), 'torch.nn.functional.softmax', 'F.softmax', (['unlabel_pred'], {'dim': '(1)'}), '(unlabel_pred, dim=1)\n', (9474, 9495), True, 'import torch.nn.functional as F\n')] |
import pandas as pd
# Load the dataset; the code below assumes it has 'gender' (0/1) and 'height'
# columns with one missing height value -- TODO confirm against data.csv.
df = pd.read_csv('data.csv')  # read the data
print(df)
def median(list_vales):
    """Return the median of a non-empty list of numbers.

    For an even count, the result is the mean of the two middle values.

    BUG FIX: the original sorted the caller's list in place (mutating a
    shared argument); we now sort a copy and leave the input untouched.
    """
    ordered = sorted(list_vales)
    n = len(ordered)
    mid = n // 2
    if n % 2 == 0:
        # Even count: average the two central elements.
        return (ordered[mid - 1] + ordered[mid]) / 2
    return ordered[mid]
def average(list_val):
    """Arithmetic mean of the values in `list_val` (must be non-empty)."""
    total = sum(list_val)
    count = len(list_val)
    return total / count
def cleandata(df):
nan_rows = df[df['height'].isnull()] # get the row which is have null values
row = df.iloc[4].tolist() # the index of the row which is null
if row[1] == 1: # check if the gender is Male/female then remove the height values which are female
new_df = df.loc[df['gender'] == 1]
new_df = new_df.drop(nan_rows.index[0])
height = new_df['height'].tolist() # get all the values of height except the NAN value
median_val = median(height)
average_val = average(height)
# print("the median value", median_val)
# print(" the average value", int(average_val))
df.at[nan_rows.index[0], 'height'] = median_val # update the corrupted VALUE
df_median = df.copy() # make new table with median value
df.at[nan_rows.index[0], 'height'] = int(average_val) # update the corrupted VALUE
df_avg = df.copy() # make new table with avg value
return df_median, df_avg, int(average_val), int(median_val)
elif row[1] == 0: # check if the gender is Male/female then remove the height values which are male
new_df = df.loc[df['gender'] == 0]
new_df = new_df.drop(nan_rows.index[0])
height = new_df['height'].tolist()
median_val = median(height)
average_val = average(height)
print(median_val)
df.at[nan_rows.index[0], 'height'] = median_val # update the corrupted VALUE
df_median = df.copy() # make new table with median value
df.at[nan_rows.index[0], 'height'] = int(average_val)
df_avg = df.copy() # make new table with median value
return df_median, df_avg, int(average_val), int(median_val)
df_median, df_avg, average_val, median_val = cleandata(df)
print("------------------------------------------------------------")
print("the AVERAGE value is:", average_val)
print(df_avg)
print("------------------------------------------------------------")
print("the median value is:", median_val)
print(df_median)
| [
"pandas.read_csv"
] | [((26, 49), 'pandas.read_csv', 'pd.read_csv', (['"""data.csv"""'], {}), "('data.csv')\n", (37, 49), True, 'import pandas as pd\n')] |
from skimage.draw import line as sk_line
from skimage.draw import circle_perimeter as sk_circle_perimeter
from RGBMatrixEmulator.graphics.color import Color
from RGBMatrixEmulator.graphics.font import Font
def DrawText(canvas, font, x, y, color, text):
# Early return for empty string prevents bugs in bdfparser library
# and makes good sense anyway
if len(text) == 0:
return
# Support multiple spacings based on device width
character_widths = [font.CharacterWidth(ord(letter)) for letter in text]
first_char_width = character_widths[0]
max_char_width = max(character_widths)
total_width = sum(character_widths)
# Offscreen to the left, adjust by first character width
if x < 0:
adjustment = abs(x + first_char_width) // first_char_width
text = text[adjustment:]
if adjustment:
x += first_char_width * adjustment
# Offscreen to the right, rough adjustment by max width
if (total_width + x) > canvas.width:
text = text[: ((canvas.width + 1) // max_char_width) + 2]
# Draw the text!
if len(text) != 0:
# Ensure text doesn't get drawn as multiple lines
linelimit = len(text) * (font.headers['fbbx'] + 1)
text_map = font.bdf_font.draw(text, linelimit).todata(2)
font_y_offset = -(font.headers['fbby'] + font.headers['fbbyoff'])
for y2, row in enumerate(text_map):
for x2, value in enumerate(row):
if value == 1:
try:
if isinstance(color, tuple):
canvas.SetPixel(x + x2, y + y2 + font_y_offset, *color)
else:
canvas.SetPixel(x + x2, y + y2 + font_y_offset, color.r, color.g, color.b)
except Exception:
pass
return total_width
def DrawLine(canvas, x1, y1, x2, y2, color):
int_points = __coerce_int(x1, y1, x2, y2)
rows, cols = sk_line(*int_points)
for point in zip(rows, cols):
canvas.SetPixel(*point, color.r, color.g, color.b)
def DrawCircle(canvas, x, y, r, color):
int_points = __coerce_int(x, y)
rows, cols = sk_circle_perimeter(*int_points, r)
for point in zip(rows, cols):
canvas.SetPixel(*point, color.r, color.g, color.b)
def __coerce_int(*values):
return [int(value) for value in values]
| [
"skimage.draw.line",
"skimage.draw.circle_perimeter"
] | [((1993, 2013), 'skimage.draw.line', 'sk_line', (['*int_points'], {}), '(*int_points)\n', (2000, 2013), True, 'from skimage.draw import line as sk_line\n'), ((2202, 2237), 'skimage.draw.circle_perimeter', 'sk_circle_perimeter', (['*int_points', 'r'], {}), '(*int_points, r)\n', (2221, 2237), True, 'from skimage.draw import circle_perimeter as sk_circle_perimeter\n')] |
#
# Copyright © 2021 United States Government as represented by the Administrator
# of the National Aeronautics and Space Administration. No copyright is claimed
# in the United States under Title 17, U.S. Code. All Other Rights Reserved.
#
# SPDX-License-Identifier: NASA-1.3
#
"""Generate a grid of pointings on the sky."""
import astropy.units as u
from astropy.table import QTable
import numpy as np
from ligo.skymap.tool import ArgumentParser, FileType
from .. import skygrid
def parser():
p = ArgumentParser(prog='dorado-scheduling-skygrid')
p.add_argument('--area', default='50 deg2', type=u.Quantity,
help='Average area per tile')
p.add_argument('--method', default='healpix', help='Tiling algorithm',
choices=[key.replace('_', '-') for key in skygrid.__all__])
p.add_argument('-o', '--output', metavar='OUTPUT.ecsv', default='-',
type=FileType('w'), help='Output filename')
return p
def main(args=None):
args = parser().parse_args(args)
method = getattr(skygrid, args.method.replace('-', '_'))
coords = method(args.area)
table = QTable({'field_id': np.arange(len(coords)), 'center': coords})
table.write(args.output, format='ascii.ecsv')
if __name__ == '__main__':
main()
| [
"ligo.skymap.tool.FileType",
"ligo.skymap.tool.ArgumentParser"
] | [((507, 555), 'ligo.skymap.tool.ArgumentParser', 'ArgumentParser', ([], {'prog': '"""dorado-scheduling-skygrid"""'}), "(prog='dorado-scheduling-skygrid')\n", (521, 555), False, 'from ligo.skymap.tool import ArgumentParser, FileType\n'), ((921, 934), 'ligo.skymap.tool.FileType', 'FileType', (['"""w"""'], {}), "('w')\n", (929, 934), False, 'from ligo.skymap.tool import ArgumentParser, FileType\n')] |
# main.py
#----------------------------------------------------------------------#
#
#
#
#----------------------------------------------------------------------#
from math import floor
from sqlite3 import OperationalError
import string, sqlite3
from urllib.parse import urlparse
import http.server
import socketserver
from flask import Flask, request, render_template, redirect
#Assuming urls.db is in your app root folder
def table_check():
create_table = """
CREATE TABLE WEB_URL(
ID INT PRIMARY KEY AUTOINCREMENT,
URL TEXT NOT NULL
);
"""
with sqlite3.connect('urls.db') as conn:
cursor = conn.cursor()
try:
cursor.execute(create_table)
except OperationalError:
pass
# Base62 Encoder and Decoder
def toBase62(num, b = 62):
if b <= 0 or b > 62:
return 0
base = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
r = num % b
res = base[r];
q = floor(num / b)
while q:
r = q % b
q = floor(q / b)
res = base[int(r)] + res
return res
def toBase10(num, b = 62):
base = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
limit = len(num)
res = 0
for i in range(limit):
res = b * res + base.find(num[i])
return res
app = Flask(__name__)
# REDIRECTING
@app.route('/<short_url>')
def redirect_short_url(short_url):
decoded_string = toBase10(short_url)
redirect_url = 'https://marvelapp.com/asdf'
with sqlite3.connect('urls.db') as conn:
cursor = conn.cursor()
select_row = """
SELECT URL FROM WEB_URL
WHERE ID=%s
"""%(decoded_string)
result_cursor = cursor.execute(select_row)
try:
redirect_url = result_cursor.fetchone()[0]
except Exception as e:
print(e)
return redirect(redirect_url)
if __name__ == '__main__':
# This code checks whether database table is created or not
table_check()
app.run(port = 8000, debug=True)
| [
"math.floor",
"flask.redirect",
"sqlite3.connect",
"flask.Flask"
] | [((1344, 1359), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1349, 1359), False, 'from flask import Flask, request, render_template, redirect\n'), ((997, 1011), 'math.floor', 'floor', (['(num / b)'], {}), '(num / b)\n', (1002, 1011), False, 'from math import floor\n'), ((1920, 1942), 'flask.redirect', 'redirect', (['redirect_url'], {}), '(redirect_url)\n', (1928, 1942), False, 'from flask import Flask, request, render_template, redirect\n'), ((608, 634), 'sqlite3.connect', 'sqlite3.connect', (['"""urls.db"""'], {}), "('urls.db')\n", (623, 634), False, 'import string, sqlite3\n'), ((1055, 1067), 'math.floor', 'floor', (['(q / b)'], {}), '(q / b)\n', (1060, 1067), False, 'from math import floor\n'), ((1537, 1563), 'sqlite3.connect', 'sqlite3.connect', (['"""urls.db"""'], {}), "('urls.db')\n", (1552, 1563), False, 'import string, sqlite3\n')] |
import flask
from flask import Flask, url_for
from tensorflow.keras.applications.imagenet_utils import preprocess_input, decode_predictions
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing import image
import numpy as np
# instantiating a class object
app = Flask(__name__)
model_path = 'vgg19.h5'
# load the model
model = load_model(model_path)
# model._make_predict_function()
# preprocessing function
def model_predict(img_path, model):
# load the image and set the size to 224,224
img = image.load_img(img_path, target_size=(224,224))
# change the image to array
x = image.img_to_array(img)
# add dimension so we could pass it as an input to the network
x = np.expand_dims(x, axis=0)
# scale the input
x = preprocess_input(x)
# make predictions
preds = model.predict(x)
return preds
from image import routes | [
"tensorflow.keras.preprocessing.image.load_img",
"flask.Flask",
"tensorflow.keras.models.load_model",
"numpy.expand_dims",
"tensorflow.keras.preprocessing.image.img_to_array",
"tensorflow.keras.applications.imagenet_utils.preprocess_input"
] | [((294, 309), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (299, 309), False, 'from flask import Flask, url_for\n'), ((362, 384), 'tensorflow.keras.models.load_model', 'load_model', (['model_path'], {}), '(model_path)\n', (372, 384), False, 'from tensorflow.keras.models import load_model\n'), ((540, 588), 'tensorflow.keras.preprocessing.image.load_img', 'image.load_img', (['img_path'], {'target_size': '(224, 224)'}), '(img_path, target_size=(224, 224))\n', (554, 588), False, 'from tensorflow.keras.preprocessing import image\n'), ((629, 652), 'tensorflow.keras.preprocessing.image.img_to_array', 'image.img_to_array', (['img'], {}), '(img)\n', (647, 652), False, 'from tensorflow.keras.preprocessing import image\n'), ((728, 753), 'numpy.expand_dims', 'np.expand_dims', (['x'], {'axis': '(0)'}), '(x, axis=0)\n', (742, 753), True, 'import numpy as np\n'), ((784, 803), 'tensorflow.keras.applications.imagenet_utils.preprocess_input', 'preprocess_input', (['x'], {}), '(x)\n', (800, 803), False, 'from tensorflow.keras.applications.imagenet_utils import preprocess_input, decode_predictions\n')] |
from flask import request, jsonify
from functools import wraps
from schema import SchemaError
def json_required(func):
@wraps(func)
def wrapper(*args, **kwargs):
if not request.data or not request.json:
return jsonify({
'status': 'Error',
'reason': 'No JSON provided'
})
return func(*args, **kwargs)
return wrapper
def validate_schema(schema):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
schema.validate(request.json)
except SchemaError:
return jsonify({
'status': 'Error',
'reason': 'Invalid JSON provided'
})
return func(*args, **kwargs)
return wrapper
return decorator | [
"functools.wraps",
"flask.jsonify"
] | [((126, 137), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (131, 137), False, 'from functools import wraps\n'), ((466, 477), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (471, 477), False, 'from functools import wraps\n'), ((240, 298), 'flask.jsonify', 'jsonify', (["{'status': 'Error', 'reason': 'No JSON provided'}"], {}), "({'status': 'Error', 'reason': 'No JSON provided'})\n", (247, 298), False, 'from flask import request, jsonify\n'), ((634, 697), 'flask.jsonify', 'jsonify', (["{'status': 'Error', 'reason': 'Invalid JSON provided'}"], {}), "({'status': 'Error', 'reason': 'Invalid JSON provided'})\n", (641, 697), False, 'from flask import request, jsonify\n')] |
import os
import feedparser
import json
from flask import Flask, request
# Add your Slack token to the variable below.
SLACK_TOKEN = ""
url = ""
payload = {}
headers = {'content-type': 'application/json'}
app = Flask(__name__)
# this endpoint listens for incoming slash commands from Slack.
@app.route('/horos', methods=['POST'])
def horos():
if request.method == "POST" and request.form.get('token') == SLACK_TOKEN:
from_number = request.form.get('text')
from_number = from_number[11:]
channel = request.form.get('channel_name')
message = matchHoroscope(from_number)
payload = {
"text": message,
"channel": channel,
"username": "Star-Messenger",
}
return json.dumps(payload)
final = ""
signs = ["aries", "taurus", "gemini", "cancer", "leo", "virgo", "libra", "scorpio", "sagittarius", "capricorn", "aquarius", "pisces"]
# One half of the process of matching a horoscope with an actual reading.
def matchHoroscope(sign):
if sign.lower() in signs:
return getHoroscope(sign)
else:
return "The answer you seek is not written in the stars."
# This function pulls today's horoscope through the RSS feeds of FindYourFate.com
def getHoroscope(sign):
url = 'http://www.findyourfate.com/rss/dailyhoroscope-feed.asp?sign=' + sign.title()
d = feedparser.parse(url)
container = d.entries[0]
horoscope = container['summary_detail']['value']
return horoscope | [
"feedparser.parse",
"json.dumps",
"flask.request.form.get",
"flask.Flask"
] | [((213, 228), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (218, 228), False, 'from flask import Flask, request\n'), ((1270, 1291), 'feedparser.parse', 'feedparser.parse', (['url'], {}), '(url)\n', (1286, 1291), False, 'import feedparser\n'), ((437, 461), 'flask.request.form.get', 'request.form.get', (['"""text"""'], {}), "('text')\n", (453, 461), False, 'from flask import Flask, request\n'), ((507, 539), 'flask.request.form.get', 'request.form.get', (['"""channel_name"""'], {}), "('channel_name')\n", (523, 539), False, 'from flask import Flask, request\n'), ((683, 702), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (693, 702), False, 'import json\n'), ((379, 404), 'flask.request.form.get', 'request.form.get', (['"""token"""'], {}), "('token')\n", (395, 404), False, 'from flask import Flask, request\n')] |
import base64
import hashlib
import json
import logging
from dataclasses import dataclass
import boto3
log = logging.getLogger()
region = "us-east-1"
def handle(event: dict, context):
request = event["Records"][0]["cf"]["request"]
try:
authenticate(request["headers"])
except Exception as e:
log.error(repr(e))
return unauthorized
return request
def authenticate(headers: dict):
domain = headers["host"][0]["value"]
auth = headers["authorization"][0]["value"]
auth_type, creds = auth.split(" ")
if auth_type != "Basic":
raise ValueError("Invalid auth type: " + auth_type)
username, password = base64.b64decode(creds).decode().split(":")
user = get_user(domain, username)
if hash_password(password, user.password_salt) != user.password_hash:
raise ValueError("Invalid password for " + username)
@dataclass
class User:
username: str
password_hash: str
password_salt: str
def get_user(domain: str, username: str) -> User:
data = boto3.client("ssm", region_name=region).get_parameter(
Name=f"/s3pypi/{domain}/users/{username}",
WithDecryption=True,
)["Parameter"]["Value"]
return User(username, **json.loads(data))
def hash_password(password: str, salt: str) -> str:
return hashlib.sha1((password + salt).encode()).hexdigest()
unauthorized = dict(
status="401",
statusDescription="Unauthorized",
headers={
"www-authenticate": [
{"key": "WWW-Authenticate", "value": 'Basic realm="Login"'}
]
},
)
| [
"logging.getLogger",
"json.loads",
"boto3.client",
"base64.b64decode"
] | [((111, 130), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (128, 130), False, 'import logging\n'), ((1231, 1247), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (1241, 1247), False, 'import json\n'), ((670, 693), 'base64.b64decode', 'base64.b64decode', (['creds'], {}), '(creds)\n', (686, 693), False, 'import base64\n'), ((1040, 1079), 'boto3.client', 'boto3.client', (['"""ssm"""'], {'region_name': 'region'}), "('ssm', region_name=region)\n", (1052, 1079), False, 'import boto3\n')] |
#Copyright (c) 2017 <NAME>.
#Cura is released under the terms of the LGPLv3 or higher.
import gc
from UM.Job import Job
from UM.Application import Application
from UM.Mesh.MeshData import MeshData
from UM.Preferences import Preferences
from UM.View.GL.OpenGLContext import OpenGLContext
from UM.Message import Message
from UM.i18n import i18nCatalog
from UM.Logger import Logger
from UM.Math.Vector import Vector
from cura.Scene.BuildPlateDecorator import BuildPlateDecorator
from cura.Scene.CuraSceneNode import CuraSceneNode
from cura.Settings.ExtruderManager import ExtruderManager
from cura import LayerDataBuilder
from cura import LayerDataDecorator
from cura import LayerPolygon
import numpy
from time import time
from cura.Settings.ExtrudersModel import ExtrudersModel
catalog = i18nCatalog("cura")
## Return a 4-tuple with floats 0-1 representing the html color code
#
# \param color_code html color code, i.e. "#FF0000" -> red
def colorCodeToRGBA(color_code):
if color_code is None:
Logger.log("w", "Unable to convert color code, returning default")
return [0, 0, 0, 1]
return [
int(color_code[1:3], 16) / 255,
int(color_code[3:5], 16) / 255,
int(color_code[5:7], 16) / 255,
1.0]
class ProcessSlicedLayersJob(Job):
def __init__(self, layers):
super().__init__()
self._layers = layers
self._scene = Application.getInstance().getController().getScene()
self._progress_message = Message(catalog.i18nc("@info:status", "Processing Layers"), 0, False, -1)
self._abort_requested = False
self._build_plate_number = None
## Aborts the processing of layers.
#
# This abort is made on a best-effort basis, meaning that the actual
# job thread will check once in a while to see whether an abort is
# requested and then stop processing by itself. There is no guarantee
# that the abort will stop the job any time soon or even at all.
def abort(self):
self._abort_requested = True
def setBuildPlate(self, new_value):
self._build_plate_number = new_value
def getBuildPlate(self):
return self._build_plate_number
def run(self):
Logger.log("d", "Processing new layer for build plate %s..." % self._build_plate_number)
start_time = time()
view = Application.getInstance().getController().getActiveView()
if view.getPluginId() == "SimulationView":
view.resetLayerData()
self._progress_message.show()
Job.yieldThread()
if self._abort_requested:
if self._progress_message:
self._progress_message.hide()
return
Application.getInstance().getController().activeViewChanged.connect(self._onActiveViewChanged)
# The no_setting_override is here because adding the SettingOverrideDecorator will trigger a reslice
new_node = CuraSceneNode(no_setting_override = True)
new_node.addDecorator(BuildPlateDecorator(self._build_plate_number))
# Force garbage collection.
# For some reason, Python has a tendency to keep the layer data
# in memory longer than needed. Forcing the GC to run here makes
# sure any old layer data is really cleaned up before adding new.
gc.collect()
mesh = MeshData()
layer_data = LayerDataBuilder.LayerDataBuilder()
layer_count = len(self._layers)
# Find the minimum layer number
# When using a raft, the raft layers are sent as layers < 0. Instead of allowing layers < 0, we
# instead simply offset all other layers so the lowest layer is always 0. It could happens that
# the first raft layer has value -8 but there are just 4 raft (negative) layers.
min_layer_number = 0
negative_layers = 0
for layer in self._layers:
if layer.id < min_layer_number:
min_layer_number = layer.id
if layer.id < 0:
negative_layers += 1
current_layer = 0
for layer in self._layers:
# Negative layers are offset by the minimum layer number, but the positive layers are just
# offset by the number of negative layers so there is no layer gap between raft and model
abs_layer_number = layer.id + abs(min_layer_number) if layer.id < 0 else layer.id + negative_layers
layer_data.addLayer(abs_layer_number)
this_layer = layer_data.getLayer(abs_layer_number)
layer_data.setLayerHeight(abs_layer_number, layer.height)
layer_data.setLayerThickness(abs_layer_number, layer.thickness)
for p in range(layer.repeatedMessageCount("path_segment")):
polygon = layer.getRepeatedMessage("path_segment", p)
extruder = polygon.extruder
line_types = numpy.fromstring(polygon.line_type, dtype="u1") # Convert bytearray to numpy array
line_types = line_types.reshape((-1,1))
points = numpy.fromstring(polygon.points, dtype="f4") # Convert bytearray to numpy array
if polygon.point_type == 0: # Point2D
points = points.reshape((-1,2)) # We get a linear list of pairs that make up the points, so make numpy interpret them correctly.
else: # Point3D
points = points.reshape((-1,3))
line_widths = numpy.fromstring(polygon.line_width, dtype="f4") # Convert bytearray to numpy array
line_widths = line_widths.reshape((-1,1)) # We get a linear list of pairs that make up the points, so make numpy interpret them correctly.
line_thicknesses = numpy.fromstring(polygon.line_thickness, dtype="f4") # Convert bytearray to numpy array
line_thicknesses = line_thicknesses.reshape((-1,1)) # We get a linear list of pairs that make up the points, so make numpy interpret them correctly.
line_feedrates = numpy.fromstring(polygon.line_feedrate, dtype="f4") # Convert bytearray to numpy array
line_feedrates = line_feedrates.reshape((-1,1)) # We get a linear list of pairs that make up the points, so make numpy interpret them correctly.
# Create a new 3D-array, copy the 2D points over and insert the right height.
# This uses manual array creation + copy rather than numpy.insert since this is
# faster.
new_points = numpy.empty((len(points), 3), numpy.float32)
if polygon.point_type == 0: # Point2D
new_points[:, 0] = points[:, 0]
new_points[:, 1] = layer.height / 1000 # layer height value is in backend representation
new_points[:, 2] = -points[:, 1]
else: # Point3D
new_points[:, 0] = points[:, 0]
new_points[:, 1] = points[:, 2]
new_points[:, 2] = -points[:, 1]
this_poly = LayerPolygon.LayerPolygon(extruder, line_types, new_points, line_widths, line_thicknesses, line_feedrates)
this_poly.buildCache()
this_layer.polygons.append(this_poly)
Job.yieldThread()
Job.yieldThread()
current_layer += 1
progress = (current_layer / layer_count) * 99
# TODO: Rebuild the layer data mesh once the layer has been processed.
# This needs some work in LayerData so we can add the new layers instead of recreating the entire mesh.
if self._abort_requested:
if self._progress_message:
self._progress_message.hide()
return
if self._progress_message:
self._progress_message.setProgress(progress)
# We are done processing all the layers we got from the engine, now create a mesh out of the data
# Find out colors per extruder
global_container_stack = Application.getInstance().getGlobalContainerStack()
manager = ExtruderManager.getInstance()
extruders = list(manager.getMachineExtruders(global_container_stack.getId()))
if extruders:
material_color_map = numpy.zeros((len(extruders), 4), dtype=numpy.float32)
for extruder in extruders:
position = int(extruder.getMetaDataEntry("position", default="0")) # Get the position
try:
default_color = ExtrudersModel.defaultColors[position]
except IndexError:
default_color = "#e0e000"
color_code = extruder.material.getMetaDataEntry("color_code", default=default_color)
color = colorCodeToRGBA(color_code)
material_color_map[position, :] = color
else:
# Single extruder via global stack.
material_color_map = numpy.zeros((1, 4), dtype=numpy.float32)
color_code = global_container_stack.material.getMetaDataEntry("color_code", default="#e0e000")
color = colorCodeToRGBA(color_code)
material_color_map[0, :] = color
# We have to scale the colors for compatibility mode
if OpenGLContext.isLegacyOpenGL() or bool(Preferences.getInstance().getValue("view/force_layer_view_compatibility_mode")):
line_type_brightness = 0.5 # for compatibility mode
else:
line_type_brightness = 1.0
layer_mesh = layer_data.build(material_color_map, line_type_brightness)
if self._abort_requested:
if self._progress_message:
self._progress_message.hide()
return
# Add LayerDataDecorator to scene node to indicate that the node has layer data
decorator = LayerDataDecorator.LayerDataDecorator()
decorator.setLayerData(layer_mesh)
new_node.addDecorator(decorator)
new_node.setMeshData(mesh)
# Set build volume as parent, the build volume can move as a result of raft settings.
# It makes sense to set the build volume as parent: the print is actually printed on it.
new_node_parent = Application.getInstance().getBuildVolume()
new_node.setParent(new_node_parent) # Note: After this we can no longer abort!
settings = Application.getInstance().getGlobalContainerStack()
if not settings.getProperty("machine_center_is_zero", "value"):
new_node.setPosition(Vector(-settings.getProperty("machine_width", "value") / 2, 0.0, settings.getProperty("machine_depth", "value") / 2))
if self._progress_message:
self._progress_message.setProgress(100)
if self._progress_message:
self._progress_message.hide()
# Clear the unparsed layers. This saves us a bunch of memory if the Job does not get destroyed.
self._layers = None
Logger.log("d", "Processing layers took %s seconds", time() - start_time)
def _onActiveViewChanged(self):
if self.isRunning():
if Application.getInstance().getController().getActiveView().getPluginId() == "SimulationView":
if not self._progress_message:
self._progress_message = Message(catalog.i18nc("@info:status", "Processing Layers"), 0, False, 0, catalog.i18nc("@info:title", "Information"))
if self._progress_message.getProgress() != 100:
self._progress_message.show()
else:
if self._progress_message:
self._progress_message.hide()
| [
"UM.Logger.Logger.log",
"UM.Preferences.Preferences.getInstance",
"cura.Scene.BuildPlateDecorator.BuildPlateDecorator",
"cura.LayerDataDecorator.LayerDataDecorator",
"cura.Scene.CuraSceneNode.CuraSceneNode",
"UM.Job.Job.yieldThread",
"cura.LayerDataBuilder.LayerDataBuilder",
"cura.LayerPolygon.LayerPo... | [((792, 811), 'UM.i18n.i18nCatalog', 'i18nCatalog', (['"""cura"""'], {}), "('cura')\n", (803, 811), False, 'from UM.i18n import i18nCatalog\n'), ((1015, 1081), 'UM.Logger.Logger.log', 'Logger.log', (['"""w"""', '"""Unable to convert color code, returning default"""'], {}), "('w', 'Unable to convert color code, returning default')\n", (1025, 1081), False, 'from UM.Logger import Logger\n'), ((2227, 2320), 'UM.Logger.Logger.log', 'Logger.log', (['"""d"""', "('Processing new layer for build plate %s...' % self._build_plate_number)"], {}), "('d', 'Processing new layer for build plate %s...' % self.\n _build_plate_number)\n", (2237, 2320), False, 'from UM.Logger import Logger\n'), ((2337, 2343), 'time.time', 'time', ([], {}), '()\n', (2341, 2343), False, 'from time import time\n'), ((2961, 3000), 'cura.Scene.CuraSceneNode.CuraSceneNode', 'CuraSceneNode', ([], {'no_setting_override': '(True)'}), '(no_setting_override=True)\n', (2974, 3000), False, 'from cura.Scene.CuraSceneNode import CuraSceneNode\n'), ((3344, 3356), 'gc.collect', 'gc.collect', ([], {}), '()\n', (3354, 3356), False, 'import gc\n'), ((3373, 3383), 'UM.Mesh.MeshData.MeshData', 'MeshData', ([], {}), '()\n', (3381, 3383), False, 'from UM.Mesh.MeshData import MeshData\n'), ((3405, 3440), 'cura.LayerDataBuilder.LayerDataBuilder', 'LayerDataBuilder.LayerDataBuilder', ([], {}), '()\n', (3438, 3440), False, 'from cura import LayerDataBuilder\n'), ((8144, 8173), 'cura.Settings.ExtruderManager.ExtruderManager.getInstance', 'ExtruderManager.getInstance', ([], {}), '()\n', (8171, 8173), False, 'from cura.Settings.ExtruderManager import ExtruderManager\n'), ((9872, 9911), 'cura.LayerDataDecorator.LayerDataDecorator', 'LayerDataDecorator.LayerDataDecorator', ([], {}), '()\n', (9909, 9911), False, 'from cura import LayerDataDecorator\n'), ((2556, 2573), 'UM.Job.Job.yieldThread', 'Job.yieldThread', ([], {}), '()\n', (2571, 2573), False, 'from UM.Job import Job\n'), ((3033, 3078), 
'cura.Scene.BuildPlateDecorator.BuildPlateDecorator', 'BuildPlateDecorator', (['self._build_plate_number'], {}), '(self._build_plate_number)\n', (3052, 3078), False, 'from cura.Scene.BuildPlateDecorator import BuildPlateDecorator\n'), ((7333, 7350), 'UM.Job.Job.yieldThread', 'Job.yieldThread', ([], {}), '()\n', (7348, 7350), False, 'from UM.Job import Job\n'), ((8992, 9032), 'numpy.zeros', 'numpy.zeros', (['(1, 4)'], {'dtype': 'numpy.float32'}), '((1, 4), dtype=numpy.float32)\n', (9003, 9032), False, 'import numpy\n'), ((9306, 9336), 'UM.View.GL.OpenGLContext.OpenGLContext.isLegacyOpenGL', 'OpenGLContext.isLegacyOpenGL', ([], {}), '()\n', (9334, 9336), False, 'from UM.View.GL.OpenGLContext import OpenGLContext\n'), ((4923, 4970), 'numpy.fromstring', 'numpy.fromstring', (['polygon.line_type'], {'dtype': '"""u1"""'}), "(polygon.line_type, dtype='u1')\n", (4939, 4970), False, 'import numpy\n'), ((5089, 5133), 'numpy.fromstring', 'numpy.fromstring', (['polygon.points'], {'dtype': '"""f4"""'}), "(polygon.points, dtype='f4')\n", (5105, 5133), False, 'import numpy\n'), ((5490, 5538), 'numpy.fromstring', 'numpy.fromstring', (['polygon.line_width'], {'dtype': '"""f4"""'}), "(polygon.line_width, dtype='f4')\n", (5506, 5538), False, 'import numpy\n'), ((5767, 5819), 'numpy.fromstring', 'numpy.fromstring', (['polygon.line_thickness'], {'dtype': '"""f4"""'}), "(polygon.line_thickness, dtype='f4')\n", (5783, 5819), False, 'import numpy\n'), ((6056, 6107), 'numpy.fromstring', 'numpy.fromstring', (['polygon.line_feedrate'], {'dtype': '"""f4"""'}), "(polygon.line_feedrate, dtype='f4')\n", (6072, 6107), False, 'import numpy\n'), ((7085, 7195), 'cura.LayerPolygon.LayerPolygon', 'LayerPolygon.LayerPolygon', (['extruder', 'line_types', 'new_points', 'line_widths', 'line_thicknesses', 'line_feedrates'], {}), '(extruder, line_types, new_points, line_widths,\n line_thicknesses, line_feedrates)\n', (7110, 7195), False, 'from cura import LayerPolygon\n'), ((7303, 7320), 
'UM.Job.Job.yieldThread', 'Job.yieldThread', ([], {}), '()\n', (7318, 7320), False, 'from UM.Job import Job\n'), ((8074, 8099), 'UM.Application.Application.getInstance', 'Application.getInstance', ([], {}), '()\n', (8097, 8099), False, 'from UM.Application import Application\n'), ((10249, 10274), 'UM.Application.Application.getInstance', 'Application.getInstance', ([], {}), '()\n', (10272, 10274), False, 'from UM.Application import Application\n'), ((10400, 10425), 'UM.Application.Application.getInstance', 'Application.getInstance', ([], {}), '()\n', (10423, 10425), False, 'from UM.Application import Application\n'), ((11036, 11042), 'time.time', 'time', ([], {}), '()\n', (11040, 11042), False, 'from time import time\n'), ((1404, 1429), 'UM.Application.Application.getInstance', 'Application.getInstance', ([], {}), '()\n', (1427, 1429), False, 'from UM.Application import Application\n'), ((2359, 2384), 'UM.Application.Application.getInstance', 'Application.getInstance', ([], {}), '()\n', (2382, 2384), False, 'from UM.Application import Application\n'), ((9345, 9370), 'UM.Preferences.Preferences.getInstance', 'Preferences.getInstance', ([], {}), '()\n', (9368, 9370), False, 'from UM.Preferences import Preferences\n'), ((2737, 2762), 'UM.Application.Application.getInstance', 'Application.getInstance', ([], {}), '()\n', (2760, 2762), False, 'from UM.Application import Application\n'), ((11138, 11163), 'UM.Application.Application.getInstance', 'Application.getInstance', ([], {}), '()\n', (11161, 11163), False, 'from UM.Application import Application\n')] |
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField
from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, Length
from app.models import User
from flask import request
class CreateChallengeForm(FlaskForm):
    """Form for creating a new challenge."""

    # The challenge must have a name.
    name = StringField('Name', validators=[DataRequired()])
    # Optional free-text description, limited to 600 characters.
    description = TextAreaField('About the challenge', validators=[Length(min=0, max=600)])
    # Duration of the challenge in days; required.
    total_days = StringField('Total days', validators=[DataRequired()])
    # NOTE(review): interval/type carry no validators — presumably optional; confirm.
    interval = StringField('Interval')
    type = StringField('Type')
    submit = SubmitField('Add challenge')
class EditChallengeForm(FlaskForm):
    """Form for editing an existing challenge.

    Same fields as CreateChallengeForm except the name and type,
    which are not editable here.
    """

    # Optional free-text description, limited to 600 characters.
    description = TextAreaField('About the challenge', validators = [Length(min = 0, max = 600)])
    # Duration of the challenge in days; required.
    total_days = StringField('Total days', validators=[DataRequired()])
    interval = StringField('Interval')
    submit = SubmitField('Edit challenge')
    def __init__(self, *args, **kwargs):
        # Delegates straight to FlaskForm; looks like a placeholder for
        # future per-instance setup — no extra behavior visible here.
        super(EditChallengeForm, self).__init__(*args, **kwargs)
| [
"wtforms.validators.Length",
"wtforms.SubmitField",
"wtforms.validators.DataRequired",
"wtforms.StringField"
] | [((534, 557), 'wtforms.StringField', 'StringField', (['"""Interval"""'], {}), "('Interval')\n", (545, 557), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField\n'), ((566, 585), 'wtforms.StringField', 'StringField', (['"""Type"""'], {}), "('Type')\n", (577, 585), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField\n'), ((596, 624), 'wtforms.SubmitField', 'SubmitField', (['"""Add challenge"""'], {}), "('Add challenge')\n", (607, 624), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField\n'), ((838, 861), 'wtforms.StringField', 'StringField', (['"""Interval"""'], {}), "('Interval')\n", (849, 861), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField\n'), ((872, 901), 'wtforms.SubmitField', 'SubmitField', (['"""Edit challenge"""'], {}), "('Edit challenge')\n", (883, 901), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField\n'), ((341, 355), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (353, 355), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, Length\n'), ((424, 446), 'wtforms.validators.Length', 'Length', ([], {'min': '(0)', 'max': '(600)'}), '(min=0, max=600)\n', (430, 446), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, Length\n'), ((505, 519), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (517, 519), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, Length\n'), ((728, 750), 'wtforms.validators.Length', 'Length', ([], {'min': '(0)', 'max': '(600)'}), '(min=0, max=600)\n', (734, 750), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, Length\n'), ((809, 823), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (821, 823), False, 'from 
wtforms.validators import ValidationError, DataRequired, Email, EqualTo, Length\n')] |
"""
Given a permutation of any length, generate the next permutation in lexicographic order.
For example, this are the permutations for [1,2,3] in lexicographic order.
# >>> list(it.permutations([1,2,3]))
[(1, 2, 3), (1, 3, 2), (2, 1, 3), (2, 3, 1), (3, 1, 2), (3, 2, 1)]
Then, your function next_permutation(t:tuple)->tuple should do the following
# >>> next_permutation((2,3,1))
(3,1,2)
Because (3,1,2) is the next permutation in lexicographic order. Here is another example:
# >>> next_permutation((0, 5, 2, 1, 4, 7, 3, 6))
(0, 5, 2, 1, 4, 7, 6, 3)
Your function should work for very long input tuples so the autograder will time-out if you
try to brute force your solution. The last permutation should wrap around to the first.
# >>> next_permutation((3,2,1,0))
(0, 1, 2, 3)
"""
def next_permutation(t: tuple) -> tuple:
    """Return the permutation immediately following ``t`` in lexicographic order.

    Elements must be distinct integers.  The last permutation (fully
    descending) wraps around to the first (fully ascending) one.
    Runs in O(n) time plus the O(n log n) wrap-around sort, so it works
    for very long tuples without brute force.

    :param t: a tuple of distinct integers
    :return: the next permutation of ``t``, as a tuple
    """
    assert isinstance(t, tuple)
    for elem in t:
        assert isinstance(elem, int)
    assert len(t) == len(set(t))
    # Standard algorithm: find the rightmost "pivot" index i with
    # t[i] < t[i + 1]; everything right of i is a descending suffix.
    i = len(t) - 2
    while i >= 0 and t[i] > t[i + 1]:
        i -= 1
    if i < 0:
        # Fully descending (or 0/1 elements): this is the last
        # permutation, so wrap around to the first one.
        return tuple(sorted(t))
    # Find the smallest element in the suffix that is larger than the
    # pivot; the suffix is descending, so scan from the right.
    j = len(t) - 1
    while t[j] < t[i]:
        j -= 1
    # Swap pivot and successor, then reverse the (descending) suffix to
    # make it ascending -- the smallest arrangement of those elements.
    result = list(t)
    result[i], result[j] = result[j], result[i]
    result[i + 1:] = reversed(result[i + 1:])
    return tuple(result)
if __name__ == '__main__':
    # Demo / self-check: compare next_permutation against itertools,
    # which yields permutations in the same lexicographic order when the
    # start tuple is sorted.
    from itertools import permutations
    ##### Arguments
    p = (1,2,3,4,5)
    ##### End Arguments
    ps = permutations(p)  # expected sequence, starting at p itself
    print("Start permutation:", p)
    print("\nExpected | Actual | Matching")
    print("-"*20)
    for i in range(10):
        # p_n is the reference value from itertools; p is our running value.
        p_n = next(ps)
        print(p, p_n, p==p_n)
        p = next_permutation(p)
| [
"itertools.permutations"
] | [((2341, 2356), 'itertools.permutations', 'permutations', (['p'], {}), '(p)\n', (2353, 2356), False, 'from itertools import permutations\n')] |
from unihan_db.tables import (
UnhnLocation,
UnhnLocationkXHC1983,
UnhnReading,
kCantonese,
kCCCII,
kCheungBauer,
kCheungBauerIndex,
kCihaiT,
kDaeJaweon,
kDefinition,
kFenn,
kFennIndex,
kGSR,
kHanYu,
kHanyuPinlu,
kHanyuPinyin,
kHDZRadBreak,
kIICore,
kIICoreSource,
kUnihanCore2020,
kIRG_GSource,
kIRG_HSource,
kIRG_JSource,
kIRG_KPSource,
kIRG_KSource,
kIRG_MSource,
kIRG_TSource,
kIRG_USource,
kIRG_VSource,
kIRG_SSource,
kIRG_UKSource,
kIRGDaeJaweon,
kIRGHanyuDaZidian,
kIRGKangXi,
kMandarin,
kRSAdobe_Japan1_6,
kRSJapanese,
kRSKangXi,
kRSKanWa,
kRSKorean,
kRSUnicode,
kSBGY,
kTotalStrokes,
kXHC1983,
kTGHZ2013,
kSimplifiedVariant,
kTraditionalVariant,
kSpoofingVariant,
kZVariant,
kSemanticVariant,
kSpecializedSemanticVariant,
UnhnVariantSource,
SemanticVariantSource
)
def import_char(c, char):  # NOQA: C901
    """Copy one parsed UNIHAN character record onto its ORM relationships.

    :param c: ORM character row; each ``c.kXxx`` attribute is a
        relationship collection that table instances are appended to.
    :param char: mapping of UNIHAN property names to their parsed values
        (shapes as produced by the UNIHAN expansion — TODO confirm against
        the parser that feeds this function).

    Repeated structures (radical-stroke, IRG sources, variants) are
    driven by field tables near the bottom; the irregular properties are
    handled one ``if`` block at a time.
    """
    # --- simple one-value-per-row properties ---------------------------
    if 'kDefinition' in char:
        for d in char['kDefinition']:
            c.kDefinition.append(kDefinition(definition=d))
    if 'kCantonese' in char:
        for d in char['kCantonese']:
            c.kCantonese.append(kCantonese(definition=d))
    if 'kCCCII' in char:
        for d in char['kCCCII']:
            c.kCCCII.append(kCCCII(hex=d))
    # kMandarin / kTotalStrokes carry a Simplified/Traditional pair.
    if 'kMandarin' in char:
        d = char['kMandarin']
        c.kMandarin.append(kMandarin(hans=d['zh-Hans'], hant=d['zh-Hant']))
    if 'kTotalStrokes' in char:
        d = char['kTotalStrokes']
        c.kTotalStrokes.append(kTotalStrokes(hans=d['zh-Hans'], hant=d['zh-Hant']))
    # --- properties with nested locations and/or readings --------------
    # One kHanyuPinyin row per entry, each with its own locations/readings.
    if 'kHanyuPinyin' in char:
        for d in char['kHanyuPinyin']:
            k = kHanyuPinyin()
            for loc in d['locations']:
                k.locations.append(
                    UnhnLocation(
                        volume=loc['volume'],
                        page=loc['page'],
                        character=loc['character'],
                        virtual=loc['virtual'],
                    )
                )
            for reading in d['readings']:
                k.readings.append(UnhnReading(reading=reading))
            c.kHanyuPinyin.append(k)
    # Single kHanYu row collecting all locations (unlike
    # kIRGHanyuDaZidian below, which makes one row per entry).
    if 'kHanYu' in char:
        k = kHanYu()
        for d in char['kHanYu']:
            k.locations.append(
                UnhnLocation(
                    volume=d['volume'],
                    page=d['page'],
                    character=d['character'],
                    virtual=d['virtual'],
                )
            )
        c.kHanYu.append(k)
    if 'kIRGHanyuDaZidian' in char:
        for d in char['kIRGHanyuDaZidian']:
            k = kIRGHanyuDaZidian()
            k.locations.append(
                UnhnLocation(
                    volume=d['volume'],
                    page=d['page'],
                    character=d['character'],
                    virtual=d['virtual'],
                )
            )
            c.kIRGHanyuDaZidian.append(k)
    # kXHC1983 uses its own location table (entry/substituted columns).
    if 'kXHC1983' in char:
        for d in char['kXHC1983']:
            k = kXHC1983()
            for loc in d['locations']:
                k.locations.append(
                    UnhnLocationkXHC1983(
                        page=loc['page'],
                        character=loc['character'],
                        entry=loc['entry'],
                        substituted=loc['substituted'],
                    )
                )
            k.readings.append(UnhnReading(reading=d['reading']))
            c.kXHC1983.append(k)
    if 'kTGHZ2013' in char:
        for d in char['kTGHZ2013']:
            k = kTGHZ2013()
            for loc in d['locations']:
                k.locations.append(
                    UnhnLocation(
                        page=loc['page'],
                        character=loc['character'],
                    )
                )
            k.readings.append(UnhnReading(reading=d['reading']))
            c.kTGHZ2013.append(k)
    if 'kCheungBauer' in char:
        for d in char['kCheungBauer']:
            k = kCheungBauer(
                radical=d['radical'], strokes=d['strokes'], cangjie=d['cangjie']
            )
            for reading in d['readings']:
                k.readings.append(UnhnReading(reading=reading))
            c.kCheungBauer.append(k)
    if 'kRSAdobe_Japan1_6' in char:
        for d in char['kRSAdobe_Japan1_6']:
            c.kRSAdobe_Japan1_6.append(
                kRSAdobe_Japan1_6(
                    type=d['type'],
                    cid=d['cid'],
                    radical=d['radical'],
                    strokes=d['strokes'],
                    # note: hyphenated key in the source data
                    strokes_residue=d['strokes-residue'],
                )
            )
    if 'kCihaiT' in char:
        for d in char['kCihaiT']:
            c.kCihaiT.append(
                kCihaiT(page=d['page'], row=d['row'], character=d['character'])
            )
    if 'kIICore' in char:
        for d in char['kIICore']:
            k = kIICore(priority=d['priority'])
            for s in d['sources']:
                k.sources.append(kIICoreSource(source=s))
            c.kIICore.append(k)
    if 'kUnihanCore2020' in char:
        for s in char['kUnihanCore2020']:
            c.kUnihanCore2020.append(kUnihanCore2020(source=s))
    # kDaeJaweon is a single entry (not a list), unlike kIRGDaeJaweon.
    if 'kDaeJaweon' in char:
        k = kDaeJaweon()
        d = char['kDaeJaweon']
        k.locations.append(
            UnhnLocation(page=d['page'], character=d['character'], virtual=d['virtual'])
        )
        c.kDaeJaweon.append(k)
    # Single row collecting all locations for each of these two.
    if 'kIRGKangXi' in char:
        k = kIRGKangXi()
        for d in char['kIRGKangXi']:
            k.locations.append(
                UnhnLocation(
                    page=d['page'], character=d['character'], virtual=d['virtual']
                )
            )
        c.kIRGKangXi.append(k)
    if 'kIRGDaeJaweon' in char:
        k = kIRGDaeJaweon()
        for d in char['kIRGDaeJaweon']:
            k.locations.append(
                UnhnLocation(
                    page=d['page'], character=d['character'], virtual=d['virtual']
                )
            )
        c.kIRGDaeJaweon.append(k)
    if 'kFenn' in char:
        for d in char['kFenn']:
            c.kFenn.append(kFenn(phonetic=d['phonetic'], frequency=d['frequency']))
    if 'kHanyuPinlu' in char:
        for d in char['kHanyuPinlu']:
            c.kHanyuPinlu.append(
                kHanyuPinlu(phonetic=d['phonetic'], frequency=d['frequency'])
            )
    if 'kHDZRadBreak' in char:
        d = char['kHDZRadBreak']
        k = kHDZRadBreak(radical=d['radical'], ucn=d['ucn'])
        k.locations.append(
            UnhnLocation(
                volume=d['location']['volume'],
                page=d['location']['page'],
                character=d['location']['character'],
                virtual=d['location']['virtual'],
            )
        )
        c.kHDZRadBreak.append(k)
    if 'kSBGY' in char:
        for d in char['kSBGY']:
            k = kSBGY()
            k.locations.append(UnhnLocation(page=d['page'], character=d['character']))
            c.kSBGY.append(k)
    # --- table-driven groups of structurally identical properties ------
    rs_fields = (  # radical-stroke fields, since they're the same structure
        ('kRSUnicode', kRSUnicode, c.kRSUnicode),
        ('kRSJapanese', kRSJapanese, c.kRSJapanese),
        ('kRSKangXi', kRSKangXi, c.kRSKangXi),
        ('kRSKanWa', kRSKanWa, c.kRSKanWa),
        ('kRSKorean', kRSKorean, c.kRSKorean),
    )
    for f, model, column in rs_fields:
        if f in char:
            for d in char[f]:
                k = model(
                    radical=d['radical'],
                    strokes=d['strokes'],
                    simplified=d['simplified'],
                )
                column.append(k)
    irg_fields = (  # IRG, since they're the same structure
        ('kIRG_GSource', kIRG_GSource, c.kIRG_GSource),
        ('kIRG_HSource', kIRG_HSource, c.kIRG_HSource),
        ('kIRG_JSource', kIRG_JSource, c.kIRG_JSource),
        ('kIRG_KPSource', kIRG_KPSource, c.kIRG_KPSource),
        ('kIRG_KSource', kIRG_KSource, c.kIRG_KSource),
        ('kIRG_MSource', kIRG_MSource, c.kIRG_MSource),
        ('kIRG_TSource', kIRG_TSource, c.kIRG_TSource),
        ('kIRG_USource', kIRG_USource, c.kIRG_USource),
        ('kIRG_VSource', kIRG_VSource, c.kIRG_VSource),
        ('kIRG_SSource', kIRG_SSource, c.kIRG_SSource),
        ('kIRG_UKSource', kIRG_UKSource, c.kIRG_UKSource),
    )
    for f, model, column in irg_fields:
        if f in char:
            # IRG sources are single-valued (not a list).
            d = char[f]
            k = model(source=d['source'], location=d['location'])
            column.append(k)
    if 'kGSR' in char:
        for d in char['kGSR']:
            k = kGSR(set=d['set'], letter=d['letter'], apostrophe=d['apostrophe'])
            c.kGSR.append(k)
    if 'kCheungBauerIndex' in char:
        d = char['kCheungBauerIndex']
        k = kCheungBauerIndex()
        k.locations.append(
            UnhnLocation(
                page=d['location']['page'], character=d['location']['character']
            )
        )
        c.kCheungBauerIndex.append(k)
    if 'kFennIndex' in char:
        d = char['kFennIndex']
        k = kFennIndex()
        k.locations.append(
            UnhnLocation(
                page=d['location']['page'], character=d['location']['character']
            )
        )
        c.kFennIndex.append(k)
    # Variants that are plain UCN strings.
    simple_variant_fields = (
        ('kSimplifiedVariant', kSimplifiedVariant, c.kSimplifiedVariant),
        ('kTraditionalVariant', kTraditionalVariant, c.kTraditionalVariant),
        ('kSpoofingVariant', kSpoofingVariant, c.kSpoofingVariant),
    )
    for f, model, column in simple_variant_fields:
        if f in char:
            for d in char[f]:
                column.append(model(ucn=d))
    # Variants that also carry per-variant source annotations.
    sourced_variant_fields = (
        ('kZVariant', kZVariant, c.kZVariant, UnhnVariantSource),
        ('kSemanticVariant', kSemanticVariant, c.kSemanticVariant, SemanticVariantSource),
        ('kSpecializedSemanticVariant', kSpecializedSemanticVariant, c.kSpecializedSemanticVariant, SemanticVariantSource),
    )
    for f, model, column, source_model in sourced_variant_fields:
        if f in char:
            for d in char[f]:
                m = model(ucn=d['ucn'])
                for s in d.get('sources', []):
                    m.sources.append(source_model(**s))
                column.append(m)
| [
"unihan_db.tables.kCihaiT",
"unihan_db.tables.kGSR",
"unihan_db.tables.kFennIndex",
"unihan_db.tables.kIRGKangXi",
"unihan_db.tables.kHanyuPinlu",
"unihan_db.tables.kHanyuPinyin",
"unihan_db.tables.kUnihanCore2020",
"unihan_db.tables.kXHC1983",
"unihan_db.tables.kIICoreSource",
"unihan_db.tables.k... | [((2286, 2294), 'unihan_db.tables.kHanYu', 'kHanYu', ([], {}), '()\n', (2292, 2294), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((5346, 5358), 'unihan_db.tables.kDaeJaweon', 'kDaeJaweon', ([], {}), '()\n', (5356, 5358), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((5590, 5602), 'unihan_db.tables.kIRGKangXi', 'kIRGKangXi', ([], {}), '()\n', (5600, 5602), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, 
kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((5893, 5908), 'unihan_db.tables.kIRGDaeJaweon', 'kIRGDaeJaweon', ([], {}), '()\n', (5906, 5908), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((6573, 6621), 'unihan_db.tables.kHDZRadBreak', 'kHDZRadBreak', ([], {'radical': "d['radical']", 'ucn': "d['ucn']"}), "(radical=d['radical'], ucn=d['ucn'])\n", (6585, 6621), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, 
kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((8879, 8898), 'unihan_db.tables.kCheungBauerIndex', 'kCheungBauerIndex', ([], {}), '()\n', (8896, 8898), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((9169, 9181), 'unihan_db.tables.kFennIndex', 'kFennIndex', ([], {}), '()\n', (9179, 9181), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, 
kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((1466, 1513), 'unihan_db.tables.kMandarin', 'kMandarin', ([], {'hans': "d['zh-Hans']", 'hant': "d['zh-Hant']"}), "(hans=d['zh-Hans'], hant=d['zh-Hant'])\n", (1475, 1513), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((1613, 1664), 'unihan_db.tables.kTotalStrokes', 'kTotalStrokes', ([], {'hans': "d['zh-Hans']", 'hant': "d['zh-Hant']"}), "(hans=d['zh-Hans'], hant=d['zh-Hant'])\n", (1626, 1664), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, 
kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((1753, 1767), 'unihan_db.tables.kHanyuPinyin', 'kHanyuPinyin', ([], {}), '()\n', (1765, 1767), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((2710, 2729), 'unihan_db.tables.kIRGHanyuDaZidian', 'kIRGHanyuDaZidian', ([], {}), '()\n', (2727, 2729), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((3109, 3119), 'unihan_db.tables.kXHC1983', 
'kXHC1983', ([], {}), '()\n', (3117, 3119), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((3650, 3661), 'unihan_db.tables.kTGHZ2013', 'kTGHZ2013', ([], {}), '()\n', (3659, 3661), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((4091, 4169), 'unihan_db.tables.kCheungBauer', 'kCheungBauer', ([], {'radical': "d['radical']", 'strokes': "d['strokes']", 'cangjie': "d['cangjie']"}), "(radical=d['radical'], strokes=d['strokes'], cangjie=d['cangjie'])\n", (4103, 4169), False, 'from unihan_db.tables import UnhnLocation, 
UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((5006, 5037), 'unihan_db.tables.kIICore', 'kIICore', ([], {'priority': "d['priority']"}), "(priority=d['priority'])\n", (5013, 5037), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((5430, 5506), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'page': "d['page']", 'character': "d['character']", 'virtual': "d['virtual']"}), "(page=d['page'], character=d['character'], virtual=d['virtual'])\n", (5442, 5506), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, 
kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((6662, 6810), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'volume': "d['location']['volume']", 'page': "d['location']['page']", 'character': "d['location']['character']", 'virtual': "d['location']['virtual']"}), "(volume=d['location']['volume'], page=d['location']['page'],\n character=d['location']['character'], virtual=d['location']['virtual'])\n", (6674, 6810), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((7002, 7009), 'unihan_db.tables.kSBGY', 'kSBGY', ([], {}), '()\n', (7007, 7009), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, 
UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((8696, 8762), 'unihan_db.tables.kGSR', 'kGSR', ([], {'set': "d['set']", 'letter': "d['letter']", 'apostrophe': "d['apostrophe']"}), "(set=d['set'], letter=d['letter'], apostrophe=d['apostrophe'])\n", (8700, 8762), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((8939, 9017), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'page': "d['location']['page']", 'character': "d['location']['character']"}), "(page=d['location']['page'], character=d['location']['character'])\n", (8951, 9017), False, 'from unihan_db.tables import UnhnLocation, 
UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((9222, 9300), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'page': "d['location']['page']", 'character': "d['location']['character']"}), "(page=d['location']['page'], character=d['location']['character'])\n", (9234, 9300), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((1129, 1154), 'unihan_db.tables.kDefinition', 'kDefinition', ([], {'definition': 'd'}), '(definition=d)\n', (1140, 1154), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, 
kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((1254, 1278), 'unihan_db.tables.kCantonese', 'kCantonese', ([], {'definition': 'd'}), '(definition=d)\n', (1264, 1278), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((1366, 1379), 'unihan_db.tables.kCCCII', 'kCCCII', ([], {'hex': 'd'}), '(hex=d)\n', (1372, 1379), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, 
kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((2376, 2476), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'volume': "d['volume']", 'page': "d['page']", 'character': "d['character']", 'virtual': "d['virtual']"}), "(volume=d['volume'], page=d['page'], character=d['character'],\n virtual=d['virtual'])\n", (2388, 2476), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((2778, 2878), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'volume': "d['volume']", 'page': "d['page']", 'character': "d['character']", 'virtual': "d['virtual']"}), "(volume=d['volume'], page=d['page'], character=d['character'],\n virtual=d['virtual'])\n", (2790, 2878), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, 
kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((3501, 3534), 'unihan_db.tables.UnhnReading', 'UnhnReading', ([], {'reading': "d['reading']"}), "(reading=d['reading'])\n", (3512, 3534), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((3935, 3968), 'unihan_db.tables.UnhnReading', 'UnhnReading', ([], {'reading': "d['reading']"}), "(reading=d['reading'])\n", (3946, 3968), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, 
kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((4481, 4614), 'unihan_db.tables.kRSAdobe_Japan1_6', 'kRSAdobe_Japan1_6', ([], {'type': "d['type']", 'cid': "d['cid']", 'radical': "d['radical']", 'strokes': "d['strokes']", 'strokes_residue': "d['strokes-residue']"}), "(type=d['type'], cid=d['cid'], radical=d['radical'],\n strokes=d['strokes'], strokes_residue=d['strokes-residue'])\n", (4498, 4614), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((4851, 4914), 'unihan_db.tables.kCihaiT', 'kCihaiT', ([], {'page': "d['page']", 'row': "d['row']", 'character': "d['character']"}), "(page=d['page'], row=d['row'], character=d['character'])\n", (4858, 4914), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, 
kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((5277, 5302), 'unihan_db.tables.kUnihanCore2020', 'kUnihanCore2020', ([], {'source': 's'}), '(source=s)\n', (5292, 5302), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((5688, 5764), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'page': "d['page']", 'character': "d['character']", 'virtual': "d['virtual']"}), "(page=d['page'], character=d['character'], virtual=d['virtual'])\n", (5700, 5764), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, 
kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((5997, 6073), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'page': "d['page']", 'character': "d['character']", 'virtual': "d['virtual']"}), "(page=d['page'], character=d['character'], virtual=d['virtual'])\n", (6009, 6073), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((6244, 6299), 'unihan_db.tables.kFenn', 'kFenn', ([], {'phonetic': "d['phonetic']", 'frequency': "d['frequency']"}), "(phonetic=d['phonetic'], frequency=d['frequency'])\n", (6249, 6299), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, 
kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((6420, 6481), 'unihan_db.tables.kHanyuPinlu', 'kHanyuPinlu', ([], {'phonetic': "d['phonetic']", 'frequency': "d['frequency']"}), "(phonetic=d['phonetic'], frequency=d['frequency'])\n", (6431, 6481), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((7041, 7095), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'page': "d['page']", 'character': "d['character']"}), "(page=d['page'], character=d['character'])\n", (7053, 7095), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, 
kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((1863, 1972), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'volume': "loc['volume']", 'page': "loc['page']", 'character': "loc['character']", 'virtual': "loc['virtual']"}), "(volume=loc['volume'], page=loc['page'], character=loc[\n 'character'], virtual=loc['virtual'])\n", (1875, 1972), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((2181, 2209), 'unihan_db.tables.UnhnReading', 'UnhnReading', ([], {'reading': 'reading'}), '(reading=reading)\n', (2192, 2209), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, 
kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((3215, 3338), 'unihan_db.tables.UnhnLocationkXHC1983', 'UnhnLocationkXHC1983', ([], {'page': "loc['page']", 'character': "loc['character']", 'entry': "loc['entry']", 'substituted': "loc['substituted']"}), "(page=loc['page'], character=loc['character'], entry=\n loc['entry'], substituted=loc['substituted'])\n", (3235, 3338), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((3757, 3815), 'unihan_db.tables.UnhnLocation', 'UnhnLocation', ([], {'page': "loc['page']", 'character': "loc['character']"}), "(page=loc['page'], character=loc['character'])\n", (3769, 3815), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, 
kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((4277, 4305), 'unihan_db.tables.UnhnReading', 'UnhnReading', ([], {'reading': 'reading'}), '(reading=reading)\n', (4288, 4305), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n'), ((5106, 5129), 'unihan_db.tables.kIICoreSource', 'kIICoreSource', ([], {'source': 's'}), '(source=s)\n', (5119, 5129), False, 'from unihan_db.tables import UnhnLocation, UnhnLocationkXHC1983, UnhnReading, kCantonese, kCCCII, kCheungBauer, kCheungBauerIndex, kCihaiT, kDaeJaweon, kDefinition, kFenn, kFennIndex, kGSR, kHanYu, kHanyuPinlu, kHanyuPinyin, kHDZRadBreak, kIICore, kIICoreSource, kUnihanCore2020, kIRG_GSource, kIRG_HSource, kIRG_JSource, kIRG_KPSource, kIRG_KSource, kIRG_MSource, kIRG_TSource, kIRG_USource, 
kIRG_VSource, kIRG_SSource, kIRG_UKSource, kIRGDaeJaweon, kIRGHanyuDaZidian, kIRGKangXi, kMandarin, kRSAdobe_Japan1_6, kRSJapanese, kRSKangXi, kRSKanWa, kRSKorean, kRSUnicode, kSBGY, kTotalStrokes, kXHC1983, kTGHZ2013, kSimplifiedVariant, kTraditionalVariant, kSpoofingVariant, kZVariant, kSemanticVariant, kSpecializedSemanticVariant, UnhnVariantSource, SemanticVariantSource\n')] |
from __future__ import absolute_import, division, print_function
import iotbx.pdb
import mmtbx.model
from mmtbx.building.cablam_idealization import cablam_idealization, master_phil
import sys
import libtbx.load_env
# Six-residue fragment (GLY A 318 - VAL A 323, backbone plus side chains)
# used as the input model by both exercises below.
pdb_str = """\
ATOM 2327 N GLY A 318 169.195 115.930 63.690 1.00216.32 N
ATOM 2328 CA GLY A 318 169.975 114.907 64.348 1.00193.16 C
ATOM 2329 C GLY A 318 169.246 113.598 64.539 1.00197.19 C
ATOM 2330 O GLY A 318 168.148 113.399 64.016 1.00193.16 O
ATOM 2331 N GLN A 319 169.849 112.700 65.308 1.00184.03 N
ATOM 2332 CA GLN A 319 169.232 111.415 65.589 1.00195.95 C
ATOM 2333 C GLN A 319 169.246 111.137 67.080 1.00193.64 C
ATOM 2334 O GLN A 319 168.185 111.047 67.708 1.00229.34 O
ATOM 2335 CB GLN A 319 169.941 110.308 64.822 1.00201.09 C
ATOM 2336 CG GLN A 319 169.719 110.407 63.336 1.00236.37 C
ATOM 2337 CD GLN A 319 168.255 110.312 62.966 1.00254.36 C
ATOM 2338 OE1 GLN A 319 167.506 109.520 63.536 1.00280.71 O
ATOM 2339 NE2 GLN A 319 167.836 111.126 62.007 1.00220.80 N
ATOM 2340 N ALA A 320 170.446 111.006 67.646 1.00140.99 N
ATOM 2341 CA ALA A 320 170.595 110.942 69.090 1.00197.51 C
ATOM 2342 C ALA A 320 169.906 109.734 69.704 1.00203.65 C
ATOM 2343 O ALA A 320 168.789 109.863 70.203 1.00242.54 O
ATOM 2344 CB ALA A 320 170.069 112.226 69.727 1.00240.45 C
ATOM 2345 N LYS A 321 170.554 108.566 69.662 1.00164.18 N
ATOM 2346 CA LYS A 321 169.963 107.306 70.104 1.00134.95 C
ATOM 2347 C LYS A 321 169.103 107.477 71.344 1.00134.95 C
ATOM 2348 O LYS A 321 167.904 107.194 71.302 1.00134.95 O
ATOM 2349 CB LYS A 321 171.040 106.265 70.421 1.00145.47 C
ATOM 2350 CG LYS A 321 171.950 105.868 69.279 1.00164.10 C
ATOM 2351 CD LYS A 321 171.197 105.229 68.138 1.00145.47 C
ATOM 2352 CE LYS A 321 172.173 104.777 67.070 1.00145.80 C
ATOM 2353 NZ LYS A 321 171.487 104.149 65.918 1.00145.93 N
ATOM 2354 N ARG A 322 169.682 107.900 72.454 1.00185.62 N
ATOM 2355 CA ARG A 322 168.888 108.089 73.652 1.00142.57 C
ATOM 2356 C ARG A 322 169.546 109.124 74.551 1.00128.56 C
ATOM 2357 O ARG A 322 170.758 109.341 74.474 1.00128.56 O
ATOM 2358 CB ARG A 322 168.719 106.769 74.369 1.00115.16 C
ATOM 2359 CG ARG A 322 167.669 106.817 75.430 1.00133.21 C
ATOM 2360 CD ARG A 322 167.578 105.605 76.270 1.00149.50 C
ATOM 2361 NE ARG A 322 168.665 105.482 77.219 1.00115.16 N
ATOM 2362 CZ ARG A 322 168.912 104.370 77.883 1.00115.16 C
ATOM 2363 NH1 ARG A 322 168.133 103.302 77.715 1.00116.90 N
ATOM 2364 NH2 ARG A 322 169.915 104.340 78.745 1.00115.16 N
ATOM 2365 N VAL A 323 168.740 109.783 75.382 1.00121.29 N
ATOM 2366 CA VAL A 323 169.198 110.884 76.220 1.00121.29 C
ATOM 2367 C VAL A 323 168.668 110.712 77.632 1.00139.86 C
ATOM 2368 O VAL A 323 167.480 110.430 77.835 1.00121.29 O
ATOM 2369 CB VAL A 323 168.795 112.246 75.654 1.00117.56 C
ATOM 2370 CG1 VAL A 323 168.912 113.310 76.710 1.00151.93 C
ATOM 2371 CG2 VAL A 323 169.721 112.603 74.534 1.00121.07 C
"""
def exercise_no_sidechains(prefix="tst_one_resid_rotation_no_sidechains"):
  """Run cablam idealization on a main-chain-only copy of the test model.

  The starting model is written to '<prefix>_start.pdb' for inspection,
  then side chains are stripped via an atom-name selection before the
  idealization step is exercised.
  """
  inp = iotbx.pdb.input(lines=pdb_str.split('\n'), source_info=None)
  model = mmtbx.model.manager(model_input=inp)
  with open("%s_start.pdb" % prefix, 'w') as out_file:
    out_file.write(model.model_as_pdb())
  # Keep backbone atoms only (no side chains).
  main_chain = model.selection("name N or name CA or name C or name O")
  model = model.select(main_chain)
  ci = cablam_idealization(
    model=model,
    params=master_phil.extract().cablam_idealization,
    log=sys.stdout)
  pdb_txt = model.model_as_pdb()
def exercise_yes_sidechains(prefix="tst_one_resid_rotation_yes_sidechains"):
  """Run cablam idealization on the full test model, side chains included.

  The starting model is written to '<prefix>_start.pdb' for inspection.
  """
  inp = iotbx.pdb.input(lines=pdb_str.split('\n'), source_info=None)
  model = mmtbx.model.manager(model_input=inp)
  with open("%s_start.pdb" % prefix, 'w') as out_file:
    out_file.write(model.model_as_pdb())
  ci = cablam_idealization(
    model=model,
    params=master_phil.extract().cablam_idealization,
    log=sys.stdout)
  pdb_txt = model.model_as_pdb()
if __name__ == '__main__':
  # cablam idealization relies on the 'probe' module; skip gracefully
  # when it is not configured in this build.
  if libtbx.env.has_module(name="probe"):
    exercise_no_sidechains()
    exercise_yes_sidechains()
  else:
    print("Skipping: probe not configured")
| [
"mmtbx.building.cablam_idealization.master_phil.extract"
] | [((4220, 4241), 'mmtbx.building.cablam_idealization.master_phil.extract', 'master_phil.extract', ([], {}), '()\n', (4239, 4241), False, 'from mmtbx.building.cablam_idealization import cablam_idealization, master_phil\n'), ((4654, 4675), 'mmtbx.building.cablam_idealization.master_phil.extract', 'master_phil.extract', ([], {}), '()\n', (4673, 4675), False, 'from mmtbx.building.cablam_idealization import cablam_idealization, master_phil\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed May 25 16:02:58 2022
@author: erri
"""
import os
import numpy as np
import math
from morph_quantities_func_v2 import morph_quantities
import matplotlib.pyplot as plt
# SINGLE RUN NAME
run = 'q07_1'
DoD_name = 'DoD_s1-s0_filt_nozero_rst.txt'
# Step between surveys
DoD_delta = 1
# Base length in terms of columns. If the windows dimensions are channel width
# multiples, the windows_length_base is 12 columns
windows_length_base = 12
window_mode = 1
'''
windows_mode:
0 = fixed windows (all the channel)
1 = expanding window
2 = floating fixed windows (WxW, Wx2W, Wx3W, ...) without overlapping
3 = floating fixed windows (WxW, Wx2W, Wx3W, ...) with overlapping
'''
plot_mode = 2
'''
plot_mode:
1 = only summary plot
2 = all single DoD plot
'''
# Parameters
# Survey pixel dimension
px_x = 50 # [mm]
px_y = 5 # [mm]
W = 0.6 # Width [m]
d50 = 0.001
NaN = -999
# setup working directory and DEM's name
home_dir = os.getcwd()
# Source DoDs folder
DoDs_folder = os.path.join(home_dir, 'DoDs', 'DoD_'+run)
DoDs_name_array = [] # List the file's name of the DoDs with step of delta_step
# Collect the DoD file names whose survey step matches DoD_delta.
# File names follow the pattern 'DoD_s<i>-s<j>_filt_nozero_rst.txt', so the
# characters at indices 5 and 8 are the two survey digits; int() parses them
# directly and avoids the needless (and unsafe) eval() on filename text.
for f in sorted(os.listdir(DoDs_folder)):
    if f.endswith('_filt_nozero_rst.txt') and f.startswith('DoD_'):
        delta = int(f[5]) - int(f[8])  # survey step of this DoD
        if delta == DoD_delta:
            DoDs_name_array = np.append(DoDs_name_array, f)
# Initialize overall arrays (accumulators across all DoDs; filling happens
# later in the script -- not visible in this portion)
dep_vol_w_array_all = []  # deposition volumes, all DoDs
sco_vol_w_array_all = []  # scour volumes, all DoDs
# Loop over the DoDs with step of delta_step
for f in DoDs_name_array:
    DoD_name = f
    print(f)
    # Load the (already filtered) DoD raster as a 2-D array
    DoD_path = os.path.join(DoDs_folder,DoD_name)
    DoD_filt_nozero = np.loadtxt(DoD_path, delimiter='\t')
    # DoD length
    DoD_length = DoD_filt_nozero.shape[1]*px_x/1000 # DoD length [m]
    dim_x = DoD_filt_nozero.shape[1]
    # Initialize array
    # Define total volume matrix, Deposition matrix and Scour matrix
    # (NaN and the -999 no-data marker are both zeroed out)
    DoD_vol = np.where(np.isnan(DoD_filt_nozero), 0, DoD_filt_nozero) # Total volume matrix
    DoD_vol = np.where(DoD_vol==NaN, 0, DoD_vol)
    dep_DoD = (DoD_vol>0)*DoD_vol # DoD of only deposition data
    sco_DoD = (DoD_vol<0)*DoD_vol # DoD of only scour data
    # Active pixel matrix:
    act_px_matrix = np.where(DoD_vol!=0, 1, 0) # Active pixel matrix, both scour and deposition
    act_px_matrix_dep = np.where(dep_DoD != 0, 1, 0) # Active deposition matrix
    act_px_matrix_sco = np.where(sco_DoD != 0, 1, 0) # Active scour matrix
    # Initialize array for each window dimension
    ###################################################################
    # MOVING WINDOWS ANALYSIS
    ###################################################################
    array = DoD_filt_nozero
    # NOTE(review): W (channel width in meters, set above) is re-bound here
    # to the window base length in *columns*; a distinct name would be safer.
    W=windows_length_base
    mean_array_tot = []
    std_array_tot= []
    # Stack of [start_col, end_col] window bounds; the [0,0] row is a
    # placeholder removed after stacking.
    window_boundary = np.array([0,0])
    x_data_tot=[]
    tot_vol_array=[] # Tot volume
    tot_vol_mean_array=[]
    tot_vol_std_array=[]
    sum_vol_array=[] # Sum of scour and deposition volume
    dep_vol_array=[] # Deposition volume
    sco_vol_array=[] # Scour volume
    morph_act_area_array=[] # Total active area array
    morph_act_area_dep_array=[] # Deposition active area array
    morph_act_area_sco_array=[] # Scour active area array
    act_width_mean_array=[] # Total active width mean array
    act_width_mean_dep_array=[] # Deposition active width mean array
    act_width_mean_sco_array=[] # Scour active width mean array
    if window_mode == 1:
        # With overlapping: for each window length w*W, slide the window one
        # column at a time over the DoD and collect the morphological
        # quantities of every position.
        for w in range(1, int(math.floor(array.shape[1]/W))+1): # W*w is the dimension of every possible window
            # Initialize arrays that stock data for each window position
            x_data=[]
            tot_vol_w_array = []
            sum_vol_w_array = []
            dep_vol_w_array = []
            sco_vol_w_array =[]
            morph_act_area_w_array = []
            morph_act_area_dep_w_array = []
            morph_act_area_sco_w_array = []
            act_width_mean_w_array = []
            act_width_mean_dep_w_array = []
            act_width_mean_sco_w_array = []
            act_thickness_w_array = []
            act_thickness_dep_w_array = []
            act_thickness_sco_w_array = []
            for i in range(0, array.shape[1]+1):
                if i+w*W <= array.shape[1]:
                    window = array[:, i:W*w+i]
                    boundary = np.array([i,W*w+i])
                    window_boundary = np.vstack((window_boundary, boundary))
                    x_data=np.append(x_data, w)
                    # Calculate morphological quantities
                    tot_vol, sum_vol, dep_vol, sco_vol, morph_act_area, morph_act_area_dep, morph_act_area_sco, act_width_mean, act_width_mean_dep, act_width_mean_sco, act_thickness, act_thickness_dep, act_thickness_sco = morph_quantities(window)
                    # Append single data to array
                    # For each window position the calculated parameters will be appended to _array
                    tot_vol_w_array=np.append(tot_vol_w_array, tot_vol)
                    sum_vol_w_array=np.append(sum_vol_w_array, sum_vol)
                    dep_vol_w_array=np.append(dep_vol_w_array, dep_vol)
                    sco_vol_w_array=np.append(sco_vol_w_array, sco_vol)
                    morph_act_area_w_array=np.append(morph_act_area_w_array, morph_act_area)
                    morph_act_area_dep_w_array=np.append(morph_act_area_dep_w_array, morph_act_area_dep)
                    morph_act_area_sco_w_array=np.append(morph_act_area_sco_w_array, morph_act_area_sco)
                    act_width_mean_w_array=np.append(act_width_mean_w_array, act_width_mean)
                    act_width_mean_dep_w_array=np.append(act_width_mean_dep_w_array, act_width_mean_dep)
                    act_width_mean_sco_w_array=np.append(act_width_mean_sco_w_array, act_width_mean_sco)
                    act_thickness_w_array=np.append(act_thickness_w_array, act_thickness)
                    act_thickness_dep_w_array=np.append(act_thickness_dep_w_array, act_thickness_dep)
                    act_thickness_sco_w_array=np.append(act_thickness_sco_w_array, act_thickness_sco)
            # For each window dimension w*W, reduce the per-position arrays
            x_data_tot=np.append(x_data_tot, np.nanmean(x_data)) # Append one value of x_data
            tot_vol_mean_array=np.append(tot_vol_mean_array, np.nanmean(tot_vol_w_array)) # Append the tot_vol_array mean
            tot_vol_std_array=np.append(tot_vol_std_array, np.nanstd(tot_vol_w_array)) # Append the tot_vol_array std
            # sum_vol_array=
            # dep_vol_array=
            # sco_vol_array=
            # morph_act_area_array=
            # morph_act_area_dep_array=
            # morph_act_area_sco_array=
            # act_width_mean_array=
            # act_width_mean_dep_array=
            # act_width_mean_sco_array=
        # Slice window boundaries array to delete [0,0] when initialized
        # NOTE(review): [1,:] keeps only the *first* stacked boundary row and
        # discards the rest; [1:,:] was most likely intended -- TODO confirm
        # against the downstream use of window_boundary.
        window_boundary = window_boundary[1,:]
    if window_mode == 2:
        # Without overlapping: tile the DoD with adjacent windows of length
        # w*W and collect only mean/std statistics per window.
        for w in range(1, int(math.floor(array.shape[1]/W))+1): # W*w is the dimension of every possible window
            mean_array = []
            std_array= []
            x_data=[]
            for i in range(0, array.shape[1]+1):
                if W*w*(i+1) <= array.shape[1]:
                    window = array[:, W*w*i:W*w*(i+1)]
                    boundary = np.array([W*w*i,W*w*(i+1)])
                    window_boundary = np.vstack((window_boundary, boundary))
                    mean = np.nanmean(window)
                    std = np.nanstd(window)
                    mean_array = np.append(mean_array, mean)
                    std_array = np.append(std_array, std)
                    x_data=np.append(x_data, w)
            mean_array_tot = np.append(mean_array_tot, np.nanmean(mean_array))
            std_array_tot= np.append(std_array_tot, np.nanstd(std_array)) #TODO check this
            x_data_tot=np.append(x_data_tot, np.nanmean(x_data))
        # Slice window boundaries array to delete [0,0] when initialized
        # NOTE(review): [1,:] keeps a single row; [1:,:] was likely intended.
        window_boundary = window_boundary[1,:]
    if window_mode == 3:
        # Increasing window dimension keeping still the upstream cross section
        mean_array = []
        std_array= []
        x_data=[]
        for i in range(0, array.shape[1]+1):
            if W*(i+1) <= array.shape[1]:
                window = array[:, 0:W*(i+1)]
                boundary = np.array([0,W*(i+1)])
                window_boundary = np.vstack((window_boundary, boundary))
                mean = np.nanmean(window)
                std = np.nanstd(window)
                mean_array = np.append(mean_array, mean)
                std_array = np.append(std_array, std)
                x_data=np.append(x_data, i)
        mean_array_tot = np.append(mean_array_tot, np.nanmean(mean_array))
        std_array_tot= np.append(std_array_tot, np.nanstd(std_array)) #TODO check this
        x_data_tot=np.append(x_data_tot, np.nanmean(x_data))
        # Slice window boundaries array to delete [0,0] when initialized
        # NOTE(review): [1,:] keeps a single row; [1:,:] was likely intended.
        window_boundary = window_boundary[1,:]
# # TODO Go on with this section
# if windows_mode == 1:
# # Define x_data for plots
# x_data = np.linspace(W,dim_x,math.floor(DoD_length/W))*px_x/1e03
# for n in range(1,math.floor(DoD_length/W)+1):
# w_cols = n*round(W/(px_x/1000)) # Window analysis length in number of columns
# w_len = round(n*W,1) # Window analysis lenght im meter [m]
# # Define total volume matrix, Deposition matrix and Scour matrix
# DoD_vol_w = DoD_vol[:,0:w_cols] # Total volume matrix
# dep_DoD_w = dep_DoD[:,0:w_cols] # DoD of only deposition data
# sco_DoD_w = sco_DoD[:,0:w_cols] # DoD of only scour data
# # Define active pixel matrix
# act_px_matrix_w = act_px_matrix[:,0:w_cols] # Active pixel matrix, both scour and deposition
# act_px_matrix_dep_w = act_px_matrix_dep[:,0:w_cols] # Active deposition matrix
# act_px_matrix_sco_w = act_px_matrix_sco[:,0:w_cols] # Active scour matrix
# # Calculate principal quantities:
# # Volumes
# tot_vol_w = np.sum(DoD_vol_w)*px_x*px_y/(W*w_len*d50*1e09)# Total volume as V/(L*W*d50) [-] considering negative sign for scour
# sum_vol_w = np.sum(np.abs(DoD_vol_w))*px_x*px_y/(W*w_len*d50*1e09) # Sum of scour and deposition volume as V/(L*W*d50) [-]
# dep_vol_w = np.sum(dep_DoD_w)*px_x*px_y/(W*w_len*d50*1e09) # Deposition volume as V/(L*W*d50) [-]
# sco_vol_w = np.sum(sco_DoD_w)*px_x*px_y/(W*w_len*d50*1e09) # Scour volume as V/(L*W*d50) [-]
# # Areas:
# morph_act_area_w = np.count_nonzero(act_px_matrix_w)*px_x*px_y/(W*w_len*1e06) # Active area both in terms of scour and deposition as A/(W*L) [-]
# morph_act_area_dep_w = np.count_nonzero(act_px_matrix_dep_w)*px_x*px_y/(W*w_len*1e06) # Active deposition area as A/(W*L) [-]
# morph_act_area_sco_w = np.count_nonzero(act_px_matrix_sco_w)*px_x*px_y/(W*w_len*1e06) # Active scour area as A/(W*L) [-]
# # Widths:
# act_width_mean_w = np.count_nonzero(act_px_matrix_w)*px_x*px_y/(W*w_len*1e06) # Total mean active width [%] - Wact/W
# act_width_mean_dep_w = np.count_nonzero(act_px_matrix_dep_w)*px_x*px_y/(W*w_len*1e06) # Deposition mean active width [%] - Wact/W
# act_width_mean_sco_w = np.count_nonzero(act_px_matrix_sco_w)*px_x*px_y/(W*w_len*1e06) # Scour mean active width [%] - Wact/W
# # Thicknesses:
# act_thickness_w = sum_vol_w/morph_act_area_w*(d50*1e03) # Total active thickness (abs(V_sco) + V_dep)/act_area [mm]
# act_thickness_dep_w = dep_vol_w/morph_act_area_dep_w*(d50*1e03) # Deposition active thickness V_dep/act_area [mm]
# act_thickness_sco_w = sco_vol_w/act_width_mean_sco_w*(d50*1e03) # Scour active thickness V_sco/act_area [mm]
# # Append all values in arrays
# tot_vol_w_array = np.append(tot_vol_w_array, tot_vol_w)
# sum_vol_w_array = np.append(sum_vol_w_array, sum_vol_w)
# dep_vol_w_array = np.append(dep_vol_w_array, dep_vol_w)
# sco_vol_w_array = np.append(sco_vol_w_array, sco_vol_w)
# morph_act_area_w_array = np.append(morph_act_area_w_array, morph_act_area_w)
# morph_act_area_dep_w_array = np.append(morph_act_area_dep_w_array, morph_act_area_dep_w)
# morph_act_area_sco_w_array = np.append(morph_act_area_sco_w_array, morph_act_area_sco_w)
# act_width_mean_w_array = np.append(act_width_mean_w_array, act_width_mean_w)
# act_width_mean_dep_w_array = np.append(act_width_mean_dep_w_array, act_width_mean_dep_w)
# act_width_mean_sco_w_array = np.append(act_width_mean_sco_w_array, act_width_mean_sco_w)
# act_thickness_w_array = np.append(act_thickness_w_array, act_thickness_w)
# act_thickness_dep_w_array = np.append(act_thickness_dep_w_array, act_thickness_dep_w)
# act_thickness_sco_w_array = np.append(act_thickness_sco_w_array, act_thickness_sco_w)
# if plot_mode ==2:
# # Plots
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, dep_vol_w_array, '-', c='brown')
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Deposition volumes V/(W*L*d50) [-]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, sco_vol_w_array, '-', c='brown')
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Scour volumes V/(W*L*d50) [-]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, act_width_mean_w_array, '-', c='brown')
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Active width actW/W [-]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, act_thickness_w_array, '-', c='brown')
# axs.set_title(run)
# axs.set_xlabel('Longitudinal coordinate [m]')
# axs.set_ylabel('Active thickness [mm]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# # Fixed window without overlapping
# if windows_mode == 2:
# # Calculate the number of suitable windows in the channel length
# c_array = []
# W_cols = int(W/px_x*1e03)
# for i in range(1, round(dim_x/W_cols)):
# c = math.floor(dim_x/(W_cols*i))
# if c*W_cols*i<=dim_x:
# c_array = np.append(c_array, c)
# else:
# pass
# # Define the components of the slicing operation (exclude the first one)
# f_cols_array = [0,0]
# x_data = [] # X data for the plot
# n = 0 # Initialize variable count
# for m in range(0,len(c_array)):
# # m is the window dimension in columns
# n+=1
# for i in range(1,(math.floor(dim_x/(W_cols*(m+1)))+1)):
# f_cols = [round(W_cols*(m+1)*(i-1), 1), round(W_cols*(m+1)*(i),1)]
# f_cols_array = np.vstack((f_cols_array, f_cols))
# x_data = np.append(x_data, n)
# x_data = (x_data)*W
# # Resize f_cols_array
# f_cols_array = f_cols_array[1:]
# for p in range(0, f_cols_array.shape[0]): # Loop over all the available window
# w_len = (f_cols_array[p,1] - f_cols_array[p,0])*px_x/1e03 # Define the window lwgth
# # Define total volume matrix, Deposition matrix and Scour matrix
# DoD_vol_w = DoD_vol[:, f_cols_array[p,0]:f_cols_array[p,1]] # Total volume matrix
# dep_DoD_w = dep_DoD[:, f_cols_array[p,0]:f_cols_array[p,1]] # DoD of only deposition data
# sco_DoD_w = sco_DoD[:, f_cols_array[p,0]:f_cols_array[p,1]] # DoD of only scour data
# # Define active pixel matrix
# act_px_matrix_w = act_px_matrix[:, f_cols_array[p,0]:f_cols_array[p,1]] # Active pixel matrix, both scour and deposition
# act_px_matrix_dep_w = act_px_matrix_dep[:, f_cols_array[p,0]:f_cols_array[p,1]] # Active deposition matrix
# act_px_matrix_sco_w = act_px_matrix_sco[:, f_cols_array[p,0]:f_cols_array[p,1]] # Active scour matrix
# # Calculate principal quantities:
# # Volumes
# tot_vol_w = np.sum(DoD_vol_w)*px_x*px_y/(W*w_len*d50*1e09)# Total volume as V/(L*W*d50) [-] considering negative sign for scour
# sum_vol_w = np.sum(np.abs(DoD_vol_w))*px_x*px_y/(W*w_len*d50*1e09) # Sum of scour and deposition volume as V/(L*W*d50) [-]
# dep_vol_w = np.sum(dep_DoD_w)*px_x*px_y/(W*w_len*d50*1e09) # Deposition volume as V/(L*W*d50) [-]
# sco_vol_w = np.sum(sco_DoD_w)*px_x*px_y/(W*w_len*d50*1e09) # Scour volume as V/(L*W*d50) [-]
# # Areas:
# morph_act_area_w = np.count_nonzero(act_px_matrix_w)*px_x*px_y/(W*w_len*1e06) # Active area both in terms of scour and deposition as A/(W*L) [-]
# morph_act_area_dep_w = np.count_nonzero(act_px_matrix_dep_w)*px_x*px_y/(W*w_len*1e06) # Active deposition area as A/(W*L) [-]
# morph_act_area_sco_w = np.count_nonzero(act_px_matrix_sco_w)*px_x*px_y/(W*w_len*1e06) # Active scour area as A/(W*L) [-]
# # Widths:
# act_width_mean_w = np.count_nonzero(act_px_matrix_w)*px_x*px_y/(W*w_len*1e06) # Total mean active width [%] - Wact/W
# act_width_mean_dep_w = np.count_nonzero(act_px_matrix_dep_w)*px_x*px_y/(W*w_len*1e06) # Deposition mean active width [%] - Wact/W
# act_width_mean_sco_w = np.count_nonzero(act_px_matrix_sco_w)*px_x*px_y/(W*w_len*1e06) # Scour mean active width [%] - Wact/W
# # Thicknesses:
# act_thickness_w = sum_vol_w/morph_act_area_w*(d50*1e03) # Total active thickness (abs(V_sco) + V_dep)/act_area [mm]
# act_thickness_dep_w = dep_vol_w/morph_act_area_dep_w*(d50*1e03) # Deposition active thickness V_dep/act_area [mm]
# act_thickness_sco_w = sco_vol_w/act_width_mean_sco_w*(d50*1e03) # Scour active thickness V_sco/act_area [mm]
# # Append all values in arrays
# tot_vol_w_array = np.append(tot_vol_w_array, tot_vol_w)
# sum_vol_w_array = np.append(sum_vol_w_array, sum_vol_w)
# dep_vol_w_array = np.append(dep_vol_w_array, dep_vol_w)
# sco_vol_w_array = np.append(sco_vol_w_array, sco_vol_w)
# morph_act_area_w_array = np.append(morph_act_area_w_array, morph_act_area_w)
# morph_act_area_dep_w_array = np.append(morph_act_area_dep_w_array, morph_act_area_dep_w)
# morph_act_area_sco_w_array = np.append(morph_act_area_sco_w_array, morph_act_area_sco_w)
# act_width_mean_w_array = np.append(act_width_mean_w_array, act_width_mean_w)
# act_width_mean_dep_w_array = np.append(act_width_mean_dep_w_array, act_width_mean_dep_w)
# act_width_mean_sco_w_array = np.append(act_width_mean_sco_w_array, act_width_mean_sco_w)
# act_thickness_w_array = np.append(act_thickness_w_array, act_thickness_w)
# act_thickness_dep_w_array = np.append(act_thickness_dep_w_array, act_thickness_dep_w)
# act_thickness_sco_w_array = np.append(act_thickness_sco_w_array, act_thickness_sco_w)
# if plot_mode ==2:
# # Plots
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, dep_vol_w_array, 'o', c='brown')
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Deposition volumes V/(W*L*d50) [-]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, sco_vol_w_array, 'o', c='brown')
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Scour volumes V/(W*L*d50) [-]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, act_width_mean_w_array, 'o', c='brown')
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Active width actW/W [-]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, act_thickness_w_array, 'o', c='brown')
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Active thickness [mm]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# # Fixed window with overlapping
# if windows_mode == 3:
# # Calculate the number of suitable windows in the channel length
# c_array = []
# W_cols = int(W/px_x*1e03) # Minimum windows length WxW dimension in columns
# for i in range(1, math.floor(dim_x/W_cols)+1): # per each windows analysis WxWi
# c = dim_x - W_cols*i
# c_array = np.append(c_array, c) # Contains the number of windows for each dimension WxW*i
# else:
# pass
# f_cols_array = [0,0]
# x_data = []
# n = 0
# for m in range(1,int(dim_x/W_cols)+1):
# w_length = m*W_cols # Analysis windows length
# # print(w_length)
# n+=1
# for i in range(0,dim_x): # i is the lower limit of the analysis window
# low_lim = i # Analisys window lower limit
# upp_lim = i + w_length # Analisys window upper limit
# if upp_lim<=dim_x:
# # print(low_lim, upp_lim)
# # print(i+w_length)
# f_cols = [low_lim, upp_lim] # Lower and upper boundary of the analysis window
# f_cols_array = np.vstack((f_cols_array, f_cols))
# x_data = np.append(x_data, n)
# else:
# pass
# x_data = x_data*W
# # Resize f_cols_array
# f_cols_array = f_cols_array[1:]
# for p in range(0, f_cols_array.shape[0]):
# w_len = (f_cols_array[p,1] - f_cols_array[p,0])*px_x/1e03 # Define the window length
# # print()
# # print(f_cols_array[p,:])
# # print(w_len)
# # Define total volume matrix, Deposition matrix and Scour matrix
# DoD_vol_w = DoD_vol[:, f_cols_array[p,0]:f_cols_array[p,1]] # Total volume matrix
# dep_DoD_w = dep_DoD[:, f_cols_array[p,0]:f_cols_array[p,1]] # DoD of only deposition data
# sco_DoD_w = sco_DoD[:, f_cols_array[p,0]:f_cols_array[p,1]] # DoD of only scour data
# # Define active pixel matrix
# act_px_matrix_w = act_px_matrix[:, f_cols_array[p,0]:f_cols_array[p,1]] # Active pixel matrix, both scour and deposition
# act_px_matrix_dep_w = act_px_matrix_dep[:, f_cols_array[p,0]:f_cols_array[p,1]] # Active deposition matrix
# act_px_matrix_sco_w = act_px_matrix_sco[:, f_cols_array[p,0]:f_cols_array[p,1]] # Active scour matrix
# # Calculate principal quantities:
# # Volumes
# tot_vol_w = np.sum(DoD_vol_w)*px_x*px_y/(W*w_len*d50*1e09)# Total volume as V/(L*W*d50) [-] considering negative sign for scour
# sum_vol_w = np.sum(np.abs(DoD_vol_w))*px_x*px_y/(W*w_len*d50*1e09) # Sum of scour and deposition volume as V/(L*W*d50) [-]
# dep_vol_w = np.sum(dep_DoD_w)*px_x*px_y/(W*w_len*d50*1e09) # Deposition volume as V/(L*W*d50) [-]
# sco_vol_w = np.sum(sco_DoD_w)*px_x*px_y/(W*w_len*d50*1e09) # Scour volume as V/(L*W*d50) [-]
# # Areas:
# morph_act_area_w = np.count_nonzero(act_px_matrix_w)*px_x*px_y/(W*w_len*1e06) # Active area both in terms of scour and deposition as A/(W*L) [-]
# morph_act_area_dep_w = np.count_nonzero(act_px_matrix_dep_w)*px_x*px_y/(W*w_len*1e06) # Active deposition area as A/(W*L) [-]
# morph_act_area_sco_w = np.count_nonzero(act_px_matrix_sco_w)*px_x*px_y/(W*w_len*1e06) # Active scour area as A/(W*L) [-]
# # Widths:
# act_width_mean_w = np.count_nonzero(act_px_matrix_w)*px_x*px_y/(W*w_len*1e06) # Total mean active width [%] - Wact/W
# act_width_mean_dep_w = np.count_nonzero(act_px_matrix_dep_w)*px_x*px_y/(W*w_len*1e06) # Deposition mean active width [%] - Wact/W
# act_width_mean_sco_w = np.count_nonzero(act_px_matrix_sco_w)*px_x*px_y/(W*w_len*1e06) # Scour mean active width [%] - Wact/W
# # Thicknesses:
# act_thickness_w = sum_vol_w/morph_act_area_w*(d50*1e03) # Total active thickness (abs(V_sco) + V_dep)/act_area [mm]
# act_thickness_dep_w = dep_vol_w/morph_act_area_dep_w*(d50*1e03) # Deposition active thickness V_dep/act_area [mm]
# act_thickness_sco_w = sco_vol_w/act_width_mean_sco_w*(d50*1e03) # Scour active thickness V_sco/act_area [mm]
# # Append all values in arrays
# tot_vol_w_array = np.append(tot_vol_w_array, tot_vol_w)
# sum_vol_w_array = np.append(sum_vol_w_array, sum_vol_w)
# dep_vol_w_array = np.append(dep_vol_w_array, dep_vol_w)
# sco_vol_w_array = np.append(sco_vol_w_array, sco_vol_w)
# morph_act_area_w_array = np.append(morph_act_area_w_array, morph_act_area_w)
# morph_act_area_dep_w_array = np.append(morph_act_area_dep_w_array, morph_act_area_dep_w)
# morph_act_area_sco_w_array = np.append(morph_act_area_sco_w_array, morph_act_area_sco_w)
# act_width_mean_w_array = np.append(act_width_mean_w_array, act_width_mean_w)
# act_width_mean_dep_w_array = np.append(act_width_mean_dep_w_array, act_width_mean_dep_w)
# act_width_mean_sco_w_array = np.append(act_width_mean_sco_w_array, act_width_mean_sco_w)
# act_thickness_w_array = np.append(act_thickness_w_array, act_thickness_w)
# act_thickness_dep_w_array = np.append(act_thickness_dep_w_array, act_thickness_dep_w)
# act_thickness_sco_w_array = np.append(act_thickness_sco_w_array, act_thickness_sco_w)
# if plot_mode ==2:
# # Plots
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, dep_vol_w_array, 'o', c='brown', markersize=0.1)
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Deposition volumes V/(W*L*d50) [-]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, sco_vol_w_array, 'o', c='brown', markersize=0.1)
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Scour volumes V/(W*L*d50) [-]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, act_width_mean_w_array, 'o', c='brown', markersize=0.1)
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Active width actW/W [-]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# axs.plot(x_data, act_thickness_w_array, 'o', c='brown', markersize=0.1)
# axs.set_title(run)
# axs.set_xlabel('Window analysis length [m]')
# axs.set_ylabel('Active thickness [mm]')
# # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# plt.show()
# if f == DoDs_name_array[0]:
# dep_vol_w_array_all = np.transpose(np.array(dep_vol_w_array))
# sco_vol_w_array_all = np.transpose(np.array(sco_vol_w_array))
# else:
# pass
# dep_vol_w_array_all = np.vstack((dep_vol_w_array_all,dep_vol_w_array))
# dep_vol_mean = np.mean(dep_vol_w_array_all, axis=0)
# dep_vol_std = np.std(dep_vol_w_array_all, axis=0)
# sco_vol_w_array_all = np.vstack((sco_vol_w_array_all,sco_vol_w_array))
# sco_vol_mean = np.mean(sco_vol_w_array_all, axis=0)
# sco_vol_std = np.std(sco_vol_w_array_all, axis=0)
# if windows_mode==2:
# # Loop to define the windows to clusterize data
# array = [0]
# num=0
# for n in range(0,len(c_array)):
# num += c_array[n]
# array = np.append(array, num) # Clusterize window dimension
# dep_vol_mean = []
# sco_vol_mean = []
# dep_vol_std = []
# sco_vol_std = []
# x_data_full = x_data
# x_data = []
# for n in range(0, len(array)-1):
# x_data = np.append(x_data, x_data_full[int(array[n])])
# for n in f_cols_array:
# dep_vol_mean = np.append(dep_vol_mean, np.mean(dep_vol_w_array_all[:,int(array[n]):int(array[n+1])]))
# sco_vol_mean = np.append(sco_vol_mean, np.mean(sco_vol_w_array_all[:,int(array[n]):int(array[n+1])]))
# dep_vol_std = np.append(dep_vol_std, np.std(dep_vol_w_array_all[:,int(array[n]):int(array[n+1])]))
# sco_vol_std = np.append(sco_vol_std, np.std(sco_vol_w_array_all[:,int(array[n]):int(array[n+1])]))
# # To finish
# if windows_mode == 3:
# # Loop to define the windows to clusterize data
# array = [0]
# num=0
# for n in range(0,len(c_array)):
# num += c_array[n]
# array = np.append(array, num) # Clusterize window dimension
# dep_vol_mean = []
# sco_vol_mean = []
# dep_vol_std = []
# sco_vol_std = []
# x_data_full = x_data
# x_data = []
# for n in range(0, len(array)-1):
# # low_lim = int(f_cols_array[n,0])
# # upp_lim = int(f_cols_array[n,1])
# x_data = np.append(x_data, round(x_data_full[int(array[n])+n],1))
# # dep_vol_mean = np.append(dep_vol_mean, np.mean(dep_vol_w_array_all[:,low_lim:upp_lim]))
# # sco_vol_mean = np.append(sco_vol_mean, np.mean(sco_vol_w_array_all[:,low_lim:upp_lim]))
# # dep_vol_std = np.append(dep_vol_std, np.std(dep_vol_w_array_all[:,low_lim:upp_lim]))
# # sco_vol_std = np.append(sco_vol_std, np.std(sco_vol_w_array_all[:,low_lim:upp_lim]))
# dep_vol_mean = np.append(dep_vol_mean, np.mean(dep_vol_w_array_all[:,int(array[n]):int(array[n+1])]))
# sco_vol_mean = np.append(sco_vol_mean, np.mean(sco_vol_w_array_all[:,int(array[n]):int(array[n+1])]))
# dep_vol_std = np.append(dep_vol_std, np.std(dep_vol_w_array_all[:,int(array[n]):int(array[n+1])]))
# sco_vol_std = np.append(sco_vol_std, np.std(sco_vol_w_array_all[:,int(array[n]):int(array[n+1])]))
# # print(int(array[n]),int(array[n+1]))
# # TODO To finish
# fig3, axs = plt.subplots(2,1,dpi=80, figsize=(10,6), sharex=True, tight_layout=True)
# fig3.suptitle(run + ' - Volume')
# axs[0].errorbar(x_data, sco_vol_mean, sco_vol_std, linestyle='--', marker='^', color='red')
# # axs[0].set_ylim(bottom=0)
# axs[0].set_title('Scour')
# # axs[0].set_xlabel()
# axs[0].set_ylabel('Scour volume V/(L*W*d50) [-]')
# axs[1].errorbar(x_data, dep_vol_mean, dep_vol_std, linestyle='--', marker='^', color='blue')
# axs[1].set_ylim(bottom=0)
# axs[1].set_title('Deposition')
# axs[1].set_xlabel('Analysis window length [m]')
# axs[1].set_ylabel('Deposition volume V/(L*W*d50) [-]')
# # plt.savefig(os.path.join(plot_dir, run +'dep_scour.png'), dpi=200)
# plt.show()
# # # Plots
# # fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# # axs.plot(x_data, dep_vol_w_array, 'o', c='brown')
# # axs.set_title(run)
# # axs.set_xlabel('Longitudinal coordinate [m]')
# # axs.set_ylabel('Deposition volumes V/(W*L*d50) [-]')
# # # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# # plt.show()
# # fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# # axs.plot(x_data, sco_vol_w_array, 'o', c='brown')
# # axs.set_title(run)
# # axs.set_xlabel('Longitudinal coordinate [m]')
# # axs.set_ylabel('Scour volumes V/(W*L*d50) [-]')
# # # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# # plt.show()
# # fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# # axs.plot(x_data, act_width_mean_w_array, 'o', c='brown')
# # axs.set_title(run)
# # axs.set_xlabel('Longitudinal coordinate [m]')
# # axs.set_ylabel('Active width actW/W [-]')
# # # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# # plt.show()
# # fig1, axs = plt.subplots(1,1,dpi=200, sharex=True, tight_layout=True)
# # axs.plot(x_data, act_thickness_w_array, 'o', c='brown')
# # axs.set_title(run)
# # axs.set_xlabel('Longitudinal coordinate [m]')
# # axs.set_ylabel('Active thickness [mm]')
# # # plt.savefig(os.path.join(plot_dir, run +'_morphW_interp.png'), dpi=200)
# # plt.show() | [
"os.listdir",
"numpy.nanstd",
"math.floor",
"numpy.where",
"morph_quantities_func_v2.morph_quantities",
"os.path.join",
"os.getcwd",
"numpy.append",
"numpy.array",
"numpy.nanmean",
"numpy.isnan",
"numpy.vstack",
"numpy.loadtxt"
] | [((1005, 1016), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1014, 1016), False, 'import os\n'), ((1052, 1096), 'os.path.join', 'os.path.join', (['home_dir', '"""DoDs"""', "('DoD_' + run)"], {}), "(home_dir, 'DoDs', 'DoD_' + run)\n", (1064, 1096), False, 'import os\n'), ((1192, 1215), 'os.listdir', 'os.listdir', (['DoDs_folder'], {}), '(DoDs_folder)\n', (1202, 1215), False, 'import os\n'), ((1657, 1692), 'os.path.join', 'os.path.join', (['DoDs_folder', 'DoD_name'], {}), '(DoDs_folder, DoD_name)\n', (1669, 1692), False, 'import os\n'), ((1714, 1750), 'numpy.loadtxt', 'np.loadtxt', (['DoD_path'], {'delimiter': '"""\t"""'}), "(DoD_path, delimiter='\\t')\n", (1724, 1750), True, 'import numpy as np\n'), ((2092, 2128), 'numpy.where', 'np.where', (['(DoD_vol == NaN)', '(0)', 'DoD_vol'], {}), '(DoD_vol == NaN, 0, DoD_vol)\n', (2100, 2128), True, 'import numpy as np\n'), ((2302, 2330), 'numpy.where', 'np.where', (['(DoD_vol != 0)', '(1)', '(0)'], {}), '(DoD_vol != 0, 1, 0)\n', (2310, 2330), True, 'import numpy as np\n'), ((2402, 2430), 'numpy.where', 'np.where', (['(dep_DoD != 0)', '(1)', '(0)'], {}), '(dep_DoD != 0, 1, 0)\n', (2410, 2430), True, 'import numpy as np\n'), ((2483, 2511), 'numpy.where', 'np.where', (['(sco_DoD != 0)', '(1)', '(0)'], {}), '(sco_DoD != 0, 1, 0)\n', (2491, 2511), True, 'import numpy as np\n'), ((2905, 2921), 'numpy.array', 'np.array', (['[0, 0]'], {}), '([0, 0])\n', (2913, 2921), True, 'import numpy as np\n'), ((2009, 2034), 'numpy.isnan', 'np.isnan', (['DoD_filt_nozero'], {}), '(DoD_filt_nozero)\n', (2017, 2034), True, 'import numpy as np\n'), ((1387, 1416), 'numpy.append', 'np.append', (['DoDs_name_array', 'f'], {}), '(DoDs_name_array, f)\n', (1396, 1416), True, 'import numpy as np\n'), ((9051, 9073), 'numpy.nanmean', 'np.nanmean', (['mean_array'], {}), '(mean_array)\n', (9061, 9073), True, 'import numpy as np\n'), ((9123, 9143), 'numpy.nanstd', 'np.nanstd', (['std_array'], {}), '(std_array)\n', (9132, 9143), True, 'import numpy as np\n'), 
((9203, 9221), 'numpy.nanmean', 'np.nanmean', (['x_data'], {}), '(x_data)\n', (9213, 9221), True, 'import numpy as np\n'), ((6404, 6422), 'numpy.nanmean', 'np.nanmean', (['x_data'], {}), '(x_data)\n', (6414, 6422), True, 'import numpy as np\n'), ((6514, 6541), 'numpy.nanmean', 'np.nanmean', (['tot_vol_w_array'], {}), '(tot_vol_w_array)\n', (6524, 6541), True, 'import numpy as np\n'), ((6634, 6660), 'numpy.nanstd', 'np.nanstd', (['tot_vol_w_array'], {}), '(tot_vol_w_array)\n', (6643, 6660), True, 'import numpy as np\n'), ((8015, 8037), 'numpy.nanmean', 'np.nanmean', (['mean_array'], {}), '(mean_array)\n', (8025, 8037), True, 'import numpy as np\n'), ((8091, 8111), 'numpy.nanstd', 'np.nanstd', (['std_array'], {}), '(std_array)\n', (8100, 8111), True, 'import numpy as np\n'), ((8175, 8193), 'numpy.nanmean', 'np.nanmean', (['x_data'], {}), '(x_data)\n', (8185, 8193), True, 'import numpy as np\n'), ((8668, 8694), 'numpy.array', 'np.array', (['[0, W * (i + 1)]'], {}), '([0, W * (i + 1)])\n', (8676, 8694), True, 'import numpy as np\n'), ((8724, 8762), 'numpy.vstack', 'np.vstack', (['(window_boundary, boundary)'], {}), '((window_boundary, boundary))\n', (8733, 8762), True, 'import numpy as np\n'), ((8786, 8804), 'numpy.nanmean', 'np.nanmean', (['window'], {}), '(window)\n', (8796, 8804), True, 'import numpy as np\n'), ((8827, 8844), 'numpy.nanstd', 'np.nanstd', (['window'], {}), '(window)\n', (8836, 8844), True, 'import numpy as np\n'), ((8874, 8901), 'numpy.append', 'np.append', (['mean_array', 'mean'], {}), '(mean_array, mean)\n', (8883, 8901), True, 'import numpy as np\n'), ((8930, 8955), 'numpy.append', 'np.append', (['std_array', 'std'], {}), '(std_array, std)\n', (8939, 8955), True, 'import numpy as np\n'), ((8979, 8999), 'numpy.append', 'np.append', (['x_data', 'i'], {}), '(x_data, i)\n', (8988, 8999), True, 'import numpy as np\n'), ((3620, 3650), 'math.floor', 'math.floor', (['(array.shape[1] / W)'], {}), '(array.shape[1] / W)\n', (3630, 3650), False, 'import 
math\n'), ((4476, 4500), 'numpy.array', 'np.array', (['[i, W * w + i]'], {}), '([i, W * w + i])\n', (4484, 4500), True, 'import numpy as np\n'), ((4534, 4572), 'numpy.vstack', 'np.vstack', (['(window_boundary, boundary)'], {}), '((window_boundary, boundary))\n', (4543, 4572), True, 'import numpy as np\n'), ((4600, 4620), 'numpy.append', 'np.append', (['x_data', 'w'], {}), '(x_data, w)\n', (4609, 4620), True, 'import numpy as np\n'), ((4921, 4945), 'morph_quantities_func_v2.morph_quantities', 'morph_quantities', (['window'], {}), '(window)\n', (4937, 4945), False, 'from morph_quantities_func_v2 import morph_quantities\n'), ((5153, 5188), 'numpy.append', 'np.append', (['tot_vol_w_array', 'tot_vol'], {}), '(tot_vol_w_array, tot_vol)\n', (5162, 5188), True, 'import numpy as np\n'), ((5225, 5260), 'numpy.append', 'np.append', (['sum_vol_w_array', 'sum_vol'], {}), '(sum_vol_w_array, sum_vol)\n', (5234, 5260), True, 'import numpy as np\n'), ((5297, 5332), 'numpy.append', 'np.append', (['dep_vol_w_array', 'dep_vol'], {}), '(dep_vol_w_array, dep_vol)\n', (5306, 5332), True, 'import numpy as np\n'), ((5369, 5404), 'numpy.append', 'np.append', (['sco_vol_w_array', 'sco_vol'], {}), '(sco_vol_w_array, sco_vol)\n', (5378, 5404), True, 'import numpy as np\n'), ((5448, 5497), 'numpy.append', 'np.append', (['morph_act_area_w_array', 'morph_act_area'], {}), '(morph_act_area_w_array, morph_act_area)\n', (5457, 5497), True, 'import numpy as np\n'), ((5545, 5602), 'numpy.append', 'np.append', (['morph_act_area_dep_w_array', 'morph_act_area_dep'], {}), '(morph_act_area_dep_w_array, morph_act_area_dep)\n', (5554, 5602), True, 'import numpy as np\n'), ((5650, 5707), 'numpy.append', 'np.append', (['morph_act_area_sco_w_array', 'morph_act_area_sco'], {}), '(morph_act_area_sco_w_array, morph_act_area_sco)\n', (5659, 5707), True, 'import numpy as np\n'), ((5751, 5800), 'numpy.append', 'np.append', (['act_width_mean_w_array', 'act_width_mean'], {}), '(act_width_mean_w_array, 
act_width_mean)\n', (5760, 5800), True, 'import numpy as np\n'), ((5848, 5905), 'numpy.append', 'np.append', (['act_width_mean_dep_w_array', 'act_width_mean_dep'], {}), '(act_width_mean_dep_w_array, act_width_mean_dep)\n', (5857, 5905), True, 'import numpy as np\n'), ((5953, 6010), 'numpy.append', 'np.append', (['act_width_mean_sco_w_array', 'act_width_mean_sco'], {}), '(act_width_mean_sco_w_array, act_width_mean_sco)\n', (5962, 6010), True, 'import numpy as np\n'), ((6053, 6100), 'numpy.append', 'np.append', (['act_thickness_w_array', 'act_thickness'], {}), '(act_thickness_w_array, act_thickness)\n', (6062, 6100), True, 'import numpy as np\n'), ((6147, 6202), 'numpy.append', 'np.append', (['act_thickness_dep_w_array', 'act_thickness_dep'], {}), '(act_thickness_dep_w_array, act_thickness_dep)\n', (6156, 6202), True, 'import numpy as np\n'), ((6249, 6304), 'numpy.append', 'np.append', (['act_thickness_sco_w_array', 'act_thickness_sco'], {}), '(act_thickness_sco_w_array, act_thickness_sco)\n', (6258, 6304), True, 'import numpy as np\n'), ((7257, 7287), 'math.floor', 'math.floor', (['(array.shape[1] / W)'], {}), '(array.shape[1] / W)\n', (7267, 7287), False, 'import math\n'), ((7598, 7636), 'numpy.array', 'np.array', (['[W * w * i, W * w * (i + 1)]'], {}), '([W * w * i, W * w * (i + 1)])\n', (7606, 7636), True, 'import numpy as np\n'), ((7664, 7702), 'numpy.vstack', 'np.vstack', (['(window_boundary, boundary)'], {}), '((window_boundary, boundary))\n', (7673, 7702), True, 'import numpy as np\n'), ((7730, 7748), 'numpy.nanmean', 'np.nanmean', (['window'], {}), '(window)\n', (7740, 7748), True, 'import numpy as np\n'), ((7775, 7792), 'numpy.nanstd', 'np.nanstd', (['window'], {}), '(window)\n', (7784, 7792), True, 'import numpy as np\n'), ((7826, 7853), 'numpy.append', 'np.append', (['mean_array', 'mean'], {}), '(mean_array, mean)\n', (7835, 7853), True, 'import numpy as np\n'), ((7886, 7911), 'numpy.append', 'np.append', (['std_array', 'std'], {}), '(std_array, std)\n', 
(7895, 7911), True, 'import numpy as np\n'), ((7939, 7959), 'numpy.append', 'np.append', (['x_data', 'w'], {}), '(x_data, w)\n', (7948, 7959), True, 'import numpy as np\n')] |
# ---------------------------------------------------------------------------
# Remote-control client: receives length-prefixed JPEG camera frames from a
# Raspberry Pi over TCP, feeds each frame to a trained PPO2 policy, and sends
# the pickled predicted action back. Frames are also collected to build a GIF
# of the run.
#
# Wire protocol (per frame): 4-byte big-endian length prefix, followed by
# `length` bytes of JPEG data.
# ---------------------------------------------------------------------------
# --------------------------------------------------------Import libraries
import pickle
import socket
import struct
import cv2
from stable_baselines import PPO2
import numpy as np
import imageio
# --------------------------------------------------------Establish connection
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("RASPBERRY PI address", 1235))  # TODO: replace with the Pi's real address
# --------------------------------------------------------Read model
model = PPO2.load("model_output/model_final.zip")
# --------------------------------------------------------Read first length prefix
# recv() may legally return fewer bytes than requested, so loop until the
# full 4-byte prefix has arrived.
data = bytearray()
while len(data) < 4:
    data.extend(s.recv(4 - len(data)))
length = struct.unpack(">L", data[:4])[0]
data = data[4:]
# --------------------------------------------------------Initialize
images_O = []  # raw RGB frames, kept for the output GIF
# WINDOW_NORMAL is required: resizeWindow() has no effect on the default
# AUTOSIZE windows.
cv2.namedWindow('frame', cv2.WINDOW_NORMAL)
cv2.resizeWindow('frame', 256, 256)
try:
    while True:
        # Accumulate bytes until a full frame has arrived.
        while len(data) < length:
            data.extend(s.recv(4096))
        # Decode the JPEG (BGR) and convert to RGB, the channel order the
        # policy expects.
        frame = cv2.cvtColor(
            cv2.imdecode(np.frombuffer(data[:length], dtype=np.uint8), 1),
            cv2.COLOR_BGR2RGB,
        )
        images_O.append(frame)
        # Given the current state (frame), predict the action.
        action, _ = model.predict(frame, deterministic=True)
        # Send the action back to the robot.
        s.sendall(pickle.dumps(action))
        # Drop the consumed frame bytes, then read the next 4-byte length
        # prefix (again guarding against short reads).
        data = data[length:]
        while len(data) < 4:
            data.extend(s.recv(4 - len(data)))
        length = struct.unpack(">L", data[:4])[0]
        data = data[4:]
        # Show the frame; convert back to BGR so cv2 displays colours correctly.
        cv2.imshow('frame', cv2.cvtColor(frame, cv2.COLOR_RGB2BGR))
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break  # the finally block closes the socket
finally:
    s.close()
# Save every other captured frame as a GIF of the run.
imageio.mimsave('Lego_camera_view.gif',
                [np.array(img) for i, img in enumerate(images_O) if i % 2 == 0],
                fps=20)
"cv2.resizeWindow",
"socket.socket",
"pickle.dumps",
"stable_baselines.PPO2.load",
"numpy.array",
"struct.unpack",
"cv2.cvtColor",
"numpy.frombuffer",
"cv2.waitKey",
"cv2.namedWindow"
] | [((279, 328), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (292, 328), False, 'import socket\n'), ((446, 487), 'stable_baselines.PPO2.load', 'PPO2.load', (['"""model_output/model_final.zip"""'], {}), "('model_output/model_final.zip')\n", (455, 487), False, 'from stable_baselines import PPO2\n'), ((803, 827), 'cv2.namedWindow', 'cv2.namedWindow', (['"""frame"""'], {}), "('frame')\n", (818, 827), False, 'import cv2\n'), ((828, 863), 'cv2.resizeWindow', 'cv2.resizeWindow', (['"""frame"""', '(256)', '(256)'], {}), "('frame', 256, 256)\n", (844, 863), False, 'import cv2\n'), ((642, 671), 'struct.unpack', 'struct.unpack', (['""">L"""', 'info[:4]'], {}), "('>L', info[:4])\n", (655, 671), False, 'import struct\n'), ((1352, 1372), 'pickle.dumps', 'pickle.dumps', (['action'], {}), '(action)\n', (1364, 1372), False, 'import pickle\n'), ((1487, 1516), 'struct.unpack', 'struct.unpack', (['""">L"""', 'data[:4]'], {}), "('>L', data[:4])\n", (1500, 1516), False, 'import struct\n'), ((1681, 1719), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_RGB2BGR'], {}), '(frame, cv2.COLOR_RGB2BGR)\n', (1693, 1719), False, 'import cv2\n'), ((1916, 1929), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (1924, 1929), True, 'import numpy as np\n'), ((1072, 1116), 'numpy.frombuffer', 'np.frombuffer', (['data[:length]'], {'dtype': 'np.uint8'}), '(data[:length], dtype=np.uint8)\n', (1085, 1116), True, 'import numpy as np\n'), ((1731, 1745), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1742, 1745), False, 'import cv2\n')] |
# from django.shortcuts import render
# Create your views here.
from django.urls import reverse_lazy
from django.views.generic import CreateView
from fitbox.consultas.forms import ConsultaForm
from fitbox.consultas.models import Consulta
class ConsultaCreateView(CreateView):
    """Create-view for registering a new ``Consulta`` via ``ConsultaForm``."""
    # Template rendered on GET and on invalid-POST (form re-display).
    template_name = "consultas/cadastro_consulta.html"
    model = Consulta
    form_class = ConsultaForm
    # After a successful save, redirect back to the registration page.
    success_url = reverse_lazy("consulta:cadastro_consulta")
| [
"django.urls.reverse_lazy"
] | [((404, 446), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""consulta:cadastro_consulta"""'], {}), "('consulta:cadastro_consulta')\n", (416, 446), False, 'from django.urls import reverse_lazy\n')] |
"""Muffin application settings for the {{cookiecutter.repo_name}} project."""
import os
# Directories searched for static assets.
STATIC_FOLDERS = (
    '{{cookiecutter.repo_name}}/common/static',
    '{{cookiecutter.repo_name}}/users/static',
)
# Muffin Plugins
PLUGINS = (
    'muffin_jinja2',
    'muffin_peewee',
    'muffin_session',
)
# Plugins configurations
SESSION_SECRET = 'SecretHere'  # NOTE(review): placeholder — replace with a real secret before deployment
SESSION_LOGIN_URL = '/users/signin/'
# Directories searched for Jinja2 templates.
JINJA2_TEMPLATE_FOLDERS = (
    '{{cookiecutter.repo_name}}/common/templates',
    '{{cookiecutter.repo_name}}/public/templates',
    '{{cookiecutter.repo_name}}/users/templates'
)
# Database connection string; the DATABASE_URL env var overrides the sqlite default.
PEEWEE_CONNECTION = os.environ.get('DATABASE_URL', 'sqlite:///{{cookiecutter.repo_name}}.sqlite')
| [
"os.environ.get"
] | [((520, 597), 'os.environ.get', 'os.environ.get', (['"""DATABASE_URL"""', '"""sqlite:///{{cookiecutter.repo_name}}.sqlite"""'], {}), "('DATABASE_URL', 'sqlite:///{{cookiecutter.repo_name}}.sqlite')\n", (534, 597), False, 'import os\n')] |
from typing import List, Dict
from sc2.ids.ability_id import AbilityId
from sc2.position import Point2
from sc2.unit import Unit
from sc2.units import Units
from sharpy.interfaces import IZoneManager
from sharpy.managers.core.roles import UnitTask
from sharpy.plans.acts import ActBase
from sharpy.sc2math import get_intersections
MINING_RADIUS = 1.325  # radius around a mineral field used when computing approach points
class SpeedMining(ActBase):
    """Worker micro intended to speed up mineral mining.

    When a worker is within a short distance band of its destination
    (mineral patch on the way out, townhall on the way back) this act
    issues a move command toward a precomputed point followed by a
    SMART command on the destination, instead of letting the default
    gather behavior run.
    """
    def __init__(self, enable_on_return=True, enable_on_mine=True) -> None:
        """
        Args:
            enable_on_return: apply the micro to workers returning cargo.
            enable_on_mine: apply the micro to workers walking to a patch.
        """
        super().__init__()
        self.enable_on_return = enable_on_return
        self.enable_on_mine = enable_on_mine
        # Precomputed move target per mineral-field position; filled by
        # calculate_targets() once at start().
        self.mineral_target_dict: Dict[Point2, Point2] = {}
    async def start(self, knowledge: "Knowledge"):
        await super().start(knowledge)
        self.calculate_targets()
    async def execute(self) -> bool:
        # Nothing to do without a townhall, or when both toggles are off.
        if len(self.ai.townhalls) < 1 or (not self.enable_on_return and not self.enable_on_mine):
            return True
        workers = self.get_mineral_workers()
        self.speedmine(workers)
        return True
    def get_mineral_workers(self) -> Units:
        """Return gathering workers that are mining minerals (not vespene)."""
        def miner_filter(unit: Unit) -> bool:
            # Exclude workers carrying vespene or ordered onto a vespene source.
            if unit.is_carrying_vespene:
                return False
            if unit.order_target is not None and isinstance(unit.order_target, int):
                target_unit = self.cache.by_tag(unit.order_target)
                if target_unit is not None and target_unit.has_vespene:
                    return False
            return True
        units = self.roles.all_from_task(UnitTask.Gathering).filter(miner_filter)
        return units
    def speedmine(self, workers: Units):
        """Apply the single-worker micro to every worker in ``workers``."""
        for worker in workers:
            self.speedmine_single(worker)
    def speedmine_single(self, worker: Unit):
        """Re-issue move + SMART commands for one worker when it is in range."""
        townhall = self.ai.townhalls.closest_to(worker)
        if self.enable_on_return and worker.is_returning and len(worker.orders) == 1:
            target: Point2 = townhall.position
            # Aim at the townhall's edge rather than its center.
            target = target.towards(worker, townhall.radius + worker.radius)
            # Only intervene inside this distance window.
            if 0.75 < worker.distance_to(target) < 2:
                worker.move(target)
                worker(AbilityId.SMART, townhall, True)  # third arg True — presumably queued; confirm
            return
        if (
            self.enable_on_mine
            and not worker.is_returning
            and len(worker.orders) == 1
            and isinstance(worker.order_target, int)
        ):
            mf = self.cache.by_tag(worker.order_target)
            if mf is not None and mf.is_mineral_field:
                target = self.mineral_target_dict.get(mf.position)
                if target and 0.75 < worker.distance_to(target) < 2:
                    worker.move(target)
                    worker(AbilityId.SMART, mf, True)  # third arg True — presumably queued; confirm
    def calculate_targets(self):
        """Precompute an approach point for every mineral field on the map."""
        zone_manager = self.knowledge.get_required_manager(IZoneManager)
        zones = zone_manager.expansion_zones
        centers: List[Point2] = []
        for zone in zones:
            centers.append(zone.center_location)
        for mf in self.ai.mineral_field:
            target: Point2 = mf.position
            # Start MINING_RADIUS from the patch toward the nearest base center.
            center = target.closest(centers)
            target = target.towards(center, MINING_RADIUS)
            close = self.ai.mineral_field.closer_than(MINING_RADIUS, target)
            for mf2 in close:
                if mf2.tag != mf.tag:
                    # A neighboring patch overlaps the approach circle: snap
                    # the target to the circle intersection nearest the center.
                    points = get_intersections(mf.position, MINING_RADIUS, mf2.position, MINING_RADIUS)
                    if len(points) == 2:
                        target = center.closest(points)
            self.mineral_target_dict[mf.position] = target
| [
"sharpy.sc2math.get_intersections"
] | [((3371, 3445), 'sharpy.sc2math.get_intersections', 'get_intersections', (['mf.position', 'MINING_RADIUS', 'mf2.position', 'MINING_RADIUS'], {}), '(mf.position, MINING_RADIUS, mf2.position, MINING_RADIUS)\n', (3388, 3445), False, 'from sharpy.sc2math import get_intersections\n')] |
# Generated by Django 2.1.3 on 2020-02-28 21:32
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 2.1.3: drops Content.collections and adds a
    # single nullable FK Content.collection (SET_NULL on delete).
    dependencies = [
        ('content_management', '0045_auto_20200228_1345'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='content',
            name='collections',
        ),
        migrations.AddField(
            model_name='content',
            name='collection',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='content_management.Collection'),
        ),
    ]
| [
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey"
] | [((279, 343), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""content"""', 'name': '"""collections"""'}), "(model_name='content', name='collections')\n", (301, 343), False, 'from django.db import migrations, models\n'), ((492, 606), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""content_management.Collection"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='content_management.Collection')\n", (509, 606), False, 'from django.db import migrations, models\n')] |
import os
from fnmatch import fnmatch
from typing import Iterator, List, Optional, Sequence, Tuple, Union
import numpy as np
from typing_extensions import Literal
from . import lib
from .otf import TemporaryOTF
from .util import PathOrArray, _kwargs_for, imread
def rl_cleanup():
    """Release GPU buffer and cleanup after deconvolution

    Call this before program quits to release global GPUBuffer d_interpOTF.

    - Resets any bleach corrections
    - Removes OTF from GPU buffer
    - Destroys cuFFT plan
    - Releases GPU buffers
    """
    # Thin wrapper so callers never have to touch `lib` directly.
    return lib.RL_cleanup()
def rl_init(
    rawdata_shape: Tuple[int, int, int],
    otfpath: str,
    dzdata: float = 0.5,
    dxdata: float = 0.1,
    dzpsf: float = 0.1,
    dxpsf: float = 0.1,
    deskew: float = 0,
    rotate: float = 0,
    width: int = 0,
):
    """Prepare the GPU for deconvolution.

    Hands the OTF at ``otfpath`` to the GPU and creates a cuFFT plan sized
    for ``rawdata_shape``.  Must be called before
    :func:`pycudadecon.rl_decon`; pair with :func:`rl_cleanup` (or use
    :class:`RLContext`, which does both).

    Parameters
    ----------
    rawdata_shape : Tuple[int, int, int]
        3-tuple of data shape
    otfpath : str
        Path to OTF TIF
    dzdata : float, optional
        Z-step size of data, by default 0.5
    dxdata : float, optional
        XY pixel size of data, by default 0.1
    dzpsf : float, optional
        Z-step size of the OTF, by default 0.1
    dxpsf : float, optional
        XY pixel size of the OTF, by default 0.1
    deskew : float, optional
        Deskew angle; a nonzero value deskews before deconvolution,
        by default 0
    rotate : float, optional
        Rotation angle; a nonzero value rotates around Y after
        deconvolution, by default 0
    width : int, optional
        Output width after deskewing; 0 means do not crop, by default 0

    Examples
    --------
    >>> rl_init(im.shape, otfpath)
    >>> decon_result = rl_decon(im)
    >>> rl_cleanup()
    """
    n_planes, n_rows, n_cols = rawdata_shape
    lib.RL_interface_init(
        n_cols,
        n_rows,
        n_planes,
        dxdata,
        dzdata,
        dxpsf,
        dzpsf,
        deskew,
        rotate,
        width,
        otfpath.encode(),
    )
def rl_decon(
    im: np.ndarray,
    background: Union[int, Literal["auto"]] = 80,
    n_iters: int = 10,
    shift: int = 0,
    save_deskewed: bool = False,
    output_shape: Optional[Tuple[int, int, int]] = None,
    napodize: int = 15,
    nz_blend: int = 0,
    pad_val: float = 0.0,
    dup_rev_z: bool = False,
) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray]]:
    """Run Richardson-Lucy deconvolution on a 3D volume.

    The GPU must already have been prepared with
    :func:`pycudadecon.rl_init` (or use :class:`RLContext`).

    Parameters
    ----------
    im : np.ndarray
        3D image volume to deconvolve
    background : int or 'auto'
        Background to subtract.  'auto' uses the median of the last Z
        plane, by default 80
    n_iters : int, optional
        Number of iterations, by default 10
    shift : int, optional
        If deskewed, the output image's extra shift in X (positive->left),
        by default 0
    save_deskewed : bool, optional
        Also return the deskewed raw data, by default False
    output_shape : tuple of int, optional
        Output shape after deskewing; normally autodetected.  Mostly
        intended for use within a :class:`pycudadecon.RLContext` context,
        by default None
    napodize : int, optional
        Number of pixels to soften edge with, by default 15
    nz_blend : int, optional
        Number of top and bottom sections to blend in to reduce axial
        ringing, by default 0
    pad_val : float, optional
        Value with which to pad image when deskewing, by default 0.0
    dup_rev_z : bool, optional
        Duplicate reversed stack prior to decon to reduce axial ringing,
        by default False

    Returns
    -------
    np.ndarray or 2-tuple of np.ndarray
        The deconvolved result, or ``(decon_result, deskew_result)`` when
        `save_deskewed` is True.

    Raises
    ------
    ValueError
        If ``im`` is not 3D, or ``output_shape`` is given but not length 3.
    """
    if im.ndim != 3:
        raise ValueError("Only 3D arrays supported")
    n_planes, n_rows, n_cols = im.shape
    if output_shape is None:
        output_shape = (lib.get_output_nz(), lib.get_output_ny(), lib.get_output_nx())
    elif len(output_shape) != 3:
        raise ValueError("Decon output shape must have length==3")
    result = np.empty(tuple(output_shape), dtype=np.float32)
    # A dummy 1-element buffer stands in when the deskewed copy isn't wanted.
    deskewed = np.empty_like(result) if save_deskewed else np.empty(1, dtype=np.float32)
    # input is converted to uint16 before being handed to the library
    if not np.issubdtype(im.dtype, np.uint16):
        im = im.astype(np.uint16)
    if isinstance(background, str) and background == "auto":
        background = np.median(im[-1])
    rescale = False  # not sure if this works yet...
    if not im.flags["C_CONTIGUOUS"]:
        im = np.ascontiguousarray(im)
    lib.RL_interface(
        im,
        n_cols,
        n_rows,
        n_planes,
        result,
        deskewed,
        background,
        rescale,
        save_deskewed,
        n_iters,
        shift,
        napodize,
        nz_blend,
        pad_val,
        dup_rev_z,
    )
    return (result, deskewed) if save_deskewed else result
def quickDecon(image: np.ndarray, otfpath: str, **kwargs):
    """Perform deconvolution of `image` with otf at `otfpath`.

    Not currently used...

    Parameters
    ----------
    image : np.ndarray
        3D volume to deconvolve
    otfpath : str
        Path to the OTF TIF file
    **kwargs
        Split between :func:`rl_init` and :func:`rl_decon` via ``_kwargs_for``.

    Returns
    -------
    np.ndarray or 2-tuple of np.ndarray
        Result of :func:`rl_decon`.
    """
    rl_init(image.shape, otfpath, **_kwargs_for(rl_init, kwargs))
    try:
        result = rl_decon(image, **_kwargs_for(rl_decon, kwargs))
    finally:
        # Fix: previously a failure in rl_decon skipped cleanup, leaving the
        # GPU buffers and cuFFT plan allocated.  Always release them.
        lib.RL_cleanup()
    return result
class RLContext:
    """Context manager that prepares the GPU for RL deconvolution.

    On entry it hands the OTF to the GPU and builds the cuFFT plan
    (via :func:`rl_init`), records the ZYX shape of the expected output
    volume in ``out_shape``, and on exit releases everything with
    :func:`rl_cleanup`.  Accepts the same parameters as :func:`rl_init`.

    Examples
    --------
    >>> with RLContext(data.shape, otfpath, dz) as ctx:
    ...     result = rl_decon(data, ctx.out_shape)
    """

    def __init__(
        self,
        rawdata_shape: Tuple[int, int, int],
        otfpath: str,
        dzdata: float = 0.5,
        dxdata: float = 0.1,
        dzpsf: float = 0.1,
        dxpsf: float = 0.1,
        deskew: float = 0,
        rotate: float = 0,
        width: int = 0,
    ):
        # Collect every argument explicitly so __enter__ can forward them
        # to rl_init() unchanged.
        self.kwargs = {
            "rawdata_shape": rawdata_shape,
            "otfpath": otfpath,
            "dzdata": dzdata,
            "dxdata": dxdata,
            "dzpsf": dzpsf,
            "dxpsf": dxpsf,
            "deskew": deskew,
            "rotate": rotate,
            "width": width,
        }
        self.out_shape: Optional[Tuple[int, int, int]] = None

    def __enter__(self):
        """Setup the context and return the ZYX shape of the output image"""
        rl_init(**self.kwargs)
        self.out_shape = (lib.get_output_nz(), lib.get_output_ny(), lib.get_output_nx())
        return self

    def __exit__(self, typ, val, traceback):
        # Receives any exception raised inside the `with` body; returning a
        # falsy value (implicit None) lets it propagate after cleanup.
        lib.RL_cleanup()
# snake_case alias for RLContext (same object, alternate import name)
rl_context = RLContext
def _yield_arrays(
    images: Union[PathOrArray, Sequence[PathOrArray]], fpattern="*.tif"
) -> Iterator[np.ndarray]:
    """Lazily produce arrays from arrays, paths, or sequences of either.

    Strings are treated as filesystem paths: a file is read with
    :func:`imread`; a directory is expanded to every file matching
    ``fpattern``.  Sequences are flattened recursively.

    Parameters
    ----------
    images : Union[PathOrArray, Sequence[PathOrArray]]
        an array, path, or sequence of either
    fpattern : str, optional
        used to filter files in a directory, by default "*.tif"

    Yields
    ------
    Iterator[np.ndarray]
        Arrays (read from paths if necessary)

    Raises
    ------
    OSError
        If a directory is provided and no files match fpattern.
    """
    if isinstance(images, np.ndarray):
        yield images
        return
    if isinstance(images, str):
        if os.path.isfile(images):
            yield imread(images)
        elif os.path.isdir(images):
            matches = [name for name in os.listdir(images) if fnmatch(name, fpattern)]
            if not matches:
                raise OSError(
                    'No files matching pattern "{}" found in directory: {}'.format(
                        fpattern, images
                    )
                )
            for name in matches:
                yield imread(os.path.join(images, name))
        return
    for entry in images:
        yield from _yield_arrays(entry)
def decon(
    images: Union[PathOrArray, Sequence[PathOrArray]],
    psf: PathOrArray,
    fpattern: str = "*.tif",
    **kwargs
) -> Union[np.ndarray, List[np.ndarray]]:
    """Deconvolve an image or images with a PSF or OTF file.

    If `images` is a directory, use the `fpattern` argument to select files
    by filename pattern.

    Parameters
    ----------
    images : str, np.ndarray, or sequence of either
        The array, filepath, directory, or list/tuple thereof to deconvolve
    psf : str or np.ndarray
        a filepath of a PSF or OTF file, or a 3D numpy PSF array. Function will
        auto-detect whether the file is a 3D PSF or a filepath representing a 2D
        complex OTF.
    fpattern : str, optional
        Filepattern to use when a directory is provided in the `images` argument,
        by default `*.tif`
    ** kwargs
        All other kwargs must be valid for either :func:`rl_init` or :func:`rl_decon`.

    Returns
    -------
    np.ndarray or list of array
        The deconvolved image(s)

    Raises
    ------
    ValueError
        If save_deskewed is True and deskew is unset or 0
    IOError
        If a directory is provided as input and ``fpattern`` yields no files
    NotImplementedError
        If ``psf`` is provided as a complex, 2D numpy array (OTFs can only be
        provided as filenames created with :func:`pycudadecon.make_otf`)

    Examples
    --------
    deconvolve a 3D TIF volume with a 3D PSF volume (e.g. a single bead stack)

    >>> result = decon('/path/to/image.tif', '/path/to/psf.tif')

    deconvolve all TIF files in a specific directory that match a certain
    `filename pattern <https://docs.python.org/3.6/library/fnmatch.html>`_,
    (in this example, all TIFs with the string '560nm' in their name)

    >>> result = decon(
    ...     '/directory/with/images', '/path/to/psf.tif', fpattern='*560nm*.tif'
    ... )

    deconvolve a list of images, provided either as np.ndarrays, filepaths,
    or directories

    >>> imarray = tifffile.imread('some_other_image.tif')
    >>> inputs = ['/directory/with/images', '/path/to/image.tif', imarray]
    >>> result = decon(inputs, '/path/to/psf.tif', fpattern='*560nm*.tif')
    """
    # save_deskewed only makes sense when deskewing is actually enabled
    if kwargs.get("save_deskewed"):
        if kwargs.get("deskew", 1) == 0:
            raise ValueError("Cannot use save_deskewed=True with deskew=0")
        if not kwargs.get("deskew"):
            raise ValueError("Must set deskew != 0 when using save_deskewed=True")
    # split **kwargs between the GPU-init step and the per-image decon step
    init_kwargs = _kwargs_for(rl_init, kwargs)
    decon_kwargs = _kwargs_for(rl_decon, kwargs)
    out = []
    with TemporaryOTF(psf, **kwargs) as otf:
        arraygen = _yield_arrays(images, fpattern)
        # first, assume that all of the images are the same shape...
        # in which case we can prevent a lot of GPU IO
        # grab and store the shape of the first item in the generator
        next_im = next(arraygen)
        shp = next_im.shape
        with RLContext(shp, otf.path, **init_kwargs) as ctx:
            while True:
                out.append(
                    rl_decon(next_im, output_shape=ctx.out_shape, **decon_kwargs)
                )
                try:
                    next_im = next(arraygen)
                    # here we check to make sure that the images are still the same
                    # shape... if not, we'll continue below
                    if next_im.shape != shp:
                        break
                except StopIteration:
                    next_im = None
                    break
        # if we had a shape mismatch, there will still be images left to process
        # process them the slow way here...
        if next_im is not None:
            for imarray in [next_im, *arraygen]:
                # one GPU init/cleanup cycle per remaining image
                with RLContext(imarray.shape, otf.path, **init_kwargs) as ctx:
                    out.append(
                        rl_decon(imarray, output_shape=ctx.out_shape, **decon_kwargs)
                    )
    # a single (non-list) input gets a single result, not a 1-element list
    if isinstance(images, (list, tuple)) and len(images) > 1:
        return out
    else:
        return out[0]
| [
"numpy.median",
"os.listdir",
"os.path.join",
"numpy.ascontiguousarray",
"os.path.isfile",
"numpy.issubdtype",
"os.path.isdir",
"numpy.empty",
"numpy.empty_like",
"fnmatch.fnmatch"
] | [((4717, 4744), 'numpy.empty_like', 'np.empty_like', (['decon_result'], {}), '(decon_result)\n', (4730, 4744), True, 'import numpy as np\n'), ((4779, 4808), 'numpy.empty', 'np.empty', (['(1)'], {'dtype': 'np.float32'}), '(1, dtype=np.float32)\n', (4787, 4808), True, 'import numpy as np\n'), ((4851, 4885), 'numpy.issubdtype', 'np.issubdtype', (['im.dtype', 'np.uint16'], {}), '(im.dtype, np.uint16)\n', (4864, 4885), True, 'import numpy as np\n'), ((5004, 5021), 'numpy.median', 'np.median', (['im[-1]'], {}), '(im[-1])\n', (5013, 5021), True, 'import numpy as np\n'), ((5127, 5151), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['im'], {}), '(im)\n', (5147, 5151), True, 'import numpy as np\n'), ((8034, 8056), 'os.path.isfile', 'os.path.isfile', (['images'], {}), '(images)\n', (8048, 8056), False, 'import os\n'), ((8105, 8126), 'os.path.isdir', 'os.path.isdir', (['images'], {}), '(images)\n', (8118, 8126), False, 'import os\n'), ((8162, 8180), 'os.listdir', 'os.listdir', (['images'], {}), '(images)\n', (8172, 8180), False, 'import os\n'), ((8184, 8204), 'fnmatch.fnmatch', 'fnmatch', (['f', 'fpattern'], {}), '(f, fpattern)\n', (8191, 8204), False, 'from fnmatch import fnmatch\n'), ((8498, 8525), 'os.path.join', 'os.path.join', (['images', 'fpath'], {}), '(images, fpath)\n', (8510, 8525), False, 'import os\n')] |