id stringlengths 1 7 | text stringlengths 6 1.03M | dataset_id stringclasses 1
value |
|---|---|---|
1751181 | # Generated by Django 2.0.7 on 2019-05-08 08:12
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.0.7): relax ``Address.user``.

    Makes the foreign key optional (``blank``/``null``) and disables
    cascade behaviour on delete (``DO_NOTHING``).
    """

    dependencies = [
        ('app_user', '0002_auto_20190503_2141'),
    ]

    operations = [
        migrations.AlterField(
            model_name='address',
            name='user',
            # DO_NOTHING can leave dangling references if a User row is
            # deleted at the database level; acceptable because the column
            # is nullable.
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='address_by_user', to='app_user.User'),
        ),
    ]
| StarcoderdataPython |
3270100 | <filename>git_report.py
import pymsteams, requests
teams_webhook_url = "<< Insert Teams Webhook URL>>"
teams_git_repository_url = "<< Insert GitLab Repo URL (without .git) >>"
teams_git_repository_id = "<< Insert GitLab Repo ID >>"
teams_git_token = "<< Insert GitLab Access Token >>"
class Teams_Git(Plugin):
    """Plugin that posts a Microsoft Teams card describing a git push result.

    Relies on the module-level configuration constants ``teams_webhook_url``,
    ``teams_git_repository_url``, ``teams_git_repository_id`` and
    ``teams_git_token``.
    """

    name = "Teams_Git"

    # NOTE(review): signature kept as-is (no ``self``) — presumably the host
    # Plugin framework invokes this unbound; confirm before adding ``self``.
    def git_report(success, tag_name):
        """Send a Teams card reporting whether a config push happened.

        :param success: True when a configuration change was pushed to git
        :param tag_name: the tag that was just pushed (used in the links)
        """
        # BUG FIX: the original referenced the undefined name
        # ``teams_webhook_url_vcs`` (NameError at runtime); use the
        # configured webhook URL instead.
        message = pymsteams.connectorcard(teams_webhook_url)
        if success:
            # NOTE(review): "gitlabs.com" looks like a typo for "gitlab.com"
            # (or a self-hosted host name) — confirm against the deployment.
            res = requests.get(f"https://gitlabs.com/api/v4/projects/{teams_git_repository_id}/repository/tags?private_token={teams_git_token}").json()
            # Tags come back newest-first, so index 1 is the previous tag.
            previous_repo = res[1]["name"]
            message.title("Configuration Change Detected")
            message.text("Successfully pushed to git.")
            message.addLinkButton("View Config", f"{teams_git_repository_url}/-/tree/{tag_name}")
            message.addLinkButton("View Changes", f"{teams_git_repository_url}/-/compare/{previous_repo}...{tag_name}")
            message.color("#4EE73C")
            message.send()
            print(f"Previous Config Tag: {previous_repo}, New Config Tag: {tag_name}")
        else:
            # Typo fix: "Configureation" -> "Configuration".
            message.title("No Configuration Change Detected")
            message.text("Skipping git push.")
            message.send()
139272 | import hashlib, json
import nacl.bindings
from nacl import encoding
from nacl.utils import random
from .utils import to_hex, from_hex, is_hex, str_to_bytes
def create_address(pubkey):
    """Derive a 20-byte address as the RIPEMD-160 digest of the public key.

    Hex-encoded input is decoded to raw bytes before hashing.
    """
    key_bytes = from_hex(pubkey) if is_hex(pubkey) else pubkey
    digest = hashlib.new('ripemd160')
    digest.update(key_bytes)
    return digest.digest()
class Key(object):
    """An Ed25519 keypair plus a RIPEMD-160 address derived from the public key."""

    def __init__(self, public_key, secret_key):
        """Store raw (bytes) key material and derive the address from the public key."""
        self._pubkey = public_key
        self._privkey = secret_key
        self._address = create_address(self._pubkey)

    @classmethod
    def generate(cls, seed=None):
        """Create a new keypair, deterministically from *seed* when given.

        :param seed: optional bytes of exactly crypto_sign_SEEDBYTES length
        :raises Exception: if the seed is not bytes or has the wrong length
        """
        if seed:
            if not isinstance(seed, bytes):
                raise Exception("Seed must be bytes")
            if len(seed) != nacl.bindings.crypto_sign_SEEDBYTES:
                raise Exception(
                    "The seed must be exactly {} bytes long".format(nacl.bindings.crypto_sign_SEEDBYTES)
                )
            public_key, secret_key = nacl.bindings.crypto_sign_seed_keypair(seed)
        else:
            # No seed supplied: draw a fresh random one.
            r = random(nacl.bindings.crypto_sign_SEEDBYTES)
            public_key, secret_key = nacl.bindings.crypto_sign_seed_keypair(r)
        return cls(public_key, secret_key)

    @classmethod
    def fromPrivateKey(cls, sk):
        """Rebuild a Key from a 64-byte Ed25519 secret key (raw bytes or hex).

        The public key is the trailing 32 bytes of the secret key.
        NOTE(review): the length check runs before hex decoding, so for hex
        input it measures characters, not bytes — confirm intended.
        """
        if len(sk) < 64:
            raise Exception('Not a private key')
        if is_hex(sk):
            sk = from_hex(sk)
        pubkey = sk[32:]
        return cls(pubkey, sk)

    @classmethod
    def verify(cls, pubkey, message):
        """Open a combined signed message; return its payload bytes, or False."""
        if is_hex(pubkey):
            pubkey = from_hex(pubkey)
        smessage = encoding.RawEncoder.decode(message)
        pk = encoding.RawEncoder.decode(pubkey)
        try:
            return nacl.bindings.crypto_sign_open(smessage, pk)
        except:
            # Bad or forged signature (any failure is treated as invalid).
            return False

    def sign(self, msg):
        """Sign *msg* (str or bytes); return the combined signed message bytes."""
        message = str_to_bytes(msg)
        raw_signed = nacl.bindings.crypto_sign(message, self._privkey)
        return encoding.RawEncoder.encode(raw_signed)

    def address(self, tohex=False):
        """Return the derived address, hex-encoded when *tohex* is true."""
        if tohex:
            return to_hex(self._address)
        return self._address

    def publickey(self, tohex=False):
        """Return the public key, hex-encoded when *tohex* is true."""
        if tohex:
            return to_hex(self._pubkey)
        return self._pubkey

    def privatekey(self, tohex=False):
        """Return the secret key, hex-encoded when *tohex* is true."""
        if tohex:
            return to_hex(self._privkey)
        return self._privkey

    def to_json(self):
        """Serialize address / public key / private key as pretty JSON."""
        result = {
            'address': self.address(tohex=True),
            'publickey': self.publickey(tohex=True),
            'privatekey': self.privatekey(tohex=True)
        }
        return json.dumps(result, indent=2)
| StarcoderdataPython |
63497 | #########################################################################################
# Convert Jupyter notebook from Step 1 into Python script with a function called scrape
# that will execute all scraping code and return one Python dictionary containing
# all of the scraped data.
#########################################################################################
# Import Dependencies
from flask import Flask, jsonify
from bs4 import BeautifulSoup as bs
import requests
import pandas as pd
def scrape():
    """Scrape several Mars-related sites and return one dict with all results.

    Returned keys: ``news_title_list``, ``news_p_list``,
    ``featured_image_url``, ``mars_weather``, ``html_table_str``,
    ``hemisphere_image_urls``.
    """
    # 1. Latest news titles and paragraphs from the NASA Mars News site.
    url = 'https://mars.nasa.gov/news/?page=0&per_page=40&order=publish_date+desc%2Ccreated_at+desc&search=&category=19%2C165%2C184%2C204&blank_scope=Latest'
    response = requests.get(url)
    parsed = bs(response.text, 'html.parser')
    news_title_list = []
    news_p_list = []
    for div in parsed.find_all('div', class_='slide'):
        news_title = div.find('div', class_='content_title').text.strip()
        news_p = div.find('div', class_='rollover_description_inner').text.strip()
        news_title_list.append(news_title)
        news_p_list.append(news_p)

    # 2. Featured image from JPL Mars Space Images.
    jplmars_url = 'https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars'
    response = requests.get(jplmars_url)
    parsed_jplmars = bs(response.text, 'html.parser')
    # Robustness fix: default to None so the function cannot raise
    # UnboundLocalError when the page markup changes and no anchor matches.
    featured_image_url = None
    for a in parsed_jplmars.find_all('a', class_='button fancybox'):
        featured_image_url = 'https://www.jpl.nasa.gov' + a.get('data-fancybox-href')

    # 3. Latest Mars weather tweet.
    twitter_url = 'https://twitter.com/marswxreport?lang=en'
    response = requests.get(twitter_url)
    parsed_twitter = bs(response.text, 'html.parser')
    # Robustness fix: find() returns None when the tweet markup is missing;
    # iterating over None previously raised TypeError.
    mars_weather = None
    tweet = parsed_twitter.find('p', class_="TweetTextSize TweetTextSize--normal js-tweet-text tweet-text")
    if tweet is not None:
        for p in tweet:
            mars_weather = p

    # 4. Mars facts table scraped with pandas and rendered back to HTML.
    spacefacts_url = 'https://space-facts.com/mars/'
    tables = pd.read_html(spacefacts_url)
    df = tables[0]
    html_table_str = df.to_html()

    # 5. Hemisphere images from planetary.org.
    hemispheres_url = 'http://www.planetary.org/blogs/guest-blogs/bill-dunford/20140203-the-faces-of-mars.html'
    response = requests.get(hemispheres_url)
    parsed_hemisphere = bs(response.text, 'html.parser')
    hemisphere_image_urls = []
    for img in parsed_hemisphere.find_all('img', class_='img840'):
        hemisphere_image_urls.append({
            'title': img.get('alt'),
            'img_url': img.get('src'),
        })

    # Bundle everything for the caller.
    dict_of_scraped = {
        "news_title_list": news_title_list,
        "news_p_list": news_p_list,
        "featured_image_url": featured_image_url,
        "mars_weather": mars_weather,
        "html_table_str": html_table_str,
        "hemisphere_image_urls": hemisphere_image_urls
    }
    return dict_of_scraped
4818792 | <reponame>ASTARCHEN/astartool
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: 河北雪域网络科技有限公司 A.Star
# @contact: <EMAIL>
# @site: www.snowland.ltd
# @file: imagehelper.py
# @time: 2019/6/27 4:16
# @Software: PyCharm
__author__ = 'A.Star'
import base64
import re
from io import BytesIO
from PIL import Image
def base64_to_image(base64_str, image_path=None):
    """Decode a base64 string (optionally a ``data:image/...`` URL) to a PIL image.

    :param base64_str: base64 payload, with or without a data-URL prefix
    :param image_path: optional path; when given, the image is saved there
    :return: the decoded PIL ``Image`` object
    """
    # Strip a leading data-URL header if present, then decode the payload.
    payload = re.sub('^data:image/.+;base64,', '', base64_str)
    raw_bytes = base64.b64decode(payload)
    img = Image.open(BytesIO(raw_bytes))
    if image_path:
        img.save(image_path)
    return img
| StarcoderdataPython |
42645 | import json
import requests
def analysis(password, path, title):
    """Convert a PDF to text, split it into sections and upload it for analysis.

    :param password: API password appended to the service URL
    :param path: path of the PDF file to analyse
    :param title: paper title attached to the uploaded payload
    :return: decoded service response as a dict, or {'error': status_code}
    """
    from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
    from pdfminer.converter import TextConverter
    from pdfminer.layout import LAParams
    from pdfminer.pdfpage import PDFPage
    from io import StringIO
    print('Convering pdf to text ...')
    rsrcmgr = PDFResourceManager()
    retstr = StringIO()
    codec = 'utf-8'
    laparams = LAParams()
    device = TextConverter(rsrcmgr, retstr, codec=codec, laparams=laparams)
    fp = open(path, 'rb')
    interpreter = PDFPageInterpreter(rsrcmgr, device)
    password_pdf = ""  # PDFs are assumed not to be password protected
    maxpages = 0
    caching = True
    pagenos = set()
    # BUG FIX: the anonymisation placeholder ``<PASSWORD>`` was a syntax
    # error; the intended value is the (empty) PDF password defined above.
    for page in PDFPage.get_pages(fp, pagenos, maxpages=maxpages, password=password_pdf, caching=caching, check_extractable=True):
        interpreter.process_page(page)
    text = retstr.getvalue()
    full_text = text.replace('-\n', '').replace('’', "'")
    fp.close()
    device.close()
    retstr.close()
    text = text.replace('-\n', '').replace('’', "'")
    lines = text.split('\n')
    # A line counts as a section heading when it is surrounded by blank
    # lines, is longer than 3 characters and is not a bare page number.
    lines_section_ids_dict = {}
    lines_section_ids = []
    for i, line in enumerate(lines[1:-2]):
        if len(lines[i-1]) == 0 and len(lines[i+1]) == 0 and len(lines[i]) > 3 and not str(lines[i]).isdigit():
            lines_section_ids_dict[i] = lines[i]
            lines_section_ids.append(i)
    ref_id = -1
    data = []
    for id in lines_section_ids_dict:
        data.append((lines_section_ids_dict[id], id))
    data = dict(data)
    final_data = {}
    final_data['paper_title'] = title
    final_data['full_text'] = full_text
    try:
        ref_id = data['References']
    except KeyError:
        # No "References" heading found: keep everything to the end.
        ref_id = len(lines) - 1
    for i, id in enumerate(lines_section_ids):
        if i < len(lines_section_ids) - 1 and id < ref_id:
            start = lines_section_ids[i]
            end = lines_section_ids[i+1]
            interval_lines = lines[start+1:end]
            interval_lines_txt = ' '.join(interval_lines)
            # Only keep sections with a meaningful amount of text.
            if interval_lines and len(interval_lines_txt) > 100:
                final_data[lines_section_ids_dict[start]] = ' '.join(interval_lines)
    print('Uploading text ...')
    response = requests.post('http://tzagerlib1-env.eba-wjp8tqpj.eu-west-2.elasticbeanstalk.com/paper_analysis/' + password, json=json.dumps(final_data))
    if response.status_code == 200:
        data = dict(response.json())
    else:
        data = {'error': response.status_code}
    data = dict(data)
    return data
def scientific_analysis(password, path, title, topn):
    """Convert a PDF to text, split into sections and upload for scientific analysis.

    :param password: API password appended to the service URL
    :param path: path of the PDF file to analyse
    :param title: paper title attached to the payload
    :param topn: number of top results the service should return
    :return: decoded service response as a dict, or {'error': status_code}
    """
    from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
    from pdfminer.converter import TextConverter
    from pdfminer.layout import LAParams
    from pdfminer.pdfpage import PDFPage
    from io import StringIO
    print('Convering pdf to text ...')
    rsrcmgr = PDFResourceManager()
    retstr = StringIO()
    codec = 'utf-8'
    laparams = LAParams()
    device = TextConverter(rsrcmgr, retstr, codec=codec, laparams=laparams)
    fp = open(path, 'rb')
    interpreter = PDFPageInterpreter(rsrcmgr, device)
    password_pdf = ""  # PDFs are assumed not to be password protected
    maxpages = 0
    caching = True
    pagenos = set()
    # BUG FIX: the anonymisation placeholder ``<PASSWORD>`` was a syntax
    # error; the intended value is the (empty) PDF password defined above.
    for page in PDFPage.get_pages(fp, pagenos, maxpages=maxpages, password=password_pdf, caching=caching, check_extractable=True):
        interpreter.process_page(page)
    text = retstr.getvalue()
    fp.close()
    device.close()
    retstr.close()
    text = text.replace('-\n', '').replace('’', "'").replace('infl', 'infl')
    lines = text.split('\n')
    # A line counts as a section heading when surrounded by blank lines,
    # longer than 3 characters and not a bare page number.
    lines_section_ids_dict = {}
    lines_section_ids = []
    for i, line in enumerate(lines[1:-2]):
        if len(lines[i-1]) == 0 and len(lines[i+1]) == 0 and len(lines[i]) > 3 and not str(lines[i]).isdigit():
            lines_section_ids_dict[i] = lines[i]
            lines_section_ids.append(i)
    data = []
    for id in lines_section_ids_dict:
        data.append((lines_section_ids_dict[id], id))
    data = dict(data)
    final_data = {}
    new_txt = ''
    try:
        ref_id = data['References']
    except KeyError:
        # No "References" heading found: keep everything to the end.
        ref_id = len(lines) - 1
    for i, id in enumerate(lines_section_ids):
        if i < len(lines_section_ids) - 1 and id < ref_id:
            start = lines_section_ids[i]
            end = lines_section_ids[i+1]
            interval_lines = lines[start+1:end]
            interval_lines_txt = ' '.join(interval_lines)
            # Skip boilerplate sections when accumulating the full text.
            if 'Abbreviations' not in lines_section_ids_dict[start] and '18 of 36' not in lines_section_ids_dict[start]:
                new_txt += interval_lines_txt
            if interval_lines and len(interval_lines_txt) > 100:
                final_data[lines_section_ids_dict[start]] = ' '.join(interval_lines)
    final_data['paper_title'] = title
    final_data['full_text'] = new_txt
    final_data['topn'] = topn
    print('Uploading text ...')
    response = requests.post('http://tzagerlib1-env.eba-wjp8tqpj.eu-west-2.elasticbeanstalk.com/scientific_analysis/' + password, json=json.dumps(final_data))
    if response.status_code == 200:
        data = dict(response.json())
    else:
        data = {'error': response.status_code}
    data = dict(data)
    return data
def focus_on(password, pkey, entity):
    """POST a focus-on request for *entity* to the TzagerLib service.

    :return: decoded JSON response as a dict, or {'error': status_code}
    """
    payload = {'password': password, 'pkey': pkey, 'entity': entity}
    response = requests.post(
        'http://tzagerlib1-env.eba-wjp8tqpj.eu-west-2.elasticbeanstalk.com/focus_on',
        json=json.dumps(payload))
    if response.status_code != 200:
        return {'error': response.status_code}
    return dict(response.json())
def compare_papers(password, key1, key2, edges_1, edges_2, main_scope_1, main_scope_2):
    """POST two papers' graphs and scopes for comparison; return the result dict.

    :return: decoded JSON response as a dict, or {'error': status_code}
    """
    payload = {
        'password': password,
        'key1': key1,
        'key2': key2,
        'edges_1': edges_1,
        'edges_2': edges_2,
        'main_scope_1': main_scope_1,
        'main_scope_2': main_scope_2,
    }
    response = requests.post(
        'http://tzagerlib1-env.eba-wjp8tqpj.eu-west-2.elasticbeanstalk.com/compare_papers',
        json=json.dumps(payload))
    if response.status_code != 200:
        return {'error': response.status_code}
    return dict(response.json())
def directory_analysis(password, dir_path):
    """Convert every PDF in *dir_path* to text and upload each for analysis.

    :param password: API password appended to the service URL
    :param dir_path: directory containing the PDF files
    :return: list of the paper ids the service returned (one per upload)
    """
    from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
    from pdfminer.converter import TextConverter
    from pdfminer.layout import LAParams
    from pdfminer.pdfpage import PDFPage
    from io import StringIO
    import glob
    overall_data_to_return = []
    all_pdfs_in_path = glob.glob(dir_path + '/*')
    for ii, path in enumerate(all_pdfs_in_path):
        # The filename (minus directory and .pdf suffix) doubles as the title.
        title = path.replace(dir_path + '/', '').replace('.pdf', '')
        print('Convering pdf to text ...', ii+1, '/', len(all_pdfs_in_path))
        rsrcmgr = PDFResourceManager()
        retstr = StringIO()
        laparams = LAParams()
        device = TextConverter(rsrcmgr, retstr, laparams=laparams)
        fp = open(path, 'rb')
        interpreter = PDFPageInterpreter(rsrcmgr, device)
        password_pdf = ""  # PDFs are assumed not to be password protected
        maxpages = 0
        caching = True
        pagenos = set()
        # BUG FIX: the anonymisation placeholder ``<PASSWORD>`` was a syntax
        # error; the intended value is the (empty) PDF password above.
        # (The unused ``codec`` local was also removed.)
        for page in PDFPage.get_pages(fp, pagenos, maxpages=maxpages, password=password_pdf, caching=caching, check_extractable=True):
            interpreter.process_page(page)
        text = retstr.getvalue()
        fp.close()
        device.close()
        retstr.close()
        text = text.replace('-\n', '').replace('’', "'").replace('infl', 'infl')
        lines = text.split('\n')
        # A line counts as a section heading when surrounded by blank lines,
        # longer than 3 characters and not a bare page number.
        lines_section_ids_dict = {}
        lines_section_ids = []
        for i, line in enumerate(lines[1:-2]):
            if len(lines[i-1]) == 0 and len(lines[i+1]) == 0 and len(lines[i]) > 3 and not str(lines[i]).isdigit():
                lines_section_ids_dict[i] = lines[i]
                lines_section_ids.append(i)
        data = []
        for id in lines_section_ids_dict:
            data.append((lines_section_ids_dict[id], id))
        data = dict(data)
        final_data = {}
        new_txt = ''
        try:
            ref_id = data['References']
        except KeyError:
            # No "References" heading found: keep everything to the end.
            ref_id = len(lines) - 1
        for i, id in enumerate(lines_section_ids):
            if i < len(lines_section_ids) - 1 and id < ref_id:
                start = lines_section_ids[i]
                end = lines_section_ids[i+1]
                interval_lines = lines[start+1:end]
                interval_lines_txt = ' '.join(interval_lines)
                # Skip boilerplate sections when accumulating the full text.
                if 'Abbreviations' not in lines_section_ids_dict[start] and '18 of 36' not in lines_section_ids_dict[start]:
                    new_txt += interval_lines_txt
                if interval_lines and len(interval_lines_txt) > 100:
                    final_data[lines_section_ids_dict[start]] = ' '.join(interval_lines)
        final_data['paper_title'] = title
        final_data['full_text'] = new_txt
        print('Uploading text ...', ii+1, '/', len(all_pdfs_in_path))
        print()
        response = requests.post('http://tzagerlib1-env.eba-wjp8tqpj.eu-west-2.elasticbeanstalk.com/directory_analysis/' + password, json=json.dumps(final_data))
        if response.status_code == 200:
            r_data = dict(response.json())
        else:
            r_data = {'error': response.status_code}
        r_data = dict(r_data)
        if 'paper_id' in r_data:
            overall_data_to_return.append(r_data['paper_id'])
    return overall_data_to_return
def directory_scopes(password, papers_ids):
    """Fetch scopes for previously uploaded *papers_ids*; return the result dict.

    :return: decoded JSON response as a dict, or {'error': status_code}
    """
    payload = json.dumps({'papers_ids': papers_ids})
    response = requests.post(
        'http://tzagerlib1-env.eba-wjp8tqpj.eu-west-2.elasticbeanstalk.com/directory_scopes/' + password,
        json=payload)
    if response.status_code != 200:
        return {'error': response.status_code}
    return dict(response.json())
def complementary_papers(password, papers_ids):
    """Request papers complementary to *papers_ids*; return the result dict.

    :return: decoded JSON response as a dict, or {'error': status_code}
    """
    payload = json.dumps({'papers_ids': papers_ids})
    response = requests.post(
        'http://tzagerlib1-env.eba-wjp8tqpj.eu-west-2.elasticbeanstalk.com/complementary_papers/' + password,
        json=payload)
    if response.status_code != 200:
        return {'error': response.status_code}
    return dict(response.json())
def intuition_connection(password, papers_ids, focus_on=None):
    """Request an intuition connection over *papers_ids*; return the result dict.

    Note: the *focus_on* parameter shadows the module-level ``focus_on()``
    function inside this body (name kept for interface compatibility).

    :return: decoded JSON response as a dict, or {'error': status_code}
    """
    payload = json.dumps({'paper_ids': papers_ids, 'focus_on': focus_on})
    response = requests.post(
        'http://tzagerlib1-env.eba-wjp8tqpj.eu-west-2.elasticbeanstalk.com/intuition_connection/' + password,
        json=payload)
    if response.status_code != 200:
        return {'error': response.status_code}
    return dict(response.json())
def intuition_mechanisms(password, papers_ids, focus_on=None):
    """Request intuition mechanisms over *papers_ids*; return the result dict.

    Note: the *focus_on* parameter shadows the module-level ``focus_on()``
    function inside this body (name kept for interface compatibility).

    :return: decoded JSON response as a dict, or {'error': status_code}
    """
    payload = json.dumps({'paper_ids': papers_ids, 'focus_on': focus_on})
    response = requests.post(
        'http://tzagerlib1-env.eba-wjp8tqpj.eu-west-2.elasticbeanstalk.com/intuition_mechanisms/' + password,
        json=payload)
    if response.status_code != 200:
        return {'error': response.status_code}
    return dict(response.json())
| StarcoderdataPython |
186512 | <gh_stars>0
import os
import sys
import math
class Player:
    """Lean Poker bot: decides a bet amount from the tournament game_state dict."""

    VERSION = "DS.5.6.0"

    # Sample game_state payload kept for reference only (single-quoted, so it
    # is NOT valid JSON and is never parsed anywhere in this class).
    testJSon = """{'community_cards': [],
    'minimum_raise': 2,
    'big_blind': 4,
    'orbits': 0,
    'in_action': 3,
    'bet_index': 2,
    'current_buy_in': 4,
    'round': 0,
    'players': [{'id': 0, 'bet': 0, 'version': 'Pony 1.0.0', 'time_used': 0, 'stack': 1000, 'status': 'active', 'name': '<NAME>'},
    {'id': 1, 'bet': 2, 'version': '1.0', 'time_used': 0, 'stack': 998, 'status': 'active', 'name': 'PokerMasters'},
    {'id': 2, 'bet': 4, 'version': 'ERROR: Unreachable', 'time_used': 0, 'stack': 996, 'status': 'active', 'name': 'NADagascar'},
    {'hole_cards': [{'suit': 'hearts', 'rank': '4'},
    {'suit': 'diamonds', 'rank': 'Q'}],
    'bet': 0,
    'version': 'DS.1.0.0',
    'time_used': 0,
    'id': 3,
    'stack': 1000,
    'status': 'active',
    'name': 'TheEmpireDidNothingWrong'},
    {'id': 4,
    'bet': 0,
    'version': '1.0',
    'time_used': 0,
    'stack': 1000,
    'status': 'active',
    'name': 'Hive'},
    {'id': 5, 'bet': 0, 'version': 'Gopnik_FM_ver_1.0', 'time_used': 0, 'stack': 1000, 'status': 'active', 'name': 'Gopnik FM'}],
    'small_blind': 2,
    'game_id': '5c5d4b96a972e80004000021',
    'dealer': 0,
    'pot': 6,
    'tournament_id': '5c38a553b0fea40004000003'}
    """

    def betRequest(self, game_state):
        """Return the bet amount (int) for the current game state.

        Logs every decision to stdout; on any exception the bet falls back
        to 0 (fold/check) rather than crashing the tournament round.
        """
        sys.stdout.write("_______ WE'RE ON!!4!4 ______" + "\n")
        sys.stdout.write(str(game_state) + "\n")
        # Rank-name to numeric value map (built but only used by the
        # commented-out "close ranks" branch below).
        ranks = {'2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8,
                 '9': 9, '10': 10, 'J': 11, 'Q': 12, 'K': 13, 'A': 14}
        high_ranks = ['J', 'Q', 'K', 'A']
        current_buy_in = int(game_state['current_buy_in'])
        minimum_raise = int(game_state['minimum_raise'])
        community_cards = game_state['community_cards'] if 'community_cards' in game_state else None
        players = game_state['players']
        our_bet = 0
        still_close = 2
        sys.stdout.write("Initialized round\n")
        try:
            sys.stdout.write("Inside try statement\n")
            # searching for our player
            my_player = dict()
            for player in players:
                if player['name'] == 'TheEmpireDidNothingWrong':
                    my_player = player
            # getting our data
            my_stack = int(my_player['stack'])
            my_cards = my_player['hole_cards']
            my_high_cards = [card for card in my_cards if card['rank'] in high_ranks]
            all_cards = my_cards + community_cards
            my_ranks = [card['rank'] for card in my_cards]
            all_ranks = [card['rank'] for card in all_cards]
            all_suit_frequencies = self.get_suit_frequencies(all_cards)
            hand_suit_frequencies = self.get_suit_frequencies(my_cards)
            community_suit_frequencies = self.get_suit_frequencies(community_cards)
            # checking how many players there are
            if self.get_active_players(players) <= 6:
                # checking if we have a high rank pair
                if my_ranks[0] == my_ranks[1] and my_ranks[0] in high_ranks:
                    our_bet = self.handle_high_rank_pair(all_cards, my_stack)
                # checking if we have a high rank pair with one of the community cards
                elif len(my_high_cards) > 0:
                    for high_card in my_high_cards:
                        if self.is_there_pair_with_community_deck(high_card, community_cards):
                            if current_buy_in > my_stack:
                                our_bet = my_stack
                                sys.stdout.write("ALL IN BECAUSE HIGH PAIR")
                            else:
                                our_bet = current_buy_in
                                sys.stdout.write("CALL BECAUSE DON'T HAVE MONEY FOR ALL IN // HIGH PAIR")
                # NOTE(review): unreachable — the previous branch already
                # matches whenever len(my_high_cards) > 0.
                elif len(my_high_cards) >= 2:
                    if current_buy_in > my_stack:
                        our_bet = my_stack
                        sys.stdout.write("ALL IN BECAUSE BOTH CARDS ARE HIGH")
                    else:
                        our_bet = current_buy_in
                        sys.stdout.write("CALL BECAUSE DON'T HAVE MONEY FOR ALL IN // ONE HIGH CARD")
                # checking if there are 3 cards on the table and 4 identical suits
                elif len(community_cards) == 3 and max(all_suit_frequencies.values()) >= 4:
                    our_bet = current_buy_in + 200
                # checking if flush
                elif len(community_cards) == 5 and max(hand_suit_frequencies.values()) >= 2:
                    if community_suit_frequencies[my_cards[0]["suit"]] >= 3:
                        our_bet = my_stack
                # checking if our ranks are close
                # elif abs(ranks[my_ranks[0]] - ranks[my_ranks[1]]) <= still_close:
                #     our_bet = self.handle_close_ranks(current_buy_in, my_stack)
                # checking if all community cards are dealt
                elif community_cards is not None and len(community_cards) >= 4:
                    our_bet = current_buy_in
                    sys.stdout.write("Bet calculated based on ALL CARDS ON TABLE: " + str(our_bet) + "\n")
                else:
                    sys.stdout.write("We don't have good enough cards :/")
            else:
                our_bet = 0
                sys.stdout.write("Too many players for us :( \n")
        except Exception as e:
            # Any failure (e.g. missing hole_cards pre-deal) folds safely.
            our_bet = 0
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            sys.stdout.write(str(e) + "\n")
            # print call stack
            sys.stdout.write(", ".join([str(exc_type), str(fname), str(exc_tb.tb_lineno)]) + "\n")
            sys.stdout.write("Bet calculated based on caught exception: " + str(our_bet) + "\n")
        # log state
        sys.stdout.write("Our bet: " + str(our_bet) + "\n")
        sys.stdout.write("Bet value type: " + str(type(our_bet)) + "\n")
        return our_bet

    def handle_close_ranks(self, current_buy_in, my_stack):
        """Call (capped at our stack) when our two hole ranks are close."""
        our_bet = min(my_stack, current_buy_in)
        sys.stdout.write("Bet calculated based on CLOSE RANKS: " + str(our_bet) + "\n")
        return our_bet

    def handle_high_ranks(self, current_buy_in, high_ranks, my_ranks, my_stack):
        """Bet when holding high-rank hole cards (one-high branch is disabled)."""
        if my_ranks[0] in high_ranks and my_ranks[1] in high_ranks:
            our_bet = min(min(my_stack, current_buy_in), my_stack)
            sys.stdout.write("Bet calculated based on TWO HIGH RANKS: " + str(our_bet) + "\n")
        else:
            # NOTE(review): the one-high-card call is intentionally disabled
            # (`if False`), so this always folds.
            if False:
                our_bet = current_buy_in
                sys.stdout.write("Bet calculated based on ONE HIGH RANK: " + str(our_bet) + "\n")
            else:
                our_bet = 0
                sys.stdout.write("Bet calculated based on ONE HIGH RANK: " + str(our_bet) + "\n")
        return our_bet

    def handle_high_rank_pair(self, cards, my_stack):
        """Go all-in on a high-rank pocket pair."""
        our_bet = my_stack
        sys.stdout.write("Bet calculated based on PAIR: " + str(our_bet) + "\n")
        return our_bet

    def is_there_pair_with_community_deck(self, my_high_card, community_cards):
        """Return True if *my_high_card* pairs with any community card."""
        for community_card in community_cards:
            if my_high_card['rank'] == community_card['rank']:
                return True
        return False

    def is_there_drill(self, my_cards):
        """Not implemented: three-of-a-kind detection placeholder."""
        pass

    def get_suit_frequencies(self, list_of_cards):
        """Return a dict mapping suit name -> occurrence count in *list_of_cards*."""
        freq = dict()
        for item in list_of_cards:
            if item['suit'] in freq:
                freq[item['suit']] += 1
            else:
                freq[item['suit']] = 1
        return freq

    def get_active_players(self, players):
        """Return how many players still have status 'active'."""
        active_players = [player for player in players if player['status'] == 'active']
        return len(active_players)

    def showdown(self, game_state):
        """Called at showdown; no learning/bookkeeping is done."""
        pass
| StarcoderdataPython |
56697 | """ NOTICE: A Custom Dataset SHOULD BE PROVIDED
Created: May 02,2019 - <NAME>
Revised: May 07,2019 - <NAME>
"""
import os
import numpy as np
from PIL import Image
import torchvision.transforms as transforms
import inception_preprocessing
from torch.utils.data import Dataset
__all__ = ['CustomDataset']
config = {
# e.g. train/val/test set should be located in os.path.join(config['datapath'], 'train/val/test')
'datapath': '/data/shaozl/WS-DAN.PyTorch/dataset',
}
class My_transform(object):
    """Callable transform mapping tensor values from [0, 1] to [-1, 1]."""

    def __call__(self, image):
        # Delegate to the module-level helper so the scaling lives in one place.
        return add_and_mul(image)
def add_and_mul(image):
    """Rescale values from the [0, 1] range to [-1, 1] (Inception-style)."""
    return (image - 0.5) * 2.0
class CustomDataset(Dataset):
    """
    # Description:
        Basic class for retrieving images and labels

    # Member Functions:
        __init__(self, phase, shape):   initializes a dataset
            phase:                      a string in ['train', 'val', 'test']
            shape:                      output shape/size of an image

        __getitem__(self, item):        returns an image
            item:                       the idex of image in the whole dataset

        __len__(self):                  returns the length of dataset
    """

    def __init__(self, phase='train', shape=(512, 512)):
        # Build the class-index/name lookup before anything else.
        self.create_lable_map()
        assert phase in ['train', 'val', 'test']
        self.phase = phase
        self.data_path = os.path.join(config['datapath'], phase)
        self.data_list = os.listdir(self.data_path)
        self.shape = shape
        self.config = config

        if self.phase == 'train':
            # Train-time pipeline: resize 1/0.875x, random crop + flip + jitter,
            # then rescale to [-1, 1] via My_transform (Inception-style).
            self.transform = transforms.Compose([
                transforms.Resize(size=(int(self.shape[0]*1.0/0.875), int(self.shape[1]*1.0/0.875))),
                transforms.RandomCrop((self.shape[0], self.shape[1])),
                transforms.RandomHorizontalFlip(p=0.5),
                transforms.ColorJitter(brightness=0.125, contrast=0.5),
                transforms.ToTensor(),
                My_transform()
                # transforms.ToTensor(),
                # transforms.Normalize(mean=[0.4729, 0.4871, 0.4217], std=[0.1589, 0.1598, 0.1681])
                # tensor([0.4856, 0.4994, 0.4324]) tensor([0.1784, 0.1778, 0.1895])  mean/std without augmentation
                # tensor([0.4729, 0.4871, 0.4217]) tensor([0.1589, 0.1598, 0.1681])  mean/std with augmentation
            ])
        else:
            # Eval-time pipeline: deterministic resize + center crop.
            self.transform = transforms.Compose([
                # transforms.Resize(size=(self.shape[0], self.shape[1])),
                transforms.Resize(size=(int(self.shape[0] * 1.0 / 0.875), int(self.shape[1] * 1.0 / 0.875))),
                transforms.CenterCrop((self.shape[0], self.shape[1])),
                transforms.ToTensor(),
                My_transform()
                # transforms.Normalize(mean=[0.4862, 0.4998, 0.4311], std=[0.1796, 0.1781, 0.1904])
                # tensor([0.4862, 0.4998, 0.4311]) tensor([0.1796, 0.1781, 0.1904])  mean/std without augmentation
            ])

    def __getitem__(self, item):
        """Return (image, label) for train/val, or (image, filename) for test."""
        image = Image.open(os.path.join(self.data_path, self.data_list[item])).convert('RGB')  # (C, H, W)
        image = self.transform(image)
        assert image.size(1) == self.shape[0] and image.size(2) == self.shape[1]

        if self.phase != 'test':
            # filename of image should have 'id_label.jpg/png' form
            label = int(self.class_name.index(self.data_list[item].rsplit('_', 2)[0].lower()))  # label
            return image, label
        else:
            # filename of image should have 'id.jpg/png' form, and simply return filename in case of 'test'
            return image, self.data_list[item]

    def __len__(self):
        """Return the number of images found in the phase directory."""
        return len(self.data_list)

    def create_lable_map(self):
        """Load CUB-200-2011 classes.txt into parallel index/name lists.

        NOTE(review): the file path is hard-coded to an absolute location;
        presumably valid only on the original training machine — confirm.
        (Method name keeps the original "lable" spelling for compatibility.)
        """
        with open('/data/shaozl/WS-DAN.PyTorch/CUB_200_2011/classes.txt') as f:
            class_index_and_name = f.readlines()
        class_index_and_name = [i.strip().lower() for i in class_index_and_name]
        self.class_index = [i.split(' ', 1)[0] for i in class_index_and_name]
        self.class_name = [i.split(' ', 1)[1].split('.', 1)[1] for i in class_index_and_name]
| StarcoderdataPython |
196566 | """
Given a sequence originalSeq and an array of sequences, write a method to find if originalSeq can be uniquely
reconstructed from the array of sequences.
Unique reconstruction means that we need to find if originalSeq is the only sequence such that all sequences in
the array are subsequences of it.
Example 1:
Input: originalSeq: [1, 2, 3, 4], seqs: [[1, 2], [2, 3], [3, 4]]
Output: true
Explanation: The sequences [1, 2], [2, 3], and [3, 4] can uniquely reconstruct
[1, 2, 3, 4], in other words, all the given sequences uniquely define the order of numbers
in the 'originalSeq'.
Example 2:
Input: originalSeq: [1, 2, 3, 4], seqs: [[1, 2], [2, 3], [2, 4]]
Output: false
Explanation: The sequences [1, 2], [2, 3], and [2, 4] cannot uniquely reconstruct
[1, 2, 3, 4]. There are two possible sequences we can construct from the given sequences:
1) [1, 2, 3, 4]
2) [1, 2, 4, 3]
Example 3:
Input: originalSeq: [3, 1, 4, 2, 5], seqs: [[3, 1, 5], [1, 4, 2, 5]]
Output: true
Explanation: The sequences [3, 1, 5] and [1, 4, 2, 5] can uniquely reconstruct
[3, 1, 4, 2, 5].
"""
from collections import deque
def can_construct(originalSeq, sequences):
    """Return True iff *originalSeq* is the only ordering consistent with *sequences*.

    Every adjacent pair (a, b) inside any sequence is read as the rule
    "a must come before b"; originalSeq is uniquely reconstructible when a
    topological sort of those rules is forced at every step and matches it.

    :param originalSeq: candidate full ordering of distinct values
    :param sequences: list of subsequences supplying the ordering rules
    :return: True when originalSeq is the unique valid ordering, else False
    """
    if not originalSeq:
        return False

    # Build the precedence graph keyed on the actual values of originalSeq.
    # (The original implementation assumed values were exactly 1..n and
    # raised KeyError for any other input instead of returning False.)
    inDegree = {value: 0 for value in originalSeq}
    graph = {value: [] for value in originalSeq}
    for seq in sequences:
        for parent, child in zip(seq, seq[1:]):
            if parent not in graph or child not in graph:
                # A rule mentions a value absent from originalSeq.
                return False
            if parent != child:
                graph[parent].append(child)
                inDegree[child] += 1

    sources = deque(v for v, degree in inDegree.items() if degree == 0)
    sortedOrder = []
    while sources:
        if len(sources) > 1:
            # More than one candidate for the next position -> not unique.
            return False
        if originalSeq[len(sortedOrder)] != sources[0]:
            # The single forced next value disagrees with originalSeq.
            return False
        vertex = sources.popleft()
        sortedOrder.append(vertex)
        for child in graph[vertex]:
            inDegree[child] -= 1
            if inDegree[child] == 0:
                sources.append(child)

    # False when a cycle prevented producing the full ordering.
    return len(sortedOrder) == len(originalSeq)
if __name__ == "__main__":
print("Can construct: " + str(can_construct([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4]])))
print("Can construct: " + str(can_construct([1, 2, 3, 4], [[1, 2], [2, 3], [2, 4]])))
print("Can construct: " + str(can_construct([3, 1, 4, 2, 5], [[3, 1, 5], [1, 4, 2, 5]]))) | StarcoderdataPython |
99551 | #!/usr/bin/env python3
#### Intro to Python and Jupyter Notebooks ####
#### Before class ####
# share URL to hack.md
# check installation of python and jupyter notebooks
#### Welcome ####
# instructor introduction
# overview of fredhutch.io
# sign in
# learner introductions and motivation
# overview course philosophy, how to stay engaged
# course objectives: overview of basic functionality of python (syntax, data manipulation, visualization)
#### Objectives ####
# Today:
# Python and jupyter notebooks
# operators, functions, and data types
# sequences and dictionaries
# defining functions
#### Orientation to Python and projects ####
# ask about previous experience with Python or other programming languages
# overview course philosophy, how to stay engaged
# motivation for Python: programming language, reproducibility, open source
# many ways to interact with Python
# python in terminal
# ipython in terminal
# save script in text editor
# IDE like spyder
# notebook: web application that combines code, graphs, and text
# interactive mode in terminal, chevrons (>>>) is prompt, waiting for input
# scripting mode: save commands in file (ends in .py), execute entire file at once
# about our tools
# Anaconda: distribution (way of obtaining) Python;
# includes extra packages like ipython, spyder
# conda: package manager that comes with Anaconda, installs/updates packages
# jupyter notebook: installed with Anaconda
# setting up a project directory
# create new directory called python_project
# keep data, analyses, and text in single folder (directory)
# scripts and text files in folder relate to relative places in the directory
# suggested directories to go with project:
# data/ which may include separate directories for raw and processed data
# documents/ for outlines, drafts, and other text
# scripts/ for scripts related to data cleaning, analysis, plotting
# setting up Notebooks
# open Terminal (Mac) or Command Prompt (Windows)
# execute `jupyter notebook`
# terminal window must stay open, this is kernel (running python)
# web browser is how you interact with notebook
# navigate to project directory
# click "New" in upper right hand, then select "Python3"
# creates notebook (*.ipynb, or ipython notebook file)
# autosaves, or can save manually
# click on title to rename
# executing code in a jupyter notebook:
# enter code in cell and execute by pressing Shift + Return/enter
# output is printed directly below cell, prefaced by Out[ ]:
# add new cell with + button
# can add Markdown cells with nicely formatted text
# comments prefaced with # (not read/executed by python)
# commands and output saved in notebook
# talk about other menu options and buttons to remove/add/run cells
# example notebook: https://github.com/rasilab/machkovech_2018/blob/master/scripts/NA43_competition.ipynb
#### Operators, functions, and data types ####
# operators (mathematical calculations)
4 + 5
4+5 # spaces are optional, but easier to read
3 > 4 # comparisons, logic
# built-in data types: strings, integers, floats
number = 42
pi_value = 3.1415
text = "<NAME>"
# find type of each using function (type)
type(number) # integer
type(pi_value) # float
type(text) # string: characters (letters, numbers, punctuation, emoji)
# convert float to integer
int(pi_value) # decimals removed
type(pi_value) # type hasn't changed!
pi_value = int(pi_value) # convert original object
type(pi_value)
# convert integer to float
float(number) # decimals added
# see value of something (required to display output in a script)
print(text)
# demo example.py in separate window
# print and type are built in functions; there can also be methods (subset of functions) and user-defined functions
# find help on a function
help(print)
#### Sequences ####
# lists: data structure that holds sequence of elements
# surrounded by square brackets
numbers = [1, 2, 3]
# reference one part of a list
numbers[0] # indexing starts at 0
# find help on an object (can also check under help menu)
?numbers # can also use help(numbers), but may not be useful to you right now
# add number to end of list
numbers.append(4) # append() is an attribute
print(numbers)
# can use tab complete in notebook to see other options
?numbers.append # find help for attribute
# lists can be string data as well
organs = ["lung", "breast", "prostate"]
## Challenge: what google search could you use to determine a method for adding multiple values to a list?
## Challenge: how do you remove items from a list?
# tuple: list with ordered sequence of elements; cannot be modified
# surrounded by parentheses
a_tuple = (1, 2, 3)
## Challenge:
# What happens when you execute:
#numbers[1] = 5
#a_tuple[2] = 5
# Traceback is a multi-line error block printed
# includes information on what error and where in code
# comment out code error if you want to keep it for notes
# sequences can include more than just one data type
mix_tuple = ("lung", 200, "chromosome 1") # can apply to lists, but they're more often one data type
# for loop to access elements in list, tuple, or other data structure one at a time
for num in mix_tuple:
print(num)
#### Dictionaries ####
# dictionary: container holding a pair of objects, key and value
translation = {"one": 1, "two": 2}
translation["one"]
# can include lists as values
list_value = {"yes": [1, 2, 3]}
# cannot include lists as keys
#list_key = {[1, 2, 3]: "nope"}
# add items to dictionaries by assigning new value to key
rev = {1: "one", 2: "two"} # different data types can be key, value
rev[3] = "three"
rev
# access each element by key
for key in rev.keys():
print(key, "->", rev[key])
# access each element one at a time with item
for key, value in rev.items():
print(key, "->", value)
## Challenge:
# print only the values of the rev dictionary to the screen
# Reassign the second value (in the key value pair) so that it no longer reads “two” but instead “apple-sauce”
# Print the values of rev to the screen again to see if the value has changed
#### Functions ####
# define a chunk of code as function
def add_function(a, b):
    """Return the sum of a and b (example of a user-defined function)."""
    total = a + b
    return total
z = add_function(20, 22)
print(z)
## Challenge: define a new function called subtract_function that subtracts d from c and test on numbers of your choice
#### Wrapping up ####
# make sure work is saved
# review how to get back into notebook
# review objectives
# preview next week's objectives
# remind to sign in
| StarcoderdataPython |
3352802 | <gh_stars>0
from .utils import sorted_by_key
def stations_level_over_threshold(stations, tol):
    """
    Function that returns a list of (station, level) tuples with stations
    at which the latest relative water level is over tol.

    Inputs
    ------
    stations: list of MonitoringStation objects
    tol : float

    Returns
    ---------
    list of (station, relative_level) tuples for stations whose latest
    relative water level exceeds tol, sorted in descending order of level.
    Stations without consistent level data (relative_water_level() is None)
    are skipped.
    """
    exceeded_list = []
    for station in stations:
        water_lvl = station.relative_water_level()
        # `is not None` rather than `!= None` (PEP 8); note 0.0 is valid data,
        # so a plain truthiness check would also be wrong here.
        if water_lvl is not None and water_lvl > tol:
            exceeded_list.append((station, water_lvl))
    # Sort by the level element (index 1), highest first.
    return sorted_by_key(exceeded_list, 1, reverse=True)
def stations_highest_rel_level(stations, N):
    """
    Return the stations with the highest relative water level.

    Inputs
    ------
    stations: list of MonitoringStation objects
    N: int, the number of stations to return

    Returns
    ------
    the N stations with the highest relative water level, in descending order
    """
    # Reuse the threshold helper with tol=0.0, which already returns
    # (station, level) pairs sorted by level, highest first.
    ranked = stations_level_over_threshold(stations, 0.0)
    return [entry[0] for entry in ranked[:N]]
3366253 | <reponame>cajfisher/vasppy<gh_stars>10-100
from lxml import etree # type: ignore
from typing import List, Union, Optional, Any, Dict
from pymatgen.core import Structure # type: ignore
import numpy as np # type: ignore
def parse_varray(varray: etree.Element) -> Union[List[List[float]],
                                                 List[List[int]],
                                                 List[List[bool]]]:
    """Parse <varray> data.

    Args:
        varray (etree.Element): xml <varray> element.

    Returns:
        (list(list)): A nested list of either float, int, or bool, depending
            on the varray's ``type`` attribute (int / logical / default float).
    """
    rows = [v.text.split() for v in varray.findall("v")]
    data_type = varray.get("type", None)
    if data_type == 'int':
        return [[int(token) for token in row] for row in rows]
    if data_type == 'logical':
        # VASP writes logicals as "T" / "F".
        return [[token == "T" for token in row] for row in rows]
    return [[float(token) for token in row] for row in rows]
def parse_structure(structure: etree.Element) -> Dict[str, Any]:
    """Parse <structure> data.

    Args:
        structure (etree.Element): xml <structure> element.

    Returns:
        (dict): Dictionary of structure data:
            `lattice`: cell matrix (list(list(float))).
            `frac_coords`: atom fractional coordinates (list(list(float))).
            `selective_dynamics`: selective dynamics (list(list(bool))|None).
    """
    lattice = parse_varray(structure.find("crystal").find("varray"))
    frac_coords = parse_varray(structure.find("varray"))
    selective = structure.find("varray/[@name='selective']")
    # Truthiness check kept deliberately (not `is not None`) to mirror the
    # original lxml Element semantics: only a non-empty element is parsed.
    if selective:
        selective = parse_varray(selective)
    return {
        'lattice': lattice,
        'frac_coords': frac_coords,
        'selective_dynamics': selective,
    }
def structure_from_structure_data(lattice: List[List[float]],
                                  atom_names: List[str],
                                  frac_coords: List[List[float]]) -> Structure:
    """Generate a pymatgen Structure.

    Args:
        lattice (list(list(float)): 3x3 cell matrix.
        atom_names (list(str)): list of atom name strings.
        frac_coords (list(list(float)): Nx3 list of fractional coordinates.

    Returns:
        (pymatgen.Structure)
    """
    # Coordinates are fractional by construction, so pass them through as-is.
    return Structure(lattice=lattice,
                     species=atom_names,
                     coords=frac_coords,
                     coords_are_cartesian=False)
class Vasprun:
    """Object for parsing vasprun.xml data.

    Attributes:
        atom_names (list(str)): List of atom name strings.
        structures (list(pymatgen.Structure)): List of structures as pymatgen Structure objects.
        frac_coords (np.array): timesteps x atoms x 3 numpy array of fractional coordinates.
        cart_coords (np.array): timesteps x atoms x 3 numpy array of cartesian coordinates.
        forces (:obj:`np.array`, optional): timesteps x atoms x 3 numpy array of forces.

    Examples:
        >>> vasprun = Vasprun('vasprun.xml')
        >>> cart_coords = vasprun.cart_coords
        >>> forces = vasprun.forces
    """

    def __init__(self,
                 filename: str) -> None:
        """Initialise a Vasprun object from a vasprun.xml file.

        Args:
            filename (str): The vasprun.xml filename.

        Returns:
            None
        """
        doc = etree.parse(filename)
        self.doc = doc.getroot()
        self._atom_names = None  # type: Optional[List[str]]
        self._structures = None  # type: Optional[List[Structure]]

    @property
    def structures(self) -> List[Structure]:
        """Getter for structures attribute.

        Returns:
            (list(pymatgen.Structure)): A list of pymatgen Structure objects.

        Notes:
            When first called this parses the vasprun XML data and
            caches the result.
        """
        # `is None` (not truthiness): an empty parse result is still a valid
        # cached value and must not trigger a re-parse on every access.
        if self._structures is None:
            self._structures = self.parse_structures()
        return self._structures

    @property
    def atom_names(self) -> List[str]:
        """Getter for atom_names attribute.

        Returns:
            (list(str)): A list of atom name strings.

        Notes:
            When first called this parses the vasprun XML data and
            caches the result.
        """
        # `is None` for the same caching reason as `structures` above.
        if self._atom_names is None:
            self._atom_names = self.parse_atom_names()
        return self._atom_names

    def parse_atom_names(self) -> List[str]:
        """Return a list of atom names for the atoms in this calculation.

        Args:
            None

        Returns:
            (list(str))

        Raises:
            ValueError: if the file has no <atominfo> element or no
                "atoms" array inside it.
        """
        atominfo = self.doc.find("atominfo")
        if atominfo is None:
            raise ValueError("No atominfo found in file")
        atom_names = []
        for array in atominfo.findall("array"):
            if array.attrib["name"] == "atoms":
                atom_names = [rc.find("c").text.strip() for rc in array.find("set")]
        if not atom_names:
            raise ValueError("No atomname found in file")
        return atom_names

    def parse_structures(self) -> List[Structure]:
        """Returns a list of pymatgen Structures for this calculation.

        Args:
            None

        Returns:
            (list(pymatgen.Structure))
        """
        structures = []
        for child in self.doc.iterfind("calculation"):
            elem = child.find("structure")
            structure_data = parse_structure(elem)
            structures.append(
                structure_from_structure_data(
                    lattice=structure_data['lattice'],
                    atom_names=self.atom_names,
                    frac_coords=structure_data['frac_coords']
                )
            )
        return structures

    @property
    def frac_coords(self) -> np.ndarray:
        """Fractional coordinates from each calculation structure.

        Args:
            None

        Returns:
            (np.ndarray): timesteps x atoms x 3 numpy array of fractional coordinates.
        """
        frac_coords = np.array([s.frac_coords for s in self.structures])
        return frac_coords

    @property
    def cart_coords(self) -> np.ndarray:
        """Cartesian coordinates from each calculation structure.

        Args:
            None

        Returns:
            (np.ndarray): timesteps x atoms x 3 numpy array of cartesian coordinates.
        """
        cart_coords = np.array([s.cart_coords for s in self.structures])
        return cart_coords

    @property
    def forces(self) -> Optional[np.ndarray]:
        """Cartesian forces from each calculation structure
        (if present in the vasprun XML).

        Args:
            None

        Returns:
            (np.ndarray|None): timesteps x atoms x 3 numpy array of cartesian forces
                if forces are included in the vasprun XML. If not, returns None.
        """
        forces = []
        for child in self.doc.iterfind("calculation"):
            elem = child.find("varray/[@name='forces']")
            # `is not None` rather than `!= None`: identity test is the
            # correct (and warning-free) way to check `find()` results.
            if elem is not None:
                forces.append(parse_varray(elem))
        if forces:
            return np.array(forces)
        else:
            return None
1715497 | r"""Test :py:class:`lmp.model` signature."""
import inspect
from lmp.model import LSTMModel, RNNModel
def test_class():
    r"""Ensure class signature."""
    # LSTMModel must be a concrete (non-abstract) class derived from RNNModel.
    assert inspect.isclass(LSTMModel)
    assert not inspect.isabstract(LSTMModel)
    assert issubclass(LSTMModel, RNNModel)
def test_class_attribute():
    r"""Ensure class attributes' signature."""
    # Registry name is fixed to 'LSTM'; the checkpoint file name must be
    # inherited unchanged from the RNN base class.
    assert isinstance(LSTMModel.model_name, str)
    assert LSTMModel.model_name == 'LSTM'
    assert LSTMModel.file_name == RNNModel.file_name
def test_inherent_method():
    r'''Ensure inherent methods' signature are same as base class.'''
    # `__init__` and `load` are overridden, so only their signatures are
    # compared; the remaining methods must be inherited unchanged
    # (identical function objects).
    assert (
        inspect.signature(LSTMModel.__init__)
        ==
        inspect.signature(RNNModel.__init__)
    )
    assert LSTMModel.forward == RNNModel.forward
    assert (
        inspect.signature(LSTMModel.load)
        ==
        inspect.signature(RNNModel.load)
    )
    assert LSTMModel.loss_fn == RNNModel.loss_fn
    assert LSTMModel.pred == RNNModel.pred
    assert LSTMModel.ppl == RNNModel.ppl
    assert LSTMModel.save == RNNModel.save
    assert LSTMModel.train_parser == RNNModel.train_parser
| StarcoderdataPython |
3233228 | <gh_stars>0
# Copyright (c) 2011-2013 <NAME>. All rights reserved.
# Use of this source code is governed by a BSD License found in README.md.
from django.db import models
class Conference(models.Model):
    """A BMUN conference session with its registration window and the
    country/committee auto-assignment logic (Python 2 / Django)."""
    session = models.PositiveSmallIntegerField(default=0)
    start_date = models.DateField()
    end_date = models.DateField()
    reg_open = models.DateField()
    early_reg_close = models.DateField()
    reg_close = models.DateField()
    min_attendance = models.PositiveSmallIntegerField(default=0)
    max_attendance = models.PositiveSmallIntegerField(default=0)

    @staticmethod
    def auto_country_assign(school):
        '''Automatically assign a school country and committee assignments
        based on preference, and then by default order.'''
        spots_left = school.max_delegation_size
        # Pass 1: special countries x preferred committees, capped at ~15%
        # of the delegation size (the +1 ensures a non-zero cap).
        if spots_left:
            spots_left = Conference.auto_assign(Country.objects.filter(special=True).order_by('?'),
                                                school.get_committee_preferences(),
                                                school,
                                                spots_left,
                                                (school.max_delegation_size*.15)+1)
        # Pass 2: regular countries x preferred committees, capped at ~20%.
        if spots_left:
            spots_left = Conference.auto_assign(Country.objects.filter(special=False).order_by('?'),
                                                school.get_committee_preferences(),
                                                school,
                                                spots_left,
                                                (school.max_delegation_size*.20)+1)
        # Pass 3: preferred countries x any regular committee (default cap).
        if spots_left:
            spots_left = Conference.auto_assign(school.get_country_preferences(),
                                                Committee.objects.filter(special=False).order_by('?'),
                                                school,
                                                spots_left)
        # Pass 4: anything left, regular countries x regular committees.
        if spots_left:
            spots_left = Conference.auto_assign(Country.objects.filter(special=False).order_by('?'),
                                                Committee.objects.filter(special=False).order_by('?'),
                                                school,
                                                spots_left)

    @staticmethod
    def auto_assign(countries, committees, school, spots_left, max_spots=100):
        '''Assign schools to unassigned Assignment objects based on a set of
        available countries and committees.

        Returns the number of delegation spots still unfilled (0 once
        fewer than 3 remain, which stops further passes).'''
        for country in countries:
            for committee in committees:
                try:
                    assignment = Assignment.objects.get(committee=committee,
                                                        country=country)
                    if assignment.school is None:
                        assignment.school = school
                        spots_left -= committee.delegation_size
                        max_spots -= committee.delegation_size
                        assignment.save()
                        # Fewer than 3 spots left: treat the delegation as full.
                        if spots_left < 3:
                            return 0
                        # Per-pass cap exhausted: hand back to the caller.
                        if max_spots < 0:
                            return spots_left
                except Assignment.DoesNotExist:
                    # No Assignment row exists for this (committee, country)
                    # combination; simply try the next pairing.
                    pass
        return spots_left

    def __unicode__(self):
        return 'BMUN %d' % self.session

    class Meta:
        db_table = u'conference'
        get_latest_by = 'start_date'
class Country(models.Model):
    """A UN member country; `special` marks non-standard entities that get
    priority during auto-assignment."""
    name = models.CharField(max_length=128)
    special = models.BooleanField(default=False)

    def __unicode__(self):
        return self.name

    class Meta:
        db_table = u'country'
class Committee(models.Model):
    """A conference committee; linked to countries through Assignment rows."""
    name = models.CharField(max_length=8)
    full_name = models.CharField(max_length=128)
    countries = models.ManyToManyField(Country, through='Assignment')
    # Number of delegates each country seat in this committee consumes.
    delegation_size = models.PositiveSmallIntegerField(default=2)
    special = models.BooleanField(default=False)

    def __unicode__(self):
        return self.name

    class Meta:
        db_table = u'committee'
class School(models.Model):
    """A registered school: contact details, fees, and its country/committee
    preference lists (Python 2 / Django)."""
    TYPE_CLUB = 1
    TYPE_CLASS = 2
    PROGRAM_TYPE_OPTIONS = (
        (TYPE_CLUB, 'Club'),
        (TYPE_CLASS, 'Class'),
    )

    LOCATION_USA = 'location/usa'
    LOCATION_INTERNATIONAL = 'location/international'
    LOCATION_OPTIONS = (
        (LOCATION_USA, 'United States of America'),
        (LOCATION_INTERNATIONAL, 'International'),
    )

    registered = models.DateTimeField(auto_now_add=True)
    name = models.CharField(max_length=128)
    address = models.CharField(max_length=128)
    city = models.CharField(max_length=128)
    state = models.CharField(max_length=16)
    zip_code = models.CharField(max_length=16)
    country = models.CharField(max_length=64)
    primary_name = models.CharField(max_length=128)
    primary_email = models.EmailField()
    primary_phone = models.CharField(max_length=32)
    secondary_name = models.CharField(max_length=128, blank=True)
    secondary_email = models.EmailField(blank=True)
    secondary_phone = models.CharField(max_length=32, blank=True)
    program_type = models.PositiveSmallIntegerField(choices=PROGRAM_TYPE_OPTIONS)
    times_attended = models.PositiveSmallIntegerField(default=0)
    min_delegation_size = models.PositiveSmallIntegerField(default=0)
    max_delegation_size = models.PositiveSmallIntegerField(default=0)
    international = models.BooleanField(default=False)

    countrypreferences = models.ManyToManyField(Country, through='CountryPreference')
    committeepreferences = models.ManyToManyField(Committee, limit_choices_to={'special':True})

    registration_fee = models.DecimalField(max_digits=6, decimal_places=2, default=0)
    registration_fee_paid = models.DecimalField(max_digits=6, decimal_places=2, default=0)
    registration_fee_balance = models.DecimalField(max_digits=6, decimal_places=2, default=0)
    delegation_fee = models.DecimalField(max_digits=6, decimal_places=2, default=0)
    delegation_fee_paid = models.DecimalField(max_digits=6, decimal_places=2, default=0)
    delegation_fee_balance = models.DecimalField(max_digits=6, decimal_places=2, default=0)

    def update_country_preferences(self, country_ids, shuffled=False):
        """ Refreshes a school's country preferences, given a list of
            country IDs. If shuffled is True, then the country_ids are
            ordered [1, 6, 2, 7, 3, 8, ...] due to double columning in
            the template. """
        if shuffled:
            country_ids = CountryPreference.unshuffle(country_ids)
        self.countrypreferences.clear()
        seen = set()
        # Skip blank slots and duplicates while preserving rank order.
        for rank, country_id in enumerate(country_ids, start=1):
            if country_id and country_id not in seen:
                seen.add(country_id)
                CountryPreference.objects.create(school=self,
                                                 country_id=country_id,
                                                 rank=rank)

    def update_committee_preferences(self, committee_ids):
        """ Refreshes a school's committee preferences. """
        self.committeepreferences.clear()
        # NOTE(review): direct assignment to a M2M manager is legacy Django
        # behaviour — confirm it still works on the project's Django version.
        self.committeepreferences = committee_ids
        self.save()

    def update_delegate_slots(self, slot_data):
        """ Adds, deletes, or updates delegates attached to this school's
            delegate slots. """
        # slot_data is an iterable of (slot_id, delegate_data) pairs; a pair
        # without both name and email clears the slot.
        for slot_id, delegate_data in slot_data:
            slot = DelegateSlot.objects.get(id=slot_id)
            if 'name' in delegate_data and 'email' in delegate_data:
                slot.update_or_create_delegate(delegate_data)
            else:
                slot.delete_delegate_if_exists()

    def get_country_preferences(self):
        """ Returns a list of this school's country preferences,
            ordered by rank. Note that these are Country objects,
            not CountryPreference objects."""
        return list(self.countrypreferences.all()
                        .order_by('countrypreference__rank'))

    def get_committee_preferences(self):
        # Returns the school's preferred (special) committees, unordered.
        return self.committeepreferences.all()

    def get_delegate_slots(self):
        """ Returns a list of this school's delegate slots,
            ordered by committee name. """
        return list(DelegateSlot.objects.filter(assignment__school=self)
                                        .order_by('assignment__committee__name'))

    def __unicode__(self):
        return self.name

    class Meta:
        db_table = u'school'
class Assignment(models.Model):
    """A (committee, country) seat; `school` is null until assigned."""
    committee = models.ForeignKey(Committee)
    country = models.ForeignKey(Country)
    school = models.ForeignKey(School, null=True, blank=True, default=None)

    def __unicode__(self):
        return self.committee.name + " : " + self.country.name + " : " + (self.school.name if self.school else "Unassigned")

    class Meta:
        db_table = u'assignment'
class CountryPreference(models.Model):
    """A school's ranked preference for a regular (non-special) country."""
    school = models.ForeignKey(School)
    country = models.ForeignKey(Country, limit_choices_to={'special':False})
    rank = models.PositiveSmallIntegerField()

    @staticmethod
    def unshuffle(countries):
        """ Given a list of 2-tuples, Returns a list of countries (or IDs) in correct,
            unshuffled order. """
        # Transpose the column pairs back into the two original columns,
        # then drop the padding (falsy entries).
        countries = [list(t) for t in zip(*countries)]
        return filter(None, countries[0] + countries[1])

    @staticmethod
    def shuffle(countries):
        """ Returns a list of countries (or IDs) in shuffled order
            for double columning, i.e. [1, 6, 2, 7, 3, 8, ...]. Before
            shuffling, the list is padded to length 10. """
        countries += [None]*(10 - len(countries))
        c1, c2 = countries[:5], countries[5:]
        return zip(c1, c2)

    def __unicode__(self):
        # Bug fix: the format arguments must be parenthesised as a tuple.
        # Previously `"%s : %s (%d)" % self.school.name, ...` applied the
        # `%` operator to the first value only (raising TypeError for the
        # missing arguments) and the trailing commas made the statement a
        # tuple expression.
        return "%s : %s (%d)" % (self.school.name, self.country.name, self.rank)

    class Meta:
        db_table = u'country_preference'
        ordering = ['rank']
class DelegateSlot(models.Model):
    """One delegate seat within an Assignment, with per-session attendance."""
    assignment = models.ForeignKey(Assignment)
    attended_session1 = models.BooleanField(default=False)
    attended_session2 = models.BooleanField(default=False)
    attended_session3 = models.BooleanField(default=False)
    attended_session4 = models.BooleanField(default=False)

    def update_or_create_delegate(self, delegate_data):
        """ Updates this slot's delegate object, or creates one if
            the slot has no delegate. """
        try:
            delegate = self.delegate
            for attr, value in delegate_data.items():
                setattr(delegate, attr, value)
            delegate.save()
        except Delegate.DoesNotExist:
            # No delegate attached yet: create one from the given data.
            Delegate.objects.create(delegate_slot=self, **delegate_data)

    def delete_delegate_if_exists(self):
        """ Deletes this slot's delegate or fails silently. """
        try:
            self.delegate.delete()
        except Delegate.DoesNotExist:
            pass

    def update_delegate_attendance(self, slot_data):
        """ Updates this slot's attendance information.

        slot_data is a dict with boolean 'session1'..'session4' keys. """
        self.attended_session1 = slot_data['session1']
        self.attended_session2 = slot_data['session2']
        self.attended_session3 = slot_data['session3']
        self.attended_session4 = slot_data['session4']
        self.save()

    @property
    def country(self):
        # Convenience pass-through to the owning assignment.
        return self.assignment.country

    @property
    def committee(self):
        return self.assignment.committee

    @property
    def school(self):
        return self.assignment.school

    def __unicode__(self):
        return str(self.assignment)

    class Meta:
        db_table = u'delegate_slot'
class Delegate(models.Model):
    """A named delegate occupying (at most) one DelegateSlot."""
    name = models.CharField(max_length=64, blank=True)
    email = models.EmailField(blank=True)
    delegate_slot = models.OneToOneField(DelegateSlot, related_name='delegate', null=True, default=None)
    created_at = models.DateTimeField(auto_now_add=True)

    def __unicode__(self):
        return self.name

    @property
    def country(self):
        # Convenience pass-throughs to the owning slot's assignment.
        return self.delegate_slot.country

    @property
    def committee(self):
        return self.delegate_slot.committee

    @property
    def school(self):
        return self.delegate_slot.school

    class Meta:
        db_table = u'delegate'
class HelpCategory(models.Model):
    """A category grouping help/FAQ questions."""
    name = models.CharField(max_length=128, unique=True)

    def __unicode__(self):
        return self.name

    class Meta:
        db_table = u'help_category'
class HelpQuestion(models.Model):
    """A single FAQ entry belonging to a HelpCategory."""
    category = models.ForeignKey(HelpCategory)
    question = models.CharField(max_length=255)
    answer = models.TextField()

    def __unicode__(self):
        return self.question

    class Meta:
        db_table = u'help_question'
| StarcoderdataPython |
3240603 | from typing import List, Any
class BaseModule(object):
    """Minimal base class for modules; subclasses override tick()."""

    def __init__(self):
        # The base implementation keeps no state.
        pass

    def tick(self, lines: List[Any]):
        # Called with the current batch of lines; no-op by default, for
        # subclasses to override.
        pass
| StarcoderdataPython |
1780803 | <filename>examples/fre/compare_2_scheduling.py
'''
Interactively explore the difference between two schedules of the same trace.
Usage:
compare_2_scheduling.py <swf_file1> <swf_file2> [-h]
Options:
-h --help show this help message and exit.
'''
import matplotlib
matplotlib.use('Qt5Agg')
from matplotlib import pyplot as plt
from evalys.visu import legacy as vleg
from evalys import workload
from docopt import docopt
#retrieving arguments
arguments = docopt(__doc__, version='1.0.0rc2')

# Five stacked panels sharing one x-axis (time): utilisation, queue,
# per-job details for each trace, and a waiting-time comparison.
fig, axes = plt.subplots(nrows=5, ncols=1, sharex=True)

# Load both SWF traces of the same workload under different schedules.
w = workload.Workload.from_csv(arguments["<swf_file1>"])
w0 = workload.Workload.from_csv(arguments["<swf_file2>"])

vleg.plot_series_comparison({arguments["<swf_file1>"]: w.utilisation.load,
                             arguments["<swf_file2>"]: w0.utilisation.load},
                            axes[0],
                            "Utilisation comparison")

vleg.plot_series_comparison({arguments["<swf_file1>"]: w.queue.load,
                             arguments["<swf_file2>"]: w0.queue.load},
                            axes[1],
                            "Queue comparison")

# 100 is the sampling/detail parameter passed through to evalys.
vleg.plot_job_details(w.df, 100, axes[2], arguments["<swf_file1>"])
vleg.plot_job_details(w0.df, 100, axes[3], arguments["<swf_file2>"])

vleg.plot_series("waiting_time", {arguments["<swf_file1>"]: w, arguments["<swf_file2>"]: w0}, axes[4])

# NOTE(review): the positional True is received as tight_layout's first
# (pad) parameter — confirm this is intended rather than tight_layout().
plt.tight_layout(True)
plt.legend()
plt.show()
| StarcoderdataPython |
3201379 | <reponame>lzmaths/leetcode
class Solution(object):
    def minTotalDistance(self, grid):
        """
        Best meeting point: minimal total Manhattan distance from all
        cells containing 1 to a single meeting cell.

        :type grid: List[List[int]]
        :rtype: int
        """
        # Row indices come out already sorted because of the row-major scan;
        # column indices must be sorted explicitly.
        row_idx = []
        col_idx = []
        for r, row in enumerate(grid):
            for c, cell in enumerate(row):
                if cell == 1:
                    row_idx.append(r)
                    col_idx.append(c)
        col_idx.sort()

        def pairwise_span(sorted_vals):
            # Sum of distances to the median equals the sum of
            # (outermost max - outermost min) pairs, working inwards.
            total = 0
            lo, hi = 0, len(sorted_vals) - 1
            while lo < hi:
                total += sorted_vals[hi] - sorted_vals[lo]
                lo += 1
                hi -= 1
            return total

        return pairwise_span(row_idx) + pairwise_span(col_idx)
1630539 | <gh_stars>0
# https://zhuanlan.zhihu.com/p/33593039
# Syntax of list comprehension(列表解析):
# result = [do_something_with(item) for item in item_list]
# Same way to get a generator:
# result = (do_something_with(item) for item in item_list)
# ============================================================
# Modifying a long for-loop expression
# ============================================================
# results = []
# for item in item_list:
# # setups
# # condition
# # processing
# # calculation
# results.append(result)
# -*- Can be replaced by: -*-
# def process_item(item):
# # setups
# # condition
# # processing
# # calculation
# return result
#
# results = [process_item(item) for item in item_list]
# ============================================================
# Modifying a nested for-loop expression
# ============================================================
# results = []
# for i in range(10):
# for j in range(i):
# results.append((i, j))
# -*- Can be replaced by: -*-
# results = [(i, j)
# for i in range(10)
# for j in range(i)]
| StarcoderdataPython |
3383540 | #coding: utf-8
# thanks to GNQG/lr2irproxy for the original source code of DPISocket
# github: https://github.com/GNQG/lr2irproxy
# still not sure the necessity and detailed contents of this class
import socket
from httplib import HTTPMessage, HTTPResponse
from StringIO import StringIO
from zlib import decompress
import GlobalTools
ORIGINAL_IP = '192.168.127.12'
class DPISocket():
    """Hand-rolled HTTP request builder/sender that talks directly to the
    dream-pro.info origin server (Python 2: httplib/StringIO)."""

    def __init__(self, method, path, version='HTTP/1.1'):
        self.method = method.upper()
        self.path = path
        self.version = version
        # Header container; HTTPMessage parses an (empty) RFC822 header set.
        self.msg = HTTPMessage(StringIO())
        self.body = ''

    def setHeader(self,key,val):
        # Set/overwrite a single request header.
        self.msg[key]=val

    def setBody(self,body):
        self.body = body

    def setMsg(self,httpmsg):
        # Replace the whole header block with an existing HTTPMessage.
        self.msg = httpmsg

    def __str__(self):
        # Serialise the full request. The Host header is forced to the
        # original site, and Content-Length is kept in sync with the body.
        self.msg['Host'] = 'www.dream-pro.info'
        if self.body:
            self.msg['content-length'] = str(len(self.body))
        else :
            self.msg['content-length'] = '0'
        s = ''
        # NOTE(review): lines are joined with '\n' rather than CRLF; most
        # servers tolerate this but it is not strictly RFC-compliant.
        s += '%s %s %s\n' % (self.method, self.path, self.version)
        s += str(self.msg)
        s += '\n' # end of header
        s += self.body
        return s

    def sendAndReceive(self):
        """Send the request to ORIGINAL_IP:80 and return (response, body)."""
        try:
            sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
            sock.connect((ORIGINAL_IP,80))
            sock.sendall(str(self))
        except:
            # Connect/send failure: return a synthetic failed response
            # instead of raising (deliberate best-effort behaviour).
            return GlobalTools.FailedHTTPResponse(),''
        res = HTTPResponse(sock)
        res.begin()
        res_body = res.read()
        res.close()
        if 'transfer-encoding' in res.msg:
            # httplib.HTTPResponse automatically concatenate chunked response
            # but do not delete 'transfer-encoding' header
            # so the header must be deleted
            res.msg.__delitem__('transfer-encoding')
        compmeth = res.msg.getheader('content-encoding','').lower()
        # `find(...) != 0` means: the encoding string does not *start* with
        # 'identity', i.e. the body is actually compressed.
        if compmeth and compmeth.find('identity') != 0 :
            # response body is compressed with some method
            offset = 0
            if compmeth.find('gzip') != -1:
                # if body is gziped, header offset value is 47
                # if not, offset value is 0
                # this server does not support sdch...
                offset += 47
            res_body = decompress(res_body,offset)
            res.msg['content-encoding'] = 'identity'
        return res, res_body
| StarcoderdataPython |
19470 | import os
from argparse import ArgumentParser
from pathlib import Path
from general_utils import split_hparams_string, split_int_set_str
# from tacotron.app.eval_checkpoints import eval_checkpoints
from tacotron.app import (DEFAULT_MAX_DECODER_STEPS, continue_train, infer,
plot_embeddings, train, validate)
from tacotron.app.defaults import (DEFAULT_MCD_NO_OF_COEFFS_PER_FRAME,
DEFAULT_REPETITIONS,
DEFAULT_SAVE_MEL_INFO_COPY_PATH,
DEFAULT_SEED)
BASE_DIR_VAR = "base_dir"
def init_plot_emb_parser(parser) -> None:
    """Add plot-embeddings CLI options to *parser* and return its handler."""
    # NOTE(review): annotated `-> None` but a handler function is returned.
    parser.add_argument('--train_name', type=str, required=True)
    parser.add_argument('--custom_checkpoint', type=int)
    return plot_embeddings
# def init_eval_checkpoints_parser(parser):
# parser.add_argument('--train_name', type=str, required=True)
# parser.add_argument('--custom_hparams', type=str)
# parser.add_argument('--select', type=int)
# parser.add_argument('--min_it', type=int)
# parser.add_argument('--max_it', type=int)
# return eval_checkpoints_main_cli
# def evaeckpoints_main_cli(**args):
# argsl_ch["custom_hparams"] = split_hparams_string(args["custom_hparams"])
# eval_checkpoints(**args)
# def init_restore_parser(parser: ArgumentParser) -> None:
# parser.add_argument('--train_name', type=str, required=True)
# parser.add_argument('--checkpoint_dir', type=Path, required=True)
# return restore_model
def init_train_parser(parser: ArgumentParser) -> None:
    """Add training CLI options to *parser* and return the train handler."""
    # NOTE(review): annotated `-> None` but a handler function is returned.
    parser.add_argument('--ttsp_dir', type=Path, required=True)
    parser.add_argument('--train_name', type=str, required=True)
    parser.add_argument('--merge_name', type=str, required=True)
    parser.add_argument('--prep_name', type=str, required=True)
    parser.add_argument('--warm_start_train_name', type=str)
    parser.add_argument('--warm_start_checkpoint', type=int)
    parser.add_argument('--custom_hparams', type=str)
    parser.add_argument('--weights_train_name', type=str)
    parser.add_argument('--weights_checkpoint', type=int)
    parser.add_argument('--map_from_speaker', type=str)
    parser.add_argument('--map_symbol_weights', action='store_true')
    parser.add_argument('--use_weights_map', action='store_true')
    return train_cli
def train_cli(**args) -> None:
    """CLI adapter: parse the hparams override string, then run training."""
    args["custom_hparams"] = split_hparams_string(args["custom_hparams"])
    train(**args)
def init_continue_train_parser(parser: ArgumentParser) -> None:
    """Add continue-train CLI options to *parser* and return its handler."""
    parser.add_argument('--train_name', type=str, required=True)
    parser.add_argument('--custom_hparams', type=str)
    return continue_train_cli
def continue_train_cli(**args) -> None:
    """CLI adapter: parse the hparams override string, then resume training."""
    args["custom_hparams"] = split_hparams_string(args["custom_hparams"])
    continue_train(**args)
def init_validate_parser(parser: ArgumentParser) -> None:
    """Add validation CLI options to *parser* and return its handler."""
    parser.add_argument('--train_name', type=str, required=True)
    parser.add_argument('--entry_ids', type=str, help="Utterance ids or nothing if random")
    parser.add_argument('--speaker', type=str, help="ds_name,speaker_name")
    parser.add_argument('--ds', type=str, help="Choose if validation- or testset should be taken.",
                        choices=["val", "test"], default="val")
    parser.add_argument('--custom_checkpoints', type=str)
    parser.add_argument('--full_run', action='store_true')
    parser.add_argument('--max_decoder_steps', type=int, default=DEFAULT_MAX_DECODER_STEPS)
    parser.add_argument('--copy_mel_info_to', type=str, default=DEFAULT_SAVE_MEL_INFO_COPY_PATH)
    parser.add_argument('--custom_hparams', type=str)
    parser.add_argument('--select_best_from', type=str)
    parser.add_argument('--mcd_no_of_coeffs_per_frame', type=int,
                        default=DEFAULT_MCD_NO_OF_COEFFS_PER_FRAME)
    parser.add_argument('--fast', action='store_true')
    parser.add_argument('--repetitions', type=int, default=DEFAULT_REPETITIONS)
    parser.add_argument('--seed', type=int, default=DEFAULT_SEED)
    return validate_cli
def validate_cli(**args) -> None:
    """CLI adapter: parse string-encoded option lists, then run validation."""
    args["custom_hparams"] = split_hparams_string(args["custom_hparams"])
    args["entry_ids"] = split_int_set_str(args["entry_ids"])
    args["custom_checkpoints"] = split_int_set_str(args["custom_checkpoints"])
    validate(**args)
def init_inference_parser(parser: ArgumentParser) -> None:
    """Add inference CLI options to *parser* and return its handler."""
    parser.add_argument('--train_name', type=str, required=True)
    parser.add_argument('--text_name', type=str, required=True)
    parser.add_argument('--speaker', type=str, required=True, help="ds_name,speaker_name")
    parser.add_argument('--utterance_ids', type=str)
    parser.add_argument('--custom_checkpoint', type=int)
    parser.add_argument('--custom_hparams', type=str)
    parser.add_argument('--full_run', action='store_true')
    parser.add_argument('--max_decoder_steps', type=int, default=DEFAULT_MAX_DECODER_STEPS)
    parser.add_argument('--seed', type=int, default=DEFAULT_SEED)
    parser.add_argument('--copy_mel_info_to', type=str, default=DEFAULT_SAVE_MEL_INFO_COPY_PATH)
    return infer_cli
def infer_cli(**args) -> None:
    """CLI adapter: parse string-encoded option lists, then run inference."""
    args["custom_hparams"] = split_hparams_string(args["custom_hparams"])
    args["utterance_ids"] = split_int_set_str(args["utterance_ids"])
    infer(**args)
def add_base_dir(parser: ArgumentParser) -> None:
    """Set the parser's `base_dir` default from the BASE_DIR_VAR env variable.

    Raises:
        RuntimeError: if the environment variable is not set. (This replaces
            an `assert`, which would be silently stripped under `python -O`.)
    """
    if BASE_DIR_VAR not in os.environ:
        raise RuntimeError(f"Environment variable '{BASE_DIR_VAR}' must be set.")
    base_dir = Path(os.environ[BASE_DIR_VAR])
    parser.set_defaults(base_dir=base_dir)
def _add_parser_to(subparsers, name: str, init_method) -> None:
    """Create a sub-command parser, wire up its handler, and return it."""
    # NOTE(review): annotated `-> None` but the parser is returned.
    parser = subparsers.add_parser(name, help=f"{name} help")
    invoke_method = init_method(parser)
    # The handler chosen by the sub-command is dispatched in _process_args.
    parser.set_defaults(invoke_handler=invoke_method)
    add_base_dir(parser)
    return parser
def _init_parser():
    """Build the top-level ArgumentParser with all sub-commands registered."""
    result = ArgumentParser()
    subparsers = result.add_subparsers(help='sub-command help')
    _add_parser_to(subparsers, "train", init_train_parser)
    _add_parser_to(subparsers, "continue-train", init_continue_train_parser)
    _add_parser_to(subparsers, "validate", init_validate_parser)
    _add_parser_to(subparsers, "infer", init_inference_parser)
    # _add_parser_to(subparsers, "eval-checkpoints", init_taco_eval_checkpoints_parser)
    _add_parser_to(subparsers, "plot-embeddings", init_plot_emb_parser)
    #_add_parser_to(subparsers, "restore", init_restore_parser)
    return result
def _process_args(args) -> None:
    """Dispatch parsed CLI args to the sub-command's handler."""
    params = vars(args)
    # Remove the handler from the kwargs before forwarding the rest to it.
    invoke_handler = params.pop("invoke_handler")
    invoke_handler(**params)
if __name__ == "__main__":
    # Parse CLI arguments and dispatch to the selected sub-command handler.
    main_parser = _init_parser()
    received_args = main_parser.parse_args()
    _process_args(received_args)
| StarcoderdataPython |
90434 | # Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php or see LICENSE file.
# Copyright 2007-2008 <NAME> <<EMAIL>>
""" Facilities for python properties generation.
"""
def gen_property_with_default(name, fget=None, fset=None, doc=""):
    """ Generates a property of a name either with a default fget or a default
    fset.

    If only ``fset`` is given, a default getter is used which lazily
    initialises the backing attribute to the empty string. If ``fget`` is
    given, a default setter is used (any ``fset`` passed alongside ``fget``
    is ignored, matching the historical behaviour).

    @param name: property name
    @param fget: default fget
    @param fset: default fset
    @param doc: documentation for the property
    @type name: string
    @type fget: callable or None
    @type fset: callable or None
    @type doc: string
    """
    # `is None` rather than `== None` (PEP 8 identity comparison).
    if fget is None and fset is None:
        raise NotImplementedError("fget or fset must be not null")
    internal_name = "_prop_" + name

    def getter(self):
        # Lazily create the backing attribute so reads before any write
        # return the default value ("").
        if internal_name not in dir(self):
            setattr(self, internal_name, "")
        return getattr(self, internal_name)

    def setter(self, value):
        return setattr(self, internal_name, value)

    if fget is None:
        return property(getter, fset, doc=doc)
    return property(fget, setter, doc=doc)
def gen_property_of_type(name, _type, doc=""):
    """ Generates a type-forced property associated with a name. Provides type
    checking on the setter (coherence between value to be set and the type
    specified).

    @param name: property name
    @param _type: force type
    @param doc: documentation for the property
    @type name: string
    @type _type: type
    @type doc: string
    """
    internal_name = "_prop_" + name

    def getter(self):
        return getattr(self, internal_name)

    def setter(self, value):
        if isinstance(value, _type):
            return setattr(self, internal_name, value)
        # Bug fix: report the required type's own name via `_type.__name__`.
        # The old `type(_type).__name__` always produced the metaclass name
        # (e.g. "type"), never the actual required type.
        raise TypeError(("invalid type '%s' for property %s:"
                         "%s is required.") %
                        (type(value).__name__, name, _type.__name__))

    return property(getter, setter, doc=doc)
| StarcoderdataPython |
4838618 | <filename>src/agimus/path_execution/play_path.py
#!/usr/bin/env python
# Copyright 2018 CNRS Airbus SAS
# Author: <NAME>
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
import rospy
import smach
import smach_ros
import std_srvs.srv
from agimus_hpp import ros_tools
from agimus_sot_msgs.srv import GetInt, PlugSot, ReadQueue, WaitForMinQueueSize
from std_msgs.msg import Empty, Int32, String, UInt32
from .initialize_path import InitializePath
from .wait_for_input import WaitForInput
from .error_state import ErrorState
from .move_base import MoveBase
class ErrorEvent(Exception):
    """Exception carrying an explanatory value, raised to abort execution."""

    def __init__(self, value):
        # Keep the original payload accessible to callers.
        self.value = value

    def __str__(self):
        return "{!r}".format(self.value)
## State of \c smach finite-state machine
#
# See method \link agimus.path_execution.play_path.PlayPath.execute
# execute \endlink for details.
class PlayPath(smach.State):
    # Publisher description: topic (relative to /hpp/target) -> [msg type, queue size].
    hppTargetPubDict = {"publish": [Empty, 1]}
    # Subscriber descriptions: nested topic namespaces -> [msg type, handler method name].
    subscribersDict = {
        "agimus": {
            "sot": {
                "event": {
                    "error": [Int32, "_handleEventError"],
                    "done": [Int32, "_handleEventDone"],
                },
                "interrupt": [String, "_handleInterrupt"],
            }
        },
        "hpp": {"target": {"publish_done": [Empty, "_handlePublishDone"]}},
    }
    # Service proxy descriptions: nested service namespaces -> [service type].
    serviceProxiesDict = {
        "agimus": {
            "sot": {
                "run_pre_action": [PlugSot],
                "plug_sot": [PlugSot],
                "run_post_action": [PlugSot],
                "clear_queues": [std_srvs.srv.Trigger],
                "wait_for_min_queue_size": [WaitForMinQueueSize],
                "read_queue": [ReadQueue],
                "stop_reading_queue": [std_srvs.srv.Empty],
            }
        },
        "hpp": {
            "target": {
                "publish_first": [std_srvs.srv.Trigger],
                "get_queue_size": [GetInt],
            }
        },
    }
    def __init__(self, status):
        """Create the smach state and all its ROS communication handles.

        @param status: shared status object used to report progress and to
               pause execution in step-by-step mode.
        """
        super(PlayPath, self).__init__(
            outcomes=["succeeded", "aborted", "preempted"],
            input_keys=["transitionId", "endStateId", "duration",
                        "currentSection", "queue_initialized"],
            output_keys=["queue_initialized"],
        )
        self.status = status
        self.targetPub = ros_tools.createPublishers(
            "/hpp/target", self.hppTargetPubDict
        )
        self.subscribers = ros_tools.createSubscribers(self, "", self.subscribersDict)
        self.serviceProxies = ros_tools.createServiceProxies(
            "", PlayPath.serviceProxiesDict
        )
        # True once HPP reports the whole path has been published.
        self.path_published = False
        # Last accepted value from /agimus/sot/event/done (None once consumed).
        self.event_done = None
        # Last value from /agimus/sot/event/error (None if no pending error).
        self.event_error = None
        # event/done values below this threshold are discarded as stale.
        self.event_done_min_time = 0
        # Message received on /agimus/sot/interrupt, if any.
        self.interruption = None
        # Number of accepted event/done messages (for logging only).
        self.event_done_count = 0
    def _handleEventError(self, msg):
        """Store the SoT error; _wait_for_event_done will raise on it."""
        self.event_error = msg.data
    def _handleEventDone(self, msg):
        """Accept an event/done message, filtering out stale ones.

        A value of 0 is treated as a user request and always accepted.
        """
        if msg.data < self.event_done_min_time and msg.data != 0:
            rospy.loginfo("Filtered out event_done " + str(msg.data))
            return
        self.event_done_count += 1
        self.event_done = msg.data
        rospy.loginfo(
            "Received event_done "
            + str(msg.data)
            + " ("
            + str(self.event_done_count)
            + ")"
        )
    def _handleInterrupt(self, msg):
        """Record the interruption and stop waiting for path publication."""
        self.interruption = msg.data
        rospy.loginfo(str(msg.data))
        self.path_published = True
    def _handlePublishDone(self, msg):
        """Mark the HPP path publication as finished."""
        self.path_published = True
        rospy.loginfo("Publishing path done.")
    ## Wait until new value is provided by topic "/agimus/sot/event/done"
    #
    # \li publish True in topic "/agimus/status/is_waiting_for_event_done",
    # \li wait until integer value is provided by topic
    #     "/agimus/sot/event/done" (by the Stack of Tasks),
    # \li publish False in topic "/agimus/status/is_waiting_for_event_done".
    #
    # \note if value provided by topic "/agimus/sot/event/done" is less than
    # threshold self.event_done_min_time, or value is provided by topic
    # "/agimus/sot/event/error, an exception is raised.
    # if value is equal to zero, it is considered as a user request and
    # it is accepted.
    def _wait_for_event_done(self, rate, msg):
        try:
            rospy.loginfo("Wait for event on /agimus/sot/event/done after {} (current: {})"
                          .format(self.event_done_min_time, self.event_done))
            self.status.set_wait_for_event_done(True)
            # Spin until a fresh event_done arrives (see class note above).
            while self.event_done is None or (
                    self.event_done < self.event_done_min_time and self.event_done != 0):
                if self.event_error is not None:
                    exception = ErrorEvent("ErrorEvent during {}: {}".format(msg, self.event_error))
                    # Clear both flags before raising so the next wait starts clean.
                    self.event_done = None
                    self.event_error = None
                    raise exception
                if rospy.is_shutdown():
                    raise ErrorEvent("Requested rospy shutdown")
                rate.sleep()
            # Consume the event so subsequent waits block again.
            self.event_done = None
        finally:
            self.status.set_wait_for_event_done(False)
    ## Execute a sub-path
    #
    # There are three main steps in execution of a sub-path
    # \li pre-action tasks
    # \li execution of the path, publish in topic "/hpp/target/publish"
    # \li post-action tasks, typically closing the gripper
    #
    # Between each step, the execution is paused until a message on topic
    # `/agimus/sot/event/done`.
    # \todo fix scheduling, the current code waits a bit before considering
    # control_norm_changed and step_by_step messages.
    #
    # \todo error handling, like object not in gripper when closing it.
    def execute(self, userdata):
        rate = rospy.Rate(1000)
        transition_identifier = "{} of {}".format(*userdata.transitionId)
        try:
            self.status.wait_if_step_by_step("Beginning execution.", 3)
            first_published = False
            # One-time initialization of the SoT queues on the first sub-path.
            if not userdata.queue_initialized:
                rsp = self.serviceProxies["hpp"]["target"]["publish_first"]()
                if not rsp.success:
                    raise ErrorEvent(
                        "Could not initialize the queues in SoT: " + rsp.message
                    )
                rsp = self.serviceProxies["agimus"]["sot"]["wait_for_min_queue_size"](1, 1.)
                if not rsp.success:
                    raise ErrorEvent(
                        "Did not receive the first message for initialization: " + rsp.message
                    )
                self.serviceProxies["agimus"]["sot"]["clear_queues"]()
                userdata.queue_initialized = True
                first_published = True
                rospy.loginfo("Queues initialized.")
            # TODO Check that the current SOT and the future SOT are compatible ?
            self.serviceProxies["agimus"]["sot"]["clear_queues"]()
            status = self.serviceProxies["agimus"]["sot"]["run_pre_action"](
                userdata.transitionId[0], userdata.transitionId[1]
            )
            # Pre-action is optional: only run the block below if one exists.
            if status.success:
                self.status.set_description("Executing pre-action {}, subpath {}."
                                            .format(transition_identifier, userdata.currentSection))
                rospy.loginfo("Start pre-action")
                if not first_published:
                    rsp = self.serviceProxies["hpp"]["target"]["publish_first"]()
                    if not rsp.success:
                        raise ErrorEvent(rsp.message)
                    rsp = self.serviceProxies["agimus"]["sot"]["read_queue"](
                        delay=1, minQueueSize=1, duration=0, timeout=1.
                    )
                    self.event_done_min_time = rsp.start_time
                    first_published = True
                else:
                    rsp = self.serviceProxies["agimus"]["sot"]["read_queue"](
                        delay=1, minQueueSize=0, duration=0, timeout=1.
                    )
                    self.event_done_min_time = rsp.start_time
                if not rsp.success:
                    raise ErrorEvent(
                        "Could not read queues for pre-action: " + rsp.message
                    )
                self._wait_for_event_done(rate, "pre-actions")
                self.status.wait_if_step_by_step("Pre-action ended.", 2)
            rospy.loginfo("Publishing path")
            self.path_published = False
            self.serviceProxies["agimus"]["sot"]["clear_queues"]()
            queueSize = self.serviceProxies["hpp"]["target"]["get_queue_size"]().data
            self.targetPub["publish"].publish()
            self.status.set_description("Executing action {}, subpath {}."
                                        .format(transition_identifier, userdata.currentSection))
            status = self.serviceProxies["agimus"]["sot"]["plug_sot"](
                userdata.transitionId[0], userdata.transitionId[1]
            )
            if not status.success:
                rospy.logerr(status.msg)
                return "preempted"
            # self.control_norm_ok = False
            rospy.loginfo("Read queue (size {})".format(queueSize))
            # SoT should wait to have a queue larger than 1. This ensures that read_queue won't run into
            # an infinite loop for a very short path (i.e. one configuration only).
            # SoT should not wait to have a queue larger than 100ms
            # Delay is 1 if the queue is large enough or 10 if it is small.
            # TODO Make maximum queue size and delay parameterizable.
            dt = rospy.get_param ("/sot_controller/dt")
            queueSize = min(queueSize, int(0.1 / dt))
            delay = 1 if queueSize > 10 else 10
            rsp = self.serviceProxies["agimus"]["sot"]["read_queue"](
                delay=delay, minQueueSize=queueSize, duration=userdata.duration, timeout = 1.
            )
            if not rsp.success:
                raise ErrorEvent(
                    "Could not read queues for action: " + rsp.message
                )
            self.event_done_min_time = rsp.start_time
            if self.interruption is not None:
                rospy.logerr(str(self.interruption))
                self.interruption = None
                return "aborted"
            self._wait_for_event_done(rate, "main action")
            # Wait until HPP confirms the whole path has been published.
            while not self.path_published:
                # rospy.logerr("Path publication is not over yet.")
                rate.sleep()
            # TODO stop publishing queues
            self.status.wait_if_step_by_step("Action ended.", 2)
            # Run post action if any
            rospy.loginfo("Start post-action")
            status = self.serviceProxies["agimus"]["sot"]["run_post_action"](
                userdata.endStateId[0], userdata.endStateId[1]
            )
            if status.success:
                self.status.set_description("Executing post-action {}, subpath {}."
                                            .format(transition_identifier, userdata.currentSection))
                self.event_done_min_time = rsp.start_time
                self._wait_for_event_done(rate, "post-action")
                self.status.wait_if_step_by_step("Post-action ended.", 2)
            return "succeeded"
        except ErrorEvent as e:
            # TODO interrupt path publication.
            rospy.logerr(str(e))
            return "preempted"
def makeStateMachine():
    """Build the agimus path-execution state machine.

    @return (sm, sis): the smach state machine and its introspection server.
    """
    from .status import Status
    # Set default values of parameters
    if not rospy.has_param("step_by_step"):
        rospy.set_param("step_by_step", 0)
    sm = smach.StateMachine(outcomes=["aborted",])
    status = Status()
    with sm:
        from agimus_hpp.client import HppClient
        # Client to the HPP CORBA server; connection is deferred (connect=False).
        hppclient = HppClient(context="corbaserver", connect=False)
        # Idle state: waits for a path-execution request.
        smach.StateMachine.add(
            "WaitForInput",
            WaitForInput(status, hppclient),
            transitions={
                "start_path": "Init",
                "failed_to_start": "WaitForInput",
                "interrupted": "aborted"},
            remapping={
                "pathId": "pathId",
                "times": "times",
                "transitionIds": "transitionIds",
                "endStateIds": "endStateIds",
                "currentSection": "currentSection",
                "queue_initialized": "queue_initialized",
            },
        )
        # Prepares the next sub-path and decides what should execute it.
        smach.StateMachine.add(
            "Init",
            InitializePath(status, hppclient),
            transitions={
                "finished": "WaitForInput",
                "move_base": "MoveBase",
                "next": "Play",
            },
            remapping={
                "pathId": "pathId",
                "times": "times",
                "transitionId": "transitionId",
                "currentSection": "currentSection",
            },
        )
        # Executes sub-paths that move the robot base.
        smach.StateMachine.add(
            "MoveBase",
            MoveBase(status),
            transitions={
                "succeeded": "Init",
                "preempted": "Error",
            },
            remapping={
                "currentSection": "currentSection",
                "times": "times",
                "pathId": "pathId",
            },
        )
        # Executes sub-paths through the Stack of Tasks.
        smach.StateMachine.add(
            "Play",
            PlayPath(status),
            transitions={
                "succeeded": "Init",
                "aborted": "WaitForInput",
                "preempted": "Error",
            },
            remapping={
                "transitionId": "transitionId",
                "duration": "duration",
                "currentSection": "currentSection",
                "queue_initialized": "queue_initialized",
            },
        )
        # Error recovery state; always returns to WaitForInput.
        smach.StateMachine.add(
            "Error",
            ErrorState(status),
            transitions={
                "finished": "WaitForInput",
            },
        )
    sm.set_initial_state(["WaitForInput"])
    # Introspection server so the machine can be visualized (e.g. smach_viewer).
    sis = smach_ros.IntrospectionServer("agimus", sm, "/AGIMUS")
    return sm, sis
| StarcoderdataPython |
3345987 | # # -*- coding: utf-8 -*-
# # Import Python libs
# from __future__ import absolute_import
# # Import Salt Testing libs
# from tests.support.unit import skipIf, TestCase
# from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
# # Import salt libs
# import salt.modules.cyg as cyg
# cyg.__salt__ = {}
# @skipIf(NO_MOCK, NO_MOCK_REASON)
# class TestcygModule(TestCase):
# def test__get_cyg_dir(self):
# self.assertEqual(cyg._get_cyg_dir(), 'c:\\cygwin64')
# self.assertEqual(cyg._get_cyg_dir('x86_64'), 'c:\\cygwin64')
# self.assertEqual(cyg._get_cyg_dir('x86'), 'c:\\cygwin')
# def test_cyg_install(self):
# mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
# with patch.dict(cyg.__salt__,
# {'cmd.run_all': mock}):
# cyg._get_cyg_dir()
# mock.assert_called_once_with('cyg install dos2unix')
# mock = MagicMock(return_value=None)
# with patch.dict(cyg.__salt__,
# {'rvm.is_installed': MagicMock(return_value=True),
# 'rbenv.is_installed': MagicMock(return_value=False),
# 'rvm.do': mock}):
# cyg._get_cyg_dir('install dos2unix', ruby='1.9.3')
# mock.assert_called_once_with(
# '1.9.3', 'cyg install dos2unix'
# )
# mock = MagicMock(return_value=None)
# with patch.dict(cyg.__salt__,
# {'rvm.is_installed': MagicMock(return_value=False),
# 'rbenv.is_installed': MagicMock(return_value=True),
# 'rbenv.do': mock}):
# cyg._get_cyg_dir('install dos2unix')
# mock.assert_called_once_with(
# 'cyg install dos2unix'
# )
# def test_install_pre(self):
# mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
# with patch.dict(cyg.__salt__,
# {'rvm.is_installed': MagicMock(return_value=False),
# 'rbenv.is_installed': MagicMock(return_value=False),
# 'cmd.run_all': mock}):
# cyg.install('dos2unix', pre_releases=True)
# mock.assert_called_once_with(
# 'cyg install dos2unix --no-rdoc --no-ri --pre'
# )
# def test_list(self):
# output = '''
# actionmailer (2.3.14)
# actionpack (2.3.14)
# activerecord (2.3.14)
# activeresource (2.3.14)
# activesupport (3.0.5, 2.3.14)
# rake (0.9.2, 0.8.7)
# responds_to_parent (1.0.20091013)
# sass (3.1.15, 3.1.7)
# '''
# mock = MagicMock(return_value=output)
# with patch.object(cyg, '_cyg', new=mock):
# self.assertEqual(
# {'actionmailer': ['2.3.14'],
# 'actionpack': ['2.3.14'],
# 'activerecord': ['2.3.14'],
# 'activeresource': ['2.3.14'],
# 'activesupport': ['3.0.5', '2.3.14'],
# 'rake': ['0.9.2', '0.8.7'],
# 'responds_to_parent': ['1.0.20091013'],
# 'sass': ['3.1.15', '3.1.7']},
# cyg.list_())
# def test_sources_list(self):
# output = '''*** CURRENT SOURCES ***
# http://rubycygs.org/
# '''
# mock = MagicMock(return_value=output)
# with patch.object(cyg, '_cyg', new=mock):
# self.assertEqual(
# ['http://rubycygs.org/'], cyg.sources_list())
| StarcoderdataPython |
133590 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2014-2016 pocsuite developers (https://seebug.org)
See the file 'docs/COPYING' for copying permission
"""
import sys
from pocsuite_cli import pcsInit
from .lib.core.common import banner
from .lib.core.common import dataToStdout
from .lib.core.settings import PCS_OPTIONS
def main():
    """Entry point of the ``pcs-attack`` shortcut.

    Expects exactly two positional arguments (poc file and target URL),
    fills PCS_OPTIONS accordingly and launches pocsuite in attack mode.
    """
    cli_args = sys.argv[1: 3]
    if len(cli_args) < 2:
        dataToStdout(
            "usage: pcs-attack [pocfile] [url]\n"
            "pocsuite: error: too few arguments"
        )
        sys.exit(1)
    pocFile, targetUrl = cli_args
    PCS_OPTIONS.update(
        {
            'url': targetUrl, 'pocFile': pocFile, 'headers': None, 'extra_params': None,
            'host': None, 'Mode': 'attack', 'retry': None, 'delay': None, 'dork': None,
            'vulKeyword': None,
        }
    )
    pcsInit(PCS_OPTIONS)
if __name__ == "__main__":
    # Allow running this module directly as the pcs-attack entry point.
    main()
| StarcoderdataPython |
139639 | <filename>pyp.py
#!/usr/bin/env python3
import argparse
import ast
import importlib
import inspect
import itertools
import os
import sys
import textwrap
import traceback
from collections import defaultdict
from typing import Any, Dict, Iterator, List, Optional, Set, Tuple, cast
__all__ = ["pypprint"]
__version__ = "0.3.4"
def pypprint(*args, **kwargs):  # type: ignore
    """Replacement for ``print`` that special-cases dicts and iterables.

    - Dictionaries are printed one line per key-value pair, with key and value colon-separated.
    - Iterables (excluding strings) are printed one line per item
    - Everything else is delegated to ``print``
    """
    # Imported locally so the function stays self-contained: its source is
    # embedded verbatim into generated scripts via inspect.getsource.
    from typing import Iterable

    if len(args) != 1:
        print(*args, **kwargs)
        return
    obj = args[0]
    if isinstance(obj, dict):
        for key, value in obj.items():
            print(f"{key}:", value, **kwargs)
        return
    if isinstance(obj, Iterable) and not isinstance(obj, str):
        for item in obj:
            print(item, **kwargs)
        return
    print(obj, **kwargs)
class NameFinder(ast.NodeVisitor):
    """Finds undefined names, top-level defined names and wildcard imports in the given AST.

    A top-level defined name is any name that is stored to in the top-level scopes of ``trees``.
    An undefined name is any name that is loaded before it is defined (in any scope).
    Notes: a) we ignore deletes, b) used builtins will appear in undefined names, c) this logic
    doesn't fully support comprehension / nonlocal / global / late-binding scopes.
    """

    def __init__(self, *trees: ast.AST) -> None:
        # Stack of scopes; each scope is the set of names defined in it.
        # _scopes[0] is the top-level (module) scope.
        self._scopes: List[Set[str]] = [set()]
        # Indices into _scopes marking which scopes belong to comprehensions
        # (needed for PEP 572 walrus scoping).
        self._comprehension_scopes: List[int] = []
        self.undefined: Set[str] = set()
        self.wildcard_imports: List[str] = []
        for tree in trees:
            self.visit(tree)
        assert len(self._scopes) == 1

    @property
    def top_level_defined(self) -> Set[str]:
        """Names stored to at the top level of the visited trees."""
        return self._scopes[0]

    def flexible_visit(self, value: Any) -> None:
        """Visit an AST node, a list of AST nodes, or silently skip anything else."""
        if isinstance(value, list):
            for item in value:
                if isinstance(item, ast.AST):
                    self.visit(item)
        elif isinstance(value, ast.AST):
            self.visit(value)

    def generic_visit(self, node: ast.AST) -> None:
        def order(f_v: Tuple[str, Any]) -> int:
            # This ordering fixes comprehensions, dict comps, loops, assignments
            return {"generators": -3, "iter": -3, "key": -2, "value": -1}.get(f_v[0], 0)

        # Adapted from ast.NodeVisitor.generic_visit, but re-orders traversal a little
        for _, value in sorted(ast.iter_fields(node), key=order):
            self.flexible_visit(value)

    def visit_Name(self, node: ast.Name) -> None:
        """Record loads of unknown names as undefined; stores as definitions."""
        if isinstance(node.ctx, ast.Load):
            if all(node.id not in d for d in self._scopes):
                self.undefined.add(node.id)
        elif isinstance(node.ctx, ast.Store):
            self._scopes[-1].add(node.id)
        # Ignore deletes, see docstring
        self.generic_visit(node)

    def visit_Global(self, node: ast.Global) -> None:
        # Make the named top-level definitions visible in the current scope.
        self._scopes[-1] |= self._scopes[0] & set(node.names)

    def visit_Nonlocal(self, node: ast.Nonlocal) -> None:
        # Make definitions from the immediately enclosing scope visible here.
        if len(self._scopes) >= 2:
            self._scopes[-1] |= self._scopes[-2] & set(node.names)

    def visit_AugAssign(self, node: ast.AugAssign) -> None:
        # ``x += 1`` both loads and stores x, so x must already be defined.
        if isinstance(node.target, ast.Name):
            # TODO: think about global, nonlocal
            if node.target.id not in self._scopes[-1]:
                self.undefined.add(node.target.id)
        self.generic_visit(node)

    def visit_NamedExpr(self, node: Any) -> None:
        self.visit(node.value)
        # PEP 572 has weird scoping rules
        assert isinstance(node.target, ast.Name)
        assert isinstance(node.target.ctx, ast.Store)
        # A walrus inside a comprehension binds in the nearest enclosing
        # non-comprehension scope; walk up past comprehension scopes.
        scope_index = len(self._scopes) - 1
        comp_index = len(self._comprehension_scopes) - 1
        while comp_index >= 0 and scope_index == self._comprehension_scopes[comp_index]:
            scope_index -= 1
            comp_index -= 1
        self._scopes[scope_index].add(node.target.id)

    def visit_alias(self, node: ast.alias) -> None:
        # ``import m`` / ``from m import n as a`` define m / a respectively.
        if node.name != "*":
            self._scopes[-1].add(node.asname if node.asname is not None else node.name)

    def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
        # Track ``from m import *`` so unresolved names can be attributed to m.
        if node.module is not None and "*" in (a.name for a in node.names):
            self.wildcard_imports.append(node.module)
        self.generic_visit(node)

    def visit_ClassDef(self, node: ast.ClassDef) -> None:
        self.flexible_visit(node.decorator_list)
        self.flexible_visit(node.bases)
        self.flexible_visit(node.keywords)
        self._scopes.append(set())
        self.flexible_visit(node.body)
        self._scopes.pop()
        # Classes are not okay with self-reference, so define ``name`` afterwards
        self._scopes[-1].add(node.name)

    def visit_function_helper(self, node: Any, name: Optional[str] = None) -> None:
        # Functions are okay with recursion, but not self-reference while defining default values
        self.flexible_visit(node.args)
        if name is not None:
            self._scopes[-1].add(name)
        self._scopes.append(set())
        # Parameters are defined in the function's own scope.
        for arg_node in ast.iter_child_nodes(node.args):
            if isinstance(arg_node, ast.arg):
                self._scopes[-1].add(arg_node.arg)
        self.flexible_visit(node.body)
        self._scopes.pop()

    def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
        self.flexible_visit(node.decorator_list)
        self.visit_function_helper(node, node.name)

    def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None:
        self.flexible_visit(node.decorator_list)
        self.visit_function_helper(node, node.name)

    def visit_Lambda(self, node: ast.Lambda) -> None:
        self.visit_function_helper(node)

    def visit_ExceptHandler(self, node: ast.ExceptHandler) -> None:
        # ExceptHandler's name is scoped to the handler. If name exists and the name is not already
        # defined, we'll define then undefine it to mimic the scope.
        if not node.name or node.name in self._scopes[-1]:
            self.generic_visit(node)
            return
        self.flexible_visit(node.type)
        assert node.name is not None
        self._scopes[-1].add(node.name)
        self.flexible_visit(node.body)
        self._scopes[-1].remove(node.name)

    def visit_comprehension_helper(self, node: Any) -> None:
        """Visit a comprehension node inside its own (tracked) scope."""
        self._comprehension_scopes.append(len(self._scopes))
        self._scopes.append(set())
        self.generic_visit(node)
        self._scopes.pop()
        self._comprehension_scopes.pop()

    visit_ListComp = visit_comprehension_helper
    visit_SetComp = visit_comprehension_helper
    visit_GeneratorExp = visit_comprehension_helper
    visit_DictComp = visit_comprehension_helper
def dfs_walk(node: ast.AST) -> Iterator[ast.AST]:
    """Yield *node* and all of its descendants in depth-first pre-order.

    Unlike ``ast.walk`` (which is breadth-first), parents are always yielded
    immediately before their subtrees.
    """
    yield node
    for child in ast.iter_child_nodes(node):
        yield from dfs_walk(child)
# Names that trigger pyp's magic input handling, grouped by role:
# "index" names bind the current line number, "loop" names bind each
# (newline-stripped) input line, and "input" names bind all of stdin at once.
MAGIC_VARS = {
    "index": {"i", "idx", "index"},
    "loop": {"line", "x", "l"},
    "input": {"lines", "stdin"},
}
def is_magic_var(name: str) -> bool:
    """Return True if *name* is one of the built-in magic variable names."""
    for candidates in MAGIC_VARS.values():
        if name in candidates:
            return True
    return False
class PypError(Exception):
    """Raised for problems pyp reports to the user (bad config, invalid input)."""
    pass
def get_config_contents() -> str:
    """Returns the empty string if no config file is specified."""
    path = os.environ.get("PYP_CONFIG_PATH")
    if path is None:
        return ""
    try:
        with open(path, "r") as f:
            return f.read()
    except FileNotFoundError as e:
        raise PypError(f"Config file not found at PYP_CONFIG_PATH={path}") from e
class PypConfig:
    """PypConfig is responsible for handling user configuration.

    We allow users to configure pyp with a config file that is very Python-like. Rather than
    executing the config file as Python unconditionally, we treat it as a source of definitions. We
    keep track of what each top-level stmt in the AST of the config file defines, and if we need
    that definition in our program, use it. A wrinkle here is that definitions in the config file
    may depend on other definitions within the config file; this is handled by build_missing_config.
    Another wrinkle is wildcard imports; these are kept track of and added to the list of special
    cased wildcard imports in build_missing_imports.
    """

    def __init__(self) -> None:
        """Parse the config file (if any) and index its top-level definitions.

        Raises PypError on invalid syntax, unsupported top-level constructs,
        duplicate definitions, or redefinition of a magic variable.
        """
        config_contents = get_config_contents()
        try:
            config_ast = ast.parse(config_contents)
        except SyntaxError as e:
            error = f": {e.text!r}" if e.text else ""
            raise PypError(f"Config has invalid syntax{error}") from e
        # List of config parts
        self.parts: List[ast.stmt] = config_ast.body
        # Maps from a name to index of config part that defines it
        self.name_to_def: Dict[str, int] = {}
        # Reverse mapping: config part index -> all names it defines.
        self.def_to_names: Dict[int, List[str]] = defaultdict(list)
        # Maps from index of config part to undefined names it needs
        self.requires: Dict[int, Set[str]] = defaultdict(set)
        # Modules from which automatic imports work without qualification, ordered by AST encounter
        self.wildcard_imports: List[str] = []
        # Shebang to emit for generated scripts; taken from the config file's
        # leading comment block if it starts with "#!".
        self.shebang: str = "#!/usr/bin/env python3"
        if config_contents.startswith("#!"):
            self.shebang = "\n".join(
                itertools.takewhile(lambda l: l.startswith("#"), config_contents.splitlines())
            )
        # Only these statement kinds are allowed at the config's top level.
        top_level: Tuple[Any, ...] = (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)
        top_level += (ast.Import, ast.ImportFrom, ast.Assign, ast.AnnAssign, ast.If, ast.Try)
        for index, part in enumerate(self.parts):
            if not isinstance(part, top_level):
                node_type = type(
                    part.value if isinstance(part, ast.Expr) else part
                ).__name__.lower()
                raise PypError(
                    "Config only supports a subset of Python at top level; "
                    f"unsupported construct ({node_type}) on line {part.lineno}"
                )
            f = NameFinder(part)
            for name in f.top_level_defined:
                if self.name_to_def.get(name, index) != index:
                    raise PypError(f"Config has multiple definitions of {repr(name)}")
                if is_magic_var(name):
                    raise PypError(f"Config cannot redefine built-in magic variable {repr(name)}")
                self.name_to_def[name] = index
                self.def_to_names[index].append(name)
            self.requires[index] = f.undefined
            self.wildcard_imports.extend(f.wildcard_imports)
class PypTransform:
"""PypTransform is responsible for transforming all input code.
A lot of pyp's magic comes from it making decisions based on defined and undefined names in the
input. This class helps keep track of that state as things change based on transformations. In
general, the logic in here is very sensitive to reordering; there are various implicit
assumptions about what transformations have happened and what names have been defined. But
the code is pretty small and the tests are good, so you should be okay!
"""
def __init__(
self,
before: List[str],
code: List[str],
after: List[str],
define_pypprint: bool,
config: PypConfig,
) -> None:
def parse_input(code: List[str]) -> ast.Module:
try:
return ast.parse(textwrap.dedent("\n".join(code).strip()))
except SyntaxError as e:
message = traceback.format_exception_only(type(e), e)
message[0] = "Invalid input\n\n"
raise PypError("".join(message).strip()) from e
self.before_tree = parse_input(before)
self.tree = parse_input(code)
self.after_tree = parse_input(after)
f = NameFinder(self.before_tree, self.tree, self.after_tree)
self.defined: Set[str] = f.top_level_defined
self.undefined: Set[str] = f.undefined
self.wildcard_imports: List[str] = f.wildcard_imports
# We'll always use sys in ``build_input``, so add it to undefined.
# This lets config define it or lets us automatically import it later
# (If before defines it, we'll just let it override the import...)
self.undefined.add("sys")
self.define_pypprint = define_pypprint
self.config = config
# The print statement ``build_output`` will add, if it determines it needs to.
self.implicit_print: Optional[ast.Call] = None
def build_missing_config(self) -> None:
"""Modifies the AST to define undefined names defined in config."""
config_definitions: Set[str] = set()
attempt_to_define = set(self.undefined)
while attempt_to_define:
can_define = attempt_to_define & set(self.config.name_to_def)
# The things we can define might in turn require some definitions, so update the things
# we need to attempt to define and loop
attempt_to_define = set()
for name in can_define:
config_definitions.update(self.config.def_to_names[self.config.name_to_def[name]])
attempt_to_define.update(self.config.requires[self.config.name_to_def[name]])
# We don't need to attempt to define things we've already decided we need to define
attempt_to_define -= config_definitions
config_indices = {self.config.name_to_def[name] for name in config_definitions}
# Run basically the same thing in reverse to see which dependencies stem from magic vars
before_config_indices = set(config_indices)
derived_magic_indices = {
i for i in config_indices if any(map(is_magic_var, self.config.requires[i]))
}
derived_magic_names = set()
while derived_magic_indices:
before_config_indices -= derived_magic_indices
derived_magic_names |= {
name for i in derived_magic_indices for name in self.config.def_to_names[i]
}
derived_magic_indices = {
i for i in before_config_indices if self.config.requires[i] & derived_magic_names
}
magic_config_indices = config_indices - before_config_indices
before_config_defs = [self.config.parts[i] for i in sorted(before_config_indices)]
magic_config_defs = [self.config.parts[i] for i in sorted(magic_config_indices)]
self.before_tree.body = before_config_defs + self.before_tree.body
self.tree.body = magic_config_defs + self.tree.body
for i in config_indices:
self.undefined.update(self.config.requires[i])
self.defined |= config_definitions
self.undefined -= config_definitions
def define(self, name: str) -> None:
"""Defines a name."""
self.defined.add(name)
self.undefined.discard(name)
def get_valid_name_in_top_scope(self, name: str) -> str:
"""Return a name related to ``name`` that does not conflict with existing definitions."""
while name in self.defined or name in self.undefined:
name += "_"
return name
def build_output(self) -> None:
"""Ensures that the AST prints something.
This is done by either a) checking whether we load a thing that prints, or b) if the last
thing in the tree is an expression, modifying the tree to print it.
"""
if self.undefined & {"print", "pprint", "pp", "pypprint"}: # has an explicit print
return
def inner(body: List[ast.stmt], use_pypprint: bool = False) -> bool:
if not body:
return False
if isinstance(body[-1], ast.Pass):
del body[-1]
return True
if not isinstance(body[-1], ast.Expr):
if (
# If the last thing in the tree is a statement that has a body
hasattr(body[-1], "body")
# and doesn't have an orelse, since users could expect the print in that branch
and not getattr(body[-1], "orelse", [])
# and doesn't enter a new scope
and not isinstance(
body[-1], (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)
)
):
# ...then recursively look for a standalone expression
return inner(body[-1].body, use_pypprint) # type: ignore
return False
if isinstance(body[-1].value, ast.Name):
output = body[-1].value.id
body.pop()
else:
output = self.get_valid_name_in_top_scope("output")
self.define(output)
body[-1] = ast.Assign(
targets=[ast.Name(id=output, ctx=ast.Store())], value=body[-1].value
)
print_fn = "print"
if use_pypprint:
print_fn = "pypprint"
self.undefined.add("pypprint")
if_print = ast.parse(f"if {output} is not None: {print_fn}({output})").body[0]
body.append(if_print)
self.implicit_print = if_print.body[0].value # type: ignore
return True
# First attempt to add a print to self.after_tree, then to self.tree
# We use pypprint in self.after_tree and print in self.tree, although the latter is
# subject to change later on if we call ``use_pypprint_for_implicit_print``. This logic
# could be a little simpler if we refactored so that we know what transformations we will
# do before we do them.
success = inner(self.after_tree.body, True) or inner(self.tree.body)
if not success:
raise PypError(
"Code doesn't generate any output; either explicitly print something, end with "
"an expression that pyp can print, or explicitly end with `pass`."
)
def use_pypprint_for_implicit_print(self) -> None:
"""If we implicitly print, use pypprint instead of print."""
if self.implicit_print is not None:
self.implicit_print.func.id = "pypprint" # type: ignore
# Make sure we import it later
self.undefined.add("pypprint")
def build_input(self) -> None:
"""Modifies the AST to use input from stdin.
How we do this depends on which magic variables are used.
"""
possible_vars = {typ: names & self.undefined for typ, names in MAGIC_VARS.items()}
if (possible_vars["loop"] or possible_vars["index"]) and possible_vars["input"]:
loop_names = ", ".join(possible_vars["loop"] or possible_vars["index"])
input_names = ", ".join(possible_vars["input"])
raise PypError(
f"Candidates found for both loop variable ({loop_names}) and "
f"input variable ({input_names})"
)
for typ, names in possible_vars.items():
if len(names) > 1:
names_str = ", ".join(names)
raise PypError(f"Multiple candidates for {typ} variable: {names_str}")
if possible_vars["loop"] or possible_vars["index"]:
# We'll loop over stdin and define loop / index variables
idx_var = possible_vars["index"].pop() if possible_vars["index"] else None
loop_var = possible_vars["loop"].pop() if possible_vars["loop"] else None
if loop_var:
self.define(loop_var)
if idx_var:
self.define(idx_var)
if loop_var is None:
loop_var = "_"
if idx_var:
for_loop = f"for {idx_var}, {loop_var} in enumerate(sys.stdin): "
else:
for_loop = f"for {loop_var} in sys.stdin: "
for_loop += f"{loop_var} = {loop_var}.rstrip('\\n')"
loop: ast.For = ast.parse(for_loop).body[0] # type: ignore
loop.body.extend(self.tree.body)
self.tree.body = [loop]
elif possible_vars["input"]:
# We'll read from stdin and define the necessary input variable
input_var = possible_vars["input"].pop()
self.define(input_var)
if input_var == "stdin":
input_assign = ast.parse(f"{input_var} = sys.stdin")
else:
input_assign = ast.parse(f"{input_var} = [x.rstrip('\\n') for x in sys.stdin]")
self.tree.body = input_assign.body + self.tree.body
self.use_pypprint_for_implicit_print()
else:
no_pipe_assertion = ast.parse(
"assert sys.stdin.isatty() or not sys.stdin.read(), "
'''"The command doesn't process input, but input is present"'''
)
self.tree.body = no_pipe_assertion.body + self.tree.body
self.use_pypprint_for_implicit_print()
def build_missing_imports(self) -> None:
    """Modifies the AST to import undefined names."""
    # Builtins are always in scope; never try to import them.
    self.undefined -= set(dir(__import__("builtins")))
    # Optimisation: we will almost always define sys and pypprint. However, in order for us to
    # get to `import sys`, we'll need to examine our wildcard imports, which in the presence
    # of config, could be slow.
    if "pypprint" in self.undefined:
        # Either inline pypprint's source (for standalone scripts) or import it.
        pypprint_def = (
            inspect.getsource(pypprint) if self.define_pypprint else "from pyp import pypprint"
        )
        self.before_tree.body = ast.parse(pypprint_def).body + self.before_tree.body
        self.undefined.remove("pypprint")
    if "sys" in self.undefined:
        self.before_tree.body = ast.parse("import sys").body + self.before_tree.body
        self.undefined.remove("sys")
    # Now short circuit if we can
    if not self.undefined:
        return

    def get_names_in_module(module: str) -> Any:
        # Names that `from module import *` would bind.
        try:
            mod = importlib.import_module(module)
        except ImportError as e:
            raise PypError(
                f"Config contains wildcard import from {module}, but {module} failed to import"
            ) from e
        return getattr(mod, "__all__", (n for n in dir(mod) if not n.startswith("_")))

    # Names resolved via `from X import name` rather than `import name`.
    subimports = {"Path": "pathlib", "pp": "pprint"}
    wildcard_imports = (
        ["itertools", "math", "collections"]
        + self.config.wildcard_imports
        + self.wildcard_imports
    )
    subimports.update(
        {name: module for module in wildcard_imports for name in get_names_in_module(module)}
    )

    def get_import_for_name(name: str) -> str:
        if name in subimports:
            return f"from {subimports[name]} import {name}"
        return f"import {name}"

    # Sorted for deterministic --explain output.
    self.before_tree.body = [
        ast.parse(stmt).body[0] for stmt in sorted(map(get_import_for_name, self.undefined))
    ] + self.before_tree.body
def build(self) -> ast.Module:
    """Returns a transformed AST.

    The build_* passes must run in this order: config, output wiring,
    input wiring, then imports for whatever names remain undefined.
    """
    self.build_missing_config()
    self.build_output()
    self.build_input()
    self.build_missing_imports()
    ret = ast.parse("")
    ret.body = self.before_tree.body + self.tree.body + self.after_tree.body
    # Add fake line numbers to the nodes, so we can generate a traceback on error
    i = 0
    for node in dfs_walk(ret):
        if isinstance(node, ast.stmt):
            i += 1
            node.lineno = i
    return ast.fix_missing_locations(ret)
def unparse(tree: ast.AST, short_fallback: bool = False) -> str:
    """Returns Python code equivalent to executing ``tree``.

    Resolution order: ``ast.unparse`` on Python 3.9+, then the third-party
    ``astunparse`` package if installed, and finally a self-executing script
    built from ``ast.dump`` (abbreviated to a one-line comment when
    ``short_fallback`` is true).
    """
    if sys.version_info >= (3, 9):
        return ast.unparse(tree)
    unparser = None
    try:
        import astunparse as unparser  # type: ignore
    except ImportError:
        pass
    if unparser is not None:
        return cast(str, unparser.unparse(tree))
    if short_fallback:
        return f"# {ast.dump(tree)} # --explain has instructions to make this readable"
    return f"""
from ast import *
tree = fix_missing_locations({ast.dump(tree)})
# To see this in human readable form, run pyp with Python 3.9
# Alternatively, install a third party ast unparser: `python3 -m pip install astunparse`
# Once you've done that, simply re-run.
# In the meantime, this script is fully functional, if not easily readable or modifiable...
exec(compile(tree, filename="<ast>", mode="exec"), {{}})
"""
def run_pyp(args: argparse.Namespace) -> None:
    """Builds the transformed program, then executes it (or, with
    ``--explain``, prints the generated source instead)."""
    config = PypConfig()
    tree = PypTransform(args.before, args.code, args.after, args.define_pypprint, config).build()
    if args.explain:
        print(config.shebang)
        print(unparse(tree))
        return
    try:
        exec(compile(tree, filename="<pyp>", mode="exec"), {})
    except Exception as e:
        # Best effort: rebuild a readable traceback from our fake line
        # numbers, falling back to the bare exception text on any failure.
        try:
            line_to_node: Dict[int, ast.AST] = {}
            for node in dfs_walk(tree):
                line_to_node.setdefault(getattr(node, "lineno", -1), node)

            def code_for_line(lineno: int) -> str:
                node = line_to_node[lineno]
                # Don't unparse nested child statements. Note this destroys the tree.
                for _, value in ast.iter_fields(node):
                    if isinstance(value, list) and value and isinstance(value[0], ast.stmt):
                        value.clear()
                return unparse(node, short_fallback=True).strip()

            # Time to commit several sins against CPython implementation details
            tb_except = traceback.TracebackException(
                type(e), e, e.__traceback__.tb_next  # type: ignore
            )
            for fs in tb_except.stack:
                if fs.filename == "<pyp>":
                    fs._line = code_for_line(fs.lineno)  # type: ignore[attr-defined]
                    fs.lineno = "PYP_REDACTED"  # type: ignore[assignment]
            tb_format = tb_except.format()
            assert "Traceback (most recent call last)" in next(tb_format)
            message = "Possible reconstructed traceback (most recent call last):\n"
            message += "".join(tb_format).strip("\n")
            message = message.replace(", line PYP_REDACTED", "")
        except Exception:
            message = "".join(traceback.format_exception_only(type(e), e)).strip()
        if isinstance(e, ModuleNotFoundError):
            message += (
                "\n\nNote pyp treats undefined names as modules to automatically import. "
                "Perhaps you forgot to define something or PYP_CONFIG_PATH is set incorrectly?"
            )
        if args.before and isinstance(e, NameError):
            # Extract the offending name from the NameError message.
            var = str(e)
            var = var[var.find("'") + 1 : var.rfind("'")]
            if var in ("lines", "stdin"):
                message += (
                    "\n\nNote code in `--before` runs before any magic variables are defined "
                    "and should not process input. Your command should work by simply removing "
                    "`--before`, so instead passing in multiple statements in the main section "
                    "of your code."
                )
        raise PypError(
            "Code raised the following exception, consider using --explain to investigate:\n\n"
            f"{message}"
        ) from e
def parse_options(args: List[str]) -> argparse.Namespace:
    """Parses command line options for the pyp CLI."""
    parser = argparse.ArgumentParser(
        prog="pyp",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=(
            "Easily run Python at the shell!\n\n"
            "For help and examples, see https://github.com/hauntsaninja/pyp\n\n"
            "Cheatsheet:\n"
            "- Use `x`, `l` or `line` for a line in the input. Use `i`, `idx` or `index` "
            "for the index\n"
            "- Use `lines` to get a list of rstripped lines\n"
            "- Use `stdin` to get sys.stdin\n"
            "- Use print explicitly if you don't like when or how or what pyp's printing\n"
            "- If the magic is ever too mysterious, use --explain"
        ),
    )
    parser.add_argument("code", nargs="+", help="Python you want to run")
    parser.add_argument(
        "--explain",
        "--script",
        action="store_true",
        help="Prints the Python that would get run, instead of running it",
    )
    # --before/--after may be given multiple times; each adds a statement.
    parser.add_argument(
        "-b",
        "--before",
        action="append",
        default=[],
        metavar="CODE",
        help="Python to run before processing input",
    )
    parser.add_argument(
        "-a",
        "--after",
        action="append",
        default=[],
        metavar="CODE",
        help="Python to run after processing input",
    )
    parser.add_argument(
        "--define-pypprint",
        action="store_true",
        help="Defines pypprint, if used, instead of importing it from pyp.",
    )
    parser.add_argument("--version", action="version", version=f"pyp {__version__}")
    return parser.parse_args(args)
def main() -> None:
    """CLI entry point: run pyp, reporting PypErrors on stderr with exit code 1."""
    try:
        run_pyp(parse_options(sys.argv[1:]))
    except PypError as e:
        print(f"error: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
| StarcoderdataPython |
128665 | # -------------------------------------------------------------------------
# Copyright (c) PTC Inc. and/or all its affiliates. All rights reserved.
# See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
# User Management Example - Simple example on how to manage a connection and
# execute various calls for configuring the user and user group properties of the Kepware
# Configuration API
from kepconfig import connection, error
from kepconfig.admin import user_groups, users
# User Groups
# User Groups
# Full-privilege group for operators: every manager permission enabled.
group1 = {
    'common.ALLTYPES_NAME': 'Operators',
    "libadminsettings.USERMANAGER_GROUP_ENABLED": True,
    "libadminsettings.USERMANAGER_IO_TAG_READ": True,
    "libadminsettings.USERMANAGER_IO_TAG_WRITE": True,
    "libadminsettings.USERMANAGER_IO_TAG_DYNAMIC_ADDRESSING": True,
    "libadminsettings.USERMANAGER_SYSTEM_TAG_READ": True,
    "libadminsettings.USERMANAGER_SYSTEM_TAG_WRITE": True,
    "libadminsettings.USERMANAGER_INTERNAL_TAG_READ": True,
    "libadminsettings.USERMANAGER_INTERNAL_TAG_WRITE": True,
    "libadminsettings.USERMANAGER_SERVER_MANAGE_LICENSES": True,
    "libadminsettings.USERMANAGER_SERVER_RESET_OPC_DIAGS_LOG": True,
    "libadminsettings.USERMANAGER_SERVER_RESET_COMM_DIAGS_LOG": True,
    "libadminsettings.USERMANAGER_SERVER_MODIFY_SERVER_SETTINGS": True,
    "libadminsettings.USERMANAGER_SERVER_DISCONNECT_CLIENTS": True,
    "libadminsettings.USERMANAGER_SERVER_RESET_EVENT_LOG": True,
    "libadminsettings.USERMANAGER_SERVER_OPCUA_DOTNET_CONFIGURATION": True,
    "libadminsettings.USERMANAGER_SERVER_CONFIG_API_LOG_ACCESS": True,
    "libadminsettings.USERMANAGER_SERVER_REPLACE_RUNTIME_PROJECT": True,
    "libadminsettings.USERMANAGER_BROWSE_BROWSENAMESPACE": True
}

# Restricted group for OPC UA clients: tag access allowed, most server
# administration permissions disabled.
group2 = {
    'common.ALLTYPES_NAME': 'UA Users',
    "libadminsettings.USERMANAGER_GROUP_ENABLED": True,
    "libadminsettings.USERMANAGER_IO_TAG_READ": True,
    "libadminsettings.USERMANAGER_IO_TAG_WRITE": True,
    "libadminsettings.USERMANAGER_IO_TAG_DYNAMIC_ADDRESSING": True,
    "libadminsettings.USERMANAGER_SYSTEM_TAG_READ": True,
    "libadminsettings.USERMANAGER_SYSTEM_TAG_WRITE": True,
    "libadminsettings.USERMANAGER_INTERNAL_TAG_READ": True,
    "libadminsettings.USERMANAGER_INTERNAL_TAG_WRITE": True,
    "libadminsettings.USERMANAGER_SERVER_MANAGE_LICENSES": False,
    "libadminsettings.USERMANAGER_SERVER_RESET_OPC_DIAGS_LOG": False,
    "libadminsettings.USERMANAGER_SERVER_RESET_COMM_DIAGS_LOG": False,
    "libadminsettings.USERMANAGER_SERVER_MODIFY_SERVER_SETTINGS": True,
    "libadminsettings.USERMANAGER_SERVER_DISCONNECT_CLIENTS": False,
    "libadminsettings.USERMANAGER_SERVER_RESET_EVENT_LOG": False,
    "libadminsettings.USERMANAGER_SERVER_OPCUA_DOTNET_CONFIGURATION": False,
    "libadminsettings.USERMANAGER_SERVER_CONFIG_API_LOG_ACCESS": False,
    "libadminsettings.USERMANAGER_SERVER_REPLACE_RUNTIME_PROJECT": False,
    "libadminsettings.USERMANAGER_BROWSE_BROWSENAMESPACE": True
}

# Users
# Each user references one of the groups above by name.
user1 = {
    "common.ALLTYPES_NAME": "Client1",
    "libadminsettings.USERMANAGER_USER_GROUPNAME": "Operators",
    "libadminsettings.USERMANAGER_USER_ENABLED": True,
    "libadminsettings.USERMANAGER_USER_PASSWORD": "<PASSWORD>"
}
user2 = {
    "common.ALLTYPES_NAME": "Client2",
    "libadminsettings.USERMANAGER_USER_GROUPNAME": "UA Users",
    "libadminsettings.USERMANAGER_USER_ENABLED": True,
    "libadminsettings.USERMANAGER_USER_PASSWORD": "<PASSWORD>"
}


def ErrorHandler(err):
    # Generic Handler for exception errors
    # NOTE: uses exact-class checks (`is`), so subclasses of these error
    # types intentionally fall through to the generic branch.
    if err.__class__ is error.KepError:
        print(err.msg)
    elif err.__class__ is error.KepHTTPError:
        print(err.code)
        print(err.msg)
        print(err.url)
        print(err.hdrs)
        print(err.payload)
    elif err.__class__ is error.KepURLError:
        print(err.url)
        print(err.reason)
    else:
        print('Different Exception Received: {}'.format(err))


# This creates a server reference that is used to target all modifications of
# the Kepware configuration
server = connection.server(host = '127.0.0.1', port = 57412, user = 'Administrator', pw = '')

# ---------------------------------------------
# User Group Methods
# ---------------------------------------------

# Add the User Groups with the appropriate parameters
try:
    print("{} - {}".format("Add new User Groups", user_groups.add_user_group(server, [group1, group2])))
except Exception as err:
    ErrorHandler(err)

# Modify permissions on a User Group
# Ex: Prevent Write access for user group
modify_group = {
    "libadminsettings.USERMANAGER_IO_TAG_WRITE": False,
    "libadminsettings.USERMANAGER_SYSTEM_TAG_WRITE": False,
    "libadminsettings.USERMANAGER_INTERNAL_TAG_WRITE": False
}
try:
    print("{} - {}".format("Modify User Group properties to prevent 'Writes'",user_groups.modify_user_group(server, modify_group, group1['common.ALLTYPES_NAME'])))
except Exception as err:
    ErrorHandler(err)

# Disable and Enable a user groups
try:
    print("{} - {}".format("Disable User Group",user_groups.disable_user_group(server, group1['common.ALLTYPES_NAME'])))
except Exception as err:
    ErrorHandler(err)
try:
    print("{} - {}".format("Enable User Group",user_groups.enable_user_group(server, group1['common.ALLTYPES_NAME'])))
except Exception as err:
    ErrorHandler(err)

# ---------------------------------------------
# User Methods
# ---------------------------------------------

# Add new users with the appropriate parameters
try:
    print("{} - {}".format("Add new Users", users.add_user(server, [user1, user2])))
except Exception as err:
    ErrorHandler(err)

# Modify new user parameters - New Password
modify_pass = {
    "libadminsettings.USERMANAGER_USER_PASSWORD": "<PASSWORD>"
}
try:
    print("{} - {}".format("Updated a user password", users.modify_user(server,modify_pass, user1['common.ALLTYPES_NAME'])))
except Exception as err:
    ErrorHandler(err)

# Disable and Enable a user
try:
    print("{} - {}".format("Disable a user", users.disable_user(server, user1['common.ALLTYPES_NAME'])))
except Exception as err:
    ErrorHandler(err)
try:
    print("{} - {}".format("Enable a user", users.enable_user(server, user1['common.ALLTYPES_NAME'])))
except Exception as err:
    ErrorHandler(err)
65192 | <gh_stars>1-10
def verify(f_d, sol, num_frags):
    """Check whether candidate string ``sol`` is consistent with the
    fragment multiset ``f_d`` (fragment -> occurrence count) of
    ``num_frags`` total fragments.

    Every split of ``sol`` into (left, right) must either hit no known
    fragment at all, or hit two fragments with equal counts; the counts
    accumulated over all fragment splits must total ``num_frags``.
    """
    count = 0
    for i in range(1, len(sol)):
        l = sol[:i]
        r = sol[i:]
        # Idiom fix: `x not in d` instead of `not x in d`.
        if l not in f_d and r not in f_d:
            # Split point doesn't land on any fragment boundary; ignore it.
            continue
        if l in f_d and r in f_d:
            # Complementary fragments must occur equally often.
            if f_d[l] != f_d[r]:
                return False
            # A symmetric split (l == r) accounts for only one fragment set.
            count += (2 * f_d[l] if l != r else f_d[l])
        else:
            # Only one side is a known fragment: sol can't be assembled.
            return False
    # Idiom fix: return the comparison directly instead of
    # `True if ... else False`.
    return count == num_frags
from sys import stdin, stdout

# Reassemble a file from fragments: each test case lists fragments (one per
# line, blank-line terminated); every complementary pair concatenates to the
# whole file.
cases = int(input())
for c in range(cases):
    # Consume the blank line after the case count; for later cases the
    # separator is consumed by the fragment-reading loop below.
    if c == 0:
        stdin.readline()
    fragments = []
    f_d = {}  # fragment string -> occurrence count
    line = input().strip()
    total_len = 0
    while line != "":
        fragments.append(line)
        total_len += len(line)
        if line in f_d:
            f_d[line] += 1
        else:
            f_d[line] = 1
        line = stdin.readline().strip()
    if len(fragments) == 2:
        # A single pair: either concatenation order is a valid answer.
        stdout.write("".join(fragments[0] + fragments[1])+"\n")
    else:
        # File length = total fragment length / number of pairs
        #             = total_len / (len(fragments) / 2).
        file_len = total_len*2 // len(fragments)
        i = 0
        checked = set()
        # Pair the first fragment with each length-compatible partner and
        # verify both concatenation orders against the fragment multiset.
        for j in range(1, len(fragments)):
            if len(fragments[i]) + len(fragments[j]) == file_len:
                if fragments[j] not in checked:
                    if verify(f_d, fragments[i] + fragments[j], len(fragments)):
                        stdout.write("".join(fragments[i] + fragments[j])+"\n")
                        break
                    if verify(f_d, fragments[j] + fragments[i], len(fragments)):
                        stdout.write("".join(fragments[j] + fragments[i])+"\n")
                        break
                    checked.add(fragments[j])
    # Blank line between outputs of consecutive cases.
    if c < cases - 1:
        stdout.write("\n")
| StarcoderdataPython |
104765 | <reponame>kampelmuehler/synthesizing_human_like_sketches
import torchvision
import torch.nn as nn
import torch.nn.functional as F
from collections import namedtuple
class PSim_Alexnet(nn.Module):
    """AlexNet adapted to single-channel (sketch) input for perceptual
    similarity.

    Exposes the five post-ReLU convolutional activations (for use as a
    perceptual loss) and, optionally, classification log-probabilities.

    Args:
        num_classes: size of the classification head.
        train: if True, build the classifier head (pretrained AlexNet
            classifier with the final layer replaced).
        with_classifier: if True, forward() returns both activations and
            log-probabilities.
    """

    def __init__(self, num_classes=125, train=True, with_classifier=False):
        super(PSim_Alexnet, self).__init__()
        self.train_mode = train
        self.with_classifier = with_classifier
        alexnet_model = torchvision.models.alexnet(pretrained=True)
        feature_layers = list(alexnet_model.features.children())
        # First conv takes 1 input channel; initialise it from the first
        # channel of the pretrained RGB filters - narrow(dim, start, length).
        self.Conv1 = nn.Conv2d(1, 64, kernel_size=11, stride=4, padding=2)
        self.Conv1.weight.data.copy_(feature_layers[0].weight.data.narrow(1, 0, 1))
        self.Conv1.bias.data.copy_(feature_layers[0].bias.data)
        # Remaining conv layers are taken from pretrained AlexNet as-is.
        self.Conv2 = feature_layers[3]
        self.Conv3 = feature_layers[6]
        self.Conv4 = feature_layers[8]
        self.Conv5 = feature_layers[10]
        # Take pretrained classifier from AlexNet, replacing the last layer.
        if train is True:
            linear = nn.Linear(4096, num_classes)
            # Init with the first num_classes rows of the pretrained weights.
            linear.weight.data.copy_(alexnet_model.classifier.state_dict()['6.weight'].narrow(0, 0, num_classes))
            linear.bias.data.copy_(alexnet_model.classifier.state_dict()['6.bias'].narrow(0, 0, num_classes))
            alexnet_classifier = list(alexnet_model.classifier.children())
            self.classifier = nn.Sequential(nn.Dropout(p=0.5),
                                            *alexnet_classifier[1:3],
                                            nn.Dropout(p=0.5),
                                            *alexnet_classifier[4:-1],
                                            linear)

    def forward(self, x):
        """Run the network; the return shape depends on the constructor flags.

        Returns:
            with_classifier=True: (activations namedtuple, log_softmax logits)
            train=True:           log_softmax logits
            otherwise:            activations namedtuple only
        """
        conv1_activation = F.relu(self.Conv1(x))
        x = F.max_pool2d(conv1_activation, kernel_size=3, stride=2)
        conv2_activation = F.relu(self.Conv2(x))
        x = F.max_pool2d(conv2_activation, kernel_size=3, stride=2)
        conv3_activation = F.relu(self.Conv3(x))
        conv4_activation = F.relu(self.Conv4(conv3_activation))
        conv5_activation = F.relu(self.Conv5(conv4_activation))
        if self.with_classifier is True:
            x = F.max_pool2d(conv5_activation, kernel_size=3, stride=2)
            x = x.view(x.size(0), -1)
            x = self.classifier(x)
            net_outputs = namedtuple("AlexnetActivations", ['relu1', 'relu2', 'relu3', 'relu4', 'relu5'])
            return net_outputs(conv1_activation,
                               conv2_activation,
                               conv3_activation,
                               conv4_activation,
                               conv5_activation), F.log_softmax(x, dim=1)
        elif self.train_mode is True:
            x = F.max_pool2d(conv5_activation, kernel_size=3, stride=2)
            x = x.view(x.size(0), -1)
            x = self.classifier(x)
            return F.log_softmax(x, dim=1)
        else:
            net_outputs = namedtuple("AlexnetActivations", ['relu1', 'relu2', 'relu3', 'relu4', 'relu5'])
            return net_outputs(conv1_activation, conv2_activation, conv3_activation, conv4_activation, conv5_activation)

    def load_weights(self, state_dict):
        """Load only the checkpoint weights that exist in this model.

        Useful when the checkpoint contains extra keys (e.g. classifier
        weights while this model was built with train=False).
        """
        model_dict = self.state_dict()
        # 1. filter out keys the current model doesn't have
        state_dict = {k: v for k, v in state_dict.items() if k in model_dict}
        # 2. overwrite entries in the existing state dict
        model_dict.update(state_dict)
        # 3. load the MERGED state dict.
        # Bug fix: previously this loaded the filtered `state_dict` subset,
        # which fails under load_state_dict's default strict=True whenever
        # the checkpoint lacks some of this model's keys - exactly the
        # partial-load scenario this method exists for.
        self.load_state_dict(model_dict)
1698313 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""API鉴权相关代码"""
from functools import wraps
from executor.common.constant import Roles
from executor import exceptions
def enforce(required=Roles.guest):
    """Handler decorator that enforces a minimum role on an API.

    Defaults to guest access. Raises AccessDeniedException (HTTP 401)
    when the current user's role is insufficient.
    """
    assert Roles.contains(required)

    def wrap(handler):
        @wraps(handler)
        def _inner(self, req, *args, **kwargs):
            ctx = getattr(req, "ctx", None)
            # No request context means no logged-in user: deny unless the
            # API is guest-accessible.
            # Bug fix: the original read req.ctx.user unconditionally after
            # the hasattr check, so a guest endpoint hit without a context
            # raised AttributeError instead of being served.
            if ctx is None:
                if required != Roles.guest:
                    raise exceptions.AccessDeniedException()
                current_user = None
            else:
                current_user = ctx.user
            # Anonymous user on a protected API: deny access.
            if not current_user and required != Roles.guest:
                raise exceptions.AccessDeniedException()
            user_role = getattr(current_user, "role", Roles.guest)
            if Roles.permission_check(user_role, required):
                return handler(self, req, *args, **kwargs)
            # Authenticated but insufficient privileges: deny access.
            raise exceptions.AccessDeniedException()
        return _inner
    return wrap
| StarcoderdataPython |
4825227 | #! /usr/bin/env py.test
from __future__ import print_function
import os
import sys
import time
from qs import proc
def test_run_cmd_with_this():
    # run_cmd returns (exit status, captured output).
    st, out = proc.run_cmd([sys.executable, "-c", "import this"])
    assert "Namespaces are one honking great idea -- let's do more of those!" in out


def test_run_cmd_timeout():
    # A 0.2s timeout must kill the 10s sleep; status 9 corresponds to SIGKILL.
    stime = time.time()
    st, out = proc.run_cmd([sys.executable, "-c", "import time; time.sleep(10)"], 0.2)
    needed = time.time() - stime
    assert (st, out) == (9, "")
    assert needed >= 0.18
    assert needed < 0.4


def test_run_cmd_trigger_loopexit():
    # Child closes stdout/stderr then sleeps; run_cmd's read loop must not hang.
    proc.run_cmd(
        [sys.executable, "-uc", "import time, os, this; os.close(1); os.close(2); time.sleep(0.2)"]
    )


def test_run_cmd_exit_before_close():
    # The direct child exits immediately, but its spawned grandchild still
    # holds the inherited pipe and writes after a delay; run_cmd must wait
    # for the pipe to close and capture "foobar!".
    st, out = proc.run_cmd(
        [
            sys.executable,
            "-uc",
            """import os; import sys; os.spawnl(os.P_NOWAIT, sys.executable, sys.executable, "-c", "from __future__ import print_function; import time; time.sleep(0.2); print('foobar!');")""",
        ]
    )
    print(st, out)
    assert (st, out) == (0, "foobar!\n")
def test_run_cmd_execfail():
    """run_cmd should report a clean failure when exec of the binary fails."""
    st, out = proc.run_cmd([sys.executable + "-999"])
    print("status:", st)
    print("out:", repr(out))
    # The child exits with status 97 when exec of the command fails.
    assert os.WIFEXITED(st)
    assert os.WEXITSTATUS(st) == 97
    # Fix: check the major version via sys.version_info instead of
    # int(sys.version[0]) - indexing the version string is fragile and
    # version_info is the supported way to express "Python 2 only".
    if sys.version_info[0] < 3:
        assert "failed to exec" in out
        assert "OSError" in out
        assert "Traceback (most recent call last)" in out
def test_run_cmd_unicode():
    # the error only shows up if sys.getfilesystemencoding() != "utf-8"
    # unset LANG to enforce that and run test in an external process
    # NOTE: the embedded command uses Python 2 syntax (`print st`, `unichr`),
    # so this path is only exercised under a Python 2 interpreter.
    import qs

    if sys.getfilesystemencoding().lower() != "utf-8":
        print("-----", sys.getfilesystemencoding(), "----")
        qs_location = os.path.dirname(os.path.dirname(qs.__file__))
        cmd = (
            """/usr/bin/env PYTHONOPTIMIZE= LANG= %s -c 'import sys; sys.path.insert(0, %s); from qs.proc import run_cmd; st, out = run_cmd([u"echo", "hello", unichr(900)]); print st; print out; assert(st)==0, "failed to execute echo with unicode argument"'"""
            % (sys.executable, repr(qs_location).replace("'", '"'))
        )
        err = os.system(cmd)
        assert err == 0
| StarcoderdataPython |
11713 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
    """Adds db_index to activity-log fields and widens request_url.

    All three operations are AlterField changes on the activitylog model.
    """

    dependencies = [
        ('activity_log', '0003_activitylog_extra_data'),
    ]

    operations = [
        # Index the timestamp for time-range queries.
        migrations.AlterField(
            model_name='activitylog',
            name='datetime',
            field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='datetime', db_index=True),
        ),
        # Index the client IP for per-user lookups.
        migrations.AlterField(
            model_name='activitylog',
            name='ip_address',
            field=models.GenericIPAddressField(blank=True, null=True, verbose_name='user IP', db_index=True),
        ),
        # Index the URL and cap it at 256 characters.
        migrations.AlterField(
            model_name='activitylog',
            name='request_url',
            field=models.CharField(db_index=True, verbose_name='url', max_length=256),
        ),
    ]
3258710 | # -*- coding: utf-8 -*-
import pytest
import concierge.core.exceptions as exceptions
import concierge.core.lexer as lexer
def make_token(indent_lvl=0):
    # Build a synthetic lexer token; every test token is named "a0"
    # (the original computed this via "a{0}".format(0)), only the indent
    # level varies.
    name = "a0"
    return lexer.Token(indent_lvl, name, [name], name, 0)
# NOTE(review): whitespace inside several string literals below (runs of
# spaces in the parametrize data) appears to have been collapsed by text
# extraction; confirm the exact literals against the upstream repository.

@pytest.mark.parametrize(
    "input_, output_", (
        ("", ""),
        (" ", ""),
        (" #", ""),
        ("# ", ""),
        (" # dsfsdfsdf sdfsdfsd", ""),
        (" a", " a"),
        (" a# sdfsfdf", " a"),
        (" a # sdfsfsd x xxxxxxx # sdfsfd", " a")))
def test_clean_line(input_, output_):
    # clean_line strips comments and trailing whitespace, keeping content.
    assert lexer.clean_line(input_) == output_


@pytest.mark.parametrize(
    "input_, output_", (
        ("", ""),
        (" ", " "),
        (" ", " "),
        (" ", " "),
        ("\t ", " "),
        ("\t\t\t", 12 * " "),
        ("\t \t", " "),
        ("\t\t\t ", " "),
        (" \t\t\t ", " ")))
def test_reindent_line(input_, output_):
    # reindent_line normalises tabs to 4-space indents.
    assert lexer.reindent_line(input_) == output_


@pytest.mark.parametrize(
    "indent_", (
        "",
        " ",
        " ",
        "\t",
        "\t\t",
        "\t \t",
        "\t\t ",
        " \t\t"))
@pytest.mark.parametrize(
    "content_", (
        "",
        "a"))
def test_get_split_indent(indent_, content_):
    # get_indent returns the leading whitespace; split_indent returns
    # (indent, remainder) for every indent/content combination.
    text = indent_ + content_
    assert lexer.get_indent(text) == indent_
    assert lexer.split_indent(text) == (indent_, content_)


@pytest.mark.parametrize(
    "text", (
        "#",
        "# ",
        "# sdfsdf #",
        "## sdfsfdf",
        "# #sdf # #"))
def test_regexp_comment_ok(text):
    # RE_COMMENT matches whole-line comments starting at column 0.
    assert lexer.RE_COMMENT.match(text)


@pytest.mark.parametrize(
    "text", (
        "",
        "sdfdsf",
        "sdfsdf#",
        "dzfsdfsdf#sdfsdf",
        "sdf #",
        " #"))
def test_regexp_comment_nok(text):
    # Trailing or indented '#' is not a whole-line comment.
    assert not lexer.RE_COMMENT.match(text)


@pytest.mark.parametrize(
    "text", (
        " ",
        " ",
        " ",
        "\t"))
def test_regexp_indent_ok(text):
    # RE_INDENT matches pure-whitespace prefixes.
    assert lexer.RE_INDENT.match(text)


@pytest.mark.parametrize(
    "text", (
        "",
        "sdf",
        "sdfs ",
        "sdfsfd dsfx"))
def test_regexp_indent_nok(text):
    assert not lexer.RE_INDENT.match(text)


@pytest.mark.parametrize(
    "text", (
        "''",
        "'sdf'",
        "'sdfsf\'sfdsf'",
        "'sdfsd\'\'sdfsf\'sdf\'sdfxx'"
        '""',
        '"sdf"',
        '"sdfsf\"fdsf"',
        '"sdfsd\"\"sdfsf\"sdf\"sdfx"',
        "'\"'",
        "'sdfsdf' \"sdfsdf\"",
        "'sdfx\"sdx' 'sdfdf\"' \"sdfx'sdfffffdf\" \"sdfsdf'sdxx'ds\""))
def test_regexp_quoted_ok(text):
    # RE_QUOTED matches sequences of matching single/double-quoted strings.
    assert lexer.RE_QUOTED.match(text)


@pytest.mark.parametrize(
    "text", (
        "'xx\"",
        "\"sdfk'"))
def test_regexp_quoted_nok(text):
    # Mismatched quote pairs must not match.
    assert not lexer.RE_QUOTED.match(text)


@pytest.mark.parametrize(
    "text", (
        "hhh x",
        "hhh x",
        "hhh \tx",
        "hhh=x",
        "hhh =sdfsf",
        "sdf= sdfx",
        "sdf = sdf",
        "hhh x",
        "sdfsf- x"))
def test_regexp_optvalue_ok(text):
    # RE_OPT_VALUE matches "option value" and "option = value" forms.
    assert lexer.RE_OPT_VALUE.match(text)


@pytest.mark.parametrize(
    "text", (
        "",
        "hhx",
        "sdfsf ",
        " sdfsfdf",
        "sdfsf =",
        "sdfsf= ",
        "sdfsdf = ",
        " "))
def test_regexp_optvalue_nok(text):
    # An option without a value (or leading whitespace) must not match.
    assert not lexer.RE_OPT_VALUE.match(text)


@pytest.mark.parametrize(
    "input_, output_", (
        ("", ""),
        ("a", "a"),
        (" a", " a"),
        (" a", " a"),
        ("\ta", " a"),
        (" \ta", " a"),
        (" \t a", " a"),
        (" \t a ", " a"),
        (" \t a #sdfds", " a"),
        (" \t a #sdfds #", " a"),
        ("a\t", "a"),
        ("a\t\r", "a"),
        ("a\r", "a"),
        ("a\n", "a")))
def test_process_line(input_, output_):
    # process_line = reindent + strip comments + strip trailing whitespace.
    assert lexer.process_line(input_) == output_
@pytest.mark.parametrize(
    "text, indent_len, option, values", (
        ("\ta 1", 1, "a", "1"),
        ("\ta 1 2", 1, "a", ["1", "2"]),
        ("\t\ta 1 2", 2, "a", ["1", "2"]),
        ("a 1 2 'cv'", 0, "a", ["1", "2", "'cv'"]),
        ("a 1 2 \"cv\"", 0, "a", ["1", "2", '"cv"']),
        ("a 1 2 \"cv\" 3", 0, "a", ["1", "2", '"cv"', "3"]),
        ("\ta=1", 1, "a", "1"),
        ("\ta =1 2", 1, "a", ["1", "2"]),
        ("\t\ta= 1 2", 2, "a", ["1", "2"]),
        ("a = 1 2 'cv'", 0, "a", ["1", "2", "'cv'"])))
def test_make_token_ok(text, indent_len, option, values):
    # make_token splits a cleaned "option value..." line into a Token with
    # indent level, option name, value list, and the original text.
    processed_line = lexer.process_line(text)
    token = lexer.make_token(processed_line, text, 1)
    if not isinstance(values, (list, tuple)):
        values = [values]
    assert token.indent == indent_len
    assert token.option == option
    assert token.values == values
    assert token.original == text


@pytest.mark.parametrize(
    "text", (
        "",
        "a",
        "a=",
        "a =",
        "a ",
        "=",
        "==",
        " =asd"))
def test_make_token_incorrect_value(text):
    # Lines without a proper "option value" pair are rejected.
    with pytest.raises(exceptions.LexerIncorrectOptionValue):
        lexer.make_token(text, text, 1)


@pytest.mark.parametrize(
    "offset", (
        1, 2, 3, 5, 6, 7))
def test_make_token_incorrect_indentation(offset):
    # Indentation must be a multiple of 4 spaces.
    text = " " * offset + "a = 1"
    with pytest.raises(exceptions.LexerIncorrectIndentationLength):
        lexer.make_token(text, text, 1)
def test_verify_tokens_empty():
    # verify_tokens is a pass-through for an empty token stream.
    assert lexer.verify_tokens([]) == []


def test_verify_tokens_one_token():
    assert lexer.verify_tokens([make_token(indent_lvl=0)] ) == [make_token(indent_lvl=0)] if False else None  # noqa placeholder
def test_verify_tokens_lex_ok():
    # NOTE(review): the original fixture lost its blank lines and leading
    # whitespace during text extraction. The indented text below is
    # reconstructed from the assertions themselves (token indent levels,
    # `original` strings, and line numbers 5/6/7/9 implying two skipped
    # lines before `q` and one before `e`). Confirm against the upstream
    # repository.
    text = """\
aa = 1
b 1

# comment
    q = 2
    c = 3 # q
        d = 5 'aa' "sdx" xx 3 3

e = 3
""".strip()
    tokens = lexer.lex(text.split("\n"))

    assert len(tokens) == 6

    assert tokens[0].indent == 0
    assert tokens[0].option == "aa"
    assert tokens[0].values == ["1"]
    assert tokens[0].original == "aa = 1"
    assert tokens[0].lineno == 1

    assert tokens[1].indent == 0
    assert tokens[1].option == "b"
    assert tokens[1].values == ["1"]
    assert tokens[1].original == "b 1"
    assert tokens[1].lineno == 2

    assert tokens[2].indent == 1
    assert tokens[2].option == "q"
    assert tokens[2].values == ["2"]
    assert tokens[2].original == "    q = 2"
    assert tokens[2].lineno == 5

    assert tokens[3].indent == 1
    assert tokens[3].option == "c"
    assert tokens[3].values == ["3"]
    assert tokens[3].original == "    c = 3 # q"
    assert tokens[3].lineno == 6

    assert tokens[4].indent == 2
    assert tokens[4].option == "d"
    assert tokens[4].values == ["5", "'aa'", '"sdx"', "xx", "3", "3"]
    assert tokens[4].original == "        d = 5 'aa' \"sdx\" xx 3 3"
    assert tokens[4].lineno == 7

    assert tokens[5].indent == 0
    assert tokens[5].option == "e"
    assert tokens[5].values == ["3"]
    assert tokens[5].original == "e = 3"
    assert tokens[5].lineno == 9
def test_lex_incorrect_first_indentation():
    # NOTE(review): leading whitespace in this fixture was lost during text
    # extraction; for LexerIncorrectFirstIndentationError to be raised, the
    # first option line must be indented. Reconstructed accordingly -
    # confirm against the upstream repository.
    text = """\
    a = 1
b = 3
"""
    with pytest.raises(exceptions.LexerIncorrectFirstIndentationError):
        lexer.lex(text.split("\n"))
| StarcoderdataPython |
3259226 | from flask import Flask, g
from src.model.databaseHandler import ModelDatabase
from flask_socketio import SocketIO
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, async_mode="gevent")

# Prevent the spam of death
import logging
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)


def databaseHandler():
    # Returns a database handler; currently a fresh ModelDatabase per call.
    # NOTE(review): the commented-out code cached one handler on flask.g -
    # confirm whether per-call construction is intended.
    #if not hasattr(g, "databaseHandler"):
    #    g.databaseHandler = ModelDatabase
    #return g.databaseHandler()
    return ModelDatabase()


# These modules register routes/handlers against `app`/`socketio` as an
# import side effect, so they must be imported after both are created.
import src.server.ui
import src.server.kinect
import src.server.api.users_api
import src.server.api.sessions_api
import src.server.api.instanceConfiguration_api
import src.server.api.image_api
import src.server.api.canvas_api
import src.server.api.stickyNote_api


def run_server():
    # Listen on all interfaces, port 8088.
    socketio.run(app, host="0.0.0.0", port=8088, debug=False)


if __name__ == '__main__':
    run_server()
| StarcoderdataPython |
170360 | import tensorflow as tf
slim = tf.contrib.slim
from helper_net.inception_v4 import *
import pickle
import numpy as np
def get_weights():
    """Extract Inception-v4 checkpoint weights into a pickled list.

    Restores the pretrained checkpoint, walks the graph variables, and
    writes `weights.p`: a list where each entry is either a one-element
    list (a conv kernel) or a three-element list (batch-norm params), with
    the final classifier weights/biases appended last. Aux-head variables
    are skipped.
    """
    checkpoint_file = '../checkpoints/inception_v4.ckpt'
    sess = tf.Session()
    arg_scope = inception_v4_arg_scope()
    input_tensor = tf.placeholder(tf.float32, (None, 299, 299, 3))
    with slim.arg_scope(arg_scope):
        logits, end_points = inception_v4(input_tensor, is_training=False)
    saver = tf.train.Saver()
    saver.restore(sess, checkpoint_file)
    final_weights = []
    current_bn = []
    final_lr = []
    vars_model = tf.global_variables()
    # Assumes variables come in groups of 4 per layer (index 0 = kernel,
    # indices 1-3 = batch-norm parameters) - TODO confirm against the
    # slim inception_v4 variable ordering.
    for i in range(0, len(vars_model), 4):
        for y in range(4):
            key = vars_model[i+y]
            if not "Aux" in key.name:
                if y in [1, 2, 3] and not "Logits" in key.name:
                    value = sess.run(key)
                    if y == 1:
                        current_bn = []
                        current_bn.append(value)
                    elif y == 2:
                        current_bn.append(value)
                    elif y == 3:
                        current_bn.append(value)
                        final_weights.append(current_bn)
                elif "Logits" in key.name:
                    # Classifier weights, then biases, collected separately
                    # and appended once at the end.
                    value = sess.run(key)
                    if not "biases" in key.name:
                        final_lr.append(value)
                    else:
                        final_lr.append(value)
                        final_weights.append(final_lr)
                else:
                    value = sess.run(key)
                    final_weights.append([value])
    with open('weights.p', 'wb') as fp:
        pickle.dump(final_weights, fp)


if __name__ == "__main__":
    get_weights()
3342273 | <filename>python/caty/core/camb/__init__.py
# coding: utf-8
def create_bindings(action_fs, app):
    """Parse all .camb / .camb.lit files under the action filesystem and
    return a populated ModuleBinderContainer (or None when the app has no
    ambient)."""
    from caty.core.camb.parser import BindingParser, LiterateBindingParser
    from caty.core.camb.binding import ModuleBinderContainer
    from caty.core.language.util import remove_comment
    if app._no_ambient:
        return
    binders = ModuleBinderContainer()
    for f in action_fs.opendir(u'/').read(True):
        if not f.is_dir and (f.path.endswith('.camb') or f.path.endswith('.camb.lit')):
            try:
                # Progress output: "OK"/"NG" is appended per file below.
                msg = app.i18n.get('Bindings: $path', path=f.path.strip('/'))
                app.cout.write(u' * ' + msg)
                app.cout.write('...')
                source = f.read()
                # Module name: path with extension dropped, '/' -> '.'.
                if f.path.endswith('.camb.lit'):
                    parser = LiterateBindingParser(f.path.strip('/').split('.')[0].replace('/', '.'), binders, app)
                else:
                    parser = BindingParser(f.path.strip('/').split('.')[0].replace('/', '.'), binders, app)
                binder = parser.run(source, hook=lambda seq:remove_comment(seq, is_doc_str), auto_remove_ws=True)
                app.cout.writeln('OK')
            except:
                app.cout.writeln('NG')
                raise
    return binders
def is_doc_str(seq):
    # Predicate used as a comment-removal hook: True when the upcoming
    # tokens form a docstring followed (after whitespace and an optional
    # annotation) by the 'bind' keyword.
    # NOTE: this is a Python 2 module (`except Exception, e` syntax below);
    # `e` is unused. Also, `annotation` is not imported in this module -
    # NOTE(review): confirm where it is expected to come from upstream.
    from topdown import option, skip_ws
    from caty.core.language.util import docstring
    _ = seq.parse(option(docstring))
    if _:
        try:
            seq.parse(skip_ws)
            seq.parse(option(annotation))
            seq.parse(skip_ws)
            seq.parse(['bind'])
            return True
        except Exception, e:
            return False
    return False
| StarcoderdataPython |
114026 | #!/usr/bin/env python
"""
wiggletools_commands.py
<NAME> / December 15, 2015
Writes wiggletools commands for computing mean bigwigs by tissue. Each set
of commands is numbered. They should be executed in order; some commands
in successive files depend on commands from previous files.
"""
import gzip
from collections import defaultdict
import os
import glob
import sys
if __name__ == '__main__':
    # NOTE: Python 2 script (`print >>`, xrange); run with a python2 interpreter.
    import argparse
    parser = argparse.ArgumentParser(description=__doc__, 
                formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--gtex-dir', required=True,
        help='path to GTEx output files; this is where the batch_* '
             'subdirectories are')
    parser.add_argument('--auc', required=True,
        help='path to file with bigwig AUCs for normalization; '
             'this script normalizes to library size 40 million 100-bp reads')
    parser.add_argument('--wiggletools', required=True,
        help='path to wiggletools')
    # NOTE(review): no type=int here — a value passed on the CLI arrives as a
    # string and would break the slicing/xrange arithmetic below; confirm.
    parser.add_argument('--max-bw', required=False,
        default=500,
        help='max number of bigwig files to process at a time')
    parser.add_argument('--out', required=True,
        help='path to output directory')
    args = parser.parse_args()
    containing_dir = os.path.dirname(os.path.realpath(__file__))
    # Create original sample index to new sample index map
    sample_name_to_bw = {}
    big_name_to_sample_name = {}
    batch_numbers = []
    # Each *.manifest next to this script maps "...:<sample>\t...\t<bigwig name>"
    # rows to the per-batch coverage bigwig path.
    for manifest in glob.glob(os.path.join(containing_dir, '*.manifest')):
        batch_number = int(manifest.partition('.')[0].rpartition('_')[2])
        batch_numbers.append(batch_number)
        with open(manifest) as manifest_stream:
            for j, line in enumerate(manifest_stream):
                line = line.strip()
                if not line: continue
                sample_name = line.partition('\t')[0].partition(':')[2]
                big_name_to_sample_name[line.rpartition('\t')[2]] = sample_name
                sample_name_to_bw[sample_name] = os.path.join(
                        args.gtex_dir,
                        'batch_{}'.format(batch_number),
                        'coverage_bigwigs',
                        line.rpartition('\t')[2] +
                        '.bw'
                    )
    # Per-sample AUCs: tab-separated "<sample>\t<auc>" rows, used for the
    # 40M * 100bp library-size normalization below.
    sample_name_to_auc = {}
    with open(args.auc) as auc_stream:
        for line in auc_stream:
            tokens = line.strip().split('\t')
            sample_name_to_auc[tokens[0].strip()] = float(tokens[1])
    # Tissue labels come from column 41 of SRA run info; replicate runs skipped.
    sample_name_to_tissue = {}
    with open(os.path.join(containing_dir, 'SraRunInfo.csv')) as sra_stream:
        sra_stream.readline()
        for line in sra_stream:
            line = line.strip()
            if not line: continue
            if '_rep1' in line or '_rep2' in line: continue
            tokens = line.split(',')
            sample_name_to_tissue[tokens[0]] = tokens[41]
    # Handle exceptions: some samples in SraRunInfo don't have tissues labeled
    sample_name_to_tissue['SRR1325138'] = 'Skin'
    sample_name_to_tissue['SRR1325690'] = 'Stomach'
    sample_name_to_tissue['SRR1397115'] = 'Esophagus'
    sample_name_to_tissue['SRR1405266'] = 'Esophagus'
    sample_name_to_tissue['SRR1467633'] = 'Skin'
    # Group the samples that have AUCs by tissue.
    tissue_to_sample_names = defaultdict(list)
    for sample_name in sample_name_to_auc:
        tissue_to_sample_names[
                sample_name_to_tissue[sample_name]
            ].append(sample_name)
    try:
        os.makedirs(args.out)
    except OSError as e:
        # NOTE(review): `'File exists' not in e` tests substring membership on
        # the exception object, not its message — presumably meant str(e).
        if 'File exists' not in e:
            raise
    # wiggletools_commands_<k> holds the k-th wave of sum commands; wave k+1
    # may depend on outputs of wave k, so the files must be run in order.
    file_handles = [
            open(os.path.join(args.out, 'wiggletools_commands_0'), 'w')
        ]
    more_than_one_batch = {}
    for tissue in tissue_to_sample_names:
        more_than_one_batch[tissue] = False
        file_index = 0
        # Split the tissue's samples into chunks of at most max_bw bigwigs.
        divided_sample_names = [
                tissue_to_sample_names[tissue][i:i+args.max_bw]
                for i in xrange(0, len(tissue_to_sample_names[tissue]),
                                args.max_bw)
            ]
        # Remove a lonely sample
        if (len(divided_sample_names) >= 2
            and len(divided_sample_names[-1]) == 1):
            divided_sample_names[-2].append(divided_sample_names[-1][0])
            divided_sample_names = divided_sample_names[:-1]
        for i, sample_group in enumerate(divided_sample_names):
            # Scale each bigwig to a 40M-read, 100-bp library before summing.
            command_to_print = ' '.join([args.wiggletools, 'sum'] + [
                    'scale {} {}'.format(
                        float(40000000) * 100
                        / sample_name_to_auc[sample_name],
                        sample_name_to_bw[sample_name]
                    ) for sample_name in sample_group
                ]) + ' >{}'.format(
                        os.path.join(args.out,
                                        tissue.replace(' ', '_')
                                        + '.sum_{}.wig'.format(i))
                    )
            if i >= 1:
                more_than_one_batch[tissue] = True
            try:
                print >>file_handles[i], command_to_print
            except IndexError:
                # First command for wave i: open its command file lazily.
                file_handles.append(
                        open(os.path.join(args.out,
                                            'wiggletools_commands_' + str(i)), 'w')
                    )
                print >>file_handles[i], command_to_print
    next_index = len(file_handles)
    file_handles.append(
            open(os.path.join(args.out,
                                'wiggletools_commands_'
                                + str(next_index)), 'w')
        )
    import glob
    # Per-tissue mean: scale the (sum of) partial sums by 1/#samples.
    for tissue in tissue_to_sample_names:
        print >>file_handles[-1], ' '.join([args.wiggletools, 'scale',
                str(1./len(tissue_to_sample_names[tissue])),
                ('sum ' + os.path.join(args.out,
                    tissue.replace(' ', '_') + '.sum_*.wig'))
                if more_than_one_batch[tissue]
                else os.path.join(args.out,
                    tissue.replace(' ', '_') + '.sum_*.wig'),
                '>' + os.path.join(args.out,
                    tissue.replace(' ', '_') + '.mean.wig')])
    file_handles.append(
            open(os.path.join(args.out,
                                'wiggletools_commands_'
                                + str(next_index+1)), 'w')
        )
    # Overall mean: weight each tissue mean by its share of all samples.
    sample_count = len(sample_name_to_auc)
    print >>file_handles[-1], ' '.join([args.wiggletools, 'sum']
            + ['scale {} {}'.format(
                    float(
                        len(tissue_to_sample_names[tissue])
                    ) / sample_count,
                    os.path.join(args.out,
                        tissue.replace(' ', '_') + '.mean.wig')
                ) for tissue in tissue_to_sample_names]) + (
                ' >{}'.format(
                    os.path.join(args.out,
                        'mean.wig')
                )
            )
    for file_handle in file_handles:
        file_handle.close()
| StarcoderdataPython |
17769 | from pathlib import Path
from toolz import itertoolz, curried
import vaex
# Convert a pathlib.Path to its POSIX string form.
transform_path_to_posix = lambda path: path.as_posix()
def path_to_posix():
    """Curried valmap turning every dict value (a pathlib.Path) into a POSIX string."""
    return curried.valmap(transform_path_to_posix)
# NOTE(review): reads the file with vaex.from_ascii even though the name says
# xlsx — presumably the inputs are tab-separated text exports; confirm, or
# switch to a real xlsx reader. (vaex's parameter really is spelled 'seperator'.)
transform_xlsx_to_vaex = lambda path: vaex.from_ascii(path, seperator="\t")
def xlsx_to_vaex():
    """Curried valmap loading every dict value (a file path) via transform_xlsx_to_vaex."""
    # Bug fix: previously returned curried.valmap(transform_ascii_to_vaex),
    # silently using the plain-ASCII transform instead of the xlsx one.
    return curried.valmap(transform_xlsx_to_vaex)
# Read a tab-separated ASCII file into a vaex DataFrame.
transform_ascii_to_vaex = lambda path: vaex.from_ascii(path, seperator="\t")
def ascii_to_vaex():
    """Curried valmap loading every dict value (a file path) via vaex.from_ascii (tab-separated)."""
    return curried.valmap(transform_ascii_to_vaex)
# Like transform_ascii_to_vaex but with vaex's default separator.
transform_ascii_to_vaex2 = lambda path: vaex.from_ascii(path)
def ascii_to_vaex2():
    """Curried valmap loading every dict value via vaex.from_ascii with the default separator."""
    return curried.valmap(transform_ascii_to_vaex2)
# Materialize a vaex DataFrame's rows as a list; iterrows yields (index, row)
# pairs, so itertoolz.second keeps only the row.
transform_vaex_to_list = lambda df: [itertoolz.second(x) for x in df.iterrows()]
def vaex_rows_to_list():
    """Curried valmap converting every dict value (a vaex DataFrame) to a list of rows."""
    return curried.valmap(transform_vaex_to_list)
# Convert a vaex DataFrame into its column-oriented dict representation.
transform_vaex_to_dict = lambda df: df.to_dict()
def vaex_to_dict():
    """Curried valmap converting every dict value (a vaex DataFrame) to a plain dict."""
    return curried.valmap(transform_vaex_to_dict)
| StarcoderdataPython |
141529 | #!/usr/bin/env python3
# Maximize a*x//b - a*(x//b) over 0 <= x <= n; the maximum is reached at
# x = min(n, b - 1), so only that candidate needs evaluating.
a, b, n = (int(tok) for tok in input().split())
x = n if n < b - 1 else b - 1
print(a*x//b - a*(x // b)) | StarcoderdataPython |
3251998 | from polyphony import testbench
def if10(x, y, z):
    """Map (x, y) to a small code; z is only a seed and is overwritten on every path.

    x == 0: return y clamped to {0, 1, 2} (anything not 0/1 becomes 2);
    x == 1: return 1; otherwise return 2.
    """
    if x == 1:
        return 1
    if x != 0:
        return 2
    if y == 0:
        return 0
    if y == 1:
        return 1
    return 2
@testbench
def test():
    """Polyphony testbench: exercise every branch combination of if10."""
    assert 0 == if10(0, 0, 1)
    assert 1 == if10(0, 1, 0)
    assert 2 == if10(0, 2, 0)
    assert 1 == if10(1, 0, 0)
    assert 2 == if10(2, 0, 0)
test()
| StarcoderdataPython |
3259571 | <reponame>Gugush284/get<filename>5_lab/5-2-adc-sar.py<gh_stars>0
import RPi.GPIO as GPIO
import time
# DAC input pins (BCM numbering), MSB first — presumably driving an R-2R
# ladder; confirm against the board wiring.
dac = [26, 19, 13, 6, 5, 11, 9, 10]
# Comparator input: reads 0 when the DAC output exceeds the measured signal
# (see adc() below), 1 otherwise — TODO confirm polarity on the hardware.
comp = 4
# Power pin for the "troyka" analog module.
troyka = 17
# Full-scale reference voltage, in volts.
maxVolt = 3.3
GPIO.setmode(GPIO.BCM)
GPIO.setup(dac, GPIO.OUT, initial = 0)
GPIO.setup(troyka, GPIO.OUT, initial = 1)
GPIO.setup(comp, GPIO.IN)
def adc():
    """Successive-approximation ADC read: returns an 8-bit value (0-255).

    Sets each DAC bit from MSB to LSB, keeping it only when the comparator
    indicates the DAC output has not overshot the input voltage.
    """
    bits = [0, 0, 0, 0, 0, 0, 0, 0]
    for pos in range(8):
        bits[pos] = 1
        GPIO.output(dac, bits)
        time.sleep(0.01)  # let the ladder output settle before sampling
        if GPIO.input(comp) == 0:
            bits[pos] = 0  # overshoot: drop this bit again
    print ("signal = {}".format(bits))
    # Fold the bit vector into an integer, MSB first (bit i weighs 2**(7-i)).
    return sum(1 << (7 - pos) for pos, bit in enumerate(bits) if bit == 1)
try:
    # Continuously sample the input and report both the raw 8-bit reading and
    # the reconstructed voltage (value / 2**8 * full-scale).
    while True:
        value = adc ()
        voltage = value/(2**len(dac)) * maxVolt
        print ("ADC value = {}, input voltage = {:.2f}".format(value, voltage))
finally:
    # De-energize the DAC and the troyka module before releasing the pins.
    GPIO.output(dac, 0)
    GPIO.output(troyka, 0)
GPIO.cleanup() | StarcoderdataPython |
4836867 | # Copyright 2019 Cortex Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
# Package metadata for the cortex Python client.
setup(
    name="cortex",
    version="master", # CORTEX_VERSION
    description="",
    author="Cortex Labs",
    author_email="<EMAIL>",
    # Runtime deps: dill (serialization), requests (HTTP), msgpack (payloads).
    install_requires=["dill>=0.3.0", "requests>=2.20.0", "msgpack>=0.6.0"],
    setup_requires=["setuptools"],
    packages=find_packages(),
)
| StarcoderdataPython |
54904 | <filename>log/urls.py
from django.conf.urls import url, include
from . import views
from django.contrib.auth import views as auth_views
# URL namespace, used when reversing, e.g. reverse('log:login_url').
app_name = 'log'
urlpatterns = [
    url(r'^login/$', views.login_view, name='login_url'),
    url(r'^logout/$', views.logout_view, name='logout_url'),
]
| StarcoderdataPython |
3310458 | import sys
import pickle
from preprocessing.simplesrl import read_simplesrl
class LabeledData(object):
    """Parallel lists of tokenized sentences and their per-token label sequences.

    self.data[0][i] is a list of words; self.data[1][i] is the matching list of
    labels, always the same length.
    """
    def __init__(self):
        self.data = [[], []]  # [sentences, labels]

    def getData(self):
        """Return the [sentences, labels] pair of parallel lists."""
        return self.data

    def getSentences(self):
        return self.data[0]

    def getLabels(self):
        return self.data[1]

    def addPair(self, sentence, labels):
        """Append one (sentence, labels) pair; both must have the same length."""
        # Bug fix: `is not` compared integer identity, which only works by
        # accident for CPython's small-int cache (<= 256); use != for equality.
        if len(sentence) != len(labels):
            raise Exception('Expecting length of given pair to be equal, but sentence contains {0} words and labels {1} entries.'.format(len(sentence), len(labels)))
        self.data[0].append(sentence)
        self.data[1].append(labels)

    def addData(self, labeledData):
        """Extend this container with another [sentences, labels] pair of lists."""
        # Same `is not` -> != fixes as in addPair.
        if len(labeledData) != 2:
            raise Exception('Expecting labeled data to contain exactly two lists (with corresponding pairs)')
        if len(labeledData[0]) != len(labeledData[1]):
            raise Exception('Given lists of sentences and associated labels do not match in length.')
        self.data[0].extend(labeledData[0])
        self.data[1].extend(labeledData[1])
class WordIndex(object):
    """Incrementally built word -> integer index with a lazily cached inverse map."""

    def __init__(self):
        self.words = []  # legacy attribute, kept for backward compatibility (no longer used)
        self.words2index = {}
        self.index2words = {}
        self.changed = False  # True when index2words is stale

    def getSize(self):
        """Number of distinct indexed words."""
        # Bug fix: self.words was never populated, so len(self.words) always
        # returned 0; the real size lives in words2index.
        return len(self.words2index)

    def getCurrentIndex(self):
        return self.words2index

    def getIndex2Word(self):
        """Return the index -> word map, rebuilding it only when the index changed."""
        if self.changed or self.index2words is None:
            self.index2words = {}
            for w in self.words2index:
                self.index2words[self.words2index[w]] = w
            self.changed = False  # cache is now in sync with words2index

        return self.index2words

    def loadIndex(self, indexPath):
        """Merge a pickled index (plain dict or WordIndex) from indexPath into this one."""
        # Bug fix: pickle requires binary mode; storeCurrentIndex writes 'wb'.
        with open(indexPath, 'rb') as f:
            data = pickle.load(f)
        if isinstance(data, WordIndex):
            self.merge(data)
        else:
            # storeCurrentIndex pickles the plain dict (not the object), so the
            # old merge(otherIndex) call failed; merge word by word instead,
            # preserving the stored index order.
            currentMax = len(self.words2index)
            for word in sorted(data, key=data.get):
                if word not in self.words2index:
                    self.words2index[word] = currentMax
                    currentMax += 1
            self.setChanged()

    def setChanged(self):
        """Mark the cached inverse map as stale."""
        self.changed = True

    def loadIndexReplace(self, indexPath):
        """Replace the current word -> index dict with a pickled one."""
        # Bug fix: binary mode to match storeCurrentIndex's 'wb'.
        with open(indexPath, 'rb') as f:
            self.words2index = pickle.load(f)
        self.setChanged()

    def addWords(self, words):
        """Assign the next free indices to any words not yet indexed."""
        currentMax = len(self.words2index)
        for w in words:
            if not w in self.words2index:
                self.words2index[w] = currentMax
                currentMax += 1
        self.setChanged()

    def addSentences(self, sentences):
        """Index every whitespace-separated token of each sentence string."""
        for s in sentences:
            self.addWords(s.split())
        self.setChanged()

    def addSplittedSentences(self, sentences):
        """Index tokens of sentences that are already lists of words."""
        for s in sentences:
            self.addWords(s)
        self.setChanged()

    def addWordsFromFile(self, filePath):
        # Unreachable trailing setChanged() call removed.
        raise Exception('not implemented yet')

    def merge(self, otherIndex):
        """Add all words of another WordIndex, giving fresh indices to unseen ones."""
        currentMax = len(self.words2index)
        for word in otherIndex.words2index:
            if not word in self.words2index:
                self.words2index[word] = currentMax
                currentMax += 1
        self.setChanged()

    def storeCurrentIndex(self, indexPath):
        """Pickle the plain word -> index dict to indexPath."""
        # Fixed: close the file handle instead of leaking it.
        with open(indexPath, 'wb') as f:
            pickle.dump(self.words2index, f)
'''
Each entry in the list denotes one sentence (string)
'''
def create_word_index(sentences):
    """Build a word -> index dict from parsed rows.

    Each element of `sentences` is a sequence whose first item is the sentence
    text; tokens are indexed in order of first appearance.
    """
    mapping = {}
    for row in sentences:
        for token in row[0].split():
            if token not in mapping:
                mapping[token] = len(mapping)
    return mapping
if __name__ == "__main__":
    # Usage: python create_word_index.py <simplesrl-file>
    # Parses the file and prints the pickled word index to stdout (Python 2).
    if len(sys.argv) < 2:
        raise Exception('Pass file path as argument: python create_word_index.py source')
    filePath = sys.argv[1]
    errors, sentences = read_simplesrl(filePath)
    #print "Got {0} sentences parsed".format(len(sentences))
    #print ""
    words2index = create_word_index(sentences)
    #print "Created {0} words in index".format(len(words2index))
    #print words2index
print pickle.dumps(words2index) #, open(destinationPath, 'wb')) | StarcoderdataPython |
66278 | # PyQt5 modules
from PyQt5.QtGui import QColor, QPainter, QRadialGradient, QBrush
from PyQt5.QtCore import Qt, QTimer, QSize, pyqtSlot, pyqtProperty
from PyQt5.QtWidgets import QWidget, QApplication
class LedWidget(QWidget):
    """Round LED indicator with configurable color, alignment, state and flashing."""

    def __init__(self, parent=None):
        super(LedWidget, self).__init__(parent)
        self._diamX = 0
        self._diamY = 0
        self._diameter = 20
        self._color = QColor("gray")
        self._alignment = Qt.AlignCenter
        self._state = True        # True = lit (gradient to _color), False = dark
        self._flashing = False
        self._flashRate = 100     # flash half-period in milliseconds
        self._timer = QTimer()
        self._timer.timeout.connect(self.toggleState)
        self.setDiameter(self._diameter)

    def paintEvent(self, event):
        """Draw the LED at the configured alignment and (re)arm the flash timer."""
        painter = QPainter()
        # Horizontal placement of the circle within the widget rect.
        x = 0
        y = 0
        if self._alignment & Qt.AlignLeft:
            x = 0
        elif self._alignment & Qt.AlignRight:
            x = self.width() - self._diameter
        elif self._alignment & Qt.AlignHCenter:
            x = (self.width() - self._diameter) / 2
        elif self._alignment & Qt.AlignJustify:
            x = 0
        # Vertical placement.
        if self._alignment & Qt.AlignTop:
            y = 0
        elif self._alignment & Qt.AlignBottom:
            y = self.height() - self._diameter
        elif self._alignment & Qt.AlignVCenter:
            y = (self.height() - self._diameter) / 2
        # Radial highlight: white center fading to the LED color (on) or black (off).
        gradient = QRadialGradient(x + self._diameter / 2, y + self._diameter / 2,
                                   self._diameter * 0.4, self._diameter * 0.4, self._diameter * 0.4)
        gradient.setColorAt(0, Qt.white)
        if self._state:
            gradient.setColorAt(1, self._color)
        else:
            gradient.setColorAt(1, Qt.black)
        painter.begin(self)
        brush = QBrush(gradient)
        painter.setPen(self._color)
        painter.setRenderHint(QPainter.Antialiasing, True)
        painter.setBrush(brush)
        painter.drawEllipse(x, y, self._diameter - 1, self._diameter - 1)
        # Keep the flash timer in sync with the current flashing settings.
        if self._flashRate > 0 and self._flashing:
            self._timer.start(self._flashRate)
        else:
            self._timer.stop()
        painter.end()

    def minimumSizeHint(self):
        return QSize(self._diameter, self._diameter)

    def sizeHint(self):
        return QSize(self._diameter, self._diameter)

    def getDiameter(self):
        return self._diameter

    @pyqtSlot(int)
    def setDiameter(self, value):
        self._diameter = value
        self.update()

    def getColor(self):
        return self._color

    @pyqtSlot(QColor)
    def setColor(self, value):
        self._color = value
        self.update()

    def getAlignment(self):
        return self._alignment

    @pyqtSlot(Qt.Alignment)
    def setAlignment(self, value):
        self._alignment = value
        self.update()

    def getState(self):
        # Bug fix: previously returned self._alignment, so the `state`
        # property reported the alignment flags instead of the on/off state.
        return self._state

    @pyqtSlot(bool)
    def setState(self, value):
        self._state = value
        self.update()

    @pyqtSlot()
    def toggleState(self):
        self._state = not self._state
        self.update()

    def isFlashing(self):
        return self._flashing

    @pyqtSlot(bool)
    def setFlashing(self, value):
        self._flashing = value
        self.update()

    def getFlashRate(self):
        return self._flashRate

    @pyqtSlot(int)
    def setFlashRate(self, value):
        self._flashRate = value
        self.update()

    @pyqtSlot()
    def startFlashing(self):
        self.setFlashing(True)

    @pyqtSlot()
    def stopFlashing(self):
        self.setFlashing(False)

    # Qt properties so the attributes are designable/animatable from Qt.
    diameter = pyqtProperty(int, getDiameter, setDiameter)
    color = pyqtProperty(QColor, getColor, setColor)
    alignment = pyqtProperty(Qt.Alignment, getAlignment, setAlignment)
    state = pyqtProperty(bool, getState, setState)
    flashing = pyqtProperty(bool, isFlashing, setFlashing)
    flashRate = pyqtProperty(int, getFlashRate, setFlashRate)
if __name__ == "__main__":
    # Demo: show a single LED widget with default settings.
    import sys
    app = QApplication(sys.argv)
    led = LedWidget()
    led.show()
    sys.exit(app.exec_())
| StarcoderdataPython |
1620341 | <filename>addon_common/common/functools.py
'''
Copyright (C) 2021 CG Cookie
http://cgcookie.com
<EMAIL>
Created by <NAME>, <NAME>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from inspect import isfunction
##################################################
# find functions of object that has key attribute
# returns list of (attribute value, fn)
def find_fns(obj, key):
    """Collect the plain functions on obj's class that carry attribute `key`.

    Returns a list of (attribute value, function) pairs, in dir() order.
    """
    cls = type(obj)
    fn_type = type(find_fns)
    tagged = []
    for attr_name in dir(cls):
        member = getattr(cls, attr_name)
        # Compare types directly rather than isfunction() because bpy has problems!
        if type(member) != fn_type:
            continue
        if hasattr(member, key):
            tagged.append((getattr(member, key), member))
    return tagged
| StarcoderdataPython |
1605661 | #!/usr/bin/env python3
import json
import pdb
import csv
from collections import OrderedDict
import os.path
from pathlib import Path
from datetime import datetime
import re
import pandas as pd
import sys
import settings
import scrape_schedule
def CalcPercent(total, skip, correct):
    """Percentage of correct picks among graded games, or None when none were graded."""
    graded = total - skip
    if graded == 0:
        return None
    return round(correct / graded * 100., 2)
def GetPercent(item):
    """Parse a percentage cell like '63.5%'; blank or '?'-only cells yield -1."""
    cleaned = item.replace("%", "").replace("?", "")
    return -1 if not cleaned.strip() else float(cleaned)
def GetIndex(item):
    """Return the first run of digits in the basename of `item`, or -1 if none."""
    match = re.search(r'\d+', os.path.basename(str(item)))
    return int(match.group()) if match else -1
def GetFiles(path, templatename):
    """Return files under `path` matching `templatename`, ordered by the week
    number embedded in each file name (slots 0-18)."""
    matches = [p for p in Path(path).glob(templatename)]
    slots = ["?"] * 19
    for item in matches:
        idx = GetIndex(item)
        # NOTE(review): a name with no digits yields idx == -1, which lands in
        # slot 18 (negative indexing) — preserved from the original; confirm
        # file names always contain a week number.
        if idx < len(slots):
            slots[idx] = item
    return [entry for entry in slots if entry != "?"]
def CurrentScheduleFiles(filename):
    """True when `filename` was last modified today (schedule cache still fresh)."""
    modified = datetime.fromtimestamp(os.path.getmtime(filename))
    return modified.date() >= datetime.now().date()
return True
def RefreshScheduleFiles():
    """Re-scrape the current year's schedule files via the scrape_schedule tool.

    Mutates scrape_schedule's module-level `year` and forwards this script's
    CLI arguments (minus the program name) to it.
    """
    now = datetime.now()
    year = int(now.year)
    scrape_schedule.year = year
    scrape_schedule.main(sys.argv[1:])
def GetActualScores(abbra, teama, abbrb, teamb, scores):
    """Parse a score string like 'uga 24, bama 20' into (teamA score, teamB score).

    Sentinels: (-1, -1) unplayed/unparseable, (-2, -2) postponed, (-3, -3) canceled.
    An overtime suffix like '(2ot)' is tolerated as a ninth token.
    """
    tokens = re.split(r'(,|\s)\s*', str(scores).lower())
    if not tokens:
        return -1, -1
    first = tokens[0].strip()
    if first == "canceled":
        return -3, -3
    if first == "postponed":
        return -2, -2
    if first == "?":  # not yet played
        return -1, -1
    # A plain result splits into 7 tokens (separators are captured); an
    # overtime result has two extra tokens ending in '(Not)'.
    is_overtime = len(tokens) == 9 and "ot)" in tokens[8]
    if not is_overtime and len(tokens) != 7:
        return -1, -1
    if abbra.lower().strip() not in tokens and abbrb.lower().strip() not in tokens:
        return -1, -1
    # tokens[2] is the first listed team's score, tokens[6] the second's.
    if abbra.lower().strip() == tokens[0].lower().strip():
        return int(tokens[2]), int(tokens[6])
    return int(tokens[6]), int(tokens[2])
# Top-level driver: compare predicted weekly picks against actual schedule
# results for the current year and write results.json / results.csv.
now = datetime.now()
saved_path = "{0}{1}/{2}".format(settings.predict_root, int(now.year), settings.predict_saved)
sched_path = "{0}{1}/{2}".format(settings.predict_root, int(now.year), settings.predict_sched)
verbose = False
if (len(sys.argv)==2):
    verbose = True
print ("Measure Actual Results Tool")
print ("**************************")
Path(sched_path).mkdir(parents=True, exist_ok=True)
RefreshScheduleFiles()
file = '{0}sched1.json'.format(sched_path)
if (not os.path.exists(file)):
    if (verbose):
        print ("schedule files are missing, run the scrape_schedule tool to create")
    exit()
Path(saved_path).mkdir(parents=True, exist_ok=True)
file = '{0}week1.csv'.format(saved_path)
if (not os.path.exists(file)):
    if (verbose):
        print ("Weekly files are missing, run the score_week tool to create")
    exit()
# Load each week's schedule JSON (ordered) and tag it with its week number.
sched_files = GetFiles(sched_path, "sched*.json")
list_sched = []
for file in sched_files:
    with open(file) as sched_file:
        item = json.load(sched_file, object_pairs_hook=OrderedDict)
        item['Week'] = GetIndex(file)
        list_sched.append(item)
# Load each week's prediction rows, tagged the same way.
week_files = GetFiles(saved_path, "week*.csv")
list_week = []
for file in week_files:
    with open(file) as week_file:
        reader = csv.DictReader(week_file)
        for row in reader:
            row['Week'] = GetIndex(file)
            list_week.append(row)
# Parallel result columns, one entry per week (plus a totals row at the end).
IDX=[]
A=[]
B=[]
C=[]
D=[]
E=[]
index = 0
alltotal = 0
allskip = 0
allcorrect = 0
count = 0
for idx in range(len(list_sched)):
    total = 0
    skip = 0
    correct = 0
    week = list_sched[idx]["Week"]
    # Iterate the schedule dict's values; the appended 'Week' value marks the
    # end of the games for this week, hence the break on equality.
    for item in list_sched[idx].values():
        if (item == week):
            break
        total += 1
        chancea = -1
        abbra = ""
        abbrb = ""
        teama = ""
        teamb = ""
        # Predictions are consumed in lockstep with the schedule, one row per game.
        if (index < len(list_week) and list_week[index]["Week"] == week):
            chancea = GetPercent(list_week[index]["ChanceA"])
            chanceb = GetPercent(list_week[index]["ChanceB"])
            abbra = list_week[index]["AbbrA"]
            abbrb = list_week[index]["AbbrB"]
            teama = list_week[index]["TeamA"]
            teamb = list_week[index]["TeamB"]
            index += 1
        scorea, scoreb = GetActualScores(abbra, teama, abbrb, teamb, item["Score"])
        # NOTE(review): chanceb is only bound inside the if above; it is never
        # evaluated when unbound because chancea == -1 short-circuits the and.
        if ((int(chancea) == 0 and int(chanceb) == 0) or scorea < 0 or scoreb < 0):
            if (teama != "" and teamb != "" and "tickets" not in item["Score"]):
                if (item["Score"].lower() == "canceled"):
                    print ("***\nGame skipped\n\n\t[{0} vs {1}] \n\tabbreviation(s) [{2}] [{3}] Score {4}\n\tcanceled\n***\n"
                        .format(teama, teamb, abbra, abbrb, item["Score"]))
                elif (item["Score"].lower() == "postponed"):
                    print ("***\nGame skipped\n\n\t[{0} vs {1}] \n\tabbreviation(s) [{2}] [{3}] Score {4}\n\tpostponed\n***\n"
                        .format(teama, teamb, abbra, abbrb, item["Score"]))
                else:
                    if (item["Score"] != "?"):
                        print ("***\nGame skipped\n\n\t[{0} vs {1}] \n\tabbreviation(s) [{2}] [{3}] Score {4}\n\treview your merge files\n***\n".format(teama, teamb, abbra, abbrb, item["Score"]))
            skip += 1
        else:
            # A pick is correct when the predicted favorite actually won.
            if (chancea >= 50 and (scorea >= scoreb)):
                correct += 1
            if (chancea < 50 and (scorea < scoreb)):
                correct += 1
    count += 1
    IDX.append(count)
    A.append(week)
    B.append(total)
    C.append(skip)
    D.append(correct)
    E.append(CalcPercent(total, skip, correct))
    print ("week{0} total={1}, skip={2}, correct={3} Percent={4}%".format(week, total, skip, correct, CalcPercent(total, skip, correct)))
    alltotal = alltotal + total
    allskip = allskip + skip
    allcorrect = allcorrect + correct
# Totals row: week 99 is a sentinel meaning "all weeks combined".
count += 1
IDX.append(count)
A.append(99)
B.append(alltotal)
C.append(allskip)
D.append(allcorrect)
E.append(CalcPercent(alltotal, allskip, allcorrect))
print ("====================================================================")
print ("Totals total={0}, skip={1}, correct={2} Percent={3}%".format(alltotal, allskip, allcorrect, CalcPercent(alltotal, allskip, allcorrect)))
print ("====================================================================")
# Persist the results both as JSON (via pandas) and as a CSV spreadsheet.
df=pd.DataFrame(IDX,columns=['Index'])
df['Week']=A
df['Total Games']=B
df['Count Unpredicted']=C
df['Count Correct']=D
df['Percent Correct']=E
file = "{0}results.json".format(saved_path)
with open(file, 'w') as f:
    f.write(df.to_json(orient='index'))
with open(file) as results_json:
    dict_results = json.load(results_json, object_pairs_hook=OrderedDict)
file = "{0}results.csv".format(saved_path)
results_sheet = open(file, 'w', newline='')
csvwriter = csv.writer(results_sheet)
count = 0
for row in dict_results.values():
    if (count == 0):
        header = row.keys()
        csvwriter.writerow(header)
        count += 1
    csvwriter.writerow(row.values())
results_sheet.close()
print ("done.")
| StarcoderdataPython |
3383637 | import enum
class NTStatus(enum.Enum):
STATUS_SUCCESS = 0x00000000
STATUS_WAIT_0 = 0x00000000
STATUS_WAIT_1 = 0x00000001
STATUS_WAIT_2 = 0x00000002
STATUS_WAIT_3 = 0x00000003
STATUS_WAIT_63 = 0x0000003F
STATUS_ABANDONED = 0x00000080
STATUS_ABANDONED_WAIT_0 = 0x00000080
STATUS_ABANDONED_WAIT_63 = 0x000000BF
STATUS_USER_APC = 0x000000C0
STATUS_KERNEL_APC = 0x00000100
STATUS_ALERTED = 0x00000101
STATUS_TIMEOUT = 0x00000102
STATUS_PENDING = 0x00000103
STATUS_REPARSE = 0x00000104
STATUS_MORE_ENTRIES = 0x00000105
STATUS_NOT_ALL_ASSIGNED = 0x00000106
STATUS_SOME_NOT_MAPPED = 0x00000107
STATUS_OPLOCK_BREAK_IN_PROGRESS = 0x00000108
STATUS_VOLUME_MOUNTED = 0x00000109
STATUS_RXACT_COMMITTED = 0x0000010A
STATUS_NOTIFY_CLEANUP = 0x0000010B
STATUS_NOTIFY_ENUM_DIR = 0x0000010C
STATUS_NO_QUOTAS_FOR_ACCOUNT = 0x0000010D
STATUS_PRIMARY_TRANSPORT_CONNECT_FAILED = 0x0000010E
STATUS_PAGE_FAULT_TRANSITION = 0x00000110
STATUS_PAGE_FAULT_DEMAND_ZERO = 0x00000111
STATUS_PAGE_FAULT_COPY_ON_WRITE = 0x00000112
STATUS_PAGE_FAULT_GUARD_PAGE = 0x00000113
STATUS_PAGE_FAULT_PAGING_FILE = 0x00000114
STATUS_CACHE_PAGE_LOCKED = 0x00000115
STATUS_CRASH_DUMP = 0x00000116
STATUS_BUFFER_ALL_ZEROS = 0x00000117
STATUS_REPARSE_OBJECT = 0x00000118
STATUS_RESOURCE_REQUIREMENTS_CHANGED = 0x00000119
STATUS_TRANSLATION_COMPLETE = 0x00000120
STATUS_DS_MEMBERSHIP_EVALUATED_LOCALLY = 0x00000121
STATUS_NOTHING_TO_TERMINATE = 0x00000122
STATUS_PROCESS_NOT_IN_JOB = 0x00000123
STATUS_PROCESS_IN_JOB = 0x00000124
STATUS_VOLSNAP_HIBERNATE_READY = 0x00000125
STATUS_FSFILTER_OP_COMPLETED_SUCCESSFULLY = 0x00000126
STATUS_INTERRUPT_VECTOR_ALREADY_CONNECTED = 0x00000127
STATUS_INTERRUPT_STILL_CONNECTED = 0x00000128
STATUS_PROCESS_CLONED = 0x00000129
STATUS_FILE_LOCKED_WITH_ONLY_READERS = 0x0000012A
STATUS_FILE_LOCKED_WITH_WRITERS = 0x0000012B
STATUS_RESOURCEMANAGER_READ_ONLY = 0x00000202
STATUS_RING_PREVIOUSLY_EMPTY = 0x00000210
STATUS_RING_PREVIOUSLY_FULL = 0x00000211
STATUS_RING_PREVIOUSLY_ABOVE_QUOTA = 0x00000212
STATUS_RING_NEWLY_EMPTY = 0x00000213
STATUS_RING_SIGNAL_OPPOSITE_ENDPOINT = 0x00000214
STATUS_OPLOCK_SWITCHED_TO_NEW_HANDLE = 0x00000215
STATUS_OPLOCK_HANDLE_CLOSED = 0x00000216
STATUS_WAIT_FOR_OPLOCK = 0x00000367
STATUS_FLT_IO_COMPLETE = 0x001C0001
STATUS_DIS_ATTRIBUTE_BUILT = 0x003C0001
STATUS_OBJECT_NAME_EXISTS = 0x40000000
STATUS_THREAD_WAS_SUSPENDED = 0x40000001
STATUS_WORKING_SET_LIMIT_RANGE = 0x40000002
STATUS_IMAGE_NOT_AT_BASE = 0x40000003
STATUS_RXACT_STATE_CREATED = 0x40000004
STATUS_SEGMENT_NOTIFICATION = 0x40000005
STATUS_LOCAL_USER_SESSION_KEY = 0x40000006
STATUS_BAD_CURRENT_DIRECTORY = 0x40000007
STATUS_SERIAL_MORE_WRITES = 0x40000008
STATUS_REGISTRY_RECOVERED = 0x40000009
STATUS_FT_READ_RECOVERY_FROM_BACKUP = 0x4000000A
STATUS_FT_WRITE_RECOVERY = 0x4000000B
STATUS_SERIAL_COUNTER_TIMEOUT = 0x4000000C
STATUS_NULL_LM_PASSWORD = 0x4000000D
STATUS_IMAGE_MACHINE_TYPE_MISMATCH = 0x4000000E
STATUS_RECEIVE_PARTIAL = 0x4000000F
STATUS_RECEIVE_EXPEDITED = 0x40000010
STATUS_RECEIVE_PARTIAL_EXPEDITED = 0x40000011
STATUS_EVENT_DONE = 0x40000012
STATUS_EVENT_PENDING = 0x40000013
STATUS_CHECKING_FILE_SYSTEM = 0x40000014
STATUS_FATAL_APP_EXIT = 0x40000015
STATUS_PREDEFINED_HANDLE = 0x40000016
STATUS_WAS_UNLOCKED = 0x40000017
STATUS_SERVICE_NOTIFICATION = 0x40000018
STATUS_WAS_LOCKED = 0x40000019
STATUS_LOG_HARD_ERROR = 0x4000001A
STATUS_ALREADY_WIN32 = 0x4000001B
STATUS_WX86_UNSIMULATE = 0x4000001C
STATUS_WX86_CONTINUE = 0x4000001D
STATUS_WX86_SINGLE_STEP = 0x4000001E
STATUS_WX86_BREAKPOINT = 0x4000001F
STATUS_WX86_EXCEPTION_CONTINUE = 0x40000020
STATUS_WX86_EXCEPTION_LASTCHANCE = 0x40000021
STATUS_WX86_EXCEPTION_CHAIN = 0x40000022
STATUS_IMAGE_MACHINE_TYPE_MISMATCH_EXE = 0x40000023
STATUS_NO_YIELD_PERFORMED = 0x40000024
STATUS_TIMER_RESUME_IGNORED = 0x40000025
STATUS_ARBITRATION_UNHANDLED = 0x40000026
STATUS_CARDBUS_NOT_SUPPORTED = 0x40000027
STATUS_WX86_CREATEWX86TIB = 0x40000028
STATUS_MP_PROCESSOR_MISMATCH = 0x40000029
STATUS_HIBERNATED = 0x4000002A
STATUS_RESUME_HIBERNATION = 0x4000002B
STATUS_FIRMWARE_UPDATED = 0x4000002C
STATUS_DRIVERS_LEAKING_LOCKED_PAGES = 0x4000002D
STATUS_MESSAGE_RETRIEVED = 0x4000002E
STATUS_SYSTEM_POWERSTATE_TRANSITION = 0x4000002F
STATUS_ALPC_CHECK_COMPLETION_LIST = 0x40000030
STATUS_SYSTEM_POWERSTATE_COMPLEX_TRANSITION = 0x40000031
STATUS_ACCESS_AUDIT_BY_POLICY = 0x40000032
STATUS_ABANDON_HIBERFILE = 0x40000033
STATUS_BIZRULES_NOT_ENABLED = 0x40000034
STATUS_HEURISTIC_DAMAGE_POSSIBLE = 0x40190001
STATUS_GUARD_PAGE_VIOLATION = 0x80000001
STATUS_DATATYPE_MISALIGNMENT = 0x80000002
STATUS_BREAKPOINT = 0x80000003
STATUS_SINGLE_STEP = 0x80000004
STATUS_BUFFER_OVERFLOW = 0x80000005
STATUS_NO_MORE_FILES = 0x80000006
STATUS_WAKE_SYSTEM_DEBUGGER = 0x80000007
STATUS_HANDLES_CLOSED = 0x8000000A
STATUS_NO_INHERITANCE = 0x8000000B
STATUS_GUID_SUBSTITUTION_MADE = 0x8000000C
STATUS_PARTIAL_COPY = 0x8000000D
STATUS_DEVICE_PAPER_EMPTY = 0x8000000E
STATUS_DEVICE_POWERED_OFF = 0x8000000F
STATUS_DEVICE_OFF_LINE = 0x80000010
STATUS_DEVICE_BUSY = 0x80000011
STATUS_NO_MORE_EAS = 0x80000012
STATUS_INVALID_EA_NAME = 0x80000013
STATUS_EA_LIST_INCONSISTENT = 0x80000014
STATUS_INVALID_EA_FLAG = 0x80000015
STATUS_VERIFY_REQUIRED = 0x80000016
STATUS_EXTRANEOUS_INFORMATION = 0x80000017
STATUS_RXACT_COMMIT_NECESSARY = 0x80000018
STATUS_NO_MORE_ENTRIES = 0x8000001A
STATUS_FILEMARK_DETECTED = 0x8000001B
STATUS_MEDIA_CHANGED = 0x8000001C
STATUS_BUS_RESET = 0x8000001D
STATUS_END_OF_MEDIA = 0x8000001E
STATUS_BEGINNING_OF_MEDIA = 0x8000001F
STATUS_MEDIA_CHECK = 0x80000020
STATUS_SETMARK_DETECTED = 0x80000021
STATUS_NO_DATA_DETECTED = 0x80000022
STATUS_REDIRECTOR_HAS_OPEN_HANDLES = 0x80000023
STATUS_SERVER_HAS_OPEN_HANDLES = 0x80000024
STATUS_ALREADY_DISCONNECTED = 0x80000025
STATUS_LONGJUMP = 0x80000026
STATUS_CLEANER_CARTRIDGE_INSTALLED = 0x80000027
STATUS_PLUGPLAY_QUERY_VETOED = 0x80000028
STATUS_UNWIND_CONSOLIDATE = 0x80000029
STATUS_REGISTRY_HIVE_RECOVERED = 0x8000002A
STATUS_DLL_MIGHT_BE_INSECURE = 0x8000002B
STATUS_DLL_MIGHT_BE_INCOMPATIBLE = 0x8000002C
STATUS_STOPPED_ON_SYMLINK = 0x8000002D
STATUS_CANNOT_GRANT_REQUESTED_OPLOCK = 0x8000002E
STATUS_NO_ACE_CONDITION = 0x8000002F
STATUS_CLUSTER_NODE_ALREADY_UP = 0x80130001
STATUS_CLUSTER_NODE_ALREADY_DOWN = 0x80130002
STATUS_CLUSTER_NETWORK_ALREADY_ONLINE = 0x80130003
STATUS_CLUSTER_NETWORK_ALREADY_OFFLINE = 0x80130004
STATUS_CLUSTER_NODE_ALREADY_MEMBER = 0x80130005
STATUS_FLT_BUFFER_TOO_SMALL = 0x801C0001
STATUS_FVE_PARTIAL_METADATA = 0x80210001
STATUS_FVE_TRANSIENT_STATE = 0x80210002
STATUS_UNSUCCESSFUL = 0xC0000001
STATUS_NOT_IMPLEMENTED = 0xC0000002
STATUS_INVALID_INFO_CLASS = 0xC0000003
STATUS_INFO_LENGTH_MISMATCH = 0xC0000004
STATUS_ACCESS_VIOLATION = 0xC0000005
STATUS_IN_PAGE_ERROR = 0xC0000006
STATUS_PAGEFILE_QUOTA = 0xC0000007
STATUS_INVALID_HANDLE = 0xC0000008
STATUS_BAD_INITIAL_STACK = 0xC0000009
STATUS_BAD_INITIAL_PC = 0xC000000A
STATUS_INVALID_CID = 0xC000000B
STATUS_TIMER_NOT_CANCELED = 0xC000000C
STATUS_INVALID_PARAMETER = 0xC000000D
STATUS_NO_SUCH_DEVICE = 0xC000000E
STATUS_NO_SUCH_FILE = 0xC000000F
STATUS_INVALID_DEVICE_REQUEST = 0xC0000010
STATUS_END_OF_FILE = 0xC0000011
STATUS_WRONG_VOLUME = 0xC0000012
STATUS_NO_MEDIA_IN_DEVICE = 0xC0000013
STATUS_UNRECOGNIZED_MEDIA = 0xC0000014
STATUS_NONEXISTENT_SECTOR = 0xC0000015
STATUS_MORE_PROCESSING_REQUIRED = 0xC0000016
STATUS_NO_MEMORY = 0xC0000017
STATUS_CONFLICTING_ADDRESSES = 0xC0000018
STATUS_NOT_MAPPED_VIEW = 0xC0000019
STATUS_UNABLE_TO_FREE_VM = 0xC000001A
STATUS_UNABLE_TO_DELETE_SECTION = 0xC000001B
STATUS_INVALID_SYSTEM_SERVICE = 0xC000001C
STATUS_ILLEGAL_INSTRUCTION = 0xC000001D
STATUS_INVALID_LOCK_SEQUENCE = 0xC000001E
STATUS_INVALID_VIEW_SIZE = 0xC000001F
STATUS_INVALID_FILE_FOR_SECTION = 0xC0000020
STATUS_ALREADY_COMMITTED = 0xC0000021
STATUS_ACCESS_DENIED = 0xC0000022
STATUS_BUFFER_TOO_SMALL = 0xC0000023
STATUS_OBJECT_TYPE_MISMATCH = 0xC0000024
STATUS_NONCONTINUABLE_EXCEPTION = 0xC0000025
STATUS_INVALID_DISPOSITION = 0xC0000026
STATUS_UNWIND = 0xC0000027
STATUS_BAD_STACK = 0xC0000028
STATUS_INVALID_UNWIND_TARGET = 0xC0000029
STATUS_NOT_LOCKED = 0xC000002A
STATUS_PARITY_ERROR = 0xC000002B
STATUS_UNABLE_TO_DECOMMIT_VM = 0xC000002C
STATUS_NOT_COMMITTED = 0xC000002D
STATUS_INVALID_PORT_ATTRIBUTES = 0xC000002E
STATUS_PORT_MESSAGE_TOO_LONG = 0xC000002F
STATUS_INVALID_PARAMETER_MIX = 0xC0000030
STATUS_INVALID_QUOTA_LOWER = 0xC0000031
STATUS_DISK_CORRUPT_ERROR = 0xC0000032
STATUS_OBJECT_NAME_INVALID = 0xC0000033
STATUS_OBJECT_NAME_NOT_FOUND = 0xC0000034
STATUS_OBJECT_NAME_COLLISION = 0xC0000035
STATUS_PORT_DISCONNECTED = 0xC0000037
STATUS_DEVICE_ALREADY_ATTACHED = 0xC0000038
STATUS_OBJECT_PATH_INVALID = 0xC0000039
STATUS_OBJECT_PATH_NOT_FOUND = 0xC000003A
STATUS_OBJECT_PATH_SYNTAX_BAD = 0xC000003B
STATUS_DATA_OVERRUN = 0xC000003C
STATUS_DATA_LATE_ERROR = 0xC000003D
STATUS_DATA_ERROR = 0xC000003E
STATUS_CRC_ERROR = 0xC000003F
STATUS_SECTION_TOO_BIG = 0xC0000040
STATUS_PORT_CONNECTION_REFUSED = 0xC0000041
STATUS_INVALID_PORT_HANDLE = 0xC0000042
STATUS_SHARING_VIOLATION = 0xC0000043
STATUS_QUOTA_EXCEEDED = 0xC0000044
STATUS_INVALID_PAGE_PROTECTION = 0xC0000045
STATUS_MUTANT_NOT_OWNED = 0xC0000046
STATUS_SEMAPHORE_LIMIT_EXCEEDED = 0xC0000047
STATUS_PORT_ALREADY_SET = 0xC0000048
STATUS_SECTION_NOT_IMAGE = 0xC0000049
STATUS_SUSPEND_COUNT_EXCEEDED = 0xC000004A
STATUS_THREAD_IS_TERMINATING = 0xC000004B
STATUS_BAD_WORKING_SET_LIMIT = 0xC000004C
STATUS_INCOMPATIBLE_FILE_MAP = 0xC000004D
STATUS_SECTION_PROTECTION = 0xC000004E
STATUS_EAS_NOT_SUPPORTED = 0xC000004F
STATUS_EA_TOO_LARGE = 0xC0000050
STATUS_NONEXISTENT_EA_ENTRY = 0xC0000051
STATUS_NO_EAS_ON_FILE = 0xC0000052
STATUS_EA_CORRUPT_ERROR = 0xC0000053
STATUS_FILE_LOCK_CONFLICT = 0xC0000054
STATUS_LOCK_NOT_GRANTED = 0xC0000055
STATUS_DELETE_PENDING = 0xC0000056
STATUS_CTL_FILE_NOT_SUPPORTED = 0xC0000057
STATUS_UNKNOWN_REVISION = 0xC0000058
STATUS_REVISION_MISMATCH = 0xC0000059
STATUS_INVALID_OWNER = 0xC000005A
STATUS_INVALID_PRIMARY_GROUP = 0xC000005B
STATUS_NO_IMPERSONATION_TOKEN = 0xC000005C
STATUS_CANT_DISABLE_MANDATORY = 0xC000005D
STATUS_NO_LOGON_SERVERS = 0xC000005E
STATUS_NO_SUCH_LOGON_SESSION = 0xC000005F
STATUS_NO_SUCH_PRIVILEGE = 0xC0000060
STATUS_PRIVILEGE_NOT_HELD = 0xC0000061
STATUS_INVALID_ACCOUNT_NAME = 0xC0000062
STATUS_USER_EXISTS = 0xC0000063
STATUS_NO_SUCH_USER = 0xC0000064
STATUS_GROUP_EXISTS = 0xC0000065
STATUS_NO_SUCH_GROUP = 0xC0000066
STATUS_MEMBER_IN_GROUP = 0xC0000067
STATUS_MEMBER_NOT_IN_GROUP = 0xC0000068
STATUS_LAST_ADMIN = 0xC0000069
# Values restored from MS-ERREF (NTSTATUS): the original file had these hex
# literals redacted, which is a syntax error. They fill the gap between
# STATUS_LAST_ADMIN (0xC0000069) and STATUS_LOGON_FAILURE (0xC000006D).
STATUS_WRONG_PASSWORD = 0xC000006A
STATUS_ILL_FORMED_PASSWORD = 0xC000006B
STATUS_PASSWORD_RESTRICTION = 0xC000006C
STATUS_LOGON_FAILURE = 0xC000006D
STATUS_ACCOUNT_RESTRICTION = 0xC000006E
STATUS_INVALID_LOGON_HOURS = 0xC000006F
STATUS_INVALID_WORKSTATION = 0xC0000070
STATUS_PASSWORD_EXPIRED = 0xC<PASSWORD>
STATUS_ACCOUNT_DISABLED = 0xC0000072
STATUS_NONE_MAPPED = 0xC0000073
STATUS_TOO_MANY_LUIDS_REQUESTED = 0xC0000074
STATUS_LUIDS_EXHAUSTED = 0xC0000075
STATUS_INVALID_SUB_AUTHORITY = 0xC0000076
STATUS_INVALID_ACL = 0xC0000077
STATUS_INVALID_SID = 0xC0000078
STATUS_INVALID_SECURITY_DESCR = 0xC0000079
STATUS_PROCEDURE_NOT_FOUND = 0xC000007A
STATUS_INVALID_IMAGE_FORMAT = 0xC000007B
STATUS_NO_TOKEN = 0xC000007C
STATUS_BAD_INHERITANCE_ACL = 0xC000007D
STATUS_RANGE_NOT_LOCKED = 0xC000007E
STATUS_DISK_FULL = 0xC000007F
STATUS_SERVER_DISABLED = 0xC0000080
STATUS_SERVER_NOT_DISABLED = 0xC0000081
STATUS_TOO_MANY_GUIDS_REQUESTED = 0xC0000082
STATUS_GUIDS_EXHAUSTED = 0xC0000083
STATUS_INVALID_ID_AUTHORITY = 0xC0000084
STATUS_AGENTS_EXHAUSTED = 0xC0000085
STATUS_INVALID_VOLUME_LABEL = 0xC0000086
STATUS_SECTION_NOT_EXTENDED = 0xC0000087
STATUS_NOT_MAPPED_DATA = 0xC0000088
STATUS_RESOURCE_DATA_NOT_FOUND = 0xC0000089
STATUS_RESOURCE_TYPE_NOT_FOUND = 0xC000008A
STATUS_RESOURCE_NAME_NOT_FOUND = 0xC000008B
STATUS_ARRAY_BOUNDS_EXCEEDED = 0xC000008C
STATUS_FLOAT_DENORMAL_OPERAND = 0xC000008D
STATUS_FLOAT_DIVIDE_BY_ZERO = 0xC000008E
STATUS_FLOAT_INEXACT_RESULT = 0xC000008F
STATUS_FLOAT_INVALID_OPERATION = 0xC0000090
STATUS_FLOAT_OVERFLOW = 0xC0000091
STATUS_FLOAT_STACK_CHECK = 0xC0000092
STATUS_FLOAT_UNDERFLOW = 0xC0000093
STATUS_INTEGER_DIVIDE_BY_ZERO = 0xC0000094
STATUS_INTEGER_OVERFLOW = 0xC0000095
STATUS_PRIVILEGED_INSTRUCTION = 0xC0000096
STATUS_TOO_MANY_PAGING_FILES = 0xC0000097
STATUS_FILE_INVALID = 0xC0000098
STATUS_ALLOTTED_SPACE_EXCEEDED = 0xC0000099
STATUS_INSUFFICIENT_RESOURCES = 0xC000009A
STATUS_DFS_EXIT_PATH_FOUND = 0xC000009B
STATUS_DEVICE_DATA_ERROR = 0xC000009C
STATUS_DEVICE_NOT_CONNECTED = 0xC000009D
STATUS_DEVICE_POWER_FAILURE = 0xC000009E
STATUS_FREE_VM_NOT_AT_BASE = 0xC000009F
STATUS_MEMORY_NOT_ALLOCATED = 0xC00000A0
STATUS_WORKING_SET_QUOTA = 0xC00000A1
STATUS_MEDIA_WRITE_PROTECTED = 0xC00000A2
STATUS_DEVICE_NOT_READY = 0xC00000A3
STATUS_INVALID_GROUP_ATTRIBUTES = 0xC00000A4
STATUS_BAD_IMPERSONATION_LEVEL = 0xC00000A5
STATUS_CANT_OPEN_ANONYMOUS = 0xC00000A6
STATUS_BAD_VALIDATION_CLASS = 0xC00000A7
STATUS_BAD_TOKEN_TYPE = 0xC00000A8
STATUS_BAD_MASTER_BOOT_RECORD = 0xC00000A9
STATUS_INSTRUCTION_MISALIGNMENT = 0xC00000AA
STATUS_INSTANCE_NOT_AVAILABLE = 0xC00000AB
STATUS_PIPE_NOT_AVAILABLE = 0xC00000AC
STATUS_INVALID_PIPE_STATE = 0xC00000AD
STATUS_PIPE_BUSY = 0xC00000AE
STATUS_ILLEGAL_FUNCTION = 0xC00000AF
STATUS_PIPE_DISCONNECTED = 0xC00000B0
STATUS_PIPE_CLOSING = 0xC00000B1
STATUS_PIPE_CONNECTED = 0xC00000B2
STATUS_PIPE_LISTENING = 0xC00000B3
STATUS_INVALID_READ_MODE = 0xC00000B4
STATUS_IO_TIMEOUT = 0xC00000B5
STATUS_FILE_FORCED_CLOSED = 0xC00000B6
STATUS_PROFILING_NOT_STARTED = 0xC00000B7
STATUS_PROFILING_NOT_STOPPED = 0xC00000B8
STATUS_COULD_NOT_INTERPRET = 0xC00000B9
STATUS_FILE_IS_A_DIRECTORY = 0xC00000BA
STATUS_NOT_SUPPORTED = 0xC00000BB
STATUS_REMOTE_NOT_LISTENING = 0xC00000BC
STATUS_DUPLICATE_NAME = 0xC00000BD
STATUS_BAD_NETWORK_PATH = 0xC00000BE
STATUS_NETWORK_BUSY = 0xC00000BF
STATUS_DEVICE_DOES_NOT_EXIST = 0xC00000C0
STATUS_TOO_MANY_COMMANDS = 0xC00000C1
STATUS_ADAPTER_HARDWARE_ERROR = 0xC00000C2
STATUS_INVALID_NETWORK_RESPONSE = 0xC00000C3
STATUS_UNEXPECTED_NETWORK_ERROR = 0xC00000C4
STATUS_BAD_REMOTE_ADAPTER = 0xC00000C5
STATUS_PRINT_QUEUE_FULL = 0xC00000C6
STATUS_NO_SPOOL_SPACE = 0xC00000C7
STATUS_PRINT_CANCELLED = 0xC00000C8
STATUS_NETWORK_NAME_DELETED = 0xC00000C9
STATUS_NETWORK_ACCESS_DENIED = 0xC00000CA
STATUS_BAD_DEVICE_TYPE = 0xC00000CB
STATUS_BAD_NETWORK_NAME = 0xC00000CC
STATUS_TOO_MANY_NAMES = 0xC00000CD
STATUS_TOO_MANY_SESSIONS = 0xC00000CE
STATUS_SHARING_PAUSED = 0xC00000CF
STATUS_REQUEST_NOT_ACCEPTED = 0xC00000D0
STATUS_REDIRECTOR_PAUSED = 0xC00000D1
STATUS_NET_WRITE_FAULT = 0xC00000D2
STATUS_PROFILING_AT_LIMIT = 0xC00000D3
STATUS_NOT_SAME_DEVICE = 0xC00000D4
STATUS_FILE_RENAMED = 0xC00000D5
STATUS_VIRTUAL_CIRCUIT_CLOSED = 0xC00000D6
STATUS_NO_SECURITY_ON_OBJECT = 0xC00000D7
STATUS_CANT_WAIT = 0xC00000D8
STATUS_PIPE_EMPTY = 0xC00000D9
STATUS_CANT_ACCESS_DOMAIN_INFO = 0xC00000DA
STATUS_CANT_TERMINATE_SELF = 0xC00000DB
STATUS_INVALID_SERVER_STATE = 0xC00000DC
STATUS_INVALID_DOMAIN_STATE = 0xC00000DD
STATUS_INVALID_DOMAIN_ROLE = 0xC00000DE
STATUS_NO_SUCH_DOMAIN = 0xC00000DF
STATUS_DOMAIN_EXISTS = 0xC00000E0
STATUS_DOMAIN_LIMIT_EXCEEDED = 0xC00000E1
STATUS_OPLOCK_NOT_GRANTED = 0xC00000E2
STATUS_INVALID_OPLOCK_PROTOCOL = 0xC00000E3
STATUS_INTERNAL_DB_CORRUPTION = 0xC00000E4
STATUS_INTERNAL_ERROR = 0xC00000E5
STATUS_GENERIC_NOT_MAPPED = 0xC00000E6
STATUS_BAD_DESCRIPTOR_FORMAT = 0xC00000E7
STATUS_INVALID_USER_BUFFER = 0xC00000E8
STATUS_UNEXPECTED_IO_ERROR = 0xC00000E9
STATUS_UNEXPECTED_MM_CREATE_ERR = 0xC00000EA
STATUS_UNEXPECTED_MM_MAP_ERROR = 0xC00000EB
STATUS_UNEXPECTED_MM_EXTEND_ERR = 0xC00000EC
STATUS_NOT_LOGON_PROCESS = 0xC00000ED
STATUS_LOGON_SESSION_EXISTS = 0xC00000EE
STATUS_INVALID_PARAMETER_1 = 0xC00000EF
STATUS_INVALID_PARAMETER_2 = 0xC00000F0
STATUS_INVALID_PARAMETER_3 = 0xC00000F1
STATUS_INVALID_PARAMETER_4 = 0xC00000F2
STATUS_INVALID_PARAMETER_5 = 0xC00000F3
STATUS_INVALID_PARAMETER_6 = 0xC00000F4
STATUS_INVALID_PARAMETER_7 = 0xC00000F5
STATUS_INVALID_PARAMETER_8 = 0xC00000F6
STATUS_INVALID_PARAMETER_9 = 0xC00000F7
STATUS_INVALID_PARAMETER_10 = 0xC00000F8
STATUS_INVALID_PARAMETER_11 = 0xC00000F9
STATUS_INVALID_PARAMETER_12 = 0xC00000FA
STATUS_REDIRECTOR_NOT_STARTED = 0xC00000FB
STATUS_REDIRECTOR_STARTED = 0xC00000FC
STATUS_STACK_OVERFLOW = 0xC00000FD
STATUS_NO_SUCH_PACKAGE = 0xC00000FE
STATUS_BAD_FUNCTION_TABLE = 0xC00000FF
STATUS_VARIABLE_NOT_FOUND = 0xC0000100
STATUS_DIRECTORY_NOT_EMPTY = 0xC0000101
STATUS_FILE_CORRUPT_ERROR = 0xC0000102
STATUS_NOT_A_DIRECTORY = 0xC0000103
STATUS_BAD_LOGON_SESSION_STATE = 0xC0000104
STATUS_LOGON_SESSION_COLLISION = 0xC0000105
STATUS_NAME_TOO_LONG = 0xC0000106
STATUS_FILES_OPEN = 0xC0000107
STATUS_CONNECTION_IN_USE = 0xC0000108
STATUS_MESSAGE_NOT_FOUND = 0xC0000109
STATUS_PROCESS_IS_TERMINATING = 0xC000010A
STATUS_INVALID_LOGON_TYPE = 0xC000010B
STATUS_NO_GUID_TRANSLATION = 0xC000010C
STATUS_CANNOT_IMPERSONATE = 0xC000010D
STATUS_IMAGE_ALREADY_LOADED = 0xC000010E
STATUS_ABIOS_NOT_PRESENT = 0xC000010F
STATUS_ABIOS_LID_NOT_EXIST = 0xC0000110
STATUS_ABIOS_LID_ALREADY_OWNED = 0xC0000111
STATUS_ABIOS_NOT_LID_OWNER = 0xC0000112
STATUS_ABIOS_INVALID_COMMAND = 0xC0000113
STATUS_ABIOS_INVALID_LID = 0xC0000114
STATUS_ABIOS_SELECTOR_NOT_AVAILABLE = 0xC0000115
STATUS_ABIOS_INVALID_SELECTOR = 0xC0000116
STATUS_NO_LDT = 0xC0000117
STATUS_INVALID_LDT_SIZE = 0xC0000118
STATUS_INVALID_LDT_OFFSET = 0xC0000119
STATUS_INVALID_LDT_DESCRIPTOR = 0xC000011A
STATUS_INVALID_IMAGE_NE_FORMAT = 0xC000011B
STATUS_RXACT_INVALID_STATE = 0xC000011C
STATUS_RXACT_COMMIT_FAILURE = 0xC000011D
STATUS_MAPPED_FILE_SIZE_ZERO = 0xC000011E
STATUS_TOO_MANY_OPENED_FILES = 0xC000011F
STATUS_CANCELLED = 0xC0000120
STATUS_CANNOT_DELETE = 0xC0000121
STATUS_INVALID_COMPUTER_NAME = 0xC0000122
STATUS_FILE_DELETED = 0xC0000123
STATUS_SPECIAL_ACCOUNT = 0xC0000124
STATUS_SPECIAL_GROUP = 0xC0000125
STATUS_SPECIAL_USER = 0xC0000126
STATUS_MEMBERS_PRIMARY_GROUP = 0xC0000127
STATUS_FILE_CLOSED = 0xC0000128
STATUS_TOO_MANY_THREADS = 0xC0000129
STATUS_THREAD_NOT_IN_PROCESS = 0xC000012A
STATUS_TOKEN_ALREADY_IN_USE = 0xC000012B
STATUS_PAGEFILE_QUOTA_EXCEEDED = 0xC000012C
STATUS_COMMITMENT_LIMIT = 0xC000012D
STATUS_INVALID_IMAGE_LE_FORMAT = 0xC000012E
STATUS_INVALID_IMAGE_NOT_MZ = 0xC000012F
STATUS_INVALID_IMAGE_PROTECT = 0xC0000130
STATUS_INVALID_IMAGE_WIN_16 = 0xC0000131
STATUS_LOGON_SERVER_CONFLICT = 0xC0000132
STATUS_TIME_DIFFERENCE_AT_DC = 0xC0000133
STATUS_SYNCHRONIZATION_REQUIRED = 0xC0000134
STATUS_DLL_NOT_FOUND = 0xC0000135
STATUS_OPEN_FAILED = 0xC0000136
STATUS_IO_PRIVILEGE_FAILED = 0xC0000137
STATUS_ORDINAL_NOT_FOUND = 0xC0000138
STATUS_ENTRYPOINT_NOT_FOUND = 0xC0000139
STATUS_CONTROL_C_EXIT = 0xC000013A
STATUS_LOCAL_DISCONNECT = 0xC000013B
STATUS_REMOTE_DISCONNECT = 0xC000013C
STATUS_REMOTE_RESOURCES = 0xC000013D
STATUS_LINK_FAILED = 0xC000013E
STATUS_LINK_TIMEOUT = 0xC000013F
STATUS_INVALID_CONNECTION = 0xC0000140
STATUS_INVALID_ADDRESS = 0xC0000141
STATUS_DLL_INIT_FAILED = 0xC0000142
STATUS_MISSING_SYSTEMFILE = 0xC0000143
STATUS_UNHANDLED_EXCEPTION = 0xC0000144
STATUS_APP_INIT_FAILURE = 0xC0000145
STATUS_PAGEFILE_CREATE_FAILED = 0xC0000146
STATUS_NO_PAGEFILE = 0xC0000147
STATUS_INVALID_LEVEL = 0xC0000148
STATUS_WRONG_PASSWORD_CORE = 0xC0000149
STATUS_ILLEGAL_FLOAT_CONTEXT = 0xC000014A
STATUS_PIPE_BROKEN = 0xC000014B
STATUS_REGISTRY_CORRUPT = 0xC000014C
STATUS_REGISTRY_IO_FAILED = 0xC000014D
STATUS_NO_EVENT_PAIR = 0xC000014E
STATUS_UNRECOGNIZED_VOLUME = 0xC000014F
STATUS_SERIAL_NO_DEVICE_INITED = 0xC0000150
STATUS_NO_SUCH_ALIAS = 0xC0000151
STATUS_MEMBER_NOT_IN_ALIAS = 0xC0000152
STATUS_MEMBER_IN_ALIAS = 0xC0000153
STATUS_ALIAS_EXISTS = 0xC0000154
STATUS_LOGON_NOT_GRANTED = 0xC0000155
STATUS_TOO_MANY_SECRETS = 0xC0000156
STATUS_SECRET_TOO_LONG = 0xC0000157
STATUS_INTERNAL_DB_ERROR = 0xC0000158
STATUS_FULLSCREEN_MODE = 0xC0000159
STATUS_TOO_MANY_CONTEXT_IDS = 0xC000015A
STATUS_LOGON_TYPE_NOT_GRANTED = 0xC000015B
STATUS_NOT_REGISTRY_FILE = 0xC000015C
STATUS_NT_CROSS_ENCRYPTION_REQUIRED = 0xC000015D
STATUS_DOMAIN_CTRLR_CONFIG_ERROR = 0xC000015E
STATUS_FT_MISSING_MEMBER = 0xC000015F
STATUS_ILL_FORMED_SERVICE_ENTRY = 0xC0000160
STATUS_ILLEGAL_CHARACTER = 0xC0000161
STATUS_UNMAPPABLE_CHARACTER = 0xC0000162
STATUS_UNDEFINED_CHARACTER = 0xC0000163
STATUS_FLOPPY_VOLUME = 0xC0000164
STATUS_FLOPPY_ID_MARK_NOT_FOUND = 0xC0000165
STATUS_FLOPPY_WRONG_CYLINDER = 0xC0000166
STATUS_FLOPPY_UNKNOWN_ERROR = 0xC0000167
STATUS_FLOPPY_BAD_REGISTERS = 0xC0000168
STATUS_DISK_RECALIBRATE_FAILED = 0xC0000169
STATUS_DISK_OPERATION_FAILED = 0xC000016A
STATUS_DISK_RESET_FAILED = 0xC000016B
STATUS_SHARED_IRQ_BUSY = 0xC000016C
STATUS_FT_ORPHANING = 0xC000016D
STATUS_BIOS_FAILED_TO_CONNECT_INTERRUPT = 0xC000016E
STATUS_PARTITION_FAILURE = 0xC0000172
STATUS_INVALID_BLOCK_LENGTH = 0xC0000173
STATUS_DEVICE_NOT_PARTITIONED = 0xC0000174
STATUS_UNABLE_TO_LOCK_MEDIA = 0xC0000175
STATUS_UNABLE_TO_UNLOAD_MEDIA = 0xC0000176
STATUS_EOM_OVERFLOW = 0xC0000177
STATUS_NO_MEDIA = 0xC0000178
STATUS_NO_SUCH_MEMBER = 0xC000017A
STATUS_INVALID_MEMBER = 0xC000017B
STATUS_KEY_DELETED = 0xC000017C
STATUS_NO_LOG_SPACE = 0xC000017D
STATUS_TOO_MANY_SIDS = 0xC000017E
STATUS_LM_CROSS_ENCRYPTION_REQUIRED = 0xC000017F
STATUS_KEY_HAS_CHILDREN = 0xC0000180
STATUS_CHILD_MUST_BE_VOLATILE = 0xC0000181
STATUS_DEVICE_CONFIGURATION_ERROR = 0xC0000182
STATUS_DRIVER_INTERNAL_ERROR = 0xC0000183
STATUS_INVALID_DEVICE_STATE = 0xC0000184
STATUS_IO_DEVICE_ERROR = 0xC0000185
STATUS_DEVICE_PROTOCOL_ERROR = 0xC0000186
STATUS_BACKUP_CONTROLLER = 0xC0000187
STATUS_LOG_FILE_FULL = 0xC0000188
STATUS_TOO_LATE = 0xC0000189
STATUS_NO_TRUST_LSA_SECRET = 0xC000018A
STATUS_NO_TRUST_SAM_ACCOUNT = 0xC000018B
STATUS_TRUSTED_DOMAIN_FAILURE = 0xC000018C
STATUS_TRUSTED_RELATIONSHIP_FAILURE = 0xC000018D
STATUS_EVENTLOG_FILE_CORRUPT = 0xC000018E
STATUS_EVENTLOG_CANT_START = 0xC000018F
STATUS_TRUST_FAILURE = 0xC0000190
STATUS_MUTANT_LIMIT_EXCEEDED = 0xC0000191
STATUS_NETLOGON_NOT_STARTED = 0xC0000192
STATUS_ACCOUNT_EXPIRED = 0xC0000193
STATUS_POSSIBLE_DEADLOCK = 0xC0000194
STATUS_NETWORK_CREDENTIAL_CONFLICT = 0xC0000195
STATUS_REMOTE_SESSION_LIMIT = 0xC0000196
STATUS_EVENTLOG_FILE_CHANGED = 0xC0000197
STATUS_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT = 0xC0000198
STATUS_NOLOGON_WORKSTATION_TRUST_ACCOUNT = 0xC0000199
STATUS_NOLOGON_SERVER_TRUST_ACCOUNT = 0xC000019A
STATUS_DOMAIN_TRUST_INCONSISTENT = 0xC000019B
STATUS_FS_DRIVER_REQUIRED = 0xC000019C
STATUS_IMAGE_ALREADY_LOADED_AS_DLL = 0xC000019D
STATUS_INCOMPATIBLE_WITH_GLOBAL_SHORT_NAME_REGISTRY_SETTING = 0xC000019E
STATUS_SHORT_NAMES_NOT_ENABLED_ON_VOLUME = 0xC000019F
STATUS_SECURITY_STREAM_IS_INCONSISTENT = 0xC00001A0
STATUS_INVALID_LOCK_RANGE = 0xC00001A1
STATUS_INVALID_ACE_CONDITION = 0xC00001A2
STATUS_IMAGE_SUBSYSTEM_NOT_PRESENT = 0xC00001A3
STATUS_NOTIFICATION_GUID_ALREADY_DEFINED = 0xC00001A4
STATUS_NETWORK_OPEN_RESTRICTION = 0xC0000201
STATUS_NO_USER_SESSION_KEY = 0xC0000202
STATUS_USER_SESSION_DELETED = 0xC0000203
STATUS_RESOURCE_LANG_NOT_FOUND = 0xC0000204
STATUS_INSUFF_SERVER_RESOURCES = 0xC0000205
STATUS_INVALID_BUFFER_SIZE = 0xC0000206
STATUS_INVALID_ADDRESS_COMPONENT = 0xC0000207
STATUS_INVALID_ADDRESS_WILDCARD = 0xC0000208
STATUS_TOO_MANY_ADDRESSES = 0xC0000209
STATUS_ADDRESS_ALREADY_EXISTS = 0xC000020A
STATUS_ADDRESS_CLOSED = 0xC000020B
STATUS_CONNECTION_DISCONNECTED = 0xC000020C
STATUS_CONNECTION_RESET = 0xC000020D
STATUS_TOO_MANY_NODES = 0xC000020E
STATUS_TRANSACTION_ABORTED = 0xC000020F
STATUS_TRANSACTION_TIMED_OUT = 0xC0000210
STATUS_TRANSACTION_NO_RELEASE = 0xC0000211
STATUS_TRANSACTION_NO_MATCH = 0xC0000212
STATUS_TRANSACTION_RESPONDED = 0xC0000213
STATUS_TRANSACTION_INVALID_ID = 0xC0000214
STATUS_TRANSACTION_INVALID_TYPE = 0xC0000215
STATUS_NOT_SERVER_SESSION = 0xC0000216
STATUS_NOT_CLIENT_SESSION = 0xC0000217
STATUS_CANNOT_LOAD_REGISTRY_FILE = 0xC0000218
STATUS_DEBUG_ATTACH_FAILED = 0xC0000219
STATUS_SYSTEM_PROCESS_TERMINATED = 0xC000021A
STATUS_DATA_NOT_ACCEPTED = 0xC000021B
STATUS_NO_BROWSER_SERVERS_FOUND = 0xC000021C
STATUS_VDM_HARD_ERROR = 0xC000021D
STATUS_DRIVER_CANCEL_TIMEOUT = 0xC000021E
STATUS_REPLY_MESSAGE_MISMATCH = 0xC000021F
STATUS_MAPPED_ALIGNMENT = 0xC0000220
STATUS_IMAGE_CHECKSUM_MISMATCH = 0xC0000221
STATUS_LOST_WRITEBEHIND_DATA = 0xC0000222
STATUS_CLIENT_SERVER_PARAMETERS_INVALID = 0xC0000223
STATUS_PASSWORD_MUST_CHANGE = 0xC0000224
STATUS_NOT_FOUND = 0xC0000225
STATUS_NOT_TINY_STREAM = 0xC0000226
STATUS_RECOVERY_FAILURE = 0xC0000227
STATUS_STACK_OVERFLOW_READ = 0xC0000228
STATUS_FAIL_CHECK = 0xC0000229
STATUS_DUPLICATE_OBJECTID = 0xC000022A
STATUS_OBJECTID_EXISTS = 0xC000022B
STATUS_CONVERT_TO_LARGE = 0xC000022C
STATUS_RETRY = 0xC000022D
STATUS_FOUND_OUT_OF_SCOPE = 0xC000022E
STATUS_ALLOCATE_BUCKET = 0xC000022F
STATUS_PROPSET_NOT_FOUND = 0xC0000230
STATUS_MARSHALL_OVERFLOW = 0xC0000231
STATUS_INVALID_VARIANT = 0xC0000232
STATUS_DOMAIN_CONTROLLER_NOT_FOUND = 0xC0000233
STATUS_ACCOUNT_LOCKED_OUT = 0xC0000234
STATUS_HANDLE_NOT_CLOSABLE = 0xC0000235
STATUS_CONNECTION_REFUSED = 0xC0000236
STATUS_GRACEFUL_DISCONNECT = 0xC0000237
STATUS_ADDRESS_ALREADY_ASSOCIATED = 0xC0000238
STATUS_ADDRESS_NOT_ASSOCIATED = 0xC0000239
STATUS_CONNECTION_INVALID = 0xC000023A
STATUS_CONNECTION_ACTIVE = 0xC000023B
STATUS_NETWORK_UNREACHABLE = 0xC000023C
STATUS_HOST_UNREACHABLE = 0xC000023D
STATUS_PROTOCOL_UNREACHABLE = 0xC000023E
STATUS_PORT_UNREACHABLE = 0xC000023F
STATUS_REQUEST_ABORTED = 0xC0000240
STATUS_CONNECTION_ABORTED = 0xC0000241
STATUS_BAD_COMPRESSION_BUFFER = 0xC0000242
STATUS_USER_MAPPED_FILE = 0xC0000243
STATUS_AUDIT_FAILED = 0xC0000244
STATUS_TIMER_RESOLUTION_NOT_SET = 0xC0000245
STATUS_CONNECTION_COUNT_LIMIT = 0xC0000246
STATUS_LOGIN_TIME_RESTRICTION = 0xC0000247
STATUS_LOGIN_WKSTA_RESTRICTION = 0xC0000248
STATUS_IMAGE_MP_UP_MISMATCH = 0xC0000249
STATUS_INSUFFICIENT_LOGON_INFO = 0xC0000250
STATUS_BAD_DLL_ENTRYPOINT = 0xC0000251
STATUS_BAD_SERVICE_ENTRYPOINT = 0xC0000252
STATUS_LPC_REPLY_LOST = 0xC0000253
STATUS_IP_ADDRESS_CONFLICT1 = 0xC0000254
STATUS_IP_ADDRESS_CONFLICT2 = 0xC0000255
STATUS_REGISTRY_QUOTA_LIMIT = 0xC0000256
STATUS_PATH_NOT_COVERED = 0xC0000257
STATUS_NO_CALLBACK_ACTIVE = 0xC0000258
STATUS_LICENSE_QUOTA_EXCEEDED = 0xC0000259
STATUS_PWD_TOO_SHORT = 0xC000025A
STATUS_PWD_TOO_RECENT = 0xC000025B
STATUS_PWD_HISTORY_CONFLICT = 0xC000025C
STATUS_PLUGPLAY_NO_DEVICE = 0xC000025E
STATUS_UNSUPPORTED_COMPRESSION = 0xC000025F
STATUS_INVALID_HW_PROFILE = 0xC0000260
STATUS_INVALID_PLUGPLAY_DEVICE_PATH = 0xC0000261
STATUS_DRIVER_ORDINAL_NOT_FOUND = 0xC0000262
STATUS_DRIVER_ENTRYPOINT_NOT_FOUND = 0xC0000263
STATUS_RESOURCE_NOT_OWNED = 0xC0000264
STATUS_TOO_MANY_LINKS = 0xC0000265
STATUS_QUOTA_LIST_INCONSISTENT = 0xC0000266
STATUS_FILE_IS_OFFLINE = 0xC0000267
STATUS_EVALUATION_EXPIRATION = 0xC0000268
STATUS_ILLEGAL_DLL_RELOCATION = 0xC0000269
STATUS_LICENSE_VIOLATION = 0xC000026A
STATUS_DLL_INIT_FAILED_LOGOFF = 0xC000026B
STATUS_DRIVER_UNABLE_TO_LOAD = 0xC000026C
STATUS_DFS_UNAVAILABLE = 0xC000026D
STATUS_VOLUME_DISMOUNTED = 0xC000026E
STATUS_WX86_INTERNAL_ERROR = 0xC000026F
STATUS_WX86_FLOAT_STACK_CHECK = 0xC0000270
STATUS_VALIDATE_CONTINUE = 0xC0000271
STATUS_NO_MATCH = 0xC0000272
STATUS_NO_MORE_MATCHES = 0xC0000273
STATUS_NOT_A_REPARSE_POINT = 0xC0000275
STATUS_IO_REPARSE_TAG_INVALID = 0xC0000276
STATUS_IO_REPARSE_TAG_MISMATCH = 0xC0000277
STATUS_IO_REPARSE_DATA_INVALID = 0xC0000278
STATUS_IO_REPARSE_TAG_NOT_HANDLED = 0xC0000279
STATUS_REPARSE_POINT_NOT_RESOLVED = 0xC0000280
STATUS_DIRECTORY_IS_A_REPARSE_POINT = 0xC0000281
STATUS_RANGE_LIST_CONFLICT = 0xC0000282
STATUS_SOURCE_ELEMENT_EMPTY = 0xC0000283
STATUS_DESTINATION_ELEMENT_FULL = 0xC0000284
STATUS_ILLEGAL_ELEMENT_ADDRESS = 0xC0000285
STATUS_MAGAZINE_NOT_PRESENT = 0xC0000286
STATUS_REINITIALIZATION_NEEDED = 0xC0000287
STATUS_DEVICE_REQUIRES_CLEANING = 0x80000288
STATUS_DEVICE_DOOR_OPEN = 0x80000289
STATUS_ENCRYPTION_FAILED = 0xC000028A
STATUS_DECRYPTION_FAILED = 0xC000028B
STATUS_RANGE_NOT_FOUND = 0xC000028C
STATUS_NO_RECOVERY_POLICY = 0xC000028D
STATUS_NO_EFS = 0xC000028E
STATUS_WRONG_EFS = 0xC000028F
STATUS_NO_USER_KEYS = 0xC0000290
STATUS_FILE_NOT_ENCRYPTED = 0xC0000291
STATUS_NOT_EXPORT_FORMAT = 0xC0000292
STATUS_FILE_ENCRYPTED = 0xC0000293
STATUS_WAKE_SYSTEM = 0x40000294
STATUS_WMI_GUID_NOT_FOUND = 0xC0000295
STATUS_WMI_INSTANCE_NOT_FOUND = 0xC0000296
STATUS_WMI_ITEMID_NOT_FOUND = 0xC0000297
STATUS_WMI_TRY_AGAIN = 0xC0000298
STATUS_SHARED_POLICY = 0xC0000299
STATUS_POLICY_OBJECT_NOT_FOUND = 0xC000029A
STATUS_POLICY_ONLY_IN_DS = 0xC000029B
STATUS_VOLUME_NOT_UPGRADED = 0xC000029C
STATUS_REMOTE_STORAGE_NOT_ACTIVE = 0xC000029D
STATUS_REMOTE_STORAGE_MEDIA_ERROR = 0xC000029E
STATUS_NO_TRACKING_SERVICE = 0xC000029F
STATUS_SERVER_SID_MISMATCH = 0xC00002A0
STATUS_DS_NO_ATTRIBUTE_OR_VALUE = 0xC00002A1
STATUS_DS_INVALID_ATTRIBUTE_SYNTAX = 0xC00002A2
STATUS_DS_ATTRIBUTE_TYPE_UNDEFINED = 0xC00002A3
STATUS_DS_ATTRIBUTE_OR_VALUE_EXISTS = 0xC00002A4
STATUS_DS_BUSY = 0xC00002A5
STATUS_DS_UNAVAILABLE = 0xC00002A6
STATUS_DS_NO_RIDS_ALLOCATED = 0xC00002A7
STATUS_DS_NO_MORE_RIDS = 0xC00002A8
STATUS_DS_INCORRECT_ROLE_OWNER = 0xC00002A9
STATUS_DS_RIDMGR_INIT_ERROR = 0xC00002AA
STATUS_DS_OBJ_CLASS_VIOLATION = 0xC00002AB
STATUS_DS_CANT_ON_NON_LEAF = 0xC00002AC
STATUS_DS_CANT_ON_RDN = 0xC00002AD
STATUS_DS_CANT_MOD_OBJ_CLASS = 0xC00002AE
STATUS_DS_CROSS_DOM_MOVE_FAILED = 0xC00002AF
STATUS_DS_GC_NOT_AVAILABLE = 0xC00002B0
STATUS_DIRECTORY_SERVICE_REQUIRED = 0xC00002B1
STATUS_REPARSE_ATTRIBUTE_CONFLICT = 0xC00002B2
STATUS_CANT_ENABLE_DENY_ONLY = 0xC00002B3
STATUS_FLOAT_MULTIPLE_FAULTS = 0xC00002B4
STATUS_FLOAT_MULTIPLE_TRAPS = 0xC00002B5
STATUS_DEVICE_REMOVED = 0xC00002B6
STATUS_JOURNAL_DELETE_IN_PROGRESS = 0xC00002B7
STATUS_JOURNAL_NOT_ACTIVE = 0xC00002B8
STATUS_NOINTERFACE = 0xC00002B9
STATUS_DS_ADMIN_LIMIT_EXCEEDED = 0xC00002C1
STATUS_DRIVER_FAILED_SLEEP = 0xC00002C2
STATUS_MUTUAL_AUTHENTICATION_FAILED = 0xC00002C3
STATUS_CORRUPT_SYSTEM_FILE = 0xC00002C4
STATUS_DATATYPE_MISALIGNMENT_ERROR = 0xC00002C5
STATUS_WMI_READ_ONLY = 0xC00002C6
STATUS_WMI_SET_FAILURE = 0xC00002C7
STATUS_COMMITMENT_MINIMUM = 0xC00002C8
STATUS_REG_NAT_CONSUMPTION = 0xC00002C9
STATUS_TRANSPORT_FULL = 0xC00002CA
STATUS_DS_SAM_INIT_FAILURE = 0xC00002CB
STATUS_ONLY_IF_CONNECTED = 0xC00002CC
STATUS_DS_SENSITIVE_GROUP_VIOLATION = 0xC00002CD
STATUS_PNP_RESTART_ENUMERATION = 0xC00002CE
STATUS_JOURNAL_ENTRY_DELETED = 0xC00002CF
STATUS_DS_CANT_MOD_PRIMARYGROUPID = 0xC00002D0
STATUS_SYSTEM_IMAGE_BAD_SIGNATURE = 0xC00002D1
STATUS_PNP_REBOOT_REQUIRED = 0xC00002D2
STATUS_POWER_STATE_INVALID = 0xC00002D3
STATUS_DS_INVALID_GROUP_TYPE = 0xC00002D4
STATUS_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN = 0xC00002D5
STATUS_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN = 0xC00002D6
STATUS_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER = 0xC00002D7
STATUS_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER = 0xC00002D8
STATUS_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER = 0xC00002D9
STATUS_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER = 0xC00002DA
STATUS_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER = 0xC00002DB
STATUS_DS_HAVE_PRIMARY_MEMBERS = 0xC00002DC
STATUS_WMI_NOT_SUPPORTED = 0xC00002DD
STATUS_INSUFFICIENT_POWER = 0xC00002DE
STATUS_SAM_NEED_BOOTKEY_PASSWORD = <PASSWORD>
STATUS_SAM_NEED_BOOTKEY_FLOPPY = 0xC00002E0
STATUS_DS_CANT_START = 0xC00002E1
STATUS_DS_INIT_FAILURE = 0xC00002E2
STATUS_SAM_INIT_FAILURE = 0xC00002E3
STATUS_DS_GC_REQUIRED = 0xC00002E4
STATUS_DS_LOCAL_MEMBER_OF_LOCAL_ONLY = 0xC00002E5
STATUS_DS_NO_FPO_IN_UNIVERSAL_GROUPS = 0xC00002E6
STATUS_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED = 0xC00002E7
STATUS_MULTIPLE_FAULT_VIOLATION = 0xC00002E8
STATUS_CURRENT_DOMAIN_NOT_ALLOWED = 0xC00002E9
STATUS_CANNOT_MAKE = 0xC00002EA
STATUS_SYSTEM_SHUTDOWN = 0xC00002EB
STATUS_DS_INIT_FAILURE_CONSOLE = 0xC00002EC
STATUS_DS_SAM_INIT_FAILURE_CONSOLE = 0xC00002ED
STATUS_UNFINISHED_CONTEXT_DELETED = 0xC00002EE
STATUS_NO_TGT_REPLY = 0xC00002EF
STATUS_OBJECTID_NOT_FOUND = 0xC00002F0
STATUS_NO_IP_ADDRESSES = 0xC00002F1
STATUS_WRONG_CREDENTIAL_HANDLE = 0xC00002F2
STATUS_CRYPTO_SYSTEM_INVALID = 0xC00002F3
STATUS_MAX_REFERRALS_EXCEEDED = 0xC00002F4
STATUS_MUST_BE_KDC = 0xC00002F5
STATUS_STRONG_CRYPTO_NOT_SUPPORTED = 0xC00002F6
STATUS_TOO_MANY_PRINCIPALS = 0xC00002F7
STATUS_NO_PA_DATA = 0xC00002F8
STATUS_PKINIT_NAME_MISMATCH = 0xC00002F9
STATUS_SMARTCARD_LOGON_REQUIRED = 0xC00002FA
STATUS_KDC_INVALID_REQUEST = 0xC00002FB
STATUS_KDC_UNABLE_TO_REFER = 0xC00002FC
STATUS_KDC_UNKNOWN_ETYPE = 0xC00002FD
STATUS_SHUTDOWN_IN_PROGRESS = 0xC00002FE
STATUS_SERVER_SHUTDOWN_IN_PROGRESS = 0xC00002FF
STATUS_NOT_SUPPORTED_ON_SBS = 0xC0000300
STATUS_WMI_GUID_DISCONNECTED = 0xC0000301
STATUS_WMI_ALREADY_DISABLED = 0xC0000302
STATUS_WMI_ALREADY_ENABLED = 0xC0000303
STATUS_MFT_TOO_FRAGMENTED = 0xC0000304
STATUS_COPY_PROTECTION_FAILURE = 0xC0000305
STATUS_CSS_AUTHENTICATION_FAILURE = 0xC0000306
STATUS_CSS_KEY_NOT_PRESENT = 0xC0000307
STATUS_CSS_KEY_NOT_ESTABLISHED = 0xC0000308
STATUS_CSS_SCRAMBLED_SECTOR = 0xC0000309
STATUS_CSS_REGION_MISMATCH = 0xC000030A
STATUS_CSS_RESETS_EXHAUSTED = 0xC000030B
STATUS_PKINIT_FAILURE = 0xC0000320
STATUS_SMARTCARD_SUBSYSTEM_FAILURE = 0xC0000321
STATUS_NO_KERB_KEY = 0xC0000322
STATUS_HOST_DOWN = 0xC0000350
STATUS_UNSUPPORTED_PREAUTH = 0xC0000351
STATUS_EFS_ALG_BLOB_TOO_BIG = 0xC0000352
STATUS_PORT_NOT_SET = 0xC0000353
STATUS_DEBUGGER_INACTIVE = 0xC0000354
STATUS_DS_VERSION_CHECK_FAILURE = 0xC0000355
STATUS_AUDITING_DISABLED = 0xC0000356
STATUS_PRENT4_MACHINE_ACCOUNT = 0xC0000357
STATUS_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER = 0xC0000358
STATUS_INVALID_IMAGE_WIN_32 = 0xC0000359
STATUS_INVALID_IMAGE_WIN_64 = 0xC000035A
STATUS_BAD_BINDINGS = 0xC000035B
STATUS_NETWORK_SESSION_EXPIRED = 0xC000035C
STATUS_APPHELP_BLOCK = 0xC000035D
STATUS_ALL_SIDS_FILTERED = 0xC000035E
STATUS_NOT_SAFE_MODE_DRIVER = 0xC000035F
STATUS_ACCESS_DISABLED_BY_POLICY_DEFAULT = 0xC0000361
STATUS_ACCESS_DISABLED_BY_POLICY_PATH = 0xC0000362
STATUS_ACCESS_DISABLED_BY_POLICY_PUBLISHER = 0xC0000363
STATUS_ACCESS_DISABLED_BY_POLICY_OTHER = 0xC0000364
STATUS_FAILED_DRIVER_ENTRY = 0xC0000365
STATUS_DEVICE_ENUMERATION_ERROR = 0xC0000366
STATUS_MOUNT_POINT_NOT_RESOLVED = 0xC0000368
STATUS_INVALID_DEVICE_OBJECT_PARAMETER = 0xC0000369
STATUS_MCA_OCCURED = 0xC000036A
STATUS_DRIVER_BLOCKED_CRITICAL = 0xC000036B
STATUS_DRIVER_BLOCKED = 0xC000036C
STATUS_DRIVER_DATABASE_ERROR = 0xC000036D
STATUS_SYSTEM_HIVE_TOO_LARGE = 0xC000036E
STATUS_INVALID_IMPORT_OF_NON_DLL = 0xC000036F
STATUS_DS_SHUTTING_DOWN = 0x40000370
STATUS_NO_SECRETS = 0xC0000371
STATUS_ACCESS_DISABLED_NO_SAFER_UI_BY_POLICY = 0xC0000372
STATUS_FAILED_STACK_SWITCH = 0xC0000373
STATUS_HEAP_CORRUPTION = 0xC0000374
STATUS_SMARTCARD_WRONG_PIN = 0xC0000380
STATUS_SMARTCARD_CARD_BLOCKED = 0xC0000381
STATUS_SMARTCARD_CARD_NOT_AUTHENTICATED = 0xC0000382
STATUS_SMARTCARD_NO_CARD = 0xC0000383
STATUS_SMARTCARD_NO_KEY_CONTAINER = 0xC0000384
STATUS_SMARTCARD_NO_CERTIFICATE = 0xC0000385
STATUS_SMARTCARD_NO_KEYSET = 0xC0000386
STATUS_SMARTCARD_IO_ERROR = 0xC0000387
STATUS_DOWNGRADE_DETECTED = 0xC0000388
STATUS_SMARTCARD_CERT_REVOKED = 0xC0000389
STATUS_ISSUING_CA_UNTRUSTED = 0xC000038A
STATUS_REVOCATION_OFFLINE_C = 0xC000038B
STATUS_PKINIT_CLIENT_FAILURE = 0xC000038C
STATUS_SMARTCARD_CERT_EXPIRED = 0xC000038D
STATUS_DRIVER_FAILED_PRIOR_UNLOAD = 0xC000038E
STATUS_SMARTCARD_SILENT_CONTEXT = 0xC000038F
STATUS_PER_USER_TRUST_QUOTA_EXCEEDED = 0xC0000401
STATUS_ALL_USER_TRUST_QUOTA_EXCEEDED = 0xC0000402
STATUS_USER_DELETE_TRUST_QUOTA_EXCEEDED = 0xC0000403
STATUS_DS_NAME_NOT_UNIQUE = 0xC0000404
STATUS_DS_DUPLICATE_ID_FOUND = 0xC0000405
STATUS_DS_GROUP_CONVERSION_ERROR = 0xC0000406
STATUS_VOLSNAP_PREPARE_HIBERNATE = 0xC0000407
STATUS_USER2USER_REQUIRED = 0xC0000408
STATUS_STACK_BUFFER_OVERRUN = 0xC0000409
STATUS_NO_S4U_PROT_SUPPORT = 0xC000040A
STATUS_CROSSREALM_DELEGATION_FAILURE = 0xC000040B
STATUS_REVOCATION_OFFLINE_KDC = 0xC000040C
STATUS_ISSUING_CA_UNTRUSTED_KDC = 0xC000040D
STATUS_KDC_CERT_EXPIRED = 0xC000040E
STATUS_KDC_CERT_REVOKED = 0xC000040F
STATUS_PARAMETER_QUOTA_EXCEEDED = 0xC0000410
STATUS_HIBERNATION_FAILURE = 0xC0000411
STATUS_DELAY_LOAD_FAILED = 0xC0000412
STATUS_AUTHENTICATION_FIREWALL_FAILED = 0xC0000413
STATUS_VDM_DISALLOWED = 0xC0000414
STATUS_HUNG_DISPLAY_DRIVER_THREAD = 0xC0000415
STATUS_INSUFFICIENT_RESOURCE_FOR_SPECIFIED_SHARED_SECTION_SIZE = 0xC0000416
STATUS_INVALID_CRUNTIME_PARAMETER = 0xC0000417
STATUS_NTLM_BLOCKED = 0xC0000418
STATUS_DS_SRC_SID_EXISTS_IN_FOREST = 0xC0000419
STATUS_DS_DOMAIN_NAME_EXISTS_IN_FOREST = 0xC000041A
STATUS_DS_FLAT_NAME_EXISTS_IN_FOREST = 0xC000041B
STATUS_INVALID_USER_PRINCIPAL_NAME = 0xC000041C
STATUS_FATAL_USER_CALLBACK_EXCEPTION = 0xC000041D
STATUS_ASSERTION_FAILURE = 0xC0000420
STATUS_VERIFIER_STOP = 0xC0000421
STATUS_CALLBACK_POP_STACK = 0xC0000423
STATUS_INCOMPATIBLE_DRIVER_BLOCKED = 0xC0000424
STATUS_HIVE_UNLOADED = 0xC0000425
STATUS_COMPRESSION_DISABLED = 0xC0000426
STATUS_FILE_SYSTEM_LIMITATION = 0xC0000427
STATUS_INVALID_IMAGE_HASH = 0xC0000428
STATUS_NOT_CAPABLE = 0xC0000429
STATUS_REQUEST_OUT_OF_SEQUENCE = 0xC000042A
STATUS_IMPLEMENTATION_LIMIT = 0xC000042B
STATUS_ELEVATION_REQUIRED = 0xC000042C
STATUS_NO_SECURITY_CONTEXT = 0xC000042D
STATUS_PKU2U_CERT_FAILURE = 0xC000042F
STATUS_BEYOND_VDL = 0xC0000432
STATUS_ENCOUNTERED_WRITE_IN_PROGRESS = 0xC0000433
STATUS_PTE_CHANGED = 0xC0000434
STATUS_PURGE_FAILED = 0xC0000435
STATUS_CRED_REQUIRES_CONFIRMATION = 0xC0000440
STATUS_CS_ENCRYPTION_INVALID_SERVER_RESPONSE = 0xC0000441
STATUS_CS_ENCRYPTION_UNSUPPORTED_SERVER = 0xC0000442
STATUS_CS_ENCRYPTION_EXISTING_ENCRYPTED_FILE = 0xC0000443
STATUS_CS_ENCRYPTION_NEW_ENCRYPTED_FILE = 0xC0000444
STATUS_CS_ENCRYPTION_FILE_NOT_CSE = 0xC0000445
STATUS_INVALID_LABEL = 0xC0000446
STATUS_DRIVER_PROCESS_TERMINATED = 0xC0000450
STATUS_AMBIGUOUS_SYSTEM_DEVICE = 0xC0000451
STATUS_SYSTEM_DEVICE_NOT_FOUND = 0xC0000452
STATUS_RESTART_BOOT_APPLICATION = 0xC0000453
STATUS_INSUFFICIENT_NVRAM_RESOURCES = 0xC0000454
STATUS_INVALID_TASK_NAME = 0xC0000500
STATUS_INVALID_TASK_INDEX = 0xC0000501
STATUS_THREAD_ALREADY_IN_TASK = 0xC0000502
STATUS_CALLBACK_BYPASS = 0xC0000503
STATUS_FAIL_FAST_EXCEPTION = 0xC0000602
STATUS_IMAGE_CERT_REVOKED = 0xC0000603
STATUS_PORT_CLOSED = 0xC0000700
STATUS_MESSAGE_LOST = 0xC0000701
STATUS_INVALID_MESSAGE = 0xC0000702
STATUS_REQUEST_CANCELED = 0xC0000703
STATUS_RECURSIVE_DISPATCH = 0xC0000704
STATUS_LPC_RECEIVE_BUFFER_EXPECTED = 0xC0000705
STATUS_LPC_INVALID_CONNECTION_USAGE = 0xC0000706
STATUS_LPC_REQUESTS_NOT_ALLOWED = 0xC0000707
STATUS_RESOURCE_IN_USE = 0xC0000708
STATUS_HARDWARE_MEMORY_ERROR = 0xC0000709
STATUS_THREADPOOL_HANDLE_EXCEPTION = 0xC000070A
STATUS_THREADPOOL_SET_EVENT_ON_COMPLETION_FAILED = 0xC000070B
STATUS_THREADPOOL_RELEASE_SEMAPHORE_ON_COMPLETION_FAILED = 0xC000070C
STATUS_THREADPOOL_RELEASE_MUTEX_ON_COMPLETION_FAILED = 0xC000070D
STATUS_THREADPOOL_FREE_LIBRARY_ON_COMPLETION_FAILED = 0xC000070E
STATUS_THREADPOOL_RELEASED_DURING_OPERATION = 0xC000070F
STATUS_CALLBACK_RETURNED_WHILE_IMPERSONATING = 0xC0000710
STATUS_APC_RETURNED_WHILE_IMPERSONATING = 0xC0000711
STATUS_PROCESS_IS_PROTECTED = 0xC0000712
STATUS_MCA_EXCEPTION = 0xC0000713
STATUS_CERTIFICATE_MAPPING_NOT_UNIQUE = 0xC0000714
STATUS_SYMLINK_CLASS_DISABLED = 0xC0000715
STATUS_INVALID_IDN_NORMALIZATION = 0xC0000716
STATUS_NO_UNICODE_TRANSLATION = 0xC0000717
STATUS_ALREADY_REGISTERED = 0xC0000718
STATUS_CONTEXT_MISMATCH = 0xC0000719
STATUS_PORT_ALREADY_HAS_COMPLETION_LIST = 0xC000071A
STATUS_CALLBACK_RETURNED_THREAD_PRIORITY = 0xC000071B
STATUS_INVALID_THREAD = 0xC000071C
STATUS_CALLBACK_RETURNED_TRANSACTION = 0xC000071D
STATUS_CALLBACK_RETURNED_LDR_LOCK = 0xC000071E
STATUS_CALLBACK_RETURNED_LANG = 0xC000071F
STATUS_CALLBACK_RETURNED_PRI_BACK = 0xC0000720
STATUS_CALLBACK_RETURNED_THREAD_AFFINITY = 0xC0000721
STATUS_DISK_REPAIR_DISABLED = 0xC0000800
STATUS_DS_DOMAIN_RENAME_IN_PROGRESS = 0xC0000801
STATUS_DISK_QUOTA_EXCEEDED = 0xC0000802
STATUS_DATA_LOST_REPAIR = 0x80000803
STATUS_CONTENT_BLOCKED = 0xC0000804
STATUS_BAD_CLUSTERS = 0xC0000805
STATUS_VOLUME_DIRTY = 0xC0000806
STATUS_FILE_CHECKED_OUT = 0xC0000901
STATUS_CHECKOUT_REQUIRED = 0xC0000902
STATUS_BAD_FILE_TYPE = 0xC0000903
STATUS_FILE_TOO_LARGE = 0xC0000904
STATUS_FORMS_AUTH_REQUIRED = 0xC0000905
STATUS_VIRUS_INFECTED = 0xC0000906
STATUS_VIRUS_DELETED = 0xC0000907
STATUS_BAD_MCFG_TABLE = 0xC0000908
STATUS_CANNOT_BREAK_OPLOCK = 0xC0000909
STATUS_WOW_ASSERTION = 0xC0009898
STATUS_INVALID_SIGNATURE = 0xC000A000
STATUS_HMAC_NOT_SUPPORTED = 0xC000A001
STATUS_AUTH_TAG_MISMATCH = 0xC000A002
STATUS_IPSEC_QUEUE_OVERFLOW = 0xC000A010
STATUS_ND_QUEUE_OVERFLOW = 0xC000A011
STATUS_HOPLIMIT_EXCEEDED = 0xC000A012
STATUS_PROTOCOL_NOT_SUPPORTED = 0xC000A013
STATUS_FASTPATH_REJECTED = 0xC000A014
STATUS_LOST_WRITEBEHIND_DATA_NETWORK_DISCONNECTED = 0xC000A080
STATUS_LOST_WRITEBEHIND_DATA_NETWORK_SERVER_ERROR = 0xC000A081
STATUS_LOST_WRITEBEHIND_DATA_LOCAL_DISK_ERROR = 0xC000A082
STATUS_XML_PARSE_ERROR = 0xC000A083
STATUS_XMLDSIG_ERROR = 0xC000A084
STATUS_WRONG_COMPARTMENT = 0xC000A085
STATUS_AUTHIP_FAILURE = 0xC000A086
STATUS_DS_OID_MAPPED_GROUP_CANT_HAVE_MEMBERS = 0xC000A087
STATUS_DS_OID_NOT_FOUND = 0xC000A088
STATUS_HASH_NOT_SUPPORTED = 0xC000A100
STATUS_HASH_NOT_PRESENT = 0xC000A101
# --- ACPI facility (0xC014xxxx): AML interpreter and power-management errors ---
STATUS_ACPI_INVALID_OPCODE = 0xC0140001
STATUS_ACPI_STACK_OVERFLOW = 0xC0140002
STATUS_ACPI_ASSERT_FAILED = 0xC0140003
STATUS_ACPI_INVALID_INDEX = 0xC0140004
STATUS_ACPI_INVALID_ARGUMENT = 0xC0140005
STATUS_ACPI_FATAL = 0xC0140006
STATUS_ACPI_INVALID_SUPERNAME = 0xC0140007
STATUS_ACPI_INVALID_ARGTYPE = 0xC0140008
STATUS_ACPI_INVALID_OBJTYPE = 0xC0140009
STATUS_ACPI_INVALID_TARGETTYPE = 0xC014000A
STATUS_ACPI_INCORRECT_ARGUMENT_COUNT = 0xC014000B
STATUS_ACPI_ADDRESS_NOT_MAPPED = 0xC014000C
STATUS_ACPI_INVALID_EVENTTYPE = 0xC014000D
STATUS_ACPI_HANDLER_COLLISION = 0xC014000E
STATUS_ACPI_INVALID_DATA = 0xC014000F
STATUS_ACPI_INVALID_REGION = 0xC0140010
STATUS_ACPI_INVALID_ACCESS_SIZE = 0xC0140011
STATUS_ACPI_ACQUIRE_GLOBAL_LOCK = 0xC0140012
STATUS_ACPI_ALREADY_INITIALIZED = 0xC0140013
STATUS_ACPI_NOT_INITIALIZED = 0xC0140014
STATUS_ACPI_INVALID_MUTEX_LEVEL = 0xC0140015
STATUS_ACPI_MUTEX_NOT_OWNED = 0xC0140016
STATUS_ACPI_MUTEX_NOT_OWNER = 0xC0140017
STATUS_ACPI_RS_ACCESS = 0xC0140018
STATUS_ACPI_INVALID_TABLE = 0xC0140019
STATUS_ACPI_REG_HANDLER_FAILED = 0xC0140020
STATUS_ACPI_POWER_REQUEST_FAILED = 0xC0140021
# --- Terminal Services / Remote Desktop facility (0xC00Axxxx, "CTX") ---
STATUS_CTX_WINSTATION_NAME_INVALID = 0xC00A0001
STATUS_CTX_INVALID_PD = 0xC00A0002
STATUS_CTX_PD_NOT_FOUND = 0xC00A0003
STATUS_CTX_CDM_CONNECT = 0x400A0004
STATUS_CTX_CDM_DISCONNECT = 0x400A0005
STATUS_CTX_CLOSE_PENDING = 0xC00A0006
STATUS_CTX_NO_OUTBUF = 0xC00A0007
STATUS_CTX_MODEM_INF_NOT_FOUND = 0xC00A0008
STATUS_CTX_INVALID_MODEMNAME = 0xC00A0009
STATUS_CTX_RESPONSE_ERROR = 0xC00A000A
STATUS_CTX_MODEM_RESPONSE_TIMEOUT = 0xC00A000B
STATUS_CTX_MODEM_RESPONSE_NO_CARRIER = 0xC00A000C
STATUS_CTX_MODEM_RESPONSE_NO_DIALTONE = 0xC00A000D
STATUS_CTX_MODEM_RESPONSE_BUSY = 0xC00A000E
STATUS_CTX_MODEM_RESPONSE_VOICE = 0xC00A000F
STATUS_CTX_TD_ERROR = 0xC00A0010
STATUS_CTX_LICENSE_CLIENT_INVALID = 0xC00A0012
STATUS_CTX_LICENSE_NOT_AVAILABLE = 0xC00A0013
STATUS_CTX_LICENSE_EXPIRED = 0xC00A0014
STATUS_CTX_WINSTATION_NOT_FOUND = 0xC00A0015
STATUS_CTX_WINSTATION_NAME_COLLISION = 0xC00A0016
STATUS_CTX_WINSTATION_BUSY = 0xC00A0017
STATUS_CTX_BAD_VIDEO_MODE = 0xC00A0018
STATUS_CTX_GRAPHICS_INVALID = 0xC00A0022
STATUS_CTX_NOT_CONSOLE = 0xC00A0024
STATUS_CTX_CLIENT_QUERY_TIMEOUT = 0xC00A0026
STATUS_CTX_CONSOLE_DISCONNECT = 0xC00A0027
STATUS_CTX_CONSOLE_CONNECT = 0xC00A0028
STATUS_CTX_SHADOW_DENIED = 0xC00A002A
STATUS_CTX_WINSTATION_ACCESS_DENIED = 0xC00A002B
STATUS_CTX_INVALID_WD = 0xC00A002E
STATUS_CTX_WD_NOT_FOUND = 0xC00A002F
STATUS_CTX_SHADOW_INVALID = 0xC00A0030
STATUS_CTX_SHADOW_DISABLED = 0xC00A0031
STATUS_RDP_PROTOCOL_ERROR = 0xC00A0032
STATUS_CTX_CLIENT_LICENSE_NOT_SET = 0xC00A0033
STATUS_CTX_CLIENT_LICENSE_IN_USE = 0xC00A0034
STATUS_CTX_SHADOW_ENDED_BY_MODE_CHANGE = 0xC00A0035
STATUS_CTX_SHADOW_NOT_RUNNING = 0xC00A0036
STATUS_CTX_LOGON_DISABLED = 0xC00A0037
STATUS_CTX_SECURITY_LAYER_ERROR = 0xC00A0038
STATUS_TS_INCOMPATIBLE_SESSIONS = 0xC00A0039
STATUS_TS_VIDEO_SUBSYSTEM_ERROR = 0xC00A003A
# --- Plug and Play facility (0xC004xxxx) ---
STATUS_PNP_BAD_MPS_TABLE = 0xC0040035
STATUS_PNP_TRANSLATION_FAILED = 0xC0040036
STATUS_PNP_IRQ_TRANSLATION_FAILED = 0xC0040037
STATUS_PNP_INVALID_ID = 0xC0040038
STATUS_IO_REISSUE_AS_CACHED = 0xC0040039
# --- Multilingual User Interface (MUI) facility (0xC00Bxxxx) ---
STATUS_MUI_FILE_NOT_FOUND = 0xC00B0001
STATUS_MUI_INVALID_FILE = 0xC00B0002
STATUS_MUI_INVALID_RC_CONFIG = 0xC00B0003
STATUS_MUI_INVALID_LOCALE_NAME = 0xC00B0004
STATUS_MUI_INVALID_ULTIMATEFALLBACK_NAME = 0xC00B0005
STATUS_MUI_FILE_NOT_LOADED = 0xC00B0006
STATUS_RESOURCE_ENUM_USER_STOP = 0xC00B0007
# --- File system Filter Manager facility (0xC01Cxxxx, "FLT") ---
STATUS_FLT_NO_HANDLER_DEFINED = 0xC01C0001
STATUS_FLT_CONTEXT_ALREADY_DEFINED = 0xC01C0002
STATUS_FLT_INVALID_ASYNCHRONOUS_REQUEST = 0xC01C0003
STATUS_FLT_DISALLOW_FAST_IO = 0xC01C0004
STATUS_FLT_INVALID_NAME_REQUEST = 0xC01C0005
STATUS_FLT_NOT_SAFE_TO_POST_OPERATION = 0xC01C0006
STATUS_FLT_NOT_INITIALIZED = 0xC01C0007
STATUS_FLT_FILTER_NOT_READY = 0xC01C0008
STATUS_FLT_POST_OPERATION_CLEANUP = 0xC01C0009
STATUS_FLT_INTERNAL_ERROR = 0xC01C000A
STATUS_FLT_DELETING_OBJECT = 0xC01C000B
STATUS_FLT_MUST_BE_NONPAGED_POOL = 0xC01C000C
STATUS_FLT_DUPLICATE_ENTRY = 0xC01C000D
STATUS_FLT_CBDQ_DISABLED = 0xC01C000E
STATUS_FLT_DO_NOT_ATTACH = 0xC01C000F
STATUS_FLT_DO_NOT_DETACH = 0xC01C0010
STATUS_FLT_INSTANCE_ALTITUDE_COLLISION = 0xC01C0011
STATUS_FLT_INSTANCE_NAME_COLLISION = 0xC01C0012
STATUS_FLT_FILTER_NOT_FOUND = 0xC01C0013
STATUS_FLT_VOLUME_NOT_FOUND = 0xC01C0014
STATUS_FLT_INSTANCE_NOT_FOUND = 0xC01C0015
STATUS_FLT_CONTEXT_ALLOCATION_NOT_FOUND = 0xC01C0016
STATUS_FLT_INVALID_CONTEXT_REGISTRATION = 0xC01C0017
STATUS_FLT_NAME_CACHE_MISS = 0xC01C0018
STATUS_FLT_NO_DEVICE_OBJECT = 0xC01C0019
STATUS_FLT_VOLUME_ALREADY_MOUNTED = 0xC01C001A
STATUS_FLT_ALREADY_ENLISTED = 0xC01C001B
STATUS_FLT_CONTEXT_ALREADY_LINKED = 0xC01C001C
STATUS_FLT_NO_WAITER_FOR_REPLY = 0xC01C0020
# --- Side-by-Side assembly (SxS/WinSxS) facility (0xC015xxxx) ---
STATUS_SXS_SECTION_NOT_FOUND = 0xC0150001
STATUS_SXS_CANT_GEN_ACTCTX = 0xC0150002
STATUS_SXS_INVALID_ACTCTXDATA_FORMAT = 0xC0150003
STATUS_SXS_ASSEMBLY_NOT_FOUND = 0xC0150004
STATUS_SXS_MANIFEST_FORMAT_ERROR = 0xC0150005
STATUS_SXS_MANIFEST_PARSE_ERROR = 0xC0150006
STATUS_SXS_ACTIVATION_CONTEXT_DISABLED = 0xC0150007
STATUS_SXS_KEY_NOT_FOUND = 0xC0150008
STATUS_SXS_VERSION_CONFLICT = 0xC0150009
STATUS_SXS_WRONG_SECTION_TYPE = 0xC015000A
STATUS_SXS_THREAD_QUERIES_DISABLED = 0xC015000B
STATUS_SXS_ASSEMBLY_MISSING = 0xC015000C
STATUS_SXS_RELEASE_ACTIVATION_CONTEXT = 0x4015000D
STATUS_SXS_PROCESS_DEFAULT_ALREADY_SET = 0xC015000E
STATUS_SXS_EARLY_DEACTIVATION = 0xC015000F
STATUS_SXS_INVALID_DEACTIVATION = 0xC0150010
STATUS_SXS_MULTIPLE_DEACTIVATION = 0xC0150011
STATUS_SXS_SYSTEM_DEFAULT_ACTIVATION_CONTEXT_EMPTY = 0xC0150012
STATUS_SXS_PROCESS_TERMINATION_REQUESTED = 0xC0150013
STATUS_SXS_CORRUPT_ACTIVATION_STACK = 0xC0150014
STATUS_SXS_CORRUPTION = 0xC0150015
STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_VALUE = 0xC0150016
STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_NAME = 0xC0150017
STATUS_SXS_IDENTITY_DUPLICATE_ATTRIBUTE = 0xC0150018
STATUS_SXS_IDENTITY_PARSE_ERROR = 0xC0150019
STATUS_SXS_COMPONENT_STORE_CORRUPT = 0xC015001A
STATUS_SXS_FILE_HASH_MISMATCH = 0xC015001B
STATUS_SXS_MANIFEST_IDENTITY_SAME_BUT_CONTENTS_DIFFERENT = 0xC015001C
STATUS_SXS_IDENTITIES_DIFFERENT = 0xC015001D
STATUS_SXS_ASSEMBLY_IS_NOT_A_DEPLOYMENT = 0xC015001E
STATUS_SXS_FILE_NOT_PART_OF_ASSEMBLY = 0xC015001F
STATUS_ADVANCED_INSTALLER_FAILED = 0xC0150020
STATUS_XML_ENCODING_MISMATCH = 0xC0150021
STATUS_SXS_MANIFEST_TOO_BIG = 0xC0150022
STATUS_SXS_SETTING_NOT_REGISTERED = 0xC0150023
STATUS_SXS_TRANSACTION_CLOSURE_INCOMPLETE = 0xC0150024
STATUS_SMI_PRIMITIVE_INSTALLER_FAILED = 0xC0150025
STATUS_GENERIC_COMMAND_FAILED = 0xC0150026
STATUS_SXS_FILE_HASH_MISSING = 0xC0150027
# --- Failover Cluster facility (0xC013xxxx) ---
STATUS_CLUSTER_INVALID_NODE = 0xC0130001
STATUS_CLUSTER_NODE_EXISTS = 0xC0130002
STATUS_CLUSTER_JOIN_IN_PROGRESS = 0xC0130003
STATUS_CLUSTER_NODE_NOT_FOUND = 0xC0130004
STATUS_CLUSTER_LOCAL_NODE_NOT_FOUND = 0xC0130005
STATUS_CLUSTER_NETWORK_EXISTS = 0xC0130006
STATUS_CLUSTER_NETWORK_NOT_FOUND = 0xC0130007
STATUS_CLUSTER_NETINTERFACE_EXISTS = 0xC0130008
STATUS_CLUSTER_NETINTERFACE_NOT_FOUND = 0xC0130009
STATUS_CLUSTER_INVALID_REQUEST = 0xC013000A
STATUS_CLUSTER_INVALID_NETWORK_PROVIDER = 0xC013000B
STATUS_CLUSTER_NODE_DOWN = 0xC013000C
STATUS_CLUSTER_NODE_UNREACHABLE = 0xC013000D
STATUS_CLUSTER_NODE_NOT_MEMBER = 0xC013000E
STATUS_CLUSTER_JOIN_NOT_IN_PROGRESS = 0xC013000F
STATUS_CLUSTER_INVALID_NETWORK = 0xC0130010
STATUS_CLUSTER_NO_NET_ADAPTERS = 0xC0130011
STATUS_CLUSTER_NODE_UP = 0xC0130012
STATUS_CLUSTER_NODE_PAUSED = 0xC0130013
STATUS_CLUSTER_NODE_NOT_PAUSED = 0xC0130014
STATUS_CLUSTER_NO_SECURITY_CONTEXT = 0xC0130015
STATUS_CLUSTER_NETWORK_NOT_INTERNAL = 0xC0130016
STATUS_CLUSTER_POISONED = 0xC0130017
STATUS_CLUSTER_NON_CSV_PATH = 0xC0130018
STATUS_CLUSTER_CSV_VOLUME_NOT_LOCAL = 0xC0130019
# --- Kernel Transaction Manager / TxF facility (0xC019xxxx) ---
STATUS_TRANSACTIONAL_CONFLICT = 0xC0190001
STATUS_INVALID_TRANSACTION = 0xC0190002
STATUS_TRANSACTION_NOT_ACTIVE = 0xC0190003
STATUS_TM_INITIALIZATION_FAILED = 0xC0190004
STATUS_RM_NOT_ACTIVE = 0xC0190005
STATUS_RM_METADATA_CORRUPT = 0xC0190006
STATUS_TRANSACTION_NOT_JOINED = 0xC0190007
STATUS_DIRECTORY_NOT_RM = 0xC0190008
STATUS_COULD_NOT_RESIZE_LOG = 0x80190009
STATUS_TRANSACTIONS_UNSUPPORTED_REMOTE = 0xC019000A
STATUS_LOG_RESIZE_INVALID_SIZE = 0xC019000B
STATUS_REMOTE_FILE_VERSION_MISMATCH = 0xC019000C
STATUS_CRM_PROTOCOL_ALREADY_EXISTS = 0xC019000F
STATUS_TRANSACTION_PROPAGATION_FAILED = 0xC0190010
STATUS_CRM_PROTOCOL_NOT_FOUND = 0xC0190011
STATUS_TRANSACTION_SUPERIOR_EXISTS = 0xC0190012
STATUS_TRANSACTION_REQUEST_NOT_VALID = 0xC0190013
STATUS_TRANSACTION_NOT_REQUESTED = 0xC0190014
STATUS_TRANSACTION_ALREADY_ABORTED = 0xC0190015
STATUS_TRANSACTION_ALREADY_COMMITTED = 0xC0190016
STATUS_TRANSACTION_INVALID_MARSHALL_BUFFER = 0xC0190017
STATUS_CURRENT_TRANSACTION_NOT_VALID = 0xC0190018
STATUS_LOG_GROWTH_FAILED = 0xC0190019
STATUS_OBJECT_NO_LONGER_EXISTS = 0xC0190021
STATUS_STREAM_MINIVERSION_NOT_FOUND = 0xC0190022
STATUS_STREAM_MINIVERSION_NOT_VALID = 0xC0190023
STATUS_MINIVERSION_INACCESSIBLE_FROM_SPECIFIED_TRANSACTION = 0xC0190024
STATUS_CANT_OPEN_MINIVERSION_WITH_MODIFY_INTENT = 0xC0190025
STATUS_CANT_CREATE_MORE_STREAM_MINIVERSIONS = 0xC0190026
STATUS_HANDLE_NO_LONGER_VALID = 0xC0190028
STATUS_NO_TXF_METADATA = 0x80190029
STATUS_LOG_CORRUPTION_DETECTED = 0xC0190030
STATUS_CANT_RECOVER_WITH_HANDLE_OPEN = 0x80190031
STATUS_RM_DISCONNECTED = 0xC0190032
STATUS_ENLISTMENT_NOT_SUPERIOR = 0xC0190033
STATUS_RECOVERY_NOT_NEEDED = 0x40190034
STATUS_RM_ALREADY_STARTED = 0x40190035
STATUS_FILE_IDENTITY_NOT_PERSISTENT = 0xC0190036
STATUS_CANT_BREAK_TRANSACTIONAL_DEPENDENCY = 0xC0190037
STATUS_CANT_CROSS_RM_BOUNDARY = 0xC0190038
STATUS_TXF_DIR_NOT_EMPTY = 0xC0190039
STATUS_INDOUBT_TRANSACTIONS_EXIST = 0xC019003A
STATUS_TM_VOLATILE = 0xC019003B
STATUS_ROLLBACK_TIMER_EXPIRED = 0xC019003C
STATUS_TXF_ATTRIBUTE_CORRUPT = 0xC019003D
STATUS_EFS_NOT_ALLOWED_IN_TRANSACTION = 0xC019003E
STATUS_TRANSACTIONAL_OPEN_NOT_ALLOWED = 0xC019003F
STATUS_TRANSACTED_MAPPING_UNSUPPORTED_REMOTE = 0xC0190040
STATUS_TXF_METADATA_ALREADY_PRESENT = 0x80190041
STATUS_TRANSACTION_SCOPE_CALLBACKS_NOT_SET = 0x80190042
STATUS_TRANSACTION_REQUIRED_PROMOTION = 0xC0190043
STATUS_CANNOT_EXECUTE_FILE_IN_TRANSACTION = 0xC0190044
STATUS_TRANSACTIONS_NOT_FROZEN = 0xC0190045
STATUS_TRANSACTION_FREEZE_IN_PROGRESS = 0xC0190046
STATUS_NOT_SNAPSHOT_VOLUME = 0xC0190047
STATUS_NO_SAVEPOINT_WITH_OPEN_FILES = 0xC0190048
STATUS_SPARSE_NOT_ALLOWED_IN_TRANSACTION = 0xC0190049
STATUS_TM_IDENTITY_MISMATCH = 0xC019004A
STATUS_FLOATED_SECTION = 0xC019004B
STATUS_CANNOT_ACCEPT_TRANSACTED_WORK = 0xC019004C
STATUS_CANNOT_ABORT_TRANSACTIONS = 0xC019004D
STATUS_TRANSACTION_NOT_FOUND = 0xC019004E
STATUS_RESOURCEMANAGER_NOT_FOUND = 0xC019004F
STATUS_ENLISTMENT_NOT_FOUND = 0xC0190050
STATUS_TRANSACTIONMANAGER_NOT_FOUND = 0xC0190051
STATUS_TRANSACTIONMANAGER_NOT_ONLINE = 0xC0190052
STATUS_TRANSACTIONMANAGER_RECOVERY_NAME_COLLISION = 0xC0190053
STATUS_TRANSACTION_NOT_ROOT = 0xC0190054
STATUS_TRANSACTION_OBJECT_EXPIRED = 0xC0190055
STATUS_COMPRESSION_NOT_ALLOWED_IN_TRANSACTION = 0xC0190056
STATUS_TRANSACTION_RESPONSE_NOT_ENLISTED = 0xC0190057
STATUS_TRANSACTION_RECORD_TOO_LONG = 0xC0190058
STATUS_NO_LINK_TRACKING_IN_TRANSACTION = 0xC0190059
STATUS_OPERATION_NOT_SUPPORTED_IN_TRANSACTION = 0xC019005A
STATUS_TRANSACTION_INTEGRITY_VIOLATED = 0xC019005B
STATUS_TRANSACTIONMANAGER_IDENTITY_MISMATCH = 0xC019005C
STATUS_RM_CANNOT_BE_FROZEN_FOR_SNAPSHOT = 0xC019005D
STATUS_TRANSACTION_MUST_WRITETHROUGH = 0xC019005E
STATUS_TRANSACTION_NO_SUPERIOR = 0xC019005F
STATUS_EXPIRED_HANDLE = 0xC0190060
STATUS_TRANSACTION_NOT_ENLISTED = 0xC0190061
# --- Common Log File System (CLFS) facility (0xC01Axxxx) ---
STATUS_LOG_SECTOR_INVALID = 0xC01A0001
STATUS_LOG_SECTOR_PARITY_INVALID = 0xC01A0002
STATUS_LOG_SECTOR_REMAPPED = 0xC01A0003
STATUS_LOG_BLOCK_INCOMPLETE = 0xC01A0004
STATUS_LOG_INVALID_RANGE = 0xC01A0005
STATUS_LOG_BLOCKS_EXHAUSTED = 0xC01A0006
STATUS_LOG_READ_CONTEXT_INVALID = 0xC01A0007
STATUS_LOG_RESTART_INVALID = 0xC01A0008
STATUS_LOG_BLOCK_VERSION = 0xC01A0009
STATUS_LOG_BLOCK_INVALID = 0xC01A000A
STATUS_LOG_READ_MODE_INVALID = 0xC01A000B
STATUS_LOG_NO_RESTART = 0x401A000C
STATUS_LOG_METADATA_CORRUPT = 0xC01A000D
STATUS_LOG_METADATA_INVALID = 0xC01A000E
STATUS_LOG_METADATA_INCONSISTENT = 0xC01A000F
STATUS_LOG_RESERVATION_INVALID = 0xC01A0010
STATUS_LOG_CANT_DELETE = 0xC01A0011
STATUS_LOG_CONTAINER_LIMIT_EXCEEDED = 0xC01A0012
STATUS_LOG_START_OF_LOG = 0xC01A0013
STATUS_LOG_POLICY_ALREADY_INSTALLED = 0xC01A0014
STATUS_LOG_POLICY_NOT_INSTALLED = 0xC01A0015
STATUS_LOG_POLICY_INVALID = 0xC01A0016
STATUS_LOG_POLICY_CONFLICT = 0xC01A0017
STATUS_LOG_PINNED_ARCHIVE_TAIL = 0xC01A0018
STATUS_LOG_RECORD_NONEXISTENT = 0xC01A0019
STATUS_LOG_RECORDS_RESERVED_INVALID = 0xC01A001A
STATUS_LOG_SPACE_RESERVED_INVALID = 0xC01A001B
STATUS_LOG_TAIL_INVALID = 0xC01A001C
STATUS_LOG_FULL = 0xC01A001D
STATUS_LOG_MULTIPLEXED = 0xC01A001E
STATUS_LOG_DEDICATED = 0xC01A001F
STATUS_LOG_ARCHIVE_NOT_IN_PROGRESS = 0xC01A0020
STATUS_LOG_ARCHIVE_IN_PROGRESS = 0xC01A0021
STATUS_LOG_EPHEMERAL = 0xC01A0022
STATUS_LOG_NOT_ENOUGH_CONTAINERS = 0xC01A0023
STATUS_LOG_CLIENT_ALREADY_REGISTERED = 0xC01A0024
STATUS_LOG_CLIENT_NOT_REGISTERED = 0xC01A0025
STATUS_LOG_FULL_HANDLER_IN_PROGRESS = 0xC01A0026
STATUS_LOG_CONTAINER_READ_FAILED = 0xC01A0027
STATUS_LOG_CONTAINER_WRITE_FAILED = 0xC01A0028
STATUS_LOG_CONTAINER_OPEN_FAILED = 0xC01A0029
STATUS_LOG_CONTAINER_STATE_INVALID = 0xC01A002A
STATUS_LOG_STATE_INVALID = 0xC01A002B
STATUS_LOG_PINNED = 0xC01A002C
STATUS_LOG_METADATA_FLUSH_FAILED = 0xC01A002D
STATUS_LOG_INCONSISTENT_SECURITY = 0xC01A002E
STATUS_LOG_APPENDED_FLUSH_FAILED = 0xC01A002F
STATUS_LOG_PINNED_RESERVATION = 0xC01A0030
# --- Video driver facility (0x..1Bxxxx): display driver timeout/recovery ---
STATUS_VIDEO_HUNG_DISPLAY_DRIVER_THREAD = 0xC01B00EA
STATUS_VIDEO_HUNG_DISPLAY_DRIVER_THREAD_RECOVERED = 0x801B00EB
STATUS_VIDEO_DRIVER_DEBUG_REPORT_REQUEST = 0x401B00EC
# --- Monitor/EDID facility (0xC01Dxxxx) ---
STATUS_MONITOR_NO_DESCRIPTOR = 0xC01D0001
STATUS_MONITOR_UNKNOWN_DESCRIPTOR_FORMAT = 0xC01D0002
STATUS_MONITOR_INVALID_DESCRIPTOR_CHECKSUM = 0xC01D0003
STATUS_MONITOR_INVALID_STANDARD_TIMING_BLOCK = 0xC01D0004
STATUS_MONITOR_WMI_DATABLOCK_REGISTRATION_FAILED = 0xC01D0005
STATUS_MONITOR_INVALID_SERIAL_NUMBER_MONDSC_BLOCK = 0xC01D0006
STATUS_MONITOR_INVALID_USER_FRIENDLY_MONDSC_BLOCK = 0xC01D0007
STATUS_MONITOR_NO_MORE_DESCRIPTOR_DATA = 0xC01D0008
STATUS_MONITOR_INVALID_DETAILED_TIMING_BLOCK = 0xC01D0009
STATUS_MONITOR_INVALID_MANUFACTURE_DATE = 0xC01D000A
# --- Graphics kernel subsystem facility (0x..1Exxxx): WDDM/DXGK, VidPN
# topology, OPM/COPP content protection, DDC/I2C monitor control ---
STATUS_GRAPHICS_NOT_EXCLUSIVE_MODE_OWNER = 0xC01E0000
STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER = 0xC01E0001
STATUS_GRAPHICS_INVALID_DISPLAY_ADAPTER = 0xC01E0002
STATUS_GRAPHICS_ADAPTER_WAS_RESET = 0xC01E0003
STATUS_GRAPHICS_INVALID_DRIVER_MODEL = 0xC01E0004
STATUS_GRAPHICS_PRESENT_MODE_CHANGED = 0xC01E0005
STATUS_GRAPHICS_PRESENT_OCCLUDED = 0xC01E0006
STATUS_GRAPHICS_PRESENT_DENIED = 0xC01E0007
STATUS_GRAPHICS_CANNOTCOLORCONVERT = 0xC01E0008
STATUS_GRAPHICS_DRIVER_MISMATCH = 0xC01E0009
STATUS_GRAPHICS_PARTIAL_DATA_POPULATED = 0x401E000A
STATUS_GRAPHICS_PRESENT_REDIRECTION_DISABLED = 0xC01E000B
STATUS_GRAPHICS_PRESENT_UNOCCLUDED = 0xC01E000C
STATUS_GRAPHICS_NO_VIDEO_MEMORY = 0xC01E0100
STATUS_GRAPHICS_CANT_LOCK_MEMORY = 0xC01E0101
STATUS_GRAPHICS_ALLOCATION_BUSY = 0xC01E0102
STATUS_GRAPHICS_TOO_MANY_REFERENCES = 0xC01E0103
STATUS_GRAPHICS_TRY_AGAIN_LATER = 0xC01E0104
STATUS_GRAPHICS_TRY_AGAIN_NOW = 0xC01E0105
STATUS_GRAPHICS_ALLOCATION_INVALID = 0xC01E0106
STATUS_GRAPHICS_UNSWIZZLING_APERTURE_UNAVAILABLE = 0xC01E0107
STATUS_GRAPHICS_UNSWIZZLING_APERTURE_UNSUPPORTED = 0xC01E0108
STATUS_GRAPHICS_CANT_EVICT_PINNED_ALLOCATION = 0xC01E0109
STATUS_GRAPHICS_INVALID_ALLOCATION_USAGE = 0xC01E0110
STATUS_GRAPHICS_CANT_RENDER_LOCKED_ALLOCATION = 0xC01E0111
STATUS_GRAPHICS_ALLOCATION_CLOSED = 0xC01E0112
STATUS_GRAPHICS_INVALID_ALLOCATION_INSTANCE = 0xC01E0113
STATUS_GRAPHICS_INVALID_ALLOCATION_HANDLE = 0xC01E0114
STATUS_GRAPHICS_WRONG_ALLOCATION_DEVICE = 0xC01E0115
STATUS_GRAPHICS_ALLOCATION_CONTENT_LOST = 0xC01E0116
STATUS_GRAPHICS_GPU_EXCEPTION_ON_DEVICE = 0xC01E0200
STATUS_GRAPHICS_INVALID_VIDPN_TOPOLOGY = 0xC01E0300
STATUS_GRAPHICS_VIDPN_TOPOLOGY_NOT_SUPPORTED = 0xC01E0301
STATUS_GRAPHICS_VIDPN_TOPOLOGY_CURRENTLY_NOT_SUPPORTED = 0xC01E0302
STATUS_GRAPHICS_INVALID_VIDPN = 0xC01E0303
STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE = 0xC01E0304
STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET = 0xC01E0305
STATUS_GRAPHICS_VIDPN_MODALITY_NOT_SUPPORTED = 0xC01E0306
STATUS_GRAPHICS_MODE_NOT_PINNED = 0x401E0307
STATUS_GRAPHICS_INVALID_VIDPN_SOURCEMODESET = 0xC01E0308
STATUS_GRAPHICS_INVALID_VIDPN_TARGETMODESET = 0xC01E0309
STATUS_GRAPHICS_INVALID_FREQUENCY = 0xC01E030A
STATUS_GRAPHICS_INVALID_ACTIVE_REGION = 0xC01E030B
STATUS_GRAPHICS_INVALID_TOTAL_REGION = 0xC01E030C
STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE_MODE = 0xC01E0310
STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET_MODE = 0xC01E0311
STATUS_GRAPHICS_PINNED_MODE_MUST_REMAIN_IN_SET = 0xC01E0312
STATUS_GRAPHICS_PATH_ALREADY_IN_TOPOLOGY = 0xC01E0313
STATUS_GRAPHICS_MODE_ALREADY_IN_MODESET = 0xC01E0314
STATUS_GRAPHICS_INVALID_VIDEOPRESENTSOURCESET = 0xC01E0315
STATUS_GRAPHICS_INVALID_VIDEOPRESENTTARGETSET = 0xC01E0316
STATUS_GRAPHICS_SOURCE_ALREADY_IN_SET = 0xC01E0317
STATUS_GRAPHICS_TARGET_ALREADY_IN_SET = 0xC01E0318
STATUS_GRAPHICS_INVALID_VIDPN_PRESENT_PATH = 0xC01E0319
STATUS_GRAPHICS_NO_RECOMMENDED_VIDPN_TOPOLOGY = 0xC01E031A
STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGESET = 0xC01E031B
STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE = 0xC01E031C
STATUS_GRAPHICS_FREQUENCYRANGE_NOT_IN_SET = 0xC01E031D
STATUS_GRAPHICS_NO_PREFERRED_MODE = 0x401E031E
STATUS_GRAPHICS_FREQUENCYRANGE_ALREADY_IN_SET = 0xC01E031F
STATUS_GRAPHICS_STALE_MODESET = 0xC01E0320
STATUS_GRAPHICS_INVALID_MONITOR_SOURCEMODESET = 0xC01E0321
STATUS_GRAPHICS_INVALID_MONITOR_SOURCE_MODE = 0xC01E0322
STATUS_GRAPHICS_NO_RECOMMENDED_FUNCTIONAL_VIDPN = 0xC01E0323
STATUS_GRAPHICS_MODE_ID_MUST_BE_UNIQUE = 0xC01E0324
STATUS_GRAPHICS_EMPTY_ADAPTER_MONITOR_MODE_SUPPORT_INTERSECTION = 0xC01E0325
STATUS_GRAPHICS_VIDEO_PRESENT_TARGETS_LESS_THAN_SOURCES = 0xC01E0326
STATUS_GRAPHICS_PATH_NOT_IN_TOPOLOGY = 0xC01E0327
STATUS_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_SOURCE = 0xC01E0328
STATUS_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_TARGET = 0xC01E0329
STATUS_GRAPHICS_INVALID_MONITORDESCRIPTORSET = 0xC01E032A
STATUS_GRAPHICS_INVALID_MONITORDESCRIPTOR = 0xC01E032B
STATUS_GRAPHICS_MONITORDESCRIPTOR_NOT_IN_SET = 0xC01E032C
STATUS_GRAPHICS_MONITORDESCRIPTOR_ALREADY_IN_SET = 0xC01E032D
STATUS_GRAPHICS_MONITORDESCRIPTOR_ID_MUST_BE_UNIQUE = 0xC01E032E
STATUS_GRAPHICS_INVALID_VIDPN_TARGET_SUBSET_TYPE = 0xC01E032F
STATUS_GRAPHICS_RESOURCES_NOT_RELATED = 0xC01E0330
STATUS_GRAPHICS_SOURCE_ID_MUST_BE_UNIQUE = 0xC01E0331
STATUS_GRAPHICS_TARGET_ID_MUST_BE_UNIQUE = 0xC01E0332
STATUS_GRAPHICS_NO_AVAILABLE_VIDPN_TARGET = 0xC01E0333
STATUS_GRAPHICS_MONITOR_COULD_NOT_BE_ASSOCIATED_WITH_ADAPTER = 0xC01E0334
STATUS_GRAPHICS_NO_VIDPNMGR = 0xC01E0335
STATUS_GRAPHICS_NO_ACTIVE_VIDPN = 0xC01E0336
STATUS_GRAPHICS_STALE_VIDPN_TOPOLOGY = 0xC01E0337
STATUS_GRAPHICS_MONITOR_NOT_CONNECTED = 0xC01E0338
STATUS_GRAPHICS_SOURCE_NOT_IN_TOPOLOGY = 0xC01E0339
STATUS_GRAPHICS_INVALID_PRIMARYSURFACE_SIZE = 0xC01E033A
STATUS_GRAPHICS_INVALID_VISIBLEREGION_SIZE = 0xC01E033B
STATUS_GRAPHICS_INVALID_STRIDE = 0xC01E033C
STATUS_GRAPHICS_INVALID_PIXELFORMAT = 0xC01E033D
STATUS_GRAPHICS_INVALID_COLORBASIS = 0xC01E033E
STATUS_GRAPHICS_INVALID_PIXELVALUEACCESSMODE = 0xC01E033F
STATUS_GRAPHICS_TARGET_NOT_IN_TOPOLOGY = 0xC01E0340
STATUS_GRAPHICS_NO_DISPLAY_MODE_MANAGEMENT_SUPPORT = 0xC01E0341
STATUS_GRAPHICS_VIDPN_SOURCE_IN_USE = 0xC01E0342
STATUS_GRAPHICS_CANT_ACCESS_ACTIVE_VIDPN = 0xC01E0343
STATUS_GRAPHICS_INVALID_PATH_IMPORTANCE_ORDINAL = 0xC01E0344
STATUS_GRAPHICS_INVALID_PATH_CONTENT_GEOMETRY_TRANSFORMATION = 0xC01E0345
STATUS_GRAPHICS_PATH_CONTENT_GEOMETRY_TRANSFORMATION_NOT_SUPPORTED = 0xC01E0346
STATUS_GRAPHICS_INVALID_GAMMA_RAMP = 0xC01E0347
STATUS_GRAPHICS_GAMMA_RAMP_NOT_SUPPORTED = 0xC01E0348
STATUS_GRAPHICS_MULTISAMPLING_NOT_SUPPORTED = 0xC01E0349
STATUS_GRAPHICS_MODE_NOT_IN_MODESET = 0xC01E034A
STATUS_GRAPHICS_DATASET_IS_EMPTY = 0x401E034B
STATUS_GRAPHICS_NO_MORE_ELEMENTS_IN_DATASET = 0x401E034C
STATUS_GRAPHICS_INVALID_VIDPN_TOPOLOGY_RECOMMENDATION_REASON = 0xC01E034D
STATUS_GRAPHICS_INVALID_PATH_CONTENT_TYPE = 0xC01E034E
STATUS_GRAPHICS_INVALID_COPYPROTECTION_TYPE = 0xC01E034F
STATUS_GRAPHICS_UNASSIGNED_MODESET_ALREADY_EXISTS = 0xC01E0350
STATUS_GRAPHICS_PATH_CONTENT_GEOMETRY_TRANSFORMATION_NOT_PINNED = 0x401E0351
STATUS_GRAPHICS_INVALID_SCANLINE_ORDERING = 0xC01E0352
STATUS_GRAPHICS_TOPOLOGY_CHANGES_NOT_ALLOWED = 0xC01E0353
STATUS_GRAPHICS_NO_AVAILABLE_IMPORTANCE_ORDINALS = 0xC01E0354
STATUS_GRAPHICS_INCOMPATIBLE_PRIVATE_FORMAT = 0xC01E0355
STATUS_GRAPHICS_INVALID_MODE_PRUNING_ALGORITHM = 0xC01E0356
STATUS_GRAPHICS_INVALID_MONITOR_CAPABILITY_ORIGIN = 0xC01E0357
STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE_CONSTRAINT = 0xC01E0358
STATUS_GRAPHICS_MAX_NUM_PATHS_REACHED = 0xC01E0359
STATUS_GRAPHICS_CANCEL_VIDPN_TOPOLOGY_AUGMENTATION = 0xC01E035A
STATUS_GRAPHICS_INVALID_CLIENT_TYPE = 0xC01E035B
STATUS_GRAPHICS_CLIENTVIDPN_NOT_SET = 0xC01E035C
STATUS_GRAPHICS_SPECIFIED_CHILD_ALREADY_CONNECTED = 0xC01E0400
STATUS_GRAPHICS_CHILD_DESCRIPTOR_NOT_SUPPORTED = 0xC01E0401
STATUS_GRAPHICS_UNKNOWN_CHILD_STATUS = 0x401E042F
STATUS_GRAPHICS_NOT_A_LINKED_ADAPTER = 0xC01E0430
STATUS_GRAPHICS_LEADLINK_NOT_ENUMERATED = 0xC01E0431
STATUS_GRAPHICS_CHAINLINKS_NOT_ENUMERATED = 0xC01E0432
STATUS_GRAPHICS_ADAPTER_CHAIN_NOT_READY = 0xC01E0433
STATUS_GRAPHICS_CHAINLINKS_NOT_STARTED = 0xC01E0434
STATUS_GRAPHICS_CHAINLINKS_NOT_POWERED_ON = 0xC01E0435
STATUS_GRAPHICS_INCONSISTENT_DEVICE_LINK_STATE = 0xC01E0436
STATUS_GRAPHICS_LEADLINK_START_DEFERRED = 0x401E0437
STATUS_GRAPHICS_NOT_POST_DEVICE_DRIVER = 0xC01E0438
STATUS_GRAPHICS_POLLING_TOO_FREQUENTLY = 0x401E0439
STATUS_GRAPHICS_START_DEFERRED = 0x401E043A
STATUS_GRAPHICS_ADAPTER_ACCESS_NOT_EXCLUDED = 0xC01E043B
STATUS_GRAPHICS_OPM_NOT_SUPPORTED = 0xC01E0500
STATUS_GRAPHICS_COPP_NOT_SUPPORTED = 0xC01E0501
STATUS_GRAPHICS_UAB_NOT_SUPPORTED = 0xC01E0502
STATUS_GRAPHICS_OPM_INVALID_ENCRYPTED_PARAMETERS = 0xC01E0503
STATUS_GRAPHICS_OPM_NO_PROTECTED_OUTPUTS_EXIST = 0xC01E0505
STATUS_GRAPHICS_OPM_INTERNAL_ERROR = 0xC01E050B
STATUS_GRAPHICS_OPM_INVALID_HANDLE = 0xC01E050C
STATUS_GRAPHICS_PVP_INVALID_CERTIFICATE_LENGTH = 0xC01E050E
STATUS_GRAPHICS_OPM_SPANNING_MODE_ENABLED = 0xC01E050F
STATUS_GRAPHICS_OPM_THEATER_MODE_ENABLED = 0xC01E0510
STATUS_GRAPHICS_PVP_HFS_FAILED = 0xC01E0511
STATUS_GRAPHICS_OPM_INVALID_SRM = 0xC01E0512
STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_HDCP = 0xC01E0513
STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_ACP = 0xC01E0514
STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_CGMSA = 0xC01E0515
STATUS_GRAPHICS_OPM_HDCP_SRM_NEVER_SET = 0xC01E0516
STATUS_GRAPHICS_OPM_RESOLUTION_TOO_HIGH = 0xC01E0517
STATUS_GRAPHICS_OPM_ALL_HDCP_HARDWARE_ALREADY_IN_USE = 0xC01E0518
STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_NO_LONGER_EXISTS = 0xC01E051A
STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_DOES_NOT_HAVE_COPP_SEMANTICS = 0xC01E051C
STATUS_GRAPHICS_OPM_INVALID_INFORMATION_REQUEST = 0xC01E051D
STATUS_GRAPHICS_OPM_DRIVER_INTERNAL_ERROR = 0xC01E051E
STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_DOES_NOT_HAVE_OPM_SEMANTICS = 0xC01E051F
STATUS_GRAPHICS_OPM_SIGNALING_NOT_SUPPORTED = 0xC01E0520
STATUS_GRAPHICS_OPM_INVALID_CONFIGURATION_REQUEST = 0xC01E0521
STATUS_GRAPHICS_I2C_NOT_SUPPORTED = 0xC01E0580
STATUS_GRAPHICS_I2C_DEVICE_DOES_NOT_EXIST = 0xC01E0581
STATUS_GRAPHICS_I2C_ERROR_TRANSMITTING_DATA = 0xC01E0582
STATUS_GRAPHICS_I2C_ERROR_RECEIVING_DATA = 0xC01E0583
STATUS_GRAPHICS_DDCCI_VCP_NOT_SUPPORTED = 0xC01E0584
STATUS_GRAPHICS_DDCCI_INVALID_DATA = 0xC01E0585
STATUS_GRAPHICS_DDCCI_MONITOR_RETURNED_INVALID_TIMING_STATUS_BYTE = 0xC01E0586
STATUS_GRAPHICS_DDCCI_INVALID_CAPABILITIES_STRING = 0xC01E0587
STATUS_GRAPHICS_MCA_INTERNAL_ERROR = 0xC01E0588
STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_COMMAND = 0xC01E0589
STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_LENGTH = 0xC01E058A
STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_CHECKSUM = 0xC01E058B
STATUS_GRAPHICS_INVALID_PHYSICAL_MONITOR_HANDLE = 0xC01E058C
STATUS_GRAPHICS_MONITOR_NO_LONGER_EXISTS = 0xC01E058D
STATUS_GRAPHICS_ONLY_CONSOLE_SESSION_SUPPORTED = 0xC01E05E0
STATUS_GRAPHICS_NO_DISPLAY_DEVICE_CORRESPONDS_TO_NAME = 0xC01E05E1
STATUS_GRAPHICS_DISPLAY_DEVICE_NOT_ATTACHED_TO_DESKTOP = 0xC01E05E2
STATUS_GRAPHICS_MIRRORING_DEVICES_NOT_SUPPORTED = 0xC01E05E3
STATUS_GRAPHICS_INVALID_POINTER = 0xC01E05E4
STATUS_GRAPHICS_NO_MONITORS_CORRESPOND_TO_DISPLAY_DEVICE = 0xC01E05E5
STATUS_GRAPHICS_PARAMETER_ARRAY_TOO_SMALL = 0xC01E05E6
STATUS_GRAPHICS_INTERNAL_ERROR = 0xC01E05E7
STATUS_GRAPHICS_SESSION_TYPE_CHANGE_IN_PROGRESS = 0xC01E05E8
# --- Full Volume Encryption / BitLocker facility (0xC021xxxx, "FVE") ---
STATUS_FVE_LOCKED_VOLUME = 0xC0210000
STATUS_FVE_NOT_ENCRYPTED = 0xC0210001
STATUS_FVE_BAD_INFORMATION = 0xC0210002
STATUS_FVE_TOO_SMALL = 0xC0210003
STATUS_FVE_FAILED_WRONG_FS = 0xC0210004
STATUS_FVE_BAD_PARTITION_SIZE = 0xC0210005
STATUS_FVE_FS_NOT_EXTENDED = 0xC0210006
STATUS_FVE_FS_MOUNTED = 0xC0210007
STATUS_FVE_NO_LICENSE = 0xC0210008
STATUS_FVE_ACTION_NOT_ALLOWED = 0xC0210009
STATUS_FVE_BAD_DATA = 0xC021000A
STATUS_FVE_VOLUME_NOT_BOUND = 0xC021000B
STATUS_FVE_NOT_DATA_VOLUME = 0xC021000C
STATUS_FVE_CONV_READ_ERROR = 0xC021000D
STATUS_FVE_CONV_WRITE_ERROR = 0xC021000E
STATUS_FVE_OVERLAPPED_UPDATE = 0xC021000F
STATUS_FVE_FAILED_SECTOR_SIZE = 0xC0210010
STATUS_FVE_FAILED_AUTHENTICATION = 0xC0210011
STATUS_FVE_NOT_OS_VOLUME = 0xC0210012
STATUS_FVE_KEYFILE_NOT_FOUND = 0xC0210013
STATUS_FVE_KEYFILE_INVALID = 0xC0210014
STATUS_FVE_KEYFILE_NO_VMK = 0xC0210015
STATUS_FVE_TPM_DISABLED = 0xC0210016
STATUS_FVE_TPM_SRK_AUTH_NOT_ZERO = 0xC0210017
STATUS_FVE_TPM_INVALID_PCR = 0xC0210018
STATUS_FVE_TPM_NO_VMK = 0xC0210019
STATUS_FVE_PIN_INVALID = 0xC021001A
STATUS_FVE_AUTH_INVALID_APPLICATION = 0xC021001B
STATUS_FVE_AUTH_INVALID_CONFIG = 0xC021001C
STATUS_FVE_DEBUGGER_ENABLED = 0xC021001D
STATUS_FVE_DRY_RUN_FAILED = 0xC021001E
STATUS_FVE_BAD_METADATA_POINTER = 0xC021001F
STATUS_FVE_OLD_METADATA_COPY = 0xC0210020
STATUS_FVE_REBOOT_REQUIRED = 0xC0210021
STATUS_FVE_RAW_ACCESS = 0xC0210022
STATUS_FVE_RAW_BLOCKED = 0xC0210023
STATUS_FVE_NO_AUTOUNLOCK_MASTER_KEY = 0xC0210024
STATUS_FVE_MOR_FAILED = 0xC0210025
STATUS_FVE_NO_FEATURE_LICENSE = 0xC0210026
STATUS_FVE_POLICY_USER_DISABLE_RDV_NOT_ALLOWED = 0xC0210027
STATUS_FVE_CONV_RECOVERY_FAILED = 0xC0210028
STATUS_FVE_VIRTUALIZED_SPACE_TOO_BIG = 0xC0210029
STATUS_FVE_INVALID_DATUM_TYPE = 0xC021002A
STATUS_FVE_VOLUME_TOO_SMALL = 0xC0210030
STATUS_FVE_ENH_PIN_INVALID = 0xC0210031
# --- Windows Filtering Platform facility (0xC022xxxx, "FWP") ---
STATUS_FWP_CALLOUT_NOT_FOUND = 0xC0220001
STATUS_FWP_CONDITION_NOT_FOUND = 0xC0220002
STATUS_FWP_FILTER_NOT_FOUND = 0xC0220003
STATUS_FWP_LAYER_NOT_FOUND = 0xC0220004
STATUS_FWP_PROVIDER_NOT_FOUND = 0xC0220005
STATUS_FWP_PROVIDER_CONTEXT_NOT_FOUND = 0xC0220006
STATUS_FWP_SUBLAYER_NOT_FOUND = 0xC0220007
STATUS_FWP_NOT_FOUND = 0xC0220008
STATUS_FWP_ALREADY_EXISTS = 0xC0220009
STATUS_FWP_IN_USE = 0xC022000A
STATUS_FWP_DYNAMIC_SESSION_IN_PROGRESS = 0xC022000B
STATUS_FWP_WRONG_SESSION = 0xC022000C
STATUS_FWP_NO_TXN_IN_PROGRESS = 0xC022000D
STATUS_FWP_TXN_IN_PROGRESS = 0xC022000E
STATUS_FWP_TXN_ABORTED = 0xC022000F
STATUS_FWP_SESSION_ABORTED = 0xC0220010
STATUS_FWP_INCOMPATIBLE_TXN = 0xC0220011
STATUS_FWP_TIMEOUT = 0xC0220012
STATUS_FWP_NET_EVENTS_DISABLED = 0xC0220013
STATUS_FWP_INCOMPATIBLE_LAYER = 0xC0220014
STATUS_FWP_KM_CLIENTS_ONLY = 0xC0220015
STATUS_FWP_LIFETIME_MISMATCH = 0xC0220016
STATUS_FWP_BUILTIN_OBJECT = 0xC0220017
STATUS_FWP_TOO_MANY_CALLOUTS = 0xC0220018
STATUS_FWP_NOTIFICATION_DROPPED = 0xC0220019
STATUS_FWP_TRAFFIC_MISMATCH = 0xC022001A
STATUS_FWP_INCOMPATIBLE_SA_STATE = 0xC022001B
STATUS_FWP_NULL_POINTER = 0xC022001C
STATUS_FWP_INVALID_ENUMERATOR = 0xC022001D
STATUS_FWP_INVALID_FLAGS = 0xC022001E
STATUS_FWP_INVALID_NET_MASK = 0xC022001F
STATUS_FWP_INVALID_RANGE = 0xC0220020
STATUS_FWP_INVALID_INTERVAL = 0xC0220021
STATUS_FWP_ZERO_LENGTH_ARRAY = 0xC0220022
STATUS_FWP_NULL_DISPLAY_NAME = 0xC0220023
STATUS_FWP_INVALID_ACTION_TYPE = 0xC0220024
STATUS_FWP_INVALID_WEIGHT = 0xC0220025
STATUS_FWP_MATCH_TYPE_MISMATCH = 0xC0220026
STATUS_FWP_TYPE_MISMATCH = 0xC0220027
STATUS_FWP_OUT_OF_BOUNDS = 0xC0220028
STATUS_FWP_RESERVED = 0xC0220029
STATUS_FWP_DUPLICATE_CONDITION = 0xC022002A
STATUS_FWP_DUPLICATE_KEYMOD = 0xC022002B
STATUS_FWP_ACTION_INCOMPATIBLE_WITH_LAYER = 0xC022002C
STATUS_FWP_ACTION_INCOMPATIBLE_WITH_SUBLAYER = 0xC022002D
STATUS_FWP_CONTEXT_INCOMPATIBLE_WITH_LAYER = 0xC022002E
STATUS_FWP_CONTEXT_INCOMPATIBLE_WITH_CALLOUT = 0xC022002F
STATUS_FWP_INCOMPATIBLE_AUTH_METHOD = 0xC0220030
STATUS_FWP_INCOMPATIBLE_DH_GROUP = 0xC0220031
STATUS_FWP_EM_NOT_SUPPORTED = 0xC0220032
STATUS_FWP_NEVER_MATCH = 0xC0220033
STATUS_FWP_PROVIDER_CONTEXT_MISMATCH = 0xC0220034
STATUS_FWP_INVALID_PARAMETER = 0xC0220035
STATUS_FWP_TOO_MANY_SUBLAYERS = 0xC0220036
STATUS_FWP_CALLOUT_NOTIFICATION_FAILED = 0xC0220037
STATUS_FWP_INVALID_AUTH_TRANSFORM = 0xC0220038
STATUS_FWP_INVALID_CIPHER_TRANSFORM = 0xC0220039
STATUS_FWP_INCOMPATIBLE_CIPHER_TRANSFORM = 0xC022003A
STATUS_FWP_INVALID_TRANSFORM_COMBINATION = 0xC022003B
STATUS_FWP_DUPLICATE_AUTH_METHOD = 0xC022003C
STATUS_FWP_TCPIP_NOT_READY = 0xC0220100
STATUS_FWP_INJECT_HANDLE_CLOSING = 0xC0220101
STATUS_FWP_INJECT_HANDLE_STALE = 0xC0220102
STATUS_FWP_CANNOT_PEND = 0xC0220103
STATUS_FWP_DROP_NOICMP = 0xC0220104
# --- NDIS (Network Driver Interface Specification) facility (0x..23xxxx) ---
STATUS_NDIS_CLOSING = 0xC0230002
STATUS_NDIS_BAD_VERSION = 0xC0230004
STATUS_NDIS_BAD_CHARACTERISTICS = 0xC0230005
STATUS_NDIS_ADAPTER_NOT_FOUND = 0xC0230006
STATUS_NDIS_OPEN_FAILED = 0xC0230007
STATUS_NDIS_DEVICE_FAILED = 0xC0230008
STATUS_NDIS_MULTICAST_FULL = 0xC0230009
STATUS_NDIS_MULTICAST_EXISTS = 0xC023000A
STATUS_NDIS_MULTICAST_NOT_FOUND = 0xC023000B
STATUS_NDIS_REQUEST_ABORTED = 0xC023000C
STATUS_NDIS_RESET_IN_PROGRESS = 0xC023000D
STATUS_NDIS_NOT_SUPPORTED = 0xC02300BB
STATUS_NDIS_INVALID_PACKET = 0xC023000F
STATUS_NDIS_ADAPTER_NOT_READY = 0xC0230011
STATUS_NDIS_INVALID_LENGTH = 0xC0230014
STATUS_NDIS_INVALID_DATA = 0xC0230015
STATUS_NDIS_BUFFER_TOO_SHORT = 0xC0230016
STATUS_NDIS_INVALID_OID = 0xC0230017
STATUS_NDIS_ADAPTER_REMOVED = 0xC0230018
STATUS_NDIS_UNSUPPORTED_MEDIA = 0xC0230019
STATUS_NDIS_GROUP_ADDRESS_IN_USE = 0xC023001A
STATUS_NDIS_FILE_NOT_FOUND = 0xC023001B
STATUS_NDIS_ERROR_READING_FILE = 0xC023001C
STATUS_NDIS_ALREADY_MAPPED = 0xC023001D
STATUS_NDIS_RESOURCE_CONFLICT = 0xC023001E
STATUS_NDIS_MEDIA_DISCONNECTED = 0xC023001F
STATUS_NDIS_INVALID_ADDRESS = 0xC0230022
STATUS_NDIS_INVALID_DEVICE_REQUEST = 0xC0230010
STATUS_NDIS_PAUSED = 0xC023002A
STATUS_NDIS_INTERFACE_NOT_FOUND = 0xC023002B
STATUS_NDIS_UNSUPPORTED_REVISION = 0xC023002C
STATUS_NDIS_INVALID_PORT = 0xC023002D
STATUS_NDIS_INVALID_PORT_STATE = 0xC023002E
STATUS_NDIS_LOW_POWER_STATE = 0xC023002F
STATUS_NDIS_DOT11_AUTO_CONFIG_ENABLED = 0xC0232000
STATUS_NDIS_DOT11_MEDIA_IN_USE = 0xC0232001
STATUS_NDIS_DOT11_POWER_STATE_INVALID = 0xC0232002
STATUS_NDIS_PM_WOL_PATTERN_LIST_FULL = 0xC0232003
STATUS_NDIS_PM_PROTOCOL_OFFLOAD_LIST_FULL = 0xC0232004
STATUS_NDIS_INDICATION_REQUIRED = 0x40230001
STATUS_NDIS_OFFLOAD_POLICY = 0xC023100F
STATUS_NDIS_OFFLOAD_CONNECTION_REJECTED = 0xC0231012
STATUS_NDIS_OFFLOAD_PATH_REJECTED = 0xC0231013
# --- Hyper-V hypervisor facility (0xC035xxxx, "HV"): hypercall errors ---
STATUS_HV_INVALID_HYPERCALL_CODE = 0xC0350002
STATUS_HV_INVALID_HYPERCALL_INPUT = 0xC0350003
STATUS_HV_INVALID_ALIGNMENT = 0xC0350004
STATUS_HV_INVALID_PARAMETER = 0xC0350005
STATUS_HV_ACCESS_DENIED = 0xC0350006
STATUS_HV_INVALID_PARTITION_STATE = 0xC0350007
STATUS_HV_OPERATION_DENIED = 0xC0350008
STATUS_HV_UNKNOWN_PROPERTY = 0xC0350009
STATUS_HV_PROPERTY_VALUE_OUT_OF_RANGE = 0xC035000A
STATUS_HV_INSUFFICIENT_MEMORY = 0xC035000B
STATUS_HV_PARTITION_TOO_DEEP = 0xC035000C
STATUS_HV_INVALID_PARTITION_ID = 0xC035000D
STATUS_HV_INVALID_VP_INDEX = 0xC035000E
STATUS_HV_INVALID_PORT_ID = 0xC0350011
STATUS_HV_INVALID_CONNECTION_ID = 0xC0350012
STATUS_HV_INSUFFICIENT_BUFFERS = 0xC0350013
STATUS_HV_NOT_ACKNOWLEDGED = 0xC0350014
STATUS_HV_ACKNOWLEDGED = 0xC0350016
STATUS_HV_INVALID_SAVE_RESTORE_STATE = 0xC0350017
STATUS_HV_INVALID_SYNIC_STATE = 0xC0350018
STATUS_HV_OBJECT_IN_USE = 0xC0350019
STATUS_HV_INVALID_PROXIMITY_DOMAIN_INFO = 0xC035001A
STATUS_HV_NO_DATA = 0xC035001B
STATUS_HV_INACTIVE = 0xC035001C
STATUS_HV_NO_RESOURCES = 0xC035001D
STATUS_HV_FEATURE_UNAVAILABLE = 0xC035001E
STATUS_HV_NOT_PRESENT = 0xC0351000
# --- Virtualization Infrastructure Driver facility (0x..37xxxx, "VID") ---
STATUS_VID_DUPLICATE_HANDLER = 0xC0370001
STATUS_VID_TOO_MANY_HANDLERS = 0xC0370002
STATUS_VID_QUEUE_FULL = 0xC0370003
STATUS_VID_HANDLER_NOT_PRESENT = 0xC0370004
STATUS_VID_INVALID_OBJECT_NAME = 0xC0370005
STATUS_VID_PARTITION_NAME_TOO_LONG = 0xC0370006
STATUS_VID_MESSAGE_QUEUE_NAME_TOO_LONG = 0xC0370007
STATUS_VID_PARTITION_ALREADY_EXISTS = 0xC0370008
STATUS_VID_PARTITION_DOES_NOT_EXIST = 0xC0370009
STATUS_VID_PARTITION_NAME_NOT_FOUND = 0xC037000A
STATUS_VID_MESSAGE_QUEUE_ALREADY_EXISTS = 0xC037000B
STATUS_VID_EXCEEDED_MBP_ENTRY_MAP_LIMIT = 0xC037000C
STATUS_VID_MB_STILL_REFERENCED = 0xC037000D
STATUS_VID_CHILD_GPA_PAGE_SET_CORRUPTED = 0xC037000E
STATUS_VID_INVALID_NUMA_SETTINGS = 0xC037000F
STATUS_VID_INVALID_NUMA_NODE_INDEX = 0xC0370010
STATUS_VID_NOTIFICATION_QUEUE_ALREADY_ASSOCIATED = 0xC0370011
STATUS_VID_INVALID_MEMORY_BLOCK_HANDLE = 0xC0370012
STATUS_VID_PAGE_RANGE_OVERFLOW = 0xC0370013
STATUS_VID_INVALID_MESSAGE_QUEUE_HANDLE = 0xC0370014
STATUS_VID_INVALID_GPA_RANGE_HANDLE = 0xC0370015
STATUS_VID_NO_MEMORY_BLOCK_NOTIFICATION_QUEUE = 0xC0370016
STATUS_VID_MEMORY_BLOCK_LOCK_COUNT_EXCEEDED = 0xC0370017
STATUS_VID_INVALID_PPM_HANDLE = 0xC0370018
STATUS_VID_MBPS_ARE_LOCKED = 0xC0370019
STATUS_VID_MESSAGE_QUEUE_CLOSED = 0xC037001A
STATUS_VID_VIRTUAL_PROCESSOR_LIMIT_EXCEEDED = 0xC037001B
STATUS_VID_STOP_PENDING = 0xC037001C
STATUS_VID_INVALID_PROCESSOR_STATE = 0xC037001D
STATUS_VID_EXCEEDED_KM_CONTEXT_COUNT_LIMIT = 0xC037001E
STATUS_VID_KM_INTERFACE_ALREADY_INITIALIZED = 0xC037001F
STATUS_VID_MB_PROPERTY_ALREADY_SET_RESET = 0xC0370020
STATUS_VID_MMIO_RANGE_DESTROYED = 0xC0370021
STATUS_VID_INVALID_CHILD_GPA_PAGE_SET = 0xC0370022
STATUS_VID_RESERVE_PAGE_SET_IS_BEING_USED = 0xC0370023
STATUS_VID_RESERVE_PAGE_SET_TOO_SMALL = 0xC0370024
STATUS_VID_MBP_ALREADY_LOCKED_USING_RESERVED_PAGE = 0xC0370025
STATUS_VID_MBP_COUNT_EXCEEDED_LIMIT = 0xC0370026
STATUS_VID_SAVED_STATE_CORRUPT = 0xC0370027
STATUS_VID_SAVED_STATE_UNRECOGNIZED_ITEM = 0xC0370028
STATUS_VID_SAVED_STATE_INCOMPATIBLE = 0xC0370029
STATUS_VID_REMOTE_NODE_PARENT_GPA_PAGES_USED = 0x80370001
STATUS_IPSEC_BAD_SPI = 0xC0360001
STATUS_IPSEC_SA_LIFETIME_EXPIRED = 0xC0360002
STATUS_IPSEC_WRONG_SA = 0xC0360003
STATUS_IPSEC_REPLAY_CHECK_FAILED = 0xC0360004
STATUS_IPSEC_INVALID_PACKET = 0xC0360005
STATUS_IPSEC_INTEGRITY_CHECK_FAILED = 0xC0360006
STATUS_IPSEC_CLEAR_TEXT_DROP = 0xC0360007
STATUS_IPSEC_AUTH_FIREWALL_DROP = 0xC0360008
STATUS_IPSEC_THROTTLE_DROP = 0xC0360009
STATUS_IPSEC_DOSP_BLOCK = 0xC0368000
STATUS_IPSEC_DOSP_RECEIVED_MULTICAST = 0xC0368001
STATUS_IPSEC_DOSP_INVALID_PACKET = 0xC0368002
STATUS_IPSEC_DOSP_STATE_LOOKUP_FAILED = 0xC0368003
STATUS_IPSEC_DOSP_MAX_ENTRIES = 0xC0368004
STATUS_IPSEC_DOSP_KEYMOD_NOT_ALLOWED = 0xC0368005
STATUS_IPSEC_DOSP_MAX_PER_IP_RATELIMIT_QUEUES = 0xC0368006
STATUS_VOLMGR_INCOMPLETE_REGENERATION = 0x80380001
STATUS_VOLMGR_INCOMPLETE_DISK_MIGRATION = 0x80380002
STATUS_VOLMGR_DATABASE_FULL = 0xC0380001
STATUS_VOLMGR_DISK_CONFIGURATION_CORRUPTED = 0xC0380002
STATUS_VOLMGR_DISK_CONFIGURATION_NOT_IN_SYNC = 0xC0380003
STATUS_VOLMGR_PACK_CONFIG_UPDATE_FAILED = 0xC0380004
STATUS_VOLMGR_DISK_CONTAINS_NON_SIMPLE_VOLUME = 0xC0380005
STATUS_VOLMGR_DISK_DUPLICATE = 0xC0380006
STATUS_VOLMGR_DISK_DYNAMIC = 0xC0380007
STATUS_VOLMGR_DISK_ID_INVALID = 0xC0380008
STATUS_VOLMGR_DISK_INVALID = 0xC0380009
STATUS_VOLMGR_DISK_LAST_VOTER = 0xC038000A
STATUS_VOLMGR_DISK_LAYOUT_INVALID = 0xC038000B
STATUS_VOLMGR_DISK_LAYOUT_NON_BASIC_BETWEEN_BASIC_PARTITIONS = 0xC038000C
STATUS_VOLMGR_DISK_LAYOUT_NOT_CYLINDER_ALIGNED = 0xC038000D
STATUS_VOLMGR_DISK_LAYOUT_PARTITIONS_TOO_SMALL = 0xC038000E
STATUS_VOLMGR_DISK_LAYOUT_PRIMARY_BETWEEN_LOGICAL_PARTITIONS = 0xC038000F
STATUS_VOLMGR_DISK_LAYOUT_TOO_MANY_PARTITIONS = 0xC0380010
STATUS_VOLMGR_DISK_MISSING = 0xC0380011
STATUS_VOLMGR_DISK_NOT_EMPTY = 0xC0380012
STATUS_VOLMGR_DISK_NOT_ENOUGH_SPACE = 0xC0380013
STATUS_VOLMGR_DISK_REVECTORING_FAILED = 0xC0380014
STATUS_VOLMGR_DISK_SECTOR_SIZE_INVALID = 0xC0380015
STATUS_VOLMGR_DISK_SET_NOT_CONTAINED = 0xC0380016
STATUS_VOLMGR_DISK_USED_BY_MULTIPLE_MEMBERS = 0xC0380017
STATUS_VOLMGR_DISK_USED_BY_MULTIPLE_PLEXES = 0xC0380018
STATUS_VOLMGR_DYNAMIC_DISK_NOT_SUPPORTED = 0xC0380019
STATUS_VOLMGR_EXTENT_ALREADY_USED = 0xC038001A
STATUS_VOLMGR_EXTENT_NOT_CONTIGUOUS = 0xC038001B
STATUS_VOLMGR_EXTENT_NOT_IN_PUBLIC_REGION = 0xC038001C
STATUS_VOLMGR_EXTENT_NOT_SECTOR_ALIGNED = 0xC038001D
STATUS_VOLMGR_EXTENT_OVERLAPS_EBR_PARTITION = 0xC038001E
STATUS_VOLMGR_EXTENT_VOLUME_LENGTHS_DO_NOT_MATCH = 0xC038001F
STATUS_VOLMGR_FAULT_TOLERANT_NOT_SUPPORTED = 0xC0380020
STATUS_VOLMGR_INTERLEAVE_LENGTH_INVALID = 0xC0380021
STATUS_VOLMGR_MAXIMUM_REGISTERED_USERS = 0xC0380022
STATUS_VOLMGR_MEMBER_IN_SYNC = 0xC0380023
STATUS_VOLMGR_MEMBER_INDEX_DUPLICATE = 0xC0380024
STATUS_VOLMGR_MEMBER_INDEX_INVALID = 0xC0380025
STATUS_VOLMGR_MEMBER_MISSING = 0xC0380026
STATUS_VOLMGR_MEMBER_NOT_DETACHED = 0xC0380027
STATUS_VOLMGR_MEMBER_REGENERATING = 0xC0380028
STATUS_VOLMGR_ALL_DISKS_FAILED = 0xC0380029
STATUS_VOLMGR_NO_REGISTERED_USERS = 0xC038002A
STATUS_VOLMGR_NO_SUCH_USER = 0xC038002B
STATUS_VOLMGR_NOTIFICATION_RESET = 0xC038002C
STATUS_VOLMGR_NUMBER_OF_MEMBERS_INVALID = 0xC038002D
STATUS_VOLMGR_NUMBER_OF_PLEXES_INVALID = 0xC038002E
STATUS_VOLMGR_PACK_DUPLICATE = 0xC038002F
STATUS_VOLMGR_PACK_ID_INVALID = 0xC0380030
STATUS_VOLMGR_PACK_INVALID = 0xC0380031
STATUS_VOLMGR_PACK_NAME_INVALID = 0xC0380032
STATUS_VOLMGR_PACK_OFFLINE = 0xC0380033
STATUS_VOLMGR_PACK_HAS_QUORUM = 0xC0380034
STATUS_VOLMGR_PACK_WITHOUT_QUORUM = 0xC0380035
STATUS_VOLMGR_PARTITION_STYLE_INVALID = 0xC0380036
STATUS_VOLMGR_PARTITION_UPDATE_FAILED = 0xC0380037
STATUS_VOLMGR_PLEX_IN_SYNC = 0xC0380038
STATUS_VOLMGR_PLEX_INDEX_DUPLICATE = 0xC0380039
STATUS_VOLMGR_PLEX_INDEX_INVALID = 0xC038003A
STATUS_VOLMGR_PLEX_LAST_ACTIVE = 0xC038003B
STATUS_VOLMGR_PLEX_MISSING = 0xC038003C
STATUS_VOLMGR_PLEX_REGENERATING = 0xC038003D
STATUS_VOLMGR_PLEX_TYPE_INVALID = 0xC038003E
STATUS_VOLMGR_PLEX_NOT_RAID5 = 0xC038003F
STATUS_VOLMGR_PLEX_NOT_SIMPLE = 0xC0380040
STATUS_VOLMGR_STRUCTURE_SIZE_INVALID = 0xC0380041
STATUS_VOLMGR_TOO_MANY_NOTIFICATION_REQUESTS = 0xC0380042
STATUS_VOLMGR_TRANSACTION_IN_PROGRESS = 0xC0380043
STATUS_VOLMGR_UNEXPECTED_DISK_LAYOUT_CHANGE = 0xC0380044
STATUS_VOLMGR_VOLUME_CONTAINS_MISSING_DISK = 0xC0380045
STATUS_VOLMGR_VOLUME_ID_INVALID = 0xC0380046
STATUS_VOLMGR_VOLUME_LENGTH_INVALID = 0xC0380047
STATUS_VOLMGR_VOLUME_LENGTH_NOT_SECTOR_SIZE_MULTIPLE = 0xC0380048
STATUS_VOLMGR_VOLUME_NOT_MIRRORED = 0xC0380049
STATUS_VOLMGR_VOLUME_NOT_RETAINED = 0xC038004A
STATUS_VOLMGR_VOLUME_OFFLINE = 0xC038004B
STATUS_VOLMGR_VOLUME_RETAINED = 0xC038004C
STATUS_VOLMGR_NUMBER_OF_EXTENTS_INVALID = 0xC038004D
STATUS_VOLMGR_DIFFERENT_SECTOR_SIZE = 0xC038004E
STATUS_VOLMGR_BAD_BOOT_DISK = 0xC038004F
STATUS_VOLMGR_PACK_CONFIG_OFFLINE = 0xC0380050
STATUS_VOLMGR_PACK_CONFIG_ONLINE = 0xC0380051
STATUS_VOLMGR_NOT_PRIMARY_PACK = 0xC0380052
STATUS_VOLMGR_PACK_LOG_UPDATE_FAILED = 0xC0380053
STATUS_VOLMGR_NUMBER_OF_DISKS_IN_PLEX_INVALID = 0xC0380054
STATUS_VOLMGR_NUMBER_OF_DISKS_IN_MEMBER_INVALID = 0xC0380055
STATUS_VOLMGR_VOLUME_MIRRORED = 0xC0380056
STATUS_VOLMGR_PLEX_NOT_SIMPLE_SPANNED = 0xC0380057
STATUS_VOLMGR_NO_VALID_LOG_COPIES = 0xC0380058
STATUS_VOLMGR_PRIMARY_PACK_PRESENT = 0xC0380059
STATUS_VOLMGR_NUMBER_OF_DISKS_INVALID = 0xC038005A
STATUS_VOLMGR_MIRROR_NOT_SUPPORTED = 0xC038005B
STATUS_VOLMGR_RAID5_NOT_SUPPORTED = 0xC038005C
STATUS_BCD_NOT_ALL_ENTRIES_IMPORTED = 0x80390001
STATUS_BCD_TOO_MANY_ELEMENTS = 0xC0390002
STATUS_BCD_NOT_ALL_ENTRIES_SYNCHRONIZED = 0x80390003
STATUS_VHD_DRIVE_FOOTER_MISSING = 0xC03A0001
STATUS_VHD_DRIVE_FOOTER_CHECKSUM_MISMATCH = 0xC03A0002
STATUS_VHD_DRIVE_FOOTER_CORRUPT = 0xC03A0003
STATUS_VHD_FORMAT_UNKNOWN = 0xC03A0004
STATUS_VHD_FORMAT_UNSUPPORTED_VERSION = 0xC03A0005
STATUS_VHD_SPARSE_HEADER_CHECKSUM_MISMATCH = 0xC03A0006
STATUS_VHD_SPARSE_HEADER_UNSUPPORTED_VERSION = 0xC03A0007
STATUS_VHD_SPARSE_HEADER_CORRUPT = 0xC03A0008
STATUS_VHD_BLOCK_ALLOCATION_FAILURE = 0xC03A0009
STATUS_VHD_BLOCK_ALLOCATION_TABLE_CORRUPT = 0xC03A000A
STATUS_VHD_INVALID_BLOCK_SIZE = 0xC03A000B
STATUS_VHD_BITMAP_MISMATCH = 0xC03A000C
STATUS_VHD_PARENT_VHD_NOT_FOUND = 0xC03A000D
STATUS_VHD_CHILD_PARENT_ID_MISMATCH = 0xC03A000E
STATUS_VHD_CHILD_PARENT_TIMESTAMP_MISMATCH = 0xC03A000F
STATUS_VHD_METADATA_READ_FAILURE = 0xC03A0010
STATUS_VHD_METADATA_WRITE_FAILURE = 0xC03A0011
STATUS_VHD_INVALID_SIZE = 0xC03A0012
STATUS_VHD_INVALID_FILE_SIZE = 0xC03A0013
STATUS_VIRTDISK_PROVIDER_NOT_FOUND = 0xC03A0014
STATUS_VIRTDISK_NOT_VIRTUAL_DISK = 0xC03A0015
STATUS_VHD_PARENT_VHD_ACCESS_DENIED = 0xC03A0016
STATUS_VHD_CHILD_PARENT_SIZE_MISMATCH = 0xC03A0017
STATUS_VHD_DIFFERENCING_CHAIN_CYCLE_DETECTED = 0xC03A0018
STATUS_VHD_DIFFERENCING_CHAIN_ERROR_IN_PARENT = 0xC03A0019
STATUS_VIRTUAL_DISK_LIMITATION = 0xC03A001A
STATUS_VHD_INVALID_TYPE = 0xC03A001B
STATUS_VHD_INVALID_STATE = 0xC03A001C
STATUS_VIRTDISK_UNSUPPORTED_DISK_SECTOR_SIZE = 0xC03A001D
STATUS_QUERY_STORAGE_ERROR = 0x803A0001
STATUS_DIS_NOT_PRESENT = 0xC03C0001
STATUS_DIS_ATTRIBUTE_NOT_FOUND = 0xC03C0002
STATUS_DIS_UNRECOGNIZED_ATTRIBUTE = 0xC03C0003
STATUS_DIS_PARTIAL_DATA = 0xC03C0004
| StarcoderdataPython |
3201691 | <filename>Beginner/8/primeNumberChecker.py
# EXERCISE 2 : Prime Number Checker
def prime_checker(number):
    """Report on stdout whether *number* is prime.

    Trial division stops as soon as the divisor squared exceeds the
    number, so only O(sqrt(n)) candidates are examined.
    """
    def _is_prime(candidate):
        # Values below 2 are not prime by definition.
        if candidate <= 1:
            return False
        divisor = 2
        while divisor * divisor <= candidate:
            if candidate % divisor == 0:
                return False
            divisor += 1
        return True

    if _is_prime(number):
        print(f"{number} is a prime number")
    else:
        print(f"{number} is not a prime number")
prime_checker(number = int(input("Check this number: "))) | StarcoderdataPython |
4821053 | <filename>yatube/posts/migrations/0009_remove_post_group.py
# Generated by Django 2.2.19 on 2022-03-27 12:15
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the ``group`` foreign key from the ``post`` model."""
    # Must run after the previous posts migration.
    dependencies = [
        ('posts', '0008_auto_20220327_1713'),
    ]
    operations = [
        # Removes the column; any data stored in it is lost on migrate.
        migrations.RemoveField(
            model_name='post',
            name='group',
        ),
    ]
| StarcoderdataPython |
68538 | # Generated by Django 2.0.5 on 2018-05-22 14:36
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the admin plural label of ``contactnumbers`` to "Contact Points".

    Metadata-only change (``Meta.verbose_name_plural``); no schema change.
    """
    dependencies = [
        ('home', '0009_auto_20180522_1409'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='contactnumbers',
            options={'verbose_name_plural': 'Contact Points'},
        ),
    ]
| StarcoderdataPython |
3394490 | <filename>tests/test_base_model.py
#!/usr/bin/python3
"""test for BaseModel"""
import unittest
import os
from models.base_model import BaseModel
import pep8
class TestBaseModel(unittest.TestCase):
    """Unit tests for BaseModel.

    Fix: the original hooks were named ``setclass``/``teardown``/``teardown2``
    and the checks lacked the ``test_`` prefix, so unittest never ran the
    fixtures and collected zero tests.  They are renamed to the standard
    ``setUpClass``/``tearDownClass``/``tearDown`` hooks and ``test_*`` methods
    so the runner actually executes them; the bodies are unchanged.
    """
    @classmethod
    def setUpClass(cls):
        """Create one shared BaseModel fixture for the whole class."""
        cls.base = BaseModel()
        cls.base.name = "Kev"
        cls.base.num = 20
    @classmethod
    def tearDownClass(cls):
        """Drop the shared fixture after all tests have run."""
        del cls.base
    def tearDown(self):
        """Remove the JSON store a test may have written."""
        try:
            os.remove("file.json")
        except Exception:
            pass
    def test_base_model_methods(self):
        """BaseModel exposes __init__, save and to_dict."""
        self.assertTrue(hasattr(BaseModel, "__init__"))
        self.assertTrue(hasattr(BaseModel, "save"))
        self.assertTrue(hasattr(BaseModel, "to_dict"))
    def test_init_base(self):
        """The fixture is an instance of BaseModel."""
        self.assertTrue(isinstance(self.base, BaseModel))
    def test_save_base(self):
        """save() bumps updated_at away from created_at."""
        self.base.save()
        self.assertNotEqual(self.base.created_at, self.base.updated_at)
    def test_to_dict_base(self):
        """to_dict() serialises the datetime fields to strings."""
        base_dict = self.base.to_dict()
        self.assertEqual(self.base.__class__.__name__, 'BaseModel')
        self.assertIsInstance(base_dict['created_at'], str)
        self.assertIsInstance(base_dict['updated_at'], str)
    def test_pep8_base(self):
        """The BaseModel module is pep8 clean."""
        style = pep8.StyleGuide(quiet=True)
        p = style.check_files(['models/base_model.py'])
        self.assertEqual(p.total_errors, 0, "fix pep8")
    def test_docstring_base(self):
        """All public BaseModel callables carry docstrings."""
        self.assertIsNotNone(BaseModel.__doc__)
        self.assertIsNotNone(BaseModel.__init__.__doc__)
        self.assertIsNotNone(BaseModel.__str__.__doc__)
        self.assertIsNotNone(BaseModel.save.__doc__)
        self.assertIsNotNone(BaseModel.to_dict.__doc__)
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
91659 | <gh_stars>10-100
import sys
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
import qdarkstyle
from PyQt5.QtSql import *
import hashlib
class SignUpWidget(QWidget):
    """Sign-up form for the library management system.

    Collects a student id ("PB" followed by 8 digits), a name and a
    password, validates the input and inserts a new row into the ``user``
    table of the SQLite database.  Emits ``student_signup_signal`` with
    the student id after a successful registration.
    """
    # Emitted with the newly registered student id after a successful insert.
    student_signup_signal = pyqtSignal(str)
    def __init__(self):
        super().__init__()
        self.setUpUI()
    def setUpUI(self):
        """Build the widget layout, input validators and signal wiring."""
        self.resize(900, 600)
        self.setWindowTitle("欢迎登陆图书馆管理系统")
        self.signUpLabel = QLabel("注 册")
        self.signUpLabel.setAlignment(Qt.AlignCenter)
        # self.signUpLabel.setFixedWidth(300)
        self.signUpLabel.setFixedHeight(100)
        font = QFont()
        font.setPixelSize(36)
        lineEditFont = QFont()
        lineEditFont.setPixelSize(16)
        self.signUpLabel.setFont(font)
        self.layout = QVBoxLayout()
        self.layout.addWidget(self.signUpLabel, Qt.AlignHCenter)
        self.setLayout(self.layout)
        # Form rows: student id, name, password, password confirmation.
        self.formlayout = QFormLayout()
        font.setPixelSize(18)
        # Row1
        self.studentIdLabel = QLabel("学 号: ")
        self.studentIdLabel.setFont(font)
        self.studentIdLineEdit = QLineEdit()
        self.studentIdLineEdit.setFixedWidth(180)
        self.studentIdLineEdit.setFixedHeight(32)
        self.studentIdLineEdit.setFont(lineEditFont)
        self.studentIdLineEdit.setMaxLength(10)
        self.formlayout.addRow(self.studentIdLabel, self.studentIdLineEdit)
        # Row2
        self.studentNameLabel = QLabel("姓 名: ")
        self.studentNameLabel.setFont(font)
        self.studentNameLineEdit = QLineEdit()
        self.studentNameLineEdit.setFixedHeight(32)
        self.studentNameLineEdit.setFixedWidth(180)
        self.studentNameLineEdit.setFont(lineEditFont)
        self.studentNameLineEdit.setMaxLength(10)
        self.formlayout.addRow(self.studentNameLabel, self.studentNameLineEdit)
        lineEditFont.setPixelSize(10)
        # Row3
        self.passwordLabel = QLabel("密 码: ")
        self.passwordLabel.setFont(font)
        self.passwordLineEdit = QLineEdit()
        self.passwordLineEdit.setFixedWidth(180)
        self.passwordLineEdit.setFixedHeight(32)
        self.passwordLineEdit.setFont(lineEditFont)
        self.passwordLineEdit.setEchoMode(QLineEdit.Password)
        self.passwordLineEdit.setMaxLength(16)
        self.formlayout.addRow(self.passwordLabel, self.passwordLineEdit)
        # Row4
        self.passwordConfirmLabel = QLabel("确认密码: ")
        self.passwordConfirmLabel.setFont(font)
        self.passwordConfirmLineEdit = QLineEdit()
        self.passwordConfirmLineEdit.setFixedWidth(180)
        self.passwordConfirmLineEdit.setFixedHeight(32)
        self.passwordConfirmLineEdit.setFont(lineEditFont)
        self.passwordConfirmLineEdit.setEchoMode(QLineEdit.Password)
        self.passwordConfirmLineEdit.setMaxLength(16)
        self.formlayout.addRow(self.passwordConfirmLabel, self.passwordConfirmLineEdit)
        # Row5
        self.signUpbutton = QPushButton("注 册")
        self.signUpbutton.setFixedWidth(120)
        self.signUpbutton.setFixedHeight(30)
        self.signUpbutton.setFont(font)
        self.formlayout.addRow("", self.signUpbutton)
        widget = QWidget()
        widget.setLayout(self.formlayout)
        widget.setFixedHeight(250)
        widget.setFixedWidth(300)
        self.Hlayout = QHBoxLayout()
        self.Hlayout.addWidget(widget, Qt.AlignCenter)
        widget = QWidget()
        widget.setLayout(self.Hlayout)
        self.layout.addWidget(widget, Qt.AlignHCenter)
        # Input validation: student id must be "PB" followed by 8 digits.
        # (fixed: the class was mistyped as [0~9], which matches only the
        # literal characters '0', '~' and '9', not the digit range)
        reg = QRegExp("PB[0-9]{8}")
        pValidator = QRegExpValidator(self)
        pValidator.setRegExp(reg)
        self.studentIdLineEdit.setValidator(pValidator)
        # Passwords are alphanumeric only.
        # (fixed: [a-zA-z] also spans the punctuation between 'Z' and 'a')
        reg = QRegExp("[a-zA-Z0-9]+$")
        pValidator.setRegExp(reg)
        self.passwordLineEdit.setValidator(pValidator)
        self.passwordConfirmLineEdit.setValidator(pValidator)
        # Submit either via the button or by pressing Return in any field.
        self.signUpbutton.clicked.connect(self.SignUp)
        self.studentIdLineEdit.returnPressed.connect(self.SignUp)
        self.studentNameLineEdit.returnPressed.connect(self.SignUp)
        self.passwordLineEdit.returnPressed.connect(self.SignUp)
        self.passwordConfirmLineEdit.returnPressed.connect(self.SignUp)
    def SignUp(self):
        """Validate the form and insert the new account into the user table."""
        studentId = self.studentIdLineEdit.text()
        studentName = self.studentNameLineEdit.text()
        password = self.passwordLineEdit.text()
        confirmPassword = self.passwordConfirmLineEdit.text()
        if (studentId == "" or studentName == "" or password == "" or confirmPassword == ""):
            print(QMessageBox.warning(self, "警告", "表单不可为空,请重新输入", QMessageBox.Yes, QMessageBox.Yes))
            return
        else: # Handle: 1. account already exists; 2. password mismatch; 3. insert into user table
            db = QSqlDatabase.addDatabase("QSQLITE")
            db.setDatabaseName('./db/LibraryManagement.db')
            db.open()
            query = QSqlQuery()
            if (confirmPassword != password):
                print(QMessageBox.warning(self, "警告", "两次输入密码不一致,请重新输入", QMessageBox.Yes, QMessageBox.Yes))
                return
            elif (confirmPassword == password):
                # Hash the password with md5 before storing it.
                # NOTE(review): md5 is not a safe password hash, and the SQL
                # below is built with string formatting (injection risk) —
                # consider QSqlQuery.prepare()/bindValue() and a real KDF.
                hl = hashlib.md5()
                hl.update(password.encode(encoding='utf-8'))
                md5password = hl.hexdigest()
                sql = "SELECT * FROM user WHERE StudentId='%s'" % (studentId)
                query.exec_(sql)
                if (query.next()):
                    print(QMessageBox.warning(self, "警告", "该账号已存在,请重新输入", QMessageBox.Yes, QMessageBox.Yes))
                    return
                else:
                    sql = "INSERT INTO user VALUES ('%s','%s','%s',0,0,0)" % (
                        studentId, studentName, md5password)
                    db.exec_(sql)
                    db.commit()
                    print(QMessageBox.information(self, "提醒", "您已成功注册账号!", QMessageBox.Yes, QMessageBox.Yes))
                    self.student_signup_signal.emit(studentId)
            db.close()
            return
# Manual test entry point: launch the sign-up form as a standalone window.
if __name__ == "__main__":
    app = QApplication(sys.argv)
    app.setWindowIcon(QIcon("./images/MainWindow_1.png"))
    # Apply the shared dark theme used by the rest of the application.
    app.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
    mainMindow = SignUpWidget()
    mainMindow.show()
    sys.exit(app.exec_())
| StarcoderdataPython |
1696726 | import contextlib
import six
@contextlib.contextmanager
def open_filename(*args, **kwargs):
    """A context manager for open(..) for a filename OR a file-like object.

    If the first positional argument is a string it is treated as a path
    and opened/closed exactly like ``open(...)``.  Otherwise it is assumed
    to be a ready-to-use file-like object and yielded as-is; pass
    ``closing=True`` to have it closed when the block exits.
    """
    # Fix: pop our own keyword so it is never forwarded to open(), which
    # previously raised "TypeError: 'closing' is an invalid keyword" for
    # filename callers that passed closing=...
    closing = kwargs.pop('closing', False)
    # str covers all text paths on Python 3 (six.string_types is (str,)).
    if isinstance(args[0], str):
        with open(*args, **kwargs) as fh:
            yield fh
    else:
        try:
            yield args[0]
        finally:
            # Close even when the with-body raised, mirroring open().
            if closing:
                args[0].close()
| StarcoderdataPython |
6661 | <gh_stars>1-10
'''
Created on 2011-6-22
@author: dholer
'''
| StarcoderdataPython |
3302121 | <gh_stars>1-10
from django.template.defaultfilters import date as _date
from django.utils.translation import get_language, ugettext_lazy as _
def daterange(df, dt):
lng = get_language()
if lng.startswith("de"):
if df.year == dt.year and df.month == dt.month:
return "{}.–{}".format(_date(df, "j"), _date(dt, "j. F Y"))
elif df.year == dt.year:
return "{} – {}".format(_date(df, "j. F"), _date(dt, "j. F Y"))
elif lng.startswith("en"):
if df.year == dt.year and df.month == dt.month:
return "{} – {}".format(_date(df, "N jS"), _date(dt, "jS, Y"))
elif df.year == dt.year:
return "{} – {}".format(_date(df, "N jS"), _date(dt, "N jS, Y"))
return _("{date_from} – {date_to}").format(
date_from=_date(df, "DATE_FORMAT"), date_to=_date(dt, "DATE_FORMAT")
)
| StarcoderdataPython |
3326306 | from typing import Any, Dict
from flask import Flask, current_app
from pynamodb.connection import Connection
from flask_pynamodb.model import Model as ModelClass
__version__ = "0.0.2"
# Flask config keys recognised by this extension.  Each key is copied into
# the model's shared config by init_app(), renamed via _convert_key()
# (drop the "DYNAMODB_" prefix, lowercase the rest).
DYNAMODB_SETTINGS = (
    "DYNAMODB_REGION",
    "DYNAMODB_HOST",
    "DYNAMODB_CONNECT_TIMEOUT_SECONDS",
    "DYNAMODB_READ_TIMEOUT_SECONDS",
    "DYNAMODB_BASE_BACKOFF_MS",
    "DYNAMODB_MAX_RETRY_ATTEMPTS",
    "DYNAMODB_MAX_POOL_CONNECTIONS",
    "DYNAMODB_EXTRA_HEADERS",
    "DYNAMODB_AWS_ACCESS_KEY_ID",
    "DYNAMODB_AWS_SECRET_ACCESS_KEY",
    "DYNAMODB_AWS_SESSION_TOKEN",
    "DYNAMODB_READ_CAPACITY_UNITS",
    "DYNAMODB_WRITE_CAPACITY_UNITS",
)
class PynamoDB:
    """
    The main class for initializing and managing PynamoDB integration to one / multiple
    Flask applications.
    Attributes:
        app (Flask): The Flask application that uses DynamoDB. For using more than one app,
        you should use ``init_app`` method. Please note the model class supports a configuration
        of one app only.
    """
    # Re-exported model base class; its _app_config is shared by all instances.
    Model = ModelClass
    def __init__(self, app: Flask = None):
        self.app = app
        if app:
            self.init_app(app)
    def init_app(self, app: Flask):
        """
        Initializes a Flask application for using the integration.
        Currently, the model class supports a single app configuration only.
        Therefore, if there are multiple app configurations for this integration,
        the configuration will be overriden.
        Args:
            app (Flask): The flask application to initialize.
        Raises:
            TypeError: If ``app`` is missing or not a Flask instance.
        """
        if not app or not isinstance(app, Flask):
            raise TypeError("Invalid Flask app instance.")
        # Copy only the recognised DYNAMODB_* settings, renamed to the
        # argument names expected by pynamodb's Connection.
        self.Model._app_config.update(
            {self._convert_key(k): v for k, v in app.config.items() if k in DYNAMODB_SETTINGS}
        )
        connection = self._create_connection(self.Model._app_config)
        app.extensions["pynamodb"] = {"db": self, "connection": connection}
    @property
    def connection(self) -> Connection:
        """
        Connection: Base connection object, for accessing DynamoDB.
        Lazily recreated (and cached on the current app) when missing.
        """
        try:
            return current_app.extensions["pynamodb"]["connection"]
        except KeyError:
            new_connection = self._create_connection(self.Model._app_config)
            current_app.extensions["pynamodb"] = {
                "db": self,
                "connection": new_connection,
            }
            return new_connection
    @staticmethod
    def _convert_key(key: str) -> str:
        # "DYNAMODB_REGION" -> "region": strip the prefix, lowercase the rest.
        return key.split("_", 1)[1].lower()
    @staticmethod
    def _create_connection(config: Dict[str, Any]) -> Connection:
        """Build a pynamodb Connection from the collected settings."""
        connection = Connection(
            config.get("region"),
            config.get("host"),
            config.get("connect_timeout_seconds"),
            config.get("read_timeout_seconds"),
            config.get("max_retry_attempts"),
            config.get("base_backoff_ms"),
            config.get("max_pool_connections"),
            config.get("extra_headers"),
        )
        connection.session.set_credentials(
            config.get("aws_access_key_id"), config.get("aws_secret_access_key")
        )
        return connection
| StarcoderdataPython |
4829556 | <gh_stars>1-10
import json
import requests
from django.db import models
from django.contrib.auth.models import User
from .utils import *
#
# 1: 'ece-1',
# 2: 'ece-2',
# 3: 'mse-1',
# 4: 'pas-1',
# 5: 'pas-2',
# 6: 'osc-1',
# 7: 'osc-2',
# 8: 'osc-3',
# 9: 'osc-4',
# 10: 'osc-5',
# 11: 'bio-1',
# 12: 'bio-2',
# 13: 'bio-3',
# 14: '',
# 15: '',
# 16: ''
# class CrossLabUser(models.Model):
# """
# User model for CrossLab(iLab)
# """
# # "owner": {
# # "id": 1701472,
# # "name": "<NAME>",
# # "first_name": "Zhaohui",
# # "last_name": "Yang",
# # "email": "<EMAIL>",
# # "phone": "5205247501",
# # "employee_id": "N/A"
# # },
# ilab_id = models.CharField(max_length=20, unique=True)
# name = models.CharField(max_length=30)
# first_name = models.CharField(max_length=20)
# last_name = models.CharField(max_length=20)
# email = models.EmailField()
# phone = models.CharField(max_length=20)
# employee_id = models.CharField(max_length=20)
#
#
# class UserLabMap(models.Model):
# """
# Logical user -- Specific laboratory mapping information
# """
#
# class Meta:
# unique_together = ['ilab_lab_id', 'ilab_user_id']
#
# ilab_lab_id = models.CharField(max_length=20)
# ilab_user_id = models.CharField(max_length=20)
# lab_name = models.CharField(max_length=100)
class CrossLabQuagentUserMap(models.Model):
    """
    Mapping: Laboratories in iLab -> Physical users in Quagent
    """
    # Quagent user the iLab laboratory maps onto; rows are removed with the user.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # Identifier and display name of the laboratory on the iLab side.
    ilab_lab_id = models.CharField(max_length=20, verbose_name='iLab laboratory id')
    ilab_lab_name = models.CharField(max_length=50, verbose_name='iLab laboratory name')
3284866 | <reponame>Eyalcohenx/tonic
import abc
class Agent(abc.ABC):
'''Abstract class used to build agents.'''
def initialize(self, observation_space, action_space, seed=None):
pass
@abc.abstractmethod
def step(self, observations):
'''Returns actions during training.'''
pass
def update(self, observations, rewards, resets, terminations):
'''Informs the agent of the latest transitions during training.'''
pass
@abc.abstractmethod
def test_step(self, observations):
'''Returns actions during testing.'''
pass
def test_update(self, observations, rewards, resets, terminations):
'''Informs the agent of the latest transitions during testing.'''
pass
def save(self, path):
'''Saves the agent weights during training.'''
pass
def load(self, path):
'''Reloads the agent weights from a checkpoint.'''
pass
| StarcoderdataPython |
1749952 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
# Copyright 2017 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Author: <NAME> (Leon)
#
# ********* Protocol Combined Example *********
#
#
# Available Dynamixel model on this example : All models using Protocol 1.0 and 2.0
# This example is tested with a Dynamixel MX-28, a Dynamixel PRO 54-200 and an USB2DYNAMIXEL
# Be sure that properties of Dynamixel MX and PRO are already set as %% MX - ID : 1 / Baudnum : 34 (Baudrate : 57600) , PRO - ID : 1 / Baudnum : 1 (Baudrate : 57600)
#
# Be aware that:
# This example configures two different control tables (especially, if it uses Dynamixel and Dynamixel PRO). It may modify critical Dynamixel parameter on the control table, if Dynamixels have wrong ID.
#
import os
# Cross-platform single-keystroke reader: msvcrt on Windows, raw tty on POSIX.
if os.name == 'nt':
    import msvcrt
    def getch():
        # msvcrt.getch() returns bytes; decode so both branches return str.
        return msvcrt.getch().decode()
else:
    import sys, tty, termios
    fd = sys.stdin.fileno()
    old_settings = termios.tcgetattr(fd)
    def getch():
        # Switch the terminal to raw mode for one keystroke, then restore
        # the saved settings even if reading fails.
        try:
            tty.setraw(sys.stdin.fileno())
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch
os.sys.path.append('../dynamixel_functions_py') # Path setting
import dynamixel_functions as dynamixel # Uses Dynamixel SDK library
# Control table address for Dynamixel MX
ADDR_MX_TORQUE_ENABLE = 24 # Control table address is different in Dynamixel model
ADDR_MX_GOAL_POSITION = 30
ADDR_MX_PRESENT_POSITION = 36
# Control table address for Dynamixel PRO
ADDR_PRO_TORQUE_ENABLE = 562
ADDR_PRO_GOAL_POSITION = 596
ADDR_PRO_PRESENT_POSITION = 611
# Protocol version
PROTOCOL_VERSION1 = 1 # See which protocol version is used in the Dynamixel
PROTOCOL_VERSION2 = 2
# Default setting
DXL1_ID = 1 # Dynamixel ID: 1
DXL2_ID = 2 # Dynamixel ID: 2
BAUDRATE = 57600
DEVICENAME = "/dev/ttyUSB0".encode('utf-8')# Check which port is being used on your controller
# ex) Windows: "COM1" Linux: "/dev/ttyUSB0" Mac: "/dev/tty.usbserial-*"
TORQUE_ENABLE = 1 # Value for enabling the torque
TORQUE_DISABLE = 0 # Value for disabling the torque
DXL1_MINIMUM_POSITION_VALUE = 100 # Dynamixel will rotate between this value
DXL1_MAXIMUM_POSITION_VALUE = 4000 # and this value (note that the Dynamixel would not move when the position value is out of movable range. Check e-manual about the range of the Dynamixel you use.)
DXL2_MINIMUM_POSITION_VALUE = -150000
DXL2_MAXIMUM_POSITION_VALUE = 150000
DXL1_MOVING_STATUS_THRESHOLD = 10 # Dynamixel moving status threshold
DXL2_MOVING_STATUS_THRESHOLD = 20
ESC_ASCII_VALUE = 0x1b
COMM_SUCCESS = 0 # Communication Success result value
COMM_TX_FAIL = -1001 # Communication Tx Failed
# Initialize PortHandler Structs
# Set the port path
# Get methods and members of PortHandlerLinux or PortHandlerWindows
port_num = dynamixel.portHandler(DEVICENAME)
# Initialize PacketHandler Structs
dynamixel.packetHandler()
index = 0
dxl_comm_result = COMM_TX_FAIL # Communication result
dxl1_goal_position = [DXL1_MINIMUM_POSITION_VALUE, DXL1_MAXIMUM_POSITION_VALUE] # Goal position of Dynamixel MX
dxl2_goal_position = [DXL2_MINIMUM_POSITION_VALUE, DXL2_MAXIMUM_POSITION_VALUE] # Goal position of Dynamixel PRO
dxl_error = 0 # Dynamixel error
dxl1_present_position = 0 # Present position of Dynamixel MX
dxl2_present_position = 0 # Present position of Dynamixel PRO
# Open port
if dynamixel.openPort(port_num):
print("Succeeded to open the port!")
else:
print("Failed to open the port!")
print("Press any key to terminate...")
getch()
quit()
# Set port baudrate
if dynamixel.setBaudRate(port_num, BAUDRATE):
print("Succeeded to change the baudrate!")
else:
print("Failed to change the baudrate!")
print("Press any key to terminate...")
getch()
quit()
# Enable Dynamixel#1 torque
dynamixel.write1ByteTxRx(port_num, PROTOCOL_VERSION1, DXL1_ID, ADDR_MX_TORQUE_ENABLE, TORQUE_ENABLE)
dxl_comm_result = dynamixel.getLastTxRxResult(port_num, PROTOCOL_VERSION1)
dxl_error = dynamixel.getLastRxPacketError(port_num, PROTOCOL_VERSION1)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION1, dxl_comm_result))
elif dxl_error != 0:
print(dynamixel.getRxPacketError(PROTOCOL_VERSION1, dxl_error))
else:
print("Dynamixel#%d has been successfully connected" % (DXL1_ID))
# Enable Dynamixel#1 torque
dynamixel.write1ByteTxRx(port_num, PROTOCOL_VERSION2, DXL2_ID, ADDR_PRO_TORQUE_ENABLE, TORQUE_ENABLE)
dxl_comm_result = dynamixel.getLastTxRxResult(port_num, PROTOCOL_VERSION2)
dxl_error = dynamixel.getLastRxPacketError(port_num, PROTOCOL_VERSION2)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION2, dxl_comm_result))
elif dxl_error != 0:
print(dynamixel.getRxPacketError(PROTOCOL_VERSION2, dxl_error))
else:
print("Dynamixel#%d has been successfully connected" % (DXL2_ID))
while 1:
print("Press any key to continue! (or press ESC to quit!)")
if getch() == chr(ESC_ASCII_VALUE):
break
# Write Dynamixel#1 goal position
dynamixel.write2ByteTxRx(port_num, PROTOCOL_VERSION1, DXL1_ID, ADDR_MX_GOAL_POSITION, dxl1_goal_position[index])
dxl_comm_result = dynamixel.getLastTxRxResult(port_num, PROTOCOL_VERSION1)
dxl_error = dynamixel.getLastRxPacketError(port_num, PROTOCOL_VERSION1)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION1, dxl_comm_result))
elif dxl_error != 0:
print(dynamixel.getRxPacketError(PROTOCOL_VERSION1, dxl_error))
# Write Dynamixel#1 goal position
dynamixel.write4ByteTxRx(port_num, PROTOCOL_VERSION2, DXL2_ID, ADDR_PRO_GOAL_POSITION, dxl2_goal_position[index])
dxl_comm_result = dynamixel.getLastTxRxResult(port_num, PROTOCOL_VERSION2)
dxl_error = dynamixel.getLastRxPacketError(port_num, PROTOCOL_VERSION2)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION2, dxl_comm_result))
elif dxl_error != 0:
print(dynamixel.getRxPacketError(PROTOCOL_VERSION2, dxl_error))
while 1:
# Read Dynamixel#1 present position
dxl1_present_position = dynamixel.read2ByteTxRx(port_num, PROTOCOL_VERSION1, DXL1_ID, ADDR_MX_PRESENT_POSITION)
dxl_comm_result = dynamixel.getLastTxRxResult(port_num, PROTOCOL_VERSION1)
dxl_error = dynamixel.getLastRxPacketError(port_num, PROTOCOL_VERSION1)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION1, dxl_comm_result))
elif dxl_error != 0:
print(dynamixel.getRxPacketError(PROTOCOL_VERSION1, dxl_error))
# Read Dynamixel#2 present position
dxl2_present_position = dynamixel.read4ByteTxRx(port_num, PROTOCOL_VERSION2, DXL2_ID, ADDR_PRO_PRESENT_POSITION)
dxl_comm_result = dynamixel.getLastTxRxResult(port_num, PROTOCOL_VERSION2)
dxl_error = dynamixel.getLastRxPacketError(port_num, PROTOCOL_VERSION2)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION2, dxl_comm_result))
elif dxl_error != 0:
print(dynamixel.getRxPacketError(PROTOCOL_VERSION2, dxl_error))
print("[ID:%03d] GoalPos:%03d PresPos:%03d [ID:%03d] GoalPos:%03d PresPos:%03d" % (DXL1_ID, dxl1_goal_position[index], dxl1_present_position, DXL2_ID, dxl2_goal_position[index], dxl2_present_position))
if not ((abs(dxl1_goal_position[index] - dxl1_present_position) > DXL1_MOVING_STATUS_THRESHOLD) or (abs(dxl2_goal_position[index] - dxl2_present_position) > DXL2_MOVING_STATUS_THRESHOLD)):
break
# Change goal position
if index == 0:
index = 1
else:
index = 0
# Disable Dynamixel#1 Torque
dynamixel.write1ByteTxRx(port_num, PROTOCOL_VERSION1, DXL1_ID, ADDR_MX_TORQUE_ENABLE, TORQUE_DISABLE)
dxl_comm_result = dynamixel.getLastTxRxResult(port_num, PROTOCOL_VERSION1)
dxl_error = dynamixel.getLastRxPacketError(port_num, PROTOCOL_VERSION1)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION1, dxl_comm_result))
elif dxl_error != 0:
print(dynamixel.getRxPacketError(PROTOCOL_VERSION1, dxl_error))
# Disable Dynamixel#2 Torque
dynamixel.write1ByteTxRx(port_num, PROTOCOL_VERSION2, DXL2_ID, ADDR_PRO_TORQUE_ENABLE, TORQUE_DISABLE)
dxl_comm_result = dynamixel.getLastTxRxResult(port_num, PROTOCOL_VERSION2)
dxl_error = dynamixel.getLastRxPacketError(port_num, PROTOCOL_VERSION2)
if dxl_comm_result != COMM_SUCCESS:
print(dynamixel.getTxRxResult(PROTOCOL_VERSION2, dxl_comm_result))
elif dxl_error != 0:
print(dynamixel.getRxPacketError(PROTOCOL_VERSION2, dxl_error))
# Close port
dynamixel.closePort(port_num)
| StarcoderdataPython |
183474 | <reponame>LandRegistry/audit<gh_stars>0
import os, sys
from flask import Flask
app = Flask(__name__)
# add config
app.config.from_object('config')
if not os.environ.get('REDIS_URL'):
print "REDIS_URL not set. Using default=[%s]" % app.config['REDIS_URL']
if not os.environ.get('REDIS_NS'):
print "REDIS_NS not set. Using default=[%s]" % app.config['REDIS_NS']
| StarcoderdataPython |
1690977 | <reponame>kevinksyTRD/pyCOSIM
import os
import random
import numpy as np
import pandas
import pytest
from pyOSPParser.logging_configuration import OspLoggingConfiguration
from pyOSPParser.scenario import OSPScenario, OSPEvent
from pycosim.osp_command_line_interface import get_model_description, run_single_fmu, \
ModelVariables, run_cosimulation, \
LoggingLevel, SimulationError
# Absolute path to the FMU exercised by the single-FMU tests.
path_to_fmu = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    'fmus',
    'ControlVolume.fmu'
)
# Directory containing the OSP system-structure definition used by the
# co-simulation tests.
path_to_system_structure = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    'fmus'
)
def test_get_model_description():
    """The test FMU's model description should expose its name, uuid and variables."""
    md = get_model_description(path_to_fmu)
    assert md.name == 'ControlVolume'
    assert md.uuid != ''
    assert type(md.model_variable) is ModelVariables
    assert len(md.model_variable.parameters) > 0
    assert len(md.model_variable.inputs) > 0
    assert len(md.model_variable.outputs) > 0
    assert len(md.model_variable.others) > 0

    # Dump each variable category for manual inspection of the test output
    sections = [
        ('Parameters:', md.model_variable.get_parameters_names()),
        ('Inputs:', md.model_variable.get_input_names()),
        ('Outputs:', md.model_variable.get_output_names()),
        ('Others:', md.model_variable.get_other_variable_names()),
    ]
    for header, names in sections:
        print(header)
        for name in names:
            print('\t%s' % name)
def test_run_single_fmu():
    """Exercise run_single_fmu: output file, initial values, duration and step size."""
    result, _ = run_single_fmu(path_to_fmu)

    # Check if the output file does not exist if the output_file_path is not given
    assert not os.path.isfile('model-output.csv')
    print('')
    print(result)

    # Check if the file exists if the output_file_path is given
    output_file_path = 'output.csv'
    run_single_fmu(path_to_fmu, output_file_path=output_file_path)
    assert os.path.isfile(output_file_path)
    os.remove(output_file_path)

    # Check if the initial values are effective
    initial_values = {
        'p_loss.T': 330,
        'p_in.dQ': 100,
    }
    result, _ = run_single_fmu(path_to_fmu, initial_values=initial_values)
    # Collect the column names that matches the initial value specified
    columns = [column for column in result.columns if any(list(map(lambda x: column.startswith(x), initial_values)))]
    for column in columns:
        # Find the initial value this column corresponds to.
        # NOTE(review): this relies on the inner loop variable ``key``
        # leaking out of the loop after ``break`` — fragile but intentional.
        for key in initial_values:
            if column.startswith(key):
                break
        # Compare the initial value
        # noinspection PyUnboundLocalVariable
        comparison = result[column].values == initial_values[key]
        assert all(comparison.tolist())

    # Check if the duration arg is effective
    # Duration is rounded to the second decimal place because the
    # step size is 0.01 by default.
    duration = np.round(random.random() * 10, 2)
    result, _ = run_single_fmu(path_to_fmu, duration=duration)
    assert result['Time'].values[-1] == pytest.approx(duration, rel=1e-3)

    # Check if the step size arg is effective
    step_size = 0.05
    result, _ = run_single_fmu(path_to_fmu, step_size=step_size)
    step_size_sim = np.diff(result['Time'].values)
    assert np.any(step_size_sim == step_size)
def test_run_cosimulation():
    """Exercise run_cosimulation: plain run, logging config and scenario events."""
    duration = random.randint(5, 10)
    result, log, error = run_cosimulation(
        path_to_system_structure=path_to_system_structure,
        duration=duration,
        logging_level=LoggingLevel.info,
        logging_stream=True
    )
    # Every simulator's result is a DataFrame covering the whole duration
    for each in result:
        assert isinstance(result[each], pandas.DataFrame)
        assert result[each]['Time'].values[-1] == duration
    assert isinstance(log, str)
    assert len(log) > 0

    # Test with logging configuration and output directory
    path_to_sim_temp = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sim_temp')
    path_to_logging_config = os.path.join(path_to_sim_temp, 'LogConfig.xml')
    logging_config = OspLoggingConfiguration(xml_source=path_to_logging_config)
    result, log, error = run_cosimulation(
        path_to_system_structure=path_to_system_structure,
        output_file_path=path_to_sim_temp,
        logging_config=logging_config,
        logging_level=LoggingLevel.info,
        logging_stream=True
    )
    # Clean up generated csv output and the copied logging config before asserting
    output_files = [file_name for file_name in os.listdir(path_to_sim_temp) if file_name.endswith('.csv')]
    for file_name in output_files:
        os.remove(os.path.join(path_to_sim_temp, file_name))
    os.remove(os.path.join(path_to_system_structure, 'LogConfig.xml'))
    # One output file per configured simulator; each logged variable becomes
    # a column (+2 for the Time and step columns).
    assert len(output_files) == len(logging_config.simulators)
    for simulator in logging_config.simulators:
        assert len(result[simulator.name].columns) == len(simulator.variables) + 2
        for variable in simulator.variables:
            assert variable.name in result[simulator.name].columns

    # Test with a scenario
    duration = 50
    model = 'chassis'
    variable = 'C.mChassis'
    scenario = OSPScenario(name='test scenario', end=50)
    scenario.add_event(
        OSPEvent(time=5, model=model, variable=variable, action=OSPEvent.OVERRIDE, value=500)
    )
    scenario.add_event(
        OSPEvent(time=15, model=model, variable=variable, action=OSPEvent.OVERRIDE, value=600)
    )
    scenario.add_event(
        OSPEvent(time=30, model=model, variable=variable, action=OSPEvent.OVERRIDE, value=700)
    )
    scenario.add_event(
        OSPEvent(time=45, model=model, variable=variable, action=OSPEvent.OVERRIDE, value=800)
    )
    result, log, error = run_cosimulation(
        path_to_system_structure=path_to_system_structure,
        duration=duration,
        scenario=scenario,
        logging_level=LoggingLevel.info,
        logging_stream=True
    )
    if len(error) > 0:
        raise SimulationError(f'An error or errors occured during the simulation: {error}')
    print(log)
    os.remove(os.path.join(path_to_system_structure, scenario.get_file_name()))
    # Between two events the overridden variable must hold the event's value
    time_array = result[model]['Time'].values
    for i, event in enumerate(scenario.events):
        if i < len(scenario.events) - 1:
            next_event = scenario.events[i + 1]
            index = np.bitwise_and(time_array > event.time, time_array <= next_event.time)
        else:
            index = time_array > event.time
        assert np.all(result[model][variable].values[index] == event.value)
1734595 | from typing import List, Optional, Set
from numpy import ndarray
from livia.process.analyzer.object_detection.DetectedObject import DetectedObject
class FrameObjectDetection:
    """Result of running object detection over a single video frame.

    Holds the frame, the detected objects and, optionally, the set of class
    names. When ``class_names`` is not given it is derived from the objects:
    either all of them carry a class name, or none of them does.
    """

    def __init__(self, frame: ndarray, objects: List[DetectedObject], class_names: Optional[Set[str]] = None):
        self.__frame: ndarray = frame
        self.__objects: List[DetectedObject] = list(objects)

        # Derive the class-name set from the objects when not provided.
        # NOTE(review): an *empty* class_names argument is falsy and treated
        # like None here — confirm that is intended.
        self.__class_names: Optional[Set[str]] = None
        if class_names:
            self.__class_names = set(class_names)
        elif all([obj.has_class_name() for obj in objects]):
            self.__class_names = set([obj.class_name for obj in objects if obj.class_name is not None])
        elif any([obj.has_class_name() for obj in objects]):
            raise ValueError("Objects with and without class can't be combined")

        # When both class names and objects are present they must agree exactly.
        if self.__class_names:
            if len(objects) > 0 and self.__class_names != set([obj.class_name for obj in objects]):
                cn = self.__class_names
                ocn = set([obj.class_name for obj in objects])
                message = f"Class names ({cn}) do not match the classes in the objects ({ocn})"
                raise ValueError(message)

    @property
    def frame(self) -> ndarray:
        return self.__frame

    @property
    def class_names(self) -> Optional[Set[str]]:
        # Defensive copy so callers cannot mutate internal state
        return set(self.__class_names) if self.__class_names is not None else None

    @property
    def objects(self) -> List[DetectedObject]:
        # Defensive copy so callers cannot mutate internal state
        return self.__objects.copy()

    def has_objects(self) -> bool:
        return len(self.__objects) > 0

    def merge(self, detection: "FrameObjectDetection") -> "FrameObjectDetection":
        """Combine the objects of two detections made over the same frame."""
        import numpy

        # BUG FIX: ``self.__frame != detection.frame`` produces an
        # element-wise boolean ndarray, and using that in a truth test raises
        # "The truth value of an array ... is ambiguous". Compare the arrays
        # explicitly instead.
        if not numpy.array_equal(self.__frame, detection.frame):
            raise ValueError("Detection objects must share the same frame")

        self_class_names = set(self.__class_names if self.__class_names is not None else [])
        detection_class_names = set(detection.class_names if detection.class_names is not None else [])
        if self_class_names != detection_class_names:
            raise ValueError("Detection objects must share the same classes")

        return FrameObjectDetection(self.__frame, self.__objects + detection.objects, self.__class_names)
| StarcoderdataPython |
172030 | <gh_stars>1-10
"""Custom logging module wrapping standard logging, but with special pyindigoConfig.
It mostly serves debug purposes, logging low-level stuff like property events.
See https://docs.python.org/3/howto/logging.html for info on basic logging
Example use:
>>> import pyindigo.logging as logging
>>> logging.basicConfig(filename='pyindigo.log', encoding='utf-8', level=logging.DEBUG)
>>> logging.pyindigoConfig(log_driver_actions=True, log_property_set=True)
>>> logging.warn("Beware!")
>>> logging.debug("Some details here")
"""
from logging import * # noqa
import inspect
class pyindigoConfig:
    """Holder of pyindigo logging flags, mimicking ``logging.basicConfig``.

    "Calling" this class never builds an instance: ``__new__`` only updates
    the class-level boolean flags and implicitly returns ``None``.
    """

    # info
    log_property_set: bool = False
    log_driver_actions: bool = False
    log_callback_dispatching: bool = False
    log_device_connection: bool = False
    log_blocking_property_settings: bool = False

    # warnings
    log_callback_exceptions: bool = True
    log_alert_properties: bool = False

    def __new__(cls, *args, **kwargs):
        """Not an actual instantiation, but setting class attributes, mocking logging.basicConfig behaviour"""
        # A single boolean positional argument sets every flag at once
        if len(args) == 1 and isinstance(args[0], bool):
            for flag_name, _ in inspect.getmembers(cls, lambda attr: isinstance(attr, bool)):
                setattr(cls, flag_name, args[0])
        if len(args) > 1:
            raise ValueError("Only one positional argument can be set (all flags at once)")
        for option, enabled in kwargs.items():
            if not hasattr(cls, option):
                raise KeyError(f"Unknown logging option {option}={enabled}")
            setattr(cls, option, enabled)
| StarcoderdataPython |
3387459 | <gh_stars>0
import datetime
import os
import socket
import sys
import time
from urllib.parse import quote, unquote
from library import (
magic,
html
)
from threading import Thread
# Path of the access-log file appended to by escrever_log().
LOG = "./servidor.log"
# File served by default when a directory containing it is requested.
DIRECTORYINDEX = "index.html"
def escrever_log(msg):
    """Append *msg* to the server log file (``LOG``)."""
    with open(LOG, mode='a') as log_file:
        log_file.write(msg)
def processa_requisicao(conexao, info_cliente):
    """Handle one HTTP request and send the requested resource back.

    Runs in its own thread so several connections can be served at once.
    Only the GET method is supported; anything else yields HTTP 400.

    Args:
        conexao (socket.socket): Connected socket used to receive the
            request and send the response.
        info_cliente (tuple): (ip, port) pair of the connected client.

    NOTE(review): this function reads the module-level ``diretorio`` set in
    the ``__main__`` block, so it is only usable after the command line has
    been parsed.
    """
    metodo, recurso, versao = '', '', ''
    # BUG FIX: the original read from the *global* socket ``con`` instead of
    # the ``conexao`` parameter; it only worked because the main loop happened
    # to store the accepted socket in a global of that name.
    req = conexao.recv(1024).decode()
    earquivo = False
    try:
        metodo, recurso, versao = req.split('\n')[0].split(' ')

        # Only the GET method is supported
        if metodo != 'GET':
            raise ValueError()

        # Parse the request headers into a dict
        atributos = {}
        for linha in req.split('\n')[1:]:
            campo = linha.split(':')[0].strip()
            valor = ':'.join(linha.strip('\r').split(':')[1:]).strip()
            atributos[campo] = valor

        # Sanitize the requested resource to prevent local file inclusion
        # through the URL
        # https://www.owasp.org/index.php/Testing_for_Local_File_Inclusion
        recurso = unquote(recurso.replace('..', '.').replace('//', '/'))

        # Default response, overridden below when another resource is found
        codigo_http = '200'
        menssagem_http = 'OK'
        resposta = html.HTML_BASE.format('Página Inicial',
                                         '<h1> Esta funcionando ... </h1>')
        tipo_conteudo = 'text/html'

        # If the resource is "/" and DIRECTORYINDEX exists, serve that file
        # as the default page
        if recurso == '/' and os.path.exists((diretorio + '/' + DIRECTORYINDEX)):
            fpath = (diretorio + '/' + DIRECTORYINDEX).replace('//', '/')
        else:
            fpath = (diretorio + recurso).replace('//', '/')

        if os.path.isfile(fpath):
            tipo_conteudo = magic.from_file(fpath, mime=True)
            resposta = b''
            earquivo = True
        elif os.path.isdir(fpath):
            if not os.listdir(fpath) and recurso == '/':
                # Empty base directory: keep the default message
                pass
            else:
                # Otherwise render a listing of the directory contents
                lista = ''
                # Link to the parent directory
                anterior = '/'.join(recurso.split('/')[0:-1])
                if anterior == '':
                    anterior = '/'
                dir_pai = '<tr><td colspan=5><a href="' + anterior + '">Diretório pai</a></td></tr>'
                if fpath.rstrip('/') != diretorio.rstrip('/'):
                    lista += dir_pai
                # Build one table row per directory entry
                for i in os.listdir(fpath):
                    frecurso = fpath.rstrip('/') + '/' + i
                    data_modificacao = time.ctime(os.path.getmtime(frecurso))
                    if os.path.isfile(frecurso):
                        # Renamed from ``bytes`` to avoid shadowing the builtin
                        tamanho = str(os.path.getsize(frecurso))
                        ftype = '-'
                    else:
                        tamanho = '-'
                        ftype = 'DIR'
                    lista += '<tr>'
                    lista += '<td>' + ftype + '</td>'
                    urlrecurso = recurso.rstrip('/') + '/' + i
                    lista += '<td><a href="' + quote(urlrecurso) + '">' + i + '</a></td>'
                    lista += '<td>' + data_modificacao + '</td>'
                    lista += '<td>' + tamanho + '</td>'
                    lista += '</tr>'
                resposta = html.HTML_INDEX.format('Index of ' + recurso,
                                                  recurso,
                                                  lista)
        else:
            codigo_http = '404'
            menssagem_http = 'NOT FOUND'
            tipo_conteudo = 'text/html'
            resposta = html.HTML_BASE.format(
                'Recurso não encontrado',
                '<h1> HTTP 404 - Not found </h1>' +
                '<p> O recurso solicitado não foi encontrado </p>')
    except ValueError:
        codigo_http = '400'
        menssagem_http = 'Bad request'
        tipo_conteudo = 'text/html'
        resposta = html.HTML_BASE.format('Formato de requisição inválido',
                                         '<h1> HTTP 400 - Bad request </h1>')
    finally:
        escrever_log("{0} - {1} - {2} {3} - {4}\n".format(
            str(info_cliente[0]),
            datetime.datetime.now().strftime('%d/%m/%Y %H:%M:%S'),
            metodo,
            recurso,
            codigo_http))
        conexao.send('HTTP/1.1 {0} {1}\nContent-Type: {2}\n\n'.format(
            codigo_http,
            menssagem_http,
            tipo_conteudo).encode())
        # The body is sent in 1024-byte chunks; text responses are encoded
        # first, file responses are streamed straight from disk
        if isinstance(resposta, str):
            resposta = resposta.encode()
            for i in range(0, len(resposta), 1024):
                try:
                    conexao.send(resposta[i:i + 1024])
                except BrokenPipeError:
                    pass
        elif earquivo:
            with open(fpath, 'rb') as arquivo:
                while True:
                    data = arquivo.read(1024)
                    if not data:
                        break
                    try:
                        conexao.send(data)
                    except BrokenPipeError:
                        pass
        conexao.close()
if __name__ == '__main__':
    # Parse the port and base directory from the command line
    try:
        porta = int(sys.argv[1])
        diretorio = sys.argv[2]
        if not os.path.isdir(diretorio):
            print('Forneça um diretorio base para este virtual host')
            sys.exit()
    except Exception as e:
        print('Modo de uso: python3 servidor.py porta diretorio')
        sys.exit()

    print('Preparando para receber conexões em {0}:{1}'.format('0.0.0.0', porta))

    # BUG FIX: the original created, bound and leaked a brand-new listening
    # socket on every loop iteration (one per accepted connection). Create
    # the IPv4 TCP listening socket once, then accept in a loop.
    s = socket.socket(
        socket.AF_INET,
        socket.SOCK_STREAM
    )
    # Allow reusing the port without waiting for the TIME_WAIT expiry
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    try:
        s.bind(('0.0.0.0', porta))
    except PermissionError:
        print('Você não possui permissão para utilizar essa porta.')
        sys.exit()
    s.listen(1)

    while True:
        con, info_cliente = s.accept()
        print('Conexão efetuada por', ':'.join([str(i) for i in info_cliente]))
        # Handle each connection in its own thread
        Thread(target=processa_requisicao, args=(con, info_cliente, )).start()
| StarcoderdataPython |
1744662 | <filename>code/ui/mainWindow.py
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainWindow.ui'
#
# Created by: PyQt5 UI code generator 5.15.1
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """pyuic5-generated UI layout for the application main window.

    NOTE: this class is generated from ``mainWindow.ui``; manual changes are
    lost when pyuic5 is re-run — edit the .ui file instead.
    """

    def setupUi(self, MainWindow):
        """Instantiate and lay out every widget of *MainWindow*."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(712, 473)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.widget = QtWidgets.QWidget(self.centralwidget)
        self.widget.setGeometry(QtCore.QRect(9, 9, 657, 394))
        self.widget.setObjectName("widget")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.widget)
        self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout.setObjectName("horizontalLayout")
        # Left column: tab widget with two empty tabs
        self.tabWidget = QtWidgets.QTabWidget(self.widget)
        self.tabWidget.setObjectName("tabWidget")
        self.tab = QtWidgets.QWidget()
        self.tab.setObjectName("tab")
        self.tabWidget.addTab(self.tab, "")
        self.tab_2 = QtWidgets.QWidget()
        self.tab_2.setObjectName("tab_2")
        self.tabWidget.addTab(self.tab_2, "")
        self.horizontalLayout.addWidget(self.tabWidget)
        # Middle column: the two polarized-view graphics areas
        self.verticalLayout_2 = QtWidgets.QVBoxLayout()
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.planePolarizedView = QtWidgets.QGraphicsView(self.widget)
        self.planePolarizedView.setObjectName("planePolarizedView")
        self.verticalLayout_2.addWidget(self.planePolarizedView)
        self.crossPolarizedView = QtWidgets.QGraphicsView(self.widget)
        self.crossPolarizedView.setObjectName("crossPolarizedView")
        self.verticalLayout_2.addWidget(self.crossPolarizedView)
        self.horizontalLayout.addLayout(self.verticalLayout_2)
        # Right column: two text browsers
        self.verticalLayout_3 = QtWidgets.QVBoxLayout()
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.textBrowser_2 = QtWidgets.QTextBrowser(self.widget)
        self.textBrowser_2.setObjectName("textBrowser_2")
        self.verticalLayout_3.addWidget(self.textBrowser_2)
        self.textBrowser = QtWidgets.QTextBrowser(self.widget)
        self.textBrowser.setObjectName("textBrowser")
        self.verticalLayout_3.addWidget(self.textBrowser)
        self.horizontalLayout.addLayout(self.verticalLayout_3)
        MainWindow.setCentralWidget(self.centralwidget)
        # Menu bar and status bar
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 712, 22))
        self.menubar.setObjectName("menubar")
        self.menu = QtWidgets.QMenu(self.menubar)
        self.menu.setObjectName("menu")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.menubar.addAction(self.menu.menuAction())

        self.retranslateUi(MainWindow)
        self.tabWidget.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Set all user-visible strings (hook for Qt translation)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "Tab 1"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Tab 2"))
        self.menu.setTitle(_translate("MainWindow", "文件"))
| StarcoderdataPython |
5520 | <reponame>scottwedge/OpenStack-Stein
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Author: <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from designate.tests.test_api.test_v2 import ApiV2TestCase
class ApiV2LimitsTest(ApiV2TestCase):
    def test_get_limits(self):
        """GET /limits/ should report every configured absolute limit."""
        response = self.client.get('/limits/')

        self.assertEqual(200, response.status_int)
        self.assertEqual('application/json', response.content_type)

        # All limit keys must be present in the payload
        expected_keys = (
            'max_zones',
            'max_zone_records',
            'max_zone_recordsets',
            'max_recordset_records',
            'min_ttl',
            'max_zone_name_length',
            'max_recordset_name_length',
            'max_page_limit',
        )
        for key in expected_keys:
            self.assertIn(key, response.json)

        # The reported values must match the service configuration
        absolutelimits = response.json
        self.assertEqual(cfg.CONF.quota_zones, absolutelimits['max_zones'])
        self.assertEqual(cfg.CONF.quota_zone_records,
                         absolutelimits['max_zone_recordsets'])
        self.assertEqual(cfg.CONF['service:central'].min_ttl,
                         absolutelimits['min_ttl'])
        self.assertEqual(cfg.CONF['service:central'].max_zone_name_len,
                         absolutelimits['max_zone_name_length'])
        self.assertEqual(cfg.CONF['service:central'].max_recordset_name_len,
                         absolutelimits['max_recordset_name_length'])
        self.assertEqual(cfg.CONF['service:api'].max_limit_v2,
                         absolutelimits['max_page_limit'])
| StarcoderdataPython |
86010 | <reponame>vdonnefort/lisa
#! /usr/bin/env python3
# SPDX-License-Identifier: Apache-2.0
#
# Copyright (C) 2018, ARM Limited and contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import collections
import contextlib
import fnmatch
import functools
import gc
import importlib
import inspect
import io
import itertools
import logging
import pathlib
import pickle
import subprocess
import sys
import tempfile
import traceback
import types
import uuid
import glob
import textwrap
import argparse
# Default file name of the pickled value database produced by exekall runs.
DB_FILENAME = 'VALUE_DB.pickle.xz'

class NotSerializableError(Exception):
    """Raised when an object cannot be serialized with :mod:`pickle`."""
    pass
def get_class_from_name(cls_name, module_map=None):
    """
    Get a class object from its full name (including the module name).

    :param cls_name: Fully qualified name, e.g. ``"os.path.join"``.
    :param module_map: Mapping of module names to module objects used for the
        lookup. Defaults to :data:`sys.modules`.
    :returns: The resolved object, or ``None`` when no module in
        ``module_map`` is a prefix of ``cls_name``.
    """
    # Avoid argument default value that would be huge in Sphinx doc
    module_map = module_map if module_map is not None else sys.modules

    # BUG FIX: only accept a module-name match on a "." boundary, otherwise a
    # module "foo" would wrongly match a name like "foobar.X".
    possible_mod_set = {
        mod_name
        for mod_name in module_map.keys()
        if cls_name == mod_name or cls_name.startswith(mod_name + '.')
    }

    # Longest match in term of number of components
    possible_mod_list = sorted(possible_mod_set, key=lambda name: len(name.split('.')))
    if possible_mod_list:
        mod_name = possible_mod_list[-1]
    else:
        return None

    mod = module_map[mod_name]
    cls_name = cls_name[len(mod_name)+1:]
    return _get_class_from_name(cls_name, mod)

def _get_class_from_name(cls_name, namespace):
    """Resolve the dotted ``cls_name`` by recursive getattr inside ``namespace``."""
    # A mapping (e.g. vars(mod)) is wrapped so getattr works uniformly
    if isinstance(namespace, collections.abc.Mapping):
        namespace = types.SimpleNamespace(**namespace)

    split = cls_name.split('.', 1)
    try:
        obj = getattr(namespace, split[0])
    except AttributeError as e:
        raise ValueError('Object not found') from e

    if len(split) > 1:
        return _get_class_from_name('.'.join(split[1:]), obj)
    else:
        return obj
def create_uuid():
    """
    Return a freshly generated random UUID as a 32-character hex string.
    """
    new_id = uuid.uuid4()
    return new_id.hex
def get_mro(cls):
    """
    Wrapper on top of :func:`inspect.getmro` that recognizes ``None`` as a
    type (treated like ``type(None)``).
    """
    # None (and NoneType itself) get a hard-coded, minimal MRO
    if cls is None or cls is type(None):
        return (type(None), object)
    assert isinstance(cls, type)
    return inspect.getmro(cls)
def get_method_class(function):
    """
    Get the class of a method by analyzing its qualified name.

    Returns ``None`` for functions defined in a local scope, since their
    enclosing class cannot be resolved by name.
    """
    qualified_prefix = function.__qualname__.rsplit('.', 1)[0]
    if '<locals>' in qualified_prefix:
        return None
    # Resolve the class by name in the module that defined the function
    return eval(qualified_prefix, function.__globals__)
def get_name(obj, full_qual=True, qual=True, pretty=False):
    """
    Get a name for ``obj`` (function or class) that can be used in a generated
    script.

    :param full_qual: Full name of the object, including its module name.
    :type full_qual: bool

    :param qual: Qualified name of the object
    :type qual: bool

    :param pretty: If ``True``, will show a prettier name for some types,
        although it is not guaranteed it will actually be a type name. For example,
        ``type(None)`` will be shown as ``None`` instead of ``NoneType``.
    :type pretty: bool
    """
    # Normalize the two flags so they stay mutually consistent:
    # full_qual enabled implies qual enabled
    _qual = qual or full_qual
    # qual disabled implies full_qual disabled
    full_qual = full_qual and qual
    qual = _qual

    if qual:
        _get_name = lambda x: x.__qualname__
    else:
        _get_name = lambda x: x.__name__

    # None is displayed via its type, with pretty-printing forced on
    if obj is None:
        pretty = True
        obj = type(obj)

    if pretty:
        # NOTE: NoValue is an exekall sentinel defined elsewhere in this module
        for prettier_obj in {None, NoValue}:
            if obj == type(prettier_obj):
                # For these types, qual=False or qual=True makes no difference
                obj = prettier_obj
                _get_name = lambda x: str(x)
                break

    # Add the module's name in front of the name to get a fully
    # qualified name
    if full_qual:
        try:
            module_name = obj.__module__
        except AttributeError:
            module_name = ''
        else:
            module_name = (
                module_name + '.'
                if module_name != '__main__' and module_name != 'builtins'
                else ''
            )
    else:
        module_name = ''

    # Classmethods appear as bound method of classes. Since each subclass will
    # get a different bound method object, we want to reflect that in the
    # name we use, instead of always using the same name that the method got
    # when it was defined
    if inspect.ismethod(obj):
        name = _get_name(obj.__self__) + '.' + obj.__name__
    else:
        name = _get_name(obj)

    return module_name + name
def get_toplevel_module(obj):
    """
    Return the outermost module object (usually a package) in which ``obj``
    is defined.
    """
    defining_module = inspect.getmodule(obj)
    top_name = defining_module.__name__.partition('.')[0]
    return sys.modules[top_name]
def get_src_loc(obj, shorten=True):
    """
    Get the source code location of ``obj`` as a ``(file, line)`` tuple.

    :param shorten: Shorten the path of the source file by only keeping the
        part relative to the top-level package.
    :type shorten: bool
    """
    try:
        src_file = pathlib.Path(inspect.getsourcefile(obj)).resolve()
        src_line = inspect.getsourcelines(obj)[1]
    except (OSError, TypeError):
        # No source available (builtins, C extensions, ...)
        src_file, src_line = None, None

    if shorten and src_file:
        mod = get_toplevel_module(obj)
        try:
            mod_paths = mod.__path__
        except AttributeError:
            mod_paths = [mod.__file__]
        # Keep the parents of the package roots that actually contain the file
        candidate_roots = [
            pathlib.Path(p).parent.resolve()
            for p in mod_paths
            if p and pathlib.Path(p) in src_file.parents
        ]
        if candidate_roots:
            src_file = src_file.relative_to(candidate_roots[0])

    return (str(src_file), src_line)
def is_serializable(obj, raise_excep=False):
    """
    Check whether ``obj`` can be serialized with :mod:`pickle`.

    :param raise_excep: When ``True``, raise :class:`NotSerializableError`
        instead of returning ``False`` for unserializable objects.
    :type raise_excep: bool
    """
    # FIX: removed a dead ``stream = io.StringIO()`` that was never used.
    try:
        # This may be slow for big objects but it is the only way to be sure
        # it can actually be serialized
        pickle.dumps(obj)
    except (TypeError, pickle.PickleError, AttributeError) as e:
        debug('Cannot serialize instance of {}: {}'.format(
            type(obj).__qualname__, str(e)
        ))
        if raise_excep:
            raise NotSerializableError(obj) from e
        return False
    else:
        return True
def once(callable_):
    """
    Decorator calling the wrapped callable at most once per distinct set of
    parameters; later calls return the cached result.
    """
    memoize = functools.lru_cache(maxsize=None, typed=True)
    return memoize(callable_)
def remove_indices(iterable, ignored_indices):
    """
    Return a list of the items of ``iterable`` whose position is not listed
    in ``ignored_indices``.
    """
    return [
        item
        for position, item in enumerate(iterable)
        if position not in ignored_indices
    ]
def resolve_annotations(annotations, module_vars):
    """
    Basic reimplementation of typing.get_type_hints.

    Some Python versions do not have a typing module available, and it also
    avoids creating ``Optional[]`` when the parameter has a None default value.
    """
    resolved = {}
    for param, annotation in annotations.items():
        # String annotations are evaluated in the global namespace of the
        # module in which the callable was defined
        if isinstance(annotation, str):
            annotation = eval(annotation, module_vars)
        resolved[param] = annotation
    return resolved
def get_module_basename(path):
    """
    Get the module name of the module defined in source ``path``.
    """
    path = pathlib.Path(path)
    # getmodulename() returns None for anything that is not a module source
    # file; in that case assume a package and use the directory name
    return inspect.getmodulename(str(path)) or path.name
def iterate_cb(iterator, pre_hook=None, post_hook=None):
    """
    Iterate over ``iterator``, invoking optional callbacks around each fetch.

    :param pre_hook: Called right before getting a new value (skipped for
        the very first value).
    :type pre_hook: collections.abc.Callable

    :param post_hook: Called right after getting a new value.
    :type post_hook: collections.abc.Callable
    """
    first = True
    with contextlib.suppress(StopIteration):
        while True:
            # Do not execute pre_hook before the first fetch
            if pre_hook and not first:
                pre_hook()
            first = False
            value = next(iterator)
            if post_hook:
                post_hook()
            yield value
def format_exception(e):
    """
    Render the traceback of the exception ``e`` as a single string.
    """
    return ''.join(
        traceback.format_exception(type(e), e, e.__traceback__)
    )
# Logging level above CRITICAL that is always displayed and used for output
LOGGING_OUT_LEVEL = 60
"""
Log level used for the ``OUT`` level.
This allows sending all the output through the logging module instead of using
:func:`print`, so it can easily be recorded to a file
"""
class ExekallFormatter(logging.Formatter):
    """
    Custom :class:`logging.Formatter` handling the special ``OUT`` level.

    Records emitted at the ``OUT`` level are rendered as the bare message,
    so the level can be used as a :func:`print` replacement that still goes
    through :mod:`logging` and can be redirected to a file.
    """
    def __init__(self, fmt, *args, **kwargs):
        self.default_fmt = logging.Formatter(fmt, *args, **kwargs)
        self.out_fmt = logging.Formatter('%(message)s', *args, **kwargs)

    def format(self, record):
        # OUT is above CRITICAL, so it is always displayed, and without
        # any decoration; everything else uses the regular format
        is_out = (record.levelno == LOGGING_OUT_LEVEL)
        chosen_fmt = self.out_fmt if is_out else self.default_fmt
        return chosen_fmt.format(record)
def setup_logging(log_level, debug_log_file=None, info_log_file=None, verbose=0):
    """
    Setup the :mod:`logging` module.

    :param log_level: Lowest log level name to display.
    :type log_level: str

    :param debug_log_file: Path to a file where logs are collected at the
        ``DEBUG`` level.
    :type debug_log_file: str

    :param info_log_file: Path to a file where logs are collected at the
        ``INFO`` level.
    :type info_log_file: str

    :param verbose: Verbosity level. The format string for log entries will
        contain more information when the level increases.
    :type verbose: int
    """
    logging.addLevelName(LOGGING_OUT_LEVEL, 'OUT')

    level = getattr(logging, log_level.upper())

    verbose_formatter = ExekallFormatter('[%(name)s/%(filename)s:%(lineno)s][%(asctime)s] %(levelname)s  %(message)s')
    normal_formatter = ExekallFormatter('[%(name)s][%(asctime)s] %(levelname)s  %(message)s')

    logger = logging.getLogger()
    # We do not filter anything at the logger level, only at the handler level
    logger.setLevel(logging.NOTSET)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(level)
    console_handler.setFormatter(verbose_formatter if verbose else normal_formatter)
    logger.addHandler(console_handler)

    def add_file_handler(path, handler_level, formatter):
        # Helper factored out of the duplicated debug/info handler setup:
        # attach a UTF-8 file handler at the given level.
        file_handler = logging.FileHandler(str(path), encoding='utf-8')
        file_handler.setLevel(handler_level)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    if debug_log_file:
        add_file_handler(debug_log_file, logging.DEBUG, verbose_formatter)
    if info_log_file:
        add_file_handler(info_log_file, logging.INFO, normal_formatter)

    # Redirect all warnings of the "warnings" module as log entries
    logging.captureWarnings(True)
# Logger shared by the helpers below; all exekall output goes through it.
EXEKALL_LOGGER  = logging.getLogger('EXEKALL')

def out(msg):
    """
    To be used as a replacement of :func:`print`.

    This allows easy redirection of the output to a file.
    """
    EXEKALL_LOGGER.log(LOGGING_OUT_LEVEL, msg)

def info(msg):
    """Write a log message at the INFO level."""
    EXEKALL_LOGGER.info(msg)

def debug(msg):
    """Write a log message at the DEBUG level."""
    EXEKALL_LOGGER.debug(msg)

def warn(msg):
    """Write a log message at the WARNING level."""
    EXEKALL_LOGGER.warning(msg)

def error(msg):
    """Write a log message at the ERROR level."""
    EXEKALL_LOGGER.error(msg)
def infer_mod_name(python_src):
    """
    Compute the module name of a Python source file by inferring its top-level
    package.
    """
    python_src = pathlib.Path(python_src)
    module_path = None

    # First look for the outermost package we find in the parent directories.
    # If we were supplied a path, it will not try to go past its highest folder.
    for folder in reversed(python_src.parents):
        if pathlib.Path(folder, '__init__.py').exists():
            package_root_parent = folder.parents[0]
            module_path = python_src.relative_to(package_root_parent)
            break

    # If no package was found, we try to find it through sys.path in case it is
    # only using namespace packages
    else:
        for package_root_parent in sys.path:
            try:
                module_path = python_src.relative_to(package_root_parent)
                break
            except ValueError:
                continue

    # If we found the top-level package
    if module_path is not None:
        module_parents = list(module_path.parents)
        module_basename = get_module_basename(module_path)

        # Import all parent package_names before we import the module.
        # NOTE(review): relies on import_file() (defined elsewhere in this
        # module) registering each parent package in sys.modules.
        for package_name in reversed(module_parents[:-1]):
            package_name = import_file(
                pathlib.Path(package_root_parent, package_name),
                module_name = '.'.join(package_name.parts),
                is_package = True,
            )

        module_dotted_path = list(module_parents[0].parts) + [module_basename]
        module_name = '.'.join(module_dotted_path)
    else:
        # No package found at all: use the bare file name as module name
        module_name = get_module_basename(python_src)

    return module_name
def find_customization_module_set(module_set):
    """
    Find all customization modules, where subclasses of
    :class:`exekall.customization.AdaptorBase` are expected to be found.
    It looks for modules named ``exekall_customize`` present in any enclosing
    package of modules in ``module_set``.

    :param module_set: Set of already-imported modules to scan.
    :returns: Set of imported ``exekall_customize`` modules.
    """
    def build_full_names(l_l):
        """Explode list of lists, and build full package names."""
        # e.g. ['a', 'b', 'c'] yields 'a', 'a.b' and 'a.b.c'
        for l in l_l:
            for i, _ in enumerate(l):
                i += 1
                yield '.'.join(l[:i])
    # Exception raised changed in 3.7:
    # https://docs.python.org/3/library/importlib.html#importlib.util.find_spec
    if sys.version_info >= (3, 7):
        import_excep = ModuleNotFoundError
    else:
        import_excep = AttributeError
    package_names_list = [
        module.__name__.split('.')
        for module in module_set
    ]
    package_name_set = set(build_full_names(package_names_list))
    customization_module_set = set()
    for name in package_name_set:
        customize_name = name + '.exekall_customize'
        # Only hide ModuleNotFoundError exceptions when looking up that
        # specific module, we don't want to hide issues inside the module
        # itself.
        module_exists = False
        with contextlib.suppress(import_excep):
            module_exists = importlib.util.find_spec(customize_name)
        if module_exists:
            # Importing that module is enough to make the adaptor visible
            # to the Adaptor base class
            customize_module = importlib.import_module(customize_name)
            customization_module_set.add(customize_module)
    return customization_module_set
def import_modules(paths_or_names, best_effort=False):
    """
    Import the modules in the given list of paths.
    If a folder is passed, all Python sources are recursively imported.

    :param paths_or_names: Iterable of filesystem paths or module names.
    :param best_effort: If ``True``, modules failing to import are skipped
        instead of raising :exc:`ImportError`.
    :returns: Set of imported modules.
    """
    def import_it(path_or_name, best_effort):
        # Recursively import all modules when passed folders
        if path_or_name.is_dir():
            yield from import_folder(path_or_name, best_effort=best_effort)
        # If passed a file, a symlink or something like that
        elif path_or_name.exists():
            try:
                yield import_file(path_or_name)
            except ImportError:
                if best_effort:
                    return
                else:
                    raise
        # Otherwise, assume it is just a module name
        else:
            yield from import_name_recursively(path_or_name, best_effort=best_effort)
    return set(itertools.chain.from_iterable(
        import_it(pathlib.Path(path), best_effort=best_effort)
        for path in paths_or_names
    ))
def import_name_recursively(name, best_effort=False):
    """
    Import a module by its name.

    :param name: Full name of the module.
    :type name: str

    :param best_effort: If ``True``, silently yield nothing when the import
        fails instead of raising.

    If it's a package, import all submodules recursively.
    """
    try:
        mod = importlib.import_module(str(name))
    except ImportError:
        if best_effort:
            # Bare return inside a generator: yields nothing.
            return
        else:
            raise
    try:
        paths = mod.__path__
    # This is a plain module
    except AttributeError:
        yield mod
    # This is a package, so we import all the submodules recursively
    else:
        for path in paths:
            yield from import_folder(pathlib.Path(path), best_effort=best_effort)
def import_folder(path, best_effort=False):
    """
    Import all modules contained in the given folder, recursively.

    :param path: Folder to scan for ``*.py`` files.
    :param best_effort: If ``True``, files that fail to import are skipped.
    """
    pattern = str(path / '**' / '*.py')
    for source_file in glob.iglob(pattern, recursive=True):
        try:
            yield import_file(source_file)
        except ImportError:
            if not best_effort:
                raise
def import_file(python_src, module_name=None, is_package=False):
    """
    Import a module.

    :param python_src: Path to a Python source file.
    :type python_src: str or pathlib.Path

    :param module_name: Name under which to import the module. If ``None``, the
        name is inferred using :func:`infer_mod_name`
    :type module_name: str

    :param is_package: ``True`` if the module is a package. If a folder or
        ``__init__.py`` is passed, this is forcefully set to ``True``.
    :type is_package: bool

    :returns: The imported (and registered in ``sys.modules``) module.
    """
    python_src = pathlib.Path(python_src).resolve()
    # Directly importing __init__.py does not really make much sense and may
    # even break, so just import its package instead.
    if python_src.name == '__init__.py':
        return import_file(
            python_src=python_src.parent,
            module_name=module_name,
            is_package=True
        )
    if python_src.is_dir():
        is_package = True
    if module_name is None:
        module_name = infer_mod_name(python_src)
    # Check if the module has already been imported
    if module_name in sys.modules:
        return sys.modules[module_name]
    is_namespace_package = False
    if is_package:
        # Signify that it is a package to
        # importlib.util.spec_from_file_location
        submodule_search_locations = [str(python_src)]
        init_py = pathlib.Path(python_src, '__init__.py')
        # __init__.py does not exists for namespace packages
        if init_py.exists():
            python_src = init_py
        else:
            is_namespace_package = True
    else:
        submodule_search_locations = None
    # Python >= 3.5 style
    if hasattr(importlib.util, 'module_from_spec'):
        # We manually build a ModuleSpec for namespace packages, since
        # spec_from_file_location apparently does not handle them
        if is_namespace_package:
            spec = importlib.machinery.ModuleSpec(
                name=module_name,
                # loader is None for namespace packages
                loader=None,
                is_package=True,
            )
            # Set __path__ for namespace packages
            spec.submodule_search_locations = submodule_search_locations
        else:
            spec = importlib.util.spec_from_file_location(module_name, str(python_src),
                submodule_search_locations=submodule_search_locations)
            if spec is None:
                raise ValueError('Could not find module "{module}" at {path}'.format(
                    module=module_name,
                    path=python_src
                ))
        module = importlib.util.module_from_spec(spec)
        if not is_namespace_package:
            try:
                # Register module before executing it so relative imports will
                # work
                sys.modules[module_name] = module
                # Nothing to execute in a namespace package
                spec.loader.exec_module(module)
            # If the module cannot be imported cleanly regardless of the reason,
            # make sure we remove it from sys.modules since it's broken. Future
            # attempt to import it should raise again, rather than returning the
            # broken module
            except BaseException:
                with contextlib.suppress(KeyError):
                    del sys.modules[module_name]
                raise
    # Python <= v3.4 style
    else:
        module = importlib.machinery.SourceFileLoader(
                module_name, str(python_src)).load_module()
    sys.modules[module_name] = module
    # Invalidate import-system caches so the freshly registered module is seen.
    importlib.invalidate_caches()
    return module
def flatten_seq(seq, levels=1):
    """
    Flatten a nested sequence, up to ``levels`` levels.
    """
    # Peel off one level of nesting per iteration instead of recursing.
    for _ in range(levels):
        seq = list(itertools.chain.from_iterable(seq))
    return seq
def take_first(iterable):
    """
    Pick the first item of ``iterable``, or ``NoValue`` if it is empty.
    """
    sentinel = object()
    first = next(iter(iterable), sentinel)
    if first is sentinel:
        return NoValue
    return first
class _NoValueType:
    """
    Type of the :attr:`NoValue` singleton.

    This is mostly used like ``None``, in places where ``None`` may be an
    acceptable value.
    """
    # Use a singleton pattern to make sure that even deserialized instances
    # will be the same object
    def __new__(cls):
        try:
            return cls._instance
        except AttributeError:
            obj = super().__new__(cls)
            cls._instance = obj
            return obj

    # Fix: ``__eq__`` used to be defined twice in this class; the first
    # definition (``isinstance``-based) was dead code, silently shadowed by
    # the second.  Only the effective definition is kept.
    def __eq__(self, other):
        return type(self) is type(other)

    def __hash__(self):
        return 0

    def __bool__(self):
        # Behaves as falsy, like ``None``.
        return False

    def __repr__(self):
        return 'NoValue'
NoValue = _NoValueType()
"""
Singleton with similar purposes as ``None``.
"""
class RestartableIter:
    """
    Wrap an iterator to give a new iterator that is restartable.

    Produced items are memoized so that, once the underlying iterator is
    exhausted, iterating again replays the recorded values.
    """

    def __init__(self, it):
        self.values = []

        def memoize(inner):
            # Record every produced item so it can be replayed later.
            for item in inner:
                self.values.append(item)
                yield item

        self.it = memoize(it)

    def __iter__(self):
        return self

    def __next__(self):
        try:
            return next(self.it)
        except StopIteration:
            # Exhausted: replay the memoized values on the next iteration.
            self.it = iter(self.values)
            raise
def get_froz_val_set_set(db, uuid_seq=None, type_pattern_seq=None):
    """
    Get a set of sets of :class:`exekall.engine.FrozenExprVal`.

    :param db: :class:`exekall.engine.ValueDB` to look into
    :type db: exekall.engine.ValueDB

    :param uuid_seq: Sequence of UUIDs to select.
    :type uuid_seq: list(str)

    :param type_pattern_seq: Sequence of :func:`fnmatch.fnmatch` patterns
        matching type names (including module name).
    :type type_pattern_seq: list(str)
    """
    def by_uuid(froz_val):
        return froz_val.uuid in uuid_seq

    def by_type(froz_val):
        return match_base_cls(froz_val.type_, type_pattern_seq)

    # Combine the filters that were actually requested; no filter at all
    # selects everything.
    if uuid_seq and type_pattern_seq:
        def predicate(froz_val):
            return by_uuid(froz_val) and by_type(froz_val)
    elif uuid_seq:
        predicate = by_uuid
    elif type_pattern_seq:
        predicate = by_type
    else:
        def predicate(froz_val):
            return True

    return db.get_by_predicate(predicate, flatten=False, deduplicate=True)
def match_base_cls(cls, pattern_list):
    """
    Match the name of the class of the object and all its base classes.
    """
    # Walk the MRO so a pattern may match any ancestor, not just the leaf
    # class.  NOTE(review): get_mro/get_name come from another module of this
    # package; assumed to mirror inspect.getmro() and fully-qualified naming --
    # confirm against their definitions.
    for base_cls in get_mro(cls):
        base_cls_name = get_name(base_cls, full_qual=True)
        # Skip classes for which no usable name could be derived.
        if not base_cls_name:
            continue
        if match_name(base_cls_name, pattern_list):
            return True
    return False
def match_name(name, pattern_list):
    """
    Return ``True`` if ``name`` is matched by any pattern in ``pattern_list``.

    If a pattern starts with ``!``, it is taken as a negative pattern.
    An empty pattern list (or a ``None`` name) never matches.
    """
    if name is None or not pattern_list:
        return False

    positive = [p for p in pattern_list if not p.startswith('!')]
    negative = [p[1:] for p in pattern_list if p.startswith('!')]

    def matches_any(patterns):
        return any(fnmatch.fnmatch(name, p) for p in patterns)

    # Positive patterns must match (if any were given) and negative patterns
    # must all miss (if any were given).
    pos_ok = matches_any(positive) if positive else True
    neg_ok = (not matches_any(negative)) if negative else True
    return pos_ok and neg_ok
def get_common_base(cls_list):
    """
    Get the most derived common base class of classes in ``cls_list``.
    """
    def _deepest_shared(first, second):
        # Walk both MROs from ``object`` downwards; the last class the two
        # chains have in common is the most derived shared base.
        shared = object
        for base_a, base_b in zip(
            reversed(inspect.getmro(first)),
            reversed(inspect.getmro(second)),
        ):
            if base_a is not base_b:
                break
            shared = base_a
        return shared

    return functools.reduce(_deepest_shared, cls_list)
def get_subclasses(cls):
    """
    Get all the (direct and indirect) subclasses of ``cls``, including
    ``cls`` itself.
    """
    found = {cls}
    pending = list(cls.__subclasses__())
    # Breadth-first walk over the subclass tree, guarding against revisits.
    while pending:
        sub = pending.pop()
        if sub not in found:
            found.add(sub)
            pending.extend(sub.__subclasses__())
    return found
def get_recursive_module_set(module_set, package_set):
    """
    Retrieve the set of all modules recursively imported from the modules in
    ``module_set`` if they are (indirectly) part of one of the packages named
    in ``package_set``.
    """
    visited = set()
    for mod in module_set:
        _get_recursive_module_set(mod, visited, package_set)
    return visited
def _get_recursive_module_set(module, module_set, package_set):
    # Accumulate ``module`` into ``module_set`` and recurse into every module
    # it references that belongs to one of the packages in ``package_set``.
    if module in module_set:
        return
    module_set.add(module)
    for value in vars(module).values():
        if not isinstance(value, types.ModuleType):
            continue
        # Either a submodule of one of the packages or one of the packages
        # themselves.
        top_level = value.__name__.split('.', 1)[0]
        if top_level in package_set:
            _get_recursive_module_set(value, module_set, package_set)
@contextlib.contextmanager
def disable_gc():
    """
    Context manager to disable garbage collection.

    This can result in significant speed-up in code creating a lot of objects,
    like :func:`pickle.load`.
    """
    # Nothing to do if collection is already disabled.
    if not gc.isenabled():
        yield
        return

    gc.disable()
    try:
        yield
    finally:
        gc.enable()
def render_graphviz(expr):
    """
    Render the structure of an expression as a graphviz description or SVG.

    :returns: A tuple(bool, content) where the boolean is ``True`` if SVG could
        be rendered or ``False`` if it still a graphviz description.

    :param expr: Expression to render
    :type expr: exekall.engine.ExpressionBase
    """
    graphviz = expr.format_structure(graphviz=True)
    # Feed the description to the external "dot" tool through a temp file.
    with tempfile.NamedTemporaryFile('wt') as f:
        f.write(graphviz)
        f.flush()
        try:
            svg = subprocess.check_output(
                ['dot', f.name, '-Tsvg'],
                stderr=subprocess.DEVNULL,
            ).decode('utf-8')
        # If "dot" is not installed
        except FileNotFoundError:
            pass
        except subprocess.CalledProcessError as e:
            debug('dot failed to execute: {}'.format(e))
        else:
            return (True, svg)
    # Fall back to the raw graphviz source when SVG rendering failed.
    return (False, graphviz)
def add_argument(parser, *args, help, **kwargs):
    """
    Equivalent to :meth:`argparse.ArgumentParser.add_argument`, with ``help``
    formatting.

    This allows using parsers setup using raw formatters.
    """
    if help is not argparse.SUPPRESS:
        help = textwrap.dedent(help)
        # Preserve all new lines where there are, and only wrap the other
        # lines.  Fix: this step used to run unconditionally, replacing the
        # ``argparse.SUPPRESS`` sentinel with an equal-but-distinct string and
        # defeating argparse's identity check for suppressed help.
        help = '\n'.join(textwrap.fill(line) for line in help.splitlines())
    return parser.add_argument(*args, **kwargs, help=help)
def create_adaptor_parser_group(parser, adaptor_cls):
    # Group the adaptor-specific CLI options under the adaptor's name.
    title = adaptor_cls.name
    description = (
        '{} custom options.\n'
        'Can only be specified *after* positional parameters.'
    ).format(title)
    return parser.add_argument_group(title, description)
def powerset(iterable):
    """
    Powerset of the given iterable ::

        powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)
    """
    elems = list(iterable)
    subsets_by_size = (
        itertools.combinations(elems, size)
        for size in range(len(elems) + 1)
    )
    return itertools.chain.from_iterable(subsets_by_size)
| StarcoderdataPython |
3287225 | import os
import shutil
path = '/Users/zac/Downloads/zip/'
def scan_file():
    # Return the name of the first .zip file found in the module-level
    # ``path`` directory, or None when there is none.
    for entry in os.listdir(path):
        if entry.endswith('.zip'):
            print("ZacLog: Found zip file!")
            return entry
def unzip_file(f):
    # Extract archive ``f`` (a file name inside ``path``) into a sibling
    # folder named after the archive with its extension stripped.
    destination = path + f.split('.')[0]
    if not os.path.exists(destination):
        os.makedirs(destination)
    shutil.unpack_archive(path + f, destination)
def delete_file(f):
    # Remove the file at the given full path.
    os.remove(f)
# Main flow: find the first zip in ``path``, extract it, then delete it.
zip_file = scan_file()
if zip_file:
    unzip_file(zip_file)
    delete_file(path + zip_file)
else:
    print("ZacLog: No zip file found!")
| StarcoderdataPython |
48927 | # -*- coding: utf-8 -*-
# Program Name: coin_conversion.py
# <NAME>
# 06/15/16
# Python Version 3.4
# Description: Convert amount into coins
# Optional import for versions of python <= 2
from __future__ import print_function
# Do this until valid input is given
# Keep prompting until the user supplies a whole number.
while True:
    try:
        coins = int(input("Please enter the total amount of coins: "))
    except ValueError:
        # Casting failed: tell the user and loop again.
        print("Sorry, I didn't understand that, try again, must be a whole number!")
    else:
        break

# Break the amount down, largest denomination first.  ``int(x / n)``
# truncates toward zero, matching the original behaviour.
quarters = int(coins / 25)
coins -= quarters * 25
print ("# of Quarters: %i x 25c = %i cents total" %(quarters,quarters*25))

dimes = int(coins / 10)
coins -= dimes * 10
print ("# of Dimes: %i x 10c = %i cents total" %(dimes,dimes*10))

nickles = int(coins / 5)
coins -= nickles * 5
print ("# of Nickles: %i x 5c = %i cents total" %(nickles,nickles*5))

pennies = int(coins)
print ("# of Pennies: %i x 1c = %i cents total" %(pennies,pennies*1))

"""
Please enter the total amount of coins: 166
# of Quarters: 6 x 25c = 150 cents total
# of Dimes: 1 x 10c = 10 cents total
# of Nickles: 1 x 5c = 5 cents total
# of Pennies: 1 x 1c = 1 cents total
"""
| StarcoderdataPython |
1785578 | #!/usr/bin/python
DOCUMENTATION = '''
---
module: ec2_asg_target_groups
short_description: Configure target groups on an existing auto scaling group
description:
- Configure the specified target groups to be attached to an auto scaling group
- The auto scaling group must already exist (use the ec2_asg module)
version_added: "2.4"
author: "<NAME> (@manicminer)"
options:
name:
description:
- The name of the auto scaling group
required: true
target_groups:
description:
- A list of target group names that you wish to attach to the auto scaling group.
- Any existing attached target groups will be detached.
wait_timeout:
description:
- Number of seconds to wait for the instances to pass their ELB health checks, after switching its target groups.
required: false
default: 300
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
---
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Set load balancers for an auto scaling group
- ec2_asg_target_groups:
name: webapp-production
target_groups:
- webapp-prod-blue
'''
RETURN = '''
---
new_group:
description: Details about the new now-in-service group
returned: success
type: dict
sample:
name: 'webapp-18'
target_group_arns: ['webapp-production']
instance_ids: ['i-aaccee01', 'i-aaccee02']
instance_status: {'i-aaccee01': ['InService', 'Healthy'], 'i-aaccee02': ['InService', 'Healthy']}
old_group:
description: Details about the now-previous group
returned: success
type: dict
sample:
name: 'webapp-17'
target_group_arns: ['webapp-post-production']
instance_ids: ['i-bbddff01', 'i-bbddff02']
instance_status: {'i-bbddff01': ['InService', 'Healthy'], 'i-bbddff02': ['InService', 'Healthy']}
'''
try:
import boto3
from botocore import exceptions
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
import time
def main():
    """Ansible entry point: switch the target groups attached to an ASG.

    Attaches the requested target groups, waits until every instance that was
    InService/Healthy beforehand reports healthy in the new target groups,
    then detaches the previously attached target groups that were not
    requested.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            name=dict(type='str', required=True),
            target_groups=dict(type='list', required=True),
            wait_timeout=dict(type='int', default=300),
            # Fix: this option was read below but never declared in the spec,
            # so it always evaluated to None.  Default False preserves the old
            # effective behavior (no rollback).
            rollback_on_failure=dict(type='bool', default=False),
        ),
    )

    module = AnsibleModule(argument_spec=argument_spec)

    if not HAS_BOTO3:
        module.fail_json(msg='boto3 required for this module')

    try:
        region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
        autoscaling = boto3_conn(module, conn_type='client', resource='autoscaling', region=region, endpoint=ec2_url, **aws_connect_kwargs)
        elb = boto3_conn(module, conn_type='client', resource='elbv2', region=region, endpoint=ec2_url, **aws_connect_kwargs)
    # Fix: this used Python-2-only ``except X, e`` syntax and referenced the
    # undefined name ``botocore`` (only ``exceptions`` is imported above).
    except exceptions.ClientError as e:
        module.fail_json(msg="Boto3 Client Error - " + str(e))

    group_name = module.params.get('name')
    groups = autoscaling.describe_auto_scaling_groups(AutoScalingGroupNames=[group_name])['AutoScalingGroups']
    if len(groups) > 1:
        module.fail_json(msg="More than one auto scaling group was found that matches the supplied group_name '%s'." % group_name)
    elif len(groups) < 1:
        module.fail_json(msg="The auto scaling group '%s' was not found" % group_name)
    group = groups[0]

    new_target_group_names = module.params.get('target_groups')
    target_groups = elb.describe_target_groups(Names=new_target_group_names)['TargetGroups']
    if len(target_groups) < 1:
        module.fail_json(msg="No target groups found")

    new_target_groups = [t['TargetGroupArn'] for t in target_groups]
    old_target_groups = group['TargetGroupARNs']
    unique_new_target_groups = [t for t in new_target_groups if t not in old_target_groups]
    unique_old_target_groups = [t for t in old_target_groups if t not in new_target_groups]

    instances = [i['InstanceId'] for i in group['Instances']]
    instance_status = dict((i['InstanceId'], (i['LifecycleState'], i['HealthStatus'])) for i in group['Instances'])

    # Before starting, ensure instances in group are healthy and in service.
    # (.items() replaces the Python-2-only .iteritems())
    for instance_id, status in instance_status.items():
        if status[0] != 'InService' or status[1] != 'Healthy':
            module.fail_json(msg='Instances in group must be healthy and in service')

    # Attach dest target group(s) to auto scaling group
    autoscaling.attach_load_balancer_target_groups(AutoScalingGroupName=group['AutoScalingGroupName'], TargetGroupARNs=new_target_groups)

    # Ensure instances in service with new target group(s)
    healthy = False
    wait_timeout = time.time() + module.params.get('wait_timeout')
    while not healthy and wait_timeout > time.time():
        healthy = True

        # Iterate instances and ensure they are registered/healthy
        for instance_id, status in instance_status.items():
            # We are only concerned with instances that were InService prior
            # to switching the target groups, and, where the auto scaling
            # group uses ELB health checks, whose health check passed.
            # (Fix: this used the undefined name ``new_group``; the health
            # check type lives on ``group``.)
            if status[0] == 'InService' and (status[1] == 'Healthy' or group['HealthCheckType'] != 'ELB'):
                # Iterate new target_groups and retrieve instance health
                for target_group in new_target_groups:
                    instance_health = elb.describe_target_health(TargetGroupArn=target_group, Targets=[{'Id': instance_id}])['TargetHealthDescriptions']
                    # Ensure the instance is registered and healthy according
                    # to the dest target group
                    if len(instance_health) == 0 or instance_health[0]['TargetHealth']['State'] != 'healthy':
                        healthy = False

        if not healthy:
            time.sleep(5)

    # The new target group(s) failed to report the instances as healthy in
    # time.  (Fix: the old ``wait_timeout <= time.time()`` check also failed
    # when the final poll had just succeeded at the deadline.)
    if not healthy:
        if module.params.get('rollback_on_failure'):
            # Detach only the target groups we added, restoring the previous
            # state.  (Fix: this called the classic-ELB detach API with an
            # undefined variable.)
            autoscaling.detach_load_balancer_target_groups(AutoScalingGroupName=group['AutoScalingGroupName'], TargetGroupARNs=unique_new_target_groups)
            module.fail_json(msg='Waited too long for target ELB to report instances as healthy. Configuration has been rolled back.')
        else:
            module.fail_json(msg='Waited too long for target ELB to report instances as healthy. No rollback action was taken.')

    # Detach old target group(s) from the auto scaling group (unique only: do
    # not detach target groups that are both old and newly requested).
    autoscaling.detach_load_balancer_target_groups(AutoScalingGroupName=group['AutoScalingGroupName'], TargetGroupARNs=unique_old_target_groups)

    result = dict(
        name=group['AutoScalingGroupName'],
        target_group_arns=new_target_groups,
        instance_ids=instances,
        instance_status=instance_status,
    )
    module.exit_json(changed=True, result=result)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| StarcoderdataPython |
3224957 | from abc import ABC
from src import OrderDetails
class DatabaseInterface(ABC):
    """Persistence boundary for storing pizza orders."""
    # NOTE(review): the method is a plain ``pass`` stub, not @abstractmethod,
    # so subclasses are not forced to override it.
    async def add_order(self, size: OrderDetails.PizzaSizes, payment: OrderDetails.PaymentTypes):
        pass
class MessageSenderInterface(ABC):
    """Outgoing-message side of a chat transport."""
    # NOTE(review): stubs are not @abstractmethod, so overriding is optional.
    async def send_message(self, msg: str, buttons=None):
        pass

    def end_dialog(self):
        pass
class MessengerInterface(ABC):
    """Lifecycle hooks for a messenger backend (long polling or webhook)."""
    def start_listening(self):
        pass

    def start_webhook(self, webhook_path: str, webapp_host: str, webapp_port: int, webhook_url: str):
        pass

    # NOTE(review): stub returns None despite the ``-> bool`` annotation;
    # implementations are expected to return a real boolean.
    def webhook_enabled(self) -> bool:
        pass
3385176 | import unittest
from random import randint
from pytime.clock_in_mirror import what_is_the_time
class ClockTestCases(unittest.TestCase):
    """Checks for what_is_the_time(), which mirrors a clock-face time.

    Mirroring is an involution: applying the function twice must return the
    original time (exercised by test_random).
    """
    def test_2(self):
        self.assertEqual(what_is_the_time("06:35"), "05:25", "didn't work for '06:35'")
    def test_3(self):
        self.assertEqual(what_is_the_time("11:59"), "12:01", "didn't work for '11:59'")
    def test_4(self):
        self.assertEqual(what_is_the_time("12:02"), "11:58", "didn't work for '12:02'")
    def test_5(self):
        self.assertEqual(what_is_the_time("04:00"), "08:00", "didn't work for '04:00'")
    def test_6(self):
        # 06:00 lies on the mirror axis, so it maps to itself.
        self.assertEqual(what_is_the_time("06:00"), "06:00", "didn't work for '06:00'")
    def test_7(self):
        self.assertEqual(what_is_the_time("12:00"), "12:00", "didn't work for '12:00'")
    @staticmethod
    def rand_time():
        # Random "HH:MM" with HH in 01..12 and MM in 00..59.
        return "{:02}:{:02}".format(randint(1, 12), randint(0, 59))
    def test_random(self):
        for i in range(10):
            time = ClockTestCases.rand_time()
            self.assertEqual(what_is_the_time(what_is_the_time(time)), time, "didn't work for " + time)
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
3329563 | <reponame>jsalbert/biotorch
import pytest
@pytest.fixture(scope='session')
def model_architectures():
    """Session-scoped fixture listing model architectures to test."""
    # Tuple containing (architecture, input size)
    return [
        ('le_net_mnist', (1, 1, 32, 32)),
        ('le_net_cifar', (1, 3, 32, 32)),
        ('resnet18', (1, 3, 128, 128)),
        ('resnet20', (1, 3, 128, 128)),
        ('resnet56', (1, 3, 128, 128))
    ]
| StarcoderdataPython |
1626535 | """Refresh a duplicate volume with a snapshot from its parent."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
@click.command()
@click.argument('volume_id')
@click.argument('snapshot_id')
@environment.pass_env
def cli(env, volume_id, snapshot_id):
    """Refresh a duplicate volume with a snapshot from its parent."""
    # Delegate to the SoftLayer block-storage API and echo its raw response.
    block_manager = SoftLayer.BlockStorageManager(env.client)
    resp = block_manager.refresh_dupe(volume_id, snapshot_id)
    click.echo(resp)
128290 | <gh_stars>100-1000
p = 196732205348849427366498732223276547339
secret = REDACTED
def calc_root(num, mod, n):
    # n-th modular root of ``num`` in GF(mod).  NOTE(review): GF()/nth_root
    # are SageMath constructs, not plain Python -- this script is meant to run
    # under Sage.
    f = GF(mod)
    temp = f(num)
    return temp.nth_root(n)
def gen_v_list(primelist, p, secret):
    # Compute the ``secret``-th modular root of every prime modulo ``p``.
    return [calc_root(prime, p, secret) for prime in primelist]
def decodeInt(i, primelist):
    # Recover the bit-string encoded in ``i``: one bit per prime, largest
    # prime first; a bit is '1' iff that prime divides ``i``.
    ordered = sorted(primelist, reverse=True)
    return ''.join('1' if i % prime == 0 else '0' for prime in ordered)
def bin2asc(b):
    # Convert a bit-string to its ASCII bytes.  NOTE(review):
    # str.decode('hex') is Python-2-only; under Python 3 this raises
    # AttributeError (use binascii.unhexlify there).
    return hex(int(b,2)).replace('0x','').decode('hex')
primelist = [2,3,5,7,11,13,17,19,23,29,31,37,43,47,53,59]
# NOTE(review): ``message`` is a redacted placeholder (as are ``secret``
# above) -- the script cannot run until they are filled in.
message = REDACTED
# Split the message into 2-character chunks (16 bits each once hex-encoded).
chunks = []
for i in range(0,len(message),2):
    chunks += [message[i:i+2]]
vlist = gen_v_list(primelist,p,secret)
print(vlist)
# Encode each chunk: the lsb-first bit-string selects which v values to
# multiply together modulo p.
for chunk in chunks:
    binarized = bin(int(chunk.encode('hex'),16)).replace('0b','').zfill(16)[::-1] #lsb first
    enc = 1
    for bit in range(len(binarized)):
        enc *= vlist[bit]**int(binarized[bit])
    enc = enc%p
    print(enc)
1652698 | <reponame>liquidity-network/nocust-hub
from .subscribe import (
subscribe,
unsubscribe,
)
from .ping import (
ping,
)
from .get import (
get,
)
# Dispatch table mapping a wire-level operation name to its handler.
OPERATIONS = {
    'subscribe': subscribe,
    'unsubscribe': unsubscribe,
    'ping': ping,
    'get': get,
}
| StarcoderdataPython |
3210349 | <reponame>shreshthtuli/Go-Back-N
"""
Mininet Topologies with 2 nodes
Author : <NAME>
Usage:
- sudo mn --custom topo.py --topo linear --controller=remote,ip=127.0.0.1
To specify parameters use: --link tc,bw=10,delay=3,loss=2,max_queue_size=3
Example : for ring topology with bandwidth limited to 2:
- sudo mn --custom topo.py --topo linear --controller=remote,ip=127.0.0.1 --link tc,bw=1,delay=3,loss=1
"""
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.node import CPULimitedHost
from mininet.link import TCLink
from mininet.util import dumpNodeConnections
from mininet.log import setLogLevel
class LinearTopo( Topo ):
    "Linear Topology Mininet"

    def __init__( self ):
        # Initialize the parent topology first.
        Topo.__init__( self )

        # Two hosts bridged by a pair of switches: h1 -- s3 -- s4 -- h2
        host_left = self.addHost( 'h1' )
        host_right = self.addHost( 'h2' )
        switch_left = self.addSwitch( 's3' )
        switch_right = self.addSwitch( 's4' )

        self.addLink( host_left, switch_left )
        self.addLink( switch_left, switch_right )
        self.addLink( switch_right, host_right )
# Lookup table used by "mn --custom" to resolve --topo names.
# Fix: the previous 'ring', 'mesh' and 'star' entries referenced
# RingTopo/MeshTopo/StarTopo classes that are not defined anywhere in this
# file, so selecting them crashed with a NameError.  Only the implemented
# topology is exposed.
topos = {
    'linear': ( lambda: LinearTopo() ),
}
14907 | <gh_stars>1-10
from fumblr.keys import IMGUR_SECRET, IMGUR_ID
from imgurpython import ImgurClient, helpers
import os
import base64
API_URL = 'https://api.imgur.com/3/'
def get_client():
    """
    Get an API client for Imgur

    Returns:
        Imgur client if it is available
    """
    try:
        return ImgurClient(IMGUR_ID, IMGUR_SECRET)
    except helpers.error.ImgurClientError:
        # Falls through to an implicit None so callers can test truthiness.
        print(f'Error: imgur client error - id: {IMGUR_ID} secret: {IMGUR_SECRET}')
def delete_image(deletehash):
    """
    Delete image from Imgur with given deletehash

    Args:
        deletehash: Hash id of image to delete

    Returns:
        Response from Imgur of image deletion if successful, otherwise False
        (None when no client could be created).
    """
    client = get_client()
    if client:
        try:
            return client.delete_image(deletehash)
        # Fix: was a bare ``except:``, which also swallowed KeyboardInterrupt
        # and SystemExit.  Keep the best-effort contract but only catch
        # ordinary errors.
        except Exception:
            return False
def upload_image(path):
    """
    Upload image at system path to Imgur

    The Imgur response is a dict describing the created image, e.g.::

        {'id': 'AEvnA7h',
         'deletehash': 'YkK79ucEtDDn1b9',
         'type': 'image/png',
         'width': 187,
         'height': 242,
         'link': 'http://i.imgur.com/AEvnA7h.png',
         ...}

    Args:
        path: System path of image

    Returns:
        Response from Imgur (None when no client is available)
    """
    client = get_client()
    if not client:
        return None
    absolute_path = os.path.abspath(path)
    return client.upload_from_path(absolute_path)
def upload(image):
    """
    Upload image to Imgur from file

    Args:
        image: File object

    Returns:
        Imgur response object (None when no client is available)
    """
    client = get_client()
    if not client:
        return None
    # Imgur's upload endpoint accepts base64-encoded image payloads.
    encoded = base64.b64encode(image.read())
    payload = {
        'image': encoded,
        'type': 'base64'
    }
    return client.make_request('POST', 'upload', payload, True)
def upload_from_url(url):
    """
    Upload image to Imgur from url

    Args:
        url: URL of image

    Returns:
        Imgur Response object if successful, otherwise False
        (None when no client is available)
    """
    client = get_client()
    if not client:
        return None
    try:
        return client.upload_from_url(url)
    except helpers.error.ImgurClientError:
        print('Error: imgur client error')
        return False
def get_image(id):
    """
    Return image data for image with given id

    Args:
        id: Imgur image id

    Returns:
        Response from Imgur (None when no client is available)
    """
    client = get_client()
    if not client:
        return None
    return client.get_image(id)
40127 | # -*- coding: utf-8 -*-
from .gmail import GmailPlugin
| StarcoderdataPython |
148249 | <filename>apps/groups/views.py<gh_stars>0
from rest_framework import viewsets
from django.contrib.auth.models import Group
from rest_framework.authentication import SessionAuthentication
from rest_framework_simplejwt.authentication import JWTAuthentication
from rest_framework.permissions import IsAuthenticatedOrReadOnly, IsAdminUser
from .serializer import GroupSerializer
class GroupViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Django auth groups."""
    queryset = Group.objects.all()
    serializer_class = GroupSerializer
    # Session auth for the browsable API, JWT for API clients.
    authentication_classes = [SessionAuthentication, JWTAuthentication]
    # NOTE(review): DRF ANDs these permissions, so IsAdminUser makes even
    # read access staff-only -- confirm that is intended.
    permission_classes = [IsAuthenticatedOrReadOnly, IsAdminUser]
3326393 | # _ __ ____ _ _
# | |/ / | _ \ | \ | |
# | ' / | |_) | | \| |
# | . \ | __/ | |\ |
# |_|\_\ |_| |_| \_|
#
# (c) 2018 KPN
# License: MIT license.
# Author: <NAME>
#
# include all
from .senml_base import SenmlBase
from .senml_pack import SenmlPack
from .senml_record import SenmlRecord
from .senml_unit import SenmlUnits
from .senml_kpn_names import SenmlNames | StarcoderdataPython |
1607827 | <filename>tests/native/test_armv7_bitwise.py
import unittest
from manticore.native.cpu import bitwise
class BitwiseTest(unittest.TestCase):
    """Unit tests for the ARM-style shift/rotate helpers in cpu.bitwise.

    Each *_C helper returns a (result, carry_out) pair; tests cover both the
    no-carry and carry-out cases for LSL, LSR, ASR, ROR and RRX.
    """
    _multiprocess_can_split_ = True
    def test_mask(self):
        # Mask(n) is an n-bit all-ones mask.
        masked = bitwise.Mask(8)
        self.assertEqual(masked, 0xFF)
    def test_get_bits(self):
        val = 0xAABBCCDD
        result = bitwise.GetNBits(val, 8)
        self.assertEqual(result, 0xDD)
    def test_lsl_nocarry(self):
        val = 0xAA00
        result, carry = bitwise.LSL_C(val, 4, 32)
        self.assertEqual(result, 0x0AA000)
        self.assertEqual(carry, 0)
    def test_lsl_carry(self):
        # Shifting the top bit out of a 32-bit value sets the carry.
        val = 0x80000000
        result, carry = bitwise.LSL_C(val, 1, 32)
        print(hex(result), "", hex(carry))
        self.assertEqual(result, 0)
        self.assertEqual(carry, 1)
    def test_lsr_nocarry(self):
        val = 0xFFF7
        result, carry = bitwise.LSR_C(val, 4, 32)
        self.assertEqual(result, 0x0FFF)
        self.assertEqual(carry, 0)
    def test_lsr_carry(self):
        # Carry is the last bit shifted out (bit 3 of 0xFFF8).
        val = 0xFFF8
        result, carry = bitwise.LSR_C(val, 4, 32)
        self.assertEqual(result, 0x0FFF)
        self.assertEqual(carry, 1)
    def test_asr_nocarry(self):
        val = 0x00F0
        result, carry = bitwise.ASR_C(val, 4, 32)
        self.assertEqual(result, 0xF)
        self.assertEqual(carry, 0)
    def test_asr_carry(self):
        val = 0x0003
        result, carry = bitwise.ASR_C(val, 1, 32)
        self.assertEqual(result, 1)
        self.assertEqual(carry, 1)
    def test_ror_nocarry(self):
        val = 0x00F0
        result, carry = bitwise.ROR_C(val, 4, 32)
        print(hex(result))
        self.assertEqual(result, 0xF)
        self.assertEqual(carry, 0)
    def test_ror_carry(self):
        # The rotated-out low bit reappears as the top bit and the carry.
        val = 0x0003
        result, carry = bitwise.ROR_C(val, 1, 32)
        print(hex(result))
        self.assertEqual(result, 0x80000001)
        self.assertEqual(carry, 1)
    def test_rrx_nocarry(self):
        val = 0x000F
        result, carry = bitwise.RRX_C(val, 0, 32)
        self.assertEqual(result, 0x7)
        self.assertEqual(carry, 1)
    def test_rrx_carry(self):
        # RRX shifts the carry-in into the top bit.
        val = 0x0001
        result, carry = bitwise.RRX_C(val, 1, 32)
        print(hex(result))
        self.assertEqual(result, 0x80000000)
        self.assertEqual(carry, 1)
    def test_sint(self):
        # SInt reinterprets an unsigned value as two's-complement.
        val = 0xFFFFFFFF
        result = bitwise.SInt(val, 32)
        self.assertEqual(result, -1)
    def test_sint_2(self):
        val = 0xFFFFFFFE
        result = bitwise.SInt(val, 32)
        self.assertEqual(result, -2)
1794719 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from gdcmdtools.perm import GDPerm
from gdcmdtools.perm import help_permission_text
import argparse
from argparse import RawTextHelpFormatter
from gdcmdtools.base import BASE_INFO
from gdcmdtools.base import DEBUG_LEVEL
from pprint import pprint
import sys
import logging
# Root logger; the level is set later from the --debug argument.
logger = logging.getLogger()

# Tool identity; the version is inherited from the shared gdcmdtools BASE_INFO.
__THIS_APP = 'gdperm'
__THIS_DESCRIPTION = 'Tool to change file\'s permission on Google Drive'
__THIS_VERSION = BASE_INFO["version"]
def test():
    """Flip one permission between writer and reader and verify each update.

    Requires live Google Drive access; the file id and permission id below
    belong to a fixed test document.
    """
    file_id = "https://drive.google.com/open?id=0B60IjoJ-xHK6YU1wZ2hsQVQ0SzA"
    permission_id = "02914492818163807046i"

    actions = [
        {'name': 'update',
         'param': [permission_id, 'user', 'writer', '<EMAIL>']},
        {'name': 'update',
         'param': [permission_id, 'user', 'reader', '<EMAIL>']},
    ]

    for action in actions:
        result = GDPerm(file_id, action).run()
        pprint(result)
        # param[2] is the role we just asked for; the API must echo it back.
        assert result[u"role"] == action["param"][2]
if __name__ == '__main__':
    # Build the CLI.  All actions live in one mutually-exclusive group, so at
    # most one of --list/--get/--insert/--update/--delete/--get_by_user is
    # accepted per invocation.
    arg_parser = argparse.ArgumentParser(
        description='%s v%s - %s - %s (%s)' %
        (__THIS_APP,
         __THIS_VERSION,
         __THIS_DESCRIPTION,
         BASE_INFO["app"],
         BASE_INFO["description"]),
        formatter_class=RawTextHelpFormatter)

    arg_parser.add_argument(
        'file_id',
        help='The id for the file you\'re going to change permission')

    mutex_group = arg_parser.add_mutually_exclusive_group(required=False)

    mutex_group.add_argument(
        '--list',
        action='store_true',
        help='list the permission resource of the file')

    mutex_group.add_argument(
        '--get',
        metavar='PERMISSION_ID',
        help='get the permission resource by id')

    # --insert takes three positional values (type, role, value).
    PERMISSION_METAVAR = ('TYPE', 'ROLE', 'VALUE')
    mutex_group.add_argument(
        '--insert',
        metavar=PERMISSION_METAVAR,
        nargs=len(PERMISSION_METAVAR),
        help="set the permission of the created folder, can be:\n" +
        '\n'.join(help_permission_text) +
        '\nvalue: user or group e-mail address,\nor \'me\' to refer to the current authorized user\n' +
        'ex: -p anyone reader me # set the uploaded file public-read')

    # --update takes the same values plus the permission id to modify.
    UPDATE_PERMISSION_METAVAR = ("PERMISSION_ID",) + PERMISSION_METAVAR
    mutex_group.add_argument(
        '--update',
        metavar=UPDATE_PERMISSION_METAVAR,
        nargs=len(UPDATE_PERMISSION_METAVAR),
        help="update the permission, refer to the help of --insert")

    mutex_group.add_argument(
        '--delete',
        metavar='PERMISSION_ID',
        help='delete the permission of the file by id')

    mutex_group.add_argument(
        '--get_by_user',
        metavar='USER_EMAIL',
        help='get the permission associated with user')

    arg_parser.add_argument('--debug',
                            choices=DEBUG_LEVEL,
                            default=DEBUG_LEVEL[-1],
                            help='define the debug level')

    args = arg_parser.parse_args()

    # set debug devel
    logger.setLevel(getattr(logging, args.debug.upper()))

    action = {}
    valid_actions = [
        "list",
        "get",
        "insert",
        "update",
        "delete",
        "get_by_user"]
    for a in valid_actions:
        action[a] = args.__dict__[a]

    # check which action is given by argument: the chosen action is the one
    # whose parsed value differs from the argparse default.  Exit status 0 on
    # a successful API call, 1 on failure or when no action was supplied.
    for act in action:
        if action[act] != mutex_group.get_default(act):
            pass_action = {"name": act, "param": action[act]}
            logger.debug("pass_action=%s" % pass_action)

            perm = GDPerm(args.file_id, pass_action)
            result = perm.run()
            pprint(result)
            if result is None:
                sys.exit(1)
            else:
                sys.exit(0)

    logger.error('unexpected error')
    sys.exit(1)
| StarcoderdataPython |
3331797 | <reponame>dexbiobot/SML-Cogs
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2017 SML
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import json
import os
import re
from collections import defaultdict
import aiohttp
import discord
import yaml
from __main__ import send_cmd_help
from box import Box
from cogs.utils import checks
from cogs.utils.dataIO import dataIO
from discord.ext import commands
# On-disk layout for this cog's persistent data.
PATH = os.path.join("data", "bands")
JSON = os.path.join(PATH, "settings.json")
CONFIG_YAML = os.path.join(PATH, "config.yml")
def nested_dict():
    """Return a dict whose missing keys transparently create further nested dicts."""
    return defaultdict(nested_dict)
class Client():
    """Thin asynchronous client for the brawlstats band HTTP API."""

    def __init__(self, auth=None):
        """Store the Authorization header sent with every request."""
        self.headers = {
            'Authorization': auth
        }

    def api_url(self, kind, tag):
        """Return the endpoint URL for *tag*; only kind == 'bands' is supported."""
        if kind == "bands":
            return "https://api.brawlstats.io/v1/bands/" + tag

    async def get_band(self, tag):
        """Fetch one band and return it as a Box, or None on any failure."""
        data = None
        try:
            async with aiohttp.ClientSession(headers=self.headers) as session:
                async with session.get(self.api_url("bands", tag)) as resp:
                    if resp.status == 200:
                        data = await resp.json()
        except json.JSONDecodeError:
            return None
        if data is None:
            # Non-200 responses previously fell through to Box(None), which
            # errors and defeats the `is not None` filter in get_bands().
            return None
        return Box(data)

    async def get_bands(self, tags):
        """Fetch several bands, silently skipping any that failed to load."""
        data = []
        for tag in tags:
            d = await self.get_band(tag)
            if d is not None:
                data.append(d)
        return data
class Bands:
    """Auto parse band info and display requirements."""

    def __init__(self, bot):
        """Load persisted settings and defer API-client creation."""
        self.bot = bot
        self.settings = nested_dict()
        self.settings.update(dataIO.load_json(JSON))
        self._client = None

    @property
    def client(self):
        # Lazily create the API client once a config (and thus an auth
        # token) is available.
        if self._client is None:
            if self.bands_config is not None:
                self._client = Client(auth=self.bands_config.authorization)
        return self._client

    @checks.mod_or_permissions()
    @commands.group(pass_context=True)
    async def bandsset(self, ctx):
        """Settings"""
        if ctx.invoked_subcommand is None:
            await send_cmd_help(ctx)

    @checks.mod_or_permissions()
    @bandsset.command(name="config", pass_context=True, no_pm=True)
    async def bandsset_config(self, ctx):
        """Upload config yaml file. See config.example.yml for how to format it."""
        TIMEOUT = 60.0
        await self.bot.say(
            "Please upload family config yaml file. "
            "[Timeout: {} seconds]".format(TIMEOUT))
        attach_msg = await self.bot.wait_for_message(
            timeout=TIMEOUT,
            author=ctx.message.author)
        if attach_msg is None:
            await self.bot.say("Operation time out.")
            return
        if not len(attach_msg.attachments):
            await self.bot.say("Cannot find attachments.")
            return
        attach = attach_msg.attachments[0]
        url = attach["url"]
        # Download the uploaded YAML and persist it as this cog's config.
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                with open(CONFIG_YAML, "wb") as f:
                    f.write(await resp.read())
        await self.bot.say(
            "Attachment received and saved as {}".format(CONFIG_YAML))
        self.settings['config'] = CONFIG_YAML
        dataIO.save_json(JSON, self.settings)

    @property
    def bands_config(self):
        # Re-read the YAML on each access so a fresh upload takes effect
        # without reloading the cog.
        if os.path.exists(CONFIG_YAML):
            with open(CONFIG_YAML) as f:
                config = Box(yaml.load(f))
            return config
        return None

    def get_band_config(self, tag):
        """Return the configured entry matching band *tag*, or None."""
        for band in self.bands_config.bands:
            if band.tag == tag:
                return Box(band, default_box=True)
        return None

    @commands.command(pass_context=True, no_pm=True)
    async def bands(self, ctx, *args):
        """Display band info.

        [p]bands -m Disable member count
        [p]bands -t Disable band tag
        """
        await self.bot.type()
        config = self.bands_config
        band_tags = [band.tag for band in config.bands]
        bands = await self.client.get_bands(band_tags)
        color = getattr(discord.Color, config.color)()
        em = discord.Embed(
            title=config.name,
            description="Minimum trophies to join our Brawl Stars bands. Current trophies required.",
            color=color
        )
        show_member_count = "-m" not in args
        show_band_tag = "-t" not in args
        for band in bands:
            # The trophy requirement is scraped from the free-text band
            # description; 'O' is tolerated as a typo for '0', and 'PB'
            # marks a personal-best requirement.
            match = re.search('[\d,O]{3,}', band.description)
            pb_match = re.search('PB', band.description)
            name = band.name
            band_config = self.get_band_config(band.tag)
            trophies = 'N/A'
            # Priority: explicit config override, then scraped value, then
            # the API's required_score field.
            if band_config.req:
                trophies = band_config.req
            elif match is not None:
                trophies = match.group(0)
                trophies = trophies.replace(',', '')
                trophies = trophies.replace('O', '0')
                trophies = '{:,}'.format(int(trophies))
            else:
                trophies = band.required_score
            pb = ''
            if pb_match is not None:
                pb = ' PB'
            member_count = ''
            if show_member_count:
                member_count = '{} / 100\n'.format(band.member_count)
            band_tag = ''
            if show_band_tag:
                band_tag = '#{}\n'.format(band.tag)
            value = '{band_tag}{member_count}{trophies}{pb}'.format(
                band_tag=band_tag,
                member_count=member_count,
                trophies=trophies,
                pb=pb)
            em.add_field(name=name, value=value)
        em.set_thumbnail(url=config.logo_url)
        await self.bot.say(embed=em)
def check_folder():
    """Ensure the cog's data directory exists, creating it when missing."""
    os.makedirs(PATH, exist_ok=True)
def check_file():
    """Initialise the settings file with an empty dict if absent or invalid."""
    if not dataIO.is_valid_json(JSON):
        dataIO.save_json(JSON, {})
def setup(bot):
    """Red-bot entry point: prepare data files and register the cog."""
    check_folder()
    check_file()
    bot.add_cog(Bands(bot))
| StarcoderdataPython |
1707521 | #!/usr/bin/env python3
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
# Load benchmark results for the raw ("original") and preprocessed
# ("warped") datasets.
df1 = pd.read_csv("original-benchmark-results.csv")
df2 = pd.read_csv("warped-benchmark-results.csv")

# Per-sampler mean rates, split by cache state.  resnet18 rows are a
# different workload and are excluded from the sampler comparison.
# (The unused mean1/mean2 aggregates from the original script were removed.)
cached1 = (
    df1[(df1["cached"]) & (df1["sampler"] != "resnet18")].groupby("sampler").mean()
)
cached2 = (
    df2[(df2["cached"]) & (df2["sampler"] != "resnet18")].groupby("sampler").mean()
)
not_cached1 = (
    df1[(~df1["cached"]) & (df1["sampler"] != "resnet18")].groupby("sampler").mean()
)
not_cached2 = (
    df2[(~df2["cached"]) & (df2["sampler"] != "resnet18")].groupby("sampler").mean()
)

print("cached, original\n", cached1)
print("cached, warped\n", cached2)
print("not cached, original\n", not_cached1)
print("not cached, warped\n", not_cached2)

cmap = sns.color_palette()
labels = ["GridGeoSampler", "RandomBatchGeoSampler", "RandomGeoSampler"]

fig, ax = plt.subplots()
x = np.arange(3)
width = 0.2
# Four grouped bars per sampler.  The original bound these to rects1..rects3
# (assigning rects2 twice and never using any handle); the return values are
# deliberately discarded now.
ax.bar(
    x - width * 3 / 2,
    not_cached1["rate"],
    width,
    label="Raw Data, Not Cached",
    color=cmap[0],
)
ax.bar(
    x - width * 1 / 2,
    not_cached2["rate"],
    width,
    label="Preprocessed, Not Cached",
    color=cmap[1],
)
ax.bar(
    x + width * 1 / 2, cached1["rate"], width, label="Raw Data, Cached", color=cmap[2]
)
ax.bar(
    x + width * 3 / 2,
    cached2["rate"],
    width,
    label="Preprocessed, Cached",
    color=cmap[3],
)
ax.set_ylabel("sampling rate (patches/sec)", fontsize=12)
ax.set_xticks(x)
ax.set_xticklabels(labels, fontsize=12)
ax.tick_params(axis="x", labelrotation=10)
ax.legend(fontsize="large")
# Hide the top/right spines for a cleaner look.
plt.gca().spines.right.set_visible(False)
plt.gca().spines.top.set_visible(False)
plt.tight_layout()
plt.show()
| StarcoderdataPython |
1792779 | # Copyright (c) 2012 - 2015 <NAME>, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
import sys
import time
import subprocess
from jenkinsflow.test.cfg import ApiType
from jenkinsflow.test.framework import api_select
from jenkinsflow.test.framework.logger import log, logt
def _abort(log_file, test_file_name, api_type, fixed_prefix, job_name, sleep_time):
    """Sleep *sleep_time* seconds, then stop every running build of *job_name*.

    Runs inside the subprocess spawned by abort() below; progress is written
    to *log_file* so the parent test can inspect what happened.
    """
    log(log_file, '\n')
    logt(log_file, "Waiting to abort job:", job_name)
    logt(log_file, "args:", test_file_name, fixed_prefix, job_name, sleep_time)
    time.sleep(sleep_time)
    # Re-authenticate against the same test API instance the parent uses;
    # the job is registered with zero expected invocations since we only
    # need a handle on it, not a new build.
    with api_select.api(test_file_name, api_type, fixed_prefix='jenkinsflow_test__' + fixed_prefix + '__', login=True) as api:
        api.job(job_name, max_fails=0, expect_invocations=0, expect_order=None)
        api.poll()
        api.quick_poll()
        abort_me = api.get_job(api.job_name_prefix + job_name)
        logt(log_file, "Abort job:", abort_me)
        abort_me.stop_all()
        logt(log_file, "Aborted")
if __name__ == '__main__':
    # Entry point for the subprocess spawned by abort(); argv carries the
    # _abort() arguments in order (file name, api type, prefix, job, delay).
    job_name = sys.argv[4]
    # 'a+' so the log the parent pre-created is appended to, not replaced.
    with open(job_name, 'a+') as log_file:
        _abort(log_file, sys.argv[1], ApiType[sys.argv[2]], sys.argv[3], job_name, int(sys.argv[5]))
def abort(api, job_name, sleep_time):
    """Call this script as a subprocess.

    Spawns this module with sys.executable so _abort() stops *job_name*
    after *sleep_time* seconds, without blocking the calling test.
    """
    # Nothing to abort against the mock API.
    if api.api_type == ApiType.MOCK:
        return
    # __file__ may point at the compiled .pyc; the subprocess needs the source.
    ff = __file__.replace('.pyc', '.py')
    args = [sys.executable, ff, api.file_name, api.api_type.name, api.func_name.replace('test_', ''), job_name, str(sleep_time)]
    with open(job_name, 'w') as log_file:
        logt(log_file, "Invoking abort subprocess.", args)
        subprocess.Popen(args)
| StarcoderdataPython |
199469 | <filename>bitcashcn/network/transaction.py
from bitcashcn.utils import is_valid_hex, asm_to_list
class Transaction:
    """A transaction reported by the network, with its inputs and outputs."""

    __slots__ = ('txid', 'amount_in', 'amount_out', 'fee', 'inputs', 'outputs')

    def __init__(self, txid, amount_in, amount_out):
        """Record the totals and derive the miner fee (input minus output)."""
        self.txid = txid
        fee = amount_in - amount_out
        if fee < 0:
            raise ArithmeticError("Output is greater than input, leaving no room for a fee.")
        self.fee = fee
        self.amount_in = amount_in
        self.amount_out = amount_out
        self.inputs = []
        self.outputs = []

    def add_input(self, part):
        """Append one input part to the transaction."""
        self.inputs.append(part)

    def add_output(self, part):
        """Append one output part to the transaction."""
        self.outputs.append(part)

    def __repr__(self):
        return (
            'Transaction(txid={!r}, amount_in={}, amount_out={}, '
            'fee={}, inputs={}, outputs={})'.format(
                self.txid, self.amount_in, self.amount_out,
                self.fee, len(self.inputs), len(self.outputs))
        )
class TxInput:
    """
    A single transaction input: the spending address and its amount.
    """

    def __init__(self, address, amount):
        self.address = address
        self.amount = amount

    def __repr__(self):
        return "Input(address={0}, amount={1:.0f})".format(self.address, self.amount)
class TxOutput:
    """
    A single transaction output.

    ``address`` is None for OP_RETURN (data-carrier) outputs; ``data`` then
    holds the parsed script pieces when *asm* could be decoded.
    """

    def __init__(self, address, amount, asm=None):
        self.address = address
        self.amount = amount
        self.op_return = None
        # Always initialise ``data``: previously it was only assigned inside
        # the OP_RETURN branch, so __repr__ raised AttributeError for any
        # address-less output without a parsable OP_RETURN script.
        self.data = None
        if address is None and asm is not None:
            if asm.startswith('OP_RETURN '):
                self.data = asm_to_list(asm)

    def __repr__(self):
        if self.address is None and self.data is not None:
            return "Output(OP_RETURN, amount_burned={})".format(self.amount)
        else:
            return "Output(address={}, amount={:.0f})".format(self.address, self.amount)
class TxPart:
    """
    A single input or output, with an optional OP_RETURN payload.
    """

    def __init__(self, address, amount, asm=None):
        self.address = address
        self.amount = amount
        self.op_return = None
        if address is None and asm is not None:
            # Two script dialects carry embedded data; strip the wrapper and
            # keep only the hex payload.
            if asm.startswith('OP_RETURN '):
                self.op_return = asm[len('OP_RETURN '):]
            elif asm.startswith('return ['):
                self.op_return = asm[len('return ['):-1]

    def message(self):
        """Attempt to decode the op_return value (if there is one) as a UTF-8 string."""
        if self.op_return is None:
            return None
        return bytearray.fromhex(self.op_return).decode('utf-8')

    def __repr__(self):
        if self.address is None and self.op_return is not None:
            return "OP_RETURN data with {:.0f} satoshi burned".format(self.amount)
        return "{} with {:.0f} satoshi".format(self.address, self.amount)
1767013 | import spira.all as spira
class Resistor(spira.PCell):
    """Parameterised rectangular sheet resistor on the R1 layer.

    The geometry is a single ``length`` x ``width`` box centred on the
    origin, with one terminal port on each short edge.
    """

    width = spira.NumberParameter(default=spira.RDD.R1.MIN_WIDTH, doc='Width of the shunt resistance.')
    length = spira.NumberParameter(default=spira.RDD.R1.MIN_LENGTH, doc='Length of the shunt resistance.')

    def validate_parameters(self):
        """Reject geometries where the width exceeds the length."""
        if self.width > self.length:
            raise ValueError('`Width` cannot be larger than `length`.')
        return True

    def create_elements(self, elems):
        """Add the resistor body: a metal box on the R1 process layer."""
        elems += spira.Box(width=self.length, height=self.width, center=(0,0), layer=spira.RDD.PLAYER.R1.METAL)
        return elems

    def create_ports(self, ports):
        """Attach the two terminal ports at the left/right edges of the box."""
        w, l = self.width, self.length
        ports += spira.Port(name='P1_R1', midpoint=(-l/2,0), orientation=180, width=self.width)
        ports += spira.Port(name='P2', midpoint=(l/2,0), orientation=0, width=self.width, process=spira.RDD.PROCESS.R1)
        return ports
if __name__ == '__main__':
    # Build the default resistor and write it out as a GDSII file.
    D = Resistor()
    D.gdsii_output(name='Resistor')
| StarcoderdataPython |
112852 | import copy
import os
import re
from gen_code_util import build_call_line
# iriclib headers scanned for IRICLIBDLL prototypes when generating the
# SWIG interface (iric.i) and the Python wrapper (iric_custom.py).
TARGET_FILES_SWIG = [
    'iriclib_bc.h',
    'iriclib_cc.h',
    'iriclib_complex.h',
    'iriclib_geo.h',
    'iriclib_geoutil.h',
    'iriclib_grid.h',
    'iriclib_grid_solverlib.h',
    'iriclib_gui_coorp.h',
    'iriclib_init.h',
    'iriclib_not_withbaseid.h',
    'iriclib_not_withgridid.h',
    'iriclib_sol_particle.h',
    'iriclib_sol_particlegroup.h',
    'iriclib_sol_polydata.h',
    'iriclib_solution.h',
    'iriclib_wrapper.h',
]
def gen_swig_i_content(fdef):
    """Translate one IRICLIBDLL prototype into a SWIG .i declaration.

    ``int*``/``double*`` parameters become SWIG OUTPUT arguments; the
    interpolate function is skipped (empty string returned).
    """
    decl = fdef.replace('IRICLIBDLL ', '').replace(';', '')
    retval, fname, argtext = re.search(r'(\w+) (\w+)\((.*)\)', decl).groups()
    if fname == 'cg_iRIC_Read_Grid2d_Interpolate':
        return ''
    converted = []
    for arg in argtext.split(','):
        arg = arg.strip()
        *type_parts, name = arg.split(' ')
        ctype = ' '.join(type_parts)
        if ctype in ('int*', 'double*'):
            converted.append(ctype + ' OUTPUT')
        else:
            converted.append(arg)
    return '{} {}({});\n'.format(retval, fname, ', '.join(converted))
def gen_swig_i():
    """Generate ../python_binding/iric.i from the iriclib headers.

    Output is the header template, one converted declaration per exported
    prototype, then the footer template.
    """
    def _skip(header, line):
        # Only IRICLIBDLL prototypes survive, minus deprecated variants;
        # *_arr helpers are taken from the wrapper header only.
        if 'IRICLIBDLL' not in line:
            return True
        if 'RealSingle' in line or 'StringLen' in line:
            return True
        return header != 'iriclib_wrapper.h' and '_arr' in line

    out_path = os.path.join('..', 'python_binding', 'iric.i')
    with open(out_path, 'w', encoding='utf-8') as out:
        with open('iric.i.header.txt', 'r', encoding='utf-8') as template:
            out.writelines(template)
        for header in TARGET_FILES_SWIG:
            out.write("// from {0}\n".format(header))
            with open(os.path.join('..', header), 'r', encoding='utf-8') as src:
                for line in src:
                    if not _skip(header, line):
                        out.write(gen_swig_i_content(line))
            out.write("\n")
        with open('iric.i.footer.txt', 'r', encoding='utf-8') as template:
            out.writelines(template)
def gen_size_func(fname, args):
    """Map an array-returning API *fname* to the API yielding its array size.

    Returns ``(size_function_name, size_function_args)``.  *args* is trimmed
    of trailing output-array (and, where present, name) parameters so it
    matches the size function's signature.  The match order is significant:
    specific patterns must precede the generic ``Functional`` fallback at
    the bottom.  Unknown names return a placeholder marker.
    """
    m = re.search('(cg_iRIC_Read_BC_Indices)(.*)', fname)
    if m:
        main, suffix = m.groups()
        return 'cg_iRIC_Read_BC_IndicesSize2' + suffix, args
    m = re.search('((cg_iRIC_Read_(.*))FunctionalWithName)(.*)', fname)
    if m:
        d1, main, d2, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        return main + 'FunctionalSize' + suffix, args2
    # Grid node/cell value readers: sizes come from the node/cell counts;
    # trailing output (and, for Functional, name) arguments are dropped.
    m = re.search('(cg_iRIC_Read_Grid_(Real|Integer|Complex)_Node)(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        return 'cg_iRIC_Read_Grid_NodeCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Grid_Functional_(Real|Integer)_Node)(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        args2.pop()
        return 'cg_iRIC_Read_Grid_NodeCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Grid_(Real|Integer|Complex)_Cell)(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        return 'cg_iRIC_Read_Grid_CellCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Grid_Functional_(Real|Integer)_Cell)(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        args2.pop()
        return 'cg_iRIC_Read_Grid_CellCount' + suffix, args2
    m = re.search('(cg_iRIC_GetGridCoord[1-3]d)(.*)', fname)
    if m:
        m, suffix = m.groups()
        return 'cg_iRIC_Read_Grid_NodeCount' + suffix, args
    m = re.search('(cg_iRIC_Read_Grid[1-3]d_Coords)(.*)', fname)
    if m:
        m, suffix = m.groups()
        return 'cg_iRIC_Read_Grid_NodeCount' + suffix, args
    m = re.search('(cg_iRIC_Read_Grid_FunctionalDimension_(Integer|Real))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        return 'cg_iRIC_Read_Grid_FunctionalDimensionSize' + suffix, args
    m = re.search('(cg_iRIC_Read_Grid_FunctionalTime)(.*)', fname)
    if m:
        m, suffix = m.groups()
        return 'cg_iRIC_Read_Grid_FunctionalTimeSize' + suffix, args
    # Solution readers: last two arguments (name, output array) are dropped.
    m = re.search('(cg_iRIC_Read_Sol_Node_(Integer|Real))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        args2.pop()
        return 'cg_iRIC_Read_Grid_NodeCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Sol_Cell_(Integer|Real))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        args2.pop()
        return 'cg_iRIC_Read_Grid_CellCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Sol_IFace_(Integer|Real))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        args2.pop()
        return 'cg_iRIC_Read_Grid_IFaceCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Sol_JFace_(Integer|Real))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        args2.pop()
        return 'cg_iRIC_Read_Grid_JFaceCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Sol_KFace_(Integer|Real))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        args2.pop()
        return 'cg_iRIC_Read_Grid_KFaceCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Sol_GridCoord[1-3]d)(.*)', fname)
    if m:
        m, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        return 'cg_iRIC_Read_Grid_NodeCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Sol_Grid[1-3]d_Coords)(.*)', fname)
    if m:
        m, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        return 'cg_iRIC_Read_Grid_NodeCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Sol_(Integer|Real))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        args2.pop()
        return 'cg_iRIC_Read_Grid_NodeCount' + suffix, args2
    # Particle / particle-group / polydata solution readers.
    m = re.search('(cg_iRIC_Read_Sol_Particle_(Real|Integer))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        return 'cg_iRIC_Read_Sol_Particle_Count' + suffix, args2
    m = re.search('(cg_iRIC_Read_Sol_Particle_(Pos.d))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        return 'cg_iRIC_Read_Sol_Particle_Count' + suffix, args
    m = re.search('(cg_iRIC_Read_Sol_ParticleGroup_(Real|Integer))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        return 'cg_iRIC_Read_Sol_ParticleGroup_Count' + suffix, args2
    m = re.search('(cg_iRIC_Read_Sol_ParticleGroup_(Pos.d))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        return 'cg_iRIC_Read_Sol_ParticleGroup_Count' + suffix, args
    m = re.search('(cg_iRIC_Read_Sol_PolyData_(Real|Integer))(.*)', fname)
    if m:
        m, d1, suffix = m.groups()
        args2 = copy.copy(args)
        args2.pop()
        return 'cg_iRIC_Read_Sol_PolyData_DataCount' + suffix, args2
    m = re.search('(cg_iRIC_Read_Sol_PolyData_Type)(.*)', fname)
    if m:
        m, suffix = m.groups()
        return 'cg_iRIC_Read_Sol_PolyData_DataCount' + suffix, args
    m = re.search('(cg_iRIC_Read_Sol_PolyData_Pos2d)(.*)', fname)
    if m:
        m, suffix = m.groups()
        return 'cg_iRIC_Read_Sol_PolyData_CoordinateCount' + suffix, args
    m = re.search('(cg_iRIC_Read_Grid_TriangleElements)(.*)', fname)
    if m:
        m, suffix = m.groups()
        return 'cg_iRIC_Read_Grid_TriangleElementsSize2' + suffix, args
    # Geometry helpers with fixed 1:1 size-function names.
    if fname == 'iRIC_Geo_Polygon_Read_Points':
        return 'iRIC_Geo_Polygon_Read_PointCount', args
    if fname == 'iRIC_Geo_Polygon_Read_HolePoints':
        return 'iRIC_Geo_Polygon_Read_HolePointCount', args
    if fname == 'iRIC_Geo_RiverSurvey_Read_Altitudes':
        return 'iRIC_Geo_RiverSurvey_Read_AltitudeCount', args
    if fname == 'cg_iRIC_Read_Grid2d_InterpolateWithCell':
        return 'cg_iRIC_Read_Grid2d_CellNodeCount', ['grid_handle', 'cellId']
    # Generic Functional fallback; must come after the specific patterns.
    m = re.search("((cg_iRIC_Read_(.*))Functional)(.*)", fname)
    if m:
        d1, main, d2, suffix = m.groups()
        return main + 'FunctionalSize' + suffix, args
    return '@TODO FIX THIS', list()
def gen_iric_py_content(fdef):
    """Generate the Python wrapper function source for one C prototype.

    Pointer and string outputs become Python return values; array-container
    arguments become plain Python sequences, with the required buffer size
    obtained via gen_size_func().  Returns the wrapper source as a string
    ('' for the skipped interpolate function).
    """
    fdef2 = fdef
    fdef2 = fdef2.replace('IRICLIBDLL ', '')
    fdef2 = fdef2.replace(';', '')
    m = re.search(r'(\w+) (\w+)\((.*)\)', fdef2)
    (retval, fname, args) = m.groups()
    if fname == 'cg_iRIC_Read_Grid2d_Interpolate': return ''
    arglist = args.split(',')
    args = list()  # args of the api
    rets = list()  # return values of the api
    arrays_in = dict()
    arrays_out = dict()
    w_rets = list()  # return values of the wrapper func
    w_args = list()  # args of the wrapper func
    if retval == 'int':
        w_rets.append('ier')
    content = ''
    for a in arglist:
        a = a.strip()
        frags = a.split(' ')
        aname = frags.pop()
        atype = " ".join(frags)
        if atype == "int*" or atype == "double*" or aname == "strvalue":
            # Scalar/string outputs: returned by the SWIG wrapper.
            w_rets.append(aname)
            rets.append(aname)
        elif atype == "IntArrayContainer&" or atype == "RealArrayContainer&":
            # Array containers: direction inferred from the function name.
            if "_Read" in fname or "GetGridCoord" in fname or "GetTriangleElements" in fname:
                arrays_out[aname] = atype.replace('&', '')
                w_args.append(aname.replace('_arr', ''))
                rets.append(aname.replace('_arr', '') + ".get()")
            elif "_Write" in fname:
                arrays_in[aname] = atype.replace('&', '')
                args.append(aname)
                w_args.append(aname.replace('_arr', ''))
            else:
                print("Invalid function name {0}".format(fname))
        else:
            args.append(aname)
            w_args.append(aname)
    content = "def " + fname + "(" + ", ".join(args) + "):\n"
    if len(arrays_out) > 0:
        size_f, args2 = gen_size_func(fname, args)
        content += "\t" + "size = " + size_f + "(" + ", ".join(args2) + ")\n"
    for n, t in arrays_in.items():
        content += "\t" + n.replace('_arr', '') + " = " + t + "(" + n + ".size)\n"
        content += "\t" + n.replace('_arr', '') + ".set(" + n + ")\n"
    for n, t in arrays_out.items():
        content += "\t" + n.replace('_arr', '') + " = " + t + "(size)\n"
    content += "\t" + ", ".join(w_rets) + " = _iric." + fname + "(" + ", ".join(w_args) + ")\n"
    if fname == "iRIC_Check_Cancel":
        # iRIC_Check_Cancel reports status via its return code, not an error.
        content += "\t" + "return ier" + "\n"
    else:
        if retval == 'int':
            content += "\t" + "_checkErrorCode(ier)\n"
        if len(rets) > 0:
            content += "\t" + "return " + ", ".join(rets) + "\n"
    content += "\n"
    return content
def gen_iric_py():
    """Generate ../python_binding/iric_custom.py from the iriclib headers."""
    def _wanted(header, line):
        # Same filters as gen_swig_i: exported prototypes only, minus
        # deprecated variants; *_arr helpers only from the wrapper header.
        if 'IRICLIBDLL' not in line:
            return False
        if 'RealSingle' in line or 'StringLen' in line:
            return False
        if header != 'iriclib_wrapper.h' and '_arr' in line:
            return False
        return True

    out_path = os.path.join('..', 'python_binding', 'iric_custom.py')
    with open(out_path, 'w', encoding='utf-8') as out:
        with open('iric_custom.py.header.txt', 'r', encoding='utf-8') as template:
            out.writelines(template)
        for header in TARGET_FILES_SWIG:
            out.write("# from {0}\n".format(header))
            with open(os.path.join('..', header), 'r', encoding='utf-8') as src:
                for line in src:
                    if _wanted(header, line):
                        out.write(gen_iric_py_content(line))
            out.write("\n")
        with open('iric_custom.py.footer.txt', 'r', encoding='utf-8') as template:
            out.writelines(template)
def gen_swig():
    """Regenerate both SWIG artifacts: the .i interface and the Python wrapper."""
    gen_swig_i()
    gen_iric_py()
| StarcoderdataPython |
1649557 | import os
import sys
import warnings
from inspect import getmembers, isfunction
import inspect
import numpy as np
from ase.io import read
import scipy.sparse as sp
from Utilities import Initial
# Message templates written to the Output_Info / Output_Errors logs.
# no_dir_template / no_file_template use printf-style '%s' placeholders;
# AttrErr uses str.format-style '{0}'/'{1}' placeholders.
# (Fixed: the first template's concatenated literals were missing the space
# between "these" and "quantities".)
no_dir_template = "\nThere does not exist a suitable directory in which to place these " \
    "quantities.\n\nInstead, we shall generate one at '%s'.\n"

no_file_template = "\nThere does not exist a file in which to write the quantity %s.\n" \
    "\nInstead, we shall create the file '%s' at location '%s'."

AttrErr = "Unable to find a write object for {0}:\n"\
    "\nException traceback:\n{1}.\n"
class Writer():
"""
Robert:
This class object has been written with the purpose of handling the
creation and distribution of Sapphire Output.
In version 0.10.1, the pickle function is inadequate to facilitate
the entirity of the metadata.
In principle, all of the handling of output should be handled out of
sight of the user.
"""
    def __init__(self, System, Metadata):
        """Set up output paths, write the logo headers, and index writer functions.

        System   : dict of I/O configuration; must contain 'base_dir'.
        Metadata : post-processed Sapphire results to be written out.
        """
        self.output_info_file = System['base_dir']+'Output_Info.txt'
        self.output_error_file = System['base_dir']+'Output_Errors.txt'
        # Default per-quantity output options (directory, filename, flags).
        self.Quants = {
            'Dir': 'Time_Dependent/', 'File': 'R_Cut', 'Iterate': False, 'Bool': False,
            'Skip': True, 'Energy': False, 'Homo': False, 'Hetero': False, 'xyz': False
        }
        self.Metadata = Metadata  # This is the data provided to the user by Sapphire after post processing
        self.System = System  # Significant system information regarding I/O streams
        self.Logo = Initial.Logo().Logo()
        # Fresh log files for this run, headed by the Sapphire logo.
        with open(self.output_info_file, 'w') as outfile:
            outfile.write(self.Logo)
            outfile.write('\n')
        with open(self.output_error_file, 'w') as outfile:
            outfile.write(self.Logo)
            outfile.write('\n')

        """
        This provides a dictionary with the function names as keys and the
        function itself.
        This allows us to have 1-1-1 mapping between the output p
        """

        self.functions_list = [o for o in getmembers(Writer) if isfunction(o[1])]
        self.Functions = {}
        # NOTE(review): x is a (name, function) tuple while Quants keys are
        # option strings, so this membership test appears to never be true
        # and Functions stays empty — confirm the intended condition.
        for x in self.functions_list:
            if x in self.Quants.keys():
                self.Functions[x[0]] = inspect.getfullargspec(x[1])[0][1:]
def ensure_dir(self, base_dir='', file_path=''):
"""
Robert:
A simple script to verify the existence of a directory
given the path to it. If it does not exist, will create it.
"""
directory = base_dir + file_path
if not os.path.exists(directory):
os.makedirs(directory)
with open(self.output_info_file, 'w') as outfile:
outfile.write(no_dir_template % (base_dir+file_path))
def MakeFile(self, Attributes):
self.out = self.System['base_dir'] + Attributes['Dir'] + Attributes['File']
if not os.path.isfile(self.out):
with open(self.System['base_dir'] + Attributes['Dir'] + Attributes['File'], 'w') as out:
out.close()
else:
pass
def Masterkey(self, Quantity):
try:
with open(self.out, 'w') as f:
for item in self.Metadata[self.x]:
f.write(str(item)+'\n')
except Exception as e:
with open(self.output_error_file, 'a') as outfile:
outfile.write(AttrErr % (self.x, e))
def Adj(self, Quantity):
self.out = self.System['base_dir'] + Quantity['Dir'] + Quantity['File']
self.ensure_dir(base_dir=self.System['base_dir'], file_path=Quantity['Dir'])
for i, t in enumerate(self.Metadata[self.x]):
try:
self.filename = self.System['base_dir'] + Quantity['Dir'] + 'File%s' % i
self.Mat = sp.csr_matrix.todense(t)
with open(self.filename, 'w') as f:
for line in self.Mat:
np.savetxt(f, line, fmt='%d')
except Exception as e:
with open(self.output_error_file, 'a') as outfile:
outfile.write(AttrErr % (self.x, e))
def Ele(self, Quantity):
self.out = self.System['base_dir'] + Quantity['Dir'] + Quantity['File']
self.ensure_dir(base_dir=self.System['base_dir'], file_path=Quantity['Dir'])
with open(self.out, 'w') as file:
for i, t in enumerate(self.Metadata[self.x]):
try:
self.filename = self.System['base_dir'] + Quantity['Dir'] + 'File%s' % i
file.write('\t|\t'.join(str(item) for item in t[0])+'\n')
except Exception as e:
with open(self.output_error_file, 'a') as outfile:
outfile.write(AttrErr % (self.x, e))
    def HeAdj(self, Quantity):
        """Write hetero-adjacency data, one output file per homo species.

        Frames are stacked column-wise and written transposed so each output
        row corresponds to one frame.  Assumes self.Metadata[self.x] is
        indexed [frame][element] — TODO confirm against the producer.
        """
        self.Homo = self.System['Homo']
        for Ele in self.Homo:
            if len(self.Metadata[self.x]) > 1:
                # Seed with the first two frames, then append the rest.
                Temp = np.column_stack((
                    self.Metadata[self.x][0][self.Homo.index(Ele)],
                    self.Metadata[self.x][1][self.Homo.index(Ele)]
                ))
                for t in range(2, len(self.Metadata[self.x])):
                    Temp = np.column_stack((
                        Temp, np.array(self.Metadata[self.x][t][self.Homo.index(Ele)], int)
                    ))
                np.savetxt(
                    self.System['base_dir'] + Quantity['Dir'] + Quantity['File']+Ele,
                    Temp.transpose(), fmt='%d')
            else:
                # Single frame: write it directly.
                np.savetxt(
                    self.System['base_dir'] + Quantity['Dir'] + Quantity['File']+Ele,
                    np.array(self.Metadata[self.x][0][self.Homo.index(Ele)]).transpose(),
                    fmt='%d')
def Write_Homo(self, Quantity):
# self.MakeFile(Quantity) #See if the file already exists
for Ele in self.System['Homo']:
File = str(self.x)[:-2]+Ele
self.out = self.System['base_dir'] + Quantity['Dir'] + Quantity['File']+Ele
self.ensure_dir(base_dir=self.System['base_dir'], file_path=Quantity['Dir'])
try:
if not Quantity['Iterate'] and not Quantity['Bool'] and not Quantity['array']:
try:
np.savetxt(self.out, self.Metadata[File], fmt='%s')
except Exception as e:
with open(self.output_error_file, 'a') as error:
error.write(AttrErr.format(str(File), str(e)))
try:
with open(self.out, 'a') as CurrentOut:
CurrentOut.write(str(File)+str(self.Metadata[File]))
CurrentOut.write('\n')
except Exception as e:
with open(self.output_error_file, 'a') as outfile:
outfile.write(AttrErr % (File, e))
elif Quantity['Iterate'] and Quantity['array']:
try:
if len(self.Metadata[File]) > 1:
Temp = np.column_stack((self.Metadata[File][0], self.Metadata[File][1]))
for t in range(2, len(self.Metadata[File])):
Temp = np.column_stack((Temp, self.Metadata[File][t]))
np.savetxt(self.out, Temp.transpose(), fmt='%f')
else:
np.savetxt(
self.out,
np.array(self.Metadata[File][0]).transpose(),
fmt='%f')
except Exception as e:
with open(self.output_error_file, 'a') as outfile:
outfile.write(AttrErr % (File, e))
elif Quantity['Iterate'] and not Quantity['array']:
try:
np.savetxt(self.out, np.array(self.Metadata[File], dtype=float).transpose(), fmt='%f')
except Exception as e:
with open(self.output_error_file, 'a') as outfile:
outfile.write(AttrErr % (File, e))
except Exception as e:
with open(self.output_error_file, 'a') as error:
error.write(AttrErr.format(str(File), str(e)))
    def Write(self, Quantity):
        """
        Write a single (system-wide) quantity from ``self.Metadata[self.x]`` to disk.

        Parameters
        ----------
        Quantity : dict
            Output descriptor with keys 'Dir', 'File', 'Exec', 'Iterate',
            'Bool' and 'array' selecting the serialisation strategy.

        Notes
        -----
        Errors are logged to ``self.output_error_file`` and never raised.
        """
        self.out = self.System['base_dir'] + Quantity['Dir'] + Quantity['File']
        self.ensure_dir(base_dir=self.System['base_dir'], file_path=Quantity['Dir']) # See if the directory already exists
        # self.MakeFile(Quantity) #See if the file already exists
        if Quantity['Exec']:
            # 'Exec' quantities: always appended as a tab-separated text line.
            try:
                with open(self.out, 'a') as CurrentOut:
                    CurrentOut.write(str(self.x)+'\t|\t'+str(self.Metadata[self.x]))
                    CurrentOut.write('\n')
            except Exception as e:
                with open(self.output_error_file, 'a') as outfile:
                    outfile.write(AttrErr % (self.x, e))
        else:
            try:
                if Quantity['Bool']:
                    # Boolean quantities: same tab-separated append format.
                    try:
                        with open(self.out, 'a') as CurrentOut:
                            CurrentOut.write(str(self.x) + '\t|\t' + str(self.Metadata[self.x]))
                            CurrentOut.write('\n')
                    except Exception as e:
                        with open(self.output_error_file, 'a') as outfile:
                            outfile.write(AttrErr % (self.x, e))
                elif not Quantity['Iterate'] and not Quantity['Bool'] and not Quantity['array']:
                    # Non-iterated quantity: savetxt dump followed by a
                    # human-readable append (see NOTE in Write_Homo — the
                    # append happens even when savetxt succeeds).
                    try:
                        np.savetxt(self.out, self.Metadata[self.x], fmt='%s')
                    except Exception as e:
                        with open(self.output_error_file, 'a') as error:
                            error.write(AttrErr.format(str(self.x), str(e)))
                    try:
                        with open(self.out, 'a') as CurrentOut:
                            CurrentOut.write(str(self.x)+str(self.Metadata[self.x]))
                            CurrentOut.write('\n')
                    except Exception as e:
                        with open(self.output_error_file, 'a') as outfile:
                            outfile.write(AttrErr % (self.x, e))
                elif Quantity['Iterate'] and Quantity['array']:
                    # Per-frame arrays: column-stack every frame, transpose so
                    # each written row is one frame.
                    try:
                        if len(self.Metadata[self.x]) > 1:
                            Temp = np.column_stack((self.Metadata[self.x][0], self.Metadata[self.x][1]))
                            for t in range(2, len(self.Metadata[self.x])):
                                Temp = np.column_stack((Temp, self.Metadata[self.x][t]))
                            np.savetxt(self.out, Temp.transpose(), fmt='%f')
                        else:
                            np.savetxt(
                                self.out,
                                np.array(self.Metadata[self.x][0]).transpose(),
                                fmt='%f')
                    except Exception as e:
                        with open(self.output_error_file, 'a') as outfile:
                            outfile.write(AttrErr % (self.x, e))
                elif Quantity['Iterate'] and not Quantity['array']:
                    # Per-frame scalars: one float column per frame.
                    try:
                        np.savetxt(self.out, np.array(self.Metadata[self.x], dtype=float).transpose(), fmt='%f')
                    except Exception as e:
                        with open(self.output_error_file, 'a') as outfile:
                            outfile.write(AttrErr % (self.x, e))
            except Exception as e:
                with open(self.output_error_file, 'a') as error:
                    error.write(AttrErr.format(str(self.x), str(e)))
    def Run(self, Output_Type):
        """
        Robert.
        This will need to be handled internally delicately so as to not confuse
        the user.
        I would like to be able to determine whether or not to call a given
        output file type based on it being part of the Full, Homo, or Hetero
        sub-systems.
        In principle, the User is at liberty (not now, but soon) to pre-select their
        own output parameters. Though deviating from the defaults could be dangerous.
        At present, one of three string-types can be assigned to the 'Output_Type'
        free variable:
            Full - Loads in the OutputInfoFull.py file for its attributes to be read.
            Homo - Loads in the OutputInfoHomo.py file for its attributes to be read.
            Hetero - Loads in the OutputInfoHetero.py file for its attributes to be read.
        """
        # Select the output-descriptor module matching the requested sub-system.
        # NOTE(review): an unrecognised Output_Type leaves 'Out' unbound and
        # every getattr below will fail (silently, into the error log).
        if Output_Type == 'Full':
            from Utilities import OutputInfoFull as Out # Case 1
        elif Output_Type == 'Homo':
            from Utilities import OutputInfoHomo as Out # Case 2
        elif Output_Type == 'Hetero':
            from Utilities import OutputInfoHetero as Out # Case 3
        self.Write_List = []
        # Dispatch each metadata key to its specialised writer, falling back
        # to the generic Write().
        for self.x in self.Metadata.keys(): # Things such as: 'pdf', 'R_Cut', ...
            try:
                if Output_Type == 'Homo' and self.x.startswith('ho'):
                    # Homo quantities are keyed '<name><element>'; strip the
                    # two-character element suffix to find the descriptor.
                    Attributes = getattr(Out, str(self.x[:-2])) # Pulls dictionaries with names corresponding to x as above
                    with open(self.output_info_file, 'a') as outfile:
                        outfile.write('Working now with %s and placing it in %s with file name %s.\n' % (self.x, Attributes['Dir'], Attributes['File']))
                    try:
                        self.Write_Homo(Attributes)
                    except Exception as e:
                        with open(self.output_error_file, 'a') as error:
                            error.write(AttrErr.format(str(self.x), str(e)))
                else:
                    Attributes = getattr(Out, str(self.x)) # Pulls dictionaries with names corresponding to x as above
                    # Special-cased quantities with dedicated writers.
                    if self.x == 'adj':
                        try:
                            self.Adj(Attributes)
                        except Exception as e:
                            with open(self.output_error_file, 'a') as error:
                                error.write(AttrErr.format(str(self.x), str(e)))
                    elif self.x == 'Elements':
                        try:
                            self.Ele(Attributes)
                        except Exception as e:
                            with open(self.output_error_file, 'a') as error:
                                error.write(AttrErr.format(str(self.x), str(e)))
                    elif self.x == 'headj':
                        try:
                            self.HeAdj(Attributes)
                        except Exception as e:
                            with open(self.output_error_file, 'a') as error:
                                error.write(AttrErr.format(str(self.x), str(e)))
                    elif self.x == 'master':
                        try:
                            self.Masterkey(Attributes)
                        except Exception as e:
                            with open(self.output_error_file, 'a') as error:
                                error.write(AttrErr.format(str(self.x), str(e)))
                    else:
                        # Generic path for everything else.
                        self.Write(Attributes)
                        with open(self.output_info_file, 'a') as outfile:
                            outfile.write('Working now with %s and placing it in %s with file name %s.\n' % (self.x, Attributes['Dir'], Attributes['File']))
            except Exception as e:
                with open(self.output_error_file, 'a') as error:
                    error.write(AttrErr.format(str(self.x), str(e)))
        # Finally dump the CNA pattern-recognition key as a reference file.
        try:
            from CNA.Utilities import Pattern_Key as PK
            self.pattern_key = PK().Key()
            with open(self.System['base_dir'] + 'RecognisedPatterns.txt', 'w') as outfile:
                for i, thing in enumerate(self.pattern_key.keys()):
                    outfile.write(str(i) + ')\t' + str(thing)+':\t')
                    for item in self.pattern_key[thing]:
                        outfile.write(str(item) + ':\t' + str(self.pattern_key[thing][item])+'\t|\t')
                    outfile.write('\n\n')
        except Exception as e:
            with open(self.output_error_file, 'a') as error:
                error.write(AttrErr.format('CNA_Patterns', e))
| StarcoderdataPython |
164248 | """
Run main.
"""
from behave_graph import main
# NOTE(review): main() executes at import time as well as when run as a
# script; wrap in ``if __name__ == "__main__":`` if this module may be imported.
main()
| StarcoderdataPython |
141140 | <filename>tests/test_ilcs.py
import numpy
def test_all_distances(ilcs):
    """Two citations differing in chapter, act prefix and subsection should
    populate every feature slot of the comparator's output vector.

    ``ilcs`` is a test fixture exposing ``comparator`` and ``compareString``
    (presumably a dedupe variable definition — confirm against conftest).
    """
    numpy.testing.assert_almost_equal(
        ilcs.comparator('125 55/21-a (att)', '126 55/21-b (att)'),
        numpy.array([
            1, # citation: Not Missing
            0, # ambiguous: Dummy
            1, # same name type?: Dummy
            ilcs.compareString('126', '125'), # chapter: Derived
            ilcs.compareString('55', '55'), # act prefix: Derived
            ilcs.compareString('21', '21'), # section: Derived
            ilcs.compareString('a', 'b'), # subsection: Derived
            1, # chapter: Not Missing
            1, # act prefix: Not Missing
            1, # section: Not Missing
            1, # subsection: Not Missing
            0, # full string: String
            1, # attempted match: Dummy
            0 # exact match: Exact
        ])
    )
def test_exact_match(ilcs):
    """Comparing a citation against itself must flag an exact match."""
    citation = '125 55/21 (att)'
    expected = [1, 0, 1, 0.5, 0.5, 0.5, 0, 1, 1, 1, 0, 0, 1, 1]
    observed = ilcs.comparator(citation, citation)
    numpy.testing.assert_almost_equal(observed, numpy.array(expected))
def test_mismatched_elements(ilcs):
    """A differing act prefix plus a missing subsection on one side should
    zero the subsection-derived and subsection-present slots."""
    numpy.testing.assert_almost_equal(
        ilcs.comparator('125 55/21-a (att)', '125 56/21'),
        numpy.array([
            1, 0, 1,
            ilcs.compareString('125', '125'),
            ilcs.compareString('55', '56'),
            ilcs.compareString('21', '21'),
            0, 1, 1, 1, 0, 0, 0, 0
        ])
    )
def test_attempted_match(ilcs):
    """An '(att)' marker on one citation should still match when the other
    string carries it in combined form.

    NOTE(review): the expected vector calls ``compareString('125', '125')``
    although both inputs have chapter '725' — harmless if compareString of two
    equal strings is constant, but verify the literals are not a copy-paste slip.
    """
    numpy.testing.assert_almost_equal(
        ilcs.comparator('725 5/21-a (att)', '720-5/8-4 725 6/21'),
        numpy.array([
            1, 0, 1,
            ilcs.compareString('125', '125'),
            ilcs.compareString('5', '6'),
            ilcs.compareString('21', '21'),
            0, 1, 1, 1, 0, 0, 1, 0
        ])
    )
| StarcoderdataPython |
161891 | <gh_stars>0
# coding: utf-8

# In[1]:

from selenium import webdriver
from selenium.webdriver.remote.webelement import WebElement
import time
from bs4 import BeautifulSoup

# Scrape the Yahoo Taiwan technology-news archive: collect every article's
# title and body text, then dump them all into yahoonews.txt.

#driver=webdriver.PhantomJS(executable_path="")
# NOTE(review): executable_path is empty -- chromedriver must be on PATH or
# the path must be filled in before this script can run.
driver = webdriver.Chrome(executable_path="")
driver.get("https://tw.news.yahoo.com/technology/archive/")
time.sleep(3)

bs1 = BeautifulSoup(driver.page_source, "html.parser")
news = ""
# NOTE(review): iterating the <ul> Tag yields *all* children, including bare
# whitespace text nodes, on which .find() misbehaves; find_all("li") would be
# more robust, but the original traversal is preserved here.
for new in bs1.find("ul", {"class": "yom-list-wide thumbnail"}):
    # Hoist the anchor lookup: the original resolved the same <a> twice.
    link = new.find("div", {"class": "txt"}).find("a")
    title = link.text
    href = link.attrs['href']
    url = "https://tw.news.yahoo.com" + href
    driver.get(url)
    time.sleep(15)
    bs2 = BeautifulSoup(driver.page_source, "html.parser")
    content = bs2.find("div", {"class": "yom-mod yom-art-content "}).find("div", {"class": "bd"}).get_text()
    #print(content)
    news += title + "\n" + "\n" + content + "\n" + "\n"

#print(news)
# Fix: the original open()/close() pair leaked the handle if write() raised;
# a context manager guarantees the file is closed.
with open("yahoonews.txt", "w") as f:
    f.write(news)

driver.close()
| StarcoderdataPython |
1636465 | #! usr/bin/python3.6
"""
Module initially auto generated using V5Automation files from CATIA V5 R28 on 2020-06-11 12:40:47.360445
.. warning::
The notes denoted "CAA V5 Visual Basic Help" are to be used as reference only.
They are there as a guide as to how the visual basic / catscript functions work
and thus help debugging in pycatia.
"""
from pycatia.system_interfaces.any_object import AnyObject
class DrawingDimLine(AnyObject):
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
| System.IUnknown
| System.IDispatch
| System.CATBaseUnknown
| System.CATBaseDispatch
| System.AnyObject
| DrawingDimLine
|
| Manages dimension line of a dimension in drawing view.
|
| This interface is obtained from DrawingDimension.GetDimLine
| method.
"""
def __init__(self, com_object):
super().__init__(com_object)
self.drawing_dim_line = com_object
@property
def color(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
| o Property Color() As long
|
| Returns or sets color of dimension line.
|
| Example:
| This example retrieves color of dimension line MyDimLine drawing
| dimension.
|
| oColorDimLine = MyDimLine.Color
:return: int
:rtype: int
"""
return self.drawing_dim_line.Color
@color.setter
def color(self, value: int):
"""
:param int value:
"""
self.drawing_dim_line.Color = value
@property
def dim_line_graph_rep(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
| o Property DimLineGraphRep() As CatDimLineGraphRep
|
| Returns or graphic representation of dimension line.
|
| Example:
| This example retrieves graphic representation of dimension line
| MyDimLine drawing dimension.
|
| odimLineGraphRep = MyDimLine.DimLineGraphRep
:return: int
:rtype: int
"""
return self.drawing_dim_line.DimLineGraphRep
@dim_line_graph_rep.setter
def dim_line_graph_rep(self, value: int):
"""
:param int value:
"""
self.drawing_dim_line.DimLineGraphRep = value
@property
def dim_line_orientation(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
| o Property DimLineOrientation() As CatDimOrientation
|
| Returns or orientation of dimension line.
|
| Example:
| This example retrieves orientation of dimension line MyDimLine drawing
| dimension.
|
| odimLineOrient = MyDimLine.DimLineOrientation
:return: int
:rtype: int
"""
return self.drawing_dim_line.DimLineOrientation
@dim_line_orientation.setter
def dim_line_orientation(self, value: int):
"""
:param int value:
"""
self.drawing_dim_line.DimLineOrientation = value
@property
def dim_line_reference(self):
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
| o Property DimLineReference() As CatDimReference
|
| Returns or reference of dimension line.
|
| Example:
| This example retrieves reference of dimension line MyDimLine drawing
| dimension.
|
| odimLineRef = MyDimLine.DimLineReference
:return: int
:rtype: int
"""
return self.drawing_dim_line.DimLineReference
@dim_line_reference.setter
def dim_line_reference(self, value: int):
"""
:param int value:
"""
self.drawing_dim_line.DimLineReference = value
@property
def dim_line_rep(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
| o Property DimLineRep() As CatDimLineRep (Read Only)
|
| Returns or representation of dimension line.
|
| Example:
| This example retrieves representation of dimension line MyDimLine
| drawing dimension.
|
| odimLineRep = MyDimLine.DimLineRep
:return: int
:rtype: int
"""
return self.drawing_dim_line.DimLineRep
@property
def dim_line_type(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
| o Property DimLineType() As long (Read Only)
|
| Returns type of dimension line.
|
| Example:
| This example retrieves type of dimension line MyDimLine drawing
| dimension.
|
| odimLineType = MyDimLine.DimLineType
:return: int
:rtype: int
"""
return self.drawing_dim_line.DimLineType
@property
def thickness(self) -> float:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
| o Property Thickness() As double
|
| Returns or sets thickness of dimension line.
|
| Example:
| This example retrieves thickness of dimension line MyDimLine drawing
| dimension.
|
| oThickDimLine = MyDimLine.Thickness
:return: float
:rtype: float
"""
return self.drawing_dim_line.Thickness
@thickness.setter
def thickness(self, value: float):
"""
:param float value:
"""
self.drawing_dim_line.Thickness = value
def get_dim_line_dir(self, o_dir_x: float, o_dir_y: float) -> None:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445))
| o Sub GetDimLineDir(double oDirX,
| double oDirY)
|
| Returns direction of a dimension line in case of a catDimUserDefined
| representation mode. To retrieve the representation mode:
| DrawingDimLine.get_DimLineRep.
|
| Parameters:
|
| oDirX,oDirY
| The components of the direction vector
| Example:
| This example retrieves the direction vector of a dimension line
| MyDimLine drawing dimension.
|
| MyDimLine.GetDimLineDir oDirX, oDirY
:param float o_dir_x:
:param float o_dir_y:
:return: None
:rtype: None
"""
return self.drawing_dim_line.GetDimLineDir(o_dir_x, o_dir_y)
def get_geom_info(self, o_geom_infos: tuple) -> None:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445))
| o Sub GetGeomInfo(CATSafeArrayVariant oGeomInfos)
|
| Get geometrical information of dimension line.
|
| Parameters:
|
| oGeomInfos
| geometrical information.
| Example:
| This example gets geometrical information of MyDimLine
| path.
|
| MyDimLine.GetGeomInfo(oGeomInfos)
:param tuple o_geom_infos:
:return: None
:rtype: None
"""
return self.drawing_dim_line.GetGeomInfo(o_geom_infos)
# # # # Autogenerated comment:
# # some methods require a system service call as the methods expects a vb array object
# # passed to it and there is no way to do this directly with python. In those cases the following code
# # should be uncommented and edited accordingly. Otherwise completely remove all this.
# # vba_function_name = 'get_geom_info'
# # vba_code = """
# # Public Function get_geom_info(drawing_dim_line)
# # Dim oGeomInfos (2)
# # drawing_dim_line.GetGeomInfo oGeomInfos
# # get_geom_info = oGeomInfos
# # End Function
# # """
# # system_service = self.application.system_service
# # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object])
def get_symb_color(self, index: int) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445))
| o Func GetSymbColor(long Index) As long
|
| Get symbol color of dimension line.
|
| Parameters:
|
| Index
| 1:first symbol 2:second symbol 3:leader symbol
| oColorSymb
| symbol color.
| Example:
| This example gets symbol color of MyDimLine path.
|
| ColorSymb = MyDimLine.GetSymbColor(Index)
:param int index:
:return: int
:rtype: int
"""
return self.drawing_dim_line.GetSymbColor(index)
def get_symb_thickness(self, index: int) -> float:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445))
| o Func GetSymbThickness(long Index) As double
|
| Get symbol thickness of dimension line.
|
| Parameters:
|
| Index
| 1:first symbol 2:second symbol 3:leader symbol
| oThickSymb
| symbol thickness.
| Example:
| This example gets symbol thickness of MyDimLine
| path.
|
| ThickSymb = MyDimLine.GetSymbThickness(Index)
:param int index:
:return: float
:rtype: float
"""
return self.drawing_dim_line.GetSymbThickness(index)
def get_symb_type(self, index: int) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445))
| o Func GetSymbType(long Index) As CatDimSymbols
|
| Get symbol type of dimension line.
|
| Parameters:
|
| Index
| 1:first symbol 2:second symbol 3:leader symbol
| oTypeSymb
| symbol type.
| Example:
| This example gets symbol type of MyDimLine path.
|
| typeSymb = MyDimLine.GetSymbType(Index)
:param int index:
:return: enum cat_dim_symbols
:rtype: int
"""
return self.drawing_dim_line.GetSymbType(index)
def set_symb_color(self, index: int, i_color_symb: int) -> None:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445))
| o Sub SetSymbColor(long Index,
| long iColorSymb)
|
| Set symbol color of dimension line.
|
| Parameters:
|
| Index
| 1:first symbol 2:second symbol 3:leader symbol
| oColorSymb
| symbol color.
| Example:
| This example sets symbol color of MyDimLine path.
|
| MyDimLine.SetSymbColor(Index, iColorSymb)
:param int index:
:param int i_color_symb:
:return: None
:rtype: None
"""
return self.drawing_dim_line.SetSymbColor(index, i_color_symb)
def set_symb_thickness(self, index: int, i_thick_symb: float) -> None:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445))
| o Sub SetSymbThickness(long Index,
| double iThickSymb)
|
| Set symbol thickness of dimension line.
|
| Parameters:
|
| Index
| 1:first symbol 2:second symbol 3:leader symbol
| oThickSymb
| symbol thickness.
| Example:
| This example sets symbol thickness of MyDimLine
| path.
|
| MyDimLine.GetSymbThickness(Index, iThickSymb)
:param int index:
:param float i_thick_symb:
:return: None
:rtype: None
"""
return self.drawing_dim_line.SetSymbThickness(index, i_thick_symb)
def set_symb_type(self, index: int, i_symb_type: int) -> None:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445))
| o Sub SetSymbType(long Index,
| CatDimSymbols iSymbType)
|
| Set symbol type of dimension line.
|
| Parameters:
|
| Index
| 1:first symbol 2:second symbol 3:leader symbol
| iSymbType
| symbol type.
| Example:
| This example sets symbol type of MyDimLine path.
|
| MyDimLine.SetSymbType(Index, iSymbType)
:param int index:
:param int i_symb_type:
:return: None
:rtype: None
"""
return self.drawing_dim_line.SetSymbType(index, i_symb_type)
def __repr__(self):
return f'DrawingDimLine(name="{self.name}")'
| StarcoderdataPython |
3214461 | <gh_stars>10-100
# The MIT License (MIT) # Copyright (c) 2014-2017 University of Bristol
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from .client import Client
from .config import HyperStreamConfig
from .plate import PlateManager
from .workflow import Workflow, WorkflowManager
from .channels import ChannelManager
from .hyperstream import HyperStream
from .online_engine import OnlineEngine
from .stream import StreamId, Stream, StreamInstance, StreamMetaInstance, DatabaseStream, StreamDict, \
StreamInstanceCollection, StreamView
from .time_interval import TimeInterval, TimeIntervals, RelativeTimeInterval
from .tool import Tool, MultiOutputTool, AggregateTool, SelectorTool, PlateCreationTool
from .utils import MIN_DATE, UTC, StreamNotAvailableError, StreamAlreadyExistsError, StreamDataNotAvailableError, \
StreamNotFoundError, IncompatiblePlatesError, ToolNotFoundError, ChannelNotFoundError, ToolExecutionError, \
ChannelAlreadyExistsError, FactorAlreadyExistsError, FactorDefinitionError, LinkageError, \
NodeAlreadyExistsError, PlateDefinitionError, PlateEmptyError, MultipleStreamsFoundError
from .version import __version__
| StarcoderdataPython |
34717 | <filename>setup_project.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
setup_project.py -- GIS Project Setup Utility
<NAME>; May 2014/May 2016
This script creates a project folder-environment for GIS projects as follows:
<project_name>/
data/
raw/
<project_name>.gdb
design/
fonts/
images/
rasters/
vectors/
maps/
archive/
final/
reports/
resources/
tools/
"""
import argparse
import os
from shutil import copy2
# import arcpy # moved for speed
# =============================================================================
# CLI ARGS
# Build the command-line interface; the module docstring doubles as help text
# (RawTextHelpFormatter preserves its folder-tree layout).
argp = argparse.ArgumentParser(description=__doc__, add_help=True,
                               formatter_class=argparse.RawTextHelpFormatter)
argp.add_argument("-n", action="store", dest="name",
                  help="Name of new project folder.")
argp.add_argument("--gdb", action="store_true", default=False,
                  dest="make_gdb", help="Option to make a gdb on setup.")
argp.add_argument("--cart", action="store_true", default=False,
                  dest="make_cart",
                  help="Option to make cartographic resource folders.")
# TODO: For now, we assume user has $ cd'd into the desired base directory
args = argp.parse_args()
# arcpy import is deferred (it is slow) until we know a gdb is requested.
if args.make_gdb:
    import arcpy
# =============================================================================
# FUNCTIONS
def make_gdb(gdb_name):
    """
    Create a file geodatabase and copy it into the project (unimplemented stub).

    NOTE(review): calling this as-is raises TypeError — ``copy2(None, None)``
    is placeholder code and no gdb is created.  The function is never invoked
    (see the TODO inside ``main``).
    """
    # if no project exists, place gdb in cwd
    # if it does, place in DATA/
    # process: make gdb in C:/temp and copy to cwd
    copy2(None, None)
    pass
def main(dest_folder):
    """
    Create the standard GIS project folder tree rooted at ``dest_folder``.

    :param dest_folder: absolute path of the new project folder; raises
        FileExistsError (via os.mkdir) if it already exists.

    NOTE(review): reads the module-level ``args.make_cart`` flag, so this
    function only works after the CLI has been parsed.
    """
    # <project>/
    os.mkdir(dest_folder)
    # DATA/
    # RAWDATA/
    os.makedirs(os.path.join(dest_folder, "data", "raw"))
    # TODO: make scratch gdb here? / use C:/temp?
    # MISC/
    os.mkdir(os.path.join(dest_folder, "data", "misc"))
    # GDB/
    # TODO: if args.make_gdb: make_gdb(dest_folder)
    # MAPS/
    # archive/
    os.makedirs(os.path.join(dest_folder, "maps", "archive"))
    # FINAL/
    os.mkdir(os.path.join(dest_folder, "maps", "final"))
    if args.make_cart:
        # Optional cartographic-design resources.
        # DESIGN/
        # FONTS/
        os.makedirs(os.path.join(dest_folder, "design", "fonts"))
        # IMAGES/
        os.mkdir(os.path.join(dest_folder, "design", "images"))
        # RASTERS/
        os.mkdir(os.path.join(dest_folder, "design", "rasters"))
        # VECTORS/
        os.mkdir(os.path.join(dest_folder, "design", "vectors"))
    # REPORTS/
    os.mkdir(os.path.join(dest_folder, "reports"))
    # RESOURCES/
    # TOOLS/
    os.makedirs(os.path.join(dest_folder, "resources", "tools"))
    return
# =============================================================================
# RUN IT
if __name__ == "__main__":
    # Fall back to a default folder name when -n was not supplied.
    if not args.name:
        args.name = "new_project"
    # Project is created inside the current working directory (see TODO above).
    new_proj = os.path.join(os.getcwd(), args.name)
    main(new_proj)
| StarcoderdataPython |
118277 | from sklearn.naive_bayes import BernoulliNB
from run_binary_classifier import run
# Hyper-parameter grid for the text-classification pipeline stages
# (bag_of_words -> dim_reduct -> normalizer -> classifier); each key targets
# a named pipeline step. All values are single-element lists, so the "grid"
# is one fixed configuration.
param_grid = {
    'bag_of_words__stop_words': ['english'],
    'bag_of_words__ngram_range': [(1, 2)],
    'bag_of_words__max_features': [500],
    'dim_reduct__n_components': [300],
    'normalizer__norm': ['l2'],
    'classifier__alpha': [1.0],
    'classifier__binarize': [0.0]
}
# Bernoulli naive Bayes over binarized features; run() wires it into the
# shared binary-classification pipeline.
clf = BernoulliNB()
run(param_grid, clf)
| StarcoderdataPython |
3242074 | import matplotlib.pyplot as plt
import numpy as np
from lib.plot_utils import plot_all_states, plot_agent, plot_value_table, plot_env_agent_and_policy_at_state, \
plot_env_agent_and_chosen_action, plot_policy
from lib.grid_world import a_index_to_symbol
class TDEvaluation:
    def __init__(self, env, policy, initial_value_table, alpha=0.2, vmin=-8, vmax=0.5):
        """
        Step-by-step TD(0) policy-evaluation visualiser for a grid world.

        :param env: grid-world environment exposing state/states/step/reset
        :param policy: per-state action-probability table (indexed by state index)
        :param initial_value_table: starting state-value estimates (mutated in place)
        :param alpha: TD learning rate
        :param vmin, vmax: colour-scale bounds for value-table plots
        """
        self.env = env
        self.policy = policy
        self.value_table = initial_value_table
        self.alpha = alpha
        self.vmin, self.vmax = vmin, vmax
        # Matplotlib handles of the most recent figure (set on each plot).
        self.fig, self.ax = None, None
        # Phase of the step state-machine driven by next_step().
        self.phase = 'choosing_actions'
        self.sampled_a = None
        # Transitions [s, a, r, s', terminal] of the episode in progress.
        self.current_episode = []
        self.current_episode_returns = []
        # Number of completed episodes.
        self.i = 0
        self.td_target = None
        self.env.reset()
    @property
    def s(self):
        """Current environment state (delegates to the wrapped env)."""
        return self.env.state
    @property
    def s_idx(self):
        """Index of the current state within the env's state list."""
        return self.env.states.index(self.s)
    @property
    def s_centered(self):
        """State coordinates shifted to the cell centre (used for plotting)."""
        return [self.s[0] + 0.5, self.s[1] + 0.5]
    def next_step(self, fast_execution=False):
        """
        Advance the TD(0) visualisation by one phase of its state machine.

        Phases cycle: choosing_actions -> showing_sampled_action ->
        carrying_out_action -> showing_td_target -> updating_value_table ->
        showing_new_value_table -> (back to choosing_actions; episode counter
        ``self.i`` increments on a terminal transition).

        :param fast_execution: when True, skip all plotting/printing and
            instead accumulate the log text into the returned string.
        :return: log text for fast mode, '' otherwise.
        """
        print_string = ''
        if self.phase == 'choosing_actions':
            # Show the current state and the policy's action distribution.
            if not fast_execution:
                self.fig, self.ax = plt.subplots(ncols=3, figsize=(20, 6))
                plot_env_agent_and_policy_at_state(self.env, self.s, self.policy, self.ax[0])
                plot_value_table(self.env, self.value_table, self.ax[1], vmin=self.vmin, vmax=self.vmax)
                plot_policy(self.env, self.policy, self.ax[2])
                plt.show()
                print('Sampling action...\n')
            self.phase = 'showing_sampled_action'
        elif self.phase == 'showing_sampled_action':
            # Sample one of the 4 grid actions from the policy at this state.
            self.sampled_a = np.random.choice(4, p=self.policy[self.s_idx])
            if not fast_execution:
                self.fig, self.ax = plt.subplots(ncols=3, figsize=(20, 6))
                plot_env_agent_and_chosen_action(self.env, self.s, self.sampled_a, self.ax[0])
                plot_value_table(self.env, self.value_table, self.ax[1], vmin=self.vmin, vmax=self.vmax)
                plot_policy(self.env, self.policy, self.ax[2])
                plt.show()
                print(f'Action sampled: {a_index_to_symbol[self.sampled_a]}\n')
            self.phase = 'carrying_out_action'
        elif self.phase == 'carrying_out_action':
            # Execute the action and record the transition.
            old_s = self.s
            s_prime, r, t = self.env.step(self.sampled_a)
            self.current_episode.append([old_s, self.sampled_a, r, s_prime, t])
            if not fast_execution:
                self.fig, self.ax = plt.subplots(ncols=3, figsize=(20, 6))
                plot_all_states(self.env, self.ax[0])
                plot_agent(old_s, self.ax[0], alpha=0.3)
                plot_agent(self.s, self.ax[0])
                plot_value_table(self.env, self.value_table, self.ax[1], vmin=self.vmin, vmax=self.vmax)
                plot_policy(self.env, self.policy, self.ax[2])
                plt.show()
                print(f'Transition: <{old_s}, {a_index_to_symbol[self.sampled_a]}, {r:.1f}, {s_prime}>')
            self.phase = 'showing_td_target'
        elif self.phase == 'showing_td_target':
            if not fast_execution:
                self.fig, self.ax = plt.subplots(ncols=3, figsize=(20, 6))
                plot_all_states(self.env, self.ax[0])
                plot_agent(self.s, self.ax[0])
                plot_value_table(self.env, self.value_table, self.ax[1], vmin=self.vmin, vmax=self.vmax)
                plot_policy(self.env, self.policy, self.ax[2])
                plt.show()
            # TD target = r + gamma * V(s') with gamma fixed at 0.9.
            old_s, a, r, s_prime, _ = self.current_episode[-1]
            s_prime_idx = self.env.states.index(s_prime)
            self.td_target = r + 0.9 * self.value_table[s_prime_idx]
            if not fast_execution:
                print(f'Transition: <{old_s}, {a_index_to_symbol[a]}, {r:.1f}, {s_prime}>\n')
                print('Updating value table:')
                print(f'TD target = R + 𝛾 * V({s_prime}) = {r:.1f} + 0.9*{self.value_table[s_prime_idx]:.2f} = '
                      f'{self.td_target:.2f}')
            self.phase = 'updating_value_table'
        elif self.phase == 'updating_value_table':
            if not fast_execution:
                self.fig, self.ax = plt.subplots(ncols=3, figsize=(20, 6))
                plot_all_states(self.env, self.ax[0])
                plot_agent(self.s, self.ax[0])
                plot_value_table(self.env, self.value_table, self.ax[1], vmin=self.vmin, vmax=self.vmax)
                plot_policy(self.env, self.policy, self.ax[2])
                plt.show()
            # Exponential-moving-average update toward the TD target.
            old_s, a, r, s_prime, _ = self.current_episode[-1]
            old_s_idx = self.env.states.index(old_s)
            new_value = self.alpha * self.td_target + (1 - self.alpha) * self.value_table[old_s_idx]
            if fast_execution:
                print_string += f'Transition: <{old_s}, {a_index_to_symbol[a]}, {r:.1f}, {s_prime}>\n'
                print_string += f'TD target = {self.td_target:.2f}\n'
                print_string += f'V({old_s}) ← {self.value_table[old_s_idx]:.2f} + {self.alpha:.2f} * ' \
                                f'({self.td_target:.2f} - {self.value_table[old_s_idx]:.2f}) = {new_value:.2f}\n\n'
            else:
                print(f'Transition: <{old_s}, {a_index_to_symbol[a]}, {r:.1f}, {s_prime}>\n')
                print('Updating value table:')
                print(f'TD target = {self.td_target:.2f}')
                print(f'V({old_s}) ← {self.value_table[old_s_idx]:.2f} + {self.alpha:.2f} * '
                      f'({self.td_target:.2f} - {self.value_table[old_s_idx]:.2f}) = {new_value:.2f}\n')
            self.value_table[old_s_idx] = new_value
            self.phase = 'showing_new_value_table'
        elif self.phase == 'showing_new_value_table':
            if not fast_execution:
                self.fig, self.ax = plt.subplots(ncols=3, figsize=(20, 6))
                plot_all_states(self.env, self.ax[0])
                plot_agent(self.s, self.ax[0])
                plot_value_table(self.env, self.value_table, self.ax[1], vmin=self.vmin, vmax=self.vmax)
                plot_policy(self.env, self.policy, self.ax[2])
                plt.show()
            old_s, a, r, s_prime, t = self.current_episode[-1]
            old_s_idx = self.env.states.index(old_s)
            if not fast_execution:
                print(f'Transition: <{old_s}, {a_index_to_symbol[a]}, {r:.1f}, {s_prime}>\n')
                print('Updating value table:')
                print(f'TD target = {self.td_target:.2f}')
                print(f'V({old_s}) = {self.value_table[old_s_idx]:.2f}')
            if t:
                # Terminal transition: start a fresh episode.
                if not fast_execution:
                    print('\nTerminal state, reseting environment.')
                self.env.reset()
                self.current_episode = []
                self.i += 1
            self.phase = 'choosing_actions'
        else:
            raise ValueError(f'Phase {self.phase} not recognized.')
        return print_string
def finish_episode(self):
print_string = ''
current_i = self.i
while self.i == current_i:
print_string += f'{self.next_step(fast_execution=True)}'
self.fig, self.ax = plt.subplots(ncols=3, figsize=(20, 6))
plot_all_states(self.env, self.ax[0])
plot_agent(self.s, self.ax[0])
plot_value_table(self.env, self.value_table, self.ax[1], vmin=self.vmin, vmax=self.vmax)
plot_policy(self.env, self.policy, self.ax[2])
plt.show()
print(print_string)
| StarcoderdataPython |
1675564 | <filename>models/language_modeling/tensorflow/bert_large/training/fp32/generic_ops.py
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main BERT model and related functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
# Module-level precision policy: _inprecision is the dtype used *inside*
# numerically sensitive ops, _rprecision is the dtype results are cast back to.
_inprecision = tf.float32
_rprecision = tf.float32
_use_experimental_gelu = False
def set_rprecision(dt):
    """Set the result dtype (e.g. tf.bfloat16) used by the cast helpers below."""
    global _rprecision
    _rprecision=dt
def set_global_flags(experimental_gelu):
    """Toggle use of tf.nn.gelu instead of the hand-written tanh approximation."""
    global _use_experimental_gelu
    _use_experimental_gelu = experimental_gelu
def i_cast(x) :
    """Cast x to the internal compute precision."""
    return tf.cast(x, _inprecision)
def r_cast(x) :
    """Cast x to the configured result precision."""
    return tf.cast(x, _rprecision)
def matmul(matA, matB, transpose_b=False) :
    """Matrix multiply in internal precision, casting the result back."""
    matA = i_cast(matA)
    matB = i_cast(matB)
    matC = tf.matmul(matA, matB, transpose_b=transpose_b)
    return r_cast(matC)
def multiply(x,y):
    """Elementwise multiply after casting both operands to result precision."""
    x = r_cast(x)
    y = r_cast(y)
    return tf.multiply(x,y)
def mzip(x,y):
    """Zip two tensors, first casting both to result precision when x is bfloat16.

    NOTE(review): only x's dtype is inspected; if y alone is bfloat16 neither
    operand is cast — confirm that asymmetry is intended.
    """
    if x.dtype== tf.bfloat16:
        x = r_cast(x)
        y = r_cast(y)
    return zip(x,y)
def tanh(x):
    """tanh computed in internal precision, result cast back."""
    x = i_cast(x)
    rval = tf.tanh(x)
    return r_cast(rval)
def softmax(scores, axis=None):
    """Softmax computed in internal precision, result cast back."""
    scores = i_cast(scores)
    rval = tf.nn.softmax(scores, axis)
    return r_cast(rval)
def layer_norm(inputs, begin_norm_axis, begin_params_axis, scope):
    """Layer-normalize `inputs` in internal precision.

    NOTE(review): begin_norm_axis, begin_params_axis and scope are accepted
    for signature compatibility but ignored — Keras LayerNormalization runs
    with its defaults. Confirm callers do not rely on non-default axes.
    """
    inputs = i_cast(inputs)
    #lnorm = tf.keras.layers.LayerNormalization(axis=1, center=True, scale=True)
    lnorm = tf.keras.layers.LayerNormalization()
    out_tensor = lnorm(inputs)
    return r_cast(out_tensor)
"Moved from modeling.py"
def gelu(x):
    """Gaussian Error Linear Unit.

    This is a smoother version of the RELU.
    Original paper: https://arxiv.org/abs/1606.08415

    When the module-level ``_use_experimental_gelu`` flag is set (see
    ``set_global_flags``) the native tf.nn.gelu is used; otherwise the tanh
    approximation is evaluated in internal precision and cast back.

    Args:
      x: float Tensor to perform activation.

    Returns:
      `x` with the GELU activation applied.
    """
    if _use_experimental_gelu :
        return tf.nn.gelu(x)
    else:
        x = i_cast(x)
        cdf = 0.5 * (1.0 + tf.tanh(
            (np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3)))))
        rval = x * cdf
        return r_cast(rval)
def logTheLossHook(total_loss, n):
    """Return a training hook that logs `total_loss` every `n` steps."""
    return tf.compat.v1.train.LoggingTensorHook({"\t Loss " : total_loss}, every_n_iter=n)
| StarcoderdataPython |
82681 | import sys
# Smoke-test script: send an 'info' command to the local troup node and print
# the result.  The parent directory is added to the path so the in-tree
# package is importable when run from the examples folder.
sys.path.append('..')
from troup.client import CommandAPI, client_to_local_node, ChannelClient
#cc = ChannelClient(nodes_specs=['RPI:ws://192.168.2.128:7000'])
cc = client_to_local_node()
cmd = CommandAPI(channel_client=cc)
promise = cmd.send(CommandAPI.command('info', {}))
print(promise)
print('cmd send')
try:
    # Blocks until the remote node answers (or the promise fails).
    result = promise.result
    print('Result -> %s' % str(result))
finally:
    # Always release the channel, even if awaiting the result raised.
    cmd.shutdown()
print('done')
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.