repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
jpshort/odoo | comunity_modules/account_financial_report_webkit_xls/report/partners_balance_xls.py | 25 | 20947 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2013 Noviat nv/sa (www.noviat.com). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import xlwt
from openerp.addons.report_xls.report_xls import report_xls
from openerp.addons.report_xls.utils import rowcol_to_cell
from openerp.addons.account_financial_report_webkit.report.partner_balance \
import PartnerBalanceWebkit
from openerp.tools.translate import _
# import logging
# _logger = logging.getLogger(__name__)
def display_line(all_comparison_lines):
    """Return True if at least one comparison line has a truthy balance.

    :param all_comparison_lines: iterable of dicts that may carry a
        'balance' key (missing key counts as zero / not displayable).
    """
    # Generator instead of a list so any() can short-circuit on the
    # first non-zero balance.
    return any(line.get('balance') for line in all_comparison_lines)
class partners_balance_xls(report_xls):
    """XLS rendering of the Partners Balance webkit report.

    Each ``print_*`` method writes one section of the sheet and returns
    the next free row position.  ``_p`` is the webkit report parser
    context object and ``_xs`` its dict of xlwt style fragments
    (assumed from usage — confirm against report_xls base class).
    """

    # Default widths (in characters) for the eight report columns.
    column_sizes = [12, 40, 25, 17, 17, 17, 17, 17]

    def print_title(self, ws, _p, row_position, xlwt, _xs):
        """Write the 'report - company - currency' title row."""
        cell_style = xlwt.easyxf(_xs['xls_title'])
        report_name = ' - '.join([_p.report_name.upper(),
                                  _p.company.partner_id.name,
                                  _p.company.currency_id.name])
        c_specs = [
            ('report_name', 1, 0, 'text', report_name),
        ]
        row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
        row_position = self.xls_write_row(
            ws, row_position, row_data, row_style=cell_style)
        return row_position

    def print_empty_row(self, ws, row_position):
        """Write an empty row whose only purpose is to set column widths."""
        c_sizes = self.column_sizes
        c_specs = [('empty%s' % i, 1, c_sizes[i], 'text', None)
                   for i in range(0, len(c_sizes))]
        row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
        row_position = self.xls_write_row(
            ws, row_position, row_data, set_column_size=True)
        return row_position

    def print_header_titles(self, ws, _p, data, row_position, xlwt, _xs):
        """Write the header row of filter labels (fiscal year, filters...)."""
        cell_format = _xs['bold'] + _xs['fill_blue'] + _xs['borders_all']
        cell_style = xlwt.easyxf(cell_format)
        cell_style_center = xlwt.easyxf(cell_format + _xs['center'])
        c_specs = [
            ('fy', 1, 0, 'text', _('Fiscal Year'), None, cell_style_center),
            ('af', 1, 0, 'text', _('Accounts Filter'),
             None, cell_style_center),
            # Label depends on whether the user filtered by dates or periods.
            ('df', 1, 0, 'text', _p.filter_form(data) == 'filter_date' and _(
                'Dates Filter') or _('Periods Filter'), None,
             cell_style_center),
            ('pf', 1, 0, 'text', _('Partners Filter'),
             None, cell_style_center),
            ('tm', 1, 0, 'text', _('Target Moves'), None, cell_style_center),
            ('ib', 1, 0, 'text', _('Initial Balance'),
             None, cell_style_center),
            ('coa', 1, 0, 'text', _('Chart of Account'),
             None, cell_style_center),
        ]
        row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
        row_position = self.xls_write_row(
            ws, row_position, row_data, row_style=cell_style)
        return row_position

    def print_header_data(self, ws, _p, data, row_position, xlwt, _xs,
                          initial_balance_text):
        """Write the header row of filter values matching the titles above."""
        cell_format = _xs['borders_all'] + _xs['wrap'] + _xs['top']
        cell_style = xlwt.easyxf(cell_format)
        cell_style_center = xlwt.easyxf(cell_format + _xs['center'])
        c_specs = [
            ('fy', 1, 0, 'text', _p.fiscalyear.name if _p.fiscalyear else '-',
             None, cell_style_center),
            ('af', 1, 0, 'text', _p.accounts(data) and ', '.join(
                [account.code for account in _p.accounts(data)]) or _('All'),
             None, cell_style_center),
        ]
        # Build the "From: ... To: ..." cell from either dates or periods.
        df = _('From') + ': '
        if _p.filter_form(data) == 'filter_date':
            df += _p.start_date if _p.start_date else u''
        else:
            df += _p.start_period.name if _p.start_period else u''
        df += ' ' + _('\nTo') + ': '
        if _p.filter_form(data) == 'filter_date':
            df += _p.stop_date if _p.stop_date else u''
        else:
            df += _p.stop_period.name if _p.stop_period else u''
        c_specs += [
            ('df', 1, 0, 'text', df, None, cell_style_center),
            ('tm', 1, 0, 'text', _p.display_partner_account(
                data), None, cell_style_center),
            ('pf', 1, 0, 'text', _p.display_target_move(
                data), None, cell_style_center),
            ('ib', 1, 0, 'text', initial_balance_text[
                _p.initial_balance_mode], None, cell_style_center),
            ('coa', 1, 0, 'text', _p.chart_account.name,
             None, cell_style_center),
        ]
        row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
        row_position = self.xls_write_row(
            ws, row_position, row_data, row_style=cell_style)
        return row_position

    def print_comparison_header(self, _xs, xlwt, row_position, _p, ws,
                                initial_balance_text):
        """Write one descriptive row per configured comparison column."""
        cell_format_ct = _xs['bold'] + _xs['fill_blue'] + _xs['borders_all']
        cell_style_ct = xlwt.easyxf(cell_format_ct)
        c_specs = [('ct', 7, 0, 'text', _('Comparisons'))]
        row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
        row_position = self.xls_write_row(
            ws, row_position, row_data, row_style=cell_style_ct)
        cell_format = _xs['borders_all'] + _xs['wrap'] + _xs['top']
        cell_style_center = xlwt.easyxf(cell_format)
        for index, params in enumerate(_p.comp_params):
            c_specs = [
                ('c', 2, 0, 'text', _('Comparison') + str(index + 1) +
                 ' (C' + str(index + 1) + ')')]
            # The comparison can be bounded by dates, periods or a year.
            if params['comparison_filter'] == 'filter_date':
                c_specs += [('f', 2, 0, 'text', _('Dates Filter') + ': ' +
                             _p.formatLang(params['start'], date=True) + ' - '
                             + _p.formatLang(params['stop'], date=True))]
            elif params['comparison_filter'] == 'filter_period':
                c_specs += [('f', 2, 0, 'text', _('Periods Filter') +
                             ': ' + params['start'].name + ' - ' +
                             params['stop'].name)]
            else:
                c_specs += [('f', 2, 0, 'text', _('Fiscal Year') +
                             ': ' + params['fiscalyear'].name)]
            c_specs += [('ib', 2, 0, 'text', _('Initial Balance') +
                         ': ' +
                         initial_balance_text[params['initial_balance_mode']])]
            row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
            row_position = self.xls_write_row(
                ws, row_position, row_data, row_style=cell_style_center)
        return row_position

    def print_account_header(self, ws, _p, _xs, xlwt, row_position):
        """Write the column-title row for one account's partner table."""
        cell_format = _xs['bold'] + _xs['fill'] + \
            _xs['borders_all'] + _xs['wrap'] + _xs['top']
        cell_style = xlwt.easyxf(cell_format)
        cell_style_right = xlwt.easyxf(cell_format + _xs['right'])
        cell_style_center = xlwt.easyxf(cell_format + _xs['center'])
        # The name column is narrower when an initial-balance column exists.
        if len(_p.comp_params) == 2:
            account_span = 3
        else:
            account_span = _p.initial_balance_mode and 2 or 3
        c_specs = [
            ('account', account_span, 0, 'text', _('Account / Partner Name')),
            ('code', 1, 0, 'text', _('Code / Ref')),
        ]
        if _p.comparison_mode == 'no_comparison':
            if _p.initial_balance_mode:
                c_specs += [('init_bal', 1, 0, 'text',
                             _('Initial Balance'), None, cell_style_right)]
            c_specs += [
                ('debit', 1, 0, 'text', _('Debit'), None, cell_style_right),
                ('credit', 1, 0, 'text', _('Credit'), None, cell_style_right),
            ]
        if _p.comparison_mode == 'no_comparison' or not _p.fiscalyear:
            c_specs += [('balance', 1, 0, 'text',
                         _('Balance'), None, cell_style_right)]
        else:
            c_specs += [('balance_fy', 1, 0, 'text', _('Balance %s') %
                         _p.fiscalyear.name, None, cell_style_right)]
        if _p.comparison_mode in ('single', 'multiple'):
            # One balance column per comparison, labelled by year when known.
            for index in range(_p.nb_comparison):
                if _p.comp_params[index][
                        'comparison_filter'] == 'filter_year' \
                        and _p.comp_params[index].get('fiscalyear', False):
                    c_specs += [('balance_%s' % index, 1, 0, 'text',
                                 _('Balance %s') %
                                 _p.comp_params[index]['fiscalyear'].name,
                                 None, cell_style_right)]
                else:
                    c_specs += [('balance_%s' % index, 1, 0, 'text',
                                 _('Balance C%s') % (index + 1), None,
                                 cell_style_right)]
            if _p.comparison_mode == 'single':
                c_specs += [
                    ('diff', 1, 0, 'text', _('Difference'),
                     None, cell_style_right),
                    ('diff_percent', 1, 0, 'text',
                     _('% Difference'), None, cell_style_center),
                ]
        row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
        row_position = self.xls_write_row(
            ws, row_position, row_data, row_style=cell_style)
        return row_position

    def print_row_code_account(self, ws, current_account, row_position, _xs,
                               xlwt):
        """Write the 'CODE - Account name' banner row for an account."""
        cell_format = _xs['xls_title'] + _xs['bold'] + \
            _xs['fill'] + _xs['borders_all']
        cell_style = xlwt.easyxf(cell_format)
        c_specs = [
            ('acc_title', 7, 0, 'text', ' - '.join([current_account.code,
                                                    current_account.name])), ]
        row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
        row_position = self.xls_write_row(
            ws, row_position, row_data, cell_style)
        return row_position

    def print_account_totals(self, _xs, xlwt, ws, row_start_account,
                             row_position, current_account, _p):
        """Write the totals row for an account as spreadsheet formulas.

        Columns 3..6 get SUM() formulas over the partner rows; in single
        comparison mode column 6 becomes a percentage formula instead.
        Returns row_position + 1 (leaves a blank row after the totals).
        """
        cell_format = _xs['bold'] + _xs['fill'] + \
            _xs['borders_all'] + _xs['wrap'] + _xs['top']
        cell_style = xlwt.easyxf(cell_format)
        cell_style_decimal = xlwt.easyxf(
            cell_format + _xs['right'],
            num_format_str=report_xls.decimal_format)
        c_specs = [
            ('acc_title', 2, 0, 'text', current_account.name),
            ('code', 1, 0, 'text', current_account.code),
        ]
        for column in range(3, 7):
            # in case of one single comparison, the column 6 will contain
            # percentages
            if (_p.comparison_mode == 'single' and column == 6):
                total_diff = rowcol_to_cell(row_position, column - 1)
                total_balance = rowcol_to_cell(row_position, column - 2)
                account_formula = 'Round(' + total_diff + \
                    '/' + total_balance + '*100;0)'
            else:
                account_start = rowcol_to_cell(row_start_account, column)
                account_end = rowcol_to_cell(row_position - 1, column)
                account_formula = 'Round(SUM(' + \
                    account_start + ':' + account_end + ');2)'
            c_specs += [('total%s' % column, 1, 0, 'text', None,
                         account_formula, None, cell_style_decimal)]
        row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
        row_position = self.xls_write_row(
            ws, row_position, row_data, cell_style)
        return row_position + 1

    def generate_xls_report(self, _p, _xs, data, objects, wb):
        """Entry point: render the whole Partners Balance report into *wb*.

        Iterates the accounts in *objects*, writing a banner, a column
        header, one row per partner and a totals row for each.
        """
        # Initialisations
        ws = wb.add_sheet(_p.report_name[:31])
        ws.panes_frozen = True
        ws.remove_splits = True
        ws.portrait = 0  # Landscape
        ws.fit_width_to_pages = 1
        row_pos = 0
        ws.header_str = self.xls_headers['standard']
        ws.footer_str = self.xls_footers['standard']
        # Print Title
        row_pos = self.print_title(ws, _p, row_pos, xlwt, _xs)
        # Print empty row to define column sizes
        row_pos = self.print_empty_row(ws, row_pos)
        # Print Header Table titles (Fiscal Year - Accounts Filter - Periods
        # Filter...)
        row_pos = self.print_header_titles(ws, _p, data, row_pos, xlwt, _xs)
        initial_balance_text = {
            'initial_balance': _('Computed'),
            'opening_balance': _('Opening Entries'),
            False: _('No')}  # cf. account_report_partner_balance.mako
        # Print Header Table data
        row_pos = self.print_header_data(
            ws, _p, data, row_pos, xlwt, _xs, initial_balance_text)
        # Print comparison header table
        if _p.comparison_mode in ('single', 'multiple'):
            row_pos += 1
            row_pos = self.print_comparison_header(
                _xs, xlwt, row_pos, _p, ws, initial_balance_text)
        # Freeze the line
        ws.set_horz_split_pos(row_pos)
        # cell styles for account data
        regular_cell_format = _xs['borders_all']
        regular_cell_style = xlwt.easyxf(regular_cell_format)
        regular_cell_style_decimal = xlwt.easyxf(
            regular_cell_format + _xs['right'],
            num_format_str=report_xls.decimal_format)
        row_pos += 1
        for current_account in objects:
            partners_order = _p['partners_order_accounts']\
                .get(current_account.id, False)
            # do not display accounts without partners
            if not partners_order:
                continue
            comparisons = _p['comparisons_accounts']\
                .get(current_account.id, False)
            # in multiple columns mode, we do not want to print accounts
            # without any rows
            if _p.comparison_mode in ('single', 'multiple'):
                all_comparison_lines = [comp['partners_amounts'][partner_id[1]]
                                        for partner_id in partners_order
                                        for comp in comparisons]
                if not display_line(all_comparison_lines):
                    continue
            current_partner_amounts = _p['partners_amounts_accounts']\
                .get(current_account.id, False)
            if _p.comparison_mode in ('single', 'multiple'):
                # Running totals per comparison column for this account.
                comparison_total = {}
                for i, comp in enumerate(comparisons):
                    comparison_total[i] = {'balance': 0.0}
            # print row: Code - Account name
            row_pos = self.print_row_code_account(
                ws, current_account, row_pos, _xs, xlwt)
            row_account_start = row_pos
            # Print row: Titles "Account/Partner Name-Code/ref-Initial
            # Balance-Debit-Credit-Balance" or "Account/Partner
            # Name-Code/ref-Balance Year-Balance Year2-Balance C2-Balance C3"
            row_pos = self.print_account_header(ws, _p, _xs, xlwt, row_pos)
            for (partner_code_name, partner_id, partner_ref, partner_name) \
                    in partners_order:
                partner = current_partner_amounts.get(partner_id, {})
                # in single mode, we have to display all the partners even if
                # their balance is 0.0 because the initial balance should match
                # with the previous year closings
                # in multiple columns mode, we do not want to print partners
                # which have a balance at 0.0 in each comparison column
                if _p.comparison_mode in ('single', 'multiple'):
                    all_comparison_lines = [comp['partners_amounts']
                                            [partner_id]
                                            for comp in comparisons
                                            if comp['partners_amounts'].
                                            get(partner_id)]
                    if not display_line(all_comparison_lines):
                        continue
                # display data row
                if len(_p.comp_params) == 2:
                    account_span = 3
                else:
                    account_span = _p.initial_balance_mode and 2 or 3
                c_specs = [('acc_title', account_span, 0, 'text',
                            partner_name if partner_name else
                            _('Unallocated'))]
                c_specs += [('partner_ref', 1, 0, 'text',
                             partner_ref if partner_ref else '')]
                if _p.comparison_mode == 'no_comparison':
                    # Balance cell is a formula: [init +] debit - credit.
                    bal_formula = ''
                    if _p.initial_balance_mode:
                        init_bal_cell = rowcol_to_cell(row_pos, 3)
                        bal_formula = init_bal_cell + '+'
                        debit_col = 4
                        c_specs += [
                            ('init_bal', 1, 0, 'number', partner.get(
                                'init_balance', 0.0), None,
                             regular_cell_style_decimal),
                        ]
                    else:
                        debit_col = 3
                    c_specs += [
                        ('debit', 1, 0, 'number', partner.get('debit', 0.0),
                         None, regular_cell_style_decimal),
                        ('credit', 1, 0, 'number', partner.get('credit', 0.0),
                         None, regular_cell_style_decimal),
                    ]
                    debit_cell = rowcol_to_cell(row_pos, debit_col)
                    credit_cell = rowcol_to_cell(row_pos, debit_col + 1)
                    bal_formula += debit_cell + '-' + credit_cell
                    c_specs += [('bal', 1, 0, 'number', None,
                                 bal_formula, regular_cell_style_decimal), ]
                else:
                    c_specs += [('bal', 1, 0, 'number', partner.get('balance',
                                                                    0.0),
                                 None, regular_cell_style_decimal), ]
                if _p.comparison_mode in ('single', 'multiple'):
                    for i, comp in enumerate(comparisons):
                        comp_partners = comp['partners_amounts']
                        balance = diff = percent_diff = 0
                        if comp_partners.get(partner_id):
                            balance = comp_partners[partner_id]['balance']
                            diff = comp_partners[partner_id]['diff']
                            percent_diff = comp_partners[
                                partner_id]['percent_diff']
                        comparison_total[i]['balance'] += balance
                        c_specs += [('balance_%s' % i, 1, 0, 'number',
                                     balance, None,
                                     regular_cell_style_decimal), ]
                    # no diff in multiple comparisons because it shows too much
                    # data
                    if _p.comparison_mode == 'single':
                        c_specs += [('balance_diff', 1, 0, 'number',
                                     diff, None, regular_cell_style_decimal), ]
                        if percent_diff is False:
                            c_specs += [('balance', 1, 0, 'number',
                                         diff, None,
                                         regular_cell_style_decimal), ]
                        else:
                            c_specs += [('perc_diff', 1, 0, 'number',
                                         int(round(percent_diff))), ]
                row_data = self.xls_row_template(
                    c_specs, [x[0] for x in c_specs])
                row_pos = self.xls_write_row(
                    ws, row_pos, row_data, regular_cell_style)
            row_pos = self.print_account_totals(
                _xs, xlwt, ws, row_account_start, row_pos, current_account, _p)
# Register this parser under the XLS report service name so OpenERP
# dispatches 'account_report_partner_balance_xls' rendering to it.
partners_balance_xls('report.account.account_report_partner_balance_xls',
                     'account.account',
                     parser=PartnerBalanceWebkit)
| agpl-3.0 |
jgillis/casadi | examples/python/simple_nlp.py | 1 | 1773 | #
# This file is part of CasADi.
#
# CasADi -- A symbolic framework for dynamic optimization.
# Copyright (C) 2010 by Joel Andersson, Moritz Diehl, K.U.Leuven. All rights reserved.
#
# CasADi is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# CasADi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with CasADi; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
#
# -*- coding: utf-8 -*-
# NOTE(review): this example uses Python 2 print-statement syntax and the
# legacy CasADi API (ssym/SXFunction/IpoptSolver); it targets old CasADi.
from casadi import *

# Declare variables: a 2-element symbolic vector x.
x = ssym("x",2)

# Form the NLP objective: minimize x0^2 + x1^2.
f = SXFunction([x],[x[0]**2 + x[1]**2])

# Form the NLP constraints: x0 + x1 - 10 (bounded below by 0 later).
g = SXFunction([x],[x[0]+x[1]-10])

# Pick an NLP solver
MySolver = IpoptSolver
#MySolver = WorhpSolver
#MySolver = SQPMethod

# Allocate a solver
solver = MySolver(f,g)
# SQPMethod additionally needs an inner QP solver configured.
if MySolver==SQPMethod:
    solver.setOption("qp_solver",QPOasesSolver)
    solver.setOption("qp_solver_options",{"printLevel":"none"})
solver.init()

# Set constraint bounds: lower bound 0 on g(x); upper bound left default.
solver.setInput(0.,"lbg")

# Solve the NLP
solver.evaluate()

# Print solution
print "-----"
print "objective at solution = ", solver.output("f")
print "primal solution = ", solver.output("x")
print "dual solution (x) = ", solver.output("lam_x")
print "dual solution (g) = ", solver.output("lam_g")
| lgpl-3.0 |
acecheese/CloudBot | plugins/youtube.py | 16 | 5563 | import re
import time
import isodate
import requests
from cloudbot import hook
from cloudbot.util import timeformat
from cloudbot.util.formatting import pluralize
# Matches YouTube watch/shortened/parody URLs and captures the video id.
youtube_re = re.compile(r'(?:youtube.*?(?:v=|/v/)|youtu\.be/|yooouuutuuube.*?id=)([-_a-zA-Z0-9]+)', re.I)

# YouTube Data API v3 endpoint templates; {} slots are video id and API key.
base_url = 'https://www.googleapis.com/youtube/v3/'
api_url = base_url + 'videos?part=contentDetails%2C+snippet%2C+statistics&id={}&key={}'
search_api_url = base_url + 'search?part=id&maxResults=1'
playlist_api_url = base_url + 'playlists?part=snippet%2CcontentDetails%2Cstatus'
video_url = "http://youtu.be/%s"
err_no_api = "The YouTube API is off in the Google Developers Console."
def get_video_description(video_id):
    """Return an IRC-formatted one-line summary of a YouTube video.

    Queries the YouTube Data API v3 for snippet, statistics and content
    details.  Returns ``err_no_api`` when the API is disabled (HTTP 403),
    None on other API errors, and a partial description when duration is
    unavailable (e.g. live streams).
    """
    json = requests.get(api_url.format(video_id, dev_key)).json()

    if json.get('error'):
        if json['error']['code'] == 403:
            return err_no_api
        else:
            return

    data = json['items']
    snippet = data[0]['snippet']
    statistics = data[0]['statistics']
    content_details = data[0]['contentDetails']

    # \x02 toggles bold in IRC formatting.
    out = '\x02{}\x02'.format(snippet['title'])

    if not content_details.get('duration'):
        return out

    length = isodate.parse_duration(content_details['duration'])
    out += ' - length \x02{}\x02'.format(timeformat.format_time(int(length.total_seconds()), simple=True))

    # likeCount/dislikeCount are omitted from the API response when the
    # uploader disables ratings; default to 0 instead of raising KeyError.
    like_count = int(statistics.get('likeCount', 0))
    dislike_count = int(statistics.get('dislikeCount', 0))
    total_votes = float(like_count + dislike_count)

    if total_votes != 0:
        # format
        likes = pluralize(like_count, "like")
        dislikes = pluralize(dislike_count, "dislike")

        percent = 100 * like_count / total_votes
        out += ' - {}, {} (\x02{:.1f}\x02%)'.format(likes,
                                                    dislikes, percent)

    if 'viewCount' in statistics:
        views = int(statistics['viewCount'])
        out += ' - \x02{:,}\x02 view{}'.format(views, "s"[views == 1:])

    uploader = snippet['channelTitle']

    upload_time = time.strptime(snippet['publishedAt'], "%Y-%m-%dT%H:%M:%S.000Z")
    out += ' - \x02{}\x02 on \x02{}\x02'.format(uploader,
                                                time.strftime("%Y.%m.%d", upload_time))

    if 'contentRating' in content_details:
        out += ' - \x034NSFW\x02'

    return out
@hook.on_start()
def load_key(bot):
    """Cache the Google Developers Console API key from the bot config."""
    global dev_key
    dev_key = bot.config.get("api_keys", {}).get("google_dev_key", None)
@hook.regex(youtube_re)
def youtube_url(match):
    """Describe a YouTube video whose URL was matched in chat."""
    video_id = match.group(1)
    return get_video_description(video_id)
@hook.command("youtube", "you", "yt", "y")
def youtube(text):
"""youtube <query> -- Returns the first YouTube search result for <query>."""
if not dev_key:
return "This command requires a Google Developers Console API key."
json = requests.get(search_api_url, params={"q": text, "key": dev_key, "type": "video"}).json()
if json.get('error'):
if json['error']['code'] == 403:
return err_no_api
else:
return 'Error performing search.'
if json['pageInfo']['totalResults'] == 0:
return 'No results found.'
video_id = json['items'][0]['id']['videoId']
return get_video_description(video_id) + " - " + video_url % video_id
@hook.command("youtime", "ytime")
def youtime(text):
"""youtime <query> -- Gets the total run time of the first YouTube search result for <query>."""
if not dev_key:
return "This command requires a Google Developers Console API key."
json = requests.get(search_api_url, params={"q": text, "key": dev_key, "type": "video"}).json()
if json.get('error'):
if json['error']['code'] == 403:
return err_no_api
else:
return 'Error performing search.'
if json['pageInfo']['totalResults'] == 0:
return 'No results found.'
video_id = json['items'][0]['id']['videoId']
json = requests.get(api_url.format(video_id, dev_key)).json()
if json.get('error'):
return
data = json['items']
snippet = data[0]['snippet']
content_details = data[0]['contentDetails']
statistics = data[0]['statistics']
if not content_details.get('duration'):
return
length = isodate.parse_duration(content_details['duration'])
l_sec = int(length.total_seconds())
views = int(statistics['viewCount'])
total = int(l_sec * views)
length_text = timeformat.format_time(l_sec, simple=True)
total_text = timeformat.format_time(total, accuracy=8)
return 'The video \x02{}\x02 has a length of {} and has been viewed {:,} times for ' \
'a total run time of {}!'.format(snippet['title'], length_text, views,
total_text)
ytpl_re = re.compile(r'(.*:)//(www.youtube.com/playlist|youtube.com/playlist)(:[0-9]+)?(.*)', re.I)
@hook.regex(ytpl_re)
def ytplaylist_url(match):
    """Describe a YouTube playlist whose URL was matched in chat."""
    # The playlist id is the value of the last '=' in the query string.
    location = match.group(4).split("=")[-1]
    json = requests.get(playlist_api_url, params={"id": location, "key": dev_key}).json()

    if json.get('error'):
        if json['error']['code'] == 403:
            return err_no_api
        else:
            return 'Error looking up playlist.'

    data = json['items']
    snippet = data[0]['snippet']
    content_details = data[0]['contentDetails']

    title = snippet['title']
    author = snippet['channelTitle']
    num_videos = int(content_details['itemCount'])
    # "s"[n == 1:] is '' for exactly one video, 's' otherwise.
    count_videos = ' - \x02{:,}\x02 video{}'.format(num_videos, "s"[num_videos == 1:])
    return "\x02{}\x02 {} - \x02{}\x02".format(title, count_videos, author)
| gpl-3.0 |
laperry1/android_external_chromium_org | tools/profile_chrome/chrome_controller_unittest.py | 76 | 1395 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import json
from profile_chrome import chrome_controller
from profile_chrome import controllers_unittest
class ChromeControllerTest(controllers_unittest.BaseControllerTest):
    """Device-backed integration tests for ChromeTracingController."""

    def testGetCategories(self):
        """Category query should return the two category lists."""
        # Not supported on stable yet.
        # TODO(skyostil): Remove this once category queries roll into stable.
        if self.browser == 'stable':
            return

        categories = \
            chrome_controller.ChromeTracingController.GetCategories(
                self.device, self.package_info)
        # Expect both the record and disabled-by-default lists, non-empty.
        self.assertEquals(len(categories), 2)
        self.assertTrue(categories[0])
        self.assertTrue(categories[1])

    def testTracing(self):
        """Start/stop a trace and verify the pulled file is valid JSON."""
        categories = '*'
        ring_buffer = False
        controller = chrome_controller.ChromeTracingController(self.device,
                                                               self.package_info,
                                                               categories,
                                                               ring_buffer)

        interval = 1
        try:
            controller.StartTracing(interval)
        finally:
            controller.StopTracing()

        result = controller.PullTrace()
        try:
            with open(result) as f:
                json.loads(f.read())
        finally:
            os.remove(result)
| bsd-3-clause |
jnvsor/apgtk | subcommand.py | 1 | 3790 | import subprocess
from errorwindow import ErrorDialogue
class ModeError(Exception):
    """Raised when random mode is selected with every character class
    disabled, so apg could not generate any password."""
    pass
class ProcessStderr(ChildProcessError):
    """Raised when the apg subprocess exits with a non-zero status.

    The captured stderr output (bytes) is kept in ``value`` for display
    in the error dialogue.
    """

    def __init__(self, value):
        # Chain to the base class so args/str() are set through the
        # normal Exception machinery, not just __new__.
        super().__init__(value)
        self.value = value
class CommandExecution:
    """Build, run and parse an ``apg`` password-generation command.

    ``binder`` maps input names ("amount", "length", "mode", ...) to the
    GUI widgets whose values drive the command line (assumed from usage —
    confirm against the binder implementation).
    """

    def __init__(self, binder):
        self.input = binder

    def execute(self):
        """Run apg with the options built from the GUI inputs.

        Returns True on success.  On any failure an ErrorDialogue is
        shown and None (falsy) is returned implicitly.
        """
        try:
            # apg only accepts string arguments; stringify everything.
            self.command = [str(i) for i in self.build()]
            proc = subprocess.Popen(self.command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            self.out, err = proc.communicate()
            if(proc.returncode):
                raise ProcessStderr(err)
        except ModeError:
            ErrorDialogue( "All character types are set to disabled.",
                "Can't create a password without characters!")
        except ProcessStderr as e:
            ErrorDialogue( "An unforseen error occurred in the APG subprocess.",
                "stderr output:\n" + e.value.decode())
        except FileNotFoundError:
            ErrorDialogue( "APG is not installed!","Please install apg through your "
                "package manager.")
        except Exception as e:
            ErrorDialogue( "An unforseen error occurred.",
                str(type(e).__name__) + ": " + str(e))
        else:
            return True

    def as_list(self):
        """Parse apg stdout into a list of dicts, one per password line.

        Column presence depends on the flags used in ``self.command``:
        -t adds a pronunciation column, -y a crypt hash, -l phonetics.
        """
        passwords = []
        for line in self.out.decode("utf-8").split("\n"):
            line = line.split()
            if(line == []):
                continue
            linedict = {"Password": line.pop(0)}
            if("-t" in self.command):
                # Pronunciation is printed in parentheses; strip them.
                linedict["Pronunciation"] = line.pop(0)[1:-1]
            if("-y" in self.command):
                linedict["Crypt"] = line.pop(0)
            if("-l" in self.command):
                linedict["Phonetics"] = line.pop(0)
            passwords.append(linedict)
        return passwords

    def build(self):
        """Assemble the apg argument list from the bound GUI inputs.

        Raises ModeError when random mode is requested with every
        character class disabled.
        """
        command = ["apg"]
        amount = ["-n", int(self.input["amount"].get_value())]
        min = ["-m", self.input["length"].get_min()]
        max = ["-x", self.input["length"].get_max()]
        seed, exclude, dictionary, mode, crypt, phone, pronouncedisplay = ([] for x in range(7))

        if(self.input["seed"].get_enabled()):
            seed = ["-c", self.input["seed"].get_value()]
        if(self.input["exclude"].get_enabled()):
            exclude = ["-E", self.input["exclude"].get_value()]
        if(self.input["dictionary"].get_enabled()):
            dictionary = ["-r", self.input["dictionary"].get_value()]

        # Each mode widget is tri-state: 1 = "should" (lowercase letter),
        # 2 = "must" (uppercase letter), anything else = disabled.
        for index, key in enumerate(sorted(self.input["mode"].widgets.keys())):
            if(self.input["mode"].widgets[key].get_value() == 1):
                mode += chr(ord(key)+32)
            elif(self.input["mode"].widgets[key].get_value() == 2):
                mode += key

        if(mode == []):
            if self.input["algorithm"].get_value():
                raise ModeError
            """ Pronunciation mode ignores mode setting anyway, but will balk at
            an empty mode setting, so we default but throw exception for random mode """
            mode = ["c", "l", "n", "s"]
        mode = ["-M", "".join(mode)]

        algorithm = ["-a", self.input["algorithm"].get_value()]
        # Algorithm 0 is pronounceable mode: also show the pronunciation.
        if not self.input["algorithm"].get_value():
            pronouncedisplay = ["-t"]

        if(self.input["crypt"].get_active()):
            crypt = ["-y"]
        if(self.input["phone"].get_active()):
            phone = ["-l"]

        return command+algorithm+mode+exclude+amount+min+max+\
            dictionary+seed+crypt+phone+pronouncedisplay
| gpl-3.0 |
keedio/hue | desktop/core/ext-py/MySQL-python-1.2.5/MySQLdb/__init__.py | 76 | 3229 | """MySQLdb - A DB API v2.0 compatible interface to MySQL.
This package is a wrapper around _mysql, which mostly implements the
MySQL C API.
connect() -- connects to server
See the C API specification and the MySQL documentation for more info
on other items.
For information on how MySQLdb handles type conversion, see the
MySQLdb.converters module.
"""
__revision__ = """$Revision$"""[11:-2]
from MySQLdb.release import __version__, version_info, __author__
import _mysql
# Refuse to run against a mismatched _mysql C extension build.
if version_info != _mysql.version_info:
    raise ImportError("this is MySQLdb version %s, but _mysql is version %r" %
                      (version_info, _mysql.version_info))

# DB API 2.0 (PEP 249) module-level attributes.
threadsafety = 1
apilevel = "2.0"
paramstyle = "format"
from _mysql import *
from MySQLdb.constants import FIELD_TYPE
from MySQLdb.times import Date, Time, Timestamp, \
DateFromTicks, TimeFromTicks, TimestampFromTicks
# Python < 2.4 compatibility: fall back to sets.ImmutableSet when the
# frozenset builtin does not exist.
try:
    frozenset
except NameError:
    from sets import ImmutableSet as frozenset
class DBAPISet(frozenset):
    """A special type of set for which A == x is true if A is a
    DBAPISet and x is a member of that set."""

    def __eq__(self, other):
        if isinstance(other, DBAPISet):
            return not self.difference(other)
        return other in self

    def __ne__(self, other):
        # Explicit inverse for Python 2, which does not derive __ne__
        # from __eq__ automatically.
        return not self.__eq__(other)

    # On Python 3, defining __eq__ resets __hash__ to None, which would
    # make DBAPISet instances unhashable; keep frozenset's hashing.
    __hash__ = frozenset.__hash__
# DB API 2.0 type objects: each groups the MySQL field types that map to
# one DB API category (a column type compares equal to its group).
STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING,
                   FIELD_TYPE.VAR_STRING])
BINARY = DBAPISet([FIELD_TYPE.BLOB, FIELD_TYPE.LONG_BLOB,
                   FIELD_TYPE.MEDIUM_BLOB, FIELD_TYPE.TINY_BLOB])
NUMBER = DBAPISet([FIELD_TYPE.DECIMAL, FIELD_TYPE.DOUBLE, FIELD_TYPE.FLOAT,
                   FIELD_TYPE.INT24, FIELD_TYPE.LONG, FIELD_TYPE.LONGLONG,
                   FIELD_TYPE.TINY, FIELD_TYPE.YEAR])
DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE])
TIME = DBAPISet([FIELD_TYPE.TIME])
TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME])
DATETIME = TIMESTAMP
# MySQL has no ROWID concept; provided empty for DB API completeness.
ROWID = DBAPISet()
# Pytest-style smoke tests for DBAPISet equality semantics.
def test_DBAPISet_set_equality():
    assert STRING == STRING

def test_DBAPISet_set_inequality():
    assert STRING != NUMBER

def test_DBAPISet_set_equality_membership():
    assert FIELD_TYPE.VAR_STRING == STRING

def test_DBAPISet_set_inequality_membership():
    assert FIELD_TYPE.DATE != STRING
def Binary(x):
    """DB API 2.0 Binary constructor: return *x* as a byte string.

    NOTE(review): str() yields bytes on Python 2 (this module's target);
    on Python 3 this would not produce bytes — confirm intent.
    """
    return str(x)
def Connect(*args, **kwargs):
    """Factory function for connections.Connection."""
    # NOTE(review): imported inside the function, presumably to avoid a
    # circular import at module load time — confirm.
    from MySQLdb.connections import Connection
    return Connection(*args, **kwargs)

# DB API alias plus historical aliases.
connect = Connection = Connect
# Public DB API 2.0 surface of the package.
__all__ = [ 'BINARY', 'Binary', 'Connect', 'Connection', 'DATE',
    'Date', 'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks',
    'TimestampFromTicks', 'DataError', 'DatabaseError', 'Error',
    'FIELD_TYPE', 'IntegrityError', 'InterfaceError', 'InternalError',
    'MySQLError', 'NULL', 'NUMBER', 'NotSupportedError', 'DBAPISet',
    'OperationalError', 'ProgrammingError', 'ROWID', 'STRING', 'TIME',
    'TIMESTAMP', 'Warning', 'apilevel', 'connect', 'connections',
    'constants', 'converters', 'cursors', 'debug', 'escape', 'escape_dict',
    'escape_sequence', 'escape_string', 'get_client_info',
    'paramstyle', 'string_literal', 'threadsafety', 'version_info']
| apache-2.0 |
newville/scikit-image | doc/examples/plot_rag.py | 25 | 2139 | """
=======================
Region Adjacency Graphs
=======================
This example demonstrates the use of the `merge_nodes` function of a Region
Adjacency Graph (RAG). The `RAG` class represents a undirected weighted graph
which inherits from `networkx.graph` class. When a new node is formed by
merging two nodes, the edge weight of all the edges incident on the resulting
node can be updated by a user defined function `weight_func`.
The default behaviour is to use the smaller edge weight in case of a conflict.
The example below also shows how to use a custom function to select the larger
weight instead.
"""
from skimage.future.graph import rag
import networkx as nx
from matplotlib import pyplot as plt
import numpy as np
def max_edge(g, src, dst, n):
    """Callback that resolves merged edge weights by taking the maximum.

    Looks up the weights of edges (`src`, `n`) and (`dst`, `n`) in `g`;
    a missing edge contributes ``-np.inf`` so the existing weight wins.

    Parameters
    ----------
    g : RAG
        The graph under consideration.
    src, dst : int
        The vertices in `g` to be merged.
    n : int
        A neighbor of `src` or `dst` or both.

    Returns
    -------
    weight : float
        The larger of the two edge weights (or the single existing one).
    """
    absent = {'weight': -np.inf}
    weight_src = g[n].get(src, absent)['weight']
    weight_dst = g[n].get(dst, absent)['weight']
    return max(weight_src, weight_dst)
def display(g, title):
    """Open a matplotlib figure titled *title* showing graph *g* with
    its edge weights labelled."""
    layout = nx.circular_layout(g)
    plt.figure()
    plt.title(title)
    nx.draw(g, layout)
    nx.draw_networkx_edge_labels(g, layout, font_size=20)
# Build a small weighted RAG to demonstrate node merging.
g = rag.RAG()
g.add_edge(1, 2, weight=10)
g.add_edge(2, 3, weight=20)
g.add_edge(3, 4, weight=30)
g.add_edge(4, 1, weight=40)
g.add_edge(1, 3, weight=50)

# Assigning dummy labels.
for n in g.nodes():
    g.node[n]['labels'] = [n]

# Keep an untouched copy for the second (max-weight) merge demo.
gc = g.copy()

display(g, "Original Graph")

# Default merge keeps the smaller conflicting edge weight.
g.merge_nodes(1, 3)
display(g, "Merged with default (min)")

# Custom weight_func keeps the larger weight; in_place=False creates a
# new node instead of reusing `dst`.
gc.merge_nodes(1, 3, weight_func=max_edge, in_place=False)
display(gc, "Merged with max without in_place")

plt.show()
| bsd-3-clause |
astrofrog/ginga | ginga/gtkw/plugins/Pan.py | 1 | 10531 | #
# Pan.py -- Pan plugin for fits viewer
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import math
import gtk
from ginga.misc import Bunch
from ginga.gtkw import FitsImageCanvasGtk
from ginga.gtkw import FitsImageCanvasTypesGtk as CanvasTypes
from ginga import GingaPlugin
class Pan(GingaPlugin.GlobalPlugin):
    def __init__(self, fv):
        """Initialize per-channel state and register viewer callbacks."""
        # superclass defines some variables for us, like logger
        super(Pan, self).__init__(fv)

        self.channel = {}   # channel name -> Bunch of pan-image state
        self.active = None
        self.info = None

        fv.add_callback('add-channel', self.add_channel)
        fv.add_callback('delete-channel', self.delete_channel)
        fv.set_callback('active-image', self.focus_cb)
def build_gui(self, container):
nb = gtk.Notebook()
nb.set_group_id(-30)
nb.set_tab_pos(gtk.POS_BOTTOM)
nb.set_scrollable(False)
nb.set_show_tabs(False)
nb.set_show_border(False)
nb.show()
self.nb = nb
container.pack_start(self.nb, fill=True, expand=True)
def _create_pan_image(self):
width, height = 300, 300
# Uncomment to debug; passing parent logger generates too
# much noise in the main logger
#sfi = FitsImageCanvasGtk.FitsImageCanvas(logger=self.logger)
sfi = FitsImageCanvasGtk.FitsImageCanvas(logger=None)
sfi.enable_autozoom('on')
sfi.enable_pan(False)
#sfi.set_zoom_algorithm('rate')
sfi.enable_zoom(False)
sfi.enable_autocuts('off')
sfi.enable_draw(True)
sfi.set_drawtype('rectangle', linestyle='dash')
sfi.set_drawcolor('green')
sfi.set_callback('draw-event', self.draw_cb)
hand = sfi.get_cursor('pan')
sfi.define_cursor('pick', hand)
## sfi.enable_cuts(False)
sfi.set_bg(0.4, 0.4, 0.4)
sfi.set_callback('button-press', self.btndown)
sfi.set_callback('motion', self.panxy)
sfi.set_callback('scroll', self.zoom)
sfi.set_callback('configure', self.reconfigure)
iw = sfi.get_widget()
iw.set_size_request(width, height)
iw.show()
return sfi
def add_channel(self, viewer, chinfo):
panimage = self._create_pan_image()
chname = chinfo.name
iw = panimage.get_widget()
iw.show()
self.nb.append_page(iw, gtk.Label(chname))
index = self.nb.page_num(iw)
paninfo = Bunch.Bunch(panimage=panimage, widget=iw,
pancompass=None, panrect=None,
nbindex=index)
self.channel[chname] = paninfo
# Extract RGBMap object from main image and attach it to this
# pan image
fitsimage = chinfo.fitsimage
rgbmap = fitsimage.get_rgbmap()
panimage.set_rgbmap(rgbmap, redraw=False)
rgbmap.add_callback('changed', self.rgbmap_cb, panimage)
fitsimage.copy_attributes(panimage, ['cutlevels'])
fitsimage.add_callback('image-set', self.new_image_cb, chinfo, paninfo)
fitsimage.add_callback('pan-set', self.panset, chinfo, paninfo)
fitssettings = fitsimage.get_settings()
pansettings = panimage.get_settings()
zoomsettings = ['zoom_algorithm', 'zoom_rate', 'scale_x_base', 'scale_y_base']
fitssettings.shareSettings(pansettings, zoomsettings)
for key in zoomsettings:
pansettings.getSetting(key).add_callback('set', self.zoom_cb,
fitsimage, chinfo, paninfo)
xfrmsettings = ['flip_x', 'flip_y', 'swap_xy', 'locut', 'hicut']
fitssettings.shareSettings(pansettings, xfrmsettings)
for key in xfrmsettings:
pansettings.getSetting(key).add_callback('set', self.redraw_cb,
fitsimage, chinfo, paninfo, 0.5)
fitssettings.shareSettings(pansettings, ['rot_deg'])
pansettings.getSetting('rot_deg').add_callback('set', self.redraw_cb,
fitsimage, chinfo, paninfo, 0)
self.logger.debug("channel %s added." % (chinfo.name))
def delete_channel(self, viewer, chinfo):
self.logger.debug("TODO: delete channel %s" % (chinfo.name))
# CALLBACKS
def rgbmap_cb(self, rgbmap, panimage):
# color mapping has changed in some way
panimage.redraw(whence=1)
def new_image_cb(self, fitsimage, image, chinfo, paninfo):
loval, hival = fitsimage.get_cut_levels()
paninfo.panimage.cut_levels(loval, hival, redraw=False)
self.set_image(chinfo, paninfo, image)
def focus_cb(self, viewer, fitsimage):
chname = self.fv.get_channelName(fitsimage)
chinfo = self.fv.get_channelInfo(chname)
chname = chinfo.name
if self.active != chname:
index = self.channel[chname].nbindex
self.nb.set_current_page(index)
self.active = chname
self.info = self.channel[self.active]
def reconfigure(self, fitsimage, width, height):
self.logger.debug("new pan image dimensions are %dx%d" % (
width, height))
fitsimage.zoom_fit()
def redraw_cb(self, setting, value, deg, chinfo, paninfo, whence):
paninfo.panimage.redraw(whence=whence)
self.panset(chinfo.fitsimage, chinfo, paninfo)
return True
def zoom_cb(self, setting, value, fitsimage, chinfo, paninfo):
# refit the pan image, because scale factors may have changed
paninfo.panimage.zoom_fit(redraw=True)
# redraw pan info
self.panset(fitsimage, chinfo, paninfo)
return True
# LOGIC
def clear(self):
self.info.panimage.clear()
def set_image(self, chinfo, paninfo, image):
paninfo.panimage.set_image(image)
# remove old compass
try:
paninfo.panimage.deleteObjectByTag(paninfo.pancompass,
redraw=False)
except Exception:
pass
# create compass
try:
(x, y, xn, yn, xe, ye) = image.calc_compass_center()
self.logger.debug("x=%d y=%d xn=%d yn=%d xe=%d ye=%d" % (
x, y, xn, yn, xe, ye))
paninfo.pancompass = paninfo.panimage.add(CanvasTypes.Compass(
x, y, xn, yn, xe, ye, color='skyblue',
fontsize=14), redraw=True)
except Exception, e:
self.logger.warn("Can't calculate compass: %s" % (
str(e)))
self.panset(chinfo.fitsimage, chinfo, paninfo)
def panset(self, fitsimage, chinfo, paninfo):
x, y = fitsimage.get_pan()
points = fitsimage.get_pan_rect()
# calculate pan position point radius
image = paninfo.panimage.get_image()
width, height = image.get_size()
edgew = math.sqrt(width**2 + height**2)
radius = int(0.015 * edgew)
# Mark pan rectangle and pan position
try:
obj = paninfo.panimage.getObjectByTag(paninfo.panrect)
if obj.kind != 'compound':
return True
point, bbox = obj.objects
self.logger.debug("starting panset")
point.x, point.y = x, y
point.radius = radius
bbox.points = points
paninfo.panimage.redraw(whence=3)
except KeyError:
paninfo.panrect = paninfo.panimage.add(CanvasTypes.CompoundObject(
CanvasTypes.Point(x, y, radius=radius),
CanvasTypes.Polygon(points)))
def panxy(self, fitsimage, button, data_x, data_y):
"""Motion event in the small fits window. This is usually a panning
control for the big window, but if the button is not held down then
we just show the pointing information as usual.
"""
if button == 0:
bigimage = self.fv.getfocus_fitsimage()
return self.fv.showxy(bigimage, data_x, data_y)
elif button & 0x1:
# If button1 is held down this is a panning move in the small
# window for the big window
bigimage = self.fv.getfocus_fitsimage()
return bigimage.panset_xy(data_x, data_y)
return False
def btndown(self, fitsimage, button, data_x, data_y):
bigimage = self.fv.getfocus_fitsimage()
if button == 0x1:
bigimage.panset_xy(data_x, data_y)
elif button == 0x21:
bigimage.panset_xy(data_x, data_y, redraw=False)
def zoom(self, fitsimage, direction):
"""Scroll event in the small fits window. Just zoom the large fits
window.
"""
fitsimage = self.fv.getfocus_fitsimage()
if direction == 'up':
fitsimage.zoom_in()
elif direction == 'down':
fitsimage.zoom_out()
fitsimage.onscreen_message(fitsimage.get_scale_text(),
delay=1.0)
def draw_cb(self, fitsimage, tag):
# Get and delete the drawn object
obj = fitsimage.getObjectByTag(tag)
fitsimage.deleteObjectByTag(tag, redraw=True)
# determine center of drawn rectangle and set pan position
if obj.kind != 'rectangle':
return True
xc = (obj.x1 + obj.x2) / 2.0
yc = (obj.y1 + obj.y2) / 2.0
fitsimage = self.fv.getfocus_fitsimage()
# note: fitsimage <-- referring to large non-pan image
fitsimage.panset_xy(xc, yc, redraw=False)
# Determine appropriate zoom level to fit this rect
wd = obj.x2 - obj.x1
ht = obj.y2 - obj.y1
wwidth, wheight = fitsimage.get_window_size()
wd_scale = float(wwidth) / float(wd)
ht_scale = float(wheight) / float(ht)
scale = min(wd_scale, ht_scale)
self.logger.debug("wd_scale=%f ht_scale=%f scale=%f" % (
wd_scale, ht_scale, scale))
if scale < 1.0:
zoomlevel = - max(2, int(math.ceil(1.0/scale)))
else:
zoomlevel = max(1, int(math.floor(scale)))
self.logger.debug("zoomlevel=%d" % (zoomlevel))
fitsimage.zoom_to(zoomlevel, redraw=True)
def __str__(self):
return 'pan'
#END
| bsd-3-clause |
dankilman/pysource | docs/conf.py | 1 | 8198 | # -*- coding: utf-8 -*-
#
# pysource documentation build configuration file, created by
# sphinx-quickstart on Mon May 19 02:06:17 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# Sphinx configuration for the pysource documentation.  Generated by
# sphinx-quickstart; commented-out assignments document the defaults.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.viewcode'
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'pysource'
copyright = u'2014, Dan Kilman'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# NOTE(review): keep these in sync with the package version in setup.py.
# The short X.Y version.
version = '0.1.3'
# The full version, including alpha/beta/rc tags.
release = '0.1.3'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'pysourcedoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'pysource.tex', u'pysource Documentation',
     u'Dan Kilman', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'pysource', u'pysource Documentation',
     [u'Dan Kilman'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'pysource', u'pysource Documentation',
     u'Dan Kilman', 'pysource', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| apache-2.0 |
ShyamSS-95/Bolt | bolt/lib/nonlinear/tests/test_convert.py | 1 | 2702 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
The code needs to handle uptil 5D phase space. However, ArrayFire
sets a hard limit on the dimensionality of the array to be 4.
To surpass this, we internally use 2 different forms in which the array
can be stored:
- q_expanded - shape:(Nq1, Nq2, Np1 * Np2 * Np3)
- p_expanded - shape:(Nq1 * Nq2, Np1, Np2, Np3)
This file contains the test functions that are used to convert the arrays
from one from to another and viceversa
"""
import numpy as np
import arrayfire as af
from petsc4py import PETSc
from bolt.lib.nonlinear_solver.nonlinear_solver import nonlinear_solver
# Module-level aliases so the tests can call the solver's private reshape
# helpers directly as plain functions (passing the fixture object as `self`).
convert_to_p_expanded = nonlinear_solver._convert_to_p_expanded
convert_to_q_expanded = nonlinear_solver._convert_to_q_expanded
class test(object):
    """Stand-in for the nonlinear solver carrying randomized resolutions.

    Provides exactly the attributes (_da_f, N_q*, N_p*, N_ghost, ...) that
    the _convert_to_* helpers read from their ``self`` argument.

    NOTE(review): the lowercase name does not match pytest's ``Test*``
    collection pattern, so pytest will not try to collect it as a test class.
    """

    def __init__(self):
        # Random domain bounds (values themselves are irrelevant to the
        # reshape helpers; only the grid sizes below matter).
        self.q1_start = np.random.randint(0, 5)
        self.q2_start = np.random.randint(0, 5)

        self.q1_end = np.random.randint(5, 10)
        self.q2_end = np.random.randint(5, 10)

        # Randomized position-space resolution and ghost-zone width.
        self.N_q1 = np.random.randint(16, 32)
        self.N_q2 = np.random.randint(16, 32)

        self.N_ghost = np.random.randint(1, 5)

        # Randomized momentum-space resolution.
        self.N_p1 = np.random.randint(16, 32)
        self.N_p2 = np.random.randint(16, 32)
        self.N_p3 = np.random.randint(16, 32)

        # Distributed array with one DOF per momentum-space point.
        self._da_f = PETSc.DMDA().create([self.N_q1, self.N_q2],
                                         dof=(self.N_p1 * self.N_p2 * self.N_p3),
                                         stencil_width=self.N_ghost,
                                         )
def test_convert_to_p_expanded():
    """Check _convert_to_p_expanded against an explicit af.moddims reshape.

    A random q_expanded array of shape
    (N_q1 + 2g, N_q2 + 2g, N_p1*N_p2*N_p3) is converted and compared
    element-wise against the reference reshape.
    """
    obj = test()
    test_array = af.randu(obj.N_q1 + 2 * obj.N_ghost,
                          obj.N_q2 + 2 * obj.N_ghost,
                          obj.N_p1 * obj.N_p2 * obj.N_p3
                          )

    modified = convert_to_p_expanded(obj, test_array)
    expected = af.moddims(test_array,
                          obj.N_p1, obj.N_p2, obj.N_p3,
                          (obj.N_q1 + 2 * obj.N_ghost) *
                          (obj.N_q2 + 2 * obj.N_ghost)
                          )

    # Compare via the sum of *absolute* differences: a plain sum of the
    # difference can report zero even when elements disagree, because
    # positive and negative deviations cancel out.
    assert (af.sum(af.abs(modified - expected)) == 0)
def test_convert_to_q_expanded():
    """Check _convert_to_q_expanded against an explicit af.moddims reshape.

    A random p_expanded array of shape
    ((N_q1 + 2g)*(N_q2 + 2g), N_p1, N_p2, N_p3) is converted and compared
    element-wise against the reference reshape.
    """
    obj = test()
    test_array = af.randu( (obj.N_q1 + 2 * obj.N_ghost)
                          * (obj.N_q2 + 2 * obj.N_ghost),
                          obj.N_p1, obj.N_p2, obj.N_p3
                          )

    modified = convert_to_q_expanded(obj, test_array)
    expected = af.moddims(test_array,
                          obj.N_p1 * obj.N_p2 * obj.N_p3,
                          (obj.N_q1 + 2 * obj.N_ghost),
                          (obj.N_q2 + 2 * obj.N_ghost)
                          )

    # Compare via the sum of *absolute* differences: a plain sum of the
    # difference can report zero even when elements disagree, because
    # positive and negative deviations cancel out.
    assert (af.sum(af.abs(modified - expected)) == 0)
| gpl-3.0 |
katakumpo/nicepy | nicepy/decorators/wrap.py | 1 | 1281 | # -*- coding: utf-8 *-*
import inspect
__all__ = ['wrap_decorator']
class wrap_decorator(object):
    """
    Writing wrappers for instance methods can be a pain in the ass, so maybe
    this will grow into some helper which can deal with all kinds of
    functions, methods and classmethods some day.

    For now it's the base for the default_args decorator.

    Two usages are supported:

        @wrap_decorator            # bare: the function arrives via __init__
        def method(self, ...): ...

        @wrap_decorator(arg, k=1)  # with args: the function arrives via __call__
        def method(self, ...): ...

    NOTE(review): state (``instance``/``owner``) is stored on the shared
    decorator object in __get__, so the returned wrapper is only valid until
    the attribute is accessed on another instance — confirm single-threaded,
    access-then-call usage.
    """

    def __init__(self, *args, **kwargs):
        # In the bare usage args[0] is the wrapped function; it is consumed
        # lazily in __get__.  In the parametrized usage these are the
        # decorator's own arguments.
        self.func = None
        self.args = list(args)
        self.kwargs = kwargs
        self.func_attrs = []  # wrapped function's argument names (sans self)
        self.attrs = []       # names exposed to subclasses via get_attrs()

    def __call__(self, *args, **kwargs):
        # Only reached in the parametrized usage: receives the function.
        if not self.func:
            self.set_func_and_func_attrs(args[0])
        return self

    def __get__(self, instance, owner):
        # Descriptor protocol: remember the bound instance so wrap() can
        # forward it as the first argument.
        self.instance = instance
        self.owner = owner
        if not self.func:
            # Bare usage: the function was stashed as the first positional
            # argument; pull it out and drop it from the decorator args.
            self.set_func_and_func_attrs(self.args[0])
            self.args = self.args[1:]
        if not self.attrs:
            self.attrs = self.get_attrs()
        return self.wrap

    def set_func_and_func_attrs(self, func):
        """Remember *func* and cache its argument names (excluding self)."""
        self.func = func
        # Bug fix: inspect.getargspec() was removed in Python 3.11.  Prefer
        # getfullargspec() when available; index [0] is the positional-arg
        # name list in both APIs, so behavior is unchanged.
        argspec = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
        self.func_attrs = argspec(self.func)[0][1:]

    def get_attrs(self):
        # Copy so subclasses may extend the list without mutating func_attrs.
        return self.func_attrs[:]

    def wrap(self, *args, **kwargs):
        """The object handed out on attribute access; calls through to func."""
        return self.func(self.instance, *args, **kwargs)
| mit |
hoettges/QKan | qkan/config.py | 1 | 13375 | import enum
import json
import logging
import os
import site
import warnings
from pathlib import Path
from typing import Any, Dict
from qkan import enums
log = logging.getLogger("QKan.config")
class ConfigEncoder(json.JSONEncoder):
    """JSON encoder for the config tree.

    Enum members are flattened to their ``value``; any other non-primitive
    object is serialized via its ``__dict__`` (its instance attributes).
    """

    def default(self, o: Any) -> Any:
        if not isinstance(o, enum.Enum):
            return o.__dict__
        return o.value
class ClassObject:
    """Base class for typed config sections.

    Subclasses declare their fields as class-level annotations with default
    values.  __init__ consumes a (possibly nested) dict of loaded JSON and
    validates each value against the annotation, falling back to defaults on
    mismatch; __setattr__ enforces the same typing on later assignment.
    """

    def __init__(self, **kwargs: Any):
        annotation_keys = self.__annotations__.keys()

        # Assign values
        for key, value in kwargs.items():
            if key in annotation_keys:
                # Handle classes: a dict value for a ClassObject-typed field
                # is parsed recursively into that section class.
                if issubclass(self.__annotations__[key], ClassObject) and isinstance(
                    value, dict
                ):
                    # Try to parse class
                    setattr(self, key, self.__annotations__[key](**value))
                    continue

                # Handle enums, right now only string enums are supported
                if issubclass(self.__annotations__[key], enum.Enum) and isinstance(
                    value, str
                ):
                    try:
                        setattr(self, key, self.__annotations__[key](value))
                    except ValueError:
                        # Set default if the config's value does not exist in the enum
                        setattr(self, key, getattr(self, key))
                    continue

                # Type does not match annotation
                if type(value) is not self.__annotations__[key]:
                    # TODO: Notify user that setting has been reset/removed
                    if hasattr(self, key):
                        # A default exists: warn and reset to it.
                        log.warning(
                            f"{self.__class__.__name__}: Replaced setting {key} "
                            f"with default value {getattr(self, key)}, previously "
                            f"{value} ({type(value)})"
                        )
                        setattr(self, key, getattr(self, key))
                        continue
                    else:
                        # Default setting does not exist
                        raise TypeError(
                            f"{self.__class__.__name__}: Value of {key} ({value}, "
                            f"{type(value)}) does not match"
                            f" annotation's type ({self.__annotations__[key]})."
                        )

            # Silently ignore/preserve setting, even if it doesn't exist
            setattr(self, key, value)

        # Check if all values are set
        kwargs_keys = kwargs.keys()
        for key in annotation_keys:
            # Set default values so that they appear in __dict__
            if not hasattr(self, key) and key not in kwargs_keys:
                raise Exception(
                    f"{self.__class__.__name__}: No default setting was defined "
                    f"for {key}, please report this to the developers."
                )
            elif key not in kwargs_keys:
                # NOTE(review): this copies the class-level default object
                # reference into the instance; mutable defaults (lists) are
                # therefore shared between instances — confirm intended.
                setattr(self, key, getattr(self, key))

    def __getitem__(self, key: str) -> Any:
        # Legacy dict-style access; kept for backwards compatibility only.
        warnings.warn(
            f"{self.__class__.__name__}: The dict-like config has been replaced "
            f"with a class, use hasattr() instead.",
            DeprecationWarning,
            2,
        )
        return getattr(self, key)

    def __contains__(self, item: str) -> Any:
        # Legacy `key in config` support; kept for backwards compatibility.
        warnings.warn(
            f"{self.__class__.__name__}: The dict-like config has been replaced "
            f"with a class, use hasattr() instead.",
            DeprecationWarning,
            2,
        )
        return hasattr(self, item)

    def __setattr__(self, key: str, value: Any) -> None:
        # Allow setting anything in temporary storage
        # (subclasses define `allow_any` to opt out of validation entirely).
        if hasattr(self, "allow_any"):
            super().__setattr__(key, value)
            return

        # Verify type if in annotation
        if key in self.__annotations__ and type(value) is not self.__annotations__[key]:
            raise TypeError(
                f"{self.__class__.__name__}: Value of {key} ({value}, "
                f"{type(value)}) does not match"
                f" annotation's type ({self.__annotations__[key]})."
            )
        elif key not in self.__annotations__:
            # Undeclared keys are dropped (with a warning) rather than stored.
            log.warning(
                f"{self.__class__.__name__}: Dropping unknown value/key "
                f"combination in config {key}={value} ({type(value)})"
            )
            return
        super().__setattr__(key, value)

    def __str__(self) -> str:
        """Stable debug representation: sorted key=value pairs."""
        sorted_kv = sorted([f"{k}={v}" for k, v in self.__dict__.items()])
        return f"<{self.__class__.__name__} {' '.join(sorted_kv)}>"
class TempStorage(ClassObject):
    """
    Free-form storage section: anything may be assigned here and it will be
    written to the config file.
    """

    # No declared fields, so nothing is type-checked by ClassObject.
    __annotations__: Dict[str, Any] = {}
    # Sentinel checked by ClassObject.__setattr__ to bypass all validation.
    allow_any = True
class CheckExport(ClassObject):
    """Selection of the data sets to export.

    Under 'actions' the synchronisation method can be chosen; the actions
    are based on a comparison between source and target data.
    Caution: if a restriction to sub-catchments (Teilgebiete) is active,
    the synchronisation applies only to the selected sub-catchments!
    """

    # Tables containing geo objects
    schaechte: bool = True        # manholes
    auslaesse: bool = True        # outfalls
    speicher: bool = True         # storage structures
    haltungen: bool = True        # conduits/reaches
    pumpen: bool = True           # pumps
    wehre: bool = True            # weirs
    flaechen: bool = True         # areas
    einleitdirekt: bool = True    # direct inflows
    aussengebiete: bool = True    # external catchments
    einzugsgebiete: bool = True   # drainage basins
    tezg: bool = True             # sub-catchment areas

    # Reference tables
    abflussparameter: bool = True  # runoff parameters
    bodenklassen: bool = True      # soil classes
    rohrprofile: bool = True       # pipe profiles

    # Actions
    append: bool = True   # add records
    update: bool = False  # modify records
    synch: bool = False   # delete records
class CheckImport(ClassObject):
    """Selection of the data sets to import.

    Under 'actions' the synchronisation method can be chosen; the actions
    are based on a comparison between source and target data.
    Caution: if a restriction to sub-catchments (Teilgebiete) is active,
    the synchronisation applies only to the selected sub-catchments!
    """

    # Tables containing geo objects
    schaechte: bool = True        # manholes
    auslaesse: bool = True        # outfalls
    speicher: bool = True         # storage structures
    haltungen: bool = True        # conduits/reaches
    pumpen: bool = True           # pumps
    wehre: bool = True            # weirs
    flaechen: bool = True         # areas
    einleitdirekt: bool = True    # direct inflows
    aussengebiete: bool = True    # external catchments
    einzugsgebiete: bool = True   # drainage basins

    # Catchment areas from GIPS: three types kept in one table
    tezg_ef: bool = True
    tezg_hf: bool = True
    tezg_tf: bool = True

    # Reference tables
    abflussparameter: bool = True  # runoff parameters
    bodenklassen: bool = True      # soil classes
    rohrprofile: bool = True       # pipe profiles

    # Actions
    append: bool = True   # add records
    update: bool = False  # modify records
    synch: bool = False   # delete records
    allrefs: bool = False  # reference-table data: include unused reference values
class DatabaseConfig(ClassObject):
    """Location and backend type of the QKan project database."""

    # Path to the QKan database file
    qkan: str = ""
    # Database backend; defaults to the SpatiaLite file format
    type: enums.QKanDBChoice = enums.QKanDBChoice.SPATIALITE
class DynaConfig(ClassObject):
    """Settings for the DYNA (Kanal++) import/export."""

    autonummerierung: bool = False  # automatic renumbering of objects
    bef_choice: enums.BefChoice = enums.BefChoice.FLAECHEN  # source of paved-area data
    file: str = ""                  # DYNA data file
    prof_choice: enums.ProfChoice = enums.ProfChoice.PROFILNAME  # profile matching mode
    profile_ergaenzen: bool = True  # add missing pipe profiles
    template: str = ""              # template file for the export
class SWMMConfig(ClassObject):
    """Settings for the SWMM import/export."""

    autonummerierung: bool = False  # automatic renumbering of objects
    # bef_choice: enums.BefChoice = enums.BefChoice.FLAECHEN
    file: str = ""                  # SWMM input file
    # prof_choice: enums.ProfChoice = enums.ProfChoice.PROFILNAME
    profile_ergaenzen: bool = True  # add missing pipe profiles
    template: str = ""              # template file for the export
class LinkFlConfig(ClassObject):
    """Settings for creating area/conduit links (Linkflaechen)."""

    auswahltyp: enums.AuswahlTyp = enums.AuswahlTyp.WITHIN  # spatial selection predicate
    bezug_abstand: enums.BezugAbstand = enums.BezugAbstand.KANTE  # distance reference (edge/centroid)
    bufferradius: float = 0.0       # buffer radius in map units
    delete_geom_none: bool = True   # drop links without geometry
    links_in_tezg: bool = True      # restrict links to their sub-catchment
    suchradius: float = 50.0        # search radius in map units
class SelectionConfig(ClassObject):
    """Lists restricting import/export to selected subsets.

    NOTE(review): the ``list`` defaults are class-level mutable objects and
    ClassObject copies the *reference* into each instance, so all instances
    share them until a new list is assigned.  Harmless while a single Config
    singleton is used — confirm before creating additional instances.
    """

    abflussparameter: list = []          # selected runoff parameters
    flaechen_abflussparam: list = []     # selected area runoff parameters
    hal_entw: list = []                  # selected drainage systems (haltungen)
    teilgebiete: list = []               # selected sub-catchments
class HEConfig(ClassObject):
    """Settings for the Hystem-Extran (HE) import/export."""

    database: str = ""       # HE model database
    database_erg: str = ""   # HE results database
    # database_erg_fb: str = ""
    # database_fb: str = ""
    qml_choice: enums.QmlChoice = enums.QmlChoice.UEBH  # layer style for result display
    qml_file_results: str = ""  # custom QML style file for results
    template: str = ""          # template for the export database
    # template_fb: str = ""
class HE8Config(ClassObject):
    """Settings for the Hystem-Extran 8 (HE8) import/export."""

    database: str = ""  # QKan project database
    # database_erg: str = ""  # now: results_file
    qml_choice: enums.QmlChoice = enums.QmlChoice.UEBH  # layer style for result display
    qml_file_results: str = ""  # custom QML style file for results
    template: str = ""  # template for the export database
    import_file: str = ""  # import database *.idbm
    export_file: str = ""  # export database *.idbm
    results_file: str = ""  # results database *.idbr
class MUConfig(ClassObject):
    """Settings for the Mike Urban (MU) import/export."""

    database: str = ""  # QKan project database
    # database_erg: str = ""  # now: export_file
    qml_file_results: str = ""  # custom QML style file for results
    template: str = ""          # template for the export database
    import_file: str = ""  # import database *.sqlite
    export_file: str = ""  # export database *.sqlite
class ProjectConfig(ClassObject):
    """QGIS project file handling."""

    file: str = ""          # path of the QGIS project file
    save_file: bool = True  # save the project file after changes
    template: str = ""      # project template file
class AdaptConfig(ClassObject):
    """Options for adapting an existing QGIS project to QKan conventions."""

    add_missing_layers: bool = True  # add layers missing from the project
    database: bool = True            # adapt layer database connections
    forms: bool = True               # attach QKan attribute forms
    macros: bool = True              # attach project macros
    # presumably KBS = Koordinatenbezugssystem (CRS) — confirm
    kbs: bool = True
    qkan_db_update: bool = True      # upgrade the QKan database schema
    selected_layers: enums.SelectedLayers = enums.SelectedLayers.ALL  # which layers to adapt
    table_lookups: bool = True       # attach value-relation lookups
    update_node_type: bool = True    # recompute manhole/outfall node types
    zoom_all: bool = True            # zoom to full extent afterwards
class ToolsConfig(ClassObject):
    """Settings for the QKan tools (runoff-parameter computation etc.)."""

    class RunoffParams(ClassObject):
        """SQL formula catalogues for runoff parameters, keyed by model."""

        # TODO: Implement user choice of hard-coded and custom functions
        itwh: list = [
            "0.8693*log(area(geom))+ 5.6317",
            "pow(18.904*pow(neigkl,0.686)*area(geom), 0.2535*pow(neigkl,0.244))",
        ]
        dyna: list = [
            "0.02 * pow(abstand, 0.77) * pow(neigung, -0.385) + pow(2*0.02 * "
            "(abstand + fliesslaenge) / SQRT(neigung), 0.467)",
            "pow(2*0.10 * (abstand + fliesslaenge) / SQRT(neigung), 0.467)",
        ]
        maniak: list = [
            "0.02 * pow(abstand, 0.77) * pow(neigung, -0.385) + pow(2*0.02 * "
            "(abstand + fliesslaenge) / SQRT(neigung), 0.467)",
            "pow(2*0.10 * (abstand + fliesslaenge) / SQRT(neigung), 0.467)",
            "0.02 * pow(abstand, 0.77) * pow(neigung, -0.385) + pow(2*0.02 * abstand / SQRT(neigung), 0.467)",
            "pow(2*0.10 * abstand / SQRT(neigung), 0.467)",
            "0.02 * pow(abstand, 0.77) * pow(neigung, -0.385) + pow(2*0.02 * fliesslaenge / SQRT(neigung), 0.467)",
            "pow(2*0.10 * fliesslaenge / SQRT(neigung), 0.467)",
        ]

        def __str__(self) -> str:
            # Keep debug output short: the formula lists are long and static.
            return "<RunoffParams *hidden in __str__*>"

    apply_qkan_template: bool = True  # apply the QKan project template
    logeditor: str = ""               # external editor for log files
    # Manning roughness values; presumably bef = befestigt (paved),
    # dur = durchlässig (permeable) — confirm
    manningrauheit_bef: float = 0.02
    manningrauheit_dur: float = 0.10
    runoffmodeltype_choice: enums.RunOffModelType = (
        enums.RunOffModelType.SPEICHERKASKADE
    )
    runoffparamsfunctions: RunoffParams = RunoffParams()
    runoffparamstype_choice: enums.RunOffParamsType = enums.RunOffParamsType.MANIAK
class XmlConfig(ClassObject):
    """Settings for the XML (e.g. ISYBAU) import/export."""

    export_file: str = ""  # target XML file
    import_file: str = ""  # source XML file
    richt_choice: str = "" # direction/orientation choice — confirm semantics
    ordner_bild: str = ""  # folder containing inspection images
    ordner_video: str = "" # folder containing inspection videos
    # init_database: bool = True
class Config(ClassObject):
    """Root of the QKan configuration tree.

    Persisted as JSON to ``<user site>/qkan/qkan.json`` via save()/load().
    """

    autokorrektur: bool = True      # apply automatic data corrections
    epsg: int = 25832               # default CRS (ETRS89 / UTM 32N)
    fangradius: float = 0.1         # snap radius in map units
    max_loops: int = 1000           # iteration cap for repeated processing steps
    mindestflaeche: float = 0.5     # minimum area considered, in m²
    mit_verschneidung: bool = True  # intersect areas with sub-catchments
    # ---
    adapt: AdaptConfig = AdaptConfig()
    check_export: CheckExport = CheckExport()
    check_import: CheckImport = CheckImport()
    selections: SelectionConfig = SelectionConfig()
    database: DatabaseConfig = DatabaseConfig()
    dyna: DynaConfig = DynaConfig()
    swmm: SWMMConfig = SWMMConfig()
    he: HEConfig = HEConfig()
    he8: HE8Config = HE8Config()
    mu: MUConfig = MUConfig()
    linkflaechen: LinkFlConfig = LinkFlConfig()
    project: ProjectConfig = ProjectConfig()
    tools: ToolsConfig = ToolsConfig()
    xml: XmlConfig = XmlConfig()

    def __init__(self, **kwargs: Any):
        # Pure pass-through; kept so the top-level signature is explicit.
        super().__init__(**kwargs)

    def save(self) -> None:
        """Serialize this config to ``<user site>/qkan/qkan.json``.

        Raises OSError if the file cannot be written (after logging).
        """
        cfile = Path(site.getuserbase()) / "qkan" / "qkan.json"
        if not cfile.parent.exists():
            os.makedirs(cfile.parent)
        try:
            cfile.write_text(
                json.dumps(self, cls=ConfigEncoder, sort_keys=True, indent=4)
            )
        except OSError as e:
            log.exception("Failed to save config")
            raise e

    @staticmethod
    def load() -> "Config":
        """
        Load config from file or generate a new on based on defaults

        @raises json.JSONDecodeError, OSError upon failure
        """
        cfile = Path(site.getuserbase()) / "qkan" / "qkan.json"
        if cfile.exists():
            cjson = json.loads(cfile.read_text())
            return Config(**cjson)
        else:
            return Config()
| gpl-3.0 |
hub-cap/lady-rainicorn | rainicorn/openstack/common/pastedeploy.py | 1 | 5020 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from paste import deploy
from rainicorn.openstack.common import local
class BasePasteFactory(object):
    """Shared machinery for the paste.deploy app and filter factories.

    Subclasses must override the ``KEY`` class attribute (the name of the
    PasteDeploy local-config entry holding the target) and provide a
    ``__call__`` method.
    """

    KEY = None

    def __init__(self, data):
        self.data = data

    def _import_factory(self, local_conf):
        """Import and return the app/filter class named under ``KEY``.

        The entry is removed from *local_conf* so it is not forwarded to the
        app/filter constructor.  The ``<module>:<class>`` format is
        supported.  The value is stripped first because ConfigParser can
        hand back values with a leading newline (``key =\\n value``).
        """
        target = local_conf.pop(self.KEY).strip()
        mod_name, _, attr_name = target.rpartition(':')
        __import__(mod_name)
        return getattr(sys.modules[mod_name], attr_name)
class AppFactory(BasePasteFactory):
    """Generic paste.deploy app factory.

    Requires ``openstack.app_factory`` to be set to a callable that returns
    a WSGI app when invoked, named as ``<module>:<callable>``, e.g.::

        [app:myfooapp]
        paste.app_factory = openstack.common.pastedeploy:app_factory
        openstack.app_factory = myapp:Foo

    The WSGI app constructor must accept a data object and a local config
    dict as its two arguments.
    """

    KEY = 'openstack.app_factory'

    def __call__(self, global_conf, **local_conf):
        """The actual paste.app_factory protocol method."""
        app_cls = self._import_factory(local_conf)
        return app_cls(self.data, **local_conf)
class FilterFactory(AppFactory):
    """Generic paste.deploy filter factory.

    Requires ``openstack.filter_factory`` to be set to a callable that
    returns a WSGI filter when invoked, named as ``<module>:<callable>``,
    e.g.::

        [filter:myfoofilter]
        paste.filter_factory = openstack.common.pastedeploy:filter_factory
        openstack.filter_factory = myfilter:Foo

    The WSGI filter constructor must accept a WSGI app, a data object and
    a local config dict as its three arguments.
    """

    KEY = 'openstack.filter_factory'

    def __call__(self, global_conf, **local_conf):
        """The actual paste.filter_factory protocol method."""
        filter_cls = self._import_factory(local_conf)

        def make_filter(app):
            return filter_cls(app, self.data, **local_conf)

        return make_filter
def app_factory(global_conf, **local_conf):
    """Module-level paste.app_factory hook used with paste_deploy_app().

    Delegates to the AppFactory instance stashed on the thread-local
    store by paste_deploy_app().
    """
    factory = local.store.app_factory
    return factory(global_conf, **local_conf)
def filter_factory(global_conf, **local_conf):
    """Module-level paste.filter_factory hook used with paste_deploy_app().

    Delegates to the FilterFactory instance stashed on the thread-local
    store by paste_deploy_app().
    """
    factory = local.store.filter_factory
    return factory(global_conf, **local_conf)
def paste_deploy_app(paste_config_file, app_name, data):
    """Build a WSGI app from a PasteDeploy configuration.

    Loads the app with deploy.loadapp(), making sure the supplied
    ``data`` object reaches every app and filter constructed through
    the factories in this module.  A configuration wired up for that
    looks like:

        [app:myapp]
        paste.app_factory = openstack.common.pastedeploy:app_factory
        openstack.app_factory = myapp:App
        ...
        [filter:myfilter]
        paste.filter_factory = openstack.common.pastedeploy:filter_factory
        openstack.filter_factory = myapp:Filter

    with the referenced classes accepting the data object:

        class App(object):
            def __init__(self, data):
                ...
        class Filter(object):
            def __init__(self, app, data):
                ...

    :param paste_config_file: a PasteDeploy config file
    :param app_name: the name of the app/pipeline to load from the file
    :param data: a data object to supply to the app and its filters
    :returns: the WSGI app
    """
    # Park the factories on the thread-local store so the module-level
    # app_factory()/filter_factory() hooks can find them during loadapp().
    local.store.app_factory = AppFactory(data)
    local.store.filter_factory = FilterFactory(data)
    try:
        return deploy.loadapp("config:%s" % paste_config_file, name=app_name)
    finally:
        # Always clean the store up, even if loading blew up.
        del local.store.app_factory
        del local.store.filter_factory
| apache-2.0 |
lgarren/spack | var/spack/repos/builtin/packages/r-raster/package.py | 3 | 1767 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RRaster(RPackage):
    """Reading, writing, manipulating, analyzing and modeling of gridded
    spatial data. The package implements basic and high-level functions.
    Processing of very large files is supported."""
    # Canonical CRAN page and downloadable source tarball for the package.
    homepage = "http://cran.r-project.org/package=raster"
    url = "https://cran.r-project.org/src/contrib/raster_2.5-8.tar.gz"
    # Second argument is the MD5 checksum of the 2.5-8 tarball.
    version('2.5-8', '2a7db931c74d50516e82d04687c0a577')
    # Imports from the package DESCRIPTION; needed both to install
    # ('build') and to load the library at run time ('run').
    depends_on('r-sp', type=('build', 'run'))
    depends_on('r-rcpp', type=('build', 'run'))
| lgpl-2.1 |
pglass/locust | locust/clients.py | 3 | 11446 | import re
import time
from datetime import timedelta
from urlparse import urlparse, urlunparse
import requests
from requests import Response, Request
from requests.auth import HTTPBasicAuth
from requests.exceptions import (RequestException, MissingSchema,
InvalidSchema, InvalidURL)
import events
from exception import CatchResponseError, ResponseError
absolute_http_url_regexp = re.compile(r"^https?://", re.I)
class LocustResponse(Response):
    """Response whose raise_for_status() also surfaces connection-level
    errors captured by HttpSession._send_request_safe_mode()."""

    def raise_for_status(self):
        # A request that failed before any real HTTP response existed
        # carries the original exception on self.error; re-raise it.
        stored_error = getattr(self, 'error', None)
        if stored_error:
            raise stored_error
        Response.raise_for_status(self)
class HttpSession(requests.Session):
    """
    Class for performing web requests and holding (session-) cookies between requests (in order
    to be able to log in and out of websites). Each request is logged so that locust can display
    statistics.
    This is a slightly extended version of `python-requests <http://python-requests.org>`_'s
    :py:class:`requests.Session` class and mostly this class works exactly the same. However
    the methods for making requests (get, post, delete, put, head, options, patch, request)
    can now take a *url* argument that's only the path part of the URL, in which case the host
    part of the URL will be prepended with the HttpSession.base_url which is normally inherited
    from a Locust class' host property.
    Each of the methods for making requests also takes two additional optional arguments which
    are Locust specific and doesn't exist in python-requests. These are:
    :param name: (optional) An argument that can be specified to use as label in Locust's statistics instead of the URL path.
    This can be used to group different URL's that are requested into a single entry in Locust's statistics.
    :param catch_response: (optional) Boolean argument that, if set, can be used to make a request return a context manager
    to work as argument to a with statement. This will allow the request to be marked as a fail based on the content of the
    response, even if the response code is ok (2xx). The opposite also works, one can use catch_response to catch a request
    and then mark it as successful even if the response code was not (i.e 500 or 404).
    """
    def __init__(self, base_url, *args, **kwargs):
        requests.Session.__init__(self, *args, **kwargs)
        self.base_url = base_url
        # Check for basic authentication
        parsed_url = urlparse(self.base_url)
        if parsed_url.username and parsed_url.password:
            netloc = parsed_url.hostname
            if parsed_url.port:
                netloc += ":%d" % parsed_url.port
            # remove username and password from the base_url
            self.base_url = urlunparse((parsed_url.scheme, netloc, parsed_url.path, parsed_url.params, parsed_url.query, parsed_url.fragment))
            # configure requests to use basic auth
            self.auth = HTTPBasicAuth(parsed_url.username, parsed_url.password)
    def _build_url(self, path):
        """ prepend url with hostname unless it's already an absolute URL """
        if absolute_http_url_regexp.match(path):
            return path
        else:
            return "%s%s" % (self.base_url, path)
    def request(self, method, url, name=None, catch_response=False, **kwargs):
        """
        Constructs and sends a :py:class:`requests.Request`.
        Returns :py:class:`requests.Response` object.
        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param name: (optional) An argument that can be specified to use as label in Locust's statistics instead of the URL path.
        This can be used to group different URL's that are requested into a single entry in Locust's statistics.
        :param catch_response: (optional) Boolean argument that, if set, can be used to make a request return a context manager
        to work as argument to a with statement. This will allow the request to be marked as a fail based on the content of the
        response, even if the response code is ok (2xx). The opposite also works, one can use catch_response to catch a request
        and then mark it as successful even if the response code was not (i.e 500 or 404).
        :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
        :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
        :param files: (optional) Dictionary of 'filename': file-like-objects for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) Float describing the timeout of the request.
        :param allow_redirects: (optional) Boolean. Set to True by default.
        :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
        :param return_response: (optional) If False, an un-sent Request object will returned.
        :param config: (optional) A configuration dictionary. See ``request.defaults`` for allowed keys and their default values.
        :param stream: (optional) whether to immediately download the response content. Defaults to ``False``.
        :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
        :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
        """
        # prepend url with hostname unless it's already an absolute URL
        url = self._build_url(url)
        # store meta data that is used when reporting the request to locust's statistics
        request_meta = {}
        # set up pre_request hook for attaching meta data to the request object
        request_meta["start_time"] = time.time()
        response = self._send_request_safe_mode(method, url, **kwargs)
        # record the consumed time
        request_meta["response_time"] = int((time.time() - request_meta["start_time"]) * 1000)
        request_meta["method"] = response.request.method
        # if the request was redirected, report the originally requested path
        # (the first entry in the redirect history), not the final one
        request_meta["name"] = name or (response.history and response.history[0] or response).request.path_url
        # get the length of the content, but if the argument stream is set to True, we take
        # the size from the content-length header, in order to not trigger fetching of the body
        if kwargs.get("stream", False):
            request_meta["content_size"] = int(response.headers.get("content-length") or 0)
        else:
            request_meta["content_size"] = len(response.content or "")
        if catch_response:
            # defer success/failure reporting to the context manager
            response.locust_request_meta = request_meta
            return ResponseContextManager(response)
        else:
            # report immediately, based solely on the HTTP status code
            try:
                response.raise_for_status()
            except RequestException as e:
                events.request_failure.fire(
                    request_type=request_meta["method"],
                    name=request_meta["name"],
                    response_time=request_meta["response_time"],
                    exception=e,
                )
            else:
                events.request_success.fire(
                    request_type=request_meta["method"],
                    name=request_meta["name"],
                    response_time=request_meta["response_time"],
                    response_length=request_meta["content_size"],
                )
            return response
    def _send_request_safe_mode(self, method, url, **kwargs):
        """
        Send an HTTP request, and catch any exception that might occur due to connection problems.
        Safe mode has been removed from requests 1.x.
        Connection-level failures are converted into a LocustResponse with
        status_code 0 carrying the original exception on ``.error``, so the
        caller always gets a response-like object back.
        """
        try:
            return requests.Session.request(self, method, url, **kwargs)
        except (MissingSchema, InvalidSchema, InvalidURL):
            # malformed URLs are programming errors; let them propagate
            raise
        except RequestException as e:
            r = LocustResponse()
            r.error = e
            r.status_code = 0  # with this status_code, content returns None
            r.request = Request(method, url).prepare()
            return r
class ResponseContextManager(LocustResponse):
    """
    A Response class that also acts as a context manager that provides the ability to manually
    control if an HTTP request should be marked as successful or a failure in Locust's statistics
    This class is a subclass of :py:class:`Response <requests.Response>` with two additional
    methods: :py:meth:`success <locust.clients.ResponseContextManager.success>` and
    :py:meth:`failure <locust.clients.ResponseContextManager.failure>`.
    """
    # set to True once success() or failure() has fired an event, so the
    # default reporting in __exit__ is skipped
    _is_reported = False
    def __init__(self, response):
        # copy data from response to this object
        self.__dict__ = response.__dict__
    def __enter__(self):
        return self
    def __exit__(self, exc, value, traceback):
        if self._is_reported:
            # if the user has already manually marked this response as failure or success
            # we can ignore the default behaviour of letting the response code determine the outcome
            return exc is None
        if exc:
            if isinstance(value, ResponseError):
                # a deliberately raised ResponseError counts as a failure
                self.failure(value)
            else:
                # any other exception propagates to the caller
                return False
        else:
            # no manual verdict: fall back to the HTTP status code
            try:
                self.raise_for_status()
            except requests.exceptions.RequestException as e:
                self.failure(e)
            else:
                self.success()
        return True
    def success(self):
        """
        Report the response as successful
        Example::
            with self.client.get("/does/not/exist", catch_response=True) as response:
                if response.status_code == 404:
                    response.success()
        """
        events.request_success.fire(
            request_type=self.locust_request_meta["method"],
            name=self.locust_request_meta["name"],
            response_time=self.locust_request_meta["response_time"],
            response_length=self.locust_request_meta["content_size"],
        )
        self._is_reported = True
    def failure(self, exc):
        """
        Report the response as a failure.
        exc can be either a python exception, or a string in which case it will
        be wrapped inside a CatchResponseError.
        Example::
            with self.client.get("/", catch_response=True) as response:
                if response.content == "":
                    response.failure("No data")
        """
        if isinstance(exc, basestring):
            exc = CatchResponseError(exc)
        events.request_failure.fire(
            request_type=self.locust_request_meta["method"],
            name=self.locust_request_meta["name"],
            response_time=self.locust_request_meta["response_time"],
            exception=exc,
        )
        self._is_reported = True
| mit |
mugizico/scikit-learn | sklearn/decomposition/tests/test_sparse_pca.py | 142 | 5990 | # Author: Vlad Niculae
# License: BSD 3 clause
import sys
import numpy as np
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import SkipTest
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import if_not_mac_os
from sklearn.decomposition import SparsePCA, MiniBatchSparsePCA
from sklearn.utils import check_random_state
def generate_toy_data(n_components, n_samples, image_size, random_state=None):
    """Build a synthetic Y = U V + noise dataset whose dictionary atoms
    are small axis-aligned squares drawn on an image_size grid.

    Returns the noisy observations Y along with the true codes U and
    true components V.
    """
    n_features = image_size[0] * image_size[1]
    rng = check_random_state(random_state)
    U = rng.randn(n_samples, n_components)
    V = rng.randn(n_components, n_features)
    square_centers = [(3, 3), (6, 7), (8, 1)]
    half_sizes = [1, 2, 1]
    for comp in range(n_components):
        canvas = np.zeros(image_size)
        cx, cy = square_centers[comp]
        half = half_sizes[comp]
        canvas[cx - half:cx + half][:, cy - half:cy + half] = 1.0
        V[comp, :] = canvas.ravel()
    # Observations are the linear mixture plus Gaussian noise.
    Y = np.dot(U, V)
    Y += 0.1 * rng.randn(*Y.shape)
    return Y, U, V
# SparsePCA can be a bit slow. To avoid having test times go up, we
# test different aspects of the code in the same test
def test_correct_shapes():
    # components_ must be (n_components, n_features) and the transformed
    # data (n_samples, n_components), even when n_components > n_features.
    rng = np.random.RandomState(0)
    X = rng.randn(12, 10)
    spca = SparsePCA(n_components=8, random_state=rng)
    U = spca.fit_transform(X)
    assert_equal(spca.components_.shape, (8, 10))
    assert_equal(U.shape, (12, 8))
    # test overcomplete decomposition
    spca = SparsePCA(n_components=13, random_state=rng)
    U = spca.fit_transform(X)
    assert_equal(spca.components_.shape, (13, 10))
    assert_equal(U.shape, (12, 13))
def test_fit_transform():
    # The LARS and coordinate-descent solvers should converge to the same
    # components on this easy toy problem.
    alpha = 1
    rng = np.random.RandomState(0)
    Y, _, _ = generate_toy_data(3, 10, (8, 8), random_state=rng)  # wide array
    spca_lars = SparsePCA(n_components=3, method='lars', alpha=alpha,
                          random_state=0)
    spca_lars.fit(Y)
    # Test that CD gives similar results
    spca_lasso = SparsePCA(n_components=3, method='cd', random_state=0,
                           alpha=alpha)
    spca_lasso.fit(Y)
    assert_array_almost_equal(spca_lasso.components_, spca_lars.components_)
@if_not_mac_os()
def test_fit_transform_parallel():
    # Fitting with n_jobs=2 must reproduce the single-job transform exactly
    # and must not collapse the components to all zeros.
    alpha = 1
    rng = np.random.RandomState(0)
    Y, _, _ = generate_toy_data(3, 10, (8, 8), random_state=rng)  # wide array
    spca_lars = SparsePCA(n_components=3, method='lars', alpha=alpha,
                          random_state=0)
    spca_lars.fit(Y)
    U1 = spca_lars.transform(Y)
    # Test multiple CPUs
    spca = SparsePCA(n_components=3, n_jobs=2, method='lars', alpha=alpha,
                     random_state=0).fit(Y)
    U2 = spca.transform(Y)
    assert_true(not np.all(spca_lars.components_ == 0))
    assert_array_almost_equal(U1, U2)
def test_transform_nan():
    # Test that SparsePCA won't return NaN when there is 0 feature in all
    # samples.
    rng = np.random.RandomState(0)
    Y, _, _ = generate_toy_data(3, 10, (8, 8), random_state=rng)  # wide array
    Y[:, 0] = 0  # zero out the first feature in every sample
    estimator = SparsePCA(n_components=8)
    assert_false(np.any(np.isnan(estimator.fit_transform(Y))))
def test_fit_transform_tall():
    # Same LARS-vs-CD agreement check, but on a tall (n_samples > n_features)
    # data matrix.
    rng = np.random.RandomState(0)
    Y, _, _ = generate_toy_data(3, 65, (8, 8), random_state=rng)  # tall array
    spca_lars = SparsePCA(n_components=3, method='lars',
                          random_state=rng)
    U1 = spca_lars.fit_transform(Y)
    spca_lasso = SparsePCA(n_components=3, method='cd', random_state=rng)
    U2 = spca_lasso.fit(Y).transform(Y)
    assert_array_almost_equal(U1, U2)
def test_initialization():
    # With max_iter=0 no optimization step runs, so the fitted components
    # must be exactly the supplied V_init.
    rng = np.random.RandomState(0)
    U_init = rng.randn(5, 3)
    V_init = rng.randn(3, 4)
    model = SparsePCA(n_components=3, U_init=U_init, V_init=V_init, max_iter=0,
                      random_state=rng)
    model.fit(rng.randn(5, 4))
    assert_array_equal(model.components_, V_init)
def test_mini_batch_correct_shapes():
    # Same shape contract as test_correct_shapes, for the mini-batch variant.
    rng = np.random.RandomState(0)
    X = rng.randn(12, 10)
    pca = MiniBatchSparsePCA(n_components=8, random_state=rng)
    U = pca.fit_transform(X)
    assert_equal(pca.components_.shape, (8, 10))
    assert_equal(U.shape, (12, 8))
    # test overcomplete decomposition
    pca = MiniBatchSparsePCA(n_components=13, random_state=rng)
    U = pca.fit_transform(X)
    assert_equal(pca.components_.shape, (13, 10))
    assert_equal(U.shape, (12, 13))
def test_mini_batch_fit_transform():
    # NOTE: this test is deliberately skipped; everything below the raise is
    # currently unreachable but kept intact for when the skip is lifted.
    raise SkipTest("skipping mini_batch_fit_transform.")
    alpha = 1
    rng = np.random.RandomState(0)
    Y, _, _ = generate_toy_data(3, 10, (8, 8), random_state=rng)  # wide array
    spca_lars = MiniBatchSparsePCA(n_components=3, random_state=0,
                                   alpha=alpha).fit(Y)
    U1 = spca_lars.transform(Y)
    # Test multiple CPUs
    if sys.platform == 'win32':  # fake parallelism for win32
        import sklearn.externals.joblib.parallel as joblib_par
        _mp = joblib_par.multiprocessing
        joblib_par.multiprocessing = None
        try:
            U2 = MiniBatchSparsePCA(n_components=3, n_jobs=2, alpha=alpha,
                                    random_state=0).fit(Y).transform(Y)
        finally:
            # always restore the patched multiprocessing module
            joblib_par.multiprocessing = _mp
    else:  # we can efficiently use parallelism
        U2 = MiniBatchSparsePCA(n_components=3, n_jobs=2, alpha=alpha,
                                random_state=0).fit(Y).transform(Y)
    assert_true(not np.all(spca_lars.components_ == 0))
    assert_array_almost_equal(U1, U2)
    # Test that CD gives similar results
    spca_lasso = MiniBatchSparsePCA(n_components=3, method='cd', alpha=alpha,
                                    random_state=0).fit(Y)
    assert_array_almost_equal(spca_lasso.components_, spca_lars.components_)
| bsd-3-clause |
hoangt/tpzsimul.gem5 | tests/quick/se/02.insttest/test.py | 56 | 1707 | # Copyright (c) 2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
# Run the 'insttest' binary as the workload of the first CPU of the system
# built by the enclosing gem5 test configuration.
root.system.cpu[0].workload = LiveProcess(cmd = 'insttest',
                                          executable = binpath('insttest'))
| bsd-3-clause |
tensorflow/tfx | tfx/orchestration/launcher/base_component_launcher_test.py | 1 | 3244 | # Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.orchestration.component_launcher."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from unittest import mock
import tensorflow as tf
from tfx.dsl.io import fileio
from tfx.orchestration import data_types
from tfx.orchestration import metadata
from tfx.orchestration import publisher
from tfx.orchestration.launcher import in_process_component_launcher
from tfx.orchestration.launcher import test_utils
from tfx.types import channel_utils
from ml_metadata.proto import metadata_store_pb2
from tensorflow.python.lib.io import file_io # pylint: disable=g-direct-tensorflow-import
class ComponentRunnerTest(tf.test.TestCase):
  # Exercises BaseComponentLauncher end to end through the in-process
  # launcher: a fake component reads an input artifact from disk and the
  # test verifies the produced output artifact.
  @mock.patch.object(publisher, 'Publisher')
  def testRun(self, mock_publisher):
    # Stub out publishing so no execution metadata is actually written.
    mock_publisher.return_value.publish_execution.return_value = {}
    test_dir = os.path.join(
        os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()),
        self._testMethodName)
    # In-memory sqlite-backed MLMD connection.
    connection_config = metadata_store_pb2.ConnectionConfig()
    connection_config.sqlite.SetInParent()
    metadata_connection = metadata.Metadata(connection_config)
    pipeline_root = os.path.join(test_dir, 'Test')
    # Materialize the input artifact the fake component will consume.
    input_path = os.path.join(test_dir, 'input')
    fileio.makedirs(os.path.dirname(input_path))
    file_io.write_string_to_file(input_path, 'test')
    input_artifact = test_utils._InputArtifact()
    input_artifact.uri = input_path
    component = test_utils._FakeComponent(
        name='FakeComponent',
        input_channel=channel_utils.as_channel([input_artifact]))
    pipeline_info = data_types.PipelineInfo(
        pipeline_name='Test', pipeline_root=pipeline_root, run_id='123')
    driver_args = data_types.DriverArgs(enable_cache=True)
    # We use InProcessComponentLauncher to test BaseComponentLauncher logics.
    launcher = in_process_component_launcher.InProcessComponentLauncher.create(
        component=component,
        pipeline_info=pipeline_info,
        driver_args=driver_args,
        metadata_connection=metadata_connection,
        beam_pipeline_args=[],
        additional_pipeline_args={})
    self.assertEqual(
        launcher._component_info.component_type, '.'.join([
            test_utils._FakeComponent.__module__,
            test_utils._FakeComponent.__name__
        ]))
    launcher.launch()
    # The fake component copies its input; the output artifact must exist
    # and hold the same contents.
    output_path = component.outputs['output'].get()[0].uri
    self.assertTrue(fileio.exists(output_path))
    contents = file_io.read_file_to_string(output_path)
    self.assertEqual('test', contents)
# Allow running this test module directly.
if __name__ == '__main__':
  tf.test.main()
| apache-2.0 |
mxjl620/scikit-learn | examples/mixture/plot_gmm_classifier.py | 250 | 3918 | """
==================
GMM classification
==================
Demonstration of Gaussian mixture models for classification.
See :ref:`gmm` for more information on the estimator.
Plots predicted labels on both training and held out test data using a
variety of GMM classifiers on the iris dataset.
Compares GMMs with spherical, diagonal, full, and tied covariance
matrices in increasing order of performance. Although one would
expect full covariance to perform best in general, it is prone to
overfitting on small datasets and does not generalize well to held out
test data.
On the plots, train data is shown as dots, while test data is shown as
crosses. The iris dataset is four-dimensional. Only the first two
dimensions are shown here, and thus some points are separated in other
dimensions.
"""
print(__doc__)
# Author: Ron Weiss <ronweiss@gmail.com>, Gael Varoquaux
# License: BSD 3 clause
# $Id$
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
from sklearn import datasets
from sklearn.cross_validation import StratifiedKFold
from sklearn.externals.six.moves import xrange
from sklearn.mixture import GMM
def make_ellipses(gmm, ax):
    """Draw one covariance ellipse per mixture component onto *ax*.

    Each ellipse is oriented along the leading eigenvector of the
    component's 2-D covariance and colored to match the class scatter.
    """
    for idx, col in enumerate('rgb'):
        cov2d = gmm._get_covars()[idx][:2, :2]
        eigvals, eigvecs = np.linalg.eigh(cov2d)
        lead = eigvecs[0] / np.linalg.norm(eigvecs[0])
        angle_deg = 180 * np.arctan2(lead[1], lead[0]) / np.pi  # degrees
        axes_len = eigvals * 9  # scale the axes for visibility
        ellipse = mpl.patches.Ellipse(gmm.means_[idx, :2],
                                      axes_len[0], axes_len[1],
                                      180 + angle_deg, color=col)
        ellipse.set_clip_box(ax.bbox)
        ellipse.set_alpha(0.5)
        ax.add_artist(ellipse)
iris = datasets.load_iris()
# Break up the dataset into non-overlapping training (75%) and testing
# (25%) sets.
skf = StratifiedKFold(iris.target, n_folds=4)
# Only take the first fold.
train_index, test_index = next(iter(skf))
X_train = iris.data[train_index]
y_train = iris.target[train_index]
X_test = iris.data[test_index]
y_test = iris.target[test_index]
n_classes = len(np.unique(y_train))
# Try GMMs using different types of covariances.
classifiers = dict((covar_type, GMM(n_components=n_classes,
                    covariance_type=covar_type, init_params='wc', n_iter=20))
                   for covar_type in ['spherical', 'diag', 'tied', 'full'])
n_classifiers = len(classifiers)
plt.figure(figsize=(3 * n_classifiers / 2, 6))
plt.subplots_adjust(bottom=.01, top=0.95, hspace=.15, wspace=.05,
                    left=.01, right=.99)
# One subplot per covariance type, each showing ellipses plus train/test data.
for index, (name, classifier) in enumerate(classifiers.items()):
    # Since we have class labels for the training data, we can
    # initialize the GMM parameters in a supervised manner.
    classifier.means_ = np.array([X_train[y_train == i].mean(axis=0)
                                  for i in xrange(n_classes)])
    # Train the other parameters using the EM algorithm.
    classifier.fit(X_train)
    h = plt.subplot(2, n_classifiers / 2, index + 1)
    make_ellipses(classifier, h)
    # Train data as dots, one color per class (first two features only).
    for n, color in enumerate('rgb'):
        data = iris.data[iris.target == n]
        plt.scatter(data[:, 0], data[:, 1], 0.8, color=color,
                    label=iris.target_names[n])
    # Plot the test data with crosses
    for n, color in enumerate('rgb'):
        data = X_test[y_test == n]
        plt.plot(data[:, 0], data[:, 1], 'x', color=color)
    y_train_pred = classifier.predict(X_train)
    train_accuracy = np.mean(y_train_pred.ravel() == y_train.ravel()) * 100
    plt.text(0.05, 0.9, 'Train accuracy: %.1f' % train_accuracy,
             transform=h.transAxes)
    y_test_pred = classifier.predict(X_test)
    test_accuracy = np.mean(y_test_pred.ravel() == y_test.ravel()) * 100
    plt.text(0.05, 0.8, 'Test accuracy: %.1f' % test_accuracy,
             transform=h.transAxes)
    plt.xticks(())
    plt.yticks(())
    plt.title(name)
plt.legend(loc='lower right', prop=dict(size=12))
plt.show()
| bsd-3-clause |
pinterest/pinball | tests/pinball_ext/executor/local_executor_test.py | 6 | 2315 | # Copyright 2015, Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
from pinball_ext.executor import local_executor
# Module authorship/licensing metadata.
__author__ = 'Changshu Liu'
__copyright__ = 'Copyright 2015, Pinterest, Inc.'
__credits__ = [__author__]
__license__ = 'Apache'
__version__ = '2.0'
class LocalExecutorTest(unittest.TestCase):
    # Verifies that LocalExecutor parses its comma-joined config values
    # back into lists and shells out the exact expected hadoop command.
    @mock.patch('os.system')
    def test_hadoop_job(self, f1):
        user_jar_dirs = ['/dir1/jar1', '/dir2/jar2']
        user_app_jar = '/dir/jar.jar'
        user_archive = '/dir/file.archive'
        # List-valued settings are passed in as comma-joined strings.
        executor = local_executor.LocalExecutor(
            executor_config={
                'USER_LIBJAR_DIRS': ','.join(user_jar_dirs),
                'USER_APPJAR_PATH': user_app_jar,
                'USER_ARCHIVE_PATH': user_archive,
                'USER': 'test_user'
            }
        )
        executor.run_hadoop_job(
            'test_job_class',
            jobconf_args={
                'pinball.key1': 'value1',
                'pinball.key2': 'value2',
            },
            extra_args=[
                '-Dpinball.key3=value3'
            ])
        # The comma-joined strings must have been split back into lists.
        self.assertEqual(executor.config.USER_LIBJAR_DIRS, user_jar_dirs)
        self.assertEqual(executor.config.USER_APPJAR_PATH, user_app_jar)
        self.assertEqual(executor.config.USER_ARCHIVE_PATH, user_archive)
        # self.assertEqual(executor.config.PLATFORM, Platform.LOCAL)
        # The full shell command (classpath export, libjars list, jobconf
        # flags) must match exactly.
        f1.assert_called_once_with(
            "export HADOOP_CLASSPATH=/dir1/jar1/*:/dir2/jar2/*; "
            "hadoop jar /dir/jar.jar test_job_class "
            "-libjars `echo /dir1/jar1/*.jar /dir2/jar2/*.jar | tr ' ' ','` "
            "-Dmapred.job.name=test_user:AdHocCommand "
            "-Dpinball.key2=value2 "
            "-Dpinball.key1=value1 "
            "-Dpinball.key3=value3")
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/site-packages/spyderlib/utils/external/rope/refactor/rename.py | 32 | 9365 | import warnings
from rope.base import exceptions, pyobjects, pynames, taskhandle, evaluate, worder, codeanalyze
from rope.base.change import ChangeSet, ChangeContents, MoveResource
from rope.refactor import occurrences, sourceutils
class Rename(object):
    """A class for performing rename refactoring
    It can rename everything: classes, functions, modules, packages,
    methods, variables and keyword arguments.
    """
    def __init__(self, project, resource, offset=None):
        """If `offset` is None, the `resource` itself will be renamed"""
        self.project = project
        self.pycore = project.pycore
        self.resource = resource
        if offset is not None:
            # renaming the identifier at `offset`
            self.old_name = worder.get_name_at(self.resource, offset)
            this_pymodule = self.pycore.resource_to_pyobject(self.resource)
            self.old_instance, self.old_pyname = \
                evaluate.eval_location2(this_pymodule, offset)
            if self.old_pyname is None:
                raise exceptions.RefactoringError(
                    'Rename refactoring should be performed'
                    ' on resolvable python identifiers.')
        else:
            # renaming the module/package itself; a package is renamed
            # through its folder rather than its __init__.py file
            if not resource.is_folder() and resource.name == '__init__.py':
                resource = resource.parent
            dummy_pymodule = self.pycore.get_string_module('')
            self.old_instance = None
            self.old_pyname = pynames.ImportedModule(dummy_pymodule,
                                                     resource=resource)
            if resource.is_folder():
                self.old_name = resource.name
            else:
                # strip the '.py' suffix for module files
                self.old_name = resource.name[:-3]
    def get_old_name(self):
        return self.old_name
    def get_changes(self, new_name, in_file=None, in_hierarchy=False,
                    unsure=None, docs=False, resources=None,
                    task_handle=taskhandle.NullTaskHandle()):
        """Get the changes needed for this refactoring
        Parameters:
        - `in_hierarchy`: when renaming a method this keyword forces
          to rename all matching methods in the hierarchy
        - `docs`: when `True` rename refactoring will rename
          occurrences in comments and strings where the name is
          visible. Setting it will make renames faster, too.
        - `unsure`: decides what to do about unsure occurrences.
          If `None`, they are ignored. Otherwise `unsure` is
          called with an instance of `occurrence.Occurrence` as
          parameter. If it returns `True`, the occurrence is
          considered to be a match.
        - `resources` can be a list of `rope.base.resources.File`\s to
          apply this refactoring on. If `None`, the restructuring
          will be applied to all python files.
        - `in_file`: this argument has been deprecated; use
          `resources` instead.
        """
        # backwards compatibility: booleans used to be accepted for
        # `unsure`; wrap them in a constant-returning callable
        if unsure in (True, False):
            warnings.warn(
                'unsure parameter should be a function that returns '
                'True or False', DeprecationWarning, stacklevel=2)
            def unsure_func(value=unsure):
                return value
            unsure = unsure_func
        if in_file is not None:
            warnings.warn(
                '`in_file` argument has been deprecated; use `resources` '
                'instead. ', DeprecationWarning, stacklevel=2)
            if in_file:
                resources = [self.resource]
        # local names can only occur in their defining module
        if _is_local(self.old_pyname):
            resources = [self.resource]
        if resources is None:
            resources = self.pycore.get_python_files()
        changes = ChangeSet('Renaming <%s> to <%s>' %
                            (self.old_name, new_name))
        finder = occurrences.create_finder(
            self.pycore, self.old_name, self.old_pyname, unsure=unsure,
            docs=docs, instance=self.old_instance,
            in_hierarchy=in_hierarchy and self.is_method())
        job_set = task_handle.create_jobset('Collecting Changes', len(resources))
        for file_ in resources:
            job_set.started_job(file_.path)
            new_content = rename_in_module(finder, new_name, resource=file_)
            # None means the file contained no occurrences
            if new_content is not None:
                changes.add_change(ChangeContents(file_, new_content))
            job_set.finished_job()
        if self._is_renaming_a_module():
            resource = self.old_pyname.get_object().get_resource()
            if self._is_allowed_to_move(resources, resource):
                self._rename_module(resource, new_name, changes)
        return changes
    def _is_allowed_to_move(self, resources, resource):
        # only move a module/package if it is part of the refactored set;
        # a package counts as included when its __init__.py is
        if resource.is_folder():
            try:
                return resource.get_child('__init__.py') in resources
            except exceptions.ResourceNotFoundError:
                return False
        else:
            return resource in resources
    def _is_renaming_a_module(self):
        if isinstance(self.old_pyname.get_object(), pyobjects.AbstractModule):
            return True
        return False
    def is_method(self):
        # True when the renamed name is a function defined inside a class
        pyname = self.old_pyname
        return isinstance(pyname, pynames.DefinedName) and \
            isinstance(pyname.get_object(), pyobjects.PyFunction) and \
            isinstance(pyname.get_object().parent, pyobjects.PyClass)
    def _rename_module(self, resource, new_name, changes):
        # module files keep their '.py' suffix; package folders move as-is
        if not resource.is_folder():
            new_name = new_name + '.py'
        parent_path = resource.parent.path
        if parent_path == '':
            new_location = new_name
        else:
            new_location = parent_path + '/' + new_name
        changes.add_change(MoveResource(resource, new_location))
class ChangeOccurrences(object):
    """Replace occurrences of a name inside a single scope.

    Only the scope containing the offset handed to the constructor is
    touched, and the operation has no side-effects: changing occurrences
    of a module, for instance, does not rename the module itself — it
    merely replaces the references to it within that scope.  That makes
    this class useful for building many custom refactorings.
    """

    def __init__(self, project, resource, offset):
        self.pycore = project.pycore
        self.resource = resource
        self.offset = offset
        self.old_name = worder.get_name_at(resource, offset)
        self.pymodule = self.pycore.resource_to_pyobject(self.resource)
        self.old_pyname = evaluate.eval_location(self.pymodule, offset)

    def get_old_name(self):
        """Return the primary (dotted) expression at the stored offset."""
        finder = worder.Worder(self.resource.read())
        return finder.get_primary_at(self.offset)

    def _get_scope_offset(self):
        """Return (start, end) character offsets of the holding scope."""
        lines = self.pymodule.lines
        holding_scope = self.pymodule.get_scope().get_inner_scope_for_line(
            lines.get_line_number(self.offset))
        return (lines.get_line_start(holding_scope.get_start()),
                lines.get_line_end(holding_scope.get_end()))

    def get_changes(self, new_name, only_calls=False, reads=True, writes=True):
        """Build a ChangeSet replacing the occurrences with *new_name*."""
        changes = ChangeSet('Changing <%s> occurrences to <%s>' %
                            (self.old_name, new_name))
        scope_start, scope_end = self._get_scope_offset()
        occurrence_finder = occurrences.create_finder(
            self.pycore, self.old_name, self.old_pyname,
            imports=False, only_calls=only_calls)
        changed_source = rename_in_module(
            occurrence_finder, new_name, pymodule=self.pymodule,
            replace_primary=True, region=(scope_start, scope_end),
            reads=reads, writes=writes)
        if changed_source is not None:
            changes.add_change(ChangeContents(self.resource, changed_source))
        return changes
def rename_in_module(occurrences_finder, new_name, resource=None, pymodule=None,
                     replace_primary=False, region=None, reads=True, writes=True):
    """Return the changed source, or `None` when nothing changed."""
    if resource is not None:
        source = resource.read()
    else:
        source = pymodule.source_code
    collector = codeanalyze.ChangeCollector(source)
    for occurrence in occurrences_finder.find_occurrences(resource, pymodule):
        # Fixed primaries (e.g. the name after a dot) cannot be replaced
        # wholesale when primary replacement was requested.
        if replace_primary and occurrence.is_a_fixed_primary():
            continue
        if replace_primary:
            start, end = occurrence.get_primary_range()
        else:
            start, end = occurrence.get_word_range()
        is_write = occurrence.is_written()
        if (not reads and not is_write) or (not writes and is_write):
            continue
        if region is None or region[0] <= start < region[1]:
            collector.add_change(start, end, new_name)
    return collector.get_changed()
def _is_local(pyname):
    """Tell whether *pyname* is a name assigned inside a function scope."""
    module, lineno = pyname.get_definition_location()
    if lineno is None:
        return False
    holding_scope = module.get_scope().get_inner_scope_for_line(lineno)
    # A function/class name is "held" by its own scope; step out to the
    # scope where the name is actually visible.
    if isinstance(pyname, pynames.DefinedName) and \
       holding_scope.get_kind() in ('Function', 'Class'):
        holding_scope = holding_scope.parent
    return (holding_scope.get_kind() == 'Function' and
            pyname in list(holding_scope.get_names().values()) and
            isinstance(pyname, pynames.AssignedName))
| gpl-3.0 |
linusw/linux-mt7630e | tools/perf/scripts/python/sctop.py | 1996 | 2102 | # system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
# Command-line handling: optional [comm] filter and refresh [interval].
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
	sys.exit(usage)
if len(sys.argv) > 2:
	# Both args given: comm filter first, then interval.
	for_comm = sys.argv[1]
	interval = int(sys.argv[2])
elif len(sys.argv) > 1:
	# Single arg: an integer is an interval, anything else a comm name.
	try:
		interval = int(sys.argv[1])
	except ValueError:
		for_comm = sys.argv[1]
		interval = default_interval
# Per-syscall-id hit counters (autodict comes from the perf Core helpers).
syscalls = autodict()
def trace_begin():
	# Kick off the display loop on a background thread (Python 2 `thread`
	# module) so the trace callbacks below keep the main thread.
	thread.start_new_thread(print_syscall_totals, (interval,))
	pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, id, args):
	# Count one sys_enter per syscall id, optionally filtered by comm.
	if for_comm is not None:
		if common_comm != for_comm:
			return
	try:
		syscalls[id] += 1
	except TypeError:
		# First hit for this id: the autodict slot is not an int yet.
		syscalls[id] = 1
def syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	id, args):
	# Legacy (non-raw) tracepoint: forward to the raw handler.
	raw_syscalls__sys_enter(**locals())
def print_syscall_totals(interval):
	# Top-like display loop: clear, print counters sorted by count
	# descending, reset, sleep.  Runs forever on its own thread.
	while 1:
		clear_term()
		if for_comm is not None:
			print "\nsyscall events for %s:\n\n" % (for_comm),
		else:
			print "\nsyscall events:\n\n",
		print "%-40s %10s\n" % ("event", "count"),
		print "%-40s %10s\n" % ("----------------------------------------", \
			"----------"),
		for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
			reverse = True):
			try:
				print "%-40s %10d\n" % (syscall_name(id), val),
			except TypeError:
				# id could not be resolved/formatted; skip the row.
				pass
		syscalls.clear()
		time.sleep(interval)
| gpl-2.0 |
deepsrijit1105/edx-platform | cms/djangoapps/contentstore/context_processors.py | 25 | 1051 | """
Django Template Context Processor for CMS Online Contextual Help
"""
import ConfigParser
from django.conf import settings
from util.help_context_processor import common_doc_url
# Open and parse the configuration file when the module is initialized
# NOTE(review): the handle is never closed after readfp(); harmless for a
# module-lifetime read, but worth confirming nothing imports CONFIG_FILE.
CONFIG_FILE = open(settings.REPO_ROOT / "docs" / "cms_config.ini")
CONFIG = ConfigParser.ConfigParser()
CONFIG.readfp(CONFIG_FILE)
def doc_url(request=None):  # pylint: disable=unused-argument
    """
    This function is added in the list of TEMPLATES 'context_processors' OPTION, which is a django setting for
    a tuple of callables that take a request object as their argument and return a dictionary of items
    to be merged into the RequestContext.
    This function returns a dict with get_online_help_info, making it directly available to all mako templates.
    Args:
        request: Currently not used, but is passed by django to context processors.
            May be used in the future for determining the language of choice.
    Returns:
        dict: help-link context built from the module-level CONFIG parser.
    """
    return common_doc_url(request, CONFIG)
| agpl-3.0 |
ybellavance/python-for-android | python3-alpha/python3-src/PCbuild/build_tkinter.py | 47 | 2336 | """Script to compile the dependencies of _tkinter
Copyright (c) 2007 by Christian Heimes <christian@cheimes.de>
Licensed to PSF under a Contributor Agreement.
"""
import os
import sys
# Locations and versions of the Tcl/Tk/Tix sources to build.
here = os.path.abspath(os.path.dirname(__file__))
par = os.path.pardir
TCL = "tcl8.5.9"
TK = "tk8.5.9"
TIX = "tix-8.4.3.x"
# Checkout root: two directories above PCbuild/.
ROOT = os.path.abspath(os.path.join(here, par, par))
# Windows 2000 compatibility: WINVER 0x0500
# http://msdn2.microsoft.com/en-us/library/aa383745.aspx
# Template: makefile, extra NAME=value defines, nmake target.
NMAKE = ('nmake /nologo /f %s '
         'COMPILERFLAGS=\"-DWINVER=0x0500 -D_WIN32_WINNT=0x0500 -DNTDDI_VERSION=NTDDI_WIN2KSP4\" '
         '%s %s')
def nmake(makefile, command="", **kw):
    """Invoke ``nmake`` on *makefile* with an optional target.

    Keyword arguments become ``NAME=value`` macro definitions on the
    command line.  Raises RuntimeError when nmake exits non-zero.
    """
    macro_defs = ' '.join('%s=%s' % (k, v) for k, v in kw.items())
    cmd = NMAKE % (makefile, macro_defs, command)
    print("\n\n" + cmd + "\n")
    status = os.system(cmd)
    if status != 0:
        raise RuntimeError(cmd)
def build(platform, clean):
    """Build and install Tcl, Tk and Tix for *platform* ('Win32'/'AMD64').

    When *clean* is true, each makefile's 'clean' target runs first.
    """
    if platform == "Win32":
        dest = os.path.join(ROOT, "tcltk")
        machine = "X86"
    elif platform == "AMD64":
        dest = os.path.join(ROOT, "tcltk64")
        machine = "AMD64"
    else:
        raise ValueError(platform)
    # TCL
    tcldir = os.path.join(ROOT, TCL)
    if 1:
        os.chdir(os.path.join(tcldir, "win"))
        if clean:
            nmake("makefile.vc", "clean")
        nmake("makefile.vc", MACHINE=machine)
        nmake("makefile.vc", "install", INSTALLDIR=dest, MACHINE=machine)
    # TK
    if 1:
        os.chdir(os.path.join(ROOT, TK, "win"))
        if clean:
            nmake("makefile.vc", "clean", DEBUG=0, TCLDIR=tcldir)
        nmake("makefile.vc", DEBUG=0, MACHINE=machine, TCLDIR=tcldir)
        nmake("makefile.vc", "install", DEBUG=0, INSTALLDIR=dest, MACHINE=machine, TCLDIR=tcldir)
    # TIX
    if 1:
        # python9.mak is available at http://svn.python.org
        os.chdir(os.path.join(ROOT, TIX, "win"))
        if clean:
            nmake("python.mak", "clean")
        nmake("python.mak", MACHINE=machine, INSTALL_DIR=dest)
        nmake("python.mak", "install", MACHINE=machine, INSTALL_DIR=dest)
def main():
    """Parse the command line and build Tcl/Tk/Tix for the given platform.

    Usage: build_tkinter.py Win32|AMD64 [-c]  (-c cleans before building).
    """
    if len(sys.argv) < 2 or sys.argv[1] not in ("Win32", "AMD64"):
        print("%s Win32|AMD64" % sys.argv[0])
        sys.exit(1)
    clean = "-c" in sys.argv
    build(sys.argv[1], clean)

if __name__ == '__main__':
    main()
| apache-2.0 |
ClearCorp/odoo-clearcorp | TODO-8.0/cash_flow_report/cash_flow_type.py | 3 | 1791 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,orm
import decimal_precision as dp
from openerp.tools.translate import _
class cashFlowtype(orm.Model):
    """Catalog of cash-flow types, grouped into the three classic
    cash-flow-statement sections (operation/investment/financing)."""
    _name = "cash.flow.type"
    _description = "Cash Flow Type"
    # Old-style (pre-v8) OpenERP column declarations.
    _columns = {
        'name': fields.char('Name', size=128),
        'code': fields.char('Code', size=128),
        'type': fields.selection(
            [('operation','Operation'),
             ('investment','Investment'),
             ('financing','Financing')],
            'Type',),
    }
class accountCashflowType(orm.Model):
    """Extend account.account with a link to its cash-flow type."""
    _inherit = "account.account"
    _columns = {
        'cash_flow_type': fields.many2one('cash.flow.type', 'Cash Flow Type')
    }
| agpl-3.0 |
acshi/osf.io | scripts/migrate_comment_root_target.py | 11 | 3794 | """Comment.root_target should always point to a Guid. A few comments on production were missed by a migration and point
to TrashedFileNodes and restored StoredFileNodes instead. This migration points the root_target to the Guids for
those TrashedFileNodes.
"""
import logging
import sys
from modularodm import Q
from modularodm.exceptions import ModularOdmException
from framework.guid.model import Guid
from framework.transactions.context import TokuTransaction
from website.models import Comment, StoredFileNode, TrashedFileNode
from website.app import init_app
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
def get_file_node(_id):
    """Look up a file node by id, trying live records before trashed ones.

    Returns the node, or a falsy value (logged as an error) when neither
    collection has a record with this id.
    """
    filenode = StoredFileNode.load(_id) or TrashedFileNode.load(_id)
    if filenode:
        logger.info('Found filenode: {}'.format(filenode._id))
    else:
        logger.error('Could not find storedfilenode or trashedfilenode with id {}'.format(_id))
    return filenode
def get_guid(filenode):
    """Return the Guid whose referent is *filenode*, or None if absent."""
    try:
        return Guid.find_one(Q('referent', 'eq', filenode))
    except ModularOdmException:
        logger.error('No Guid found for filenode {}'.format(filenode._id))
        return None
def main():
    """Point comments whose root_target is a file node at the node's Guid."""
    # Any comment whose stored root_target tuple is not (id, 'guid').
    query = Comment.find(Q('root_target.1', 'ne', 'guid'))
    logger.info('Found {} comments whose root target is not a guid'.format(query.count()))
    migrated = 0
    for comment in query:
        root_target = comment.to_storage()['root_target']
        if root_target:
            logger.info('Root target for comment {}: {}'.format(comment._id, root_target))
            # Stored as an (id, collection-name) pair.
            _id, collection = root_target
            if collection == 'storedfilenode':
                filenode = get_file_node(_id)
                if filenode:
                    guid = get_guid(filenode)
                    if guid:
                        logger.info('Setting root_target to Guid {}'.format(guid._id))
                        comment.root_target = guid
                        comment.save()
                        migrated += 1
            else:
                logger.error('Unexpected root target: {}'.format(root_target))
        # If root_target is unset, look at the target field
        elif root_target is None:
            logger.info('Root target for comment {} is None'.format(comment._id))
            guid = comment.target
            if isinstance(guid.referent, (TrashedFileNode, StoredFileNode)):
                logger.info('Setting root_target to Guid {}'.format(guid._id))
                comment.root_target = guid
                comment.save()
                migrated += 1
            elif isinstance(guid.referent, Comment):
                logger.info('Comment {} has a comment target. It is a reply.'.format(comment._id))
                # Walk up the reply chain until the target is not a comment.
                found_root = False
                parent = guid.referent
                while not found_root:
                    if not isinstance(parent.target.referent, Comment):
                        found_root = True
                    else:
                        parent = parent.target.referent
                guid = parent.target
                logger.info('Setting root_target to Guid {}'.format(guid._id))
                comment.root_target = guid
                comment.save()
                migrated += 1
    logger.info('Successfully migrated {} comments'.format(migrated))
if __name__ == '__main__':
    # With --dry the transaction is aborted at the end, so the migration
    # runs but nothing is committed.
    dry = '--dry' in sys.argv
    if not dry:
        script_utils.add_file_logger(logger, __file__)
    init_app(routes=False, set_backends=True)
    with TokuTransaction():
        main()
        if dry:
            raise Exception('Dry Run -- Aborting Transaction')
| apache-2.0 |
abhitopia/tensorflow | tensorflow/tools/pip_package/simple_console_for_windows.py | 605 | 1028 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Start a simple interactive console with TensorFlow available."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import code
import sys
def main(_):
  """Run an interactive console.

  Args:
    _: unused; sys.argv is passed through by the entry point below.

  Returns:
    int: process exit status (always 0).
  """
  code.interact()
  return 0
if __name__ == '__main__':
  sys.exit(main(sys.argv))
| apache-2.0 |
mikrosimage/rez | src/rez/vendor/yaml/tokens.py | 985 | 2573 |
class Token(object):
    """Base class for scanner tokens; records the source start/end marks."""

    def __init__(self, start_mark, end_mark):
        self.start_mark = start_mark
        self.end_mark = end_mark

    def __repr__(self):
        # Show every attribute except the *_mark bookkeeping ones,
        # in sorted order, as name=value pairs.
        shown = sorted(key for key in self.__dict__
                       if not key.endswith('_mark'))
        arguments = ', '.join('%s=%r' % (key, getattr(self, key))
                              for key in shown)
        return '%s(%s)' % (self.__class__.__name__, arguments)
#class BOMToken(Token):
#    id = '<byte order mark>'
# Concrete token classes.  Each carries a class-level ``id`` string used
# by the scanner/parser when reporting which token was expected or found.
class DirectiveToken(Token):
    """A %YAML or %TAG directive: ``name`` plus its parsed ``value``."""
    id = '<directive>'
    def __init__(self, name, value, start_mark, end_mark):
        self.name = name
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
class DocumentStartToken(Token):
    id = '<document start>'
class DocumentEndToken(Token):
    id = '<document end>'
class StreamStartToken(Token):
    """First token of a stream; remembers the detected ``encoding``."""
    id = '<stream start>'
    def __init__(self, start_mark=None, end_mark=None,
            encoding=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.encoding = encoding
class StreamEndToken(Token):
    id = '<stream end>'
# Structural markers: no payload beyond the marks stored by Token.
class BlockSequenceStartToken(Token):
    id = '<block sequence start>'
class BlockMappingStartToken(Token):
    id = '<block mapping start>'
class BlockEndToken(Token):
    id = '<block end>'
class FlowSequenceStartToken(Token):
    id = '['
class FlowMappingStartToken(Token):
    id = '{'
class FlowSequenceEndToken(Token):
    id = ']'
class FlowMappingEndToken(Token):
    id = '}'
class KeyToken(Token):
    id = '?'
class ValueToken(Token):
    id = ':'
class BlockEntryToken(Token):
    id = '-'
class FlowEntryToken(Token):
    id = ','
class AliasToken(Token):
    """An ``*alias`` reference; ``value`` is the anchor name."""
    id = '<alias>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
class AnchorToken(Token):
    """An ``&anchor`` definition; ``value`` is the anchor name."""
    id = '<anchor>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
class TagToken(Token):
    """A ``!tag``; ``value`` is the (handle, suffix) pair."""
    id = '<tag>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
class ScalarToken(Token):
    """A scalar value; ``plain`` tells whether it was unquoted and
    ``style`` records the quoting/block style character (or None)."""
    id = '<scalar>'
    def __init__(self, value, plain, start_mark, end_mark, style=None):
        self.value = value
        self.plain = plain
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.style = style
| lgpl-3.0 |
kvar/ansible | test/units/modules/network/fortios/test_fortios_router_access_list6.py | 21 | 7717 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_router_access_list6
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Auto-applied fixture: patch the module's Connection class."""
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_router_access_list6.Connection')
    return connection_class_mock
# Shared handler under test; note it receives the fixture *function*
# object, which is fine because the connection is mocked in every test.
fos_instance = FortiOSHandler(connection_mock)
def test_router_access_list6_creation(mocker):
    """state=present with a successful set() reports a change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    input_data = {
        'username': 'admin',
        'state': 'present',
        'router_access_list6': {
            'comments': 'test_value_3',
            'name': 'default_name_4',
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_router_access_list6.fortios_router(input_data, fos_instance)
    expected_data = {
        'comments': 'test_value_3',
        'name': 'default_name_4',
    }
    set_method_mock.assert_called_with('router', 'access-list6', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_router_access_list6_creation_fails(mocker):
    """state=present with a failing set() reports an error, no change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    input_data = {
        'username': 'admin',
        'state': 'present',
        'router_access_list6': {
            'comments': 'test_value_3',
            'name': 'default_name_4',
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_router_access_list6.fortios_router(input_data, fos_instance)
    expected_data = {
        'comments': 'test_value_3',
        'name': 'default_name_4',
    }
    set_method_mock.assert_called_with('router', 'access-list6', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_router_access_list6_removal(mocker):
    """state=absent with a successful delete() reports a change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
    input_data = {
        'username': 'admin',
        'state': 'absent',
        'router_access_list6': {
            'comments': 'test_value_3',
            'name': 'default_name_4',
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_router_access_list6.fortios_router(input_data, fos_instance)
    delete_method_mock.assert_called_with('router', 'access-list6', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_router_access_list6_deletion_fails(mocker):
    """state=absent with a failing delete() reports an error, no change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
    input_data = {
        'username': 'admin',
        'state': 'absent',
        'router_access_list6': {
            'comments': 'test_value_3',
            'name': 'default_name_4',
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_router_access_list6.fortios_router(input_data, fos_instance)
    delete_method_mock.assert_called_with('router', 'access-list6', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_router_access_list6_idempotent(mocker):
    """A 404 from set() means nothing to do: error status but no change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    input_data = {
        'username': 'admin',
        'state': 'present',
        'router_access_list6': {
            'comments': 'test_value_3',
            'name': 'default_name_4',
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_router_access_list6.fortios_router(input_data, fos_instance)
    expected_data = {
        'comments': 'test_value_3',
        'name': 'default_name_4',
    }
    set_method_mock.assert_called_with('router', 'access-list6', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_router_access_list6_filter_foreign_attributes(mocker):
    """Attributes outside the module schema are stripped from the payload."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    input_data = {
        'username': 'admin',
        'state': 'present',
        'router_access_list6': {
            'random_attribute_not_valid': 'tag',
            'comments': 'test_value_3',
            'name': 'default_name_4',
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_router_access_list6.fortios_router(input_data, fos_instance)
    expected_data = {
        'comments': 'test_value_3',
        'name': 'default_name_4',
    }
    set_method_mock.assert_called_with('router', 'access-list6', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
| gpl-3.0 |
tambetm/neon | neon/datasets/mnist.py | 3 | 6299 | # ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
MNIST is a handwritten digit image dataset.
More info at: http://yann.lecun.com/exdb/mnist/
"""
import gzip
import logging
import numpy as np
import os
import struct
from neon.datasets.dataset import Dataset
from neon.util.compat import PY3, range
if PY3:
from urllib.parse import urljoin as basejoin
else:
from urllib import basejoin
logger = logging.getLogger(__name__)
class MNIST(Dataset):
    """
    Sets up an MNIST dataset.

    Attributes:
        raw_base_url (str): where to find the source data
        raw_train_input_gz (str): URL of the full path to raw train inputs
        raw_train_target_gz (str): URL of the full path to raw train targets
        raw_test_input_gz (str): URL of the full path to raw test inputs
        raw_test_target_gz (str): URL of the full path to raw test targets
        backend (neon.backends.Backend): backend used for this data
        inputs (dict): structure housing the loaded train/test/validation
                       input data
        targets (dict): structure housing the loaded train/test/validation
                        target data

    Keyword Args:
        repo_path (str, optional): where to locally host this dataset on disk
    """
    raw_base_url = 'http://yann.lecun.com/exdb/mnist/'
    raw_train_input_gz = basejoin(raw_base_url, 'train-images-idx3-ubyte.gz')
    raw_train_target_gz = basejoin(raw_base_url, 'train-labels-idx1-ubyte.gz')
    raw_test_input_gz = basejoin(raw_base_url, 't10k-images-idx3-ubyte.gz')
    raw_test_target_gz = basejoin(raw_base_url, 't10k-labels-idx1-ubyte.gz')

    def __init__(self, **kwargs):
        # Defaults; any keyword argument (e.g. repo_path, sample_pct)
        # overrides these via the __dict__ update below.
        self.num_test_sample = 10000
        self.macro_batched = False
        self.__dict__.update(kwargs)

    def initialize(self):
        # Nothing to do up front; all work happens lazily in load().
        pass

    def read_image_file(self, fname, dtype=None):
        """
        Carries out the actual reading of MNIST image files.

        Arguments:
            fname (str): path to an idx3-ubyte image file.
            dtype (optional): if given, cast images to this dtype and
                              scale pixel values into [0, 1].

        Returns:
            numpy.ndarray: (num_images, rows * cols) array of flat images.
        """
        with open(fname, 'rb') as f:
            magic, num_images, rows, cols = struct.unpack('>iiii', f.read(16))
            if magic != 2051:
                raise ValueError('invalid MNIST image file: ' + fname)
            full_image = np.fromfile(f, dtype='uint8').reshape((num_images,
                                                                rows * cols))
        if dtype is not None:
            dtype = np.dtype(dtype)
            full_image = full_image.astype(dtype)
            full_image /= 255.
        return full_image

    def read_label_file(self, fname):
        """
        Carries out the actual reading of MNIST label files.

        Returns:
            numpy.ndarray: 1-D uint8 array of class labels (0-9).
        """
        with open(fname, 'rb') as f:
            magic, num_labels = struct.unpack('>ii', f.read(8))
            if magic != 2049:
                raise ValueError('invalid MNIST label file:' + fname)
            array = np.fromfile(f, dtype='uint8')
        return array

    def load(self, backend=None, experiment=None):
        """
        Download (when missing), decompress and read the four MNIST files,
        populating self.inputs/self.targets with flattened float images and
        1-hot encoded labels.  No-op if train inputs are already loaded.
        """
        if self.inputs['train'] is not None:
            return
        if 'repo_path' not in self.__dict__:
            raise AttributeError('repo_path not specified in config')
        self.repo_path = os.path.expandvars(os.path.expanduser(
            self.repo_path))
        save_dir = os.path.join(self.repo_path,
                                self.__class__.__name__)
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        for url in (self.raw_train_input_gz, self.raw_train_target_gz,
                    self.raw_test_input_gz, self.raw_test_target_gz):
            # BUG FIX: the original used rstrip('.gz'), which strips any
            # trailing '.', 'g' or 'z' characters (a character set), not
            # the '.gz' suffix.  Slice the suffix off explicitly instead.
            name = os.path.basename(url)
            if name.endswith('.gz'):
                name = name[:-len('.gz')]
            repo_gz_file = os.path.join(save_dir, name + '.gz')
            repo_file = os.path.join(save_dir, name)
            if not os.path.exists(repo_file):
                self.download_to_repo(url, save_dir)
                # BUG FIX: decompress in binary mode; gzip yields bytes,
                # which cannot be written to a text-mode ('w') file on
                # Python 3.  'wb' behaves identically on Python 2.
                with gzip.open(repo_gz_file, 'rb') as infile:
                    with open(repo_file, 'wb') as outfile:
                        outfile.write(infile.read())
            logger.info('loading: %s', name)
            if 'images' in repo_file and 'train' in repo_file:
                indat = self.read_image_file(repo_file, 'float32')
                # flatten to 1D images
                self.inputs['train'] = indat
            elif 'images' in repo_file and 't10k' in repo_file:
                indat = self.read_image_file(repo_file, 'float32')
                self.inputs['test'] = indat[0:self.num_test_sample]
            elif 'labels' in repo_file and 'train' in repo_file:
                indat = self.read_label_file(repo_file)
                # Prep a 1-hot label encoding
                tmp = np.zeros((indat.shape[0], 10))
                for col in range(10):
                    tmp[:, col] = indat == col
                self.targets['train'] = tmp
            elif 'labels' in repo_file and 't10k' in repo_file:
                indat = self.read_label_file(
                    repo_file)[0:self.num_test_sample]
                tmp = np.zeros((self.num_test_sample, 10))
                for col in range(10):
                    tmp[:, col] = indat == col
                self.targets['test'] = tmp
            else:
                logger.error('problems loading: %s', name)
        if 'sample_pct' in self.__dict__:
            self.sample_training_data()
        self.format()
| apache-2.0 |
noamelf/Open-Knesset | video/management/commands/sub_commands/UpdateMembersRelatedVideos.py | 14 | 3347 | # encoding: utf-8
from video.management.commands.sub_commands import SubCommand
from video.utils.youtube import GetYoutubeVideos
from mks.models import Member
from video.utils.parse_dict import validate_dict
from video.utils import get_videos_queryset
from video.models import Video
class UpdateMembersRelatedVideos(SubCommand):
    """Search YouTube for videos mentioning each member and store new
    matches as 'related' Video records."""
    # NOTE(review): member_ids=[] is a mutable default; safe here because
    # it is only read, never mutated.
    def __init__(self,command,members=None,only_current_knesset=False,member_ids=[]):
        SubCommand.__init__(self,command)
        if members is None:
            # Resolve the member set: explicit ids > current knesset > all.
            if len(member_ids)>0:
                members=Member.objects.filter(id__in=member_ids)
            elif only_current_knesset is True:
                members=Member.current_knesset.filter(is_current=True)
                self._debug('only current knesset')
            else:
                members=Member.objects.all()
        self._debug('updating related videos for '+str(len(members))+' members')
        for member in members:
            self._debug(member.name)
            self._check_timer()
            relvids=[]
            # Search once per known name variant of the member.
            for name in member.names:
                self._debug(name)
                for video in self._getVideosForMember(name):
                    if self._verify_related_video(video,name):
                        relvids.append(video)
            if len(relvids)>0:
                for video in relvids:
                    self._update_member_related_video(member,video)
    def _getVideosForMember(self,name):
        # Exact-phrase search, limited to recent uploads.
        return self._getYoutubeVideos(q='"'+name+'"',max_results=15,limit_time='this_month')
    def _getYoutubeVideos(self,**kwargs):
        return GetYoutubeVideos(**kwargs).videos
    def _verify_related_video(self,video,name):
        # Accept a video only if it has all fields we persist and the
        # member's name appears in the title.
        if validate_dict(video,['title','description']):
            titledesc=video['title'] #+video['description']
            if (
                validate_dict(video,['embed_url_autoplay','thumbnail90x120','id','link','published'])
                and name in titledesc
                and video['published'] is not None
            ):
                return True
            else:
                return False
        else:
            return False
    def _getVideoFields(self,video,member):
        # Map the YouTube result dict onto Video model fields.
        return {
            'embed_link':video['embed_url_autoplay'],
            'small_image_link':video['thumbnail90x120'],
            'title':video['title'],
            'description':video['description'],
            'link':video['link'],
            'source_type':'youtube',
            'source_id':video['id'],
            'published':video['published'],
            'group':'related',
            'content_object':member
        }
    def _isMemberHaveVideo(self,member,video):
        # True when this member already has this youtube video (including
        # hidden ones, so re-runs do not resurrect hidden entries).
        return self._getMemberExistingVideosCount(
            ignoreHide=True, member=member,
            source_id=video['id'],
            source_type='youtube',
        )>0
    def _getMemberExistingVideosCount(self,ignoreHide,member,source_id,source_type):
        qs=get_videos_queryset(member,ignoreHide=ignoreHide)
        qs=qs.filter(source_id=source_id,source_type=source_type)
        return qs.count()
    def _saveVideo(self,videoFields):
        v=Video(**videoFields)
        v.save()
    def _update_member_related_video(self,member,video):
        # Persist only videos the member does not already have.
        if not self._isMemberHaveVideo(member, video):
            self._saveVideo(self._getVideoFields(video, member))
| bsd-3-clause |
chaluemwut/fbserver | venv/lib/python2.7/site-packages/scipy/fftpack/benchmarks/bench_pseudo_diffs.py | 18 | 6191 | """ Benchmark functions for fftpack.pseudo_diffs module
"""
from __future__ import division, print_function, absolute_import
import sys
from numpy import arange, sin, cos, pi, exp, tanh, sign
from numpy.testing import *
from scipy.fftpack import diff, fft, ifft, tilbert, hilbert, shift, fftfreq
def random(size):
    """Uniform random array with the given shape (thin wrapper around ``rand``).

    NOTE(review): ``rand`` arrives via ``from numpy.testing import *``; modern
    numpy no longer exports it -- confirm against the numpy version in use.
    """
    shape = tuple(size)
    return rand(*shape)
def direct_diff(x, k=1, period=None):
    """Differentiate the periodic sequence *x* ``k`` times via a plain FFT.

    Naive reference used to cross-check ``scipy.fftpack.diff``: multiply the
    spectrum by (2j*pi*j/period)**k and transform back, keeping the real part.
    """
    spectrum = fft(x)
    n = len(spectrum)
    if period is None:
        period = 2 * pi
    freqs = fftfreq(n) * 2j * pi / period * n
    if k < 0:
        # NOTE(review): ``1 / freqs**k`` with k < 0 equals freqs**(-k); if this
        # branch is meant to integrate, the exponent sign looks suspicious.
        # It is unused by the benchmark below (only k=3 is timed); kept as-is.
        kernel = 1 / freqs**k
        kernel[0] = 0.0
    else:
        kernel = freqs**k
    if n > 2000:
        # Damp the mid-band modes for long inputs, as the original did.
        kernel[250:n - 250] = 0.0
    return ifft(kernel * spectrum).real
def direct_tilbert(x, h=1, period=None):
    """Naive Tilbert transform: scale the spectrum by 1j/tanh(j*h*2*pi/period)."""
    spectrum = fft(x)
    n = len(spectrum)
    if period is None:
        period = 2 * pi
    w = fftfreq(n) * h * 2 * pi / period * n
    w[0] = 1  # placeholder so tanh(0) does not divide by zero below
    kernel = 1j / tanh(w)
    kernel[0] = 0j  # the mean mode is explicitly zeroed out
    return ifft(kernel * spectrum)
def direct_hilbert(x):
    """Naive Hilbert transform: multiply the spectrum by 1j*sign(frequency)."""
    spectrum = fft(x)
    n = len(spectrum)
    freqs = fftfreq(n) * n
    return ifft(1j * sign(freqs) * spectrum)
def direct_shift(x, a, period=None):
    """Naive spectral shift: compute x(t+a) by applying exp(1j*w*a) in Fourier space."""
    n = len(x)
    if period is None:
        phase = fftfreq(n) * 1j * n
    else:
        phase = fftfreq(n) * 2j * pi / period * n
    return ifft(fft(x) * exp(phase * a)).real
class TestDiff(TestCase):
    """Benchmark scipy.fftpack.diff against the naive direct_diff above."""
    def bench_random(self):
        # NOTE: measure() (from numpy.testing's star import) evaluates its
        # code-string argument in the caller's frame, so the local name `f`
        # must stay exactly `f`.
        print()
        print('Differentiation of periodic functions')
        print('=====================================')
        print(' size | convolve | naive')
        print('-------------------------------------')
        for size,repeat in [(100,1500),(1000,300),
                            (256,1500),
                            (512,1000),
                            (1024,500),
                            (2048,200),
                            (2048*2,100),
                            (2048*4,50),
                            ]:
            print('%6s' % size, end=' ')
            sys.stdout.flush()
            x = arange(size)*2*pi/size
            # Large sizes use a simpler test function (cheaper to evaluate).
            if size < 2000:
                f = sin(x)*cos(4*x)+exp(sin(3*x))
            else:
                f = sin(x)*cos(4*x)
            # Sanity-check that both implementations agree before timing.
            assert_array_almost_equal(diff(f,1),direct_diff(f,1))
            assert_array_almost_equal(diff(f,2),direct_diff(f,2))
            print('| %9.2f' % measure('diff(f,3)',repeat), end=' ')
            sys.stdout.flush()
            print('| %9.2f' % measure('direct_diff(f,3)',repeat), end=' ')
            sys.stdout.flush()
            print(' (secs for %s calls)' % (repeat))
class TestTilbert(TestCase):
    """Benchmark scipy.fftpack.tilbert against the naive direct_tilbert."""
    def bench_random(self):
        # NOTE: measure() evaluates its code string in this frame, so the
        # local name `f` must not be renamed.
        print()
        print(' Tilbert transform of periodic functions')
        print('=========================================')
        print(' size | optimized | naive')
        print('-----------------------------------------')
        for size,repeat in [(100,1500),(1000,300),
                            (256,1500),
                            (512,1000),
                            (1024,500),
                            (2048,200),
                            (2048*2,100),
                            (2048*4,50),
                            ]:
            print('%6s' % size, end=' ')
            sys.stdout.flush()
            x = arange(size)*2*pi/size
            if size < 2000:
                f = sin(x)*cos(4*x)+exp(sin(3*x))
            else:
                f = sin(x)*cos(4*x)
            # Sanity-check agreement before timing.
            assert_array_almost_equal(tilbert(f,1),direct_tilbert(f,1))
            print('| %9.2f' % measure('tilbert(f,1)',repeat), end=' ')
            sys.stdout.flush()
            print('| %9.2f' % measure('direct_tilbert(f,1)',repeat), end=' ')
            sys.stdout.flush()
            print(' (secs for %s calls)' % (repeat))
class TestHilbert(TestCase):
    """Benchmark scipy.fftpack.hilbert against the naive direct_hilbert."""
    def bench_random(self):
        # NOTE: measure() evaluates its code string in this frame, so the
        # local name `f` must not be renamed.
        print()
        print(' Hilbert transform of periodic functions')
        print('=========================================')
        print(' size | optimized | naive')
        print('-----------------------------------------')
        for size,repeat in [(100,1500),(1000,300),
                            (256,1500),
                            (512,1000),
                            (1024,500),
                            (2048,200),
                            (2048*2,100),
                            (2048*4,50),
                            ]:
            print('%6s' % size, end=' ')
            sys.stdout.flush()
            x = arange(size)*2*pi/size
            if size < 2000:
                f = sin(x)*cos(4*x)+exp(sin(3*x))
            else:
                f = sin(x)*cos(4*x)
            # Sanity-check agreement before timing.
            assert_array_almost_equal(hilbert(f),direct_hilbert(f))
            print('| %9.2f' % measure('hilbert(f)',repeat), end=' ')
            sys.stdout.flush()
            print('| %9.2f' % measure('direct_hilbert(f)',repeat), end=' ')
            sys.stdout.flush()
            print(' (secs for %s calls)' % (repeat))
class TestShift(TestCase):
    """Benchmark scipy.fftpack.shift against the naive direct_shift."""
    def bench_random(self):
        # NOTE: measure() evaluates its code string in this frame, so the
        # local names `f` and `a` must not be renamed.
        print()
        print(' Shifting periodic functions')
        print('==============================')
        print(' size | optimized | naive')
        print('------------------------------')
        for size,repeat in [(100,1500),(1000,300),
                            (256,1500),
                            (512,1000),
                            (1024,500),
                            (2048,200),
                            (2048*2,100),
                            (2048*4,50),
                            ]:
            print('%6s' % size, end=' ')
            sys.stdout.flush()
            x = arange(size)*2*pi/size
            a = 1
            # sf is the analytically shifted signal f(x + a).
            if size < 2000:
                f = sin(x)*cos(4*x)+exp(sin(3*x))
                sf = sin(x+a)*cos(4*(x+a))+exp(sin(3*(x+a)))
            else:
                f = sin(x)*cos(4*x)
                sf = sin(x+a)*cos(4*(x+a))
            # Sanity-check both implementations against the analytic shift.
            assert_array_almost_equal(direct_shift(f,1),sf)
            assert_array_almost_equal(shift(f,1),sf)
            print('| %9.2f' % measure('shift(f,a)',repeat), end=' ')
            sys.stdout.flush()
            print('| %9.2f' % measure('direct_shift(f,a)',repeat), end=' ')
            sys.stdout.flush()
            print(' (secs for %s calls)' % (repeat))
if __name__ == "__main__":
    # Run the benchmarks through numpy's test runner when executed directly.
    run_module_suite()
| apache-2.0 |
cdfassnacht/CodeCDF | python/plot_grade_hist.py | 1 | 2880 | """
A python program to plot a grade histogram
Usage: python plot_grade_hist.py [filename] [colname] [maxy] ([nscorecols])
Required inputs:
filename - text file containing either two columns (name total) or
three columns (name, score_multiple-choice, score_short-answer)
colname - the name of the column containing the score of interest.
NOTE: for the old-school text files, this will be 'col2'
_unless_ the old-school file also is in 3-column format, in which
case this parameter is ignored and the optional nscorecols
parameter should be set to 2.
If the input file is in CSV format, the colname parameter could
be something like 'Midterm 2 (32620)' or 'MT2' or 'Final Grade'
maxy - maximum value for y axis
Optional input:
nscorecols - number of score columns (1 for 2-column input, 2 for 3-column
input). ONLY set this if the input file is in the old-school
text format AND it is in 3-column format (i.e., with
nscorecols=2). If it is in the old-school text format but is
in the 2-column input, then DO NOT set this keyword, but just
set the colname variable above to 'col2'
"""
import numpy as n
from matplotlib import pyplot as p
import sys
import gradefuncs as gf
# Entry point: validate the command line, load the scores, plot the histogram.
if len(sys.argv) < 4:
    print('')
    print('ERROR: This program requires at least 3 input parameters:')
    print(' 1. infile - name of the input file containing scores')
    print(' 2. colname - name of column containing the relevant score if the')
    print(' input file is in csv format produced by smartsite or canvas or')
    print(' if it is in old-school text format with one total-score column')
    print(' In the second case (text format with one column of scores) the')
    print(' colname parameter should be set to "col2"')
    print(' 3. maxy - maximum y value for plot')
    print('It may also take an optional fourth parameter, which should ONLY BE')
    print(' USED if the file is BOTH in the old-school text format and has')
    print(' two columns with scores (one for multiple-choice and one for short')
    print(' answer), in which case, this parameter should be used and set to 2.')
    print('')
    print('Format: python plot_grade_hist.py infile colname maxy')
    print(' --- or ---')
    print('Format: python plot_grade_hist.py infile colname maxy 2')
    print('')
    sys.exit()
# NOTE(review): only the *presence* of a 4th argument is checked; its value is
# ignored even though the usage text says it should be set to 2 -- confirm.
if len(sys.argv) == 5:
    old_3col = True
else:
    old_3col = False
infile = sys.argv[1]
colname = sys.argv[2]
maxy = float(sys.argv[3])
# Old-school 3-column text files carry two score columns; anything else is
# read as a table keyed by the requested column name.
if old_3col:
    tot = gf.read_text(infile,2)
else:
    tot = gf.read_table(infile, colname)
if tot is None:
    # The reader already reported the problem; bail out quietly.
    print('Could not plot histogram')
    print('')
    sys.exit()
binsize = 3
gf.plot_tothist(infile,tot,maxy,binsize)
| mit |
deerwalk/voltdb | tests/sqlcoverage/config/joined-matview-config.py | 2 | 2330 | #!/usr/bin/env python
# This file is part of VoltDB.
# Copyright (C) 2008-2017 VoltDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
{
    # Tests of Materialized Views defined using Joins
    # Each entry maps a suite name to the schema, DDL, SQL template and
    # result normalizer that drive that sqlcoverage run.
    "joined-matview-default-full": {"schema": "joined-matview-schema.py",
                                    "ddl": "joined-matview-DDL.sql",
                                    "template": "joined-matview-default-full.sql",
                                    "normalizer": "normalizer.py"},
    "joined-matview-int": {"schema": "joined-matview-int-schema.py",
                           "ddl": "joined-matview-int-DDL.sql",
                           "template": "joined-matview-int.sql",
                           # Integer suite sorts NULLs lowest, hence the
                           # dedicated normalizer.
                           "normalizer": "nulls-lowest-normalizer.py"},
    "joined-matview-string": {"schema": "joined-matview-string-schema.py",
                              "ddl": "joined-matview-string-DDL.sql",
                              "template": "joined-matview-string.sql",
                              "normalizer": "normalizer.py"},
    "joined-matview-timestamp": {"schema": "joined-matview-timestamp-schema.py",
                                 "ddl": "joined-matview-timestamp-DDL.sql",
                                 "template": "joined-matview-timestamp.sql",
                                 "normalizer": "normalizer.py"},
}
| agpl-3.0 |
irvingprog/python-pilas-experimental | pilasengine/lanas/deslizador.py | 1 | 1568 | # -*- encoding: utf-8 -*-
# pilas engine: un motor para hacer videojuegos
#
# Copyright 2010-2014 - Hugo Ruscitti
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
from PyQt4 import QtGui
from PyQt4 import QtCore
class Deslizador(QtGui.QWidget):
    """Popup widget with a horizontal slider used to tweak a numeric literal
    in the editor; every slider change is reported through a callback."""
    def __init__(self, parent, cursor, valor_inicial, funcion_cuando_cambia):
        QtGui.QWidget.__init__(self, parent)
        # Callback invoked with the new value (as a string) on every change.
        self.funcion_cuando_cambia = funcion_cuando_cambia
        layout = QtGui.QGridLayout(self)
        slider = QtGui.QSlider(QtCore.Qt.Horizontal)
        slider.setMinimumWidth(200)
        # Float literals are scaled by 100 so the integer slider can edit them.
        if '.' in str(valor_inicial):
            valor_inicial = int(float(valor_inicial) * 100)
            slider.valueChanged[int].connect(self.on_change_float)
        else:
            valor_inicial = int(str(valor_inicial))
            slider.valueChanged[int].connect(self.on_change)
        # Editing range is +/-300 around the starting value.
        slider.setMaximum(valor_inicial + 300)
        slider.setMinimum(valor_inicial - 300)
        slider.setValue(valor_inicial)
        layout.addWidget(slider)
        layout.setContentsMargins(7, 7, 7, 7)
        self.setLayout(layout)
        self.adjustSize()
        # Popup flag: the widget dismisses itself when it loses focus.
        self.setWindowFlags(QtCore.Qt.Popup)
        # Place the popup right under the text cursor that triggered it.
        point = parent.cursorRect(cursor).bottomRight()
        global_point = parent.mapToGlobal(point)
        self.move(global_point)
    def on_change(self, valor):
        # Integer slider moved: forward the value as text.
        self.funcion_cuando_cambia(str(valor))
    def on_change_float(self, valor):
        # Float slider moved: undo the x100 scaling before reporting.
        valor = str(valor/100.0)
        self.funcion_cuando_cambia(str(valor))
caseyrollins/osf.io | api_tests/comments/views/test_comment_report_list.py | 17 | 17901 | from django.utils import timezone
import mock
import pytest
from addons.wiki.tests.factories import WikiFactory
from api.base.settings.defaults import API_BASE
from api_tests import utils as test_utils
from osf.models import Guid
from osf_tests.factories import (
ProjectFactory,
AuthUserFactory,
CommentFactory,
)
from rest_framework import exceptions
@pytest.mark.django_db
class CommentReportsMixin(object):
    """Shared tests for the comment-reports API endpoint.

    Concrete subclasses supply the project/comment/url fixtures so the same
    test battery runs against node, wiki and file comment targets.
    """
    @pytest.fixture()
    def user(self):
        # The user who files the spam report in the canned fixtures below.
        return AuthUserFactory()
    @pytest.fixture()
    def contributor(self):
        # The comment author (the reported user).
        return AuthUserFactory()
    @pytest.fixture()
    def non_contrib(self):
        return AuthUserFactory()
    @pytest.fixture()
    def payload(self, user):
        # JSON-API body for POSTing a spam report.
        return {
            'data': {
                'id': user._id,
                'type': 'comment_reports',
                'attributes': {
                    'category': 'spam',
                    'message': 'delicious spam'
                }
            }
        }
    # check if all necessary features are setup in subclass
    @pytest.fixture()
    def private_project(self):
        raise NotImplementedError
    @pytest.fixture()
    def comment(self):
        raise NotImplementedError
    @pytest.fixture()
    def private_url(self):
        raise NotImplementedError
    @pytest.fixture()
    def public_project(self):
        raise NotImplementedError
    @pytest.fixture()
    def public_comment(self):
        raise NotImplementedError
    @pytest.fixture()
    def public_url(self):
        raise NotImplementedError
    # NOTE(review): comment_level is declared abstract but never implemented
    # by the subclasses below nor requested by any test -- possibly vestigial.
    @pytest.fixture()
    def comment_level(self):
        raise NotImplementedError
    def test_private_node_view_reports_auth_misc(
            self, app, user, contributor, non_contrib, private_url):
        """Only the reporting user may see reports on a private node."""
        # test_private_node_logged_out_user_cannot_view_reports
        res = app.get(private_url, expect_errors=True)
        assert res.status_code == 401
        # test_private_node_logged_in_non_contrib_cannot_view_reports
        res = app.get(private_url, auth=non_contrib.auth, expect_errors=True)
        assert res.status_code == 403
        # test_private_node_only_reporting_user_can_view_reports
        res = app.get(private_url, auth=user.auth)
        assert res.status_code == 200
        report_json = res.json['data']
        report_ids = [report['id'] for report in report_json]
        assert len(report_json) == 1
        assert user._id in report_ids
        # test_private_node_reported_user_does_not_see_report
        res = app.get(private_url, auth=contributor.auth)
        assert res.status_code == 200
        report_json = res.json['data']
        report_ids = [report['id'] for report in report_json]
        assert len(report_json) == 0
        assert contributor._id not in report_ids
    def test_public_node_view_report_auth_misc(
            self, app, user, contributor, non_contrib, public_url):
        """On a public node, each viewer only sees their own reports."""
        # test_public_node_logged_out_user_cannot_view_reports
        res = app.get(public_url, expect_errors=True)
        assert res.status_code == 401
        # test_public_node_only_reporting_contributor_can_view_report
        res = app.get(public_url, auth=user.auth)
        assert res.status_code == 200
        report_json = res.json['data']
        report_ids = [report['id'] for report in report_json]
        assert len(report_json) == 1
        assert user._id in report_ids
        # test_public_node_reported_user_does_not_see_report
        res = app.get(public_url, auth=contributor.auth)
        assert res.status_code == 200
        report_json = res.json['data']
        report_ids = [report['id'] for report in report_json]
        assert len(report_json) == 0
        assert contributor._id not in report_ids
        # test_public_node_non_contrib_does_not_see_other_user_reports
        res = app.get(public_url, auth=non_contrib.auth, expect_errors=True)
        assert res.status_code == 200
        report_json = res.json['data']
        report_ids = [report['id'] for report in report_json]
        assert len(report_json) == 0
        assert non_contrib._id not in report_ids
    def test_public_node_non_contrib_reporter_can_view_own_report(
            self, app, non_contrib, public_comment, public_url):
        # Seed a report filed by the non-contributor directly on the model.
        public_comment.reports[non_contrib._id] = {
            'category': 'spam',
            'text': 'This is spam',
            'date': timezone.now(),
            'retracted': False,
        }
        public_comment.save()
        res = app.get(public_url, auth=non_contrib.auth)
        assert res.status_code == 200
        report_json = res.json['data']
        report_ids = [report['id'] for report in report_json]
        assert len(report_json) == 1
        assert non_contrib._id in report_ids
    def test_public_node_private_comment_level_non_contrib_cannot_see_reports(
            self, app, non_contrib, public_project, public_url):
        # Restricting comment_level to 'private' locks out non-contributors.
        public_project.comment_level = 'private'
        public_project.save()
        res = app.get(public_url, auth=non_contrib.auth, expect_errors=True)
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
    def test_invalid_report_comment(self, app, user, private_url):
        """Malformed report payloads are rejected with useful errors."""
        # test_report_comment_invalid_type
        payload = {
            'data': {
                'type': 'Not a valid type.',
                'attributes': {
                    'category': 'spam',
                    'message': 'delicious spam'
                }
            }
        }
        res = app.post_json_api(
            private_url, payload,
            auth=user.auth, expect_errors=True
        )
        assert res.status_code == 409
        # test_report_comment_no_type
        payload = {
            'data': {
                'type': '',
                'attributes': {
                    'category': 'spam',
                    'message': 'delicious spam'
                }
            }
        }
        res = app.post_json_api(
            private_url, payload,
            auth=user.auth, expect_errors=True
        )
        assert res.status_code == 400
        assert res.json['errors'][0]['detail'] == 'This field may not be blank.'
        assert res.json['errors'][0]['source']['pointer'] == '/data/type'
        # test_report_comment_invalid_spam_category
        category = 'Not a valid category'
        payload = {
            'data': {
                'type': 'comment_reports',
                'attributes': {
                    'category': category,
                    'message': 'delicious spam'
                }
            }
        }
        res = app.post_json_api(
            private_url, payload,
            auth=user.auth, expect_errors=True
        )
        assert res.status_code == 400
        assert res.json['errors'][0]['detail'] == '\"' + \
            category + '\"' + ' is not a valid choice.'
    def test_report_comment_allow_blank_message(
            self, app, user, contributor, private_project, comment):
        # A fresh, unreported comment so this user can file a new report.
        comment_new = CommentFactory(
            node=private_project,
            user=contributor,
            target=comment.target)
        url = '/{}comments/{}/reports/'.format(API_BASE, comment_new._id)
        payload = {
            'data': {
                'type': 'comment_reports',
                'attributes': {
                    'category': 'spam',
                    'message': ''
                }
            }
        }
        res = app.post_json_api(url, payload, auth=user.auth)
        assert res.status_code == 201
        assert res.json['data']['id'] == user._id
        assert res.json['data']['attributes']['message'] == payload['data']['attributes']['message']
    def test_private_node_report_comment_auth_misc(
            self, app, user, contributor,
            non_contrib, private_project,
            private_url, comment, payload
    ):
        """Only contributors may report comments on a private node."""
        # test_private_node_logged_out_user_cannot_report_comment
        res = app.post_json_api(private_url, payload, expect_errors=True)
        assert res.status_code == 401
        # test_private_node_logged_in_non_contrib_cannot_report_comment
        res = app.post_json_api(
            private_url, payload,
            auth=non_contrib.auth, expect_errors=True
        )
        assert res.status_code == 403
        # test_private_node_logged_in_contributor_can_report_comment
        comment_new = CommentFactory(
            node=private_project,
            user=contributor,
            target=comment.target)
        url = '/{}comments/{}/reports/'.format(API_BASE, comment_new._id)
        res = app.post_json_api(url, payload, auth=user.auth)
        assert res.status_code == 201
        assert res.json['data']['id'] == user._id
    def test_user_cannot_report_comment_condition(
            self, app, user, contributor, private_url, payload):
        # test_user_cannot_report_own_comment
        res = app.post_json_api(
            private_url, payload,
            auth=contributor.auth, expect_errors=True
        )
        assert res.status_code == 400
        assert res.json['errors'][0]['detail'] == 'You cannot report your own comment.'
        # test_user_cannot_report_comment_twice
        # User cannot report the comment again
        res = app.post_json_api(
            private_url, payload,
            auth=user.auth, expect_errors=True
        )
        assert res.status_code == 400
        assert res.json['errors'][0]['detail'] == 'Comment already reported.'
    def test_public_node_report_comment_auth_misc(
            self, app, user, contributor,
            non_contrib, public_project,
            public_url, public_comment, payload
    ):
        # def test_public_node_logged_out_user_cannot_report_comment(self):
        res = app.post_json_api(public_url, payload, expect_errors=True)
        assert res.status_code == 401
        # def test_public_node_contributor_can_report_comment(self):
        comment = CommentFactory(
            node=public_project,
            user=contributor,
            target=public_comment.target)
        url = '/{}comments/{}/reports/'.format(API_BASE, comment._id)
        res = app.post_json_api(url, payload, auth=user.auth)
        assert res.status_code == 201
        assert res.json['data']['id'] == user._id
        # def test_public_node_non_contrib_can_report_comment(self):
        # NOTE(review): the string below is a bare expression (a no-op), not a
        # docstring; kept byte-identical here.
        """ Test that when a public project allows any osf user to
        comment (comment_level == 'public), non-contributors
        can also report comments.
        """
        res = app.post_json_api(public_url, payload, auth=non_contrib.auth)
        assert res.status_code == 201
        assert res.json['data']['id'] == non_contrib._id
    def test_public_node_private_comment_level_non_contrib_cannot_report_comment(
            self, app, non_contrib, public_project, public_url):
        public_project.comment_level = 'private'
        public_project.save()
        res = app.get(public_url, auth=non_contrib.auth, expect_errors=True)
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
class TestCommentReportsView(CommentReportsMixin):
    """Runs the mixin's report tests against comments targeting a node."""
    # private_project_comment_reports
    @pytest.fixture()
    def private_project(self, user, contributor):
        private_project = ProjectFactory.create(is_public=False, creator=user)
        private_project.add_contributor(contributor=contributor, save=True)
        return private_project
    @pytest.fixture()
    def comment(self, user, contributor, private_project):
        # Comment by `contributor`, pre-reported as spam by `user`.
        comment = CommentFactory(node=private_project, user=contributor)
        comment.reports = comment.reports or {}
        comment.reports[user._id] = {
            'category': 'spam',
            'text': 'This is spam',
            'date': timezone.now(),
            'retracted': False,
        }
        comment.save()
        return comment
    @pytest.fixture()
    def private_url(self, user, comment):
        return '/{}comments/{}/reports/'.format(API_BASE, comment._id)
    # public_project_comment_reports
    @pytest.fixture()
    def public_project(self, user, contributor):
        public_project = ProjectFactory.create(
            is_public=True, creator=user, comment_level='public')
        public_project.add_contributor(contributor=contributor, save=True)
        return public_project
    @pytest.fixture()
    def public_comment(self, user, contributor, public_project):
        public_comment = CommentFactory(node=public_project, user=contributor)
        public_comment.reports = public_comment.reports or {}
        public_comment.reports[user._id] = {
            'category': 'spam',
            'text': 'This is spam',
            'date': timezone.now(),
            'retracted': False,
        }
        public_comment.save()
        return public_comment
    @pytest.fixture()
    def public_url(self, user, public_comment):
        return '/{}comments/{}/reports/'.format(API_BASE, public_comment._id)
class TestWikiCommentReportsView(CommentReportsMixin):
    """Runs the mixin's report tests against comments targeting a wiki page."""
    # private_project_comment_reports
    @pytest.fixture()
    def private_project(self, user, contributor):
        private_project = ProjectFactory.create(is_public=False, creator=user)
        private_project.add_contributor(contributor=contributor, save=True)
        return private_project
    @pytest.fixture()
    def wiki(self, user, private_project):
        # Patch search indexing so fixture creation stays offline.
        with mock.patch('osf.models.AbstractNode.update_search'):
            return WikiFactory(
                user=user,
                node=private_project,
            )
    @pytest.fixture()
    def comment(self, user, contributor, private_project, wiki):
        # Comment by `contributor` on the wiki, pre-reported by `user`.
        comment = CommentFactory(
            node=private_project,
            target=Guid.load(wiki._id),
            user=contributor
        )
        comment.reports = comment.reports or {}
        comment.reports[user._id] = {
            'category': 'spam',
            'text': 'This is spam',
            'date': timezone.now(),
            'retracted': False,
        }
        comment.save()
        return comment
    @pytest.fixture()
    def private_url(self, user, comment):
        return '/{}comments/{}/reports/'.format(API_BASE, comment._id)
    # public_project_comment_reports
    @pytest.fixture()
    def public_project(self, user, contributor):
        public_project = ProjectFactory.create(
            is_public=True, creator=user, comment_level='public')
        public_project.add_contributor(contributor=contributor, save=True)
        return public_project
    @pytest.fixture()
    def public_wiki(self, user, public_project):
        with mock.patch('osf.models.AbstractNode.update_search'):
            return WikiFactory(
                user=user,
                node=public_project,
            )
    @pytest.fixture()
    def public_comment(self, user, contributor, public_project, public_wiki):
        public_comment = CommentFactory(
            node=public_project,
            target=Guid.load(public_wiki._id),
            user=contributor
        )
        public_comment.reports = public_comment.reports or {}
        public_comment.reports[user._id] = {
            'category': 'spam',
            'text': 'This is spam',
            'date': timezone.now(),
            'retracted': False,
        }
        public_comment.save()
        return public_comment
    @pytest.fixture()
    def public_url(self, user, public_comment):
        return '/{}comments/{}/reports/'.format(API_BASE, public_comment._id)
class TestFileCommentReportsView(CommentReportsMixin):
    """Runs the mixin's report tests against comments targeting a file."""
    # private_project_comment_reports
    @pytest.fixture()
    def private_project(self, user, contributor):
        private_project = ProjectFactory.create(is_public=False, creator=user)
        private_project.add_contributor(contributor=contributor, save=True)
        return private_project
    @pytest.fixture()
    def file(self, user, private_project):
        return test_utils.create_test_file(private_project, user)
    @pytest.fixture()
    def comment(self, user, contributor, private_project, file):
        # Comment by `contributor` on the file, pre-reported by `user`.
        comment = CommentFactory(
            node=private_project,
            target=file.get_guid(),
            user=contributor)
        comment.reports = comment.reports or {}
        comment.reports[user._id] = {
            'category': 'spam',
            'text': 'This is spam',
            'date': timezone.now(),
            'retracted': False,
        }
        comment.save()
        return comment
    @pytest.fixture()
    def private_url(self, user, comment):
        return '/{}comments/{}/reports/'.format(API_BASE, comment._id)
    # public_project_comment_reports
    @pytest.fixture()
    def public_project(self, user, contributor):
        public_project = ProjectFactory.create(
            is_public=True, creator=user, comment_level='public')
        public_project.add_contributor(contributor=contributor, save=True)
        return public_project
    @pytest.fixture()
    def public_file(self, user, public_project):
        return test_utils.create_test_file(public_project, user)
    @pytest.fixture()
    def public_comment(self, user, contributor, public_project, public_file):
        public_comment = CommentFactory(
            node=public_project,
            target=public_file.get_guid(),
            user=contributor)
        public_comment.reports = public_comment.reports or {}
        public_comment.reports[user._id] = {
            'category': 'spam',
            'text': 'This is spam',
            'date': timezone.now(),
            'retracted': False,
        }
        public_comment.save()
        return public_comment
    @pytest.fixture()
    def public_url(self, user, public_comment):
        return '/{}comments/{}/reports/'.format(API_BASE, public_comment._id)
| apache-2.0 |
maxdeliso/elevatorSim | Lib/plat-freebsd4/IN.py | 374 | 7777 | # Generated by h2py from /usr/include/netinet/in.h
# Machine-generated (h2py) translation of <netinet/in.h>; do not hand-edit.
# NOTE(review): several translated C macros below are not usable as Python
# functions -- the `(u_int32_t)(i)` casts would call an undefined name, and
# the `def ...: return \` lines dangle into the following statement. They
# mirror the C macro text and fail if actually invoked; the constants are
# the only reliably usable part of this module.
IPPROTO_IP = 0
IPPROTO_HOPOPTS = 0
IPPROTO_ICMP = 1
IPPROTO_IGMP = 2
IPPROTO_GGP = 3
IPPROTO_IPV4 = 4
IPPROTO_IPIP = IPPROTO_IPV4
IPPROTO_TCP = 6
IPPROTO_ST = 7
IPPROTO_EGP = 8
IPPROTO_PIGP = 9
IPPROTO_RCCMON = 10
IPPROTO_NVPII = 11
IPPROTO_PUP = 12
IPPROTO_ARGUS = 13
IPPROTO_EMCON = 14
IPPROTO_XNET = 15
IPPROTO_CHAOS = 16
IPPROTO_UDP = 17
IPPROTO_MUX = 18
IPPROTO_MEAS = 19
IPPROTO_HMP = 20
IPPROTO_PRM = 21
IPPROTO_IDP = 22
IPPROTO_TRUNK1 = 23
IPPROTO_TRUNK2 = 24
IPPROTO_LEAF1 = 25
IPPROTO_LEAF2 = 26
IPPROTO_RDP = 27
IPPROTO_IRTP = 28
IPPROTO_TP = 29
IPPROTO_BLT = 30
IPPROTO_NSP = 31
IPPROTO_INP = 32
IPPROTO_SEP = 33
IPPROTO_3PC = 34
IPPROTO_IDPR = 35
IPPROTO_XTP = 36
IPPROTO_DDP = 37
IPPROTO_CMTP = 38
IPPROTO_TPXX = 39
IPPROTO_IL = 40
IPPROTO_IPV6 = 41
IPPROTO_SDRP = 42
IPPROTO_ROUTING = 43
IPPROTO_FRAGMENT = 44
IPPROTO_IDRP = 45
IPPROTO_RSVP = 46
IPPROTO_GRE = 47
IPPROTO_MHRP = 48
IPPROTO_BHA = 49
IPPROTO_ESP = 50
IPPROTO_AH = 51
IPPROTO_INLSP = 52
IPPROTO_SWIPE = 53
IPPROTO_NHRP = 54
IPPROTO_ICMPV6 = 58
IPPROTO_NONE = 59
IPPROTO_DSTOPTS = 60
IPPROTO_AHIP = 61
IPPROTO_CFTP = 62
IPPROTO_HELLO = 63
IPPROTO_SATEXPAK = 64
IPPROTO_KRYPTOLAN = 65
IPPROTO_RVD = 66
IPPROTO_IPPC = 67
IPPROTO_ADFS = 68
IPPROTO_SATMON = 69
IPPROTO_VISA = 70
IPPROTO_IPCV = 71
IPPROTO_CPNX = 72
IPPROTO_CPHB = 73
IPPROTO_WSN = 74
IPPROTO_PVP = 75
IPPROTO_BRSATMON = 76
IPPROTO_ND = 77
IPPROTO_WBMON = 78
IPPROTO_WBEXPAK = 79
IPPROTO_EON = 80
IPPROTO_VMTP = 81
IPPROTO_SVMTP = 82
IPPROTO_VINES = 83
IPPROTO_TTP = 84
IPPROTO_IGP = 85
IPPROTO_DGP = 86
IPPROTO_TCF = 87
IPPROTO_IGRP = 88
IPPROTO_OSPFIGP = 89
IPPROTO_SRPC = 90
IPPROTO_LARP = 91
IPPROTO_MTP = 92
IPPROTO_AX25 = 93
IPPROTO_IPEIP = 94
IPPROTO_MICP = 95
IPPROTO_SCCSP = 96
IPPROTO_ETHERIP = 97
IPPROTO_ENCAP = 98
IPPROTO_APES = 99
IPPROTO_GMTP = 100
IPPROTO_IPCOMP = 108
IPPROTO_PIM = 103
IPPROTO_PGM = 113
IPPROTO_DIVERT = 254
IPPROTO_RAW = 255
IPPROTO_MAX = 256
IPPROTO_DONE = 257
IPPORT_RESERVED = 1024
IPPORT_USERRESERVED = 5000
IPPORT_HIFIRSTAUTO = 49152
IPPORT_HILASTAUTO = 65535
IPPORT_RESERVEDSTART = 600
# Address-class macros: broken as Python (u_int32_t is undefined); see note.
def IN_CLASSA(i): return (((u_int32_t)(i) & 0x80000000) == 0)
IN_CLASSA_NET = 0xff000000
IN_CLASSA_NSHIFT = 24
IN_CLASSA_HOST = 0x00ffffff
IN_CLASSA_MAX = 128
def IN_CLASSB(i): return (((u_int32_t)(i) & 0xc0000000) == 0x80000000)
IN_CLASSB_NET = 0xffff0000
IN_CLASSB_NSHIFT = 16
IN_CLASSB_HOST = 0x0000ffff
IN_CLASSB_MAX = 65536
def IN_CLASSC(i): return (((u_int32_t)(i) & 0xe0000000) == 0xc0000000)
IN_CLASSC_NET = 0xffffff00
IN_CLASSC_NSHIFT = 8
IN_CLASSC_HOST = 0x000000ff
def IN_CLASSD(i): return (((u_int32_t)(i) & 0xf0000000) == 0xe0000000)
IN_CLASSD_NET = 0xf0000000
IN_CLASSD_NSHIFT = 28
IN_CLASSD_HOST = 0x0fffffff
def IN_MULTICAST(i): return IN_CLASSD(i)
def IN_EXPERIMENTAL(i): return (((u_int32_t)(i) & 0xf0000000) == 0xf0000000)
def IN_BADCLASS(i): return (((u_int32_t)(i) & 0xf0000000) == 0xf0000000)
INADDR_NONE = 0xffffffff
IN_LOOPBACKNET = 127
INET_ADDRSTRLEN = 16
IP_OPTIONS = 1
IP_HDRINCL = 2
IP_TOS = 3
IP_TTL = 4
IP_RECVOPTS = 5
IP_RECVRETOPTS = 6
IP_RECVDSTADDR = 7
IP_RETOPTS = 8
IP_MULTICAST_IF = 9
IP_MULTICAST_TTL = 10
IP_MULTICAST_LOOP = 11
IP_ADD_MEMBERSHIP = 12
IP_DROP_MEMBERSHIP = 13
IP_MULTICAST_VIF = 14
IP_RSVP_ON = 15
IP_RSVP_OFF = 16
IP_RSVP_VIF_ON = 17
IP_RSVP_VIF_OFF = 18
IP_PORTRANGE = 19
IP_RECVIF = 20
IP_IPSEC_POLICY = 21
IP_FAITH = 22
IP_FW_ADD = 50
IP_FW_DEL = 51
IP_FW_FLUSH = 52
IP_FW_ZERO = 53
IP_FW_GET = 54
IP_FW_RESETLOG = 55
IP_DUMMYNET_CONFIGURE = 60
IP_DUMMYNET_DEL = 61
IP_DUMMYNET_FLUSH = 62
IP_DUMMYNET_GET = 64
IP_DEFAULT_MULTICAST_TTL = 1
IP_DEFAULT_MULTICAST_LOOP = 1
IP_MAX_MEMBERSHIPS = 20
IP_PORTRANGE_DEFAULT = 0
IP_PORTRANGE_HIGH = 1
IP_PORTRANGE_LOW = 2
IPPROTO_MAXID = (IPPROTO_AH + 1)
IPCTL_FORWARDING = 1
IPCTL_SENDREDIRECTS = 2
IPCTL_DEFTTL = 3
IPCTL_DEFMTU = 4
IPCTL_RTEXPIRE = 5
IPCTL_RTMINEXPIRE = 6
IPCTL_RTMAXCACHE = 7
IPCTL_SOURCEROUTE = 8
IPCTL_DIRECTEDBROADCAST = 9
IPCTL_INTRQMAXLEN = 10
IPCTL_INTRQDROPS = 11
IPCTL_STATS = 12
IPCTL_ACCEPTSOURCEROUTE = 13
IPCTL_FASTFORWARDING = 14
IPCTL_KEEPFAITH = 15
IPCTL_GIF_TTL = 16
IPCTL_MAXID = 17
# Included from netinet6/in6.h
# Included from sys/queue.h
# The queue.h macro translations below all dangle (`return \` with no body).
def SLIST_HEAD_INITIALIZER(head): return \
def SLIST_ENTRY(type): return \
def STAILQ_HEAD_INITIALIZER(head): return \
def STAILQ_ENTRY(type): return \
def LIST_HEAD_INITIALIZER(head): return \
def LIST_ENTRY(type): return \
def TAILQ_HEAD_INITIALIZER(head): return \
def TAILQ_ENTRY(type): return \
def CIRCLEQ_ENTRY(type): return \
__KAME_VERSION = "20000701/FreeBSD-current"
IPV6PORT_RESERVED = 1024
IPV6PORT_ANONMIN = 49152
IPV6PORT_ANONMAX = 65535
IPV6PORT_RESERVEDMIN = 600
IPV6PORT_RESERVEDMAX = (IPV6PORT_RESERVED-1)
INET6_ADDRSTRLEN = 46
IPV6_ADDR_INT32_ONE = 1
IPV6_ADDR_INT32_TWO = 2
IPV6_ADDR_INT32_MNL = 0xff010000
IPV6_ADDR_INT32_MLL = 0xff020000
IPV6_ADDR_INT32_SMP = 0x0000ffff
IPV6_ADDR_INT16_ULL = 0xfe80
IPV6_ADDR_INT16_USL = 0xfec0
IPV6_ADDR_INT16_MLL = 0xff02
# NOTE(review): the following group redefines the constants above (these are
# the two byte-order variants from the C header); in Python the later
# assignments simply win.
IPV6_ADDR_INT32_ONE = 0x01000000
IPV6_ADDR_INT32_TWO = 0x02000000
IPV6_ADDR_INT32_MNL = 0x000001ff
IPV6_ADDR_INT32_MLL = 0x000002ff
IPV6_ADDR_INT32_SMP = 0xffff0000
IPV6_ADDR_INT16_ULL = 0x80fe
IPV6_ADDR_INT16_USL = 0xc0fe
IPV6_ADDR_INT16_MLL = 0x02ff
def IN6_IS_ADDR_UNSPECIFIED(a): return \
def IN6_IS_ADDR_LOOPBACK(a): return \
def IN6_IS_ADDR_V4COMPAT(a): return \
def IN6_IS_ADDR_V4MAPPED(a): return \
IPV6_ADDR_SCOPE_NODELOCAL = 0x01
IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
IPV6_ADDR_SCOPE_SITELOCAL = 0x05
IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
IPV6_ADDR_SCOPE_GLOBAL = 0x0e
__IPV6_ADDR_SCOPE_NODELOCAL = 0x01
__IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
__IPV6_ADDR_SCOPE_SITELOCAL = 0x05
__IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
__IPV6_ADDR_SCOPE_GLOBAL = 0x0e
def IN6_IS_ADDR_LINKLOCAL(a): return \
def IN6_IS_ADDR_SITELOCAL(a): return \
def IN6_IS_ADDR_MC_NODELOCAL(a): return \
def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
def IN6_IS_ADDR_MC_SITELOCAL(a): return \
def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
def IN6_IS_ADDR_MC_GLOBAL(a): return \
def IN6_IS_ADDR_MC_NODELOCAL(a): return \
def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
def IN6_IS_ADDR_MC_SITELOCAL(a): return \
def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
def IN6_IS_ADDR_MC_GLOBAL(a): return \
def IN6_IS_SCOPE_LINKLOCAL(a): return \
IPV6_OPTIONS = 1
IPV6_RECVOPTS = 5
IPV6_RECVRETOPTS = 6
IPV6_RECVDSTADDR = 7
IPV6_RETOPTS = 8
IPV6_SOCKOPT_RESERVED1 = 3
IPV6_UNICAST_HOPS = 4
IPV6_MULTICAST_IF = 9
IPV6_MULTICAST_HOPS = 10
IPV6_MULTICAST_LOOP = 11
IPV6_JOIN_GROUP = 12
IPV6_LEAVE_GROUP = 13
IPV6_PORTRANGE = 14
ICMP6_FILTER = 18
IPV6_PKTINFO = 19
IPV6_HOPLIMIT = 20
IPV6_NEXTHOP = 21
IPV6_HOPOPTS = 22
IPV6_DSTOPTS = 23
IPV6_RTHDR = 24
IPV6_PKTOPTIONS = 25
IPV6_CHECKSUM = 26
IPV6_BINDV6ONLY = 27
IPV6_IPSEC_POLICY = 28
IPV6_FAITH = 29
IPV6_FW_ADD = 30
IPV6_FW_DEL = 31
IPV6_FW_FLUSH = 32
IPV6_FW_ZERO = 33
IPV6_FW_GET = 34
IPV6_RTHDR_LOOSE = 0
IPV6_RTHDR_STRICT = 1
IPV6_RTHDR_TYPE_0 = 0
IPV6_DEFAULT_MULTICAST_HOPS = 1
IPV6_DEFAULT_MULTICAST_LOOP = 1
IPV6_PORTRANGE_DEFAULT = 0
IPV6_PORTRANGE_HIGH = 1
IPV6_PORTRANGE_LOW = 2
IPV6PROTO_MAXID = (IPPROTO_PIM + 1)
IPV6CTL_FORWARDING = 1
IPV6CTL_SENDREDIRECTS = 2
IPV6CTL_DEFHLIM = 3
IPV6CTL_DEFMTU = 4
IPV6CTL_FORWSRCRT = 5
IPV6CTL_STATS = 6
IPV6CTL_MRTSTATS = 7
IPV6CTL_MRTPROTO = 8
IPV6CTL_MAXFRAGPACKETS = 9
IPV6CTL_SOURCECHECK = 10
IPV6CTL_SOURCECHECK_LOGINT = 11
IPV6CTL_ACCEPT_RTADV = 12
IPV6CTL_KEEPFAITH = 13
IPV6CTL_LOG_INTERVAL = 14
IPV6CTL_HDRNESTLIMIT = 15
IPV6CTL_DAD_COUNT = 16
IPV6CTL_AUTO_FLOWLABEL = 17
IPV6CTL_DEFMCASTHLIM = 18
IPV6CTL_GIF_HLIM = 19
IPV6CTL_KAME_VERSION = 20
IPV6CTL_USE_DEPRECATED = 21
IPV6CTL_RR_PRUNE = 22
IPV6CTL_MAPPED_ADDR = 23
IPV6CTL_BINDV6ONLY = 24
IPV6CTL_RTEXPIRE = 25
IPV6CTL_RTMINEXPIRE = 26
IPV6CTL_RTMAXCACHE = 27
IPV6CTL_MAXID = 28
| bsd-2-clause |
yury-s/v8-inspector | Source/chrome/tools/json_schema_compiler/util_cc_helper.py | 31 | 1498 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
_API_UTIL_NAMESPACE = 'json_schema_compiler::util'


class UtilCCHelper(object):
  """Builds C++ snippets that call the helpers defined in
  tools/json_schema_compiler/util.cc.
  """

  def __init__(self, type_manager):
    self._type_manager = type_manager

  def PopulateArrayFromListFunction(self, optional):
    """Returns the qualified name of the list-to-vector conversion helper.
    """
    fn = ('PopulateOptionalArrayFromList' if optional
          else 'PopulateArrayFromList')
    return '%s::%s' % (_API_UTIL_NAMESPACE, fn)

  def CreateValueFromArray(self, src, optional):
    """Returns an expression creating a scoped_ptr<Value> from the array at
    |src|. Optional arrays are pointers, so they use the Optional variant.
    """
    helper = ('CreateValueFromOptionalArray' if optional
              else 'CreateValueFromArray')
    return '%s::%s(%s)' % (_API_UTIL_NAMESPACE, helper, src)

  def GetIncludePath(self):
    """Returns the #include line for the util helpers."""
    return '#include "tools/json_schema_compiler/util.h"'

  def GetValueTypeString(self, value, is_ptr=False):
    """Returns an expression converting |value|'s base::Value type to a
    string; |is_ptr| selects arrow vs. dot access.
    """
    accessor = '->GetType()' if is_ptr else '.GetType()'
    return 'json_schema_compiler::util::ValueTypeToString(%s%s)' % (
        value, accessor)
| bsd-3-clause |
rafaeltomesouza/frontend-class1 | aula2/a11/linkedin/client/.gradle/nodejs/node-v7.5.0-darwin-x64/lib/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py | 1446 | 65937 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the MSVSSettings.py file."""
import StringIO
import unittest
import gyp.MSVSSettings as MSVSSettings
class TestSequenceFunctions(unittest.TestCase):
  def setUp(self):
    """Creates an in-memory stream that captures the validators' warnings."""
    # Each test passes self.stderr to MSVSSettings and then inspects it
    # via _ExpectedWarnings.
    self.stderr = StringIO.StringIO()
def _ExpectedWarnings(self, expected):
"""Compares recorded lines to expected warnings."""
self.stderr.seek(0)
actual = self.stderr.read().split('\n')
actual = [line for line in actual if line]
self.assertEqual(sorted(expected), sorted(actual))
def testValidateMSVSSettings_tool_names(self):
"""Tests that only MSVS tool names are allowed."""
MSVSSettings.ValidateMSVSSettings(
{'VCCLCompilerTool': {},
'VCLinkerTool': {},
'VCMIDLTool': {},
'foo': {},
'VCResourceCompilerTool': {},
'VCLibrarianTool': {},
'VCManifestTool': {},
'ClCompile': {}},
self.stderr)
self._ExpectedWarnings([
'Warning: unrecognized tool foo',
'Warning: unrecognized tool ClCompile'])
  def testValidateMSVSSettings_settings(self):
    """Tests that invalid MSVS settings are flagged with warnings.

    Most entries below are valid and must pass silently; the handful of
    deliberately bad values (out-of-range indices, misspelled setting names,
    malformed booleans) must each yield exactly one warning, verified
    against the list at the bottom.
    """
    MSVSSettings.ValidateMSVSSettings(
        {'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': ['string1', 'string2'],
            'AdditionalUsingDirectories': 'folder1;folder2',
            'AssemblerListingLocation': 'a_file_name',
            'AssemblerOutput': '0',
            'BasicRuntimeChecks': '5',        # bad: index out of range
            'BrowseInformation': 'fdkslj',    # bad: not an int
            'BrowseInformationFile': 'a_file_name',
            'BufferSecurityCheck': 'true',
            'CallingConvention': '-1',        # bad: index out of range
            'CompileAs': '1',
            'DebugInformationFormat': '2',    # bad: no converted value
            'DefaultCharIsUnsigned': 'true',
            'Detect64BitPortabilityProblems': 'true',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'string1;string2',
            'EnableEnhancedInstructionSet': '1',
            'EnableFiberSafeOptimizations': 'true',
            'EnableFunctionLevelLinking': 'true',
            'EnableIntrinsicFunctions': 'true',
            'EnablePREfast': 'true',
            'Enableprefast': 'bogus',         # bad: wrong capitalization
            'ErrorReporting': '1',
            'ExceptionHandling': '1',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': '1',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': '1',
            'ForceConformanceInForLoopScope': 'true',
            'ForcedIncludeFiles': 'file1;file2',
            'ForcedUsingFiles': 'file1;file2',
            'GeneratePreprocessedFile': '1',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': '1',
            'KeepComments': 'true',
            'MinimalRebuild': 'true',
            'ObjectFile': 'a_file_name',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMP': 'true',
            'Optimization': '1',
            'PrecompiledHeaderFile': 'a_file_name',
            'PrecompiledHeaderThrough': 'a_file_name',
            'PreprocessorDefinitions': 'string1;string2',
            'ProgramDataBaseFileName': 'a_file_name',
            'RuntimeLibrary': '1',
            'RuntimeTypeInfo': 'true',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '1',
            'SuppressStartupBanner': 'true',
            'TreatWChar_tAsBuiltInType': 'true',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'string1;string2',
            'UseFullPaths': 'true',
            'UsePrecompiledHeader': '1',
            'UseUnicodeResponseFiles': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '1',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': 'a_file_name',
            'ZZXYZ': 'bogus'},                # bad: unknown setting
         'VCLinkerTool': {
             'AdditionalDependencies': 'file1;file2',
             'AdditionalDependencies_excluded': 'file3',
             'AdditionalLibraryDirectories': 'folder1;folder2',
             'AdditionalManifestDependencies': 'file1;file2',
             'AdditionalOptions': 'a string1',
             'AddModuleNamesToAssembly': 'file1;file2',
             'AllowIsolation': 'true',
             'AssemblyDebug': '2',
             'AssemblyLinkResource': 'file1;file2',
             'BaseAddress': 'a string1',
             'CLRImageType': '2',
             'CLRThreadAttribute': '2',
             'CLRUnmanagedCodeCheck': 'true',
             'DataExecutionPrevention': '2',
             'DelayLoadDLLs': 'file1;file2',
             'DelaySign': 'true',
             'Driver': '2',
             'EmbedManagedResourceFile': 'file1;file2',
             'EnableCOMDATFolding': '2',
             'EnableUAC': 'true',
             'EntryPointSymbol': 'a string1',
             'ErrorReporting': '2',
             'FixedBaseAddress': '2',
             'ForceSymbolReferences': 'file1;file2',
             'FunctionOrder': 'a_file_name',
             'GenerateDebugInformation': 'true',
             'GenerateManifest': 'true',
             'GenerateMapFile': 'true',
             'HeapCommitSize': 'a string1',
             'HeapReserveSize': 'a string1',
             'IgnoreAllDefaultLibraries': 'true',
             'IgnoreDefaultLibraryNames': 'file1;file2',
             'IgnoreEmbeddedIDL': 'true',
             'IgnoreImportLibrary': 'true',
             'ImportLibrary': 'a_file_name',
             'KeyContainer': 'a_file_name',
             'KeyFile': 'a_file_name',
             'LargeAddressAware': '2',
             'LinkIncremental': '2',
             'LinkLibraryDependencies': 'true',
             'LinkTimeCodeGeneration': '2',
             'ManifestFile': 'a_file_name',
             'MapExports': 'true',
             'MapFileName': 'a_file_name',
             'MergedIDLBaseFileName': 'a_file_name',
             'MergeSections': 'a string1',
             'MidlCommandFile': 'a_file_name',
             'ModuleDefinitionFile': 'a_file_name',
             'OptimizeForWindows98': '1',
             'OptimizeReferences': '2',
             'OutputFile': 'a_file_name',
             'PerUserRedirection': 'true',
             'Profile': 'true',
             'ProfileGuidedDatabase': 'a_file_name',
             'ProgramDatabaseFile': 'a_file_name',
             'RandomizedBaseAddress': '2',
             'RegisterOutput': 'true',
             'ResourceOnlyDLL': 'true',
             'SetChecksum': 'true',
             'ShowProgress': '2',
             'StackCommitSize': 'a string1',
             'StackReserveSize': 'a string1',
             'StripPrivateSymbols': 'a_file_name',
             'SubSystem': '2',
             'SupportUnloadOfDelayLoadedDLL': 'true',
             'SuppressStartupBanner': 'true',
             'SwapRunFromCD': 'true',
             'SwapRunFromNet': 'true',
             'TargetMachine': '2',            # bad: no converted value
             'TerminalServerAware': '2',
             'TurnOffAssemblyGeneration': 'true',
             'TypeLibraryFile': 'a_file_name',
             'TypeLibraryResourceID': '33',
             'UACExecutionLevel': '2',
             'UACUIAccess': 'true',
             'UseLibraryDependencyInputs': 'true',
             'UseUnicodeResponseFiles': 'true',
             'Version': 'a string1'},
         'VCMIDLTool': {
             'AdditionalIncludeDirectories': 'folder1;folder2',
             'AdditionalOptions': 'a string1',
             'CPreprocessOptions': 'a string1',
             'DefaultCharType': '1',
             'DLLDataFileName': 'a_file_name',
             'EnableErrorChecks': '1',
             'ErrorCheckAllocations': 'true',
             'ErrorCheckBounds': 'true',
             'ErrorCheckEnumRange': 'true',
             'ErrorCheckRefPointers': 'true',
             'ErrorCheckStubData': 'true',
             'GenerateStublessProxies': 'true',
             'GenerateTypeLibrary': 'true',
             'HeaderFileName': 'a_file_name',
             'IgnoreStandardIncludePath': 'true',
             'InterfaceIdentifierFileName': 'a_file_name',
             'MkTypLibCompatible': 'true',
             'notgood': 'bogus',              # bad: unknown setting
             'OutputDirectory': 'a string1',
             'PreprocessorDefinitions': 'string1;string2',
             'ProxyFileName': 'a_file_name',
             'RedirectOutputAndErrors': 'a_file_name',
             'StructMemberAlignment': '1',
             'SuppressStartupBanner': 'true',
             'TargetEnvironment': '1',
             'TypeLibraryName': 'a_file_name',
             'UndefinePreprocessorDefinitions': 'string1;string2',
             'ValidateParameters': 'true',
             'WarnAsError': 'true',
             'WarningLevel': '1'},
         'VCResourceCompilerTool': {
             'AdditionalOptions': 'a string1',
             'AdditionalIncludeDirectories': 'folder1;folder2',
             'Culture': '1003',
             'IgnoreStandardIncludePath': 'true',
             'notgood2': 'bogus',             # bad: unknown setting
             'PreprocessorDefinitions': 'string1;string2',
             'ResourceOutputFileName': 'a string1',
             'ShowProgress': 'true',
             'SuppressStartupBanner': 'true',
             'UndefinePreprocessorDefinitions': 'string1;string2'},
         'VCLibrarianTool': {
             'AdditionalDependencies': 'file1;file2',
             'AdditionalLibraryDirectories': 'folder1;folder2',
             'AdditionalOptions': 'a string1',
             'ExportNamedFunctions': 'string1;string2',
             'ForceSymbolReferences': 'a string1',
             'IgnoreAllDefaultLibraries': 'true',
             'IgnoreSpecificDefaultLibraries': 'file1;file2',
             'LinkLibraryDependencies': 'true',
             'ModuleDefinitionFile': 'a_file_name',
             'OutputFile': 'a_file_name',
             'SuppressStartupBanner': 'true',
             'UseUnicodeResponseFiles': 'true'},
         'VCManifestTool': {
             'AdditionalManifestFiles': 'file1;file2',
             'AdditionalOptions': 'a string1',
             'AssemblyIdentity': 'a string1',
             'ComponentFileName': 'a_file_name',
             'DependencyInformationFile': 'a_file_name',
             'GenerateCatalogFiles': 'true',
             'InputResourceManifests': 'a string1',
             'ManifestResourceFile': 'a_file_name',
             'OutputManifestFile': 'a_file_name',
             'RegistrarScriptFile': 'a_file_name',
             'ReplacementsFile': 'a_file_name',
             'SuppressStartupBanner': 'true',
             'TypeLibraryFile': 'a_file_name',
             'UpdateFileHashes': 'truel',     # bad: malformed boolean
             'UpdateFileHashesSearchPath': 'a_file_name',
             'UseFAT32Workaround': 'true',
             'UseUnicodeResponseFiles': 'true',
             'VerboseOutput': 'true'}},
        self.stderr)
    self._ExpectedWarnings([
        'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
        'index value (5) not in expected range [0, 4)',
        'Warning: for VCCLCompilerTool/BrowseInformation, '
        "invalid literal for int() with base 10: 'fdkslj'",
        'Warning: for VCCLCompilerTool/CallingConvention, '
        'index value (-1) not in expected range [0, 4)',
        'Warning: for VCCLCompilerTool/DebugInformationFormat, '
        'converted value for 2 not specified.',
        'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
        'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
        'Warning: for VCLinkerTool/TargetMachine, '
        'converted value for 2 not specified.',
        'Warning: unrecognized setting VCMIDLTool/notgood',
        'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
        'Warning: for VCManifestTool/UpdateFileHashes, '
        "expected bool; got 'truel'"
        ''])
  def testValidateMSBuildSettings_settings(self):
    """Tests that invalid MSBuild settings are flagged with warnings.

    Mirrors testValidateMSVSSettings_settings but for the MSBuild schema:
    the deliberately bad entries (unknown names, malformed booleans,
    unrecognized enumerated values) must each yield exactly one warning,
    verified against the list at the bottom.
    """
    MSVSSettings.ValidateMSBuildSettings(
        {'ClCompile': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': ['string1', 'string2'],
            'AdditionalUsingDirectories': 'folder1;folder2',
            'AssemblerListingLocation': 'a_file_name',
            'AssemblerOutput': 'NoListing',
            'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
            'BrowseInformation': 'false',
            'BrowseInformationFile': 'a_file_name',
            'BufferSecurityCheck': 'true',
            'BuildingInIDE': 'true',
            'CallingConvention': 'Cdecl',
            'CompileAs': 'CompileAsC',
            'CompileAsManaged': 'true',
            'CreateHotpatchableImage': 'true',
            'DebugInformationFormat': 'ProgramDatabase',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'string1;string2',
            'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
            'EnableFiberSafeOptimizations': 'true',
            'EnablePREfast': 'true',
            'Enableprefast': 'bogus',         # bad: wrong capitalization
            'ErrorReporting': 'Prompt',
            'ExceptionHandling': 'SyncCThrow',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': 'Neither',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': 'Precise',
            'ForceConformanceInForLoopScope': 'true',
            'ForcedIncludeFiles': 'file1;file2',
            'ForcedUsingFiles': 'file1;file2',
            'FunctionLevelLinking': 'false',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': 'OnlyExplicitInline',
            'IntrinsicFunctions': 'false',
            'MinimalRebuild': 'true',
            'MultiProcessorCompilation': 'true',
            'ObjectFileName': 'a_file_name',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMPSupport': 'true',
            'Optimization': 'Disabled',
            'PrecompiledHeader': 'NotUsing',
            'PrecompiledHeaderFile': 'a_file_name',
            'PrecompiledHeaderOutputFile': 'a_file_name',
            'PreprocessKeepComments': 'true',
            'PreprocessorDefinitions': 'string1;string2',
            'PreprocessOutputPath': 'a string1',
            'PreprocessSuppressLineNumbers': 'false',
            'PreprocessToFile': 'false',
            'ProcessorNumber': '33',
            'ProgramDataBaseFileName': 'a_file_name',
            'RuntimeLibrary': 'MultiThreaded',
            'RuntimeTypeInfo': 'true',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '1Byte',
            'SuppressStartupBanner': 'true',
            'TrackerLogDirectory': 'a_folder',
            'TreatSpecificWarningsAsErrors': 'string1;string2',
            'TreatWarningAsError': 'true',
            'TreatWChar_tAsBuiltInType': 'true',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'string1;string2',
            'UseFullPaths': 'true',
            'UseUnicodeForAssemblerListing': 'true',
            'WarningLevel': 'TurnOffAllWarnings',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': 'a_file_name',
            'ZZXYZ': 'bogus'},                # bad: unknown setting
         'Link': {
             'AdditionalDependencies': 'file1;file2',
             'AdditionalLibraryDirectories': 'folder1;folder2',
             'AdditionalManifestDependencies': 'file1;file2',
             'AdditionalOptions': 'a string1',
             'AddModuleNamesToAssembly': 'file1;file2',
             'AllowIsolation': 'true',
             'AssemblyDebug': '',
             'AssemblyLinkResource': 'file1;file2',
             'BaseAddress': 'a string1',
             'BuildingInIDE': 'true',
             'CLRImageType': 'ForceIJWImage',
             'CLRSupportLastError': 'Enabled',
             'CLRThreadAttribute': 'MTAThreadingAttribute',
             'CLRUnmanagedCodeCheck': 'true',
             'CreateHotPatchableImage': 'X86Image',
             'DataExecutionPrevention': 'false',
             'DelayLoadDLLs': 'file1;file2',
             'DelaySign': 'true',
             'Driver': 'NotSet',
             'EmbedManagedResourceFile': 'file1;file2',
             'EnableCOMDATFolding': 'false',
             'EnableUAC': 'true',
             'EntryPointSymbol': 'a string1',
             'FixedBaseAddress': 'false',
             'ForceFileOutput': 'Enabled',
             'ForceSymbolReferences': 'file1;file2',
             'FunctionOrder': 'a_file_name',
             'GenerateDebugInformation': 'true',
             'GenerateMapFile': 'true',
             'HeapCommitSize': 'a string1',
             'HeapReserveSize': 'a string1',
             'IgnoreAllDefaultLibraries': 'true',
             'IgnoreEmbeddedIDL': 'true',
             'IgnoreSpecificDefaultLibraries': 'a_file_list',
             'ImageHasSafeExceptionHandlers': 'true',
             'ImportLibrary': 'a_file_name',
             'KeyContainer': 'a_file_name',
             'KeyFile': 'a_file_name',
             'LargeAddressAware': 'false',
             'LinkDLL': 'true',
             'LinkErrorReporting': 'SendErrorReport',
             'LinkStatus': 'true',
             'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
             'ManifestFile': 'a_file_name',
             'MapExports': 'true',
             'MapFileName': 'a_file_name',
             'MergedIDLBaseFileName': 'a_file_name',
             'MergeSections': 'a string1',
             'MidlCommandFile': 'a_file_name',
             'MinimumRequiredVersion': 'a string1',
             'ModuleDefinitionFile': 'a_file_name',
             'MSDOSStubFileName': 'a_file_name',
             'NoEntryPoint': 'true',
             'OptimizeReferences': 'false',
             'OutputFile': 'a_file_name',
             'PerUserRedirection': 'true',
             'PreventDllBinding': 'true',
             'Profile': 'true',
             'ProfileGuidedDatabase': 'a_file_name',
             'ProgramDatabaseFile': 'a_file_name',
             'RandomizedBaseAddress': 'false',
             'RegisterOutput': 'true',
             'SectionAlignment': '33',
             'SetChecksum': 'true',
             'ShowProgress': 'LinkVerboseREF',
             'SpecifySectionAttributes': 'a string1',
             'StackCommitSize': 'a string1',
             'StackReserveSize': 'a string1',
             'StripPrivateSymbols': 'a_file_name',
             'SubSystem': 'Console',
             'SupportNobindOfDelayLoadedDLL': 'true',
             'SupportUnloadOfDelayLoadedDLL': 'true',
             'SuppressStartupBanner': 'true',
             'SwapRunFromCD': 'true',
             'SwapRunFromNET': 'true',
             'TargetMachine': 'MachineX86',
             'TerminalServerAware': 'false',
             'TrackerLogDirectory': 'a_folder',
             'TreatLinkerWarningAsErrors': 'true',
             'TurnOffAssemblyGeneration': 'true',
             'TypeLibraryFile': 'a_file_name',
             'TypeLibraryResourceID': '33',
             'UACExecutionLevel': 'AsInvoker',
             'UACUIAccess': 'true',
             'Version': 'a string1'},
         'ResourceCompile': {
             'AdditionalIncludeDirectories': 'folder1;folder2',
             'AdditionalOptions': 'a string1',
             'Culture': '0x236',
             'IgnoreStandardIncludePath': 'true',
             'NullTerminateStrings': 'true',
             'PreprocessorDefinitions': 'string1;string2',
             'ResourceOutputFileName': 'a string1',
             'ShowProgress': 'true',
             'SuppressStartupBanner': 'true',
             'TrackerLogDirectory': 'a_folder',
             'UndefinePreprocessorDefinitions': 'string1;string2'},
         'Midl': {
             'AdditionalIncludeDirectories': 'folder1;folder2',
             'AdditionalOptions': 'a string1',
             'ApplicationConfigurationMode': 'true',
             'ClientStubFile': 'a_file_name',
             'CPreprocessOptions': 'a string1',
             'DefaultCharType': 'Signed',
             'DllDataFileName': 'a_file_name',
             'EnableErrorChecks': 'EnableCustom',
             'ErrorCheckAllocations': 'true',
             'ErrorCheckBounds': 'true',
             'ErrorCheckEnumRange': 'true',
             'ErrorCheckRefPointers': 'true',
             'ErrorCheckStubData': 'true',
             'GenerateClientFiles': 'Stub',
             'GenerateServerFiles': 'None',
             'GenerateStublessProxies': 'true',
             'GenerateTypeLibrary': 'true',
             'HeaderFileName': 'a_file_name',
             'IgnoreStandardIncludePath': 'true',
             'InterfaceIdentifierFileName': 'a_file_name',
             'LocaleID': '33',
             'MkTypLibCompatible': 'true',
             'OutputDirectory': 'a string1',
             'PreprocessorDefinitions': 'string1;string2',
             'ProxyFileName': 'a_file_name',
             'RedirectOutputAndErrors': 'a_file_name',
             'ServerStubFile': 'a_file_name',
             'StructMemberAlignment': 'NotSet',
             'SuppressCompilerWarnings': 'true',
             'SuppressStartupBanner': 'true',
             'TargetEnvironment': 'Itanium',
             'TrackerLogDirectory': 'a_folder',
             'TypeLibFormat': 'NewFormat',
             'TypeLibraryName': 'a_file_name',
             'UndefinePreprocessorDefinitions': 'string1;string2',
             'ValidateAllParameters': 'true',
             'WarnAsError': 'true',
             'WarningLevel': '1'},
         'Lib': {
             'AdditionalDependencies': 'file1;file2',
             'AdditionalLibraryDirectories': 'folder1;folder2',
             'AdditionalOptions': 'a string1',
             'DisplayLibrary': 'a string1',
             'ErrorReporting': 'PromptImmediately',
             'ExportNamedFunctions': 'string1;string2',
             'ForceSymbolReferences': 'a string1',
             'IgnoreAllDefaultLibraries': 'true',
             'IgnoreSpecificDefaultLibraries': 'file1;file2',
             'LinkTimeCodeGeneration': 'true',
             'MinimumRequiredVersion': 'a string1',
             'ModuleDefinitionFile': 'a_file_name',
             'Name': 'a_file_name',
             'OutputFile': 'a_file_name',
             'RemoveObjects': 'file1;file2',
             'SubSystem': 'Console',
             'SuppressStartupBanner': 'true',
             'TargetMachine': 'MachineX86i',  # bad: unknown enum value
             'TrackerLogDirectory': 'a_folder',
             'TreatLibWarningAsErrors': 'true',
             'UseUnicodeResponseFiles': 'true',
             'Verbose': 'true'},
         'Manifest': {
             'AdditionalManifestFiles': 'file1;file2',
             'AdditionalOptions': 'a string1',
             'AssemblyIdentity': 'a string1',
             'ComponentFileName': 'a_file_name',
             'EnableDPIAwareness': 'fal',     # bad: malformed boolean
             'GenerateCatalogFiles': 'truel', # bad: malformed boolean
             'GenerateCategoryTags': 'true',
             'InputResourceManifests': 'a string1',
             'ManifestFromManagedAssembly': 'a_file_name',
             'notgood3': 'bogus',             # bad: unknown setting
             'OutputManifestFile': 'a_file_name',
             'OutputResourceManifests': 'a string1',
             'RegistrarScriptFile': 'a_file_name',
             'ReplacementsFile': 'a_file_name',
             'SuppressDependencyElement': 'true',
             'SuppressStartupBanner': 'true',
             'TrackerLogDirectory': 'a_folder',
             'TypeLibraryFile': 'a_file_name',
             'UpdateFileHashes': 'true',
             'UpdateFileHashesSearchPath': 'a_file_name',
             'VerboseOutput': 'true'},
         'ProjectReference': {
             'LinkLibraryDependencies': 'true',
             'UseLibraryDependencyInputs': 'true'},
         'ManifestResourceCompile': {
             'ResourceOutputFileName': 'a_file_name'},
         # The empty tool name holds project-level (tool-less) settings.
         '': {
             'EmbedManifest': 'true',
             'GenerateManifest': 'true',
             'IgnoreImportLibrary': 'true',
             'LinkIncremental': 'false'}},
        self.stderr)
    self._ExpectedWarnings([
        'Warning: unrecognized setting ClCompile/Enableprefast',
        'Warning: unrecognized setting ClCompile/ZZXYZ',
        'Warning: unrecognized setting Manifest/notgood3',
        'Warning: for Manifest/GenerateCatalogFiles, '
        "expected bool; got 'truel'",
        'Warning: for Lib/TargetMachine, unrecognized enumerated value '
        'MachineX86i',
        "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
def testConvertToMSBuildSettings_empty(self):
"""Tests an empty conversion."""
msvs_settings = {}
expected_msbuild_settings = {}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
def testConvertToMSBuildSettings_minimal(self):
"""Tests a minimal conversion."""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/foo',
'BasicRuntimeChecks': '0',
},
'VCLinkerTool': {
'LinkTimeCodeGeneration': '1',
'ErrorReporting': '1',
'DataExecutionPrevention': '2',
},
}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/foo',
'BasicRuntimeChecks': 'Default',
},
'Link': {
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'LinkErrorReporting': 'PromptImmediately',
'DataExecutionPrevention': 'true',
},
}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
  def testConvertToMSBuildSettings_warnings(self):
    """Tests conversion that generates warnings.

    Each bad MSVS value below must be dropped from the converted output
    and reported via a warning; valid settings still convert normally.
    """
    msvs_settings = {
        'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': '1',
            'AdditionalOptions': '2',
            # These are incorrect values:
            'BasicRuntimeChecks': '12',
            'BrowseInformation': '21',
            'UsePrecompiledHeader': '13',
            'GeneratePreprocessedFile': '14'},
        'VCLinkerTool': {
            # These are incorrect values:
            'Driver': '10',
            'LinkTimeCodeGeneration': '31',
            'ErrorReporting': '21',
            'FixedBaseAddress': '6'},
        'VCResourceCompilerTool': {
            # Custom
            'Culture': '1003'}}
    expected_msbuild_settings = {
        'ClCompile': {
            'AdditionalIncludeDirectories': '1',
            'AdditionalOptions': '2'},
        # All VCLinkerTool values were invalid, so 'Link' converts empty.
        'Link': {},
        'ResourceCompile': {
            # Custom; Culture is converted from decimal to hex
            # (1003 == 0x3eb).
            'Culture': '0x03eb'}}
    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
        msvs_settings,
        self.stderr)
    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
    self._ExpectedWarnings([
        'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
        'MSBuild, index value (12) not in expected range [0, 4)',
        'Warning: while converting VCCLCompilerTool/BrowseInformation to '
        'MSBuild, index value (21) not in expected range [0, 3)',
        'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
        'MSBuild, index value (13) not in expected range [0, 3)',
        'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
        'MSBuild, value must be one of [0, 1, 2]; got 14',
        'Warning: while converting VCLinkerTool/Driver to '
        'MSBuild, index value (10) not in expected range [0, 4)',
        'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
        'MSBuild, index value (31) not in expected range [0, 5)',
        'Warning: while converting VCLinkerTool/ErrorReporting to '
        'MSBuild, index value (21) not in expected range [0, 3)',
        'Warning: while converting VCLinkerTool/FixedBaseAddress to '
        'MSBuild, index value (6) not in expected range [0, 3)',
        ])
def testConvertToMSBuildSettings_full_synthetic(self):
"""Tests conversion of all the MSBuild settings."""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'AdditionalUsingDirectories': 'folder1;folder2;folder3',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': '0',
'BasicRuntimeChecks': '1',
'BrowseInformation': '2',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': '0',
'CompileAs': '1',
'DebugInformationFormat': '4',
'DefaultCharIsUnsigned': 'true',
'Detect64BitPortabilityProblems': 'true',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'd1;d2;d3',
'EnableEnhancedInstructionSet': '0',
'EnableFiberSafeOptimizations': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'EnablePREfast': 'true',
'ErrorReporting': '1',
'ExceptionHandling': '2',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': '0',
'FloatingPointExceptions': 'true',
'FloatingPointModel': '1',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2;file3',
'ForcedUsingFiles': 'file1;file2;file3',
'GeneratePreprocessedFile': '1',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': '2',
'KeepComments': 'true',
'MinimalRebuild': 'true',
'ObjectFile': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMP': 'true',
'Optimization': '3',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderThrough': 'a_file_name',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': '0',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'UseFullPaths': 'true',
'UsePrecompiledHeader': '1',
'UseUnicodeResponseFiles': 'true',
'WarnAsError': 'true',
'WarningLevel': '2',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name'},
'VCLinkerTool': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
'AdditionalManifestDependencies': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AddModuleNamesToAssembly': 'file1;file2;file3',
'AllowIsolation': 'true',
'AssemblyDebug': '0',
'AssemblyLinkResource': 'file1;file2;file3',
'BaseAddress': 'a_string',
'CLRImageType': '1',
'CLRThreadAttribute': '2',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '0',
'DelayLoadDLLs': 'file1;file2;file3',
'DelaySign': 'true',
'Driver': '1',
'EmbedManagedResourceFile': 'file1;file2;file3',
'EnableCOMDATFolding': '0',
'EnableUAC': 'true',
'EntryPointSymbol': 'a_string',
'ErrorReporting': '0',
'FixedBaseAddress': '1',
'ForceSymbolReferences': 'file1;file2;file3',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateManifest': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a_string',
'HeapReserveSize': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreDefaultLibraryNames': 'file1;file2;file3',
'IgnoreEmbeddedIDL': 'true',
'IgnoreImportLibrary': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': '2',
'LinkIncremental': '1',
'LinkLibraryDependencies': 'true',
'LinkTimeCodeGeneration': '2',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a_string',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'OptimizeForWindows98': '1',
'OptimizeReferences': '0',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': '1',
'RegisterOutput': 'true',
'ResourceOnlyDLL': 'true',
'SetChecksum': 'true',
'ShowProgress': '0',
'StackCommitSize': 'a_string',
'StackReserveSize': 'a_string',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': '2',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNet': 'true',
'TargetMachine': '3',
'TerminalServerAware': '2',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': '1',
'UACUIAccess': 'true',
'UseLibraryDependencyInputs': 'false',
'UseUnicodeResponseFiles': 'true',
'Version': 'a_string'},
'VCResourceCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'Culture': '1003',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'ResourceOutputFileName': 'a_string',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
'VCMIDLTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'CPreprocessOptions': 'a_string',
'DefaultCharType': '0',
'DLLDataFileName': 'a_file_name',
'EnableErrorChecks': '2',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a_string',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '3',
'SuppressStartupBanner': 'true',
'TargetEnvironment': '1',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'ValidateParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '4'},
'VCLibrarianTool': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'ExportNamedFunctions': 'd1;d2;d3',
'ForceSymbolReferences': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'LinkLibraryDependencies': 'true',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'VCManifestTool': {
'AdditionalManifestFiles': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AssemblyIdentity': 'a_string',
'ComponentFileName': 'a_file_name',
'DependencyInformationFile': 'a_file_name',
'EmbedManifest': 'true',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a_string',
'ManifestResourceFile': 'my_name',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'UseFAT32Workaround': 'true',
'UseUnicodeResponseFiles': 'true',
'VerboseOutput': 'true'}}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string /J',
'AdditionalUsingDirectories': 'folder1;folder2;folder3',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': 'NoListing',
'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
'BrowseInformation': 'true',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': 'Cdecl',
'CompileAs': 'CompileAsC',
'DebugInformationFormat': 'EditAndContinue',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'd1;d2;d3',
'EnableEnhancedInstructionSet': 'NotSet',
'EnableFiberSafeOptimizations': 'true',
'EnablePREfast': 'true',
'ErrorReporting': 'Prompt',
'ExceptionHandling': 'Async',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': 'Neither',
'FloatingPointExceptions': 'true',
'FloatingPointModel': 'Strict',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2;file3',
'ForcedUsingFiles': 'file1;file2;file3',
'FunctionLevelLinking': 'true',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': 'AnySuitable',
'IntrinsicFunctions': 'true',
'MinimalRebuild': 'true',
'ObjectFileName': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMPSupport': 'true',
'Optimization': 'Full',
'PrecompiledHeader': 'Create',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderOutputFile': 'a_file_name',
'PreprocessKeepComments': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'PreprocessSuppressLineNumbers': 'false',
'PreprocessToFile': 'true',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': 'MultiThreaded',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1Byte',
'SuppressStartupBanner': 'true',
'TreatWarningAsError': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'UseFullPaths': 'true',
'WarningLevel': 'Level2',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name'},
'Link': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalManifestDependencies': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AddModuleNamesToAssembly': 'file1;file2;file3',
'AllowIsolation': 'true',
'AssemblyDebug': '',
'AssemblyLinkResource': 'file1;file2;file3',
'BaseAddress': 'a_string',
'CLRImageType': 'ForceIJWImage',
'CLRThreadAttribute': 'STAThreadingAttribute',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '',
'DelayLoadDLLs': 'file1;file2;file3',
'DelaySign': 'true',
'Driver': 'Driver',
'EmbedManagedResourceFile': 'file1;file2;file3',
'EnableCOMDATFolding': '',
'EnableUAC': 'true',
'EntryPointSymbol': 'a_string',
'FixedBaseAddress': 'false',
'ForceSymbolReferences': 'file1;file2;file3',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a_string',
'HeapReserveSize': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreEmbeddedIDL': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': 'true',
'LinkErrorReporting': 'NoErrorReport',
'LinkTimeCodeGeneration': 'PGInstrument',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a_string',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'NoEntryPoint': 'true',
'OptimizeReferences': '',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': 'false',
'RegisterOutput': 'true',
'SetChecksum': 'true',
'ShowProgress': 'NotSet',
'StackCommitSize': 'a_string',
'StackReserveSize': 'a_string',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': 'Windows',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNET': 'true',
'TargetMachine': 'MachineARM',
'TerminalServerAware': 'true',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': 'HighestAvailable',
'UACUIAccess': 'true',
'Version': 'a_string'},
'ResourceCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'Culture': '0x03eb',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'ResourceOutputFileName': 'a_string',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
'Midl': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'CPreprocessOptions': 'a_string',
'DefaultCharType': 'Unsigned',
'DllDataFileName': 'a_file_name',
'EnableErrorChecks': 'All',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a_string',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '4',
'SuppressStartupBanner': 'true',
'TargetEnvironment': 'Win32',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'ValidateAllParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '4'},
'Lib': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'ExportNamedFunctions': 'd1;d2;d3',
'ForceSymbolReferences': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'Manifest': {
'AdditionalManifestFiles': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AssemblyIdentity': 'a_string',
'ComponentFileName': 'a_file_name',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a_string',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'VerboseOutput': 'true'},
'ManifestResourceCompile': {
'ResourceOutputFileName': 'my_name'},
'ProjectReference': {
'LinkLibraryDependencies': 'true',
'UseLibraryDependencyInputs': 'false'},
'': {
'EmbedManifest': 'true',
'GenerateManifest': 'true',
'IgnoreImportLibrary': 'true',
'LinkIncremental': 'false'}}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
    def testConvertToMSBuildSettings_actual(self):
        """Tests the conversion of an actual project.

        A VS2008 project with most of the options defined was created through the
        VS2008 IDE.  It was then converted to VS2010.  The tool settings found in
        the .vcproj and .vcxproj files were converted to the two dictionaries
        msvs_settings and expected_msbuild_settings.

        Note that for many settings, the VS2010 converter adds macros like
        %(AdditionalIncludeDirectories) to make sure than inherited values are
        included.  Since the Gyp projects we generate do not use inheritance,
        we removed these macros.  They were:
            ClCompile:
                AdditionalIncludeDirectories:  ';%(AdditionalIncludeDirectories)'
                AdditionalOptions:  ' %(AdditionalOptions)'
                AdditionalUsingDirectories:  ';%(AdditionalUsingDirectories)'
                DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
                ForcedIncludeFiles:  ';%(ForcedIncludeFiles)',
                ForcedUsingFiles:  ';%(ForcedUsingFiles)',
                PreprocessorDefinitions:  ';%(PreprocessorDefinitions)',
                UndefinePreprocessorDefinitions:
                    ';%(UndefinePreprocessorDefinitions)',
            Link:
                AdditionalDependencies:  ';%(AdditionalDependencies)',
                AdditionalLibraryDirectories:  ';%(AdditionalLibraryDirectories)',
                AdditionalManifestDependencies:
                    ';%(AdditionalManifestDependencies)',
                AdditionalOptions:  ' %(AdditionalOptions)',
                AddModuleNamesToAssembly:  ';%(AddModuleNamesToAssembly)',
                AssemblyLinkResource:  ';%(AssemblyLinkResource)',
                DelayLoadDLLs:  ';%(DelayLoadDLLs)',
                EmbedManagedResourceFile:  ';%(EmbedManagedResourceFile)',
                ForceSymbolReferences:  ';%(ForceSymbolReferences)',
                IgnoreSpecificDefaultLibraries:
                    ';%(IgnoreSpecificDefaultLibraries)',
            ResourceCompile:
                AdditionalIncludeDirectories:  ';%(AdditionalIncludeDirectories)',
                AdditionalOptions:  ' %(AdditionalOptions)',
                PreprocessorDefinitions:  ';%(PreprocessorDefinitions)',
            Manifest:
                AdditionalManifestFiles:  ';%(AdditionalManifestFiles)',
                AdditionalOptions:  ' %(AdditionalOptions)',
                InputResourceManifests:  ';%(InputResourceManifests)',
        """
        # Tool settings exactly as they appeared in the VS2008 .vcproj file.
        msvs_settings = {
            'VCCLCompilerTool': {
                'AdditionalIncludeDirectories': 'dir1',
                'AdditionalOptions': '/more',
                'AdditionalUsingDirectories': 'test',
                'AssemblerListingLocation': '$(IntDir)\\a',
                'AssemblerOutput': '1',
                'BasicRuntimeChecks': '3',
                'BrowseInformation': '1',
                'BrowseInformationFile': '$(IntDir)\\e',
                'BufferSecurityCheck': 'false',
                'CallingConvention': '1',
                'CompileAs': '1',
                'DebugInformationFormat': '4',
                'DefaultCharIsUnsigned': 'true',
                'Detect64BitPortabilityProblems': 'true',
                'DisableLanguageExtensions': 'true',
                'DisableSpecificWarnings': 'abc',
                'EnableEnhancedInstructionSet': '1',
                'EnableFiberSafeOptimizations': 'true',
                'EnableFunctionLevelLinking': 'true',
                'EnableIntrinsicFunctions': 'true',
                'EnablePREfast': 'true',
                'ErrorReporting': '2',
                'ExceptionHandling': '2',
                'ExpandAttributedSource': 'true',
                'FavorSizeOrSpeed': '2',
                'FloatingPointExceptions': 'true',
                'FloatingPointModel': '1',
                'ForceConformanceInForLoopScope': 'false',
                'ForcedIncludeFiles': 'def',
                'ForcedUsingFiles': 'ge',
                'GeneratePreprocessedFile': '2',
                'GenerateXMLDocumentationFiles': 'true',
                'IgnoreStandardIncludePath': 'true',
                'InlineFunctionExpansion': '1',
                'KeepComments': 'true',
                'MinimalRebuild': 'true',
                'ObjectFile': '$(IntDir)\\b',
                'OmitDefaultLibName': 'true',
                'OmitFramePointers': 'true',
                'OpenMP': 'true',
                'Optimization': '3',
                'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
                'PrecompiledHeaderThrough': 'StdAfx.hd',
                'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
                'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
                'RuntimeLibrary': '3',
                'RuntimeTypeInfo': 'false',
                'ShowIncludes': 'true',
                'SmallerTypeCheck': 'true',
                'StringPooling': 'true',
                'StructMemberAlignment': '3',
                'SuppressStartupBanner': 'false',
                'TreatWChar_tAsBuiltInType': 'false',
                'UndefineAllPreprocessorDefinitions': 'true',
                'UndefinePreprocessorDefinitions': 'wer',
                'UseFullPaths': 'true',
                'UsePrecompiledHeader': '0',
                'UseUnicodeResponseFiles': 'false',
                'WarnAsError': 'true',
                'WarningLevel': '3',
                'WholeProgramOptimization': 'true',
                'XMLDocumentationFileName': '$(IntDir)\\c'},
            'VCLinkerTool': {
                'AdditionalDependencies': 'zx',
                'AdditionalLibraryDirectories': 'asd',
                'AdditionalManifestDependencies': 's2',
                'AdditionalOptions': '/mor2',
                'AddModuleNamesToAssembly': 'd1',
                'AllowIsolation': 'false',
                'AssemblyDebug': '1',
                'AssemblyLinkResource': 'd5',
                'BaseAddress': '23423',
                'CLRImageType': '3',
                'CLRThreadAttribute': '1',
                'CLRUnmanagedCodeCheck': 'true',
                'DataExecutionPrevention': '0',
                'DelayLoadDLLs': 'd4',
                'DelaySign': 'true',
                'Driver': '2',
                'EmbedManagedResourceFile': 'd2',
                'EnableCOMDATFolding': '1',
                'EnableUAC': 'false',
                'EntryPointSymbol': 'f5',
                'ErrorReporting': '2',
                'FixedBaseAddress': '1',
                'ForceSymbolReferences': 'd3',
                'FunctionOrder': 'fssdfsd',
                'GenerateDebugInformation': 'true',
                'GenerateManifest': 'false',
                'GenerateMapFile': 'true',
                'HeapCommitSize': '13',
                'HeapReserveSize': '12',
                'IgnoreAllDefaultLibraries': 'true',
                'IgnoreDefaultLibraryNames': 'flob;flok',
                'IgnoreEmbeddedIDL': 'true',
                'IgnoreImportLibrary': 'true',
                'ImportLibrary': 'f4',
                'KeyContainer': 'f7',
                'KeyFile': 'f6',
                'LargeAddressAware': '2',
                'LinkIncremental': '0',
                'LinkLibraryDependencies': 'false',
                'LinkTimeCodeGeneration': '1',
                'ManifestFile':
                    '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
                'MapExports': 'true',
                'MapFileName': 'd5',
                'MergedIDLBaseFileName': 'f2',
                'MergeSections': 'f5',
                'MidlCommandFile': 'f1',
                'ModuleDefinitionFile': 'sdsd',
                'OptimizeForWindows98': '2',
                'OptimizeReferences': '2',
                'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
                'PerUserRedirection': 'true',
                'Profile': 'true',
                'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
                'ProgramDatabaseFile': 'Flob.pdb',
                'RandomizedBaseAddress': '1',
                'RegisterOutput': 'true',
                'ResourceOnlyDLL': 'true',
                'SetChecksum': 'false',
                'ShowProgress': '1',
                'StackCommitSize': '15',
                'StackReserveSize': '14',
                'StripPrivateSymbols': 'd3',
                'SubSystem': '1',
                'SupportUnloadOfDelayLoadedDLL': 'true',
                'SuppressStartupBanner': 'false',
                'SwapRunFromCD': 'true',
                'SwapRunFromNet': 'true',
                'TargetMachine': '1',
                'TerminalServerAware': '1',
                'TurnOffAssemblyGeneration': 'true',
                'TypeLibraryFile': 'f3',
                'TypeLibraryResourceID': '12',
                'UACExecutionLevel': '2',
                'UACUIAccess': 'true',
                'UseLibraryDependencyInputs': 'true',
                'UseUnicodeResponseFiles': 'false',
                'Version': '333'},
            'VCResourceCompilerTool': {
                'AdditionalIncludeDirectories': 'f3',
                'AdditionalOptions': '/more3',
                'Culture': '3084',
                'IgnoreStandardIncludePath': 'true',
                'PreprocessorDefinitions': '_UNICODE;UNICODE2',
                'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
                'ShowProgress': 'true'},
            'VCManifestTool': {
                'AdditionalManifestFiles': 'sfsdfsd',
                'AdditionalOptions': 'afdsdafsd',
                'AssemblyIdentity': 'sddfdsadfsa',
                'ComponentFileName': 'fsdfds',
                'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
                'EmbedManifest': 'false',
                'GenerateCatalogFiles': 'true',
                'InputResourceManifests': 'asfsfdafs',
                'ManifestResourceFile':
                    '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
                'OutputManifestFile': '$(TargetPath).manifestdfs',
                'RegistrarScriptFile': 'sdfsfd',
                'ReplacementsFile': 'sdffsd',
                'SuppressStartupBanner': 'false',
                'TypeLibraryFile': 'sfsd',
                'UpdateFileHashes': 'true',
                'UpdateFileHashesSearchPath': 'sfsd',
                'UseFAT32Workaround': 'true',
                'UseUnicodeResponseFiles': 'false',
                'VerboseOutput': 'true'}}
        # The same settings after conversion, as the VS2010 upgrade wizard
        # wrote them into the .vcxproj file (minus inheritance macros, see
        # docstring above).
        expected_msbuild_settings = {
            'ClCompile': {
                'AdditionalIncludeDirectories': 'dir1',
                'AdditionalOptions': '/more /J',
                'AdditionalUsingDirectories': 'test',
                'AssemblerListingLocation': '$(IntDir)a',
                'AssemblerOutput': 'AssemblyCode',
                'BasicRuntimeChecks': 'EnableFastChecks',
                'BrowseInformation': 'true',
                'BrowseInformationFile': '$(IntDir)e',
                'BufferSecurityCheck': 'false',
                'CallingConvention': 'FastCall',
                'CompileAs': 'CompileAsC',
                'DebugInformationFormat': 'EditAndContinue',
                'DisableLanguageExtensions': 'true',
                'DisableSpecificWarnings': 'abc',
                'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
                'EnableFiberSafeOptimizations': 'true',
                'EnablePREfast': 'true',
                'ErrorReporting': 'Queue',
                'ExceptionHandling': 'Async',
                'ExpandAttributedSource': 'true',
                'FavorSizeOrSpeed': 'Size',
                'FloatingPointExceptions': 'true',
                'FloatingPointModel': 'Strict',
                'ForceConformanceInForLoopScope': 'false',
                'ForcedIncludeFiles': 'def',
                'ForcedUsingFiles': 'ge',
                'FunctionLevelLinking': 'true',
                'GenerateXMLDocumentationFiles': 'true',
                'IgnoreStandardIncludePath': 'true',
                'InlineFunctionExpansion': 'OnlyExplicitInline',
                'IntrinsicFunctions': 'true',
                'MinimalRebuild': 'true',
                'ObjectFileName': '$(IntDir)b',
                'OmitDefaultLibName': 'true',
                'OmitFramePointers': 'true',
                'OpenMPSupport': 'true',
                'Optimization': 'Full',
                'PrecompiledHeader': 'NotUsing',  # Actual conversion gives ''
                'PrecompiledHeaderFile': 'StdAfx.hd',
                'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
                'PreprocessKeepComments': 'true',
                'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
                'PreprocessSuppressLineNumbers': 'true',
                'PreprocessToFile': 'true',
                'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
                'RuntimeLibrary': 'MultiThreadedDebugDLL',
                'RuntimeTypeInfo': 'false',
                'ShowIncludes': 'true',
                'SmallerTypeCheck': 'true',
                'StringPooling': 'true',
                'StructMemberAlignment': '4Bytes',
                'SuppressStartupBanner': 'false',
                'TreatWarningAsError': 'true',
                'TreatWChar_tAsBuiltInType': 'false',
                'UndefineAllPreprocessorDefinitions': 'true',
                'UndefinePreprocessorDefinitions': 'wer',
                'UseFullPaths': 'true',
                'WarningLevel': 'Level3',
                'WholeProgramOptimization': 'true',
                'XMLDocumentationFileName': '$(IntDir)c'},
            'Link': {
                'AdditionalDependencies': 'zx',
                'AdditionalLibraryDirectories': 'asd',
                'AdditionalManifestDependencies': 's2',
                'AdditionalOptions': '/mor2',
                'AddModuleNamesToAssembly': 'd1',
                'AllowIsolation': 'false',
                'AssemblyDebug': 'true',
                'AssemblyLinkResource': 'd5',
                'BaseAddress': '23423',
                'CLRImageType': 'ForceSafeILImage',
                'CLRThreadAttribute': 'MTAThreadingAttribute',
                'CLRUnmanagedCodeCheck': 'true',
                'DataExecutionPrevention': '',
                'DelayLoadDLLs': 'd4',
                'DelaySign': 'true',
                'Driver': 'UpOnly',
                'EmbedManagedResourceFile': 'd2',
                'EnableCOMDATFolding': 'false',
                'EnableUAC': 'false',
                'EntryPointSymbol': 'f5',
                'FixedBaseAddress': 'false',
                'ForceSymbolReferences': 'd3',
                'FunctionOrder': 'fssdfsd',
                'GenerateDebugInformation': 'true',
                'GenerateMapFile': 'true',
                'HeapCommitSize': '13',
                'HeapReserveSize': '12',
                'IgnoreAllDefaultLibraries': 'true',
                'IgnoreEmbeddedIDL': 'true',
                'IgnoreSpecificDefaultLibraries': 'flob;flok',
                'ImportLibrary': 'f4',
                'KeyContainer': 'f7',
                'KeyFile': 'f6',
                'LargeAddressAware': 'true',
                'LinkErrorReporting': 'QueueForNextLogin',
                'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
                'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
                'MapExports': 'true',
                'MapFileName': 'd5',
                'MergedIDLBaseFileName': 'f2',
                'MergeSections': 'f5',
                'MidlCommandFile': 'f1',
                'ModuleDefinitionFile': 'sdsd',
                'NoEntryPoint': 'true',
                'OptimizeReferences': 'true',
                'OutputFile': '$(OutDir)$(ProjectName)2.exe',
                'PerUserRedirection': 'true',
                'Profile': 'true',
                'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
                'ProgramDatabaseFile': 'Flob.pdb',
                'RandomizedBaseAddress': 'false',
                'RegisterOutput': 'true',
                'SetChecksum': 'false',
                'ShowProgress': 'LinkVerbose',
                'StackCommitSize': '15',
                'StackReserveSize': '14',
                'StripPrivateSymbols': 'd3',
                'SubSystem': 'Console',
                'SupportUnloadOfDelayLoadedDLL': 'true',
                'SuppressStartupBanner': 'false',
                'SwapRunFromCD': 'true',
                'SwapRunFromNET': 'true',
                'TargetMachine': 'MachineX86',
                'TerminalServerAware': 'false',
                'TurnOffAssemblyGeneration': 'true',
                'TypeLibraryFile': 'f3',
                'TypeLibraryResourceID': '12',
                'UACExecutionLevel': 'RequireAdministrator',
                'UACUIAccess': 'true',
                'Version': '333'},
            'ResourceCompile': {
                'AdditionalIncludeDirectories': 'f3',
                'AdditionalOptions': '/more3',
                'Culture': '0x0c0c',
                'IgnoreStandardIncludePath': 'true',
                'PreprocessorDefinitions': '_UNICODE;UNICODE2',
                'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
                'ShowProgress': 'true'},
            'Manifest': {
                'AdditionalManifestFiles': 'sfsdfsd',
                'AdditionalOptions': 'afdsdafsd',
                'AssemblyIdentity': 'sddfdsadfsa',
                'ComponentFileName': 'fsdfds',
                'GenerateCatalogFiles': 'true',
                'InputResourceManifests': 'asfsfdafs',
                'OutputManifestFile': '$(TargetPath).manifestdfs',
                'RegistrarScriptFile': 'sdfsfd',
                'ReplacementsFile': 'sdffsd',
                'SuppressStartupBanner': 'false',
                'TypeLibraryFile': 'sfsd',
                'UpdateFileHashes': 'true',
                'UpdateFileHashesSearchPath': 'sfsd',
                'VerboseOutput': 'true'},
            'ProjectReference': {
                'LinkLibraryDependencies': 'false',
                'UseLibraryDependencyInputs': 'true'},
            '': {
                'EmbedManifest': 'false',
                'GenerateManifest': 'false',
                'IgnoreImportLibrary': 'true',
                'LinkIncremental': ''
                },
            'ManifestResourceCompile': {
                'ResourceOutputFileName':
                    '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
            }
        # Convert the VS2008-style settings and compare against what the real
        # VS2010 converter produced; no warnings are expected.
        actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
            msvs_settings,
            self.stderr)
        self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
        self._ExpectedWarnings([])
# Run the unit tests when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| mit |
hajgato/easybuild-easyblocks | easybuild/easyblocks/l/libsmm.py | 3 | 8295 | ##
# Copyright 2009-2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing the libsmm library, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import os
import shutil
from distutils.version import LooseVersion
import easybuild.tools.toolchain as toolchain
from easybuild.framework.easyblock import EasyBlock
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.filetools import run_cmd
from easybuild.tools.modules import get_software_root, get_software_version
class EB_libsmm(EasyBlock):
    """
    Support for the CP2K small matrix library
    Notes: - build can take really really long, and no real rebuilding needed for each get_version
           - CP2K can be built without this
    """

    @staticmethod
    def extra_options():
        """Define extra easyconfig parameters specific to libsmm."""
        # default dimensions
        dd = [1,4,5,6,9,13,16,17,22]
        # each entry: [default value, help text, level (CUSTOM = easyblock-specific)]
        extra_vars = {
            'transpose_flavour': [1, "Transpose flavour of routines", CUSTOM],
            'max_tiny_dim': [12, "Maximum tiny dimension", CUSTOM],
            'dims': [dd, "Generate routines for these matrix dims", CUSTOM],
        }
        return EasyBlock.extra_options(extra_vars)

    def configure_step(self):
        """Configure build: change to tools/build_libsmm dir"""
        try:
            dst = 'tools/build_libsmm'
            os.chdir(dst)
            self.log.debug('Change to directory %s' % dst)
        except OSError, err:
            # log.exception raises an error in the legacy easybuild logger,
            # so a failed chdir aborts the build here
            self.log.exception('Failed to change to directory %s: %s' % (dst, err))

    def build_step(self):
        """Build libsmm
        Possible iterations over precision (single/double) and type (real/complex)
        - also type of transpose matrix
        - all set in the config file
        Make the config.in file (is source afterwards in the build)
        """
        # config.in is sourced by the do_all build script in tools/build_libsmm
        fn = 'config.in'
        # template for the generated config file; filled in per datatype below
        cfg_tpl = """# This config file was generated by EasyBuild
# the build script can generate optimized routines packed in a library for
# 1) 'nn' => C=C+MATMUL(A,B)
# 2) 'tn' => C=C+MATMUL(TRANSPOSE(A),B)
# 3) 'nt' => C=C+MATMUL(A,TRANSPOSE(B))
# 4) 'tt' => C=C+MATMUL(TRANPOSE(A),TRANPOSE(B))
#
# select a tranpose_flavor from the list 1 2 3 4
#
transpose_flavor=%(transposeflavour)s
# 1) d => double precision real
# 2) s => single precision real
# 3) z => double precision complex
# 4) c => single precision complex
#
# select a data_type from the list 1 2 3 4
#
data_type=%(datatype)s
# target compiler... this are the options used for building the library.
# They should be aggessive enough to e.g. perform vectorization for the specific CPU (e.g. -ftree-vectorize -march=native),
# and allow some flexibility in reordering floating point expressions (-ffast-math).
# Higher level optimisation (in particular loop nest optimization) should not be used.
#
target_compile="%(targetcompile)s"
# target dgemm link options... these are the options needed to link blas (e.g. -lblas)
# blas is used as a fall back option for sizes not included in the library or in those cases where it is faster
# the same blas library should thus also be used when libsmm is linked.
#
OMP_NUM_THREADS=1
blas_linking="%(LIBBLAS)s"
# matrix dimensions for which optimized routines will be generated.
# since all combinations of M,N,K are being generated the size of the library becomes very large
# if too many sizes are being optimized for. Numbers have to be ascending.
#
dims_small="%(dims)s"
# tiny dimensions are used as primitves and generated in an 'exhaustive' search.
# They should be a sequence from 1 to N,
# where N is a number that is large enough to have good cache performance
# (e.g. for modern SSE cpus 8 to 12)
# Too large (>12?) is not beneficial, but increases the time needed to build the library
# Too small (<8) will lead to a slow library, but the build might proceed quickly
# The minimum number for a successful build is 4
#
dims_tiny="%(tiny_dims)s"
# host compiler... this is used only to compile a few tools needed to build the library.
# The library itself is not compiled this way.
# This compiler needs to be able to deal with some Fortran2003 constructs.
#
host_compile="%(hostcompile)s "
# number of processes to use in parallel for compiling / building and benchmarking the library.
# Should *not* be more than the physical (available) number of cores of the machine
#
tasks=%(tasks)s
"""
        # only GCC is supported for now
        if self.toolchain.comp_family() == toolchain.GCC: #@UndefinedVariable
            hostcompile = os.getenv('F90')
            # optimizations
            opts = "-O2 -funroll-loops -ffast-math -ftree-vectorize -march=native -fno-inline-functions"
            # Depending on the get_version, we need extra options
            extra = ''
            gccVersion = LooseVersion(get_software_version('GCC'))
            if gccVersion >= LooseVersion('4.6'):
                # link-time optimization is only available from GCC 4.6 on
                extra = "-flto"
            targetcompile = "%s %s %s" % (hostcompile, opts, extra)
        else:
            # NOTE(review): in legacy easybuild, log.error raises and aborts;
            # otherwise targetcompile/hostcompile would be unbound below
            self.log.error('No supported compiler found (tried GCC)')

        # LIBBLAS is expected to be set by the toolchain environment
        if not os.getenv('LIBBLAS'):
            self.log.error('No BLAS library specifications found (LIBBLAS not set)!')

        # values substituted into cfg_tpl; 'datatype' is filled in per iteration
        cfgdict = {
            'datatype': None,
            'transposeflavour': self.cfg['transpose_flavour'],
            'targetcompile': targetcompile,
            'hostcompile': hostcompile,
            'dims': ' '.join([str(d) for d in self.cfg['dims']]),
            'tiny_dims': ' '.join([str(d) for d in range(1, self.cfg['max_tiny_dim']+1)]),
            'tasks': self.cfg['parallel'],
            'LIBBLAS': "%s %s" % (os.getenv('LDFLAGS'), os.getenv('LIBBLAS'))
        }

        # configure for various iterations
        # 1 = double precision real, 3 = double precision complex (see template)
        datatypes = [(1, 'double precision real'), (3, 'double precision complex')]
        for (dt, descr) in datatypes:
            cfgdict['datatype'] = dt
            try:
                txt = cfg_tpl % cfgdict
                f = open(fn, 'w')
                f.write(txt)
                f.close()
                self.log.debug("config file %s for datatype %s ('%s'): %s" % (fn, dt, descr, txt))
            except IOError, err:
                self.log.error("Failed to write %s: %s" % (fn, err))

            # rebuild the library from scratch for each datatype
            self.log.info("Building for datatype %s ('%s')..." % (dt, descr))
            run_cmd("./do_clean")
            run_cmd("./do_all")

    def install_step(self):
        """Install CP2K: clean, and copy lib directory to install dir"""
        run_cmd("./do_clean")
        try:
            shutil.copytree('lib', os.path.join(self.installdir, 'lib'))
        except Exception, err:
            self.log.error("Something went wrong during dir lib copying to installdir: %s" % err)

    def sanity_check_step(self):
        """Custom sanity check for libsmm"""
        # the build produces one static library per datatype:
        # 'd' = double real, 'z' = double complex, 'nn' = no transpose
        custom_paths = {
            'files': ["lib/libsmm_%s.a" % x for x in ["dnn", "znn"]],
            'dirs': []
        }
        super(EB_libsmm, self).sanity_check_step(custom_paths=custom_paths)
| gpl-2.0 |
NL66278/OCB | addons/google_account/controllers/main.py | 350 | 1270 | import simplejson
import urllib
import openerp
from openerp import http
from openerp.http import request
import openerp.addons.web.controllers.main as webmain
from openerp.addons.web.http import SessionExpiredException
from werkzeug.exceptions import BadRequest
import werkzeug.utils
class google_auth(http.Controller):

    @http.route('/google_account/authentication', type='http', auth="none")
    def oauth2callback(self, **kw):
        """OAuth2 redirect endpoint: Google sends the user here after the
        consent screen, with either an authorization ``code`` or an ``error``
        in the query string.  The ``state`` parameter carries our own
        round-tripped JSON payload (database, service, final redirect URL).
        """
        # Unpack the state payload we handed to Google when starting the flow.
        oauth_state = simplejson.loads(kw['state'])
        db_name = oauth_state.get('d')
        service_name = oauth_state.get('s')
        redirect_url = oauth_state.get('f')

        registry = openerp.modules.registry.RegistryManager.get(db_name)
        with registry.cursor() as cr:
            auth_code = kw.get('code', False)
            if auth_code:
                # Consent granted: exchange the code for tokens and store them
                # on the matching google.<service> model for the current user.
                service_model = registry.get('google.%s' % service_name)
                service_model.set_all_tokens(cr, request.session.uid, auth_code)
                return werkzeug.utils.redirect(redirect_url)
            google_error = kw.get('error')
            if google_error:
                # Consent refused (or another Google-side error): pass it on.
                return werkzeug.utils.redirect("%s%s%s" % (redirect_url, "?error=", google_error))
            # Neither code nor error: report a generic failure to the caller.
            return werkzeug.utils.redirect("%s%s" % (redirect_url, "?error=Unknown_error"))
| agpl-3.0 |
chaiken/irq-tracing | tools/perf/scripts/python/call-graph-from-postgresql.py | 758 | 11125 | #!/usr/bin/python2
# call-graph-from-postgresql.py: create call-graph from postgresql database
# Copyright (c) 2014, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
# To use this script you will need to have exported data using the
# export-to-postgresql.py script. Refer to that script for details.
#
# Following on from the example in the export-to-postgresql.py script, a
# call-graph can be displayed for the pt_example database like this:
#
# python tools/perf/scripts/python/call-graph-from-postgresql.py pt_example
#
# Note this script supports connecting to remote databases by setting hostname,
# port, username, password, and dbname e.g.
#
# python tools/perf/scripts/python/call-graph-from-postgresql.py "hostname=myhost username=myuser password=mypassword dbname=pt_example"
#
# The result is a GUI window with a tree representing a context-sensitive
# call-graph. Expanding a couple of levels of the tree and adjusting column
# widths to suit will display something like:
#
# Call Graph: pt_example
# Call Path Object Count Time(ns) Time(%) Branch Count Branch Count(%)
# v- ls
# v- 2638:2638
# v- _start ld-2.19.so 1 10074071 100.0 211135 100.0
# |- unknown unknown 1 13198 0.1 1 0.0
# >- _dl_start ld-2.19.so 1 1400980 13.9 19637 9.3
# >- _d_linit_internal ld-2.19.so 1 448152 4.4 11094 5.3
# v-__libc_start_main@plt ls 1 8211741 81.5 180397 85.4
# >- _dl_fixup ld-2.19.so 1 7607 0.1 108 0.1
# >- __cxa_atexit libc-2.19.so 1 11737 0.1 10 0.0
# >- __libc_csu_init ls 1 10354 0.1 10 0.0
# |- _setjmp libc-2.19.so 1 0 0.0 4 0.0
# v- main ls 1 8182043 99.6 180254 99.9
#
# Points to note:
# The top level is a command name (comm)
# The next level is a thread (pid:tid)
# Subsequent levels are functions
# 'Count' is the number of calls
# 'Time' is the elapsed time until the function returns
# Percentages are relative to the level above
# 'Branch Count' is the total number of branches for that function and all
# functions that it calls
import sys
from PySide.QtCore import *
from PySide.QtGui import *
from PySide.QtSql import *
from decimal import *
class TreeItem():
    def __init__(self, db, row, parent_item):
        """One node of the call-graph tree model.

        db           open database connection used for all child queries
        row          this node's index within its parent's children
        parent_item  parent TreeItem, or a falsy value for the root node
        """
        self.db = db
        self.row = row
        self.parent_item = parent_item
        # children are fetched lazily; True once the child query has run
        self.query_done = False;
        self.child_count = 0
        self.child_items = []
        # one entry per display column (see columnCount)
        self.data = ["", "", "", "", "", "", ""]
        self.comm_id = 0
        self.thread_id = 0
        # 1 is the root of the call_paths table
        self.call_path_id = 1
        self.branch_count = 0
        self.time = 0
        # the root node eagerly loads the top-level (per-command) children
        if not parent_item:
            self.setUpRoot()
    def setUpRoot(self):
        """Populate the root node with one child per command (comm) found
        in the database."""
        self.query_done = True
        query = QSqlQuery(self.db)
        ret = query.exec_('SELECT id, comm FROM comms')
        if not ret:
            raise Exception("Query failed: " + query.lastError().text())
        while query.next():
            # skip the placeholder row with id 0
            if not query.value(0):
                continue
            child_item = TreeItem(self.db, self.child_count, self)
            self.child_items.append(child_item)
            self.child_count += 1
            child_item.setUpLevel1(query.value(0), query.value(1))
    def setUpLevel1(self, comm_id, comm):
        """Populate this node as a command-level entry and eagerly create one
        child per thread associated with the command.

        comm_id  database id of the command
        comm     command name, shown in the first column
        """
        self.query_done = True;
        self.comm_id = comm_id
        self.data[0] = comm
        self.child_items = []
        self.child_count = 0
        query = QSqlQuery(self.db)
        ret = query.exec_('SELECT thread_id, ( SELECT pid FROM threads WHERE id = thread_id ), ( SELECT tid FROM threads WHERE id = thread_id ) FROM comm_threads WHERE comm_id = ' + str(comm_id))
        if not ret:
            raise Exception("Query failed: " + query.lastError().text())
        while query.next():
            child_item = TreeItem(self.db, self.child_count, self)
            self.child_items.append(child_item)
            self.child_count += 1
            # columns: thread_id, pid, tid
            child_item.setUpLevel2(comm_id, query.value(0), query.value(1), query.value(2))
def setUpLevel2(self, comm_id, thread_id, pid, tid):
self.comm_id = comm_id
self.thread_id = thread_id
self.data[0] = str(pid) + ":" + str(tid)
    def getChildItem(self, row):
        """Return the child node at index *row* (children must already be loaded)."""
        return self.child_items[row]
    def getParentItem(self):
        """Return this node's parent TreeItem (falsy for the root)."""
        return self.parent_item
    def getRow(self):
        """Return this node's index within its parent's children."""
        return self.row
def timePercent(self, b):
if not self.time:
return "0.0"
x = (b * Decimal(100)) / self.time
return str(x.quantize(Decimal('.1'), rounding=ROUND_HALF_UP))
def branchPercent(self, b):
if not self.branch_count:
return "0.0"
x = (b * Decimal(100)) / self.branch_count
return str(x.quantize(Decimal('.1'), rounding=ROUND_HALF_UP))
def addChild(self, call_path_id, name, dso, count, time, branch_count):
child_item = TreeItem(self.db, self.child_count, self)
child_item.comm_id = self.comm_id
child_item.thread_id = self.thread_id
child_item.call_path_id = call_path_id
child_item.branch_count = branch_count
child_item.time = time
child_item.data[0] = name
if dso == "[kernel.kallsyms]":
dso = "[kernel]"
child_item.data[1] = dso
child_item.data[2] = str(count)
child_item.data[3] = str(time)
child_item.data[4] = self.timePercent(time)
child_item.data[5] = str(branch_count)
child_item.data[6] = self.branchPercent(branch_count)
self.child_items.append(child_item)
self.child_count += 1
def selectCalls(self):
self.query_done = True;
query = QSqlQuery(self.db)
ret = query.exec_('SELECT id, call_path_id, branch_count, call_time, return_time, '
'( SELECT name FROM symbols WHERE id = ( SELECT symbol_id FROM call_paths WHERE id = call_path_id ) ), '
'( SELECT short_name FROM dsos WHERE id = ( SELECT dso_id FROM symbols WHERE id = ( SELECT symbol_id FROM call_paths WHERE id = call_path_id ) ) ), '
'( SELECT ip FROM call_paths where id = call_path_id ) '
'FROM calls WHERE parent_call_path_id = ' + str(self.call_path_id) + ' AND comm_id = ' + str(self.comm_id) + ' AND thread_id = ' + str(self.thread_id) +
'ORDER BY call_path_id')
if not ret:
raise Exception("Query failed: " + query.lastError().text())
last_call_path_id = 0
name = ""
dso = ""
count = 0
branch_count = 0
total_branch_count = 0
time = 0
total_time = 0
while query.next():
if query.value(1) == last_call_path_id:
count += 1
branch_count += query.value(2)
time += query.value(4) - query.value(3)
else:
if count:
self.addChild(last_call_path_id, name, dso, count, time, branch_count)
last_call_path_id = query.value(1)
name = query.value(5)
dso = query.value(6)
count = 1
total_branch_count += branch_count
total_time += time
branch_count = query.value(2)
time = query.value(4) - query.value(3)
if count:
self.addChild(last_call_path_id, name, dso, count, time, branch_count)
total_branch_count += branch_count
total_time += time
# Top level does not have time or branch count, so fix that here
if total_branch_count > self.branch_count:
self.branch_count = total_branch_count
if self.branch_count:
for child_item in self.child_items:
child_item.data[6] = self.branchPercent(child_item.branch_count)
if total_time > self.time:
self.time = total_time
if self.time:
for child_item in self.child_items:
child_item.data[4] = self.timePercent(child_item.time)
	def childCount(self):
		# children are loaded lazily on first access
		if not self.query_done:
			self.selectCalls()
		return self.child_count
	def columnCount(self):
		return 7
	def columnHeader(self, column):
		# trailing spaces pad the right-aligned numeric columns
		headers = ["Call Path", "Object", "Count ", "Time (ns) ", "Time (%) ", "Branch Count ", "Branch Count (%) "]
		return headers[column]
	def getData(self, column):
		return self.data[column]
class TreeModel(QAbstractItemModel):
	"""Qt item model exposing the lazily-built TreeItem hierarchy."""
	def __init__(self, db, parent=None):
		super(TreeModel, self).__init__(parent)
		self.db = db
		# invisible root item; its children are the top-level comm rows
		self.root = TreeItem(db, 0, None)
	def columnCount(self, parent):
		return self.root.columnCount()
	def rowCount(self, parent):
		if parent.isValid():
			parent_item = parent.internalPointer()
		else:
			parent_item = self.root
		return parent_item.childCount()
	def headerData(self, section, orientation, role):
		# right-align the numeric columns (everything past Object)
		if role == Qt.TextAlignmentRole:
			if section > 1:
				return Qt.AlignRight
		if role != Qt.DisplayRole:
			return None
		if orientation != Qt.Horizontal:
			return None
		return self.root.columnHeader(section)
	def parent(self, child):
		child_item = child.internalPointer()
		if child_item is self.root:
			# the root has no parent index
			return QModelIndex()
		parent_item = child_item.getParentItem()
		return self.createIndex(parent_item.getRow(), 0, parent_item)
	def index(self, row, column, parent):
		if parent.isValid():
			parent_item = parent.internalPointer()
		else:
			parent_item = self.root
		child_item = parent_item.getChildItem(row)
		return self.createIndex(row, column, child_item)
	def data(self, index, role):
		# mirror headerData: numeric columns right-aligned
		if role == Qt.TextAlignmentRole:
			if index.column() > 1:
				return Qt.AlignRight
		if role != Qt.DisplayRole:
			return None
		index_item = index.internalPointer()
		return index_item.getData(index.column())
class MainWindow(QMainWindow):
	"""Top-level window showing the call-graph tree for one database."""
	def __init__(self, db, dbname, parent=None):
		super(MainWindow, self).__init__(parent)
		self.setObjectName("MainWindow")
		self.setWindowTitle("Call Graph: " + dbname)
		self.move(100, 100)
		self.resize(800, 600)
		style = self.style()
		icon = style.standardIcon(QStyle.SP_MessageBoxInformation)
		# stray trailing semicolon removed (un-Pythonic leftover)
		self.setWindowIcon(icon)
		self.model = TreeModel(db)
		self.view = QTreeView()
		self.view.setModel(self.model)
		self.setCentralWidget(self.view)
if __name__ == '__main__':
	if (len(sys.argv) < 2):
		# Write straight to stderr: the original "print >> sys.stderr"
		# statement is Python-2-only syntax; this form works on 2 and 3.
		sys.stderr.write("Usage is: call-graph-from-postgresql.py <database name>\n")
		raise Exception("Too few arguments")
	dbname = sys.argv[1]
	db = QSqlDatabase.addDatabase('QPSQL')
	# The argument may be a bare database name or a space-separated list
	# of key=value connection options (hostname, port, username,
	# password, dbname).
	opts = dbname.split()
	for opt in opts:
		if '=' in opt:
			opt = opt.split('=')
			if opt[0] == 'hostname':
				db.setHostName(opt[1])
			elif opt[0] == 'port':
				db.setPort(int(opt[1]))
			elif opt[0] == 'username':
				db.setUserName(opt[1])
			elif opt[0] == 'password':
				db.setPassword(opt[1])
			elif opt[0] == 'dbname':
				dbname = opt[1]
		else:
			dbname = opt
	db.setDatabaseName(dbname)
	if not db.open():
		raise Exception("Failed to open database " + dbname + " error: " + db.lastError().text())
	app = QApplication(sys.argv)
	window = MainWindow(db, dbname)
	window.show()
	err = app.exec_()
	db.close()
	sys.exit(err)
| gpl-2.0 |
pyrocko/pyrocko | src/io/resp.py | 1 | 17240 | # http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------
from __future__ import absolute_import, division, print_function
import time
import re
import logging
from pyrocko import util, guts
from pyrocko.io import io_common
from pyrocko.io import stationxml as sxml
logger = logging.getLogger('pyrocko.io.resp')
class RespError(io_common.FileLoadError):
    # Raised for malformed or unsupported RESP content.
    pass
def ppolezero(s):
    """Parse a pole/zero line: index, real, imaginary, real_err, imag_err."""
    v = s.split()
    return sxml.PoleZero(
        number=int(v[0]),
        real=sxml.FloatNoUnit(
            value=float(v[1]),
            # an error of 0.0 means "not specified" -> None
            plus_error=float(v[3]) or None,
            minus_error=float(v[3]) or None),
        imaginary=sxml.FloatNoUnit(
            value=float(v[2]),
            plus_error=float(v[4]) or None,
            minus_error=float(v[4]) or None))
def pcfu(s):
    """Parse the last two tokens of *s* as a value and its symmetric error.

    An error of 0.0 is mapped to None ("not specified").
    """
    v = list(map(float, s.split()))
    # v already holds floats; the former float(v[-2]) / float(v[-1])
    # re-conversions were redundant.
    value, error = v[-2], v[-1]
    return sxml.FloatWithUnit(
        value=value,
        plus_error=error or None,
        minus_error=error or None)
def pnc(s):
    """Parse an 'index value' numerator coefficient line."""
    v = list(map(float, s.split()))
    return sxml.NumeratorCoefficient(i=int(v[0]), value=float(v[1]))
def punit(s):
    """Return the unit name: the first whitespace-separated token of *s*."""
    unit_token = s.split()[0]
    return str(unit_token.decode('ascii'))
def psymmetry(s):
    """Map a FIR symmetry code (A/B/C, case-insensitive) to its name.

    Raises RespError for unknown codes; previously an unknown code leaked
    a bare KeyError, unlike the sibling ptftype()/pcftype() parsers.
    """
    try:
        return {
            b'A': 'NONE',
            b'B': 'ODD',
            b'C': 'EVEN'}[s.upper()]
    except KeyError:
        raise RespError('unknown symmetry type: %r' % s)
def ptftype(s):
    """Map a poles/zeros transfer function type code to its full name."""
    prefix_names = (
        (b'A', 'LAPLACE (RADIANS/SECOND)'),
        (b'B', 'LAPLACE (HERTZ)'),
        (b'D', 'DIGITAL (Z-TRANSFORM)'),
    )
    for prefix, full_name in prefix_names:
        if s.startswith(prefix):
            return full_name
    raise RespError('unknown pz transfer function type')
def pcftype(s):
    """Map a coefficients transfer function type code to its full name."""
    prefix_names = (
        (b'A', 'ANALOG (RADIANS/SECOND)'),
        (b'B', 'ANALOG (HERTZ)'),
        (b'D', 'DIGITAL'),
    )
    for prefix, full_name in prefix_names:
        if s.startswith(prefix):
            return full_name
    raise RespError('unknown cf transfer function type')
def pblock_060(content):
    """Blockette 60: yields only the stage number, no payload object."""
    stage_number = int(get1(content, b'04'))
    return stage_number, None
def pblock_053(content):
    """Blockette 53: poles & zeros response for a numbered stage."""
    stage_number = int(get1(content, b'04'))
    pzs = sxml.PolesZeros(
        pz_transfer_function_type=ptftype(get1(content, b'03')),
        input_units=sxml.Units(name=punit(get1(content, b'05'))),
        output_units=sxml.Units(name=punit(get1(content, b'06'))),
        normalization_factor=float(get1(content, b'07')),
        normalization_frequency=sxml.Frequency(
            value=float(get1(content, b'08'))),
        zero_list=list(map(ppolezero, getn(content, b'10-13'))),
        pole_list=list(map(ppolezero, getn(content, b'15-18'))))
    # renumber poles and zeros sequentially from 0
    for i, x in enumerate(pzs.zero_list):
        x.number = i
    for i, x in enumerate(pzs.pole_list):
        x.number = i
    return stage_number, pzs
def pblock_043(content):
    """Blockette 43: poles & zeros dictionary variant.

    Stage number -1 means "keep the stage of the preceding blockette"
    (see the istage handling in iload_fh()); field numbers are shifted
    relative to blockette 53.
    """
    stage_number = -1
    pzs = sxml.PolesZeros(
        pz_transfer_function_type=ptftype(get1(content, b'05')),
        input_units=sxml.Units(name=punit(get1(content, b'06'))),
        output_units=sxml.Units(name=punit(get1(content, b'07'))),
        normalization_factor=float(get1(content, b'08')),
        normalization_frequency=sxml.Frequency(
            value=float(get1(content, b'09'))),
        zero_list=list(map(ppolezero, getn(content, b'11-14'))),
        pole_list=list(map(ppolezero, getn(content, b'16-19'))))
    for i, x in enumerate(pzs.zero_list):
        x.number = i
    for i, x in enumerate(pzs.pole_list):
        x.number = i
    return stage_number, pzs
def pblock_058(content):
    """Blockette 58: sensitivity/gain for a numbered stage."""
    stage_number = int(get1(content, b'03'))
    gain = sxml.Gain(
        value=float(get1(content, b'04')),
        frequency=float(get1(content, b'05').split()[0]))
    return stage_number, gain
def pblock_048(content):
    """Blockette 48: gain dictionary variant (stage -1, shifted fields)."""
    stage_number = -1
    gain = sxml.Gain(
        value=float(get1(content, b'05')),
        frequency=float(get1(content, b'06').split()[0]))
    return stage_number, gain
def pblock_054(content):
    """Blockette 54: coefficients response for a numbered stage."""
    stage_number = int(get1(content, b'04'))
    cfs = sxml.Coefficients(
        cf_transfer_function_type=pcftype(get1(content, b'03')),
        input_units=sxml.Units(name=punit(get1(content, b'05'))),
        output_units=sxml.Units(name=punit(get1(content, b'06'))),
        numerator_list=list(map(pcfu, getn(content, b'08-09'))),
        denominator_list=list(map(pcfu, getn(content, b'11-12'))))
    return stage_number, cfs
def pblock_044(content):
    """Blockette 44: coefficients dictionary variant (stage -1)."""
    stage_number = -1
    cfs = sxml.Coefficients(
        cf_transfer_function_type=pcftype(get1(content, b'05')),
        input_units=sxml.Units(name=punit(get1(content, b'06'))),
        output_units=sxml.Units(name=punit(get1(content, b'07'))),
        numerator_list=list(map(pcfu, getn(content, b'09-10'))),
        denominator_list=list(map(pcfu, getn(content, b'12-13'))))
    return stage_number, cfs
def pblock_057(content):
    """Blockette 57: decimation info for a numbered stage."""
    stage_number = int(get1(content, b'03'))
    deci = sxml.Decimation(
        input_sample_rate=sxml.Frequency(value=float(get1(content, b'04'))),
        factor=int(get1(content, b'05')),
        offset=int(get1(content, b'06')),
        delay=sxml.FloatWithUnit(value=float(get1(content, b'07'))),
        correction=sxml.FloatWithUnit(value=float(get1(content, b'08'))))
    return stage_number, deci
def pblock_047(content):
    """Blockette 47: decimation dictionary variant (stage -1)."""
    stage_number = -1
    deci = sxml.Decimation(
        input_sample_rate=sxml.Frequency(value=float(get1(content, b'05'))),
        factor=int(get1(content, b'06')),
        offset=int(get1(content, b'07')),
        delay=sxml.FloatWithUnit(value=float(get1(content, b'08'))),
        correction=sxml.FloatWithUnit(value=float(get1(content, b'09'))))
    return stage_number, deci
def pblock_061(content):
    """Blockette 61: FIR response for a numbered stage."""
    stage_number = int(get1(content, b'03'))
    fir = sxml.FIR(
        name=get1(content, b'04', optional=True),
        input_units=sxml.Units(name=punit(get1(content, b'06'))),
        output_units=sxml.Units(name=punit(get1(content, b'07'))),
        symmetry=psymmetry(get1(content, b'05')),
        numerator_coefficient_list=list(map(pnc, getn(content, b'09'))))
    return stage_number, fir
def pblock_041(content):
    """Blockette 41: FIR dictionary variant (stage -1)."""
    stage_number = -1
    fir = sxml.FIR(
        name=get1(content, b'04', optional=True),
        input_units=sxml.Units(name=punit(get1(content, b'06'))),
        output_units=sxml.Units(name=punit(get1(content, b'07'))),
        symmetry=psymmetry(get1(content, b'05')),
        numerator_coefficient_list=list(map(pnc, getn(content, b'09'))))
    return stage_number, fir
# Blockette number -> name and optional parser.  Entries without a 'parse'
# function (050, 052) are only used structurally by parse3() to delimit
# stations and channels.
bdefs = {
    b'050': {
        'name': 'Station Identifier Blockette',
    },
    b'052': {
        'name': 'Channel Identifier Blockette',
    },
    b'060': {
        'name': 'Response Reference Information',
        'parse': pblock_060,
    },
    b'053': {
        'name': 'Response (Poles & Zeros) Blockette',
        'parse': pblock_053,
    },
    b'043': {
        'name': 'Response (Poles & Zeros) Dictionary Blockette',
        'parse': pblock_043,
    },
    b'054': {
        'name': 'Response (Coefficients) Blockette',
        'parse': pblock_054,
    },
    b'044': {
        'name': 'Response (Coefficients) Dictionary Blockette',
        'parse': pblock_044,
    },
    b'057': {
        'name': 'Decimation Blockette',
        'parse': pblock_057,
    },
    b'047': {
        'name': 'Decimation Dictionary Blockette',
        'parse': pblock_047,
    },
    b'058': {
        'name': 'Channel Sensitivity/Gain Blockette',
        'parse': pblock_058,
    },
    b'048': {
        'name': 'Channel Sensitivity/Gain Dictionary Blockette',
        'parse': pblock_048,
    },
    b'061': {
        'name': 'FIR Response Blockette',
        'parse': pblock_061,
    },
    b'041': {
        'name': 'FIR Dictionary Blockette',
        'parse': pblock_041,
    },
}
def parse1(f):
    """Yield (block, field, key, value) for each RESP data line in *f*.

    Comment lines (starting with '#') and non-matching lines are skipped.
    """
    # hoisted: compile the line pattern once instead of per line
    line_re = re.compile(
        br'\s*(#(.+)|B(\d\d\d)F(\d\d(-\d\d)?)\s+(([^:]+):\s*)?(.*))')
    for raw_line in f:
        m = line_re.match(raw_line.rstrip(b'\r\n'))
        if m is None:
            continue
        if m.group(2):
            # comment line
            continue
        if m.group(3):
            yield m.group(3), m.group(4), m.group(7), m.group(8)
def parse2(f):
    """Group parse1() tuples into (blockette, content) chunks.

    A new chunk begins when the blockette number changes or field 03
    repeats (apparently the first field of each blockette instance —
    confirm against the SEED manual).
    """
    current_b = None
    content = []
    for block, field, key, value in parse1(f):
        if current_b != block or field == b'03':
            if current_b is not None:
                yield current_b, content
            current_b = block
            content = []
        content.append((field, key, value))
    # flush the trailing chunk
    if current_b is not None:
        yield current_b, content
def parse3(f):
    """Group blockettes into [station, channel, responses] triples.

    state[0]: blockette 050 content (station), state[1]: blockette 052
    content (channel), state[2]: remaining (block, content) pairs.
    A new triple starts at each 050 once both parts are present.
    """
    state = [None, None, []]
    for block, content in parse2(f):
        if block == b'050' and state[0] and state[1]:
            yield state
            state = [None, None, []]
        if block == b'050':
            state[0] = content
        elif block == b'052':
            state[1] = content
        else:
            state[2].append((block, content))
    if state[0] and state[1]:
        yield state
def get1(content, field, default=None, optional=False):
    """Return the first value stored under *field* in *content*.

    Missing field: None when *optional*, else *default* when given,
    else RespError.
    """
    for entry_field, _, entry_value in content:
        if entry_field == field:
            return entry_value
    if optional:
        return None
    if default is not None:
        return default
    raise RespError('key not found: %s' % field)
def getn(content, field):
    """Return all values stored under *field*, in order of appearance."""
    return [value for field_, _, value in content if field_ == field]
def pdate(s):
    """Parse a RESP date string to a timestamp; None for open-ended dates."""
    template = b'0000,001,00:00:00'
    if len(s) < len(template):
        # pad truncated dates with defaults for the missing fields
        s += template[len(s):]
    # years 2599/2999 and "No ..." mark an open (unset) end date
    if s.startswith((b'2599', b'2999')) or s.lower().startswith(b'no'):
        return None
    parts = s.split(b',')
    if len(parts) > 2 and parts[1] == b'000':
        # day-of-year 000 is invalid; normalize to 001
        s = b','.join([parts[0], b'001'] + parts[2:])
    return util.str_to_time(
        str(s.decode('ascii')), format='%Y,%j,%H:%M:%S.OPTFRAC')
def ploc(s):
    """Decode a location code; '??' stands for the empty location."""
    return '' if s == b'??' else str(s.decode('ascii'))


def pcode(s):
    """Decode an ASCII code field to a native string."""
    return str(s.decode('ascii'))
def gett(lst, t):
    """Return every element of *lst* that is an instance of *t*."""
    return [item for item in lst if isinstance(item, t)]


def gett1o(lst, t):
    """Return the single instance of *t* in *lst*, or None if absent."""
    matches = [item for item in lst if isinstance(item, t)]
    if len(matches) > 1:
        raise RespError('duplicate entry')
    return matches[0] if matches else None


def gett1(lst, t):
    """Return exactly one instance of *t* from *lst*; raise otherwise."""
    matches = [item for item in lst if isinstance(item, t)]
    if not matches:
        raise RespError('entry not found')
    if len(matches) > 1:
        raise RespError('duplicate entry')
    return matches[0]
class ChannelResponse(guts.Object):
    '''Response information + channel codes and time span.'''
    # (network, station, location, channel) codes
    codes = guts.Tuple.T(4, guts.String.T(default=''))
    # validity window of this response epoch
    start_date = guts.Timestamp.T()
    end_date = guts.Timestamp.T()
    response = sxml.Response.T()
def iload_fh(f):
    '''Read RESP information from open file handle.

    Yields one :py:class:`ChannelResponse` per station/channel epoch.
    Raises :py:class:`RespError` on malformed input.
    '''
    for sc, cc, rcs in parse3(f):
        # (network, station, location, channel) codes for this epoch
        nslc = (
            pcode(get1(sc, b'16')),
            pcode(get1(sc, b'03')),
            ploc(get1(cc, b'03', b'')),
            pcode(get1(cc, b'04')))
        try:
            tmin = pdate(get1(cc, b'22'))
            tmax = pdate(get1(cc, b'23'))
        except util.TimeStrError as e:
            raise RespError('invalid date in RESP information. (%s)' % str(e))
        # Group parsed payloads by stage.  Dictionary blockettes report
        # stage -1 and inherit the current stage number.
        stage_elements = {}
        istage = -1
        for block, content in rcs:
            if block not in bdefs:
                raise RespError('unknown block type found: %s' % block)
            istage_temp, x = bdefs[block]['parse'](content)
            if istage_temp != -1:
                istage = istage_temp
            if x is None:
                continue
            x.validate()
            if istage not in stage_elements:
                stage_elements[istage] = []
            stage_elements[istage].append(x)
        istages = sorted(stage_elements.keys())
        stages = []
        totalgain = None
        for istage in istages:
            elements = stage_elements[istage]
            if istage == 0:
                # stage 0 carries the overall sensitivity, not a real stage
                totalgain = gett1(elements, sxml.Gain)
            else:
                stage = sxml.ResponseStage(
                    number=istage,
                    poles_zeros_list=gett(elements, sxml.PolesZeros),
                    coefficients_list=gett(elements, sxml.Coefficients),
                    fir=gett1o(elements, sxml.FIR),
                    decimation=gett1o(elements, sxml.Decimation),
                    stage_gain=gett1o(elements, sxml.Gain))
                stages.append(stage)
        if stages:
            resp = sxml.Response(
                stage_list=stages)
            if totalgain:
                totalgain_value = totalgain.value
                totalgain_frequency = totalgain.frequency
            else:
                # no stage-0 gain reported: reconstruct it as the product
                # of the per-stage gains
                totalgain_value = 1.
                gain_frequencies = []
                for stage in stages:
                    totalgain_value *= stage.stage_gain.value
                    gain_frequencies.append(stage.stage_gain.frequency)
                totalgain_frequency = gain_frequencies[0]
                if not all(f == totalgain_frequency for f in gain_frequencies):
                    # Logger.warn is a deprecated alias of Logger.warning.
                    logger.warning(
                        'no total gain reported and inconsistent gain '
                        'frequency values found in resp file for %s.%s.%s.%s: '
                        'omitting total gain and frequency from created '
                        'instrument sensitivity object' % nslc)
                    totalgain_value = None
                    totalgain_frequency = None
            resp.instrument_sensitivity = sxml.Sensitivity(
                value=totalgain_value,
                frequency=totalgain_frequency,
                input_units=stages[0].input_units,
                output_units=stages[-1].output_units)
            yield ChannelResponse(
                codes=nslc,
                start_date=tmin,
                end_date=tmax,
                response=resp)
        else:
            raise RespError('incomplete response information')
# Derive the filename/dirname/glob/generic loader variants from the single
# file-handle reader above.
iload_filename, iload_dirname, iload_glob, iload = util.make_iload_family(
    iload_fh, 'RESP', ':py:class:`ChannelResponse`')
def make_stationxml(pyrocko_stations, channel_responses):
    '''Create stationxml from pyrocko station list and RESP information.

    :param pyrocko_stations: list of :py:class:`pyrocko.model.Station` objects
    :param channel_responses: iterable yielding :py:class:`ChannelResponse`
        objects
    :returns: :py:class:`pyrocko.fdsn.station.FDSNStationXML` object with
        merged information

    If no station information is available for any response information, it
    is skipped and a warning is emitted.
    '''
    pstations = dict((s.nsl(), s) for s in pyrocko_stations)
    networks = {}
    stations = {}
    # create one Network per network code and one Station per (net, sta)
    for (net, sta, loc) in sorted(pstations.keys()):
        pstation = pstations[net, sta, loc]
        if net not in networks:
            networks[net] = sxml.Network(code=net)
        if (net, sta) not in stations:
            stations[net, sta] = sxml.Station(
                code=sta,
                latitude=sxml.Latitude(pstation.lat),
                longitude=sxml.Longitude(pstation.lon),
                elevation=sxml.Distance(pstation.elevation))
            networks[net].station_list.append(stations[net, sta])
    # attach one Channel per response epoch, when the station is known
    for cr in channel_responses:
        net, sta, loc, cha = cr.codes
        if (net, sta, loc) in pstations:
            pstation = pstations[net, sta, loc]
            pchannel = pstation.get_channel(cha)
            extra = {}
            if pchannel is not None:
                # azimuth/dip are optional in the pyrocko model
                if pchannel.azimuth is not None:
                    extra['azimuth'] = sxml.Azimuth(pchannel.azimuth)
                if pchannel.dip is not None:
                    extra['dip'] = sxml.Dip(pchannel.dip)
            channel = sxml.Channel(
                code=cha,
                location_code=loc,
                start_date=cr.start_date,
                end_date=cr.end_date,
                latitude=sxml.Latitude(pstation.lat),
                longitude=sxml.Longitude(pstation.lon),
                elevation=sxml.Distance(pstation.elevation),
                depth=sxml.Distance(pstation.depth),
                response=cr.response,
                **extra)
            stations[net, sta].channel_list.append(channel)
        else:
            logger.warning('no station information for %s.%s.%s' %
                           (net, sta, loc))
    for station in stations.values():
        station.channel_list.sort(key=lambda c: (c.location_code, c.code))
    return sxml.FDSNStationXML(
        source='Converted from Pyrocko stations file and RESP information',
        created=time.time(),
        network_list=[networks[net_] for net_ in sorted(networks.keys())])
if __name__ == '__main__':
    import sys
    from pyrocko.model.station import load_stations
    util.setup_logging(__name__)
    if len(sys.argv) < 2:
        sys.exit('usage: python -m pyrocko.fdsn.resp <stations> <resp> ...')
    stations = load_stations(sys.argv[1])
    # Use a distinct local name: the previous code rebound 'sxml', shadowing
    # the stationxml module imported at the top of this file.
    station_xml = make_stationxml(stations, iload(sys.argv[2:]))
    print(station_xml.dump_xml())
| gpl-3.0 |
tlodge/dreamplug_kernel | Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct

# Read from the raw byte stream where available: on Python 3, sys.stdin
# yields text and struct.unpack() would fail on it; on Python 2 there is
# no .buffer attribute and sys.stdin already produces byte strings.
stdin = getattr(sys.stdin, 'buffer', sys.stdin)

i = 0
while True:
	buf = stdin.read(4)

	if len(buf) == 0:
		break
	elif len(buf) != 4:
		sys.stdout.write("\n")
		sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
		sys.exit(1)

	if i > 0:
		sys.stdout.write(" ")
	# emit "index=value" with a hex index and decimal le32 value
	sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
	i += 1

sys.stdout.write("\n")
| gpl-2.0 |
mintoo/NetDim | pyNMS/views/geographical_view.py | 2 | 7551 | # Copyright (C) 2017 Antoine Fourmy <antoine dot fourmy at gmail dot com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from collections import OrderedDict
from os.path import join
from .base_view import BaseView
from math import asin, cos, radians, sin, sqrt
try:
import shapefile
import shapely.geometry
from pyproj import Proj
except ImportError as e:
import warnings
warnings.warn(str(e))
warnings.warn('SHP librairies missing: pyNMS will not start')
warnings.warn('please install "pyshp", "shapely", and "pyproj" with pip')
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import Qt
from PyQt5.QtGui import (
QBrush,
QPen,
QColor,
QDrag,
QPainter,
QPixmap
)
from PyQt5.QtWidgets import (
QFrame,
QPushButton,
QWidget,
QApplication,
QLabel,
QGraphicsItem,
QGraphicsLineItem,
QGraphicsPixmapItem,
QGroupBox,
)
class GeographicalView(BaseView):

    def __init__(self, controller):
        super().__init__(controller)
        # geographical background drawn on the scene
        self.world_map = Map(self)

    def update_geographical_coordinates(self, *gnodes):
        """Store each node's canvas position as longitude / latitude."""
        for gnode in gnodes:
            node = gnode.node
            node.longitude, node.latitude = (
                self.world_map.to_geographical_coordinates(gnode.x, gnode.y))

    def update_logical_coordinates(self, *gnodes):
        """Store each node's canvas position as its logical coordinates."""
        for gnode in gnodes:
            gnode.node.logical_x, gnode.node.logical_y = gnode.x, gnode.y

    def move_to_geographical_coordinates(self, *gnodes):
        """Place nodes at the canvas position of their stored lon / lat."""
        gnodes = gnodes or self.all_gnodes()
        for gnode in gnodes:
            gnode.x, gnode.y = self.world_map.to_canvas_coordinates(
                gnode.node.longitude,
                gnode.node.latitude
            )

    def move_to_logical_coordinates(self, *gnodes):
        """Place nodes back at their stored logical coordinates."""
        gnodes = gnodes or self.all_gnodes()
        for gnode in gnodes:
            gnode.x, gnode.y = gnode.node.logical_x, gnode.node.logical_y

    def haversine_distance(self, s, d):
        """Great-circle distance in km between two nodes (decimal degrees)."""
        lon_s, lat_s, lon_d, lat_d = map(
            radians, (s.longitude, s.latitude, d.longitude, d.latitude))
        delta_lon, delta_lat = lon_d - lon_s, lat_d - lat_s
        a = sin(delta_lat/2)**2 + cos(lat_s)*cos(lat_d)*sin(delta_lon/2)**2
        # 6371 km: mean earth radius
        return 2*asin(sqrt(a))*6371
class Map():

    # available projections, keyed by display name
    projections = OrderedDict([
        ('Spherical', Proj('+proj=ortho +lat_0=48 +lon_0=17')),
        ('Mercator', Proj(init='epsg:3395')),
        ('WGS84', Proj(init='epsg:3857')),
        ('ETRS89 - LAEA Europe', Proj("+init=EPSG:3035"))
    ])
    def __init__(self, view):
        self.view = view
        self.proj = 'Spherical'
        # ratio: canvas units per projected metre; offset: canvas origin
        self.ratio, self.offset = 1/1000, (0, 0)
        self.display = True
        self.polygons = self.view.scene.createItemGroup([])
        # brush for water and lands
        self.water_brush = QBrush(QColor(64, 164, 223))
        self.land_brush = QBrush(QColor(52, 165, 111))
        self.land_pen = QPen(QColor(52, 165, 111))
    def to_geographical_coordinates(self, x, y):
        # canvas (x, y) -> (longitude, latitude); y axis is inverted
        px, py = (x - self.offset[0])/self.ratio, (self.offset[1] - y)/self.ratio
        return self.projections[self.proj](px, py, inverse=True)
    def to_canvas_coordinates(self, longitude, latitude):
        # (longitude, latitude) -> canvas (x, y)
        px, py = self.projections[self.proj](longitude, latitude)
        return px*self.ratio + self.offset[0], -py*self.ratio + self.offset[1]
    def draw_water(self):
        if self.proj in ('Spherical', 'ETRS89 - LAEA Europe'):
            # circular earth disk centered on the projection origin
            cx, cy = self.to_canvas_coordinates(17, 48)
            # if the projection is ETRS89, we need the diameter and not the radius
            R = 6371000*self.ratio*(1 if self.proj == 'Spherical' else 2)
            earth_water = QtWidgets.QGraphicsEllipseItem(cx - R, cy - R, 2*R, 2*R)
            earth_water.setZValue(0)
            earth_water.setBrush(self.water_brush)
            self.polygons.addToGroup(earth_water)
        else:
            # we compute the projected bounds of the Mercator (3395) projection
            # upper-left corner x and y coordinates:
            ulc_x, ulc_y = self.to_canvas_coordinates(-180, 84)
            # lower-right corner x and y coordinates
            lrc_x, lrc_y = self.to_canvas_coordinates(180, -84.72)
            # width and height of the map (required for the QRectItem)
            width, height = lrc_x - ulc_x, lrc_y - ulc_y
            earth_water = QtWidgets.QGraphicsRectItem(ulc_x, ulc_y, width, height)
            earth_water.setZValue(0)
            earth_water.setBrush(self.water_brush)
            self.polygons.addToGroup(earth_water)
    def draw_polygons(self):
        # self.shapefile is assumed to be set elsewhere before drawing —
        # TODO confirm (not initialised in __init__)
        sf = shapefile.Reader(self.shapefile)
        polygons = sf.shapes()
        for polygon in polygons:
            # convert shapefile geometries into shapely geometries
            # to extract the polygons of a multipolygon
            polygon = shapely.geometry.shape(polygon)
            # if it is a polygon, we use a list to make it iterable
            if polygon.geom_type == 'Polygon':
                polygon = [polygon]
            for land in polygon:
                qt_polygon = QtGui.QPolygonF()
                longitudes, latitudes = land.exterior.coords.xy
                for lon, lat in zip(longitudes, latitudes):
                    px, py = self.to_canvas_coordinates(lon, lat)
                    # skip points projected to (near-)infinity
                    if px > 1e+10:
                        continue
                    qt_polygon.append(QtCore.QPointF(px, py))
                polygon_item = QtWidgets.QGraphicsPolygonItem(qt_polygon)
                polygon_item.setBrush(self.land_brush)
                polygon_item.setPen(self.land_pen)
                polygon_item.setZValue(1)
                yield polygon_item
    def show_hide_map(self):
        # toggle visibility of the whole polygon group
        self.display = not self.display
        self.polygons.show() if self.display else self.polygons.hide()
    def delete_map(self):
        self.view.scene.removeItem(self.polygons)
    def redraw_map(self):
        self.delete_map()
        self.polygons = self.view.scene.createItemGroup(self.draw_polygons())
        self.draw_water()
        # replace the nodes at their geographical location
        self.view.move_to_geographical_coordinates()
| gpl-3.0 |
LittleLama/Sick-Beard-BoxCar2 | lib/hachoir_metadata/register.py | 90 | 7003 | from lib.hachoir_core.i18n import _
from lib.hachoir_core.tools import (
humanDuration, humanBitRate,
humanFrequency, humanBitSize, humanFilesize,
humanDatetime)
from lib.hachoir_core.language import Language
from lib.hachoir_metadata.filter import Filter, NumberFilter, DATETIME_FILTER
from datetime import date, datetime, timedelta
from lib.hachoir_metadata.formatter import (
humanAudioChannel, humanFrameRate, humanComprRate, humanAltitude,
humanPixelSize, humanDPI)
from lib.hachoir_metadata.setter import (
setDatetime, setTrackNumber, setTrackTotal, setLanguage)
from lib.hachoir_metadata.metadata_item import Data
# Sanity bounds used by the NumberFilter/Filter instances registered below;
# values outside these ranges are treated as invalid metadata.
MIN_SAMPLE_RATE = 1000 # 1 kHz
MAX_SAMPLE_RATE = 192000 # 192 kHz
MAX_NB_CHANNEL = 8 # 8 channels
MAX_WIDTH = 20000 # 20 000 pixels
MAX_BIT_RATE = 500 * 1024 * 1024 # 500 Mbit/s
MAX_HEIGHT = MAX_WIDTH
MAX_DPI_WIDTH = 10000
MAX_DPI_HEIGHT = MAX_DPI_WIDTH
MAX_NB_COLOR = 2 ** 24 # 16 million of color
MAX_BITS_PER_PIXEL = 256 # 256 bits/pixel
MAX_FRAME_RATE = 150 # 150 frame/sec
MAX_NB_PAGE = 20000
MAX_COMPR_RATE = 1000.0
MIN_COMPR_RATE = 0.001
MAX_TRACK = 999
# durations must fall between 1 ms and 1 year
DURATION_FILTER = Filter(timedelta,
    timedelta(milliseconds=1),
    timedelta(days=365))
def registerAllItems(meta):
    """Register every known metadata item on *meta*.

    The integer is the item's sort key (lower = displayed earlier); Data
    items carry optional formatters, value filters and setters.
    """
    # titles and people
    meta.register(Data("title", 100, _("Title"), type=unicode))
    meta.register(Data("artist", 101, _("Artist"), type=unicode))
    meta.register(Data("author", 102, _("Author"), type=unicode))
    meta.register(Data("music_composer", 103, _("Music composer"), type=unicode))
    # audio / document grouping
    meta.register(Data("album", 200, _("Album"), type=unicode))
    meta.register(Data("duration", 201, _("Duration"), # integer in milliseconde
        type=timedelta, text_handler=humanDuration, filter=DURATION_FILTER))
    meta.register(Data("nb_page", 202, _("Nb page"), filter=NumberFilter(1, MAX_NB_PAGE)))
    meta.register(Data("music_genre", 203, _("Music genre"), type=unicode))
    meta.register(Data("language", 204, _("Language"), conversion=setLanguage, type=Language))
    meta.register(Data("track_number", 205, _("Track number"), conversion=setTrackNumber,
        filter=NumberFilter(1, MAX_TRACK), type=(int, long)))
    meta.register(Data("track_total", 206, _("Track total"), conversion=setTrackTotal,
        filter=NumberFilter(1, MAX_TRACK), type=(int, long)))
    meta.register(Data("organization", 210, _("Organization"), type=unicode))
    meta.register(Data("version", 220, _("Version")))
    # image / audio technical properties
    meta.register(Data("width", 301, _("Image width"), filter=NumberFilter(1, MAX_WIDTH), type=(int, long), text_handler=humanPixelSize))
    meta.register(Data("height", 302, _("Image height"), filter=NumberFilter(1, MAX_HEIGHT), type=(int, long), text_handler=humanPixelSize))
    meta.register(Data("nb_channel", 303, _("Channel"), text_handler=humanAudioChannel, filter=NumberFilter(1, MAX_NB_CHANNEL), type=(int, long)))
    meta.register(Data("sample_rate", 304, _("Sample rate"), text_handler=humanFrequency, filter=NumberFilter(MIN_SAMPLE_RATE, MAX_SAMPLE_RATE), type=(int, long, float)))
    meta.register(Data("bits_per_sample", 305, _("Bits/sample"), text_handler=humanBitSize, filter=NumberFilter(1, 64), type=(int, long)))
    meta.register(Data("image_orientation", 306, _("Image orientation")))
    meta.register(Data("nb_colors", 307, _("Number of colors"), filter=NumberFilter(1, MAX_NB_COLOR), type=(int, long)))
    meta.register(Data("bits_per_pixel", 308, _("Bits/pixel"), filter=NumberFilter(1, MAX_BITS_PER_PIXEL), type=(int, long)))
    meta.register(Data("filename", 309, _("File name"), type=unicode))
    meta.register(Data("file_size", 310, _("File size"), text_handler=humanFilesize, type=(int, long)))
    meta.register(Data("pixel_format", 311, _("Pixel format")))
    meta.register(Data("compr_size", 312, _("Compressed file size"), text_handler=humanFilesize, type=(int, long)))
    meta.register(Data("compr_rate", 313, _("Compression rate"), text_handler=humanComprRate, filter=NumberFilter(MIN_COMPR_RATE, MAX_COMPR_RATE), type=(int, long, float)))
    meta.register(Data("width_dpi", 320, _("Image DPI width"), filter=NumberFilter(1, MAX_DPI_WIDTH), type=(int, long), text_handler=humanDPI))
    meta.register(Data("height_dpi", 321, _("Image DPI height"), filter=NumberFilter(1, MAX_DPI_HEIGHT), type=(int, long), text_handler=humanDPI))
    # file properties
    meta.register(Data("file_attr", 400, _("File attributes")))
    meta.register(Data("file_type", 401, _("File type")))
    meta.register(Data("subtitle_author", 402, _("Subtitle author"), type=unicode))
    # dates and location
    meta.register(Data("creation_date", 500, _("Creation date"), text_handler=humanDatetime,
        filter=DATETIME_FILTER, type=(datetime, date), conversion=setDatetime))
    meta.register(Data("last_modification", 501, _("Last modification"), text_handler=humanDatetime,
        filter=DATETIME_FILTER, type=(datetime, date), conversion=setDatetime))
    meta.register(Data("latitude", 510, _("Latitude"), type=float))
    meta.register(Data("longitude", 511, _("Longitude"), type=float))
    # NOTE(review): sort key 511 duplicates 'longitude' above — possibly
    # intended to be 512; confirm against upstream hachoir.
    meta.register(Data("altitude", 511, _("Altitude"), type=float, text_handler=humanAltitude))
    meta.register(Data("location", 530, _("Location"), type=unicode))
    meta.register(Data("city", 531, _("City"), type=unicode))
    meta.register(Data("country", 532, _("Country"), type=unicode))
    meta.register(Data("charset", 540, _("Charset"), type=unicode))
    meta.register(Data("font_weight", 550, _("Font weight")))
    # camera EXIF-style properties
    meta.register(Data("camera_aperture", 520, _("Camera aperture")))
    meta.register(Data("camera_focal", 521, _("Camera focal")))
    meta.register(Data("camera_exposure", 522, _("Camera exposure")))
    meta.register(Data("camera_brightness", 530, _("Camera brightness")))
    meta.register(Data("camera_model", 531, _("Camera model"), type=unicode))
    meta.register(Data("camera_manufacturer", 532, _("Camera manufacturer"), type=unicode))
    # misc / container properties
    meta.register(Data("compression", 600, _("Compression")))
    meta.register(Data("copyright", 601, _("Copyright"), type=unicode))
    meta.register(Data("url", 602, _("URL"), type=unicode))
    meta.register(Data("frame_rate", 603, _("Frame rate"), text_handler=humanFrameRate,
        filter=NumberFilter(1, MAX_FRAME_RATE), type=(int, long, float)))
    meta.register(Data("bit_rate", 604, _("Bit rate"), text_handler=humanBitRate,
        filter=NumberFilter(1, MAX_BIT_RATE), type=(int, long, float)))
    # NOTE(review): sort key 604 duplicates 'bit_rate' above — confirm.
    meta.register(Data("aspect_ratio", 604, _("Aspect ratio"), type=(int, long, float)))
    meta.register(Data("os", 900, _("OS"), type=unicode))
    meta.register(Data("producer", 901, _("Producer"), type=unicode))
    meta.register(Data("comment", 902, _("Comment"), type=unicode))
    meta.register(Data("format_version", 950, _("Format version"), type=unicode))
    meta.register(Data("mime_type", 951, _("MIME type"), type=unicode))
    meta.register(Data("endian", 952, _("Endianness"), type=unicode))
| gpl-3.0 |
tensorflow/agents | tf_agents/policies/policy_info_updater_wrapper_test.py | 1 | 4131 | # coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
r"""Tests for tf_agents.policies.policy_info_updater_wrapper.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tf_agents.policies import policy_info_updater_wrapper
from tf_agents.policies import tf_policy
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import policy_step
from tf_agents.trajectories import time_step as ts
from tf_agents.utils import test_utils
class DistributionPolicy(tf_policy.TFPolicy):
  """Test policy whose distribution is fixed at construction time."""

  def __init__(self,
               distribution,
               time_step_spec,
               action_spec,
               info_spec,
               name=None):
    # Stash the canned distribution; _distribution() hands it back verbatim.
    self._distribution_value = distribution
    super(DistributionPolicy, self).__init__(
        time_step_spec=time_step_spec,
        action_spec=action_spec,
        info_spec=info_spec,
        name=name)

  def _make_info(self):
    # Constant info payload shared by _action() and _distribution().
    return {'test_info': tf.constant(2, shape=(1,))}

  def _action(self, time_step, policy_state, seed):
    del time_step, seed  # Unused: the action is constant.
    return policy_step.PolicyStep(
        tf.constant(1., shape=(1,)), policy_state, self._make_info())

  def _distribution(self, time_step, policy_state):
    del time_step  # Unused: the distribution is canned.
    return policy_step.PolicyStep(
        self._distribution_value, policy_state, self._make_info())

  def _variables(self):
    # This policy owns no trainable variables.
    return []
class ModelIdUpdater(object):
  """Callable updater stub that always reports model_id == 2."""

  def __call__(self, step):
    # The policy step is irrelevant; emit a constant (1,)-shaped model id.
    del step
    model_id = tf.expand_dims(2, axis=0)
    return {'model_id': model_id}
class PolicyInfoUpdaterWrapperTest(test_utils.TestCase, parameterized.TestCase):
  """Checks that PolicyInfoUpdaterWrapper injects updater output into info."""

  def setUp(self):
    super(PolicyInfoUpdaterWrapperTest, self).setUp()
    # A 2-element float observation drives a trivial time-step spec.
    self._obs_spec = tensor_spec.TensorSpec([2], tf.float32)
    self._time_step_spec = ts.time_step_spec(self._obs_spec)

  def test_model_id_updater(self):
    """Wrapper must merge ModelIdUpdater's {'model_id': ...} into policy info
    for both action() and distribution() without disturbing the specs."""
    loc = 0.0
    scale = 0.5
    action_spec = tensor_spec.BoundedTensorSpec([1], tf.float32, tf.float32.min,
                                                tf.float32.max)
    wrapped_policy = DistributionPolicy(
        distribution=tfp.distributions.Normal([loc], [scale]),
        time_step_spec=self._time_step_spec,
        action_spec=action_spec,
        info_spec={
            'test_info':
                tf.TensorSpec(shape=(1,), dtype=tf.int32, name='test_info')
        })
    # The wrapper's info spec is the wrapped policy's spec plus 'model_id'.
    updater_info_spec = {
        'model_id': tf.TensorSpec(shape=(1,), dtype=tf.int32, name='model_id')
    }
    updater_info_spec.update(wrapped_policy.info_spec)
    policy = policy_info_updater_wrapper.PolicyInfoUpdaterWrapper(
        policy=wrapped_policy,
        info_spec=updater_info_spec,
        updater_fn=ModelIdUpdater(),
        name='model_id_updater')
    # Wrapping must not change the time-step or action specs.
    self.assertEqual(policy.time_step_spec, self._time_step_spec)
    self.assertEqual(policy.action_spec, action_spec)
    observations = tf.constant([[1, 2], [3, 4]], dtype=tf.float32)
    time_step = ts.restart(observations, batch_size=2)
    action_step = policy.action(time_step)
    distribution_step = policy.distribution(time_step)
    tf.nest.assert_same_structure(action_spec, action_step.action)
    tf.nest.assert_same_structure(action_spec, distribution_step.action)
    # ModelIdUpdater always emits [2]; both code paths must carry it.
    self.assertListEqual(list(self.evaluate(action_step.info['model_id'])), [2])
    self.assertListEqual(
        list(self.evaluate(distribution_step.info['model_id'])), [2])
# Run the test suite when executed directly.
if __name__ == '__main__':
  tf.test.main()
| apache-2.0 |
micropython/micropython | examples/bluetooth/ble_bonding_peripheral.py | 7 | 6806 | # This example demonstrates a simple temperature sensor peripheral.
#
# The sensor's local value updates every second, and it will notify
# any connected central every 10 seconds.
#
# Work-in-progress demo of implementing bonding and passkey auth.
import bluetooth
import random
import struct
import time
import json
import binascii
from ble_advertising import advertising_payload
from micropython import const
# BLE.irq() event codes handled in BLETemperature._irq() below.
_IRQ_CENTRAL_CONNECT = const(1)
_IRQ_CENTRAL_DISCONNECT = const(2)
_IRQ_GATTS_INDICATE_DONE = const(20)
_IRQ_ENCRYPTION_UPDATE = const(28)
_IRQ_PASSKEY_ACTION = const(31)
_IRQ_GET_SECRET = const(29)
_IRQ_SET_SECRET = const(30)
# GATT characteristic property/permission flags used in _TEMP_CHAR.
_FLAG_READ = const(0x0002)
_FLAG_NOTIFY = const(0x0010)
_FLAG_INDICATE = const(0x0020)
_FLAG_READ_ENCRYPTED = const(0x0200)
# org.bluetooth.service.environmental_sensing
_ENV_SENSE_UUID = bluetooth.UUID(0x181A)
# org.bluetooth.characteristic.temperature
_TEMP_CHAR = (
    bluetooth.UUID(0x2A6E),
    _FLAG_READ | _FLAG_NOTIFY | _FLAG_INDICATE | _FLAG_READ_ENCRYPTED,
)
_ENV_SENSE_SERVICE = (
    _ENV_SENSE_UUID,
    (_TEMP_CHAR,),
)
# org.bluetooth.characteristic.gap.appearance.xml
_ADV_APPEARANCE_GENERIC_THERMOMETER = const(768)
# Security-manager I/O capability values for BLE.config(io=...).
_IO_CAPABILITY_DISPLAY_ONLY = const(0)
_IO_CAPABILITY_DISPLAY_YESNO = const(1)
_IO_CAPABILITY_KEYBOARD_ONLY = const(2)
_IO_CAPABILITY_NO_INPUT_OUTPUT = const(3)
_IO_CAPABILITY_KEYBOARD_DISPLAY = const(4)
# Passkey action requests delivered with _IRQ_PASSKEY_ACTION.
_PASSKEY_ACTION_INPUT = const(2)
_PASSKEY_ACTION_DISP = const(3)
_PASSKEY_ACTION_NUMCMP = const(4)
class BLETemperature:
    """Environmental-sensing BLE peripheral with LE Secure Connections.

    Exposes an encrypted temperature characteristic and persists bonding
    secrets to ``secrets.json`` so bonds survive a power cycle.
    """

    def __init__(self, ble, name="mpy-temp"):
        self._ble = ble
        self._load_secrets()
        self._ble.irq(self._irq)
        # Enable bonding with LE Secure Connections and MITM protection;
        # DISPLAY_YESNO capability selects numeric-comparison pairing.
        self._ble.config(bond=True)
        self._ble.config(le_secure=True)
        self._ble.config(mitm=True)
        self._ble.config(io=_IO_CAPABILITY_DISPLAY_YESNO)
        self._ble.active(True)
        # addr_mode 2: presumably a resolvable private address -- confirm
        # against the port's BLE.config() documentation.
        self._ble.config(addr_mode=2)
        ((self._handle,),) = self._ble.gatts_register_services((_ENV_SENSE_SERVICE,))
        self._connections = set()
        self._payload = advertising_payload(
            name=name, services=[_ENV_SENSE_UUID], appearance=_ADV_APPEARANCE_GENERIC_THERMOMETER
        )
        self._advertise()

    def _irq(self, event, data):
        # Track connections so we can send notifications.
        if event == _IRQ_CENTRAL_CONNECT:
            conn_handle, _, _ = data
            self._connections.add(conn_handle)
        elif event == _IRQ_CENTRAL_DISCONNECT:
            conn_handle, _, _ = data
            self._connections.remove(conn_handle)
            # Persist any secrets negotiated during this connection.
            self._save_secrets()
            # Start advertising again to allow a new connection.
            self._advertise()
        elif event == _IRQ_ENCRYPTION_UPDATE:
            conn_handle, encrypted, authenticated, bonded, key_size = data
            print("encryption update", conn_handle, encrypted, authenticated, bonded, key_size)
        elif event == _IRQ_PASSKEY_ACTION:
            conn_handle, action, passkey = data
            print("passkey action", conn_handle, action, passkey)
            if action == _PASSKEY_ACTION_NUMCMP:
                accept = int(input("accept? "))
                self._ble.gap_passkey(conn_handle, action, accept)
            elif action == _PASSKEY_ACTION_DISP:
                print("displaying 123456")
                self._ble.gap_passkey(conn_handle, action, 123456)
            elif action == _PASSKEY_ACTION_INPUT:
                print("prompting for passkey")
                passkey = int(input("passkey? "))
                self._ble.gap_passkey(conn_handle, action, passkey)
            else:
                print("unknown action")
        elif event == _IRQ_GATTS_INDICATE_DONE:
            conn_handle, value_handle, status = data
        elif event == _IRQ_SET_SECRET:
            # Stack asks us to store (or, when value is None, delete) a secret.
            sec_type, key, value = data
            key = sec_type, bytes(key)
            value = bytes(value) if value else None
            print("set secret:", key, value)
            if value is None:
                if key in self._secrets:
                    del self._secrets[key]
                    return True
                else:
                    return False
            else:
                self._secrets[key] = value
                return True
        elif event == _IRQ_GET_SECRET:
            # Stack asks for a secret either by exact key or by (type, index).
            sec_type, index, key = data
            print("get secret:", sec_type, index, bytes(key) if key else None)
            if key is None:
                # Return the index'th stored secret of this type.
                i = 0
                for (t, _key), value in self._secrets.items():
                    if t == sec_type:
                        if i == index:
                            return value
                        i += 1
                return None
            else:
                key = sec_type, bytes(key)
                return self._secrets.get(key, None)

    def set_temperature(self, temp_deg_c, notify=False, indicate=False):
        # Data is sint16 in degrees Celsius with a resolution of 0.01 degrees Celsius.
        # Write the local value, ready for a central to read.
        self._ble.gatts_write(self._handle, struct.pack("<h", int(temp_deg_c * 100)))
        if notify or indicate:
            for conn_handle in self._connections:
                if notify:
                    # Notify connected centrals.
                    self._ble.gatts_notify(conn_handle, self._handle)
                if indicate:
                    # Indicate connected centrals.
                    self._ble.gatts_indicate(conn_handle, self._handle)

    def _advertise(self, interval_us=500000):
        self._ble.config(addr_mode=2)
        self._ble.gap_advertise(interval_us, adv_data=self._payload)

    def _load_secrets(self):
        """Load bond secrets saved by _save_secrets(); start empty on failure."""
        self._secrets = {}
        try:
            with open("secrets.json", "r") as f:
                entries = json.load(f)
                for sec_type, key, value in entries:
                    self._secrets[sec_type, binascii.a2b_base64(key)] = binascii.a2b_base64(value)
        except (OSError, ValueError):
            # OSError: file missing/unreadable; ValueError: corrupt JSON or
            # base64. (Was a bare `except:`, which also swallowed interrupts.)
            print("no secrets available")

    def _save_secrets(self):
        """Persist bond secrets as base64 strings inside a JSON list."""
        try:
            with open("secrets.json", "w") as f:
                # b2a_base64() returns bytes; decode so the dumped file is
                # valid, portable JSON (bytes are not JSON-serializable).
                json_secrets = [
                    (sec_type, binascii.b2a_base64(key).decode(), binascii.b2a_base64(value).decode())
                    for (sec_type, key), value in self._secrets.items()
                ]
                json.dump(json_secrets, f)
        except OSError:
            print("failed to save secrets")
def demo():
    """Run the peripheral forever, publishing a random-walk temperature."""
    ble = bluetooth.BLE()
    sensor = BLETemperature(ble)
    temp_c = 25
    tick = 0
    while True:
        # Refresh the local value every second; notify every tenth update.
        tick = (tick + 1) % 10
        sensor.set_temperature(temp_c, notify=tick == 0, indicate=False)
        # Drift the reading with a small random walk.
        temp_c += random.uniform(-0.5, 0.5)
        time.sleep_ms(1000)
# Start the demo when run as a script (e.g. via mpremote/pyboard).
if __name__ == "__main__":
    demo()
| mit |
MIPS/qemu-android | scripts/tracetool/backend/simple.py | 97 | 2669 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Simple built-in backend.
"""
__author__ = "Lluís Vilanova <vilanova@ac.upc.edu>"
__copyright__ = "Copyright 2012-2014, Lluís Vilanova <vilanova@ac.upc.edu>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "stefanha@linux.vnet.ibm.com"
from tracetool import out
PUBLIC = True
def is_string(arg):
    """Return True if the C argument declaration *arg* is a string type.

    *arg* is a declaration such as ``const char* name``; it counts as a
    string when, after stripping leading whitespace, it begins with one
    of the recognised char-pointer spellings.
    """
    strtype = ('const char*', 'char*', 'const char *', 'char *')
    # startswith() accepts a tuple of prefixes; no need for if/else
    # returning True/False explicitly.
    return arg.lstrip().startswith(strtype)
def generate_h_begin(events):
    """Declare the per-event _simple_* helper for every trace event."""
    for evt in events:
        out('void _simple_%(api)s(%(args)s);',
            api=evt.api(),
            args=evt.args)
    out('')
def generate_h(event):
    """Emit the trace.h call into the simple backend for one event."""
    joined_args = ", ".join(event.args.names())
    out('    _simple_%(api)s(%(args)s);',
        api=event.api(),
        args=joined_args)
def generate_c_begin(events):
    """Emit the #include prologue of the generated trace C file."""
    prologue = ('#include "trace.h"',
                '#include "trace/control.h"',
                '#include "trace/simple.h"',
                '')
    out(*prologue)
def generate_c(event):
    """Emit the C body of one event's _simple_* helper.

    The helper computes the record size, reserves space in the trace
    buffer, then serialises each argument (strings length-prefixed,
    everything else widened to u64).
    """
    out('void _simple_%(api)s(%(args)s)',
        '{',
        '    TraceBufferRecord rec;',
        api=event.api(),
        args=event.args)
    # Per-argument on-wire sizes: strings are 4 (length word) + data bytes,
    # all other types are written as 8-byte u64 values.
    sizes = []
    for type_, name in event.args:
        if is_string(type_):
            out('    size_t arg%(name)s_len = %(name)s ? MIN(strlen(%(name)s), MAX_TRACE_STRLEN) : 0;',
                name=name)
            strsizeinfo = "4 + arg%s_len" % name
            sizes.append(strsizeinfo)
        else:
            sizes.append("8")
    sizestr = " + ".join(sizes)
    if len(event.args) == 0:
        sizestr = '0'
    # Bail out early when the event is disabled or the buffer is full.
    out('',
        '    if (!trace_event_get_state(%(event_id)s)) {',
        '        return;',
        '    }',
        '',
        '    if (trace_record_start(&rec, %(event_id)s, %(size_str)s)) {',
        '        return; /* Trace Buffer Full, Event Dropped ! */',
        '    }',
        event_id='TRACE_' + event.name.upper(),
        size_str=sizestr)
    if len(event.args) > 0:
        for type_, name in event.args:
            # string
            if is_string(type_):
                out('    trace_record_write_str(&rec, %(name)s, arg%(name)s_len);',
                    name=name)
            # pointer var (not string)
            elif type_.endswith('*'):
                out('    trace_record_write_u64(&rec, (uintptr_t)(uint64_t *)%(name)s);',
                    name=name)
            # primitive data type
            else:
                out('    trace_record_write_u64(&rec, (uint64_t)%(name)s);',
                    name=name)
    out('    trace_record_finish(&rec);',
        '}',
        '')
| gpl-2.0 |
uwescience/raco | raco/operator_test.py | 1 | 6417 | import unittest
import raco.fakedb
from raco.relation_key import RelationKey
from raco.algebra import *
from raco.expression import *
import raco.relation_key as relation_key
from raco.expression import StateVar
class TestQueryFunctions():
    """Shared fixture: a small employee relation, its schema, and its key."""
    emp_table = collections.Counter([
        # id dept_id name salary
        (1, 2, "Bill Howe", 25000),
        (2, 1, "Dan Halperin", 90000),
        (3, 1, "Andrew Whitaker", 5000),
        (4, 2, "Shumo Chu", 5000),
        (5, 1, "Victor Almeida", 25000),
        (6, 3, "Dan Suciu", 90000),
        (7, 1, "Magdalena Balazinska", 25000)])
    # Column types matching the tuples above.
    emp_schema = scheme.Scheme([("id", types.LONG_TYPE),
                                ("dept_id", types.LONG_TYPE),
                                ("name", types.STRING_TYPE),
                                ("salary", types.LONG_TYPE)])
    # Catalog key under which the fixture is ingested in setUp().
    emp_key = relation_key.RelationKey.from_string("andrew:adhoc:employee")
class OperatorTest(unittest.TestCase):
    """Evaluates physical operators against FakeDatabase.

    NOTE(review): this is a Python 2 file (`iteritems`, list-vs-`range`
    comparison, `assertEquals`).
    """
    def setUp(self):
        # Fresh in-memory database per test, pre-loaded with the employees.
        self.db = raco.fakedb.FakeDatabase()
        self.db.ingest(TestQueryFunctions.emp_key,
                       TestQueryFunctions.emp_table,
                       TestQueryFunctions.emp_schema)
    def test_counter_stateful_apply(self):
        """Test stateful apply operator that produces a counter"""
        scan = Scan(TestQueryFunctions.emp_key, TestQueryFunctions.emp_schema)
        # count starts at -1 and is incremented once per tuple, so the
        # emitted values are 0..6.
        initex = NumericLiteral(-1)
        iterex = NamedStateAttributeRef("count")
        updateex = PLUS(UnnamedStateAttributeRef(0),
                        NumericLiteral(1))
        sapply = StatefulApply([("count", iterex)],
                               [StateVar("count", initex, updateex)], scan)
        result = collections.Counter(self.db.evaluate(sapply))
        self.assertEqual(len(result), len(TestQueryFunctions.emp_table))
        self.assertEqual([x[0] for x in result], range(7))
    def test_times_equal_uda(self):
        """GroupBy with a UDA computing the product of salaries per dept."""
        input_op = Scan(TestQueryFunctions.emp_key,
                        TestQueryFunctions.emp_schema)
        init_ex = NumericLiteral(1)
        update_ex = TIMES(NamedStateAttributeRef("value"),
                          NamedAttributeRef("salary"))
        emit_ex = UdaAggregateExpression(NamedStateAttributeRef("value"))
        statemods = [StateVar("value", init_ex, update_ex)]
        gb = GroupBy([UnnamedAttributeRef(1)], [emit_ex], input_op, statemods)
        result = self.db.evaluate_to_bag(gb)
        # Recompute the expected per-department salary products in Python.
        d = collections.defaultdict(lambda: 1)
        for tpl in TestQueryFunctions.emp_table:
            d[tpl[1]] *= tpl[3]
        expected = collections.Counter(
            [(key, val) for key, val in d.iteritems()])
        self.assertEquals(result, expected)
    def test_running_mean_stateful_apply(self):
        """Calculate the mean using stateful apply"""
        scan = Scan(TestQueryFunctions.emp_key, TestQueryFunctions.emp_schema)
        # Two state variables: running count and running salary sum.
        initex0 = NumericLiteral(0)
        updateex0 = PLUS(NamedStateAttributeRef("count"),
                         NumericLiteral(1))
        initex1 = NumericLiteral(0)
        updateex1 = PLUS(NamedStateAttributeRef("sum"),
                         NamedAttributeRef("salary"))
        avgex = IDIVIDE(NamedStateAttributeRef("sum"),
                        NamedStateAttributeRef("count"))
        sapply = StatefulApply([("avg", avgex)],
                               [StateVar("count", initex0, updateex0),
                                StateVar("sum", initex1, updateex1)], scan)
        store = Store(RelationKey("OUTPUT"), sapply)
        result = list(self.db.evaluate(sapply))
        self.assertEqual(len(result), len(TestQueryFunctions.emp_table))
        # Integer mean of all seven salaries.
        self.assertEqual([x[0] for x in result][-1], 37857)
        # test whether we can generate json without errors
        from raco.backends.myria import (compile_to_json,
                                         MyriaLeftDeepTreeAlgebra)
        from compile import optimize
        plan = compile_to_json("", None, optimize(
            store, MyriaLeftDeepTreeAlgebra()))  # noqa
        for op in plan['plan']['fragments'][0]['operators']:
            if op['opType'] == 'StatefulApply':
                assert not any(exp is None for exp in op['emitExpressions'])
    def test_cast_to_float(self):
        """CAST to DOUBLE yields Python floats with unchanged values."""
        scan = Scan(TestQueryFunctions.emp_key, TestQueryFunctions.emp_schema)
        cast = CAST(types.DOUBLE_TYPE, NamedAttributeRef("salary"))
        applyop = Apply([("salaryf", cast)], scan)
        res = list(self.db.evaluate(applyop))
        for x in res:
            assert isinstance(x[0], float)
        self.assertEqual([x[0] for x in res],
                         [x[3] for x in TestQueryFunctions.emp_table])
    def test_projecting_join_scheme(self):
        """Default ProjectingJoin scheme concatenates both inputs' columns,
        suffixing the right side's duplicates with '1'."""
        emp = Scan(TestQueryFunctions.emp_key, TestQueryFunctions.emp_schema)
        emp1 = Scan(TestQueryFunctions.emp_key, TestQueryFunctions.emp_schema)
        pj = ProjectingJoin(condition=BooleanLiteral(True),
                            left=emp, right=emp1)
        names = ([n for n in emp.scheme().get_names()]
                 + ["{n}1".format(n=n) for n in emp.scheme().get_names()])
        self.assertEquals(names, pj.scheme().get_names())
    def test_projecting_join_scheme_no_dups_alternate(self):
        """Keeping one copy of each column (alternating sides) avoids renames."""
        emp = Scan(TestQueryFunctions.emp_key, TestQueryFunctions.emp_schema)
        emp1 = Scan(TestQueryFunctions.emp_key, TestQueryFunctions.emp_schema)
        num_cols = len(emp.scheme())
        # alternate which copy of emp we keep a col from
        refs = [UnnamedAttributeRef(i + (i % 2) * num_cols)
                for i in range(num_cols)]
        pj = ProjectingJoin(condition=BooleanLiteral(True),
                            left=emp, right=emp1, output_columns=refs)
        self.assertEquals(emp.scheme().get_names(), pj.scheme().get_names())
    def test_projecting_join_scheme_no_dups_only_keep_right(self):
        """Keeping only right-side columns reproduces the input scheme."""
        emp = Scan(TestQueryFunctions.emp_key, TestQueryFunctions.emp_schema)
        emp1 = Scan(TestQueryFunctions.emp_key, TestQueryFunctions.emp_schema)
        num_cols = len(emp.scheme())
        # keep only the right child's columns
        refs = [UnnamedAttributeRef(i + num_cols)
                for i in range(num_cols)]
        pj = ProjectingJoin(condition=BooleanLiteral(True),
                            left=emp, right=emp1, output_columns=refs)
        self.assertEquals(emp.scheme().get_names(), pj.scheme().get_names())
| bsd-3-clause |
dlazz/ansible | lib/ansible/executor/action_write_locks.py | 140 | 1911 | # (c) 2016 - Red Hat, Inc. <info@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from multiprocessing import Lock
from ansible.module_utils.facts.system.pkg_mgr import PKG_MGRS
# Map of module name -> multiprocessing.Lock serialising writes of that
# module's payload. The globals() guard keeps a re-import (e.g. after a
# worker fork) from replacing locks already handed to other processes.
if 'action_write_locks' not in globals():
    # Do not initialize this more than once because it seems to bash
    # the existing one. multiprocessing must be reloading the module
    # when it forks?
    action_write_locks = dict()
    # Below is a Lock for use when we weren't expecting a named module. It gets used when an action
    # plugin invokes a module whose name does not match with the action's name. Slightly less
    # efficient as all processes with unexpected module names will wait on this lock
    action_write_locks[None] = Lock()
    # These plugins are known to be called directly by action plugins with names differing from the
    # action plugin name. We precreate them here as an optimization.
    # If a list of service managers is created in the future we can do the same for them.
    mods = set(p['name'] for p in PKG_MGRS)
    mods.update(('copy', 'file', 'setup', 'slurp', 'stat'))
    for mod_name in mods:
        action_write_locks[mod_name] = Lock()
| gpl-3.0 |
plivo/plivo-python | tests/resources/test_multipartycalls.py | 1 | 22887 | # -*- coding: utf-8 -*-
import json
from plivo.base import (ListResponseObject)
from plivo.exceptions import ValidationError
from plivo.resources import MultiPartyCall, MultiPartyCallParticipant
from plivo.utils.signature_v3 import construct_get_url
from tests.base import PlivoResourceTestCase
from tests.decorators import with_response
class MultiPartyCallsTest(PlivoResourceTestCase):
    def __assert_requests(self, expected_url, expected_method, expected_request_body=None, actual_response=None):
        """Assert the last mocked HTTP request's URL and method, plus
        (optionally) its JSON body and the round-tripped mock response."""
        self.maxDiff = None
        # Verifying the api hit
        self.assertEqual(expected_url, self.client.current_request.url)
        # Verifying the method used
        self.assertEqual(expected_method, self.client.current_request.method)
        if expected_request_body:
            # Verifying the request body sent
            self.assertDictEqual(expected_request_body, json.loads(self.client.current_request.body.decode('utf-8')))
        if actual_response:
            # Verifying the mock response
            self.assertResponseMatches(actual_response)
def test_add_participant_validations(self):
error_message = ''
friendly_name = 'friendly_name'
uuid = '1234-5678-9012-3456'
try:
self.client.multi_party_calls.add_participant(role='agent')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, 'specify either multi party call friendly name or uuid')
try:
self.client.multi_party_calls.add_participant(role='supervisor', friendly_name=friendly_name, uuid=uuid)
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, 'cannot specify both multi party call friendly name or uuid')
try:
self.client.multi_party_calls.add_participant(role='customer', uuid=uuid)
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, 'specify either call_uuid or (from, to)')
try:
self.client.multi_party_calls.add_participant(role='customer', uuid=uuid, from_='123456', call_uuid=uuid)
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, 'cannot specify call_uuid when (from, to) is provided')
try:
self.client.multi_party_calls.add_participant(role='agent', uuid=uuid, to_='123456')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, 'specify (from, to) when not adding an existing call_uuid '
'to multi party participant')
try:
self.client.multi_party_calls.add_participant(role='manager')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "[\"role should be in ('agent', 'supervisor', "
"'customer') (actual value: manager)\"]")
try:
self.client.multi_party_calls.add_participant(role='supervisor', friendly_name=1234)
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "[\"friendly_name should be of type: ['str']\"]")
try:
self.client.multi_party_calls.add_participant(role='supervisor', uuid=1234)
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "[\"uuid should be of type: ['str']\"]")
try:
self.client.multi_party_calls.add_participant(role='supervisor', call_status_callback_url='callback_python')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "['call_status_callback_url should match format "
"(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\\\(\\\\),]|"
"(?:%[0-9a-fA-F][0-9a-fA-F]))+|None) (actual value: callback_python)']")
try:
self.client.multi_party_calls.add_participant(role='supervisor', call_status_callback_method='HEAD')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "[\"call_status_callback_method should be in "
"('GET', 'POST') (actual value: HEAD)\"]")
try:
self.client.multi_party_calls.add_participant(role='supervisor', confirm_key='K')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "[\"confirm_key should be in ('0', '1', '2', '3', '4', '5', '6', '7', '8', "
"'9', '#', '*') (actual value: K)\"]")
try:
self.client.multi_party_calls.add_participant(role='supervisor', ring_timeout='2500')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "[\"ring_timeout should be of type: ['int']\"]")
try:
self.client.multi_party_calls.add_participant(role='supervisor', max_duration=29867)
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "['300 < max_duration <= 28800 (actual value: 29867)']")
try:
self.client.multi_party_calls.add_participant(role='supervisor', status_callback_events='agent-transfer')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "[\"status_callback_events should be among ('mpc-state-changes', "
"'participant-state-changes', 'participant-speak-events', "
"'participant-digit-input-events', 'add-participant-api-events'). multiple "
"values should be COMMA(,) separated (actual value: agent-transfer)\"]")
try:
self.client.multi_party_calls.add_participant(role='supervisor', stay_alone=1)
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "[\"stay_alone should be of type: ['bool']\"]")
try:
self.client.multi_party_calls.add_participant(role='supervisor', enter_sound='beep:3')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "enter_sound did not satisfy any of the required types")
    @with_response(200)
    def test_add_participant(self):
        """Happy path: add by (friendly_name, call_uuid), then by
        (uuid, from, to), verifying the full JSON payload each time."""
        request_body = {
            'exit_sound_method': 'GET',
            'exit_sound': 'beep:2',
            'enter_sound_method': 'GET',
            'enter_sound': 'beep:1',
            'relay_dtmf_inputs': False,
            'end_mpc_on_exit': False,
            'start_mpc_on_enter': True,
            'hold': False, 'mute': False,
            'coach_mode': True,
            'stay_alone': False,
            'status_callback_events': 'mpc-state-changes,participant-state-changes',
            'record_file_format': 'mp3',
            'record': False,
            'on_exit_action_method': 'POST',
            'status_callback_method': 'GET',
            'recording_callback_method': 'GET',
            'customer_hold_music_method': 'GET',
            'agent_hold_music_method': 'GET',
            'wait_music_method': 'GET',
            'max_participants': 10,
            'max_duration': 14400,
            'ring_timeout': 45,
            'dial_music': 'real',
            'confirm_key_sound_method': 'GET',
            'call_status_callback_method': 'POST',
            'call_uuid': '1234-5678-4321-0987',
            'role': 'agent'
        }
        add_participant_response = self.client.multi_party_calls.add_participant(friendly_name='Voice', role='agent',
                                                                                 call_uuid='1234-5678-4321-0987')
        self.__assert_requests(actual_response=add_participant_response, expected_method='POST',
                               expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/'
                                            'MultiPartyCall/name_Voice/Participant/',
                               expected_request_body=request_body)
        # update the request body for next set of params
        request_body.pop('call_uuid', None)
        request_body['to'] = '180012341234'
        request_body['from'] = '918888888888'
        request_body['role'] = 'supervisor'
        request_body['coach_mode'] = False
        request_body['dial_music'] = 'http://music.plivo.com/bella-ciao.wav'
        request_body['status_callback_events'] = 'participant-speak-events'
        request_body['ring_timeout'] = 100
        request_body['max_duration'] = 25000
        request_body['max_participants'] = 5
        request_body['relay_dtmf_inputs'] = True
        request_body['customer_hold_music_url'] = 'http://music.plivo.com/bella-ciao.wav'
        request_body['customer_hold_music_method'] = 'POST'
        request_body['exit_sound_method'] = 'POST'
        request_body['record_file_format'] = 'wav'
        # Note: 'post'/'Post' below are expected to be upper-cased by the
        # client before sending (the body asserts 'POST').
        add_participant_response = self.client.multi_party_calls.add_participant(
            uuid='12345678-90123456', role='supervisor', to_='180012341234', from_='918888888888',
            coach_mode=False, dial_music='http://music.plivo.com/bella-ciao.wav', ring_timeout=100,
            status_callback_events='participant-speak-events', max_duration=25000, max_participants=5,
            relay_dtmf_inputs=True, customer_hold_music_url='http://music.plivo.com/bella-ciao.wav',
            customer_hold_music_method='post', exit_sound_method='Post', record_file_format='wav')
        self.__assert_requests(actual_response=add_participant_response, expected_method='POST',
                               expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/'
                                            'MultiPartyCall/uuid_12345678-90123456/Participant/',
                               expected_request_body=request_body)
    @with_response(200)
    def test_start_MPC(self):
        """start() POSTs {'status': 'active'} to the name_/uuid_ MPC URL."""
        request_body = {'status': 'active'}
        start_mpc_response = self.client.multi_party_calls.start(friendly_name='Voice')
        self.__assert_requests(actual_response=start_mpc_response, expected_method='POST',
                               expected_url='https://api.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/'
                                            'MultiPartyCall/name_Voice/',
                               expected_request_body=request_body)
        # Same call addressed by uuid instead of friendly name.
        start_mpc_response = self.client.multi_party_calls.start(uuid='12345678-90123456')
        self.__assert_requests(actual_response=start_mpc_response, expected_method='POST',
                               expected_url='https://api.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/'
                                            'MultiPartyCall/uuid_12345678-90123456/',
                               expected_request_body=request_body)
    @with_response(200)
    def test_end_MPC(self):
        """stop() issues DELETE on the name_/uuid_ MPC resource."""
        end_mpc_response = self.client.multi_party_calls.stop(friendly_name='Voice')
        self.__assert_requests(actual_response=end_mpc_response, expected_method='DELETE',
                               expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/'
                                            'MultiPartyCall/name_Voice/')
        # Same call addressed by uuid instead of friendly name.
        end_mpc_response = self.client.multi_party_calls.stop(uuid='12345678-90123456')
        self.__assert_requests(actual_response=end_mpc_response, expected_method='DELETE',
                               expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/'
                                            'MultiPartyCall/uuid_12345678-90123456/')
def test_list_mpc_validations(self):
error_message = ''
try:
self.client.multi_party_calls.list(sub_account='Voice')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, 'sub_account did not satisfy any of the required types')
try:
self.client.multi_party_calls.list(status='terminating')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "[\"status should be in ('active', 'initialized', 'ended') "
"(actual value: terminating)\"]")
try:
self.client.multi_party_calls.list(termination_cause_code='2000')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "[\"termination_cause_code should be of type: ['int']\"]")
try:
self.client.multi_party_calls.list(end_time__lte='20-10-3 9:22')
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "['end_time__lte should match format ^\\\\d{4}-\\\\d{2}-\\\\d{2} \\\\d{2}:"
"\\\\d{2}(:\\\\d{2}(\\\\.\\\\d{1,6})?)?$ (actual value: 20-10-3 9:22)']")
try:
self.client.multi_party_calls.list(limit=300)
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "['0 < limit <= 20 (actual value: 300)']")
try:
self.client.multi_party_calls.list(offset=-1)
except ValidationError as e:
error_message = str(e)
self.assertEqual(error_message, "['0 <= offset (actual value: -1)']")
@with_response(200)
def test_list_MPC(self):
multi_party_calls = self.client.multi_party_calls.list()
self.__assert_requests(expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/MultiPartyCall/',
expected_method='GET')
# check we received a list response
self.assertIsInstance(multi_party_calls, ListResponseObject)
# check if objects are case to MultiPartyCall
self.assertIsInstance(multi_party_calls.objects[0], MultiPartyCall)
self.assertIsInstance(multi_party_calls.objects[len(multi_party_calls.objects)-1], MultiPartyCall)
# check if ID is correctly read in 5th random object
self.assertEqual(multi_party_calls.objects[5].id, "9aad6d16-ed2c-4433-9313-26f8cfc4d99c")
# check if friendly_name is correctly read in 18th random object
self.assertEqual(multi_party_calls.objects[18].friendly_name, "Gasteiz / Vitoria")
# check if termination_cause is correctly read in 13th random object
self.assertEqual(multi_party_calls.objects[13].termination_cause, "Hangup API Triggered")
# check if termination_cause_code is correctly read in 12th random object
self.assertEqual(multi_party_calls.objects[12].termination_cause_code, 2000)
# check if status is correctly read in 7th random object
self.assertEqual(multi_party_calls.objects[7].status, "Active")
# check if billed_amount is correctly read in 17th random object
self.assertEqual(multi_party_calls.objects[17].billed_amount, 0.66)
# check for case where filters are sent in request body and compare request body this time
request_body = {
'sub_account': 'SAWWWWWWWWWWWWWWWWWW',
'friendly_name': 'axa',
'status': 'active',
'termination_cause_code': 1010,
'end_time__gte': '2020-03-10 11:45',
'creation_time__lte': '2020-03-30 09:35',
'limit': 15,
'offset': 156
}
self.client.multi_party_calls.list(**request_body)
# Construct sorted GET url for both cases
expected_url = construct_get_url('https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/MultiPartyCall/',
params=request_body)
actual_url = construct_get_url(self.client.current_request.url, params={})
print(actual_url)
self.assertEqual(expected_url, actual_url)
# Verifying the method used
self.assertEqual('GET', self.client.current_request.method)
    def test_get_MPC(self):
        """get() by friendly_name fetches one MPC and maps response fields."""
        response = {
            'api_id': 'd0e000c6-9ace-11ea-97d8-1094bbeb5c2c',
            'friendly_name': 'Chamblee',
            'mpc_uuid': '9aad6d16-ed2c-4433-9313-26f8cfc4d99c',
            'participants': '/v1/Account/MAXXXXXXXXXXXXXXXXXX/MultiPartyCall/name_Chamblee/Participant/',
            'recording': 'not-recording',
            'resource_uri': '/v1/Account/MAXXXXXXXXXXXXXXXXXX/MultiPartyCall/name_Chamblee/',
            'start_time': '2020-05-18 22:02:51+05:30',
            'status': 'Active',
            'stay_alone': True
        }
        self.expected_response = response
        actual_response = self.client.set_expected_response(status_code=200, data_to_return=response)
        multi_party_call = self.client.multi_party_calls.get(friendly_name=response['friendly_name'])
        self.__assert_requests(expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/'
                                            'MultiPartyCall/name_{}/'.format(response['friendly_name']),
                               expected_method='GET', actual_response=actual_response)
        # check we received a MultiPartyCall instance
        self.assertIsInstance(multi_party_call, MultiPartyCall)
        # check if ID is correctly read in object
        self.assertEqual(multi_party_call.id, response['mpc_uuid'])
        # check if friendly_name is correctly read in object
        self.assertEqual(multi_party_call.friendly_name, response['friendly_name'])
        # check if recording is correctly read in object
        self.assertEqual(multi_party_call.recording, response['recording'])
        # check if stay_alone is correctly read in object
        self.assertEqual(multi_party_call.stay_alone, True)
    @with_response(200)
    def test_update_MPC_participant(self):
        """Updating a participant POSTs only the changed flags to the
        uuid-addressed participant resource."""
        participant_id = '10'
        uuid = '12345678-90123456'
        coach_mode = False
        mute = True
        update_response = self.client.multi_party_calls.update_participant(
            participant_id=participant_id,
            uuid=uuid,
            coach_mode=coach_mode,
            mute=mute
        )
        # URL must address the MPC by uuid_ prefix and the participant by id;
        # the body must carry exactly the two flags we set.
        self.__assert_requests(expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/'
                                            'MultiPartyCall/uuid_{}/Participant/{}/'.format(uuid, participant_id),
                               expected_method='POST', expected_request_body={'coach_mode': coach_mode, 'mute': mute},
                               actual_response=update_response)
def test_kick_MPC_participant(self):
self.client.set_expected_response(status_code=204, data_to_return=None)
participant_id = 10
uuid = '12345678-90123456'
self.client.multi_party_calls.kick_participant(
participant_id=participant_id,
uuid=uuid
)
self.__assert_requests(expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/'
'MultiPartyCall/uuid_{}/Participant/{}/'.format(uuid, participant_id),
expected_method='DELETE')
    @with_response(200)
    def test_start_recording(self):
        """Starting MPC recording POSTs format/callback settings to the
        Record sub-resource; callback method defaults to POST."""
        file_format = 'wav'
        status_callback_url = 'https://plivo.com/status'
        start_recording_response = self.client.multi_party_calls.\
            start_recording(friendly_name='Voice', file_format=file_format, status_callback_url=status_callback_url)
        self.__assert_requests(expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/'
                                            'MultiPartyCall/name_{}/Record/'.format('Voice'),
                               expected_method='POST',
                               expected_request_body={'file_format': 'wav',
                                                      'status_callback_url': status_callback_url,
                                                      'status_callback_method': 'POST'},
                               actual_response=start_recording_response)
def test_stop_recording(self):
self.client.set_expected_response(status_code=204, data_to_return=None)
self.client.multi_party_calls.stop_recording(friendly_name='Voice')
self.__assert_requests(expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/MultiPartyCall/'
'name_{}/Record/'.format('Voice'), expected_method='DELETE')
def test_pause_recording(self):
self.client.set_expected_response(status_code=204, data_to_return=None)
self.client.multi_party_calls.pause_recording(friendly_name='Voice')
self.__assert_requests(expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/MultiPartyCall/'
'name_{}/Record/Pause/'.format('Voice'), expected_method='POST')
def test_resume_recording(self):
self.client.set_expected_response(status_code=204, data_to_return=None)
self.client.multi_party_calls.resume_recording(friendly_name='Voice')
self.__assert_requests(expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/MultiPartyCall/'
'name_{}/Record/Resume/'.format('Voice'), expected_method='POST')
    @with_response(200)
    def test_get_participant(self):
        """GET a single MPC participant and verify URL, method and field
        deserialization (values come from the @with_response fixture)."""
        participant_id = 49
        uuid = '18905d56-79c8-41d4-a840-25feff71070e'
        resp = self.client.multi_party_calls.get_participant(participant_id=participant_id, uuid=uuid)
        self.__assert_requests(expected_url='https://voice.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/MultiPartyCall/'
                                            'uuid_{}/Participant/{}/'.format(uuid, participant_id),
                               expected_method='GET')
        self.assertIsInstance(resp, MultiPartyCallParticipant)
        # Verify whether SecondaryResourceID has been set properly
        self.assertEqual(resp.secondary_id, str(participant_id))
        # Verify whether call_uuid has been set properly (fixture value)
        self.assertEqual(resp.call_uuid, '90de6710-9404-40d1-ba31-f26d2f7c533f')
        # Verify whether role has been set properly
        self.assertEqual(resp.role, 'customer')
        # Verify whether start_mpc_on_enter has been set properly
        self.assertEqual(resp.start_mpc_on_enter, True)
        # Verify whether duration has been set properly
        self.assertEqual(resp.duration, 30)
        # Verify whether billed_amount has been set properly
        self.assertEqual(resp.billed_amount, 0.005)
| mit |
gaqzi/ansible-modules-extras | monitoring/zabbix_hostmacro.py | 91 | 7996 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013-2014, Epic Games, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: zabbix_hostmacro
short_description: Zabbix host macro creates/updates/deletes
description:
- manages Zabbix host macros, it can create, update or delete them.
version_added: "2.0"
author:
- "(@cave)"
- Dean Hailin Song
requirements:
- "python >= 2.6"
- zabbix-api
options:
server_url:
description:
- Url of Zabbix server, with protocol (http or https).
required: true
aliases: [ "url" ]
login_user:
description:
- Zabbix user name.
required: true
login_password:
description:
- Zabbix user password.
required: true
host_name:
description:
- Name of the host.
required: true
macro_name:
description:
- Name of the host macro.
required: true
macro_value:
description:
- Value of the host macro.
required: true
state:
description:
- State of the macro.
- On C(present), it will create if macro does not exist or update the macro if the associated data is different.
- On C(absent) will remove a macro if it exists.
required: false
choices: ['present', 'absent']
default: "present"
timeout:
description:
- The timeout of API request (seconds).
default: 10
'''
# Example playbook snippet.  Note: YAML mappings require a space after the
# colon -- the original 'macro_name:Example macro' form is not valid YAML.
EXAMPLES = '''
- name: Create a new host macro or update an existing macro's value
  local_action:
    module: zabbix_hostmacro
    server_url: http://monitor.example.com
    login_user: username
    login_password: password
    host_name: ExampleHost
    macro_name: Example macro
    macro_value: Example value
    state: present
'''
import logging
import copy
try:
from zabbix_api import ZabbixAPI, ZabbixAPISubClass
HAS_ZABBIX_API = True
except ImportError:
HAS_ZABBIX_API = False
# Extend the ZabbixAPI
# Since the zabbix-api python module too old (version 1.0, no higher version so far).
class ZabbixAPIExtends(ZabbixAPI):
    # Extra **kwargs are accepted and discarded -- presumably so callers can
    # pass options the old zabbix-api (1.0) constructor lacks; confirm before
    # relying on any of them.
    def __init__(self, server, timeout, **kwargs):
        ZabbixAPI.__init__(self, server, timeout=timeout)
class HostMacro(object):
    """CRUD helper for Zabbix host-level user macros ({$NAME} entries).

    Each mutating method calls module.exit_json/fail_json, i.e. it terminates
    the Ansible module run.
    """
    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    # exist host
    def is_host_exist(self, host_name):
        """Return whether a host with the given name exists."""
        result = self._zapi.host.exists({'host': host_name})
        return result

    # get host id by host name
    def get_host_id(self, host_name):
        """Return the hostid for host_name; fail the module if not found."""
        try:
            host_list = self._zapi.host.get({'output': 'extend', 'filter': {'host': host_name}})
            if len(host_list) < 1:
                self._module.fail_json(msg="Host not found: %s" % host_name)
            else:
                host_id = host_list[0]['hostid']
                return host_id
        # 'except ... as' is valid on Python >= 2.6 (this module's floor) and
        # required on Python 3, unlike the legacy 'except Exception, e' form.
        except Exception as e:
            self._module.fail_json(msg="Failed to get the host %s id: %s." % (host_name, e))

    # get host macro
    def get_host_macro(self, macro_name, host_id):
        """Return the existing macro object for {$macro_name}, or None."""
        try:
            # NOTE(review): 'selectSteps' is not a documented usermacro.get
            # option (it belongs to action.get); presumably copied from
            # another module -- confirm against the Zabbix API and drop.
            host_macro_list = self._zapi.usermacro.get(
                {"output": "extend", "selectSteps": "extend", 'hostids': [host_id], 'filter': {'macro': '{$' + macro_name + '}'}})
            if len(host_macro_list) > 0:
                return host_macro_list[0]
            return None
        except Exception as e:
            self._module.fail_json(msg="Failed to get host macro %s: %s" % (macro_name, e))

    # create host macro
    def create_host_macro(self, macro_name, macro_value, host_id):
        """Create {$macro_name}=macro_value on host_id and exit changed."""
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.create({'hostid': host_id, 'macro': '{$' + macro_name + '}', 'value': macro_value})
            self._module.exit_json(changed=True, result="Successfully added host macro %s " % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to create host macro %s: %s" % (macro_name, e))

    # update host macro
    def update_host_macro(self, host_macro_obj, macro_name, macro_value):
        """Update an existing macro's value and exit changed."""
        host_macro_id = host_macro_obj['hostmacroid']
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.update({'hostmacroid': host_macro_id, 'value': macro_value})
            self._module.exit_json(changed=True, result="Successfully updated host macro %s " % macro_name)
        except Exception as e:
            # Message fixed: was "Failed to updated host macro".
            self._module.fail_json(msg="Failed to update host macro %s: %s" % (macro_name, e))

    # delete host macro
    def delete_host_macro(self, host_macro_obj, macro_name):
        """Delete an existing macro and exit changed."""
        host_macro_id = host_macro_obj['hostmacroid']
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.delete([host_macro_id])
            self._module.exit_json(changed=True, result="Successfully deleted host macro %s " % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to delete host macro %s: %s" % (macro_name, e))
def main():
    """Ansible entry point: ensure the requested host macro state.

    state=present creates or updates {$MACRO_NAME}; state=absent deletes it.
    All terminal paths go through module.exit_json/fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(required=True, aliases=['url']),
            login_user=dict(required=True),
            login_password=dict(required=True, no_log=True),
            host_name=dict(required=True),
            macro_name=dict(required=True),
            macro_value=dict(required=True),
            state=dict(default="present", choices=['present', 'absent']),
            timeout=dict(type='int', default=10)
        ),
        supports_check_mode=True
    )

    if not HAS_ZABBIX_API:
        # Typo fixed: was "requried".
        module.fail_json(msg="Missing required zabbix-api module (check docs or install with: pip install zabbix-api)")

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    host_name = module.params['host_name']
    # Zabbix stores macro names upper-cased inside {$...}.
    macro_name = (module.params['macro_name']).upper()
    macro_value = module.params['macro_value']
    state = module.params['state']
    timeout = module.params['timeout']

    zbx = None
    # login to zabbix
    try:
        zbx = ZabbixAPIExtends(server_url, timeout=timeout)
        zbx.login(login_user, login_password)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    host_macro_class_obj = HostMacro(module, zbx)

    # (removed an unused 'changed = False' local -- every branch below exits
    # via the HostMacro helpers or module.exit_json directly)
    if host_name:
        host_id = host_macro_class_obj.get_host_id(host_name)
        host_macro_obj = host_macro_class_obj.get_host_macro(macro_name, host_id)

    if state == 'absent':
        if not host_macro_obj:
            module.exit_json(changed=False, msg="Host Macro %s does not exist" % macro_name)
        else:
            # delete a macro
            host_macro_class_obj.delete_host_macro(host_macro_obj, macro_name)
    else:
        if not host_macro_obj:
            # create host macro
            host_macro_class_obj.create_host_macro(macro_name, macro_value, host_id)
        else:
            # update host macro
            host_macro_class_obj.update_host_macro(host_macro_obj, macro_name, macro_value)
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
manasi24/tempest | tempest/api_schema/response/compute/v2_1/flavors_access.py | 38 | 1305 | # Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# JSON schema for the Nova v2.1 add/remove/list flavor-access responses:
# every action returns HTTP 200 with a 'flavor_access' array whose entries
# pair a flavor with a tenant that has access to it.
add_remove_list_flavor_access = {
    'status_code': [200],
    'response_body': {
        'type': 'object',
        'properties': {
            'flavor_access': {
                'type': 'array',
                'items': {
                    'type': 'object',
                    'properties': {
                        'flavor_id': {'type': 'string'},
                        'tenant_id': {'type': 'string'},
                    },
                    # Entries must contain exactly flavor_id and tenant_id.
                    'additionalProperties': False,
                    'required': ['flavor_id', 'tenant_id'],
                }
            }
        },
        # Top level must contain exactly the flavor_access array.
        'additionalProperties': False,
        'required': ['flavor_access']
    }
}
| apache-2.0 |
cfei18/incubator-airflow | airflow/contrib/kubernetes/pod_generator.py | 10 | 4846 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.contrib.kubernetes.pod import Pod
import uuid
from airflow.contrib.kubernetes.volume_mount import VolumeMount # noqa
from airflow.contrib.kubernetes.volume import Volume # noqa
class PodGenerator:
    """Contains Kubernetes Airflow Worker configuration logic.

    Accumulates volumes, volume mounts and init containers, then assembles a
    ``Pod`` for the Kubernetes executor via :meth:`make_pod`.
    """

    def __init__(self, kube_config=None):
        # kube_config is only consulted for image pull secrets; it may be
        # None when the generator is used standalone.
        self.kube_config = kube_config
        self.volumes = []
        self.volume_mounts = []
        self.init_containers = []

    def add_init_container(self,
                           name,
                           image,
                           security_context,
                           init_environment,
                           volume_mounts
                           ):
        """
        Adds an init container to the launched pod. Useful for pre-flight
        work that must finish before the worker container starts (the
        original docstring was truncated at "useful for pre-").

        Args:
            name (str): container name
            image (str): container image
            security_context (dict): Kubernetes securityContext mapping
            init_environment (dict): environment variable definitions
            volume_mounts (dict): volumeMounts definitions

        Returns:
            None
        """
        self.init_containers.append(
            {
                'name': name,
                'image': image,
                'securityContext': security_context,
                'env': init_environment,
                'volumeMounts': volume_mounts
            }
        )

    def _get_init_containers(self):
        """Return the accumulated init container specs."""
        return self.init_containers

    def add_volume(self, volume):
        """
        Append a volume to the pod spec.

        Args:
            volume (Volume): volume object carrying a name and raw configs
        """
        self._add_volume(name=volume.name, configs=volume.configs)

    def _add_volume(self, name, configs):
        """
        Append a raw volume mapping.

        Args:
            name (str): volume name
            configs (dict): Configurations for the volume.
                Could be used to define PersistentVolumeClaim, ConfigMap, etc...

        Returns:
            None
        """
        volume_map = {'name': name}
        # dict.update replaces the manual key-by-key copy loop.
        volume_map.update(configs)
        self.volumes.append(volume_map)

    def add_volume_with_configmap(self, name, config_map):
        """Append a volume backed by a ConfigMap.

        Args:
            name (str): volume name
            config_map (dict): Kubernetes configMap volume source
        """
        self.volumes.append(
            {
                'name': name,
                'configMap': config_map
            }
        )

    def _add_mount(self,
                   name,
                   mount_path,
                   sub_path,
                   read_only):
        """
        Append a raw volumeMount mapping.

        Args:
            name (str): name of the volume to mount
            mount_path (str): path inside the container
            sub_path (str): sub-path within the volume
            read_only (bool): mount read-only

        Returns:
            None
        """
        self.volume_mounts.append({
            'name': name,
            'mountPath': mount_path,
            'subPath': sub_path,
            'readOnly': read_only
        })

    def add_mount(self,
                  volume_mount):
        """
        Append a volume mount to the pod spec.

        Args:
            volume_mount (VolumeMount): mount object carrying the raw fields
        """
        self._add_mount(
            name=volume_mount.name,
            mount_path=volume_mount.mount_path,
            sub_path=volume_mount.sub_path,
            read_only=volume_mount.read_only
        )

    def _get_volumes_and_mounts(self):
        """Return the accumulated (volumes, volume_mounts) pair."""
        return self.volumes, self.volume_mounts

    def _get_image_pull_secrets(self):
        """Extracts any image pull secrets for fetching container(s)"""
        if not self.kube_config.image_pull_secrets:
            return []
        return self.kube_config.image_pull_secrets.split(',')

    def make_pod(self, namespace, image, pod_id, cmds, arguments, labels):
        """Assemble a Pod object from everything accumulated so far.

        The pod name is pod_id plus a short uuid1 suffix so concurrent
        launches of the same task get unique names.
        """
        volumes, volume_mounts = self._get_volumes_and_mounts()
        worker_init_container_spec = self._get_init_containers()

        return Pod(
            namespace=namespace,
            name=pod_id + "-" + str(uuid.uuid1())[:8],
            image=image,
            cmds=cmds,
            args=arguments,
            labels=labels,
            envs={},
            secrets=[],
            # service_account_name=self.kube_config.worker_service_account_name,
            # image_pull_secrets=self.kube_config.image_pull_secrets,
            init_containers=worker_init_container_spec,
            volumes=volumes,
            volume_mounts=volume_mounts,
            resources=None
        )
| apache-2.0 |
Taffer/courses | algs-2/week-5/tsp.py | 1 | 4969 | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
In this assignment you will implement one or more algorithms for the travelling
salesman problem, such as the dynamic programming algorithm covered in the
video lectures. Here is a data file describing a TSP instance. The first line
indicates the number of cities. Each city is a point in the plane, and each
subsequent line indicates the x- and y-coordinates of a single city.
The distance between two cities is defined as the Euclidean distance --- that
is, two cities at locations (x,y) and (z,w) have distance (x−z)2+(y−w)2
between them. [That is, square root of (x - z)^2 + (y - w)^2.]
In the box below, type in the minimum cost of a travelling salesman tour for
this instance, rounded down to the nearest integer.
OPTIONAL: If you want bigger data sets to play with, check out the TSP
instances from around the world here. The smallest data set (Western Sahara)
has 29 cities, and most of the data sets are much bigger than that. What's the
largest of these data sets that you're able to solve --- using dynamic
programming or, if you like, a completely different method?
HINT: You might experiment with ways to reduce the data set size. For example,
trying plotting the points. Can you infer any structure of the optimal
solution? Can you use that structure to speed up your algorithm?
'''
import itertools
import math
import numpy
import sys
class Traveller(object):
    ''' Travelling Salesperson Problem implementation '''
    # Held-Karp dynamic programme over city subsets: O(n^2 * 2^n) time and
    # exponential memory, so only small instances are feasible.  Python 2
    # code (print statements in tsp()).
    def __init__(self, filename):
        ''' Load TSP data from filename.

        File format: '#' comment lines and blank lines are skipped; a line
        with a single number gives the city count; every other line is an
        "x y" coordinate pair.  Raises ValueError on a malformed line or a
        count mismatch.
        '''
        self.cities = []  # list of (x, y) co-ords
        self.distances = {}  # (city index 1, city index 2): distance
        num_cities = 0
        with open(filename) as tsp_file:
            for line in tsp_file.readlines():
                if line[0] == '#':
                    continue
                if len(line.strip()) == 0:
                    continue
                parts = line.split()
                if len(parts) == 1 and num_cities == 0:
                    num_cities = int(parts[0])
                elif len(parts) == 2:
                    self.cities.append((float(parts[0]), float(parts[1])))
                else:
                    raise ValueError("Can't parse line: %s" % (line))
        if len(self.cities) != num_cities:
            raise ValueError('Expected %s cities but found %s' % (num_cities, len(self.cities)))
    def distance(self, index_1, index_2):
        ''' Calculate the distance between city1 and city2. '''
        # Memoised Euclidean distance.  NOTE(review): the cache key is the
        # ordered pair, so (a, b) and (b, a) are stored separately --
        # harmless, but doubles the cache size.
        if (index_1, index_2) in self.distances:
            return self.distances[(index_1, index_2)]
        city1 = self.cities[index_1]
        city2 = self.cities[index_2]
        result = math.sqrt((city1[0] - city2[0]) ** 2 + (city1[1] - city2[1]) ** 2)
        self.distances[(index_1, index_2)] = result
        return result
    def subsets(self, src, length):
        ''' Return all subsets of the set of cities, all must contain city src, with specified length. '''
        # Materialises every combination of the given size -- the dominant
        # memory cost of tsp().
        return [x for x in itertools.combinations(range(len(self.cities)), length) if src in x]
    def tsp(self):
        ''' Compute the minimum cost of the TSP tour.

        Held-Karp recurrence: A[s][j] = cost of the cheapest path that starts
        at city 0, visits every city in subset s exactly once, and ends at j.
        '''
        n = len(self.cities)
        A_prev = {}  # A[subset] = list of [0 .. n] destinations
        subsets = self.subsets(0, 1)
        s = subsets[0]
        # Set up base case: only the empty path to city 0 has cost 0.
        A_prev[s] = numpy.repeat(numpy.inf, n)
        A_prev[s][0] = 0.0
        # Work it: grow subsets one city at a time, keeping only the
        # previous layer to bound memory.
        for m in range(2, n + 1):
            print 'm = %s' % (m)
            A = {}
            sets = self.subsets(0, m)
            for s in sets:
                A[s] = numpy.repeat(numpy.inf, n)
                for j in [x for x in s if x != 0]:
                    # s with j removed: the subproblem this entry extends.
                    jless_s = list(s[:])
                    jless_s.remove(j)
                    jless_s = tuple(jless_s)
                    distances = []
                    for k in jless_s:
                        delta = A_prev[jless_s][k] + self.distance(k, j)
                        distances.append(delta)
                    #A[s][j] = min([(A[jless_s][k] + self.distance(k, j)) for k in s if k != j])
                    A[s][j] = min(distances)
            del(A_prev)
            A_prev = A
        # Close the tour: best path over all cities ending at j, plus the
        # edge back to city 0.
        results = []
        sets = self.subsets(0, n)
        for j in range(1, n):
            results.append(A[sets[0]][j] + self.distance(j, 0))
        result = min(results)
        try:
            return int(result) # returns floor(result) as an integer
        except OverflowError:
            # int(inf) overflows -- happens when no finite tour was found.
            return 'inf'
def main(args):
    # Solve each TSP data file named on the command line and print the
    # shortest tour cost (Python 2 print statements).
    for filename in args:
        tsp = Traveller(filename)
        print '%s has %s cities' % (filename, len(tsp.cities))
        result = tsp.tsp()
        print '=> shortest tour: %s' % (result)
if __name__ == '__main__':
main(sys.argv[1:])
| cc0-1.0 |
PatriGotts3/Patrick | py/openage/convert/gamedata/sound.py | 46 | 1288 | from .. import dataformat
from struct import Struct, unpack_from
from ..util import dbg, zstr
from .empiresdat import endianness
class SoundItem(dataformat.Exportable):
    # One candidate audio file for a Sound; Sound.sound_items references
    # these records via SubdataMember.
    name_struct        = "sound_item"
    name_struct_file   = "sound"
    struct_description = "one possible file for a sound."

    data_format = (
        (dataformat.READ_EXPORT, "filename", "char[13]"),
        (dataformat.READ_EXPORT, "resource_id", "int32_t"),
        # NOTE(review): "probablilty" is misspelled, but this string becomes
        # the exported struct member name -- renaming it would change
        # generated code, so any fix must be coordinated with consumers.
        (dataformat.READ_EXPORT, "probablilty", "int16_t"),
        (dataformat.READ_EXPORT, "civilisation", "int16_t"),
        # Unknown 2-byte field in the game data; read but not exported.
        (dataformat.READ_UNKNOWN, None, "int16_t"),
    )

    def __init__(self):
        super().__init__()
class Sound(dataformat.Exportable):
    # A named sound: header fields plus `item_count` SoundItem records.
    name_struct        = "sound"
    name_struct_file   = "sound"
    struct_description = "describes a sound, consisting of several sound items."

    data_format = (
        (dataformat.READ_EXPORT, "id", "int32_t"),
        (dataformat.READ_EXPORT, "item_count", "uint16_t"),
        # Unknown 4-byte field in the game data; read but not exported.
        (dataformat.READ_UNKNOWN, None, "int32_t"),
        # Variable-length list of SoundItem records, linked back to this
        # sound's id and sized by item_count.
        (dataformat.READ_EXPORT, "sound_items", dataformat.SubdataMember(
            ref_type=SoundItem,
            ref_to="id",
            length="item_count",
        )),
    )

    def __init__(self):
        super().__init__()
| gpl-3.0 |
Cinntax/home-assistant | homeassistant/components/velux/cover.py | 2 | 3262 | """Support for Velux covers."""
from homeassistant.components.cover import (
ATTR_POSITION,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_STOP,
CoverDevice,
)
from homeassistant.core import callback
from . import DATA_VELUX
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up cover(s) for Velux platform."""
    # Imported inside the function so Home Assistant only loads pyvlx when
    # the platform is actually set up, but hoisted out of the loop: the
    # original re-executed the import statement once per node.
    from pyvlx import OpeningDevice

    entities = []
    for node in hass.data[DATA_VELUX].pyvlx.nodes:
        if isinstance(node, OpeningDevice):
            entities.append(VeluxCover(node))
    async_add_entities(entities)
class VeluxCover(CoverDevice):
    """Representation of a Velux cover."""

    def __init__(self, node):
        """Initialize the cover."""
        # node is a pyvlx OpeningDevice (window, blind, shutter or awning).
        self.node = node

    @callback
    def async_register_callbacks(self):
        """Register callbacks to update hass after device was changed."""

        async def after_update_callback(device):
            """Call after device was updated."""
            # Push the new state to Home Assistant; updates are event-driven
            # (should_poll is False below).
            await self.async_update_ha_state()

        self.node.register_device_updated_cb(after_update_callback)

    async def async_added_to_hass(self):
        """Store register state change callback."""
        self.async_register_callbacks()

    @property
    def name(self):
        """Return the name of the Velux device."""
        return self.node.name

    @property
    def should_poll(self):
        """No polling needed within Velux."""
        return False

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION | SUPPORT_STOP

    @property
    def current_cover_position(self):
        """Return the current position of the cover."""
        # HA expects percent open; pyvlx's position_percent appears to be
        # percent closed, hence the inversion -- confirm against pyvlx docs.
        return 100 - self.node.position.position_percent

    @property
    def device_class(self):
        """Define this cover as either window/blind/awning/shutter."""
        # pyvlx is imported lazily inside methods, matching the platform
        # setup, so the dependency is only loaded when actually used.
        from pyvlx.opening_device import Blind, RollerShutter, Window, Awning

        if isinstance(self.node, Window):
            return "window"
        if isinstance(self.node, Blind):
            return "blind"
        if isinstance(self.node, RollerShutter):
            return "shutter"
        if isinstance(self.node, Awning):
            return "awning"
        # Fallback for unknown OpeningDevice subclasses.
        return "window"

    @property
    def is_closed(self):
        """Return if the cover is closed."""
        return self.node.position.closed

    async def async_close_cover(self, **kwargs):
        """Close the cover."""
        # Fire-and-forget: state updates arrive via the registered callback.
        await self.node.close(wait_for_completion=False)

    async def async_open_cover(self, **kwargs):
        """Open the cover."""
        await self.node.open(wait_for_completion=False)

    async def async_set_cover_position(self, **kwargs):
        """Move the cover to a specific position."""
        if ATTR_POSITION in kwargs:
            # Invert back from HA percent-open to pyvlx's convention.
            position_percent = 100 - kwargs[ATTR_POSITION]

            from pyvlx import Position

            await self.node.set_position(
                Position(position_percent=position_percent), wait_for_completion=False
            )

    async def async_stop_cover(self, **kwargs):
        """Stop the cover."""
        await self.node.stop(wait_for_completion=False)
| apache-2.0 |
laminr/guiliman | node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py | 2767 | 2174 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Applies a fix to CR LF TAB handling in xml.dom.
Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
Working around this: http://bugs.python.org/issue5752
TODO(bradnelson): Consider dropping this when we drop XP support.
"""
import xml.dom.minidom
def _Replacement_write_data(writer, data, is_attrib=False):
"""Writes datachars to writer."""
data = data.replace("&", "&").replace("<", "<")
data = data.replace("\"", """).replace(">", ">")
if is_attrib:
data = data.replace(
"\r", "
").replace(
"\n", "
").replace(
"\t", "	")
writer.write(data)
def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
    """Element.writexml replacement that routes attribute values through
    _Replacement_write_data so CR/LF/TAB survive a minidom round trip.

    indent = current indentation
    addindent = indentation to add to higher levels
    newl = newline string
    """
    writer.write(indent+"<" + self.tagName)

    attrs = self._get_attributes()
    # sorted() instead of list(...).sort(): attrs.keys() is a view object on
    # Python 3 (no .sort()), and the result is identical on Python 2.
    a_names = sorted(attrs.keys())

    for a_name in a_names:
        writer.write(" %s=\"" % a_name)
        _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
        writer.write("\"")
    if self.childNodes:
        writer.write(">%s" % newl)
        for node in self.childNodes:
            # Child elements pick up the patched writexml recursively.
            node.writexml(writer, indent + addindent, addindent, newl)
        writer.write("%s</%s>%s" % (indent, self.tagName, newl))
    else:
        writer.write("/>%s" % newl)
class XmlFix(object):
  """Object to manage temporary patching of xml.dom.minidom."""

  def __init__(self):
    # Preserve current xml.dom.minidom functions so Cleanup() can restore
    # them.  NOTE(review): relies on the private minidom internals
    # _write_data and Element.writexml -- verify against the target Python
    # version.
    self.write_data = xml.dom.minidom._write_data
    self.writexml = xml.dom.minidom.Element.writexml
    # Inject replacement versions of a function and a method.
    xml.dom.minidom._write_data = _Replacement_write_data
    xml.dom.minidom.Element.writexml = _Replacement_writexml

  def Cleanup(self):
    # Restore the saved originals.  Setting write_data to None makes this
    # idempotent, so __del__ after an explicit Cleanup() is a no-op.
    if self.write_data:
      xml.dom.minidom._write_data = self.write_data
      xml.dom.minidom.Element.writexml = self.writexml
      self.write_data = None

  def __del__(self):
    # Best-effort unpatch if the caller never called Cleanup().
    self.Cleanup()
| mit |
stonebig/numba | numba/cuda/tests/cudapy/test_globals.py | 7 | 1384 | import numpy as np
from numba import cuda, int32, float32
from numba.cuda.testing import unittest, CUDATestCase
N = 100
def simple_smem(ary):
    # CUDA kernel: thread 0 fills an N-element shared-memory array with
    # 0..N-1, all threads of the block synchronize, then each thread copies
    # its slot to the output.  Assumes a launch of exactly N threads in one
    # block (the test uses [1, nelem] with nelem == N == 100); otherwise
    # sm[i]/ary[i] would be indexed out of range.
    sm = cuda.shared.array(N, int32)
    i = cuda.grid(1)
    if i == 0:
        for j in range(N):
            sm[j] = j
    cuda.syncthreads()
    ary[i] = sm[i]
S0 = 10
S1 = 20
def coop_smem2d(ary):
    # CUDA kernel: each thread of an (S0, S1)-shaped block writes
    # (i+1)/(j+1) into its own shared-memory cell, synchronizes, then copies
    # the cell out.  The shared array shape is fixed by the module-level
    # globals S0/S1 at compile time.
    i, j = cuda.grid(2)
    sm = cuda.shared.array((S0, S1), float32)
    sm[i, j] = (i + 1) / (j + 1)
    cuda.syncthreads()
    ary[i, j] = sm[i, j]
class TestCudaTestGlobal(CUDATestCase):
    """Check that kernels can close over module-level globals (N, S0, S1)
    as compile-time constants."""

    def test_global_int_const(self):
        """Test simple_smem
        """
        compiled = cuda.jit("void(int32[:])")(simple_smem)

        nelem = 100  # must equal the global N the kernel was compiled with
        ary = np.empty(nelem, dtype=np.int32)
        compiled[1, nelem](ary)

        self.assertTrue(np.all(ary == np.arange(nelem, dtype=np.int32)))

    # Deliberately skipped upstream; kept for reference.
    @unittest.SkipTest
    def test_global_tuple_const(self):
        """Test coop_smem2d
        """
        compiled = cuda.jit("void(float32[:,:])")(coop_smem2d)

        shape = 10, 20  # matches the global (S0, S1) shared-array shape
        ary = np.empty(shape, dtype=np.float32)
        compiled[1, shape](ary)

        # Reference result computed on the host.
        exp = np.empty_like(ary)
        for i in range(ary.shape[0]):
            for j in range(ary.shape[1]):
                exp[i, j] = float(i + 1) / (j + 1)
        self.assertTrue(np.allclose(ary, exp))
if __name__ == '__main__':
unittest.main()
| bsd-2-clause |
justathoughtor2/atomicApe | cygwin/lib/python2.7/site-packages/pylint/test/functional/dangerous_default_value.py | 26 | 2198 | # pylint: disable=missing-docstring
HEHE = {}
def function1(value=[]): # [dangerous-default-value]
"""docstring"""
return value
def function2(value=HEHE): # [dangerous-default-value]
"""docstring"""
return value
def function3(value):
"""docstring"""
return value
def function4(value=set()): # [dangerous-default-value]
"""set is mutable and dangerous."""
return value
def function5(value=frozenset()):
"""frozenset is immutable and safe."""
return value
GLOBAL_SET = set()
def function6(value=GLOBAL_SET): # [dangerous-default-value]
"""set is mutable and dangerous."""
return value
def function7(value=dict()): # [dangerous-default-value]
"""dict is mutable and dangerous."""
return value
def function8(value=list()): # [dangerous-default-value]
"""list is mutable and dangerous."""
return value
def function9(value=[1, 2, 3, 4]): # [dangerous-default-value]
"""list with items should not output item values in error message"""
return value
def function10(value={'a': 1, 'b': 2}): # [dangerous-default-value]
"""dictionaries with items should not output item values in error message"""
return value
def function11(value=list([1, 2, 3])): # [dangerous-default-value]
"""list with items should not output item values in error message"""
return value
def function12(value=dict([('a', 1), ('b', 2)])): # [dangerous-default-value]
"""dictionaries with items should not output item values in error message"""
return value
OINK = {
'a': 1,
'b': 2
}
def function13(value=OINK): # [dangerous-default-value]
"""dictionaries with items should not output item values in error message"""
return value
def function14(value=dict([(1, 2), (1, 2, 3)])): # [dangerous-default-value]
"""a dictionary which will not be inferred to a syntax AST, but to an
astroid.Instance.
"""
return value
INVALID_DICT = dict([(1, 2), (1, 2, 3)])
def function15(value=INVALID_DICT): # [dangerous-default-value]
"""The same situation as function14."""
return value
def function16(value={1}): # [dangerous-default-value]
"""set literal as default value"""
return value
| gpl-3.0 |
hackerbot/DjangoDev | tests/custom_methods/models.py | 66 | 1260 | """
Giving models custom methods
Any method you add to a model will be available to instances.
"""
import datetime
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
def __str__(self):
return self.headline
def was_published_today(self):
return self.pub_date == datetime.date.today()
def articles_from_same_day_1(self):
return Article.objects.filter(pub_date=self.pub_date).exclude(id=self.id)
def articles_from_same_day_2(self):
"""
Verbose version of get_articles_from_same_day_1, which does a custom
database query for the sake of demonstration.
"""
from django.db import connection
with connection.cursor() as cursor:
cursor.execute("""
SELECT id, headline, pub_date
FROM custom_methods_article
WHERE pub_date = %s
AND id != %s""", [connection.ops.value_to_db_date(self.pub_date),
self.id])
return [self.__class__(*row) for row in cursor.fetchall()]
| bsd-3-clause |
slisson/intellij-community | plugins/hg4idea/testData/bin/mercurial/windows.py | 93 | 10768 | # windows.py - Windows utility function implementations for Mercurial
#
# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from i18n import _
import osutil, encoding
import errno, msvcrt, os, re, stat, sys, _winreg
import win32
executablepath = win32.executablepath
getuser = win32.getuser
hidewindow = win32.hidewindow
makedir = win32.makedir
nlinks = win32.nlinks
oslink = win32.oslink
samedevice = win32.samedevice
samefile = win32.samefile
setsignalhandler = win32.setsignalhandler
spawndetached = win32.spawndetached
split = os.path.split
termwidth = win32.termwidth
testpid = win32.testpid
unlink = win32.unlink
umask = 0022
# wrap osutil.posixfile to provide friendlier exceptions
def posixfile(name, mode='r', buffering=-1):
try:
return osutil.posixfile(name, mode, buffering)
except WindowsError, err:
raise IOError(err.errno, '%s: %s' % (name, err.strerror))
posixfile.__doc__ = osutil.posixfile.__doc__
class winstdout(object):
'''stdout on windows misbehaves if sent through a pipe'''
def __init__(self, fp):
self.fp = fp
def __getattr__(self, key):
return getattr(self.fp, key)
def close(self):
try:
self.fp.close()
except IOError:
pass
def write(self, s):
try:
# This is workaround for "Not enough space" error on
# writing large size of data to console.
limit = 16000
l = len(s)
start = 0
self.softspace = 0
while start < l:
end = start + limit
self.fp.write(s[start:end])
start = end
except IOError, inst:
if inst.errno != 0:
raise
self.close()
raise IOError(errno.EPIPE, 'Broken pipe')
def flush(self):
try:
return self.fp.flush()
except IOError, inst:
if inst.errno != errno.EINVAL:
raise
self.close()
raise IOError(errno.EPIPE, 'Broken pipe')
sys.__stdout__ = sys.stdout = winstdout(sys.stdout)
def _is_win_9x():
'''return true if run on windows 95, 98 or me.'''
try:
return sys.getwindowsversion()[3] == 1
except AttributeError:
return 'command' in os.environ.get('comspec', '')
def openhardlinks():
return not _is_win_9x()
def parsepatchoutput(output_line):
    """Extract the target filename from one line of patch(1) output."""
    # patch prints lines like "patching file NAME": the name starts at
    # column 14 and may be wrapped in `backquote' style quotes.
    fname = output_line[14:]
    if fname[0] == '`':
        fname = fname[1:-1]  # strip the surrounding quotes
    return fname
def sshargs(sshcmd, host, user, port):
    '''Build argument list for ssh or Plink'''
    # Plink spells the port flag with a capital P
    if 'plink' in sshcmd.lower():
        pflag = '-P'
    else:
        pflag = '-p'
    target = '%s@%s' % (user, host) if user else host
    if port:
        return '%s %s %s' % (target, pflag, port)
    return target
def setflags(f, l, x):
    """No-op on Windows: symlink/exec flag bits cannot be applied here."""
def copymode(src, dst, mode=None):
    """No-op on Windows: there are no permission modes to copy."""
def checkexec(path):
    """The executable bit is never tracked on Windows filesystems."""
    return False
def checklink(path):
    """Symlinks are not usable here, so never report them as supported."""
    return False
def setbinary(fd):
    # Put the descriptor backing *fd* into binary mode so the C runtime
    # stops translating line endings.
    # When run without console, pipes may expose invalid
    # fileno(), usually set to -1.
    fno = getattr(fd, 'fileno', None)
    if fno is not None and fno() >= 0:
        msvcrt.setmode(fno(), os.O_BINARY)
def pconvert(path):
    """Return *path* with the native separator replaced by '/'."""
    return '/'.join(path.split(os.sep))
def localpath(path):
    """Return *path* with '/' separators replaced by backslashes."""
    return '\\'.join(path.split('/'))
def normpath(path):
    # normalize the path, then force forward slashes
    return pconvert(os.path.normpath(path))
def normcase(path):
    # fold case using the encoding module's locale-aware upper()
    return encoding.upper(path)
def realpath(path):
    '''
    Returns the true, canonical file system path equivalent to the given
    path.
    '''
    # TODO: There may be a more clever way to do this that also handles other,
    # less common file systems.
    # resolve links, fold case, then normalize segments/separators
    return os.path.normpath(normcase(os.path.realpath(path)))
def samestat(s1, s2):
    """Stat-result identity cannot be determined here; report not-same."""
    return False
# A sequence of backslashes is special iff it precedes a double quote:
# - if there's an even number of backslashes, the double quote is not
# quoted (i.e. it ends the quoted region)
# - if there's an odd number of backslashes, the double quote is quoted
# - in both cases, every pair of backslashes is unquoted into a single
# backslash
# (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
# So, to quote a string, we must surround it in double quotes, double
# the number of backslashes that precede double quotes and add another
# backslash before every double quote (being careful with the double
# quote we've appended to the end)
_quotere = None
def shellquote(s):
    """Return *s* wrapped in double quotes for the Windows shell.

    Runs of backslashes preceding a double quote (or the closing quote)
    are doubled, and embedded double quotes are escaped.
    """
    global _quotere
    if _quotere is None:
        # compiled lazily on first use, then cached at module level
        _quotere = re.compile(r'(\\*)("|\\$)')
    escaped = _quotere.sub(r'\1\1\\\2', s)
    return '"%s"' % escaped
def quotecommand(cmd):
    """Build a command string suitable for os.popen* calls."""
    if sys.version_info >= (2, 7, 1):
        # Python itself performs this extra quoting from 2.7.1 onward
        return cmd
    return '"' + cmd + '"'
def popen(command, mode='r'):
    # Work around "popen spawned process may not write to stdout
    # under windows"
    # http://bugs.python.org/issue1366
    # stderr is redirected to the null device (see the bug report above)
    command += " 2> %s" % os.devnull
    return os.popen(quotecommand(command), mode)
def explainexit(code):
    """Return a (translated message, code) pair describing an exit status."""
    return _("exited with status %d") % code, code
# if you change this stub into a real check, please try to implement the
# username and groupname functions above, too.
def isowner(st):
    """Ownership is not checked on Windows; every file counts as ours."""
    return True
def findexe(command):
    '''Find executable for command searching like cmd.exe does.
    If command is a basename then PATH is searched for command.
    PATH isn't searched if command is an absolute or relative path.
    An extension from PATHEXT is found and added if not present.
    If command isn't found None is returned.'''
    pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
    pathexts = [ext for ext in pathext.lower().split(os.pathsep)]
    if os.path.splitext(command)[1].lower() in pathexts:
        # command already carries a recognized extension; try it as-is
        pathexts = ['']
    def findexisting(pathcommand):
        'Will append extension (if needed) and return existing file'
        for ext in pathexts:
            executable = pathcommand + ext
            if os.path.exists(executable):
                return executable
        return None
    if os.sep in command:
        # an explicit path was given: do not search PATH
        return findexisting(command)
    for path in os.environ.get('PATH', '').split(os.pathsep):
        executable = findexisting(os.path.join(path, command))
        if executable is not None:
            return executable
    # last resort: expand ~ and environment variables in the name itself
    return findexisting(os.path.expanduser(os.path.expandvars(command)))
# the only stat result kinds we care about: regular files and symlinks
_wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])
def statfiles(files):
    '''Stat each file in files. Yield each stat, or None if a file
    does not exist or has a type we don't care about.
    Cluster and cache stat per directory to minimize number of OS stat calls.'''
    dircache = {} # dirname -> filename -> status | None if file does not exist
    getkind = stat.S_IFMT
    for nf in files:
        nf = normcase(nf)
        dir, base = os.path.split(nf)
        if not dir:
            dir = '.'
        cache = dircache.get(dir, None)
        if cache is None:
            try:
                # one listdir per directory: stat every wanted entry in a
                # single pass instead of one os.stat per file
                dmap = dict([(normcase(n), s)
                             for n, k, s in osutil.listdir(dir, True)
                             if getkind(s.st_mode) in _wantedkinds])
            except OSError, err:
                # handle directory not found in Python version prior to 2.5
                # Python <= 2.4 returns native Windows code 3 in errno
                # Python >= 2.5 returns ENOENT and adds winerror field
                # EINVAL is raised if dir is not a directory.
                if err.errno not in (3, errno.ENOENT, errno.EINVAL,
                                     errno.ENOTDIR):
                    raise
                dmap = {}
            cache = dircache.setdefault(dir, dmap)
        yield cache.get(base, None)
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user.
    Not implemented on this platform: always returns None."""
    return None
def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group.
    Not implemented on this platform: always returns None."""
    return None
def _removedirs(name):
    """special version of os.removedirs that does not remove symlinked
    directories or junction points if they actually contain files"""
    if osutil.listdir(name):
        # non-empty (possibly the target of a junction): leave it alone
        return
    os.rmdir(name)
    head, tail = os.path.split(name)
    if not tail:
        head, tail = os.path.split(head)
    # walk upward, pruning each parent that is empty
    while head and tail:
        try:
            if osutil.listdir(head):
                return
            os.rmdir(head)
        except (ValueError, OSError):
            break
        head, tail = os.path.split(head)
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    try:
        unlink(f)
    except OSError, e:
        # a missing file is only tolerated when the caller asked for it
        if not (ignoremissing and e.errno == errno.ENOENT):
            raise
    # try removing directories that might now be empty
    try:
        _removedirs(os.path.dirname(f))
    except OSError:
        # directory not empty or already gone; this is best-effort only
        pass
def rename(src, dst):
    '''rename file src to dst, replacing dst if it exists

    NOTE: when dst already exists it is unlinked first and the rename is
    retried, so the replacement is not truly atomic on this platform.'''
    try:
        os.rename(src, dst)
    except OSError, e:
        if e.errno != errno.EEXIST:
            raise
        unlink(dst)
        os.rename(src, dst)
def gethgcmd():
    """Return the interpreter + script prefix used to re-invoke this hg."""
    cmd = sys.argv[:1]
    cmd.insert(0, sys.executable)
    return cmd
def groupmembers(name):
    """Group membership is not tracked on Windows for now."""
    # behave exactly like a failed lookup in the Unix group database
    raise KeyError
def isexec(f):
    """No executable bit exists here; nothing is ever reported executable."""
    return False
class cachestat(object):
    """Dummy stat-cache entry: stat results are never considered
    trustworthy enough for cache validation on this platform."""
    def __init__(self, path):
        # nothing worth recording here
        pass
    def cacheable(self):
        """Always refuse to use stat data for caching."""
        return False
def lookupreg(key, valname=None, scope=None):
    ''' Look up a key/value name in the Windows registry.
    valname: value name. If unspecified, the default value for the key
    is used.
    scope: optionally specify scope for registry lookup, this can be
    a sequence of scopes to look up in order. Default (CURRENT_USER,
    LOCAL_MACHINE).
    '''
    if scope is None:
        scope = (_winreg.HKEY_CURRENT_USER, _winreg.HKEY_LOCAL_MACHINE)
    elif not isinstance(scope, (list, tuple)):
        scope = (scope,)
    for s in scope:
        try:
            val = _winreg.QueryValueEx(_winreg.OpenKey(s, key), valname)[0]
            # never let a Unicode string escape into the wild
            return encoding.tolocal(val.encode('UTF-8'))
        except EnvironmentError:
            # key or value missing in this scope: fall through to the next
            pass
    # implicitly returns None when no scope holds the value
# NOTE(review): presumably tells callers that glob patterns on the command
# line must be expanded by the program itself -- confirm at call sites.
expandglobs = True
def statislink(st):
    '''check whether a stat result is a symlink'''
    # symlink stat results are never produced on this platform
    return False
def statisexec(st):
    '''check whether a stat result is an executable file'''
    # no executable bit exists on Windows filesystems
    return False
| apache-2.0 |
gaolichuang/py-task-framework | nova/android/api/androids.py | 1 | 6058 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
import webob.exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import exception
from nova.openstack.common.gettextutils import _
from nova import conductor
from nova import utils
from nova import db as db_api
from nova.android import agent as android_api
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.api.openstack.wsgi import Controller as wsgi_controller
ALIAS = "androids-plant"
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class AndroidShowTemplate(xmlutil.TemplateBuilder):
    # XML serialization template declaring the attributes of the 'android'
    # element returned by this API.  NOTE: 'verdor' (sic) is the field
    # name used consistently throughout this module.
    def construct(self):
        # Build the root element selected from the 'android' response key
        # and register every exposed attribute.
        root = xmlutil.TemplateElement('android', selector='android')
        root.set('id')
        root.set('user_id')
        root.set('project_id')
        root.set('android_state')
        root.set('task_state')
        root.set('launched_at')
        root.set('terminated_at')
        root.set('display_name')
        root.set('uuid')
        root.set('progress')
        root.set('verdor')
        root.set('host')
        return xmlutil.MasterTemplate(root, 1)
class AndroidExtendController(wsgi.Controller):
    """Controller extension adding lifecycle actions (active, deactive,
    start, stop) to the androids-plant resource.

    Each action looks the android up by id, delegates to the matching
    android_api call and echoes the instance back; a missing android is
    mapped to HTTP 404.
    """

    def __init__(self):
        self._android_api = android_api.API()
        super(AndroidExtendController, self).__init__()

    def _apply(self, req, id, api_call):
        # Shared body of the four actions below (the original duplicated
        # this block verbatim and carried a dead ``instance = {}`` store).
        context = req.environ['nova.context']
        try:
            instance = self._android_api.get(context, id)
            api_call(context, instance)
        except exception.AndroidNotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        return {'android': instance}

    @wsgi.serializers(xml=AndroidShowTemplate)
    @wsgi.response(201)
    @wsgi.action('active')
    def active(self, req, id, body):
        """Mark the android identified by *id* as active."""
        return self._apply(req, id, self._android_api.active)

    @wsgi.serializers(xml=AndroidShowTemplate)
    @wsgi.response(201)
    @wsgi.action('deactive')
    def deactive(self, req, id, body):
        """Mark the android identified by *id* as inactive."""
        return self._apply(req, id, self._android_api.deactive)

    @wsgi.serializers(xml=AndroidShowTemplate)
    @wsgi.response(201)
    @wsgi.action('start')
    def start(self, req, id, body):
        """Start the android identified by *id*."""
        return self._apply(req, id, self._android_api.start)

    @wsgi.serializers(xml=AndroidShowTemplate)
    @wsgi.response(201)
    @wsgi.action('stop')
    def stop(self, req, id, body):
        """Stop the android identified by *id*."""
        return self._apply(req, id, self._android_api.stop)
class AndroidController(object):
    """Primary REST controller for creating and deleting androids."""

    def __init__(self):
        self._android_api = android_api.API()

    @wsgi.serializers(xml=AndroidShowTemplate)
    @extensions.expected_errors((400, 409))
    def create(self, req, body):
        """Create a new android from the 'android' dict in the body.

        Requires both 'name' and 'verdor' (sic) keys; raises HTTP 400
        when the body is malformed or either key is missing.
        """
        # interpolate outside _() so the translation catalog is keyed on
        # the template, not the formatted result (also fixes 'fuction')
        LOG.debug(_("Service create function body %s") % body)
        if not wsgi_controller.is_valid_body(body, 'android'):
            raise webob.exc.HTTPBadRequest('Invalid request body ')
        vals = body['android']
        name = vals.get('name', None)
        verdor = vals.get('verdor', None)
        if name is None or verdor is None:
            raise webob.exc.HTTPBadRequest('Invalid request body not set name or verdor')
        context = req.environ['nova.context']
        LOG.debug(_("get name and verdor %s %s") % (name, verdor))
        service = self._android_api.create(context, name=name, verdor=verdor)
        LOG.debug(_("android create %s") % service)
        return {'android': service}

    @wsgi.serializers(xml=AndroidShowTemplate)
    @extensions.expected_errors(404)
    @wsgi.response(204)
    def delete(self, req, id, body=None):
        """Destroy the android identified by *id*; 404 if unknown."""
        context = req.environ['nova.context']
        try:
            instance = self._android_api.get(context, id)
            self._android_api.destroy(context, instance)
        except exception.AndroidNotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
class Androids(extensions.V3APIExtensionBase):
    """Android control service."""
    name = "AndroidsPlant"
    alias = ALIAS
    namespace = "http://docs.openstack.org/compute/ext/services/api/v3"
    version = 1

    def get_resources(self):
        """Register the androids-plant resource with its POST action map."""
        member_actions = {'action': 'POST'}
        resources = [extensions.ResourceExtension(ALIAS,
                                                  AndroidController(),
                                                  member_actions=member_actions)]
        return resources

    def get_controller_extensions(self):
        """Attach AndroidExtendController as a controller extension."""
        controller = AndroidExtendController()
        extension = extensions.ControllerExtension(self, ALIAS, controller)
        # fix: stray '| apache-2.0 |' text was fused onto this return line
        return [extension]
svenstaro/ansible | lib/ansible/modules/system/pamd.py | 48 | 20246 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Kenneth D. Evensen <kevensen@redhat.com>
#
# This file is part of Ansible (sort of)
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
module: pamd
author:
- "Kenneth D. Evensen (@kevensen)"
short_description: Manage PAM Modules
description:
- Edit PAM service's type, control, module path and module arguments.
In order for a PAM rule to be modified, the type, control and
module_path must match an existing rule. See man(5) pam.d for details.
version_added: "2.3"
options:
name:
required: true
description:
- The name generally refers to the PAM service file to
change, for example system-auth.
type:
required: true
description:
- The type of the PAM rule being modified. The type, control
and module_path all must match a rule to be modified.
control:
required: true
description:
- The control of the PAM rule being modified. This may be a
complicated control with brackets. If this is the case, be
sure to put "[bracketed controls]" in quotes. The type,
control and module_path all must match a rule to be modified.
module_path:
required: true
description:
- The module path of the PAM rule being modified. The type,
control and module_path all must match a rule to be modified.
new_type:
required: false
description:
- The type to assign to the new rule.
new_control:
required: false
description:
- The control to assign to the new rule.
new_module_path:
required: false
description:
- The control to assign to the new rule.
module_arguments:
required: false
description:
- When state is 'updated', the module_arguments will replace existing
module_arguments. When state is 'args_absent' args matching those
listed in module_arguments will be removed. When state is
'args_present' any args listed in module_arguments are added if
missing from the existing rule. Furthermore, if the module argument
takes a value denoted by '=', the value will be changed to that specified
in module_arguments.
state:
required: false
default: updated
choices:
- updated
- before
- after
- args_present
- args_absent
description:
- The default of 'updated' will modify an existing rule if type,
control and module_path all match an existing rule. With 'before',
the new rule will be inserted before a rule matching type, control
and module_path. Similarly, with 'after', the new rule will be inserted
after an existing rule matching type, control and module_path. With
either 'before' or 'after' new_type, new_control, and new_module_path
must all be specified. If state is 'args_absent' or 'args_present',
new_type, new_control, and new_module_path will be ignored.
path:
required: false
default: /etc/pam.d/
description:
- This is the path to the PAM service files
"""
EXAMPLES = """
- name: Update pamd rule's control in /etc/pam.d/system-auth
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
new_control: sufficient
- name: Update pamd rule's complex control in /etc/pam.d/system-auth
pamd:
name: system-auth
type: session
control: '[success=1 default=ignore]'
module_path: pam_succeed_if.so
new_control: '[success=2 default=ignore]'
- name: Insert a new rule before an existing rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
new_type: auth
new_control: sufficient
new_module_path: pam_faillock.so
state: before
- name: Insert a new rule after an existing rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
new_type: auth
new_control: sufficient
new_module_path: pam_faillock.so
state: after
- name: Remove module arguments from an existing rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
module_arguments: ''
state: updated
- name: Replace all module arguments in an existing rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
module_arguments: 'preauth
silent
deny=3
unlock_time=604800
fail_interval=900'
state: updated
- name: Remove specific arguments from a rule
pamd:
name: system-auth
    type: session
    control: '[success=1 default=ignore]'
module_path: pam_succeed_if.so
module_arguments: 'crond quiet'
state: args_absent
- name: Ensure specific arguments are present in a rule
pamd:
name: system-auth
type: session
control: '[success=1 default=ignore]'
module_path: pam_succeed_if.so
module_arguments: 'crond quiet'
state: args_present
- name: Update specific argument value in a rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
module_arguments: 'fail_interval=300'
state: args_present
"""
RETURN = '''
dest:
description: path to pam.d service that was changed
returned: success
type: string
sample: "/etc/pam.d/system-auth"
...
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
# The PamdRule class encapsulates a rule in a pam.d service
class PamdRule(object):
    """One rule line of a pam.d service file: type, control, module path
    and optional module arguments."""
    def __init__(self, rule_type,
                 rule_control, rule_module_path,
                 rule_module_args=None):
        self.rule_type = rule_type
        self.rule_control = rule_control
        self.rule_module_path = rule_module_path
        # NOTE(review): when rule_module_args is None neither branch runs,
        # leaving self.rule_module_args UNSET; downstream code relies on
        # catching AttributeError for that case -- confirm before changing.
        try:
            if (rule_module_args is not None and
                    type(rule_module_args) is list):
                self.rule_module_args = rule_module_args
            elif (rule_module_args is not None and
                    type(rule_module_args) is str):
                self.rule_module_args = rule_module_args.split()
        except AttributeError:
            self.rule_module_args = []
    @classmethod
    def rulefromstring(cls, stringline):
        # Parse one pam.d line.  Bracketed controls like
        # '[success=1 default=ignore]' contain spaces, so the raw line is
        # re-scanned between '[' and ']' and the following fields shift.
        split_line = stringline.split()
        rule_type = split_line[0]
        rule_control = split_line[1]
        if rule_control.startswith('['):
            rule_control = stringline[stringline.index('['):
                                      stringline.index(']')+1]
        if "]" in split_line[2]:
            rule_module_path = split_line[3]
            rule_module_args = split_line[4:]
        else:
            rule_module_path = split_line[2]
            rule_module_args = split_line[3:]
        return cls(rule_type, rule_control, rule_module_path, rule_module_args)
    def get_module_args_as_string(self):
        # Tolerates the unset-attribute case described in __init__.
        try:
            if self.rule_module_args is not None:
                return ' '.join(self.rule_module_args)
        except AttributeError:
            pass
        return ''
    def __str__(self):
        # Render in pam.d layout: type, control, module path, args.
        return "%-10s\t%s\t%s %s" % (self.rule_type,
                                     self.rule_control,
                                     self.rule_module_path,
                                     self.get_module_args_as_string())
# PamdService encapsulates an entire service and contains one or more rules
class PamdService(object):
    """In-memory model of one pam.d service file: a verbatim comment
    preamble plus an ordered list of PamdRule objects."""
    def __init__(self, path, name, ansible):
        self.path = path
        self.name = name
        self.check = ansible.check_mode
        self.ansible = ansible
        self.fname = self.path + "/" + self.name
        # leading '#' lines are kept as-is and written back unchanged
        self.preamble = []
        self.rules = []
        try:
            for line in open(self.fname, 'r'):
                if line.startswith('#') and not line.isspace():
                    self.preamble.append(line.rstrip())
                elif not line.startswith('#') and not line.isspace():
                    self.rules.append(PamdRule.rulefromstring
                                      (stringline=line.rstrip()))
        except Exception:
            e = get_exception()
            self.ansible.fail_json(msg='Unable to open/read PAM module file ' +
                                   '%s with error %s' % (self.fname, str(e)))
    def __str__(self):
        return self.fname
def update_rule(service, old_rule, new_rule):
    """Overwrite the fields of every rule matching old_rule with the
    non-None fields of new_rule.  Returns (changed, result-details)."""
    changed = False
    change_count = 0
    result = {'action': 'update_rule'}
    for current in service.rules:
        is_match = (old_rule.rule_type == current.rule_type and
                    old_rule.rule_control == current.rule_control and
                    old_rule.rule_module_path == current.rule_module_path)
        if not is_match:
            continue
        if (new_rule.rule_type is not None and
                new_rule.rule_type != current.rule_type):
            current.rule_type = new_rule.rule_type
            changed = True
        if (new_rule.rule_control is not None and
                new_rule.rule_control != current.rule_control):
            current.rule_control = new_rule.rule_control
            changed = True
        if (new_rule.rule_module_path is not None and
                new_rule.rule_module_path != current.rule_module_path):
            current.rule_module_path = new_rule.rule_module_path
            changed = True
        try:
            if (new_rule.rule_module_args is not None and
                    new_rule.rule_module_args != current.rule_module_args):
                current.rule_module_args = new_rule.rule_module_args
                changed = True
        except AttributeError:
            # new_rule may have been constructed without module args
            pass
        if changed:
            result['updated_rule_' + str(change_count)] = str(current)
            result['new_rule'] = str(new_rule)
            change_count += 1
    result['change_count'] = change_count
    return changed, result
def insert_before_rule(service, old_rule, new_rule):
    # Insert new_rule directly before every rule matching old_rule, unless
    # an identical rule already sits in that slot (keeps the operation
    # idempotent).  Returns (changed, result-details).
    # NOTE(review): service.rules is mutated while being iterated;
    # insertions shift later indexes -- confirm intended for multi-match
    # files before restructuring.
    index = 0
    change_count = 0
    result = {'action':
              'insert_before_rule'}
    changed = False
    for rule in service.rules:
        if (old_rule.rule_type == rule.rule_type and
                old_rule.rule_control == rule.rule_control and
                old_rule.rule_module_path == rule.rule_module_path):
            if index == 0:
                # matched the very first rule: prepend unconditionally
                service.rules.insert(0, new_rule)
                changed = True
            elif (new_rule.rule_type != service.rules[index-1].rule_type or
                  new_rule.rule_control !=
                  service.rules[index-1].rule_control or
                  new_rule.rule_module_path !=
                  service.rules[index-1].rule_module_path):
                service.rules.insert(index, new_rule)
                changed = True
            if changed:
                result['new_rule'] = str(new_rule)
                result['before_rule_'+str(change_count)] = str(rule)
                change_count += 1
        index += 1
    result['change_count'] = change_count
    return changed, result
def insert_after_rule(service, old_rule, new_rule):
    """Insert new_rule directly after every rule matching old_rule.

    Skips the insertion when an identical rule already follows the match
    (idempotence).  Returns (changed, result-details).
    """
    index = 0
    change_count = 0
    result = {'action': 'insert_after_rule'}
    changed = False
    for rule in service.rules:
        if (old_rule.rule_type == rule.rule_type and
                old_rule.rule_control == rule.rule_control and
                old_rule.rule_module_path == rule.rule_module_path):
            next_index = index + 1
            # Fix: the original indexed service.rules[index+1] without a
            # bounds check and raised IndexError whenever the matched rule
            # was the last one in the file.
            if next_index >= len(service.rules):
                service.rules.append(new_rule)
                changed = True
            elif (new_rule.rule_type != service.rules[next_index].rule_type or
                  new_rule.rule_control !=
                  service.rules[next_index].rule_control or
                  new_rule.rule_module_path !=
                  service.rules[next_index].rule_module_path):
                service.rules.insert(next_index, new_rule)
                changed = True
            if changed:
                result['new_rule'] = str(new_rule)
                result['after_rule_' + str(change_count)] = str(rule)
                change_count += 1
        index += 1
    result['change_count'] = change_count
    return changed, result
def remove_module_arguments(service, old_rule, module_args):
    """Remove every occurrence of each argument in module_args from the
    rules matching old_rule.  Returns (changed, result-details)."""
    result = {'action': 'args_absent'}
    changed = False
    change_count = 0
    for rule in service.rules:
        if (old_rule.rule_type == rule.rule_type and
                old_rule.rule_control == rule.rule_control and
                old_rule.rule_module_path == rule.rule_module_path):
            for arg_to_remove in module_args:
                # Fix: iterate over a copy -- the original removed items
                # from the list it was iterating, which skips the element
                # after each removal (e.g. repeated arguments survived).
                for arg in rule.rule_module_args[:]:
                    if arg == arg_to_remove:
                        rule.rule_module_args.remove(arg)
                        changed = True
                        result['removed_arg_' + str(change_count)] = arg
                        result['from_rule_' + str(change_count)] = str(rule)
                        change_count += 1
    result['change_count'] = change_count
    return changed, result
def add_module_arguments(service, old_rule, module_args):
    # Ensure each argument in module_args is present on every rule that
    # matches old_rule.  'key=value' arguments replace any existing
    # argument sharing the same 'key=' prefix; plain flags are appended
    # when missing.  Returns (changed, result-details).
    result = {'action': 'args_present'}
    changed = False
    change_count = 0
    for rule in service.rules:
        if (old_rule.rule_type == rule.rule_type and
                old_rule.rule_control == rule.rule_control and
                old_rule.rule_module_path == rule.rule_module_path):
            for arg_to_add in module_args:
                if "=" in arg_to_add:
                    # match on the 'key=' prefix so values get updated
                    pre_string = arg_to_add[:arg_to_add.index('=')+1]
                    indicies = [i for i, arg
                                in enumerate(rule.rule_module_args)
                                if arg.startswith(pre_string)]
                    if len(indicies) == 0:
                        rule.rule_module_args.append(arg_to_add)
                        changed = True
                        result['added_arg_'+str(change_count)] = arg_to_add
                        result['to_rule_'+str(change_count)] = str(rule)
                        change_count += 1
                    else:
                        # rewrite every argument carrying this key in place
                        for i in indicies:
                            if rule.rule_module_args[i] != arg_to_add:
                                rule.rule_module_args[i] = arg_to_add
                                changed = True
                                result['updated_arg_' +
                                       str(change_count)] = arg_to_add
                                result['in_rule_' +
                                       str(change_count)] = str(rule)
                                change_count += 1
                elif arg_to_add not in rule.rule_module_args:
                    rule.rule_module_args.append(arg_to_add)
                    changed = True
                    result['added_arg_'+str(change_count)] = arg_to_add
                    result['to_rule_'+str(change_count)] = str(rule)
                    change_count += 1
    result['change_count'] = change_count
    return changed, result
def write_rules(service):
    """Serialize the service back to its file: preamble first, then the
    rules with a blank line between runs of different rule types."""
    with open(service.fname, 'w') as out:
        for comment_line in service.preamble:
            out.write(comment_line + '\n')
        last_rule = None
        for rule in service.rules:
            if (last_rule is not None and
                    last_rule.rule_type != rule.rule_type):
                out.write('\n')
            out.write(str(rule) + '\n')
            last_rule = rule
def main():
    """Module entry point: parse parameters, load the pam.d service file
    and dispatch to the requested rule operation."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True, type='str'),
            type=dict(required=True,
                      choices=['account', 'auth',
                               'password', 'session']),
            control=dict(required=True, type='str'),
            module_path=dict(required=True, type='str'),
            new_type=dict(required=False,
                          choices=['account', 'auth',
                                   'password', 'session']),
            new_control=dict(required=False, type='str'),
            new_module_path=dict(required=False, type='str'),
            module_arguments=dict(required=False, type='list'),
            state=dict(required=False, default="updated",
                       choices=['before', 'after', 'updated',
                                'args_absent', 'args_present']),
            path=dict(required=False, default='/etc/pam.d', type='str')
        ),
        supports_check_mode=True,
        required_if=[
            ("state", "args_present", ["module_arguments"]),
            ("state", "args_absent", ["module_arguments"])
        ]
    )
    service = module.params['name']
    old_type = module.params['type']
    old_control = module.params['control']
    old_module_path = module.params['module_path']
    new_type = module.params['new_type']
    new_control = module.params['new_control']
    new_module_path = module.params['new_module_path']
    module_arguments = module.params['module_arguments']
    state = module.params['state']
    path = module.params['path']
    # old_rule identifies the rule(s) to act on; new_rule carries the
    # replacement/insertion fields
    pamd = PamdService(path, service, module)
    old_rule = PamdRule(old_type,
                        old_control,
                        old_module_path)
    new_rule = PamdRule(new_type,
                        new_control,
                        new_module_path,
                        module_arguments)
    try:
        if state == 'updated':
            change, result = update_rule(pamd,
                                         old_rule,
                                         new_rule)
        elif state == 'before':
            if (new_rule.rule_control is None or
                    new_rule.rule_type is None or
                    new_rule.rule_module_path is None):
                module.fail_json(msg='When inserting a new rule before ' +
                                 'or after an existing rule, new_type, ' +
                                 'new_control and new_module_path must ' +
                                 'all be set.')
            change, result = insert_before_rule(pamd,
                                                old_rule,
                                                new_rule)
        elif state == 'after':
            if (new_rule.rule_control is None or
                    new_rule.rule_type is None or
                    new_rule.rule_module_path is None):
                module.fail_json(msg='When inserting a new rule before' +
                                 'or after an existing rule, new_type,' +
                                 ' new_control and new_module_path must' +
                                 ' all be set.')
            change, result = insert_after_rule(pamd,
                                               old_rule,
                                               new_rule)
        elif state == 'args_absent':
            change, result = remove_module_arguments(pamd,
                                                     old_rule,
                                                     module_arguments)
        elif state == 'args_present':
            change, result = add_module_arguments(pamd,
                                                  old_rule,
                                                  module_arguments)
        # check mode reports what would change without touching the file
        if not module.check_mode:
            write_rules(pamd)
    except Exception:
        e = get_exception()
        module.fail_json(msg='error running changing pamd: %s' % str(e))
    facts = {}
    facts['pamd'] = {'changed': change, 'result': result}
    module.params['dest'] = pamd.fname
    module.exit_json(changed=change, ansible_facts=facts)
| gpl-3.0 |
Distrotech/urlgrabber | urlgrabber/byterange.py | 2 | 17143 | # This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330,
# Boston, MA 02111-1307 USA
# This file is part of urlgrabber, a high-level cross-protocol url-grabber
# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
import os
import stat
import urllib
import urllib2
import rfc822
DEBUG = None
try:
from cStringIO import StringIO
except ImportError, msg:
from StringIO import StringIO
class RangeError(IOError):
    """Error raised when an unsatisfiable range is requested."""
class HTTPRangeHandler(urllib2.BaseHandler):
    """Handler that enables HTTP Range headers.
    This was extremely simple. The Range header is a HTTP feature to
    begin with so all this class does is tell urllib2 that the
    "206 Partial Content" response from the HTTP server is what we
    expected.
    Example:
        import urllib2
        import byterange
        range_handler = range.HTTPRangeHandler()
        opener = urllib2.build_opener(range_handler)
        # install it
        urllib2.install_opener(opener)
        # create Request and set Range header
        req = urllib2.Request('http://www.python.org/')
        req.headers['Range'] = 'bytes=30-50'
        f = urllib2.urlopen(req)
    """
    def http_error_206(self, req, fp, code, msg, hdrs):
        # 206 Partial Content Response: wrap it up as a normal response
        # instead of letting urllib2 treat it as an error
        r = urllib.addinfourl(fp, hdrs, req.get_full_url())
        r.code = code
        r.msg = msg
        return r
    def http_error_416(self, req, fp, code, msg, hdrs):
        # HTTP's Range Not Satisfiable error
        raise RangeError(9, 'Requested Range Not Satisfiable')
class HTTPSRangeHandler(HTTPRangeHandler):
    """ Range Header support for HTTPS. """
    def https_error_206(self, req, fp, code, msg, hdrs):
        return self.http_error_206(req, fp, code, msg, hdrs)
    def https_error_416(self, req, fp, code, msg, hdrs):
        # Fix: the original called https_error_416 here, recursing into
        # itself forever; delegate to the HTTP handler like the 206 case
        # so RangeError is raised as intended.
        return self.http_error_416(req, fp, code, msg, hdrs)
class RangeableFileObject:
"""File object wrapper to enable raw range handling.
This was implemented primarilary for handling range
specifications for file:// urls. This object effectively makes
a file object look like it consists only of a range of bytes in
the stream.
Examples:
# expose 10 bytes, starting at byte position 20, from
# /etc/aliases.
>>> fo = RangeableFileObject(file('/etc/passwd', 'r'), (20,30))
# seek seeks within the range (to position 23 in this case)
>>> fo.seek(3)
# tell tells where your at _within the range_ (position 3 in
# this case)
>>> fo.tell()
# read EOFs if an attempt is made to read past the last
# byte in the range. the following will return only 7 bytes.
>>> fo.read(30)
"""
def __init__(self, fo, rangetup):
"""Create a RangeableFileObject.
fo -- a file like object. only the read() method need be
supported but supporting an optimized seek() is
preferable.
rangetup -- a (firstbyte,lastbyte) tuple specifying the range
to work over.
The file object provided is assumed to be at byte offset 0.
"""
self.fo = fo
(self.firstbyte, self.lastbyte) = range_tuple_normalize(rangetup)
self.realpos = 0
self._do_seek(self.firstbyte)
def __getattr__(self, name):
"""This effectively allows us to wrap at the instance level.
Any attribute not found in _this_ object will be searched for
in self.fo. This includes methods."""
if hasattr(self.fo, name):
return getattr(self.fo, name)
raise AttributeError, name
def tell(self):
"""Return the position within the range.
This is different from fo.seek in that position 0 is the
first byte position of the range tuple. For example, if
this object was created with a range tuple of (500,899),
tell() will return 0 when at byte position 500 of the file.
"""
return (self.realpos - self.firstbyte)
def seek(self,offset,whence=0):
"""Seek within the byte range.
Positioning is identical to that described under tell().
"""
assert whence in (0, 1, 2)
if whence == 0: # absolute seek
realoffset = self.firstbyte + offset
elif whence == 1: # relative seek
realoffset = self.realpos + offset
elif whence == 2: # absolute from end of file
# XXX: are we raising the right Error here?
raise IOError('seek from end of file not supported.')
# do not allow seek past lastbyte in range
if self.lastbyte and (realoffset >= self.lastbyte):
realoffset = self.lastbyte
self._do_seek(realoffset - self.realpos)
def read(self, size=-1):
    """Read up to *size* bytes, never reading past the range's end.

    The effective size is clamped via _calc_read_size(), so an EOF
    condition is produced at the range's last byte even when the
    underlying file continues beyond it.
    """
    limited = self._calc_read_size(size)
    data = self.fo.read(limited)
    self.realpos += len(data)
    return data
def readline(self, size=-1):
    """Read one line, limiting the amount read to the byte range.

    Like read(), the effective size is clamped via _calc_read_size()
    so the line cannot extend past the range's last byte.
    """
    limited = self._calc_read_size(size)
    line = self.fo.readline(limited)
    self.realpos += len(line)
    return line
def _calc_read_size(self, size):
    """Clamp *size* so a read cannot run past the end of the range.

    With no lastbyte bound, *size* is returned untouched.  A negative
    *size* (read-everything) becomes "everything left in the range".
    """
    if not self.lastbyte:
        return size
    remaining = self.lastbyte - self.realpos
    if size > -1:
        # explicit size: honour it unless it would cross lastbyte
        return min(size, remaining)
    # unbounded read: cap at the end of the range
    return remaining
def _do_seek(self, offset):
    """Advance the underlying file by *offset* bytes.

    offset is relative to the current position (self.realpos) and
    must be non-negative.  Uses the wrapped object's native seek()
    when present, otherwise falls back to read-and-discard via
    _poor_mans_seek().
    """
    assert offset >= 0
    if hasattr(self.fo, 'seek'):
        self.fo.seek(self.realpos + offset)
    else:
        self._poor_mans_seek(offset)
    self.realpos += offset
def _poor_mans_seek(self, offset):
    """Emulate a forward seek by read()ing from the wrapped object.

    This is used for file-like objects that lack native seek
    support: *offset* bytes are read in chunks and discarded.

    offset -- number of bytes to consume from the wrapped object.
    Raises RangeError if EOF is hit before *offset* bytes are read.
    """
    consumed = 0
    chunk = 1024
    while consumed < offset:
        if consumed + chunk > offset:
            # final partial chunk
            chunk = offset - consumed
        data = self.fo.read(chunk)
        if len(data) != chunk:
            # EOF before reaching the requested position
            raise RangeError(9, 'Requested Range Not Satisfiable')
        consumed += chunk
class FileRangeHandler(urllib2.FileHandler):
    """FileHandler subclass that adds Range support.

    This class handles Range headers exactly like an HTTP
    server would: a requested range is honoured by wrapping the
    opened file in a RangeableFileObject and adjusting the
    reported Content-Length.
    """
    def open_local_file(self, req):
        """Open a local file URL, honouring any Range header on *req*.

        Returns an addinfourl whose file object is restricted to the
        requested range (when one is present) and whose headers carry
        the adjusted Content-Length.  Raises RangeError when the range
        falls outside the file (the analogue of HTTP 416), and
        urllib2.URLError when the URL names a non-local host.
        """
        import mimetypes
        import mimetools
        host = req.get_host()
        file = req.get_selector()
        localfile = urllib.url2pathname(file)
        stats = os.stat(localfile)
        size = stats[stat.ST_SIZE]
        modified = rfc822.formatdate(stats[stat.ST_MTIME])
        mtype = mimetypes.guess_type(file)[0]
        if host:
            host, port = urllib.splitport(host)
            # refuse anything that does not resolve to this machine
            if port or socket.gethostbyname(host) not in self.get_names():
                raise urllib2.URLError('file not on local host')
        fo = open(localfile,'rb')
        brange = req.headers.get('Range',None)
        brange = range_header_to_tuple(brange)
        # () means the Range header was present but malformed (see
        # range_header_to_tuple()); None means no Range header at all
        assert brange != ()
        if brange:
            (fb,lb) = brange
            if lb == '': lb = size  # open-ended range: through EOF
            if fb < 0 or fb > size or lb > size:
                raise RangeError(9, 'Requested Range Not Satisfiable')
            # report only the length of the selected slice
            size = (lb - fb)
            fo = RangeableFileObject(fo, (fb,lb))
        headers = mimetools.Message(StringIO(
            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified)))
        return urllib.addinfourl(fo, headers, 'file:'+file)
# FTP Range Support
# Unfortunately, a large amount of base FTP code had to be copied
# from urllib and urllib2 in order to insert the FTP REST command.
# Code modifications for range support have been commented as
# follows:
# -- range support modifications start/end here
from urllib import splitport, splituser, splitpasswd, splitattr, \
unquote, addclosehook, addinfourl
import ftplib
import socket
import sys
import mimetypes
import mimetools
class FTPRangeHandler(urllib2.FTPHandler):
    """urllib2 FTPHandler with byte-range support.

    Ranges are implemented with the FTP REST (restart) command: the
    server is asked to begin the transfer at the range's first byte,
    and the returned stream is wrapped in a RangeableFileObject to
    enforce the range's upper bound.
    """
    def ftp_open(self, req):
        """Open the FTP URL in *req*, honouring any Range header.

        Returns an addinfourl wrapping the (possibly range-limited)
        data connection.  Raises RangeError when a requested range
        cannot be satisfied and IOError for general FTP failures.
        """
        host = req.get_host()
        if not host:
            raise IOError, ('ftp error', 'no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)
        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')
        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise urllib2.URLError(msg)
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(unquote, dirs)
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs)
            # 'I' (binary) when a file is named, 'D' for a directory listing;
            # a ;type= URL attribute may override this
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitattr(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()

            # -- range support modifications start here
            rest = None
            range_tup = range_header_to_tuple(req.headers.get('Range',None))
            # () signals a malformed Range header
            assert range_tup != ()
            if range_tup:
                (fb,lb) = range_tup
                if fb > 0: rest = fb  # server-side skip via REST
            # -- range support modifications end here

            fp, retrlen = fw.retrfile(file, type, rest)

            # -- range support modifications start here
            if range_tup:
                (fb,lb) = range_tup
                if lb == '':
                    # open-ended range: the real length is needed to bound it
                    if retrlen is None or retrlen == 0:
                        raise RangeError(9, 'Requested Range Not Satisfiable due to unobtainable file length.')
                    lb = retrlen
                    retrlen = lb - fb
                    if retrlen < 0:
                        # beginning of range is larger than file
                        raise RangeError(9, 'Requested Range Not Satisfiable')
                else:
                    retrlen = lb - fb
                # the server already skipped fb bytes (REST), so only the
                # length needs limiting client-side
                fp = RangeableFileObject(fp, (0,retrlen))
            # -- range support modifications end here

            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-Type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-Length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors, msg:
            # re-raise as IOError, preserving the original traceback
            raise IOError, ('ftp error', msg), sys.exc_info()[2]

    def connect_ftp(self, user, passwd, host, port, dirs):
        """Create the range-capable ftpwrapper used by ftp_open()."""
        fw = ftpwrapper(user, passwd, host, port, dirs)
        return fw
class ftpwrapper(urllib.ftpwrapper):
    # range support note:
    # this ftpwrapper code is copied directly from
    # urllib. The only enhancement is to add the rest
    # argument and pass it on to ftp.ntransfercmd
    def retrfile(self, file, type, rest=None):
        """Retrieve *file* (or a directory listing) over FTP.

        file -- remote file name; empty/falsy means a listing.
        type -- FTP transfer type ('I', 'A', 'D', ...).
        rest -- optional byte offset handed to the FTP REST command so
                the transfer starts mid-file (the range enhancement).
        Returns a (file-object, length) tuple like urllib.ftpwrapper.
        """
        self.endtransfer()
        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
        else: cmd = 'TYPE ' + type; isdir = 0
        try:
            self.ftp.voidcmd(cmd)
        except ftplib.all_errors:
            # control connection may have dropped; reconnect and retry once
            self.init()
            self.ftp.voidcmd(cmd)
        conn = None
        if file and not isdir:
            # Use nlst to see if the file exists at all
            try:
                self.ftp.nlst(file)
            except ftplib.error_perm, reason:
                raise IOError, ('ftp error', reason), sys.exc_info()[2]
            # Restore the transfer mode!
            self.ftp.voidcmd(cmd)
            # Try to retrieve as a file
            try:
                cmd = 'RETR ' + file
                conn = self.ftp.ntransfercmd(cmd, rest)
            except ftplib.error_perm, reason:
                if str(reason)[:3] == '501':
                    # workaround for REST not supported error: fetch the
                    # whole file, then skip `rest` bytes client-side
                    fp, retrlen = self.retrfile(file, type)
                    fp = RangeableFileObject(fp, (rest,''))
                    return (fp, retrlen)
                elif str(reason)[:3] != '550':
                    raise IOError, ('ftp error', reason), sys.exc_info()[2]
        if not conn:
            # Set transfer mode to ASCII!
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing
            if file: cmd = 'LIST ' + file
            else: cmd = 'LIST'
            conn = self.ftp.ntransfercmd(cmd)
        self.busy = 1
        # Pass back both a suitably decorated object and a retrieval length
        return (addclosehook(conn[0].makefile('rb'),
                             self.endtransfer), conn[1])
####################################################################
# Range Tuple Functions
# XXX: These range tuple functions might go better in a class.
# cache for the lazily-compiled Range header regex
_rangere = None
def range_header_to_tuple(range_header):
    """Parse a Range header value into a (firstbyte, lastbyte) tuple.

    Range headers have the form "bytes=<firstbyte>-<lastbyte>".  When
    lastbyte is omitted in the header it comes back as '' in the
    tuple.  The header's inclusive last byte is converted to this
    module's exclusive convention (lastbyte + 1).

    Return None if range_header is None.
    Return () if range_header does not conform to the range spec
    pattern.
    """
    global _rangere
    if range_header is None:
        return None
    if _rangere is None:
        # compile once and cache at module level
        import re
        _rangere = re.compile(r'^bytes=(\d{1,})-(\d*)')
    match = _rangere.match(range_header)
    if not match:
        return ()
    tup = range_tuple_normalize(match.group(1, 2))
    if tup and tup[1]:
        # inclusive header bound -> exclusive internal bound
        tup = (tup[0], tup[1] + 1)
    return tup
def range_tuple_to_header(range_tup):
    """Render a (firstbyte, lastbyte) tuple as a Range header value.

    Return a string of the form "bytes=<firstbyte>-<lastbyte>"
    (inclusive bounds, per the HTTP spec) or None if no Range header
    is needed at all.
    """
    if range_tup is None:
        return None
    range_tup = range_tuple_normalize(range_tup)
    if not range_tup:
        return None
    first, last = range_tup
    if last:
        # exclusive internal bound -> inclusive header bound
        last = last - 1
    return 'bytes=%s-%s' % (first, last)
def range_tuple_normalize(range_tup):
    """Normalize a (first_byte, last_byte) range tuple.

    Return a tuple whose first element is guaranteed to be an int
    and whose second element will be '' (meaning: the last byte) or
    an int. Finally, return None if the normalized tuple == (0,'')
    as that is equivalent to retrieving the entire file.

    Raises RangeError when an explicit last byte precedes the first.
    """
    if range_tup is None:
        return None
    # handle first byte: missing/empty means start of file
    fb = range_tup[0]
    fb = 0 if fb in (None, '') else int(fb)
    # handle last byte: absent, None and '' all mean "through EOF"
    try:
        lb = range_tup[1]
    except IndexError:
        lb = ''
    else:
        if lb is None:
            lb = ''
        elif lb != '':
            lb = int(lb)
    # check if range is over the entire file
    if (fb, lb) == (0, ''):
        return None
    # check that the range is valid.  Only compare when an explicit
    # last byte exists: the original `lb < fb` compared '' to an int,
    # which silently relied on Python 2's arbitrary mixed-type
    # ordering (and raises TypeError on Python 3).
    if lb != '' and lb < fb:
        raise RangeError(9, 'Invalid byte range: %s-%s' % (fb, lb))
    return (fb, lb)
| lgpl-2.1 |
mindnervestech/mnrp | addons/report_webkit/webkit_report.py | 234 | 16702 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Nicolas Bessi (Camptocamp)
# Contributor(s) : Florent Xicluna (Wingo SA)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import subprocess
import os
import sys
from openerp import report
import tempfile
import time
import logging
from functools import partial
from report_helper import WebKitHelper
import openerp
from openerp.modules.module import get_module_resource
from openerp.report.report_sxw import *
from openerp import tools
from openerp.tools.translate import _
from openerp.osv.osv import except_osv
from urllib import urlencode, quote as quote
_logger = logging.getLogger(__name__)
try:
    # We use a jinja2 sandboxed environment to render mako templates.
    # Note that the rendering does not cover all the mako syntax, in particular
    # arbitrary Python statements are not accepted, and not all expressions are
    # allowed: only "public" attributes (not starting with '_') of objects may
    # be accessed.
    # This is done on purpose: it prevents incidental or malicious execution of
    # Python code that may break the security of the server.
    from jinja2.sandbox import SandboxedEnvironment
    # jinja2 environment configured with mako-style delimiters so that the
    # existing mako report templates keep rendering unchanged
    mako_template_env = SandboxedEnvironment(
        block_start_string="<%",
        block_end_string="%>",
        variable_start_string="${",
        variable_end_string="}",
        comment_start_string="<%doc>",
        comment_end_string="</%doc>",
        line_statement_prefix="%",
        line_comment_prefix="##",
        trim_blocks=True,               # do not output newline after blocks
        autoescape=True,                # XML/HTML automatic escaping
    )
    # expose a few safe helpers to every template
    mako_template_env.globals.update({
        'str': str,
        'quote': quote,
        'urlencode': urlencode,
    })
except ImportError:
    # mako_template_env stays undefined; any later rendering attempt will
    # fail with a NameError
    _logger.warning("jinja2 not available, templating features will not work!")
def mako_template(text):
    """Build a Mako template from *text*.

    The template is compiled by the sandboxed jinja2 environment
    configured with mako-style delimiters; it uses UTF-8 encoding.
    Raises NameError if jinja2 could not be imported at module load.
    """
    return mako_template_env.from_string(text)
_extender_functions = {}
def webkit_report_extender(report_name):
    """
    A decorator to define functions to extend the context used in a template rendering.
    report_name must be the xml id of the desired report (it is mandatory to indicate the
    module in that xml id).

    Each decorated function is called at the creation of the report with
    the following arguments (in this order):

    - pool          The model pool.
    - cr            The cursor.
    - uid           The user id.
    - localcontext  The context given to the template engine to render the
                    templates for the current report; this is the dict the
                    function should modify.
    - context       The OpenERP context.
    """
    def register(fn):
        # append fn to the per-report registry, creating the slot on demand
        extenders = _extender_functions.get(report_name)
        if not extenders:
            extenders = []
            _extender_functions[report_name] = extenders
        extenders.append(fn)
        return fn
    return register
class WebKitParser(report_sxw):
    """Custom report class that uses webkit (wkhtmltopdf) to render HTML reports.

    Code partially taken from report openoffice. Thanks guys :)
    """
    def __init__(self, name, table, rml=False, parser=rml_parse,
                 header=True, store=False, register=True):
        # localcontext is the dict handed to the template engine; it is
        # populated per report in create_single_pdf()
        self.localcontext = {}
        report_sxw.__init__(self, name, table, rml, parser,
                            header, store, register=register)

    def get_lib(self, cursor, uid):
        """Return the lib wkhtml path.

        Reads the 'webkit_path' ir.config_parameter first; when unset,
        searches the executable on the system PATH (plus the server root).
        Raises except_osv when no wkhtmltopdf binary can be located.
        """
        proxy = self.pool['ir.config_parameter']
        webkit_path = proxy.get_param(cursor, uid, 'webkit_path')
        if not webkit_path:
            try:
                defpath = os.environ.get('PATH', os.defpath).split(os.pathsep)
                if hasattr(sys, 'frozen'):
                    # frozen (packaged) interpreter: also look in the cwd
                    defpath.append(os.getcwd())
                if tools.config['root_path']:
                    defpath.append(os.path.dirname(tools.config['root_path']))
                webkit_path = tools.which('wkhtmltopdf', path=os.pathsep.join(defpath))
            except IOError:
                webkit_path = None
        if webkit_path:
            return webkit_path
        raise except_osv(
            _('Wkhtmltopdf library path is not set'),
            _('Please install executable on your system' \
              ' (sudo apt-get install wkhtmltopdf) or download it from here:' \
              ' http://code.google.com/p/wkhtmltopdf/downloads/list and set the' \
              ' path in the ir.config_parameter with the webkit_path key.' \
              'Minimal version is 0.9.9')
        )

    def generate_pdf(self, comm_path, report_xml, header, footer, html_list, webkit_header=False):
        """Call webkit in order to generate pdf.

        comm_path -- path to the wkhtmltopdf executable (falls back to
                     the bare command name when falsy).
        report_xml -- ir.actions.report.xml record for this report.
        header/footer -- rendered header/footer HTML, or falsy.
        html_list -- list of rendered body HTML strings.
        webkit_header -- optional header configuration record; defaults
                         to report_xml.webkit_header.
        Returns the generated PDF as a byte string; raises except_osv
        when wkhtmltopdf exits with a non-zero status.
        """
        if not webkit_header:
            webkit_header = report_xml.webkit_header
        fd, out_filename = tempfile.mkstemp(suffix=".pdf",
                                            prefix="webkit.tmp.")
        # every temp file created below is recorded here and removed
        # in the finally block
        file_to_del = [out_filename]
        if comm_path:
            command = [comm_path]
        else:
            command = ['wkhtmltopdf']

        command.append('--quiet')
        # default to UTF-8 encoding. Use <meta charset="latin-1"> to override.
        command.extend(['--encoding', 'utf-8'])
        if header :
            with tempfile.NamedTemporaryFile(suffix=".head.html",
                                             delete=False) as head_file:
                head_file.write(self._sanitize_html(header.encode('utf-8')))
            file_to_del.append(head_file.name)
            command.extend(['--header-html', head_file.name])
        if footer :
            with tempfile.NamedTemporaryFile(suffix=".foot.html",
                                             delete=False) as foot_file:
                foot_file.write(self._sanitize_html(footer.encode('utf-8')))
            file_to_del.append(foot_file.name)
            command.extend(['--footer-html', foot_file.name])

        # page geometry options; commas are normalized to dots for locales
        # that store decimals with a comma
        if webkit_header.margin_top :
            command.extend(['--margin-top', str(webkit_header.margin_top).replace(',', '.')])
        if webkit_header.margin_bottom :
            command.extend(['--margin-bottom', str(webkit_header.margin_bottom).replace(',', '.')])
        if webkit_header.margin_left :
            command.extend(['--margin-left', str(webkit_header.margin_left).replace(',', '.')])
        if webkit_header.margin_right :
            command.extend(['--margin-right', str(webkit_header.margin_right).replace(',', '.')])
        if webkit_header.orientation :
            command.extend(['--orientation', str(webkit_header.orientation).replace(',', '.')])
        if webkit_header.format :
            command.extend(['--page-size', str(webkit_header.format).replace(',', '.')])

        # one temporary body file per rendered document
        count = 0
        for html in html_list :
            with tempfile.NamedTemporaryFile(suffix="%d.body.html" %count,
                                             delete=False) as html_file:
                count += 1
                html_file.write(self._sanitize_html(html.encode('utf-8')))
            file_to_del.append(html_file.name)
            command.append(html_file.name)
        command.append(out_filename)
        stderr_fd, stderr_path = tempfile.mkstemp(text=True)
        file_to_del.append(stderr_path)
        try:
            status = subprocess.call(command, stderr=stderr_fd)
            os.close(stderr_fd) # ensure flush before reading
            stderr_fd = None # avoid closing again in finally block
            fobj = open(stderr_path, 'r')
            error_message = fobj.read()
            fobj.close()
            if not error_message:
                error_message = _('No diagnosis message was provided')
            else:
                error_message = _('The following diagnosis message was provided:\n') + error_message
            if status :
                raise except_osv(_('Webkit error' ),
                                 _("The command 'wkhtmltopdf' failed with error code = %s. Message: %s") % (status, error_message))
            with open(out_filename, 'rb') as pdf_file:
                pdf = pdf_file.read()
            os.close(fd)
        finally:
            if stderr_fd is not None:
                os.close(stderr_fd)
            # best-effort cleanup of all temp files
            for f_to_del in file_to_del:
                try:
                    os.unlink(f_to_del)
                except (OSError, IOError), exc:
                    _logger.error('cannot remove file %s: %s', f_to_del, exc)
        return pdf

    def translate_call(self, parser_instance, src):
        """Translate String.

        Looks up *src* in ir.translation for the report template, then
        falls back to a name-less lookup (backward compatibility), and
        finally to the untranslated source string.
        """
        ir_translation = self.pool['ir.translation']
        name = self.tmpl and 'addons/' + self.tmpl or None
        res = ir_translation._get_source(parser_instance.cr, parser_instance.uid,
                                         name, 'report', parser_instance.localcontext.get('lang', 'en_US'), src)
        if res == src:
            # no translation defined, fallback on None (backward compatibility)
            res = ir_translation._get_source(parser_instance.cr, parser_instance.uid,
                                             None, 'report', parser_instance.localcontext.get('lang', 'en_US'), src)
        if not res :
            return src
        return res

    # override needed to keep the attachments storing procedure
    def create_single_pdf(self, cursor, uid, ids, data, report_xml, context=None):
        """generate the PDF

        Renders the webkit template (body, header, footer) for the
        records in *ids* and converts the result with wkhtmltopdf.
        Returns (content, format): ('...', 'pdf') normally, or the
        rendered header as ('...', 'html') when webkit_debug is set.
        """
        # just try to find an xml id for the report
        cr = cursor
        pool = openerp.registry(cr.dbname)
        found_xml_ids = pool["ir.model.data"].search(cr, uid, [["model", "=", "ir.actions.report.xml"], \
                                                               ["res_id", "=", report_xml.id]], context=context)
        xml_id = None
        if found_xml_ids:
            xml_id = pool["ir.model.data"].read(cr, uid, found_xml_ids[0], ["module", "name"])
            xml_id = "%s.%s" % (xml_id["module"], xml_id["name"])
        if context is None:
            context={}
        htmls = []
        # delegate non-webkit reports to the standard implementation
        if report_xml.report_type != 'webkit':
            return super(WebKitParser,self).create_single_pdf(cursor, uid, ids, data, report_xml, context=context)
        parser_instance = self.parser(cursor,
                                      uid,
                                      self.name2,
                                      context=context)
        self.pool = pool
        objs = self.getObjects(cursor, uid, ids, context)
        parser_instance.set_context(objs, data, ids, report_xml.report_type)
        # template source: module file first, then the database field
        template = False
        if report_xml.report_file :
            path = get_module_resource(*report_xml.report_file.split('/'))
            if path and os.path.exists(path) :
                template = file(path).read()
        if not template and report_xml.report_webkit_data :
            template = report_xml.report_webkit_data
        if not template :
            raise except_osv(_('Error!'), _('Webkit report template not found!'))
        header = report_xml.webkit_header.html
        footer = report_xml.webkit_header.footer_html
        if not header and report_xml.use_global_header:
            raise except_osv(
                _('No header defined for this Webkit report!'),
                _('Please set a header in company settings.')
            )
        if not report_xml.use_global_header :
            header = ''
            default_head = get_module_resource('report_webkit', 'default_header.html')
            with open(default_head,'r') as f:
                header = f.read()
        css = report_xml.webkit_header.css
        if not css :
            css = ''
        # bind the translation helper to this parser instance so templates
        # can call _('...')
        translate_call = partial(self.translate_call, parser_instance)
        body_mako_tpl = mako_template(template)
        helper = WebKitHelper(cursor, uid, report_xml.id, context)
        parser_instance.localcontext['helper'] = helper
        parser_instance.localcontext['css'] = css
        parser_instance.localcontext['_'] = translate_call
        # apply extender functions
        additional = {}
        if xml_id in _extender_functions:
            for fct in _extender_functions[xml_id]:
                fct(pool, cr, uid, parser_instance.localcontext, context)
        if report_xml.precise_mode:
            # precise mode: render each record separately (one html per record)
            ctx = dict(parser_instance.localcontext)
            for obj in parser_instance.localcontext['objects']:
                ctx['objects'] = [obj]
                try :
                    html = body_mako_tpl.render(dict(ctx))
                    htmls.append(html)
                except Exception, e:
                    msg = u"%s" % e
                    _logger.error(msg)
                    raise except_osv(_('Webkit render!'), msg)
        else:
            try :
                html = body_mako_tpl.render(dict(parser_instance.localcontext))
                htmls.append(html)
            except Exception, e:
                msg = u"%s" % e
                _logger.error(msg)
                raise except_osv(_('Webkit render!'), msg)
        head_mako_tpl = mako_template(header)
        try :
            head = head_mako_tpl.render(dict(parser_instance.localcontext, _debug=False))
        except Exception, e:
            raise except_osv(_('Webkit render!'), u"%s" % e)
        foot = False
        if footer :
            foot_mako_tpl = mako_template(footer)
            try :
                foot = foot_mako_tpl.render(dict(parser_instance.localcontext))
            except Exception, e:
                msg = u"%s" % e
                _logger.error(msg)
                raise except_osv(_('Webkit render!'), msg)
        if report_xml.webkit_debug :
            # debug mode: return the header template rendered with the
            # bodies embedded, as raw html, instead of converting to pdf
            try :
                deb = head_mako_tpl.render(dict(parser_instance.localcontext, _debug=tools.ustr("\n".join(htmls))))
            except Exception, e:
                msg = u"%s" % e
                _logger.error(msg)
                raise except_osv(_('Webkit render!'), msg)
            return (deb, 'html')
        bin = self.get_lib(cursor, uid)
        pdf = self.generate_pdf(bin, report_xml, head, foot, htmls)
        return (pdf, 'pdf')

    def create(self, cursor, uid, ids, data, context=None):
        """We override the create function in order to handle generator
           Code taken from report openoffice. Thanks guys :) """
        pool = openerp.registry(cursor.dbname)
        ir_obj = pool['ir.actions.report.xml']
        # name is prefixed with 'report.'; strip it for the lookup
        report_xml_ids = ir_obj.search(cursor, uid,
                                       [('report_name', '=', self.name[7:])], context=context)
        if report_xml_ids:
            report_xml = ir_obj.browse(cursor, uid, report_xml_ids[0], context=context)
        else:
            return super(WebKitParser, self).create(cursor, uid, ids, data, context)
        setattr(report_xml, 'use_global_header', self.header if report_xml.header else False)
        if report_xml.report_type != 'webkit':
            return super(WebKitParser, self).create(cursor, uid, ids, data, context)
        result = self.create_source_pdf(cursor, uid, ids, data, report_xml, context)
        if not result:
            return (False,False)
        return result

    def _sanitize_html(self, html):
        """wkhtmltopdf expects the html page to declare a doctype.

        Prepend a minimal HTML5 doctype when *html* lacks one.
        """
        if html and html[:9].upper() != "<!DOCTYPE":
            html = "<!DOCTYPE html>\n" + html
        return html
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jcchin/Hyperloop_v2 | src/hyperloop/Python/structural_optimization.py | 4 | 17480 | from __future__ import print_function
import numpy as np
import matplotlib.pylab as plt
from openmdao.api import IndepVarComp, Component, Group, Problem, ExecComp, ScipyOptimizer
class StructuralOptimization(Component):
    """
    Notes
    -----
    Estimates tube tunnel cost and pylon material cost
    Optimizes tunnel thickness, pylon radius, and pylon spacing
    Many parameters are currently taken from hyperloop alpha, will eventually pull from mission trajectory

    Params
    ------
    tube_area : float
        Inner tube radius. Default is 3.8013 m**2
    rho_tube : float
        Density of tube material. Default is 7820 kg/m**3
    E_tube : float
        Young's modulus of tube material. Default value is 200e9 Pa
    v_tube : float
        Poisson's ratio of tube material. Default value is .3
    Su_tube : float
        Ultimate strength of tube material. Default value is 152e6 Pa
    sf : float
        Tube safety factor. Default value is 1.5
    g : float
        Gravitational acceleration. Default value is 9.81 m/s**2
    unit_cost_tube : float
        Cost of tube material per unit mass. Default value is .33 USD/kg
    p_tunnel : float
        Pressure of air in tube. Default value is 850 Pa. Value will come from vacuum component
    p_ambient : float
        Pressure of atmosphere. Default value is 101.3e3 Pa.
    alpha_tube : float
        Coefficient of thermal expansion of tube material. Default value is 0.0
    dT_tube : float
        Difference in tunnel temperature as compared to a reference temperature. Default value is 0.0
    m_pod : float
        total mass of pod. Default value is 3100 kg. Value will come from weight component
    r : float
        Radius of tube. Default value is 1.1 m. Value will come from aero module
    t : float
        Thickness of the tube. Default value is 50 mm. Value is optimized in problem driver.
    rho_pylon : float
        Density of pylon material. Default value is 2400 kg/m**3
    E_pylon : float
        Young's modulus of pylon material. Default value is 41e9 Pa
    v_pylon : float
        Poisson's ratio of pylon material. Default value is .2
    Su_pylon : float
        Ultimate strength of pylon material. Default value is 40e6 Pa
    unit_cost_pylon : float
        Cost of pylon material per unit mass. Default value is .05 USD/kg
    h : float
        Height of each pylon. Default value is 10 m.
    r_pylon : float
        Radius of each pylon. Default value is 1 m. Value will be optimized in problem driver
    vac_weight : float
        Total weight of vacuums. Default value is 1500.0 kg. Value will come from vacuum component

    Returns
    -------
    m_pylon : float
        mass of individual pylon in kg/pylon
    m_prime: float
        Calculates mass per unit length of tube in kg/m
    von_mises : float
        Von Mises stress in the tube in Pa
    total_material_cost : float
        returns total cost of tube and pylon materials per unit distance in USD/m
    R : float
        Returns vertical component of force on each pylon in N
    delta : float
        Maximum deflection of tube between pylons in m
    dx : float
        outputs distance in between pylons in m
    t_crit :
        Minimum tube thickness to satisfy vacuum tube buckling condition in m

    Notes
    -----
    [1] USA. NASA. Buckling of Thin-Walled Circular Cylinders. N.p.: n.p., n.d. Web. 13 June 2016.
    """

    def __init__(self):
        """Declare all OpenMDAO params and outputs with their defaults."""
        super(StructuralOptimization, self).__init__()

        # --- Tube material properties (steel by default) ---
        self.add_param('rho_tube',
                       val=7820.0,
                       units='kg/m**3',
                       desc='density of steel')
        self.add_param('E_tube',
                       val=200.0 * (10**9),
                       units='Pa',
                       desc='Young\'s Modulus of tube')
        self.add_param('v_tube', val=.3, desc='Poisson\'s ratio of tube')
        self.add_param('Su_tube',
                       val=152.0e6,
                       units='Pa',
                       desc='ultimate strength of tube')
        self.add_param('sf', val=1.5, desc='safety factor')
        self.add_param('g', val=9.81, units='m/s**2', desc='gravity')
        self.add_param('unit_cost_tube',
                       val=.3307,
                       units='USD/kg',
                       desc='cost of tube materials per unit mass')
        # --- Operating conditions ---
        self.add_param('p_tunnel',
                       val=100.0,
                       units='Pa',
                       desc='Tunnel Pressure')
        self.add_param('p_ambient',
                       val=101300.0,
                       units='Pa',
                       desc='Ambient Pressure')
        self.add_param('alpha_tube',
                       val=0.0,
                       desc='Coefficient of Thermal Expansion of tube')
        self.add_param(
            'dT_tube', val=0.0,
            units='K', desc='Temperature change')
        self.add_param('m_pod', val=3100.0, units='kg', desc='mass of pod')

        self.add_param('tube_area', val=3.8013, units='m**2', desc='inner tube area')
        #self.add_param('r', val=1.1, units='m', desc='inner tube radius')
        self.add_param('t', val=.05, units='m', desc='tube thickness')
        #self.add_param('dx', val = 500.0, units = 'm', desc = 'distance between pylons')

        #Define pylon material properties
        self.add_param('rho_pylon',
                       val=2400.0,
                       units='kg/m**3',
                       desc='density of pylon material')
        self.add_param('E_pylon',
                       val=41.0 * (10**9),
                       units='Pa',
                       desc='Young\'s Modulus of pylon')
        self.add_param('v_pylon', val=.2, desc='Poisson\'s ratio of pylon')
        self.add_param('Su_pylon',
                       val=40.0 * (10**6),
                       units='Pa',
                       desc='ultimate strength_pylon')
        self.add_param('unit_cost_pylon',
                       val=.05,
                       units='USD/kg',
                       desc='cost of pylon materials per unit mass')
        self.add_param('h', val=10.0, units='m', desc='height of pylon')
        self.add_param('r_pylon', val=1.1, units='m', desc='inner tube radius')
        self.add_param('vac_weight', val=1500.0, units='kg', desc='vacuum weight')

        #Define outputs
        self.add_output('m_pylon',
                        val=0.0,
                        units='kg',
                        desc='total mass of the pylon')
        self.add_output('m_prime',
                        val=100.0,
                        units='kg/m',
                        desc='total mass of the tube per unit length')
        self.add_output('von_mises',
                        val=0.0,
                        units='Pa',
                        desc='max Von Mises Stress')
        self.add_output('total_material_cost',
                        val=0.0,
                        units='USD/m',
                        desc='cost of materials')
        self.add_output('R', val=0.0, units='N', desc='Force on pylon')
        self.add_output('delta',
                        val=0.0,
                        units='m',
                        desc='max deflection inbetween pylons')
        self.add_output('dx',
                        val=500.0,
                        units='m',
                        desc='distance between pylons')
        self.add_output('t_crit',
                        val=0.0,
                        units='m',
                        desc='Minimum tunnel thickness for buckling')

    def solve_nonlinear(self, params, unknowns, resids):
        '''total material cost = ($/kg_tunnel)*m_prime + ($/kg_pylon)*m_pylon*(1/dx)
        m_prime = mass of tunnel per unit length = rho_tube*pi*((r+t)^2-r^2)
        m_pylon = mass of single pylon = rho_pylon*pi*(r_pylon^2)*h

        Constraint equations derived from yield on buckling conditions
        '''
        rho_tube = params['rho_tube']
        E_tube = params['E_tube']
        v_tube = params['v_tube']
        alpha_tube = params['alpha_tube']
        dT_tube = params['dT_tube']
        unit_cost_tube = params['unit_cost_tube']
        g = params['g']
        tube_area = params['tube_area']
        #r = params['r']
        t = params['t']
        m_pod = params['m_pod']
        p_tunnel = params['p_tunnel']
        p_ambient = params['p_ambient']
        Su_pylon = params['Su_pylon']
        sf = params['sf']
        rho_pylon = params['rho_pylon']
        E_pylon = params['E_pylon']
        r_pylon = params['r_pylon']
        unit_cost_pylon = params['unit_cost_pylon']
        h = params['h']
        vac_weight = params['vac_weight']

        #Compute intermediate variable
        # tube inner radius recovered from the (circular) tube area
        r = np.sqrt(tube_area/np.pi)
        #print(r)

        q = rho_tube * np.pi * ((
            (r + t)**2) - (r**2)) * g  #Calculate distributed load
        dp = p_ambient - p_tunnel  #Calculate delta pressure
        I_tube = (np.pi / 4.0) * ((
            (r + t)**4) - (r**4))  #Calculate moment of inertia of tube

        m_prime = rho_tube * np.pi * ((
            (r + t)**2) - (r**2))  #Calculate mass per unit length
        # pylon spacing set by the pylon yield limit carrying tube + pod weight
        dx = ((2 * (Su_pylon / sf) * np.pi *
               (r_pylon**2)) - m_pod * g) / (m_prime * g)  #Calculate dx
        M = (q * (
            (dx**2) / 8.0)) + (m_pod * g * (dx / 2.0))  #Calculate max moment
        sig_theta = (dp * r) / t  #Calculate hoop stress
        # axial stress combines pressure, bending, and thermal terms
        sig_axial = ((dp * r) / (2 * t)) + (
            (M * r) / I_tube
        ) + alpha_tube * E_tube * dT_tube  #Calculate axial stress
        von_mises = np.sqrt((((sig_theta**2) + (sig_axial**2) + (
            (sig_axial - sig_theta)**2)) /
                             2.0))  #Calculate Von Mises stress
        m_pylon = rho_pylon * np.pi * (r_pylon**
                                       2) * h  #Calculate mass of single pylon

        # unknowns['total_material_cost'] = (unit_cost_tube * (rho_tube * np.pi * ((
        #     (r + t)**2) - (r**2)))) + (unit_cost_pylon * m_pylon * (1 / (
        #         ((2 * (Su_pylon / sf) * np.pi * (r_pylon**2)) - m_pod * g) /
        #         (m_prime * g))))
        # cost per metre: tube mass per metre plus one pylon amortized over dx
        unknowns['total_material_cost'] = (unit_cost_tube * (rho_tube * np.pi * ((
            (r + t)**2) - (r**2)))) + (unit_cost_pylon * m_pylon)/dx
        unknowns['m_prime'] = m_prime
        unknowns['von_mises'] = von_mises
        # max mid-span deflection of a uniformly loaded simple span
        unknowns['delta'] = (5.0 * q * (dx**4)) / (384.0 * E_tube * I_tube)
        unknowns['m_pylon'] = m_pylon
        # vertical reaction per pylon: half a span of tube plus half the pod
        unknowns['R'] = .5 * m_prime * dx * g + .5 * m_pod * g
        unknowns['dx'] = dx
        # minimum wall thickness against vacuum buckling (see ref. [1])
        unknowns['t_crit'] = r * ((
            (4.0 * dp * (1.0 - (v_tube**2))) / E_tube)**(1.0 / 3.0))
if __name__ == '__main__':
    # Stand-alone driver: size the hyperloop tube wall and support pylons,
    # then sweep pod mass and tube area to produce trade-study plots.
    top = Problem()
    root = top.root = Group()

    # Independent design/input variables fed to the structural model.
    params = (#('r', 1.1, {'units': 'm'}),
              ('tube_area', 53.134589, {'units': 'm**2'}),
              ('t', 5.0, {'units': 'm'}),
              ('r_pylon', 1.1, {'units': 'm'}),
              ('Su_tube', 152.0e6, {'units': 'Pa'}),
              ('sf', 1.5),
              ('p_ambient', 850.0, {'units': 'Pa'}),
              ('p_tunnel', 101300.0, {'units': 'Pa'}),
              ('v_tube', .3),
              ('rho_tube', 7820.0, {'units': 'kg/m**3'}),
              # BUG FIX: rho_pylon is a mass density; it was declared in 'Pa'.
              ('rho_pylon', 2400.0, {'units': 'kg/m**3'}),
              ('Su_pylon', 40.0e6, {'units': 'Pa'}),
              ('E_pylon', 41.0e9, {'units': 'Pa'}),
              ('h', 10.0, {'units': 'm'}),
              ('m_pod', 3100.0, {'units': 'kg'})
              )

    root.add('input_vars', IndepVarComp(params))
    root.add('p', StructuralOptimization())

    # Impose yield stress constraint for tube
    root.add('con1', ExecComp('c1 = ((Su_tube/sf) - von_mises)'))
    # Impose buckling constraint for tube
    root.add('con2', ExecComp('c2 = t - t_crit'))

    #root.connect('input_vars.r', 'p.r')
    root.connect('input_vars.tube_area', 'p.tube_area')
    root.connect('input_vars.t', 'p.t')
    root.connect('input_vars.r_pylon', 'p.r_pylon')
    root.connect('input_vars.Su_tube', 'con1.Su_tube')
    root.connect('input_vars.sf', 'con1.sf')
    root.connect('p.von_mises', 'con1.von_mises')
    root.connect('input_vars.t', 'con2.t')
    root.connect('p.t_crit', 'con2.t_crit')

    # Complex-step derivatives for the structural component.
    root.p.deriv_options['type'] = "cs"
    # root.p.deriv_options['form'] = 'forward'
    root.p.deriv_options['step_size'] = 1.0e-10

    top.driver = ScipyOptimizer()
    top.driver.options['optimizer'] = 'SLSQP'

    # Design variables: tube thickness and pylon radius; minimize cost
    # subject to the yield (con1) and buckling (con2) constraints.
    top.driver.add_desvar('input_vars.t', lower=.001, scaler=100.0)
    top.driver.add_desvar('input_vars.r_pylon', lower=.1)
    top.driver.add_objective('p.total_material_cost', scaler = 1.0e-4)
    top.driver.add_constraint('con1.c1', lower=0.0, scaler=1000.0)
    top.driver.add_constraint('con2.c2', lower=0.0)

    top.setup()

    top['p.p_tunnel'] = 850.0
    # top['p.m_pod']= 10000.0
    top['p.h'] = 10.0

    import csv

    # f = open('/Users/kennethdecker/Desktop/Paper figures/land_structural_trades.csv', 'wt')
    # writer = csv.writer(f)
    # writer.writerow(('A_tube', 'm=10000', 'm=15000', 'm=20000', 'cost'))

    # Sweep pod mass (rows) and tube area (columns), re-optimizing each time.
    m_pod = np.linspace(10000.0, 20000, num = 3)
    A_tube = np.linspace(20.0, 50.0, num = 30)

    dx = np.zeros((len(m_pod), len(A_tube)))
    t_tube = np.zeros((len(m_pod), len(A_tube)))
    r_pylon = np.zeros((len(m_pod), len(A_tube)))
    cost = np.zeros((1, len(A_tube)))

    for i in range(len(A_tube)):
        for j in range(len(m_pod)):
            top['input_vars.tube_area'] = A_tube[i]
            top['p.m_pod'] = m_pod[j]
            top.run()
            dx[j,i] = top['p.dx']
            t_tube[j,i] = top['p.t']
            r_pylon[j,i] = top['p.r_pylon']
            # NOTE(review): only the cost for the last pod mass is kept per
            # area -- confirm this is intentional before relying on `cost`.
            cost[0,i] = top['p.total_material_cost']
        # writer.writerow((A_tube[i], dx[0,i], dx[1,i], dx[2,i], cost[0,i]))
    # f.close()

    # Pylon spacing vs. tube area, one curve per pod mass.
    plt.hold(True)
    # plt.subplot(211)
    line1, = plt.plot(A_tube, dx[0,:], 'b-', linewidth = 2.0, label = 'pod mass = 10000 kg')
    line2, = plt.plot(A_tube, dx[1,:], 'r-', linewidth = 2.0, label = 'pod mass = 15000 kg')
    line3, = plt.plot(A_tube, dx[2,:], 'g-', linewidth = 2.0, label = 'pod mass = 20000 kg')
    plt.xlabel('Tube Area (m^2)', fontsize = 12, fontweight = 'bold')
    plt.ylabel('Pylon Spacing (m)', fontsize = 12, fontweight = 'bold')
    plt.grid('on')
    plt.legend(handles = [line1, line2, line3], loc = 1)
    plt.show()

    # Optimized tube thickness (top) and pylon radius (bottom).
    plt.subplot(211)
    line1, = plt.plot(A_tube, t_tube[0,:], 'b-', linewidth = 2.0, label = 'm_pod = 10000 kg')
    line2, = plt.plot(A_tube, t_tube[1,:], 'r-', linewidth = 2.0, label = 'm_pod = 15000 kg')
    line3, = plt.plot(A_tube, t_tube[2,:], 'g-', linewidth = 2.0, label = 'm_pod = 20000 kg')
    # plt.xlabel('Tube Area (m^2)', fontsize = 12, fontweight = 'bold')
    plt.ylabel('tube thickness (m)', fontsize = 12, fontweight = 'bold')
    plt.grid('on')
    plt.legend(handles = [line1, line2, line3], loc = 1)
    plt.subplot(212)
    line1, = plt.plot(A_tube, r_pylon[0,:], 'b-', linewidth = 2.0, label = 'm_pod = 10000 kg')
    line2, = plt.plot(A_tube, r_pylon[1,:], 'r-', linewidth = 2.0, label = 'm_pod = 15000 kg')
    line3, = plt.plot(A_tube, r_pylon[2,:], 'g-', linewidth = 2.0, label = 'm_pod = 20000 kg')
    plt.xlabel('Tube Area (m^2)', fontsize = 12, fontweight = 'bold')
    plt.ylabel('Pylon Radius (m)', fontsize = 12, fontweight = 'bold')
    plt.grid('on')
    plt.show()

    # Material cost for the last pod mass in the sweep.
    plt.plot(A_tube, cost[0,:], 'r-', linewidth = 2.0)
    plt.xlabel('Tube Area (m^2)', fontsize = 12, fontweight = 'bold')
    plt.ylabel('Material Cost per Meter (USD/m)', fontsize = 12, fontweight = 'bold')
    plt.show()

    # plt.plot(A_tube, dx[0,:])
    # plt.xlabel('Tube Area')
    # plt.ylabel('pylon spacing')
    # plt.show()
    # plt.plot(A_tube, total_material_cost[0,:])
    # plt.xlabel('Tube Area')
    # plt.ylabel('Cost per unit length')
    # plt.show()

    # Post-check: compare the pylon reaction load against the Euler
    # buckling load for the optimized radius.
    R_buckle = ((np.pi**3) * top['p.E_tube'] *
                (top['p.r_pylon']**4)) / (16 * (top['p.h']**2))
    print('Optimizer pylon radius %f' % top['p.r_pylon'])
    if top['p.R'] < R_buckle:
        print('Pylon buckling constraint is satisfied')
    else:
        # Invert the buckling relation to recover the minimum feasible radius.
        r_pylon_new = ((R_buckle * 16 * (top['p.h']**2)) / (
            (np.pi**3) * top['p.E_tube']))**.25
        print(
            'Optimizer value did not satisfy pylon buckling condition. Pylon radius set to minimum buckling value')
        print('new pylon radius is %f m' % r_pylon_new)
    print('\n')

    # Summary of the optimized design.
    print('total material cost per m is $%6.2f/km' %
          (top['p.total_material_cost'] * (1.0e3)))
    print('pylon radius is %6.3f m' % top['p.r_pylon'])
    print('tube thickness is %6.4f mm' % (top['p.t'] * (1.0e3)))
    print('mass per unit length is %6.2f kg/m' % top['p.m_prime'])
    print('vertical force on each pylon is %6.2f kN' % (top['p.R'] / (1.0e3)))
    print('Von Mises stress is %6.3f MPa' % (top['p.von_mises'] / (1.0e6)))
    print('distance between pylons is %6.2f m' % top['p.dx'])
    print('max deflection is %6.4f mm' % (top['p.delta'] * (1.0e3)))
    print('\n')
    print('con1 = %f' % top['con1.c1'])
    print('con2 = %f' % top['con2.c2'])

    if top['con1.c1'] < 0.0:
        print('con1 not satisfied')
    elif top['con2.c2'] < 0.0:
        print('con2 not satisfied')
    else:
        print('Yield constraints are satisfied')
blorgon9000/pyopus | demo/evaluation/01-simulator-spectre/runme.py | 1 | 3218 | # Test Spectre simulator interface
from pyopus.simulator import simulatorClass
if __name__=='__main__':
    # Five operating-point (op) jobs that differ in model section,
    # supply voltage, and temperature; the simulator groups jobs that
    # share definitions so fewer netlists must be written.
    jobList=[
        {   # First job - op analysis
            'name': 'dcop',
            'definitions': [
                { 'file': 'cmos180n.scs', 'section': 'tm' },
                { 'file': 'opamp.scs' }
            ],
            'params': {
                'vdd': 1.8,
                'temperature': 25
            },
            'options': {
                'reltol': 0.001
            },
            'saves': [
                'p("vdd", "pwr")', 'all()'
            ],
            'command': 'op()'
        },
        {   # Second job - op analysis with different temperature
            'name': 'dcop100',
            'definitions': [
                { 'file': 'cmos180n.scs', 'section': 'tm' },
                { 'file': 'opamp.scs' }
            ],
            'params': {
                'vdd': 1.6,
                'temperature': 100
            },
            'options': {
                'reltol': 0.001
            },
            'saves': [
            ],
            'command': 'op()'
        },
        {   # Third job - op analysis with different supply voltage
            'name': 'dcopv33',
            'definitions': [
                { 'file': 'cmos180n.scs', 'section': 'tm' },
                { 'file': 'opamp.scs' }
            ],
            'params': {
                'vdd': 2.0,
                'temperature': 25
            },
            'options': {
                'reltol': 0.001
            },
            'saves': [
            ],
            'command': 'op()'
        },
        {   # Fourth job - op analysis with different library
            'name': 'dcopff',
            'definitions': [
                { 'file': 'cmos180n.scs', 'section': 'ws' },
                { 'file': 'opamp.scs' }
            ],
            'params': {
                'vdd': 2.0,
                'temperature': 25
            },
            'options': {
                'reltol': 0.001
            },
            'saves': [
            ],
            'command': 'op()'
        },
        {   # Fifth job - op analysis with different library
            'name': 'dcopff100',
            'definitions': [
                { 'file': 'cmos180n.scs', 'section': 'ws' },
                { 'file': 'opamp.scs' }
            ],
            'params': {
                'vdd': 2.0,
                'temperature': 100
            },
            'options': {
                'reltol': 0.001
            },
            'saves': [
            ],
            'command': 'op()'
        }
    ]

    # Input parameters
    inParams={
        'mirr_w': 7.46e-005,
        'mirr_l': 5.63e-007
    }

    # Create simulator
    sim=simulatorClass("Spectre")(debug=1, saveSplit=False)

    # Set job list and optimize it
    sim.setJobList(jobList)

    # Print optimized job groups
    ngroups=sim.jobGroupCount()
    print("\nJob Groups:")
    for i in range(ngroups):
        group=sim.jobGroup(i)
        gstr=''
        for j in group:
            gstr+=" %d (%s), " % (j, jobList[j]['name'])
        print(" %d: %s" % (i, gstr))
    print("")

    # Set input parameters
    sim.setInputParameters(inParams)

    # Go through all job groups, write file, run it and collect results
    for i in range(ngroups):
        # Delete old loaded results (free memory).
        sim.resetResults()

        # Run jobs in job group.
        (jobIndices, status)=sim.runJobGroup(i)

        print("")
        for j in jobIndices:
            # BUG FIX: look up the name of job j; the original indexed the
            # job list with the group counter i, mislabelling the output.
            name=jobList[j]['name']
            sim.collectResults([j], status)
            if sim.activateResult(j) is not None:
                print("Job %d (%s): Vout=%e" % (j, name, sim.res_voltage("out")))
                print("Job %d (%s): Isrc=%e" % (j, name, sim.res_current("vdd")))
                # Only the first job saves the power property; others raise.
                try:
                    print("Job %d (%s): Pvdd=%e" % (j, name, sim.res_property("vdd", "pwr")))
                except Exception:
                    pass
            else:
                print("Job %d (%s): no results" % (j, name))
        print("")

    sim.cleanup()
| gpl-3.0 |
SpotlightKid/aescrypt | test_aescrypt.py | 1 | 4393 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test suite for aescrypt.py."""
from __future__ import print_function, unicode_literals
from io import BytesIO
from aescrypt import encrypt, decrypt
from Crypto.Cipher import AES
from nose.tools import raises
# Passphrase shared by all tests below.
password = 'q1w2e3r4'

# Multi-line plaintext fixture used for in-memory round trips.
plaintext = """\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque at euismod
tortor, quis finibus mauris. Suspendisse dui augue, hendrerit at porttitor
viverra, pulvinar ut velit. Quisque facilisis felis sed felis vestibulum, sit
amet varius est vulputate. Curabitur venenatis dapibus risus, a molestie magna
lobortis et. Donec a nulla in ligula sagittis dapibus et quis velit. Curabitur
tincidunt faucibus lorem in viverra. Sed diam diam, suscipit sit amet quam nec,
cursus sollicitudin est. Vestibulum condimentum gravida sem eget tincidunt.
Nulla tincidunt massa in consectetur blandit. Ut sed nunc sed neque posuere
porttitor. Fusce et libero pretium, facilisis ante eget, fermentum enim. Sed
dignissim libero quis ultricies iaculis. Nunc eu lobortis tellus. Nam et cursus
ligula. Sed vitae consequat nisl. Cras tempor nisl non metus commodo, vitae
scelerisque neque congue.
"""

# File names for the on-disk round-trip test.
# NOTE(review): ``infn`` must already exist on disk -- confirm fixture setup.
infn = 'test_input.txt'
encfn = 'test_input.txt.enc'
outfn = 'test_output.txt'
def test_roundtrip():
    """AES file encryption/decryption roundtrip produces identical files.

    Improvement: the generated ciphertext and decrypted output files are
    removed afterwards so the test no longer leaves artifacts behind.
    """
    import os
    try:
        with open(infn, 'rb') as infile, open(encfn, 'wb') as outfile:
            encrypt(infile, outfile, password)
        with open(encfn, 'rb') as infile, open(outfn, 'wb') as outfile:
            decrypt(infile, outfile, password)
        with open(infn, 'rb') as original, open(outfn, 'rb') as copy:
            assert original.read() == copy.read()
    finally:
        # Clean up generated files; keep the pre-existing input file.
        for fn in (encfn, outfn):
            if os.path.exists(fn):
                os.remove(fn)
@raises(ValueError)
def test_bad_decrypt():
    """Trying to decrypt invalid input raises ValueError."""
    bogus = plaintext[:256].encode()
    with BytesIO(bogus) as infile, BytesIO() as outfile:
        decrypt(infile, outfile, password)
def test_key_size():
    """Key sizes of 128, 192 and 256 bit produce valid ciphertexts."""
    source = BytesIO(plaintext.encode())
    for bits in AES.key_size:
        encrypted = BytesIO()
        encrypt(source, encrypted, password, key_size=bits)
        source.seek(0)
        # Ciphertext length must be a whole number of AES blocks.
        blob = encrypted.getvalue()
        assert len(blob) % 16 == 0
        # Decrypting must reproduce the original plaintext.
        encrypted.seek(0)
        restored = BytesIO()
        decrypt(encrypted, restored, password, key_size=bits)
        assert restored.getvalue().decode('utf-8') == plaintext
def test_salt_marker():
    """Setting the salt marker produces valid header."""
    marker = b'test'
    out = BytesIO()
    encrypt(BytesIO(plaintext.encode()), out, password, salt_marker=marker)
    header = out.getvalue()
    # The marker brackets the two header bytes at offsets 4-5.
    assert header[:4] == marker and header[6:10] == marker
@raises(ValueError)
def test_salt_marker_empty():
    """Passing empty salt marker raises ValueError."""
    encrypt(BytesIO(plaintext.encode()), BytesIO(), password,
            salt_marker=b'')
@raises(ValueError)
def test_salt_marker_toolong():
    """Passing too long salt marker raises ValueError."""
    encrypt(BytesIO(plaintext.encode()), BytesIO(), password,
            salt_marker=b'iamlong')
@raises(TypeError)
def test_salt_marker_notbytes():
    """Passing not bytes-type salt marker raises TypeError."""
    encrypt(BytesIO(plaintext.encode()), BytesIO(), password,
            salt_marker='$')
def test_kdf_iterations():
    """Passed kdf_iterations are set correctly in header."""
    out = BytesIO()
    encrypt(BytesIO(plaintext.encode()), out, password, kdf_iterations=1000)
    # 1000 == 0x03e8, stored in header bytes 1-2.
    assert out.getvalue()[1:3] == b'\x03\xe8'
@raises(ValueError)
def test_kdf_iterations_tolow():
    """Setting kdf_iterations too low raises ValueError."""
    encrypt(BytesIO(plaintext.encode()), BytesIO(), password,
            kdf_iterations=0)
@raises(ValueError)
def test_kdf_iterations_tohigh():
    """Setting kdf_iterations too high raises ValueError."""
    encrypt(BytesIO(plaintext.encode()), BytesIO(), password,
            kdf_iterations=65536)
| mit |
ArcherSys/ArcherSys | Lib/site-packages/jupyter_core/tests/dotipython/profile_default/ipython_kernel_config.py | 24 | 15358 | # Configuration file for ipython-kernel.
# Obtain the config object this file populates; every other line in this
# file is a commented-out default that can be uncommented to override.
c = get_config()
#------------------------------------------------------------------------------
# IPKernelApp configuration
#------------------------------------------------------------------------------
# IPython: an enhanced interactive Python shell.
# IPKernelApp will inherit config from: BaseIPythonApplication, Application,
# InteractiveShellApp, ConnectionFileMixin
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.IPKernelApp.hide_initial_ns = True
# The importstring for the DisplayHook factory
# c.IPKernelApp.displayhook_class = 'IPython.kernel.zmq.displayhook.ZMQDisplayHook'
# A list of dotted module names of IPython extensions to load.
# c.IPKernelApp.extensions = []
# Execute the given command string.
# c.IPKernelApp.code_to_run = ''
# redirect stderr to the null device
# c.IPKernelApp.no_stderr = False
# The date format used by logging formatters for %(asctime)s
# c.IPKernelApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# Whether to create profile dir if it doesn't exist
# c.IPKernelApp.auto_create = False
# Reraise exceptions encountered loading IPython extensions?
# c.IPKernelApp.reraise_ipython_extension_failures = False
# Set the log level by value or name.
# c.IPKernelApp.log_level = 30
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.IPKernelApp.exec_PYTHONSTARTUP = True
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.IPKernelApp.pylab = None
# Run the module as a script.
# c.IPKernelApp.module_to_run = ''
# The importstring for the OutStream factory
# c.IPKernelApp.outstream_class = 'IPython.kernel.zmq.iostream.OutStream'
# dotted module name of an IPython extension to load.
# c.IPKernelApp.extra_extension = ''
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.IPKernelApp.verbose_crash = False
# Whether to overwrite existing config files when copying
# c.IPKernelApp.overwrite = False
# The IPython profile to use.
# c.IPKernelApp.profile = 'default'
# List of files to run at IPython startup.
# c.IPKernelApp.exec_files = []
# The Logging format template
# c.IPKernelApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.IPKernelApp.copy_config_files = False
# set the stdin (ROUTER) port [default: random]
# c.IPKernelApp.stdin_port = 0
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.IPKernelApp.extra_config_file = ''
# lines of code to run at IPython startup.
# c.IPKernelApp.exec_lines = []
# set the control (ROUTER) port [default: random]
# c.IPKernelApp.control_port = 0
# set the heartbeat port [default: random]
# c.IPKernelApp.hb_port = 0
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx',
# 'pyglet', 'qt', 'qt5', 'tk', 'wx').
# c.IPKernelApp.gui = None
# A file to be run
# c.IPKernelApp.file_to_run = ''
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# c.IPKernelApp.ipython_dir = ''
# kill this process if its parent dies. On Windows, the argument specifies the
# HANDLE of the parent process, otherwise it is simply boolean.
# c.IPKernelApp.parent_handle = 0
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.IPKernelApp.matplotlib = None
# set the iopub (PUB) port [default: random]
# c.IPKernelApp.iopub_port = 0
# redirect stdout to the null device
# c.IPKernelApp.no_stdout = False
#
# c.IPKernelApp.transport = 'tcp'
# JSON file in which to store connection info [default: kernel-<pid>.json]
#
# This file will contain the IP, ports, and authentication key needed to connect
# clients to this kernel. By default, this file will be created in the security
# dir of the current profile, but can be specified by absolute path.
# c.IPKernelApp.connection_file = ''
# The Kernel subclass to be used.
#
# This should allow easy re-use of the IPKernelApp entry point to configure and
# launch kernels other than IPython's own.
# c.IPKernelApp.kernel_class = <class 'IPython.kernel.zmq.ipkernel.IPythonKernel'>
# ONLY USED ON WINDOWS Interrupt this process when the parent is signaled.
# c.IPKernelApp.interrupt = 0
# set the shell (ROUTER) port [default: random]
# c.IPKernelApp.shell_port = 0
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.IPKernelApp.pylab_import_all = True
# Set the kernel's IP address [default localhost]. If the IP address is
# something other than localhost, then Consoles on other machines will be able
# to connect to the Kernel, so be careful!
# c.IPKernelApp.ip = ''
#------------------------------------------------------------------------------
# IPythonKernel configuration
#------------------------------------------------------------------------------
# IPythonKernel will inherit config from: Kernel
#
# c.IPythonKernel._execute_sleep = 0.0005
# Whether to use appnope for compatiblity with OS X App Nap.
#
# Only affects OS X >= 10.9.
# c.IPythonKernel._darwin_app_nap = True
#
# c.IPythonKernel._poll_interval = 0.05
#------------------------------------------------------------------------------
# ZMQInteractiveShell configuration
#------------------------------------------------------------------------------
# A subclass of InteractiveShell for ZMQ.
# ZMQInteractiveShell will inherit config from: InteractiveShell
#
# c.ZMQInteractiveShell.object_info_string_level = 0
#
# c.ZMQInteractiveShell.separate_out = ''
# Automatically call the pdb debugger after every exception.
# c.ZMQInteractiveShell.pdb = False
#
# c.ZMQInteractiveShell.ipython_dir = ''
#
# c.ZMQInteractiveShell.history_length = 10000
#
# c.ZMQInteractiveShell.readline_remove_delims = '-/~'
# If True, anything that would be passed to the pager will be displayed as
# regular output instead.
# c.ZMQInteractiveShell.display_page = False
# Deprecated, use PromptManager.in2_template
# c.ZMQInteractiveShell.prompt_in2 = ' .\\D.: '
#
# c.ZMQInteractiveShell.separate_in = '\n'
# Start logging to the default log file in overwrite mode. Use `logappend` to
# specify a log file to **append** logs to.
# c.ZMQInteractiveShell.logstart = False
# Set the size of the output cache. The default is 1000, you can change it
# permanently in your config file. Setting it to 0 completely disables the
# caching system, and the minimum value accepted is 20 (if you provide a value
# less than 20, it is reset to 0 and a warning is issued). This limit is
# defined because otherwise you'll spend more time re-flushing a too small cache
# than working
# c.ZMQInteractiveShell.cache_size = 1000
#
# c.ZMQInteractiveShell.wildcards_case_sensitive = True
# The name of the logfile to use.
# c.ZMQInteractiveShell.logfile = ''
# 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run
# interactively (displaying output from expressions).
# c.ZMQInteractiveShell.ast_node_interactivity = 'last_expr'
#
# c.ZMQInteractiveShell.debug = False
#
# c.ZMQInteractiveShell.quiet = False
# Save multi-line entries as one entry in readline history
# c.ZMQInteractiveShell.multiline_history = True
# Deprecated, use PromptManager.in_template
# c.ZMQInteractiveShell.prompt_in1 = 'In [\\#]: '
# Enable magic commands to be called without the leading %.
# c.ZMQInteractiveShell.automagic = True
# The part of the banner to be printed before the profile
# c.ZMQInteractiveShell.banner1 = 'Python 3.4.3 |Continuum Analytics, Inc.| (default, Mar 6 2015, 12:07:41) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.1.0 -- An enhanced Interactive Python.\nAnaconda is brought to you by Continuum Analytics.\nPlease check out: http://continuum.io/thanks and https://binstar.org\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n'
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
# c.ZMQInteractiveShell.autocall = 0
#
# c.ZMQInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard']
# Set the color scheme (NoColor, Linux, or LightBG).
# c.ZMQInteractiveShell.colors = 'LightBG'
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.ZMQInteractiveShell.color_info = True
# Show rewritten input, e.g. for autocall.
# c.ZMQInteractiveShell.show_rewritten_input = True
#
# c.ZMQInteractiveShell.xmode = 'Context'
#
# c.ZMQInteractiveShell.separate_out2 = ''
# The part of the banner to be printed after the profile
# c.ZMQInteractiveShell.banner2 = ''
# Start logging to the given file in append mode. Use `logfile` to specify a log
# file to **overwrite** logs to.
# c.ZMQInteractiveShell.logappend = ''
# Don't call post-execute functions that have failed in the past.
# c.ZMQInteractiveShell.disable_failing_post_execute = False
# Deprecated, use PromptManager.out_template
# c.ZMQInteractiveShell.prompt_out = 'Out[\\#]: '
# Enable deep (recursive) reloading by default. IPython can use the deep_reload
# module which reloads changes in modules recursively (it replaces the reload()
# function, so you don't need to change anything to use it). deep_reload()
# forces a full reload of modules whose code may have changed, which the default
# reload() function does not. When deep_reload is off, IPython will use the
# normal reload(), but deep_reload will still be available as dreload().
# c.ZMQInteractiveShell.deep_reload = False
# Deprecated, use PromptManager.justify
# c.ZMQInteractiveShell.prompts_pad_left = True
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.ZMQInteractiveShell.ast_transformers = []
#------------------------------------------------------------------------------
# ProfileDir configuration
#------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = ''
#------------------------------------------------------------------------------
# Session configuration
#------------------------------------------------------------------------------
# Object for handling serialization and sending of messages.
#
# The Session object handles building messages and sending them with ZMQ sockets
# or ZMQStream objects. Objects can communicate with each other over the
# network via Session objects, and only need to work with the dict-based IPython
# message spec. The Session will handle serialization/deserialization, security,
# and metadata.
#
# Sessions support configurable serialization via packer/unpacker traits, and
# signing with HMAC digests via the key/keyfile traits.
#
# Parameters ----------
#
# debug : bool
# whether to trigger extra debugging statements
# packer/unpacker : str : 'json', 'pickle' or import_string
# importstrings for methods to serialize message parts. If just
# 'json' or 'pickle', predefined JSON and pickle packers will be used.
# Otherwise, the entire importstring must be used.
#
# The functions must accept at least valid JSON input, and output *bytes*.
#
# For example, to use msgpack:
# packer = 'msgpack.packb', unpacker='msgpack.unpackb'
# pack/unpack : callables
# You can also set the pack/unpack callables for serialization directly.
# session : bytes
# the ID of this Session object. The default is to generate a new UUID.
# username : unicode
# username added to message headers. The default is to ask the OS.
# key : bytes
# The key used to initialize an HMAC signature. If unset, messages
# will not be signed or checked.
# keyfile : filepath
# The file containing a key. If this is set, `key` will be initialized
# to the contents of the file.
# The digest scheme used to construct the message signatures. Must have the form
# 'hmac-HASH'.
# c.Session.signature_scheme = 'hmac-sha256'
# The maximum number of digests to remember.
#
# The digest history will be culled when it exceeds this value.
# c.Session.digest_history_size = 65536
# The name of the unpacker for unserializing messages. Only used with custom
# functions for `packer`.
# c.Session.unpacker = 'json'
# The name of the packer for serializing messages. Should be one of 'json',
# 'pickle', or an import name for a custom callable serializer.
# c.Session.packer = 'json'
# Username for the Session. Default is your system username.
# c.Session.username = 'minrk'
# Debug output in the Session
# c.Session.debug = False
# path to file containing execution key.
# c.Session.keyfile = ''
# The maximum number of items for a container to be introspected for custom
# serialization. Containers larger than this are pickled outright.
# c.Session.item_threshold = 64
# Threshold (in bytes) beyond which an object's buffer should be extracted to
# avoid pickling.
# c.Session.buffer_threshold = 1024
# The UUID identifying this session.
# c.Session.session = ''
# Threshold (in bytes) beyond which a buffer should be sent without copying.
# c.Session.copy_threshold = 65536
# execution key, for signing messages.
# c.Session.key = b''
# Metadata dictionary, which serves as the default top-level metadata dict for
# each message.
# c.Session.metadata = {}
| mit |
analurandis/Tur | backend/venv/Lib/site-packages/git/test/test_repo.py | 2 | 27790 | # test_repo.py
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
from git.test.lib import (
patch,
TestBase,
with_rw_repo,
fixture,
assert_false,
assert_equal,
assert_true,
raises
)
from git import (
InvalidGitRepositoryError,
Repo,
NoSuchPathError,
Head,
Commit,
Tree,
IndexFile,
Git,
Reference,
GitDB,
Submodule,
GitCmdObjectDB,
Remote,
BadName,
GitCommandError
)
from git.repo.fun import touch
from git.util import join_path_native
from git.exc import BadObject
from gitdb.util import bin_to_hex
from git.compat import string_types
from gitdb.test.lib import with_rw_directory
import os
import sys
import tempfile
import shutil
from io import BytesIO
class TestRepo(TestBase):
@raises(InvalidGitRepositoryError)
def test_new_should_raise_on_invalid_repo_location(self):
Repo(tempfile.gettempdir())
@raises(NoSuchPathError)
def test_new_should_raise_on_non_existant_path(self):
Repo("repos/foobar")
@with_rw_repo('0.3.2.1')
def test_repo_creation_from_different_paths(self, rw_repo):
r_from_gitdir = Repo(rw_repo.git_dir)
assert r_from_gitdir.git_dir == rw_repo.git_dir
assert r_from_gitdir.git_dir.endswith('.git')
assert not rw_repo.git.working_dir.endswith('.git')
assert r_from_gitdir.git.working_dir == rw_repo.git.working_dir
def test_description(self):
txt = "Test repository"
self.rorepo.description = txt
assert_equal(self.rorepo.description, txt)
def test_heads_should_return_array_of_head_objects(self):
for head in self.rorepo.heads:
assert_equal(Head, head.__class__)
def test_heads_should_populate_head_data(self):
for head in self.rorepo.heads:
assert head.name
assert isinstance(head.commit, Commit)
# END for each head
assert isinstance(self.rorepo.heads.master, Head)
assert isinstance(self.rorepo.heads['master'], Head)
def test_tree_from_revision(self):
tree = self.rorepo.tree('0.1.6')
assert len(tree.hexsha) == 40
assert tree.type == "tree"
assert self.rorepo.tree(tree) == tree
# try from invalid revision that does not exist
self.failUnlessRaises(BadName, self.rorepo.tree, 'hello world')
def test_commit_from_revision(self):
commit = self.rorepo.commit('0.1.4')
assert commit.type == 'commit'
assert self.rorepo.commit(commit) == commit
    def test_commits(self):
        """iter_commits honours max_count and yields the recorded metadata
        for the fixture commit at tag 0.1.6."""
        mc = 10
        commits = list(self.rorepo.iter_commits('0.1.6', max_count=mc))
        assert len(commits) == mc

        c = commits[0]
        assert_equal('9a4b1d4d11eee3c5362a4152216376e634bd14cf', c.hexsha)
        assert_equal(["c76852d0bff115720af3f27acdb084c59361e5f6"], [p.hexsha for p in c.parents])
        assert_equal("ce41fc29549042f1aa09cc03174896cf23f112e3", c.tree.hexsha)
        assert_equal("Michael Trier", c.author.name)
        assert_equal("mtrier@gmail.com", c.author.email)
        assert_equal(1232829715, c.authored_date)
        # tz offsets are expressed in seconds (5 hours here)
        assert_equal(5 * 3600, c.author_tz_offset)
        assert_equal("Michael Trier", c.committer.name)
        assert_equal("mtrier@gmail.com", c.committer.email)
        assert_equal(1232829715, c.committed_date)
        assert_equal(5 * 3600, c.committer_tz_offset)
        assert_equal("Bumped version 0.1.6\n", c.message)

        c = commits[1]
        assert isinstance(c.parents, tuple)
def test_trees(self):
mc = 30
num_trees = 0
for tree in self.rorepo.iter_trees('0.1.5', max_count=mc):
num_trees += 1
assert isinstance(tree, Tree)
# END for each tree
assert num_trees == mc
    def _assert_empty_repo(self, repo):
        """Shared checks run against a freshly initialized, empty repository."""
        # test all kinds of things with an empty, freshly initialized repo.
        # It should throw good errors

        # entries should be empty
        assert len(repo.index.entries) == 0

        # head is accessible
        assert repo.head
        assert repo.head.ref
        # HEAD of an empty repo points at an unborn branch
        assert not repo.head.is_valid()

        # we can change the head to some other ref
        head_ref = Head.from_path(repo, Head.to_full_path('some_head'))
        assert not head_ref.is_valid()
        repo.head.ref = head_ref

        # is_dirty can handle all kwargs
        for args in ((1, 0, 0), (0, 1, 0), (0, 0, 1)):
            assert not repo.is_dirty(*args)
        # END for each arg

        # we can add a file to the index ( if we are not bare )
        if not repo.bare:
            pass
        # END test repos with working tree
    def test_init(self):
        """Exercise Repo.init and clone for bare repos created from relative
        and absolute paths, then a non-bare init in the current directory.

        Works inside the system temp directory and restores the previous
        working directory and removes created repos in the finally block.
        """
        prev_cwd = os.getcwd()
        os.chdir(tempfile.gettempdir())
        git_dir_rela = "repos/foo/bar.git"
        del_dir_abs = os.path.abspath("repos")
        git_dir_abs = os.path.abspath(git_dir_rela)
        try:
            # with specific path
            for path in (git_dir_rela, git_dir_abs):
                r = Repo.init(path=path, bare=True)
                assert isinstance(r, Repo)
                assert r.bare is True
                assert not r.has_separate_working_tree()
                assert os.path.isdir(r.git_dir)

                self._assert_empty_repo(r)

                # test clone
                clone_path = path + "_clone"
                rc = r.clone(clone_path)
                self._assert_empty_repo(rc)

                try:
                    shutil.rmtree(clone_path)
                except OSError:
                    # when relative paths are used, the clone may actually be inside
                    # of the parent directory
                    pass
                # END exception handling

                # try again, this time with the absolute version
                rc = Repo.clone_from(r.git_dir, clone_path)
                self._assert_empty_repo(rc)

                shutil.rmtree(git_dir_abs)
                try:
                    shutil.rmtree(clone_path)
                except OSError:
                    # when relative paths are used, the clone may actually be inside
                    # of the parent directory
                    pass
                # END exception handling

            # END for each path

            os.makedirs(git_dir_rela)
            os.chdir(git_dir_rela)
            r = Repo.init(bare=False)
            assert r.bare is False
            assert not r.has_separate_working_tree()

            self._assert_empty_repo(r)
        finally:
            try:
                shutil.rmtree(del_dir_abs)
            except OSError:
                pass
            os.chdir(prev_cwd)
        # END restore previous state
    def test_bare_property(self):
        """Accessing ``bare`` on the read-only repo must not raise."""
        self.rorepo.bare
def test_daemon_export(self):
orig_val = self.rorepo.daemon_export
self.rorepo.daemon_export = not orig_val
assert self.rorepo.daemon_export == (not orig_val)
self.rorepo.daemon_export = orig_val
assert self.rorepo.daemon_export == orig_val
def test_alternates(self):
cur_alternates = self.rorepo.alternates
# empty alternates
self.rorepo.alternates = []
assert self.rorepo.alternates == []
alts = ["other/location", "this/location"]
self.rorepo.alternates = alts
assert alts == self.rorepo.alternates
self.rorepo.alternates = cur_alternates
def test_repr(self):
assert repr(self.rorepo).startswith('<git.Repo ')
    def test_is_dirty_with_bare_repository(self):
        # a bare repository can never be dirty; restore the flag afterwards
        orig_value = self.rorepo._bare
        self.rorepo._bare = True
        assert_false(self.rorepo.is_dirty())
        self.rorepo._bare = orig_value
def test_is_dirty(self):
self.rorepo._bare = False
for index in (0, 1):
for working_tree in (0, 1):
for untracked_files in (0, 1):
assert self.rorepo.is_dirty(index, working_tree, untracked_files) in (True, False)
# END untracked files
# END working tree
# END index
orig_val = self.rorepo._bare
self.rorepo._bare = True
assert self.rorepo.is_dirty() is False
self.rorepo._bare = orig_val
    def test_head(self):
        # HEAD's reference must point at the same commit object as the active branch
        assert self.rorepo.head.reference.object == self.rorepo.active_branch.object
    def test_index(self):
        # the repository exposes its index as an IndexFile instance
        index = self.rorepo.index
        assert isinstance(index, IndexFile)
    def test_tag(self):
        # tag lookup by full ref path must resolve to an object with a commit
        assert self.rorepo.tag('refs/tags/0.1.5').commit
def test_archive(self):
tmpfile = tempfile.mktemp(suffix='archive-test')
stream = open(tmpfile, 'wb')
self.rorepo.archive(stream, '0.1.6', path='doc')
assert stream.tell()
stream.close()
os.remove(tmpfile)
    @patch.object(Git, '_call_process')
    def test_should_display_blame_information(self, git):
        """blame() parses commit metadata and per-commit line lists from fixture output."""
        git.return_value = fixture('blame')
        b = self.rorepo.blame('master', 'lib/git.py')
        assert_equal(13, len(b))
        assert_equal(2, len(b[0]))
        # assert_equal(25, reduce(lambda acc, x: acc + len(x[-1]), b))
        # identical commits must hash identically
        assert_equal(hash(b[0][0]), hash(b[9][0]))
        c = b[0][0]
        assert_true(git.called)

        # metadata of the first blamed commit, as encoded in the fixture
        assert_equal('634396b2f541a9f2d58b00be1a07f0c358b999b3', c.hexsha)
        assert_equal('Tom Preston-Werner', c.author.name)
        assert_equal('tom@mojombo.com', c.author.email)
        assert_equal(1191997100, c.authored_date)
        assert_equal('Tom Preston-Werner', c.committer.name)
        assert_equal('tom@mojombo.com', c.committer.email)
        assert_equal(1191997100, c.committed_date)
        assert_equal('initial grit setup', c.message)

        # test the 'lines per commit' entries
        tlist = b[0][1]
        assert_true(tlist)
        assert_true(isinstance(tlist[0], string_types))
        assert_true(len(tlist) < sum(len(t) for t in tlist))  # test for single-char bug

        # BINARY BLAME
        git.return_value = fixture('blame_binary')
        blames = self.rorepo.blame('master', 'rps')
        assert len(blames) == 2
    def test_blame_real(self):
        """Run blame over every .py blob in HEAD and expect multi-line entries."""
        c = 0
        nml = 0  # amount of multi-lines per blame
        for item in self.rorepo.head.commit.tree.traverse(
                predicate=lambda i, d: i.type == 'blob' and i.path.endswith('.py')):
            c += 1

            for b in self.rorepo.blame(self.rorepo.head, item.path):
                nml += int(len(b[1]) > 1)
        # END for each item to traverse
        assert c, "Should have executed at least one blame command"
        assert nml, "There should at least be one blame commit that contains multiple lines"
    @patch.object(Git, '_call_process')
    def test_blame_complex_revision(self, git):
        """blame() accepts a revision range such as HEAD~10..HEAD."""
        git.return_value = fixture('blame_complex_revision')
        res = self.rorepo.blame("HEAD~10..HEAD", "README.md")
        assert len(res) == 1
        assert len(res[0][1]) == 83, "Unexpected amount of parsed blame lines"
    def test_untracked_files(self):
        """untracked_files lists newly created files and forgets removed ones."""
        base = self.rorepo.working_tree_dir
        files = (join_path_native(base, "__test_myfile"),
                 join_path_native(base, "__test_other_file"))
        num_recently_untracked = 0
        try:
            # create two empty files the repository does not know about
            for fpath in files:
                fd = open(fpath, "wb")
                fd.close()
            # END for each filename
            untracked_files = self.rorepo.untracked_files
            num_recently_untracked = len(untracked_files)

            # assure we have all names - they are relative to the git-dir
            num_test_untracked = 0
            for utfile in untracked_files:
                num_test_untracked += join_path_native(base, utfile) in files
            assert len(files) == num_test_untracked
        finally:
            for fpath in files:
                if os.path.isfile(fpath):
                    os.remove(fpath)
            # END handle files

        # after removal, exactly our files must be gone from the listing again
        assert len(self.rorepo.untracked_files) == (num_recently_untracked - len(files))
def test_config_reader(self):
reader = self.rorepo.config_reader() # all config files
assert reader.read_only
reader = self.rorepo.config_reader("repository") # single config file
assert reader.read_only
    def test_config_writer(self):
        """A writable (non read-only) writer is obtainable for each config level."""
        for config_level in self.rorepo.config_level:
            try:
                writer = self.rorepo.config_writer(config_level)
                assert not writer.read_only
                writer.release()
            except IOError:
                # its okay not to get a writer for some configuration files if we
                # have no permissions
                pass
        # END for each config level
    def test_config_level_paths(self):
        # every supported configuration level must map to a config file path
        for config_level in self.rorepo.config_level:
            assert self.rorepo._get_config_path(config_level)
        # end for each config level
def test_creation_deletion(self):
# just a very quick test to assure it generally works. There are
# specialized cases in the test_refs module
head = self.rorepo.create_head("new_head", "HEAD~1")
self.rorepo.delete_head(head)
tag = self.rorepo.create_tag("new_tag", "HEAD~2")
self.rorepo.delete_tag(tag)
writer = self.rorepo.config_writer()
writer.release()
remote = self.rorepo.create_remote("new_remote", "git@server:repo.git")
self.rorepo.delete_remote(remote)
def test_comparison_and_hash(self):
# this is only a preliminary test, more testing done in test_index
assert self.rorepo == self.rorepo and not (self.rorepo != self.rorepo)
assert len(set((self.rorepo, self.rorepo))) == 1
@with_rw_directory
def test_tilde_and_env_vars_in_repo_path(self, rw_dir):
ph = os.environ['HOME']
try:
os.environ['HOME'] = rw_dir
Repo.init(os.path.join('~', 'test.git'), bare=True)
os.environ['FOO'] = rw_dir
Repo.init(os.path.join('$FOO', 'test.git'), bare=True)
finally:
os.environ['HOME'] = ph
del os.environ['FOO']
# end assure HOME gets reset to what it was
def test_git_cmd(self):
# test CatFileContentStream, just to be very sure we have no fencepost errors
# last \n is the terminating newline that it expects
l1 = b"0123456789\n"
l2 = b"abcdefghijklmnopqrstxy\n"
l3 = b"z\n"
d = l1 + l2 + l3 + b"\n"
l1p = l1[:5]
# full size
# size is without terminating newline
def mkfull():
return Git.CatFileContentStream(len(d) - 1, BytesIO(d))
ts = 5
def mktiny():
return Git.CatFileContentStream(ts, BytesIO(d))
# readlines no limit
s = mkfull()
lines = s.readlines()
assert len(lines) == 3 and lines[-1].endswith(b'\n')
assert s._stream.tell() == len(d) # must have scrubbed to the end
# realines line limit
s = mkfull()
lines = s.readlines(5)
assert len(lines) == 1
# readlines on tiny sections
s = mktiny()
lines = s.readlines()
assert len(lines) == 1 and lines[0] == l1p
assert s._stream.tell() == ts + 1
# readline no limit
s = mkfull()
assert s.readline() == l1
assert s.readline() == l2
assert s.readline() == l3
assert s.readline() == ''
assert s._stream.tell() == len(d)
# readline limit
s = mkfull()
assert s.readline(5) == l1p
assert s.readline() == l1[5:]
# readline on tiny section
s = mktiny()
assert s.readline() == l1p
assert s.readline() == ''
assert s._stream.tell() == ts + 1
# read no limit
s = mkfull()
assert s.read() == d[:-1]
assert s.read() == ''
assert s._stream.tell() == len(d)
# read limit
s = mkfull()
assert s.read(5) == l1p
assert s.read(6) == l1[5:]
assert s._stream.tell() == 5 + 6 # its not yet done
# read tiny
s = mktiny()
assert s.read(2) == l1[:2]
assert s._stream.tell() == 2
assert s.read() == l1[2:ts]
assert s._stream.tell() == ts + 1
    def _assert_rev_parse_types(self, name, rev_obj):
        """Verify the ^{tree} and :<path> rev-parse variants for *name*.

        :param rev_obj: object that *name* resolves to; tags are dereferenced first
        """
        rev_parse = self.rorepo.rev_parse

        if rev_obj.type == 'tag':
            rev_obj = rev_obj.object

        # tree and blob type
        obj = rev_parse(name + '^{tree}')
        assert obj == rev_obj.tree

        obj = rev_parse(name + ':CHANGES')
        assert obj.type == 'blob' and obj.path == 'CHANGES'
        assert rev_obj.tree['CHANGES'] == obj
def _assert_rev_parse(self, name):
"""tries multiple different rev-parse syntaxes with the given name
:return: parsed object"""
rev_parse = self.rorepo.rev_parse
orig_obj = rev_parse(name)
if orig_obj.type == 'tag':
obj = orig_obj.object
else:
obj = orig_obj
# END deref tags by default
# try history
rev = name + "~"
obj2 = rev_parse(rev)
assert obj2 == obj.parents[0]
self._assert_rev_parse_types(rev, obj2)
# history with number
ni = 11
history = [obj.parents[0]]
for pn in range(ni):
history.append(history[-1].parents[0])
# END get given amount of commits
for pn in range(11):
rev = name + "~%i" % (pn + 1)
obj2 = rev_parse(rev)
assert obj2 == history[pn]
self._assert_rev_parse_types(rev, obj2)
# END history check
# parent ( default )
rev = name + "^"
obj2 = rev_parse(rev)
assert obj2 == obj.parents[0]
self._assert_rev_parse_types(rev, obj2)
# parent with number
for pn, parent in enumerate(obj.parents):
rev = name + "^%i" % (pn + 1)
assert rev_parse(rev) == parent
self._assert_rev_parse_types(rev, parent)
# END for each parent
return orig_obj
    @with_rw_repo('HEAD', bare=False)
    def test_rw_rev_parse(self, rwrepo):
        """rev_parse must resolve an all-hex branch name as a ref, not a sha."""
        # verify it does not confuse branches with hexsha ids
        ahead = rwrepo.create_head('aaaaaaaa')
        assert(rwrepo.rev_parse(str(ahead)) == ahead.commit)
    def test_rev_parse(self):
        """Exercise the many rev-parse syntaxes against the read-only repository."""
        rev_parse = self.rorepo.rev_parse

        # try special case: This one failed at some point, make sure its fixed
        assert rev_parse("33ebe").hexsha == "33ebe7acec14b25c5f84f35a664803fcab2f7781"

        # start from reference
        num_resolved = 0

        for ref_no, ref in enumerate(Reference.iter_items(self.rorepo)):
            # try every suffix of the ref path, longest last
            path_tokens = ref.path.split("/")
            for pt in range(len(path_tokens)):
                path_section = '/'.join(path_tokens[-(pt + 1):])
                try:
                    obj = self._assert_rev_parse(path_section)
                    assert obj.type == ref.object.type
                    num_resolved += 1
                except (BadName, BadObject):
                    print("failed on %s" % path_section)
                    # is fine, in case we have something like 112, which belongs to remotes/rname/merge-requests/112
                    pass
                # END exception handling
            # END for each token
            if ref_no == 3 - 1:
                break
        # END for each reference
        assert num_resolved

        # it works with tags !
        tag = self._assert_rev_parse('0.1.4')
        assert tag.type == 'tag'

        # try full sha directly ( including type conversion )
        assert tag.object == rev_parse(tag.object.hexsha)
        self._assert_rev_parse_types(tag.object.hexsha, tag.object)

        # multiple tree types result in the same tree: HEAD^{tree}^{tree}:CHANGES
        rev = '0.1.4^{tree}^{tree}'
        assert rev_parse(rev) == tag.object.tree
        assert rev_parse(rev + ':CHANGES') == tag.object.tree['CHANGES']

        # try to get parents from first revision - it should fail as no such revision
        # exists
        first_rev = "33ebe7acec14b25c5f84f35a664803fcab2f7781"
        commit = rev_parse(first_rev)
        assert len(commit.parents) == 0
        assert commit.hexsha == first_rev
        self.failUnlessRaises(BadName, rev_parse, first_rev + "~")
        self.failUnlessRaises(BadName, rev_parse, first_rev + "^")

        # short SHA1
        commit2 = rev_parse(first_rev[:20])
        assert commit2 == commit
        commit2 = rev_parse(first_rev[:5])
        assert commit2 == commit

        # todo: dereference tag into a blob 0.1.7^{blob} - quite a special one
        # needs a tag which points to a blob

        # ref^0 returns commit being pointed to, same with ref~0, and ^{}
        tag = rev_parse('0.1.4')
        for token in (('~0', '^0', '^{}')):
            assert tag.object == rev_parse('0.1.4%s' % token)
        # END handle multiple tokens

        # try partial parsing
        max_items = 40
        for i, binsha in enumerate(self.rorepo.odb.sha_iter()):
            assert rev_parse(bin_to_hex(binsha)[:8 - (i % 2)].decode('ascii')).binsha == binsha
            if i > max_items:
                # this is rather slow currently, as rev_parse returns an object
                # which requires accessing packs, it has some additional overhead
                break
        # END for each binsha in repo

        # missing closing brace commit^{tree
        self.failUnlessRaises(ValueError, rev_parse, '0.1.4^{tree')

        # missing starting brace
        self.failUnlessRaises(ValueError, rev_parse, '0.1.4^tree}')

        # REVLOG
        #######
        head = self.rorepo.head

        # need to specify a ref when using the @ syntax
        self.failUnlessRaises(BadObject, rev_parse, "%s@{0}" % head.commit.hexsha)

        # uses HEAD.ref by default
        assert rev_parse('@{0}') == head.commit
        if not head.is_detached:
            refspec = '%s@{0}' % head.ref.name
            assert rev_parse(refspec) == head.ref.commit
            # all additional specs work as well
            assert rev_parse(refspec + "^{tree}") == head.commit.tree
            assert rev_parse(refspec + ":CHANGES").type == 'blob'
        # END operate on non-detached head

        # position doesn't exist
        self.failUnlessRaises(IndexError, rev_parse, '@{10000}')

        # currently, nothing more is supported
        self.failUnlessRaises(NotImplementedError, rev_parse, "@{1 week ago}")

        # the last position
        assert rev_parse('@{1}') != head.commit
    def test_repo_odbtype(self):
        # GitDB is the default object database backend; ancient interpreters
        # (< 2.5) fall back to the git-command based backend
        target_type = GitDB
        if sys.version_info[:2] < (2, 5):
            target_type = GitCmdObjectDB
        assert isinstance(self.rorepo.odb, target_type)
    def test_submodules(self):
        """Submodule listing, recursive iteration, lookup by name and error case."""
        assert len(self.rorepo.submodules) == 1  # non-recursive
        assert len(list(self.rorepo.iter_submodules())) >= 2

        assert isinstance(self.rorepo.submodule("gitdb"), Submodule)
        self.failUnlessRaises(ValueError, self.rorepo.submodule, "doesn't exist")
    @with_rw_repo('HEAD', bare=False)
    def test_submodule_update(self, rwrepo):
        """submodule_update refuses bare repos; create_submodule works otherwise."""
        # fails in bare mode
        rwrepo._bare = True
        self.failUnlessRaises(InvalidGitRepositoryError, rwrepo.submodule_update)
        rwrepo._bare = False

        # test create submodule
        sm = rwrepo.submodules[0]
        sm = rwrepo.create_submodule("my_new_sub", "some_path", join_path_native(self.rorepo.working_tree_dir, sm.path))
        assert isinstance(sm, Submodule)

        # note: the rest of this functionality is tested in test_submodule
@with_rw_repo('HEAD')
def test_git_file(self, rwrepo):
# Move the .git directory to another location and create the .git file.
real_path_abs = os.path.abspath(join_path_native(rwrepo.working_tree_dir, '.real'))
os.rename(rwrepo.git_dir, real_path_abs)
git_file_path = join_path_native(rwrepo.working_tree_dir, '.git')
open(git_file_path, 'wb').write(fixture('git_file'))
# Create a repo and make sure it's pointing to the relocated .git directory.
git_file_repo = Repo(rwrepo.working_tree_dir)
assert os.path.abspath(git_file_repo.git_dir) == real_path_abs
# Test using an absolute gitdir path in the .git file.
open(git_file_path, 'wb').write(('gitdir: %s\n' % real_path_abs).encode('ascii'))
git_file_repo = Repo(rwrepo.working_tree_dir)
assert os.path.abspath(git_file_repo.git_dir) == real_path_abs
    def test_file_handle_leaks(self):
        """Repeated object-database access must not leak file handles.

        Intended to run with a lowered descriptor limit (ulimit -n, see
        .travis.yml); the loop would exhaust it quickly if handles leaked.
        """
        def last_commit(repo, rev, path):
            # resolve one commit and touch its tree to force pack access
            commit = next(repo.iter_commits(rev, path, max_count=1))
            commit.tree[path]

        # This is based on this comment
        # https://github.com/gitpython-developers/GitPython/issues/60#issuecomment-23558741
        # And we expect to set max handles to a low value, like 64
        # You should set ulimit -n X, see .travis.yml
        # The loops below would easily create 500 handles if these would leak (4 pipes + multiple mapped files)
        for i in range(64):
            for repo_type in (GitCmdObjectDB, GitDB):
                repo = Repo(self.rorepo.working_tree_dir, odbt=repo_type)
                last_commit(repo, 'master', 'git/test/test_base.py')
            # end for each repository type
        # end for each iteration
def test_remote_method(self):
self.failUnlessRaises(ValueError, self.rorepo.remote, 'foo-blue')
assert isinstance(self.rorepo.remote(name='origin'), Remote)
    @with_rw_directory
    def test_empty_repo(self, rw_dir):
        """Assure we can handle empty repositories"""
        r = Repo.init(rw_dir, mkdir=False)
        # It's ok not to be able to iterate a commit, as there is none
        self.failUnlessRaises(ValueError, r.iter_commits)
        assert r.active_branch.name == 'master'
        assert not r.active_branch.is_valid(), "Branch is yet to be born"

        # actually, when trying to create a new branch without a commit, git itself fails
        # We should, however, not fail ungracefully
        self.failUnlessRaises(BadName, r.create_head, 'foo')
        self.failUnlessRaises(BadName, r.create_head, 'master')
        # It's expected to not be able to access a tree
        self.failUnlessRaises(ValueError, r.tree)

        # after the first commit, branch creation must work
        new_file_path = os.path.join(rw_dir, "new_file.ext")
        touch(new_file_path)
        r.index.add([new_file_path])
        r.index.commit("initial commit")

        # Now a branch should be creatable
        nb = r.create_head('foo')
        assert nb.is_valid()
    def test_merge_base(self):
        """merge_base validates its arguments and finds single/all merge bases."""
        repo = self.rorepo
        c1 = 'f6aa8d1'
        c2 = repo.commit('d46e3fe')
        c3 = '763ef75'
        # fewer than two revisions is an error
        self.failUnlessRaises(ValueError, repo.merge_base)
        self.failUnlessRaises(ValueError, repo.merge_base, 'foo')

        # two commit merge-base
        res = repo.merge_base(c1, c2)
        assert isinstance(res, list) and len(res) == 1 and isinstance(res[0], Commit)
        assert res[0].hexsha.startswith('3936084')

        for kw in ('a', 'all'):
            res = repo.merge_base(c1, c2, c3, **{kw: True})
            assert isinstance(res, list) and len(res) == 1
        # end for each keyword signalling all merge-bases to be returned

        # Test for no merge base - can't do as we have
        self.failUnlessRaises(GitCommandError, repo.merge_base, c1, 'ffffff')
| mit |
brandond/ansible | lib/ansible/modules/remote_management/ucs/ucs_disk_group_policy.py | 21 | 16473 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: ucs_disk_group_policy
short_description: Configures disk group policies on Cisco UCS Manager
description:
- Configures disk group policies on Cisco UCS Manager.
- Examples can be used with the L(UCS Platform Emulator,https://communities.cisco.com/ucspe).
extends_documentation_fragment: ucs
options:
state:
description:
- Desired state of the disk group policy.
- If C(present), will verify that the disk group policy is present and will create if needed.
- If C(absent), will verify that the disk group policy is absent and will delete if needed.
choices: [present, absent]
default: present
name:
description:
- The name of the disk group policy.
This name can be between 1 and 16 alphanumeric characters.
- "You cannot use spaces or any special characters other than - (hyphen), \"_\" (underscore), : (colon), and . (period)."
- You cannot change this name after the policy is created.
required: yes
description:
description:
- The user-defined description of the storage profile.
Enter up to 256 characters.
"You can use any characters or spaces except the following:"
"` (accent mark), \ (backslash), ^ (carat), \" (double quote), = (equal sign), > (greater than), < (less than), or ' (single quote)."
aliases: [ descr ]
raid_level:
description:
- "The RAID level for the disk group policy. This can be one of the following:"
- "stripe - UCS Manager shows RAID 0 Striped"
- "mirror - RAID 1 Mirrored"
- "mirror-stripe - RAID 10 Mirrored and Striped"
- "stripe-parity - RAID 5 Striped Parity"
- "stripe-dual-parity - RAID 6 Striped Dual Parity"
- "stripe-parity-stripe - RAID 50 Striped Parity and Striped"
- "stripe-dual-parity-stripe - RAID 60 Striped Dual Parity and Striped"
choices: [stripe, mirror, mirror-stripe, stripe-parity, stripe-dual-parity, stripe-parity-stripe, stripe-dual-parity-stripe]
default: stripe
configuration_mode:
description:
- "Disk group configuration mode. Choose one of the following:"
- "automatic - Automatically configures the disks in the disk group."
- "manual - Enables you to manually configure the disks in the disk group."
choices: [automatic, manual]
default: automatic
num_drives:
description:
- Specify the number of drives for the disk group.
- This can be from 0 to 24.
- Option only applies when configuration mode is automatic.
default: 1
drive_type:
description:
- Specify the drive type to use in the drive group.
- "This can be one of the following:"
- "unspecified — Selects the first available drive type, and applies that to all drives in the group."
- "HDD — Hard disk drive"
- "SSD — Solid state drive"
- Option only applies when configuration mode is automatic.
choices: [unspecified, HDD, SSD]
default: unspecified
num_ded_hot_spares:
description:
- Specify the number of hot spares for the disk group.
- This can be from 0 to 24.
- Option only applies when configuration mode is automatic.
default: unspecified
num_glob_hot_spares:
description:
- Specify the number of global hot spares for the disk group.
- This can be from 0 to 24.
- Option only applies when configuration mode is automatic.
default: unspecified
min_drive_size:
description:
- Specify the minimum drive size or unspecified to allow all drive sizes.
- This can be from 0 to 10240 GB.
- Option only applies when configuration mode is automatic.
default: 'unspecified'
use_remaining_disks:
description:
- Specifies whether you can use all the remaining disks in the disk group or not.
- Option only applies when configuration mode is automatic.
choices: ['yes', 'no']
default: 'no'
manual_disks:
description:
- List of manually configured disks.
- Options are only used when you choose manual configuration_mode.
suboptions:
name:
description:
- The name of the local LUN.
required: yes
slot_num:
description:
- The slot number of the specific disk.
role:
description:
- "The role of the disk. This can be one of the following:"
- "normal - Normal"
- "ded-hot-spare - Dedicated Hot Spare"
- "glob-hot-spare - Glob Hot Spare"
span_id:
description:
- The Span ID of the specific disk.
default: 'unspecified'
state:
description:
- If C(present), will verify disk slot is configured within policy.
If C(absent), will verify disk slot is absent from policy.
choices: [ present, absent ]
default: present
virtual_drive:
description:
- Configuration of virtual drive options.
suboptions:
access_policy:
description:
- Configure access policy to virtual drive.
choices: [blocked, hidden, platform-default, read-only, read-write, transport-ready]
default: platform-default
drive_cache:
description:
- Configure drive caching.
choices: [disable, enable, no-change, platform-default]
default: platform-default
io_policy:
description:
- Direct or Cached IO path.
choices: [cached, direct, platform-default]
default: platform-default
read_policy:
description:
- Read access policy to virtual drive.
choices: [normal, platform-default, read-ahead]
default: platform-default
strip_size:
description:
- Virtual drive strip size.
choices: [ present, absent ]
default: platform-default
write_cache_policy:
description:
- Write back cache policy.
choices: [always-write-back, platform-default, write-back-good-bbu, write-through]
default: platform-default
org_dn:
description:
- The distinguished name (dn) of the organization where the resource is assigned.
default: org-root
requirements:
- ucsmsdk
author:
- Sindhu Sudhir (@sisudhir)
- David Soper (@dsoper2)
- CiscoUcs (@CiscoUcs)
- Brett Johnson (@sdbrett)
version_added: '2.8'
'''
EXAMPLES = r'''
- name: Configure Disk Group Policy
ucs_disk_group_policy:
hostname: 172.16.143.150
username: admin
password: password
name: DEE-DG
raid_level: mirror
configuration_mode: manual
manual_disks:
- slot_num: '1'
role: normal
- slot_num: '2'
role: normal
- name: Remove Disk Group Policy
ucs_disk_group_policy:
name: DEE-DG
hostname: 172.16.143.150
username: admin
password: password
state: absent
- name: Remove Disk from Policy
ucs_disk_group_policy:
hostname: 172.16.143.150
username: admin
password: password
name: DEE-DG
description: Testing Ansible
raid_level: stripe
configuration_mode: manual
manual_disks:
- slot_num: '1'
role: normal
- slot_num: '2'
role: normal
state: absent
virtual_drive:
access_policy: platform-default
io_policy: direct
strip_size: 64KB
'''
RETURN = r'''
#
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.remote_management.ucs import UCSModule, ucs_argument_spec
def configure_disk_policy(ucs, module, dn):
    """Create or update the disk group policy at *dn* from the module params.

    Honors Ansible check mode (no changes applied), but always marks the
    result as changed: the caller only invokes this when the existing object
    is missing or does not match the desired properties.
    """
    from ucsmsdk.mometa.lstorage.LstorageDiskGroupConfigPolicy import LstorageDiskGroupConfigPolicy
    from ucsmsdk.mometa.lstorage.LstorageDiskGroupQualifier import LstorageDiskGroupQualifier
    from ucsmsdk.mometa.lstorage.LstorageLocalDiskConfigRef import LstorageLocalDiskConfigRef

    if not module.check_mode:
        try:
            # create if mo does not already exist
            mo = LstorageDiskGroupConfigPolicy(
                parent_mo_or_dn=module.params['org_dn'],
                name=module.params['name'],
                descr=module.params['description'],
                raid_level=module.params['raid_level'],
            )
            if module.params['configuration_mode'] == 'automatic':
                # automatic mode: drives are described by a qualifier child object
                LstorageDiskGroupQualifier(
                    parent_mo_or_dn=mo,
                    num_drives=module.params['num_drives'],
                    drive_type=module.params['drive_type'],
                    use_remaining_disks=module.params['use_remaining_disks'],
                    num_ded_hot_spares=module.params['num_ded_hot_spares'],
                    num_glob_hot_spares=module.params['num_glob_hot_spares'],
                    min_drive_size=module.params['min_drive_size'],
                )
            else:  # configuration_mode == 'manual'
                for disk in module.params['manual_disks']:
                    if disk['state'] == 'absent':
                        # remove the slot's existing config reference, if any
                        child_dn = dn + '/slot-' + disk['slot_num']
                        mo_1 = ucs.login_handle.query_dn(child_dn)
                        if mo_1:
                            ucs.login_handle.remove_mo(mo_1)
                    else:  # state == 'present'
                        LstorageLocalDiskConfigRef(
                            parent_mo_or_dn=mo,
                            slot_num=disk['slot_num'],
                            role=disk['role'],
                            span_id=disk['span_id'],
                        )
            if module.params['virtual_drive']:
                _configure_virtual_drive(module, mo)
            ucs.login_handle.add_mo(mo, True)
            ucs.login_handle.commit()
        except Exception as e:  # generic Exception handling because SDK can throw a variety
            ucs.result['msg'] = "setup error: %s " % str(e)
            module.fail_json(**ucs.result)

    ucs.result['changed'] = True
def check_disk_policy_props(ucs, module, mo, dn):
    """Return True if the existing policy *mo* matches all desired module params.

    Checks the top-level policy properties first, then - depending on the
    configuration mode - either the disk-group qualifier child or every
    manually configured disk slot, and finally the virtual drive definition.
    """
    props_match = True

    # check top-level mo props
    kwargs = dict(descr=module.params['description'])
    kwargs['raid_level'] = module.params['raid_level']
    if mo.check_prop_match(**kwargs):
        # top-level props match, check next level mo/props
        if module.params['configuration_mode'] == 'automatic':
            child_dn = dn + '/disk-group-qual'
            mo_1 = ucs.login_handle.query_dn(child_dn)
            if mo_1:
                kwargs = dict(num_drives=module.params['num_drives'])
                kwargs['drive_type'] = module.params['drive_type']
                kwargs['use_remaining_disks'] = module.params['use_remaining_disks']
                kwargs['num_ded_hot_spares'] = module.params['num_ded_hot_spares']
                kwargs['num_glob_hot_spares'] = module.params['num_glob_hot_spares']
                kwargs['min_drive_size'] = module.params['min_drive_size']
                props_match = mo_1.check_prop_match(**kwargs)
        else:  # configuration_mode == 'manual'
            for disk in module.params['manual_disks']:
                child_dn = dn + '/slot-' + disk['slot_num']
                mo_1 = ucs.login_handle.query_dn(child_dn)
                if mo_1:
                    if disk['state'] == 'absent':
                        # the slot exists but should be gone -> reconfigure needed
                        props_match = False
                    else:  # state == 'present'
                        kwargs = dict(slot_num=disk['slot_num'])
                        kwargs['role'] = disk['role']
                        kwargs['span_id'] = disk['span_id']
                        if not mo_1.check_prop_match(**kwargs):
                            props_match = False
                            break
        if props_match:
            if module.params['virtual_drive']:
                props_match = check_virtual_drive_props(ucs, module, dn)
    else:
        props_match = False
    return props_match
def check_virtual_drive_props(ucs, module, dn):
    """Return True if the policy's virtual drive definition matches the module params.

    Queries the `virtual-drive-def` child object of the disk group policy at
    *dn* and compares every property supplied in module.params['virtual_drive'].
    """
    child_dn = dn + '/virtual-drive-def'
    mo_1 = ucs.login_handle.query_dn(child_dn)
    if not mo_1:
        # child object does not exist yet, so the desired props cannot match;
        # the original code raised AttributeError on the None result here
        return False
    return mo_1.check_prop_match(**module.params['virtual_drive'])
def _configure_virtual_drive(module, mo):
    # attach a virtual drive definition child built from the module options
    from ucsmsdk.mometa.lstorage.LstorageVirtualDriveDef import LstorageVirtualDriveDef
    LstorageVirtualDriveDef(parent_mo_or_dn=mo, **module.params['virtual_drive'])
def _virtual_drive_argument_spec():
return dict(
access_policy=dict(type='str', default='platform-default',
choices=["blocked", "hidden", "platform-default", "read-only", "read-write",
"transport-ready"]),
drive_cache=dict(type='str', default='platform-default',
choices=["disable", "enable", "no-change", "platform-default"]),
io_policy=dict(type='str', default='platform-default',
choices=["cached", "direct", "platform-default"]),
read_policy=dict(type='str', default='platform-default',
choices=["normal", "platform-default", "read-ahead"]),
strip_size=dict(type='str', default='platform-default',
choices=["1024KB", "128KB", "16KB", "256KB", "32KB", "512KB", "64KB", "8KB",
"platform-default"]),
write_cache_policy=dict(type='str', default='platform-default',
choices=["always-write-back", "platform-default", "write-back-good-bbu",
"write-through"]),
)
def main():
    """Ansible entry point: ensure the UCS disk group policy matches `state`."""
    manual_disk = dict(
        slot_num=dict(type='str', required=True),
        role=dict(type='str', default='normal', choices=['normal', 'ded-hot-spare', 'glob-hot-spare']),
        span_id=dict(type='str', default='unspecified'),
        state=dict(type='str', default='present', choices=['present', 'absent']),
    )

    # Work on a copy of the shared base spec. The original code mutated the
    # module-level `ucs_argument_spec` dict in place, leaking these options
    # into every other consumer of that shared dict in the same process.
    argument_spec = dict(ucs_argument_spec)
    argument_spec.update(
        org_dn=dict(type='str', default='org-root'),
        name=dict(type='str', required=True),
        description=dict(type='str', aliases=['descr'], default=''),
        raid_level=dict(
            type='str',
            default='stripe',
            choices=[
                'stripe',
                'mirror',
                'mirror-stripe',
                'stripe-parity',
                'stripe-dual-parity',
                'stripe-parity-stripe',
                'stripe-dual-parity-stripe',
            ],
        ),
        num_drives=dict(type='str', default='1'),
        configuration_mode=dict(type='str', default='automatic', choices=['automatic', 'manual']),
        num_ded_hot_spares=dict(type='str', default='unspecified'),
        num_glob_hot_spares=dict(type='str', default='unspecified'),
        drive_type=dict(type='str', default='unspecified', choices=['unspecified', 'HDD', 'SSD']),
        use_remaining_disks=dict(type='str', default='no', choices=['yes', 'no']),
        min_drive_size=dict(type='str', default='unspecified'),
        manual_disks=dict(type='list', elements='dict', options=manual_disk),
        state=dict(type='str', default='present', choices=['present', 'absent']),
        virtual_drive=dict(type='dict', options=_virtual_drive_argument_spec()),
    )

    module = AnsibleModule(
        argument_spec,
        supports_check_mode=True,
    )
    ucs = UCSModule(module)
    # UCSModule creation above verifies ucsmsdk is present and exits on failure.
    # Additional imports are done below or in called functions.

    ucs.result['changed'] = False
    props_match = False

    # dn is <org_dn>/disk-group-config-<name>
    dn = module.params['org_dn'] + '/disk-group-config-' + module.params['name']
    mo = ucs.login_handle.query_dn(dn)
    if mo:
        if module.params['state'] == 'absent':
            # mo must exist but all properties do not have to match
            if not module.check_mode:
                ucs.login_handle.remove_mo(mo)
                ucs.login_handle.commit()
            ucs.result['changed'] = True
        else:  # state == 'present'
            props_match = check_disk_policy_props(ucs, module, mo, dn)

    if module.params['state'] == 'present' and not props_match:
        configure_disk_policy(ucs, module, dn)

    module.exit_json(**ucs.result)
if __name__ == '__main__':
main()
| gpl-3.0 |
vlachoudis/sl4a | python/src/Lib/distutils/version.py | 50 | 11486 | #
# distutils/version.py
#
# Implements multiple version numbering conventions for the
# Python Module Distribution Utilities.
#
# $Id: version.py 71270 2009-04-05 21:11:43Z georg.brandl $
#
"""Provides classes to represent module version numbers (one class for
each style of version numbering). There are currently two such classes
implemented: StrictVersion and LooseVersion.
Every version number class implements the following interface:
* the 'parse' method takes a string and parses it to some internal
representation; if the string is an invalid version number,
'parse' raises a ValueError exception
* the class constructor takes an optional string argument which,
if supplied, is passed to 'parse'
* __str__ reconstructs the string that was passed to 'parse' (or
an equivalent string -- ie. one that will generate an equivalent
version number instance)
* __repr__ generates Python code to recreate the version number instance
* __cmp__ compares the current instance with either another instance
of the same class or a string (which will be parsed to an instance
of the same class, thus must follow the same rules)
"""
import string, re
from types import StringType
class Version:
    """Abstract base class for version numbering classes.

    Supplies only the constructor (__init__) and the reproducer (__repr__),
    which are identical across all version numbering classes; everything
    else is left to concrete subclasses (see interface below).
    """

    def __init__(self, vstring=None):
        # delegate to the subclass-provided parse() when a string is supplied
        if vstring:
            self.parse(vstring)

    def __repr__(self):
        return "%s ('%s')" % (self.__class__.__name__, str(self))

    # Interface for version-number classes -- must be implemented
    # by the following classes (the concrete ones -- Version should
    # be treated as an abstract class).
    #    __init__ (string) - create and take same action as 'parse'
    #                        (string parameter is optional)
    #    parse (string)    - convert a string representation to whatever
    #                        internal representation is appropriate for
    #                        this style of version numbering
    #    __str__ (self)    - convert back to a string; should be very similar
    #                        (if not identical to) the string supplied to parse
    #    __repr__ (self)   - generate Python code to recreate
    #                        the instance
    #    __cmp__ (self, other) - compare two version numbers ('other' may
    #                        be an unparsed version string, or another
    #                        instance of your version class)
class StrictVersion (Version):
    """Version numbering for anal retentives and software idealists.

    Implements the standard interface for version number classes as
    described above.  A version number consists of two or three
    dot-separated numeric components, with an optional "pre-release" tag
    on the end.  The pre-release tag consists of the letter 'a' or 'b'
    followed by a number.  If the numeric components of two version
    numbers are equal, then one with a pre-release tag will always
    be deemed earlier (lesser) than one without.

    The following are valid version numbers (shown in the order that
    would be obtained by sorting according to the supplied cmp function):

        0.4       0.4.0  (these two are equivalent)
        0.4.1
        0.5a1
        0.5b3
        0.5
        0.9.6
        1.0
        1.0.4a3
        1.0.4b1
        1.0.4

    The following are examples of invalid version numbers:

        1
        2.7.2.2
        1.3.a4
        1.3pl1
        1.3c4

    The rationale for this version numbering system will be explained
    in the distutils documentation.
    """

    # Two or three dot-separated numbers, optionally followed by an
    # 'a'/'b' pre-release tag with its own number (e.g. "1.0.4b1").
    version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$',
                            re.VERBOSE)

    def parse (self, vstring):
        """Parse vstring into self.version (3-tuple of ints) and
        self.prerelease (('a'|'b', int) or None).  Raises ValueError
        on malformed input."""
        match = self.version_re.match(vstring)
        if not match:
            raise ValueError, "invalid version number '%s'" % vstring

        (major, minor, patch, prerelease, prerelease_num) = \
            match.group(1, 2, 4, 5, 6)

        if patch:
            self.version = tuple(map(string.atoi, [major, minor, patch]))
        else:
            # Missing patch level is normalized to 0, so "0.4" == "0.4.0".
            self.version = tuple(map(string.atoi, [major, minor]) + [0])

        if prerelease:
            # prerelease[0] is the tag letter ('a' or 'b').
            self.prerelease = (prerelease[0], string.atoi(prerelease_num))
        else:
            self.prerelease = None

    def __str__ (self):
        # A zero patch level is dropped for brevity: (0, 4, 0) -> "0.4".
        if self.version[2] == 0:
            vstring = string.join(map(str, self.version[0:2]), '.')
        else:
            vstring = string.join(map(str, self.version), '.')

        if self.prerelease:
            vstring = vstring + self.prerelease[0] + str(self.prerelease[1])

        return vstring

    def __cmp__ (self, other):
        # Strings are parsed on the fly so "1.0" compares naturally.
        if isinstance(other, StringType):
            other = StrictVersion(other)

        compare = cmp(self.version, other.version)
        if (compare == 0):              # have to compare prerelease
            # case 1: neither has prerelease; they're equal
            # case 2: self has prerelease, other doesn't; other is greater
            # case 3: self doesn't have prerelease, other does: self is greater
            # case 4: both have prerelease: must compare them!
            if (not self.prerelease and not other.prerelease):
                return 0
            elif (self.prerelease and not other.prerelease):
                return -1
            elif (not self.prerelease and other.prerelease):
                return 1
            elif (self.prerelease and other.prerelease):
                return cmp(self.prerelease, other.prerelease)
        else:                           # numeric versions don't match --
            return compare              # prerelease stuff doesn't matter
# end class StrictVersion
# The rules according to Greg Stein:
# 1) a version number has 1 or more numbers separated by a period or by
# sequences of letters. If only periods, then these are compared
# left-to-right to determine an ordering.
# 2) sequences of letters are part of the tuple for comparison and are
# compared lexicographically
# 3) recognize the numeric components may have leading zeroes
#
# The LooseVersion class below implements these rules: a version number
# string is split up into a tuple of integer and string components, and
# comparison is a simple tuple comparison. This means that version
# numbers behave in a predictable and obvious way, but a way that might
# not necessarily be how people *want* version numbers to behave. There
# wouldn't be a problem if people could stick to purely numeric version
# numbers: just split on period and compare the numbers as tuples.
# However, people insist on putting letters into their version numbers;
# the most common purpose seems to be:
# - indicating a "pre-release" version
# ('alpha', 'beta', 'a', 'b', 'pre', 'p')
# - indicating a post-release patch ('p', 'pl', 'patch')
# but of course this can't cover all version number schemes, and there's
# no way to know what a programmer means without asking him.
#
# The problem is what to do with letters (and other non-numeric
# characters) in a version number. The current implementation does the
# obvious and predictable thing: keep them as strings and compare
# lexically within a tuple comparison. This has the desired effect if
# an appended letter sequence implies something "post-release":
# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002".
#
# However, if letters in a version number imply a pre-release version,
# the "obvious" thing isn't correct. Eg. you would expect that
# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison
# implemented here, this just isn't so.
#
# Two possible solutions come to mind. The first is to tie the
# comparison algorithm to a particular set of semantic rules, as has
# been done in the StrictVersion class above. This works great as long
# as everyone can go along with bondage and discipline. Hopefully a
# (large) subset of Python module programmers will agree that the
# particular flavour of bondage and discipline provided by StrictVersion
# provides enough benefit to be worth using, and will submit their
# version numbering scheme to its domination. The free-thinking
# anarchists in the lot will never give in, though, and something needs
# to be done to accommodate them.
#
# Perhaps a "moderately strict" version class could be implemented that
# lets almost anything slide (syntactically), and makes some heuristic
# assumptions about non-digits in version number strings. This could
# sink into special-case-hell, though; if I was as talented and
# idiosyncratic as Larry Wall, I'd go ahead and implement a class that
# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is
# just as happy dealing with things like "2g6" and "1.13++". I don't
# think I'm smart enough to do it right though.
#
# In any case, I've coded the test suite for this module (see
# ../test/test_version.py) specifically to fail on things like comparing
# "1.2a2" and "1.2". That's not because the *code* is doing anything
# wrong, it's because the simple, obvious design doesn't match my
# complicated, hairy expectations for real-world version numbers. It
# would be a snap to fix the test suite to say, "Yep, LooseVersion does
# the Right Thing" (ie. the code matches the conception). But I'd rather
# have a conception that matches common notions about version numbers.
class LooseVersion (Version):
    """Version numbering for anarchists and software realists.

    Implements the standard interface for version number classes as
    described above.  A version number consists of a series of numbers,
    separated by either periods or strings of letters.  When comparing
    version numbers, the numeric components will be compared
    numerically, and the alphabetic components lexically.  The following
    are all valid version numbers, in no particular order:

        1.5.1
        1.5.2b2
        161
        3.10a
        8.02
        3.4j
        1996.07.12
        3.2.pl0
        3.1.1.6
        2g6
        11g
        0.960923
        2.2beta29
        1.13++
        5.5.kw
        2.0b1pl0

    In fact, there is no such thing as an invalid version number under
    this scheme; the rules for comparison are simple and predictable,
    but may not always give the results you want (for some definition
    of "want").
    """

    # Splits "1.5.2b2" into runs of digits, runs of letters, and dots.
    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

    def __init__ (self, vstring=None):
        if vstring:
            self.parse(vstring)

    def parse (self, vstring):
        # I've given up on thinking I can reconstruct the version string
        # from the parsed tuple -- so I just store the string here for
        # use by __str__
        self.vstring = vstring
        # Drop the '.' separators and empty strings from the regex split.
        # NOTE: relies on Python 2 filter() returning a list.
        components = filter(lambda x: x and x != '.',
                            self.component_re.split(vstring))
        # Convert purely numeric components to ints so they compare
        # numerically; letter runs stay strings and compare lexically.
        for i in range(len(components)):
            try:
                components[i] = int(components[i])
            except ValueError:
                pass

        self.version = components

    def __str__ (self):
        return self.vstring

    def __repr__ (self):
        return "LooseVersion ('%s')" % str(self)

    def __cmp__ (self, other):
        if isinstance(other, StringType):
            other = LooseVersion(other)

        # Plain tuple/list comparison does all the work.
        return cmp(self.version, other.version)
| apache-2.0 |
therealAJ/torch | vision/analysis.py | 1 | 1069 | import config
import httplib
import urllib
import base64
import sys
import requests
# #### HTTP Headers + Encoding the URL
def vision(filepath):
    """POST the image at *filepath* to the Microsoft Computer Vision
    v1.0 "analyze" endpoint and return the raw JSON response body.

    Returns the response bytes on success; returns None (implicitly)
    after printing the error if the request fails.
    """
    # Read the image once and close the handle promptly (the original
    # leaked the file object returned by open()).
    f = open(filepath, 'rb')
    try:
        body = f.read()
    finally:
        f.close()
    headers = {
        # Raw image bytes are posted, hence octet-stream (not JSON).
        'Content-Type': 'application/octet-stream',
        'Ocp-Apim-Subscription-Key': config.api_key
    }
    params = urllib.urlencode({
        ## I will tweak these lolssss
        'visualFeatures': 'Categories,Tags,Description',
        'language': 'en'
    })
    # #### Image URL and API Call
    # Alternative: analyse a remote image by sending a JSON body such as
    # body = "{'url': 'http://data.whicdn.com/images/21298747/thumb.jpg'}"
    try:
        conn = httplib.HTTPSConnection('westus.api.cognitive.microsoft.com')
        conn.request("POST", "/vision/v1.0/analyze?%s" % params, body, headers)
        response = conn.getresponse()
        data = response.read()
        print(data)
        conn.close()
        return data
    except Exception as e:
        # Fixed: the original formatted e.message twice under an "Errno"
        # label; format the exception itself instead.
        print("[Error] vision request failed: {0}".format(e))
# For DEMO purposes
# NOTE(review): this issues a live API call at import time; consider
# guarding it with `if __name__ == '__main__':`.
vision('demo.jpg')
| mit |
popazerty/obh-test1 | lib/python/Screens/InfoBarGenerics.py | 4 | 114010 | from ChannelSelection import ChannelSelection, BouquetSelector, SilentBouquetSelector
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.ActionMap import NumberActionMap
from Components.Harddisk import harddiskmanager
from Components.Input import Input
from Components.Label import Label
from Components.MovieList import AUDIO_EXTENSIONS, MOVIE_EXTENSIONS, DVD_EXTENSIONS
from Components.PluginComponent import plugins
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Sources.Boolean import Boolean
from Components.config import config, ConfigBoolean, ConfigClock, ConfigText
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, ConfigSelection
from Components.VolumeControl import VolumeControl
from Components.Sources.StaticText import StaticText
from EpgSelection import EPGSelection
from Plugins.Plugin import PluginDescriptor
from Screen import Screen
from Screens import ScreenSaver
from Screens import Standby
from Screens.ChoiceBox import ChoiceBox
from Screens.Dish import Dish
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.MinuteInput import MinuteInput
from Screens.TimerSelection import TimerSelection
from Screens.PictureInPicture import PictureInPicture
import Screens.Standby
from Screens.SubtitleDisplay import SubtitleDisplay
from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive
from Screens.TimeDateInput import TimeDateInput
from Screens.UnhandledKey import UnhandledKey
from ServiceReference import ServiceReference, isPlayableForCur
from Tools import Notifications, ASCIItranslit
from Tools.Directories import fileExists, getRecordingFilename, moveFiles
from enigma import eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, \
iPlayableService, eServiceReference, eEPGCache, eActionMap
from time import time, localtime, strftime
import os
from bisect import insort
from sys import maxint
from RecordTimer import RecordTimerEntry, RecordTimer, findSafeRecordPath
# hack alert!
from Menu import MainMenu, mdom
def isStandardInfoBar(self):
    """Return True when *self* is the plain "InfoBar" screen itself
    (comparison is by exact class name, so subclasses do not match)."""
    screen_name = self.__class__.__name__
    return screen_name == "InfoBar"
def setResumePoint(session):
    """Record the current play position of the running service so it can
    be resumed later.  Keeps at most ~50 entries and persists the cache
    to disk at most once per hour."""
    global resumePointCache, resumePointCacheLast
    service = session.nav.getCurrentService()
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    if (service is not None) and (ref is not None): # and (ref.type != 1):
        # ref type 1 has its own memory...
        seek = service.seek()
        if seek:
            pos = seek.getPlayPosition()
            if not pos[0]:
                key = ref.toString()
                lru = int(time())
                l = seek.getLength()
                if l:
                    l = l[1]
                else:
                    l = None
                # Entry layout: [last-use timestamp, position, length].
                resumePointCache[key] = [lru, pos[1], l]
                if len(resumePointCache) > 50:
                    # Evict the least recently used entry.  The previous
                    # code compared each timestamp against the *current*
                    # time, which matched nearly every entry and so
                    # deleted an arbitrary one instead of the oldest.
                    candidate = key
                    oldest = lru
                    for k, v in resumePointCache.items():
                        if v[0] < oldest:
                            oldest = v[0]
                            candidate = k
                    del resumePointCache[candidate]
                if lru - resumePointCacheLast > 3600:
                    saveResumePoints()
def delResumePoint(ref):
    """Drop the stored resume point for *ref* (no-op if absent) and
    persist the cache if the last save was more than an hour ago."""
    global resumePointCache, resumePointCacheLast
    # pop() with a default replaces the original try/del/except KeyError.
    resumePointCache.pop(ref.toString(), None)
    if int(time()) - resumePointCacheLast > 3600:
        saveResumePoints()
def getResumePoint(session):
    """Return the stored resume position for the currently playing
    service, refreshing its LRU timestamp; None when nothing is stored
    (or for ref type 1, which keeps its own memory)."""
    global resumePointCache
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    if ref is None or ref.type == 1:
        return None
    try:
        entry = resumePointCache[ref.toString()]
    except KeyError:
        return None
    entry[0] = int(time())  # update LRU timestamp
    return entry[1]
def saveResumePoints():
global resumePointCache, resumePointCacheLast
import cPickle
try:
f = open('/home/root/resumepoints.pkl', 'wb')
cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL)
except Exception, ex:
print "[InfoBar] Failed to write resumepoints:", ex
resumePointCacheLast = int(time())
def loadResumePoints():
import cPickle
try:
return cPickle.load(open('/home/root/resumepoints.pkl', 'rb'))
except Exception, ex:
print "[InfoBar] Failed to load resumepoints:", ex
return {}
# Module-level resume-point cache, loaded once at import time.
resumePointCache = loadResumePoints()
# Timestamp of the last on-disk save; used to rate-limit writes.
resumePointCacheLast = int(time())
class InfoBarDish:
    """Mixin that instantiates the dish-movement dialog (the overlay
    shown while a motorized dish is turning)."""
    def __init__(self):
        self.dishDialog = self.session.instantiateDialog(Dish)
class InfoBarUnhandledKey:
    """Mixin that briefly shows an on-screen symbol when a key press was
    not consumed by any action map."""
    def __init__(self):
        self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey)
        self.hideUnhandledKeySymbolTimer = eTimer()
        self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide)
        self.checkUnusedTimer = eTimer()
        self.checkUnusedTimer.callback.append(self.checkUnused)
        self.onLayoutFinish.append(self.unhandledKeyDialog.hide)
        # Bracket every other action map: actionA at the highest
        # priority sees every key event first; actionB at the lowest
        # priority only fires when nothing else handled the key.
        eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio
        eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio
        # Bit masks (indexed by event flag) of events seen (flags) vs.
        # events seen but unhandled (uflags) for the current key press.
        self.flags = (1<<1)
        self.uflags = 0

    #this function is called on every keypress!
    def actionA(self, key, flag):
        self.unhandledKeyDialog.hide()
        if flag != 4:
            if self.flags & (1<<1):
                # Previous sequence ended with 'break' (flag 1):
                # start tracking a fresh key press.
                self.flags = self.uflags = 0
            self.flags |= (1<<flag)
            if flag == 1: # break
                self.checkUnusedTimer.start(0, True)
        return 0

    #this function is only called when no other action has handled this key
    def actionB(self, key, flag):
        if flag != 4:
            self.uflags |= (1<<flag)

    def checkUnused(self):
        # Every seen event was also unhandled -> show the symbol for 2s.
        if self.flags == self.uflags:
            self.unhandledKeyDialog.show()
            self.hideUnhandledKeySymbolTimer.start(2000, True)
class InfoBarScreenSaver:
    """Mixin that starts the screensaver after a configurable idle time
    (only while paused, or for radio / audio-only playback)."""
    def __init__(self):
        self.onExecBegin.append(self.__onExecBegin)
        self.onExecEnd.append(self.__onExecEnd)
        self.screenSaverTimer = eTimer()
        self.screenSaverTimer.callback.append(self.screensaverTimeout)
        self.screensaver = self.session.instantiateDialog(ScreenSaver.Screensaver)
        self.onLayoutFinish.append(self.__layoutFinished)

    def __layoutFinished(self):
        self.screensaver.hide()

    def __onExecBegin(self):
        self.ScreenSaverTimerStart()

    def __onExecEnd(self):
        if self.screensaver.shown:
            self.screensaver.hide()
            eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
        self.screenSaverTimer.stop()

    def ScreenSaverTimerStart(self):
        # Configured idle delay in seconds; 0 disables the saver.
        time = int(config.usage.screen_saver.value)
        # self.seekstate is provided by a seek mixin elsewhere in this
        # file; presumably seekstate[0] is set while paused -- TODO
        # confirm against InfoBarSeek.
        flag = self.seekstate[0]
        if not flag:
            ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            if ref and not (hasattr(self.session, "pipshown") and self.session.pipshown):
                ref = ref.toString().split(":")
                # Also allow the saver for radio services (type "2") and
                # for audio files (by filename extension).
                flag = ref[2] == "2" or os.path.splitext(ref[10])[1].lower() in AUDIO_EXTENSIONS
        if time and flag:
            self.screenSaverTimer.startLongTimer(time)
        else:
            self.screenSaverTimer.stop()

    def screensaverTimeout(self):
        if self.execing and not Standby.inStandby and not Standby.inTryQuitMainloop:
            self.hide()
            if hasattr(self, "pvrStateDialog"):
                self.pvrStateDialog.hide()
            self.screensaver.show()
            # Grab all keys at top priority so the next press only
            # dismisses the saver instead of performing its action.
            eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressScreenSaver)

    def keypressScreenSaver(self, key, flag):
        if flag:
            self.screensaver.hide()
            self.show()
            self.ScreenSaverTimerStart()
            eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
class SecondInfoBar(Screen):
    """Extended (second) infobar overlay screen."""
    def __init__(self, session):
        Screen.__init__(self, session)
        # None -> skin is presumably resolved externally by screen name
        # rather than set inline here; verify against the skin loader.
        self.skin = None
class InfoBarShowHide(InfoBarScreenSaver):
    """ InfoBar show/hide control, accepts toggleShow and hide actions, might start
    fancy animations. """
    # Visibility state machine values.
    STATE_HIDDEN = 0
    STATE_HIDING = 1
    STATE_SHOWING = 2
    STATE_SHOWN = 3

    def __init__(self):
        self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] ,
            {
                "toggleShow": self.okButtonCheck,
                "hide": self.keyHide,
            }, 1) # lower prio to make it possible to override ok and cancel..
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.serviceStarted,
            })
        InfoBarScreenSaver.__init__(self)
        self.__state = self.STATE_SHOWN
        # __locked counts lockShow()/unlockShow() nesting; while > 0 the
        # infobar stays visible and the hide timer is suppressed.
        self.__locked = 0
        self.hideTimer = eTimer()
        self.hideTimer.callback.append(self.doTimerHide)
        self.hideTimer.start(5000, True)
        self.onShow.append(self.__onShow)
        self.onHide.append(self.__onHide)
        self.onShowHideNotifiers = []
        # Only the standard InfoBar gets a second infobar overlay.
        self.secondInfoBarScreen = ""
        if isStandardInfoBar(self):
            self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar)
            self.secondInfoBarScreen.show()
        self.onLayoutFinish.append(self.__layoutFinished)

    def __layoutFinished(self):
        if self.secondInfoBarScreen:
            self.secondInfoBarScreen.hide()

    def __onShow(self):
        self.__state = self.STATE_SHOWN
        for x in self.onShowHideNotifiers:
            x(True)
        self.startHideTimer()

    def __onHide(self):
        self.__state = self.STATE_HIDDEN
        if self.secondInfoBarScreen:
            self.secondInfoBarScreen.hide()
        for x in self.onShowHideNotifiers:
            x(False)

    def keyHide(self):
        # EXIT while hidden may offer to disable picture-in-picture,
        # depending on config.usage.pip_hideOnExit.
        if self.__state == self.STATE_HIDDEN and self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value:
            if config.usage.pip_hideOnExit.value == "popup":
                self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
            else:
                self.hidePipOnExitCallback(True)
        elif config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
            self.toggleShow()
        elif self.__state == self.STATE_SHOWN:
            self.hide()

    def hidePipOnExitCallback(self, answer):
        if answer == True:
            # showPiP() toggles: with PiP already shown it closes it.
            self.showPiP()

    def connectShowHideNotifier(self, fnc):
        if not fnc in self.onShowHideNotifiers:
            self.onShowHideNotifiers.append(fnc)

    def disconnectShowHideNotifier(self, fnc):
        if fnc in self.onShowHideNotifiers:
            self.onShowHideNotifiers.remove(fnc)

    def serviceStarted(self):
        if self.execing:
            if config.usage.show_infobar_on_zap.value:
                self.doShow()

    def startHideTimer(self):
        if self.__state == self.STATE_SHOWN and not self.__locked:
            self.hideTimer.stop()
            # The timeout differs for the first and second infobar.
            if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                idx = config.usage.show_second_infobar.index - 1
            else:
                idx = config.usage.infobar_timeout.index
            if idx:
                self.hideTimer.startLongTimer(idx)

    def doShow(self):
        self.show()
        self.startHideTimer()

    def doTimerHide(self):
        self.hideTimer.stop()
        if self.__state == self.STATE_SHOWN:
            self.hide()

    def okButtonCheck(self):
        if config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
            self.openServiceList()
        else:
            self.toggleShow()

    def toggleShow(self):
        # Cycle: hidden -> first infobar -> second infobar -> hidden.
        if self.__state == self.STATE_HIDDEN:
            self.showFirstInfoBar()
        else:
            self.showSecondInfoBar()

    def showSecondInfoBar(self):
        if isStandardInfoBar(self) and config.usage.show_second_infobar.value == "EPG":
            # "EPG" mode replaces the second infobar with the EPG screen
            # (unless a hotkey override consumed the "info" key).
            if not(hasattr(self, "hotkeyGlobal") and self.hotkeyGlobal("info") != 0):
                self.showDefaultEPG()
        elif self.secondInfoBarScreen and config.usage.show_second_infobar.value and not self.secondInfoBarScreen.shown:
            self.show()
            self.secondInfoBarScreen.show()
            self.startHideTimer()
        else:
            self.hide()
            self.hideTimer.stop()

    def showFirstInfoBar(self):
        if self.__state == self.STATE_HIDDEN or self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen and self.secondInfoBarScreen.hide()
            self.show()
        else:
            self.hide()
            self.hideTimer.stop()

    def lockShow(self):
        # Keep the infobar visible until a matching unlockShow().
        self.__locked = self.__locked + 1
        if self.execing:
            self.show()
            self.hideTimer.stop()

    def unlockShow(self):
        self.__locked = self.__locked - 1
        if self.execing:
            self.startHideTimer()
class BufferIndicator(Screen):
    """Small overlay showing stream buffering progress in percent."""
    def __init__(self, session):
        Screen.__init__(self, session)
        self["status"] = Label()
        # Only display between service start and actual playback start.
        self.mayShow = False
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evBuffering: self.bufferChanged,
                iPlayableService.evStart: self.__evStart,
                iPlayableService.evGstreamerPlayStarted: self.__evGstreamerPlayStarted,
            })

    def bufferChanged(self):
        if self.mayShow:
            service = self.session.nav.getCurrentService()
            info = service and service.info()
            if info:
                value = info.getInfo(iServiceInformation.sBuffer)
                # 100% means buffering finished; nothing to show then.
                if value and value != 100:
                    self["status"].setText(_("Buffering %d%%") % value)
                    if not self.shown:
                        self.show()

    def __evStart(self):
        self.mayShow = True
        self.hide()

    def __evGstreamerPlayStarted(self):
        self.mayShow = False
        self.hide()
class InfoBarBuffer():
    """Mixin that attaches the BufferIndicator overlay to the infobar."""
    def __init__(self):
        self.bufferScreen = self.session.instantiateDialog(BufferIndicator)
        self.bufferScreen.hide()
class NumberZap(Screen):
    """Dialog for zapping directly to a typed channel number.

    Digits extend the number (with a short per-digit timeout); the full
    timeout or OK zaps; blue toggles first-bouquet-only search."""
    def quit(self):
        self.Timer.stop()
        self.close()

    def keyOK(self):
        self.Timer.stop()
        # Hand the resolved service/bouquet back to the caller.
        self.close(self.service, self.bouquet)

    def handleServiceName(self):
        # Resolve the currently typed number to a service and show its
        # name (also mirrored on the front-display summary screen).
        if self.searchNumber:
            self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
            self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
            if not self.startBouquet:
                self.startBouquet = self.bouquet

    def keyBlue(self):
        self.Timer.start(3000, True)
        if self.searchNumber:
            # Toggle between searching only the first bouquet and all.
            if self.startBouquet == self.bouquet:
                self.service, self.bouquet = self.searchNumber(int(self["number"].getText()), firstBouquetOnly = True)
            else:
                self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
            self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()

    def keyNumberGlobal(self, number):
        # Each extra digit restarts a short 1s timeout; five digits is
        # the maximum, zap immediately.
        self.Timer.start(1000, True)
        self.numberString = self.numberString + str(number)
        self["number"].text = self["number_summary"].text = self.numberString
        self.handleServiceName()
        if len(self.numberString) >= 5:
            self.keyOK()

    def __init__(self, session, number, searchNumberFunction = None):
        Screen.__init__(self, session)
        self.numberString = str(number)
        self.searchNumber = searchNumberFunction
        self.startBouquet = None
        self["channel"] = Label(_("Channel:"))
        self["number"] = Label(self.numberString)
        self["servicename"] = Label()
        self["channel_summary"] = StaticText(_("Channel:"))
        self["number_summary"] = StaticText(self.numberString)
        self["servicename_summary"] = StaticText()
        self.handleServiceName()
        self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ],
            {
                "cancel": self.quit,
                "ok": self.keyOK,
                "blue": self.keyBlue,
                "1": self.keyNumberGlobal,
                "2": self.keyNumberGlobal,
                "3": self.keyNumberGlobal,
                "4": self.keyNumberGlobal,
                "5": self.keyNumberGlobal,
                "6": self.keyNumberGlobal,
                "7": self.keyNumberGlobal,
                "8": self.keyNumberGlobal,
                "9": self.keyNumberGlobal,
                "0": self.keyNumberGlobal
            })
        # Auto-confirm after 3s of inactivity.
        self.Timer = eTimer()
        self.Timer.callback.append(self.keyOK)
        self.Timer.start(3000, True)
class InfoBarNumberZap:
    """ Handles an initial number for NumberZapping """
    def __init__(self):
        self["NumberActions"] = NumberActionMap( [ "NumberActions"],
            {
                "1": self.keyNumberGlobal,
                "2": self.keyNumberGlobal,
                "3": self.keyNumberGlobal,
                "4": self.keyNumberGlobal,
                "5": self.keyNumberGlobal,
                "6": self.keyNumberGlobal,
                "7": self.keyNumberGlobal,
                "8": self.keyNumberGlobal,
                "9": self.keyNumberGlobal,
                "0": self.keyNumberGlobal,
            })

    def keyNumberGlobal(self, number):
        if number == 0:
            # 0 either goes to the PiP handler or recalls the previous
            # service from the zap history.
            if isinstance(self, InfoBarPiP) and self.pipHandles0Action():
                self.pipDoHandle0Action()
            elif len(self.servicelist.history) > 1:
                self.checkTimeshiftRunning(self.recallPrevService)
        else:
            # Ignore digit zapping while timeshift is active.
            if self.has_key("TimeshiftActions") and self.timeshiftEnabled():
                ts = self.getTimeshift()
                if ts and ts.isTimeshiftActive():
                    return
            self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber)

    def recallPrevService(self, reply):
        if reply:
            self.servicelist.recallPrevService()

    def numberEntered(self, service = None, bouquet = None):
        # Callback from the NumberZap dialog.
        if service:
            self.selectAndStartService(service, bouquet)

    def searchNumberHelper(self, serviceHandler, num, bouquet):
        # Linear scan of *bouquet* for the service with channel number
        # *num*; returns None when not found.
        servicelist = serviceHandler.list(bouquet)
        if servicelist:
            serviceIterator = servicelist.getNext()
            while serviceIterator.valid():
                if num == serviceIterator.getChannelNum():
                    return serviceIterator
                serviceIterator = servicelist.getNext()
        return None

    def searchNumber(self, number, firstBouquetOnly=False, bouquet=None):
        """Resolve a channel number to (service, bouquet), searching the
        current bouquet first and then (optionally) all bouquets."""
        bouquet = bouquet or self.servicelist.getRoot()
        service = None
        serviceHandler = eServiceCenter.getInstance()
        if not firstBouquetOnly:
            service = self.searchNumberHelper(serviceHandler, number, bouquet)
        if config.usage.multibouquet.value and not service:
            bouquet = self.servicelist.bouquet_root
            bouquetlist = serviceHandler.list(bouquet)
            if bouquetlist:
                bouquet = bouquetlist.getNext()
                while bouquet.valid():
                    if bouquet.flags & eServiceReference.isDirectory:
                        service = self.searchNumberHelper(serviceHandler, number, bouquet)
                        if service:
                            # Reject hits that are plain markers or
                            # directories (numbered markers are allowed).
                            playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker)
                            if not playable:
                                service = None
                            break
                        if config.usage.alternative_number_mode.value or firstBouquetOnly:
                            break
                    bouquet = bouquetlist.getNext()
        return service, bouquet

    def selectAndStartService(self, service, bouquet):
        if service and not service.flags & eServiceReference.isMarker:
            if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
                self.servicelist.clearPath()
                if self.servicelist.bouquet_root != bouquet:
                    self.servicelist.enterPath(self.servicelist.bouquet_root)
                self.servicelist.enterPath(bouquet)
            self.servicelist.setCurrentSelection(service) #select the service in servicelist
            self.servicelist.zap(enable_pipzap = True)
            self.servicelist.correctChannelNumber()
            self.servicelist.startRoot = None

    def zapToNumber(self, number):
        service, bouquet = self.searchNumber(number)
        self.selectAndStartService(service, bouquet)
# First-start flag: makes InfoBarChannelSelection open the channel list
# once on the very first run.
config.misc.initialchannelselection = ConfigBoolean(default = True)
class InfoBarChannelSelection:
    """ ChannelSelection - handles the channelSelection dialog and the initial
    channelChange actions which open the channelSelection dialog """
    def __init__(self):
        #instantiate forever
        self.servicelist = self.session.instantiateDialog(ChannelSelection)

        if config.misc.initialchannelselection.value:
            self.onShown.append(self.firstRun)

        # The arrow/channel keys are all indirected through *Check
        # methods because their meaning depends on user configuration
        # (old-style zap controls, volume-on-up/down, ...).
        self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection",
            {
                "keyUp": (self.keyUpCheck, self.getKeyUpHelptext),
                "keyDown": (self.keyDownCheck, self.getKeyDownHelpText),
                "keyLeft": (self.keyLeftCheck, self.getKeyLeftHelptext),
                "keyRight": (self.keyRightCheck, self.getKeyRightHelptext),
                "historyBack": (self.historyBack, _("Switch to previous channel in history")),
                "historyNext": (self.historyNext, _("Switch to next channel in history")),
                "keyChannelUp": (self.keyChannelUpCheck, self.getKeyChannelUpHelptext),
                "keyChannelDown": (self.keyChannelDownCheck, self.getKeyChannelDownHelptext),
            })

    def showTvChannelList(self, zap=False):
        self.servicelist.setModeTv()
        if zap:
            self.servicelist.zap()

    def showRadioChannelList(self, zap=False):
        self.servicelist.setModeRadio()
        if zap:
            self.servicelist.zap()

    def firstRun(self):
        # One-shot: open the channel list on the very first start.
        self.onShown.remove(self.firstRun)
        config.misc.initialchannelselection.value = False
        config.misc.initialchannelselection.save()
        self.switchChannelDown()

    def historyBack(self):
        self.checkTimeshiftRunning(self.historyBackCheckTimeshiftCallback)

    def historyBackCheckTimeshiftCallback(self, answer):
        if answer:
            self.servicelist.historyBack()

    def historyNext(self):
        self.checkTimeshiftRunning(self.historyNextCheckTimeshiftCallback)

    def historyNextCheckTimeshiftCallback(self, answer):
        if answer:
            self.servicelist.historyNext()

    def keyUpCheck(self):
        if config.usage.oldstyle_zap_controls.value:
            self.zapDown()
        elif config.usage.volume_instead_of_channelselection.value:
            VolumeControl.instance and VolumeControl.instance.volUp()
        else:
            self.switchChannelUp()

    def keyDownCheck(self):
        if config.usage.oldstyle_zap_controls.value:
            self.zapUp()
        elif config.usage.volume_instead_of_channelselection.value:
            VolumeControl.instance and VolumeControl.instance.volDown()
        else:
            self.switchChannelDown()

    def keyLeftCheck(self):
        if config.usage.oldstyle_zap_controls.value:
            if config.usage.volume_instead_of_channelselection.value:
                VolumeControl.instance and VolumeControl.instance.volDown()
            else:
                self.switchChannelUp()
        else:
            self.zapUp()

    def keyRightCheck(self):
        if config.usage.oldstyle_zap_controls.value:
            if config.usage.volume_instead_of_channelselection.value:
                VolumeControl.instance and VolumeControl.instance.volUp()
            else:
                self.switchChannelDown()
        else:
            self.zapDown()

    def keyChannelUpCheck(self):
        if config.usage.zap_with_ch_buttons.value:
            self.zapDown()
        else:
            self.openServiceList()

    def keyChannelDownCheck(self):
        if config.usage.zap_with_ch_buttons.value:
            self.zapUp()
        else:
            self.openServiceList()

    # The getKey*Helptext methods mirror the keyUp/Down/Left/Right
    # dispatch above so the help screen describes the configured action.
    def getKeyUpHelptext(self):
        if config.usage.oldstyle_zap_controls.value:
            value = _("Switch to next channel")
        else:
            if config.usage.volume_instead_of_channelselection.value:
                value = _("Volume up")
            else:
                value = _("Open service list")
                if not "keep" in config.usage.servicelist_cursor_behavior.value:
                    value += " " + _("and select previous channel")
        return value

    def getKeyDownHelpText(self):
        if config.usage.oldstyle_zap_controls.value:
            value = _("Switch to previous channel")
        else:
            if config.usage.volume_instead_of_channelselection.value:
                value = _("Volume down")
            else:
                value = _("Open service list")
                if not "keep" in config.usage.servicelist_cursor_behavior.value:
                    value += " " + _("and select next channel")
        return value

    def getKeyLeftHelptext(self):
        if config.usage.oldstyle_zap_controls.value:
            if config.usage.volume_instead_of_channelselection.value:
                value = _("Volume down")
            else:
                value = _("Open service list")
                if not "keep" in config.usage.servicelist_cursor_behavior.value:
                    value += " " + _("and select previous channel")
        else:
            value = _("Switch to previous channel")
        return value

    def getKeyRightHelptext(self):
        if config.usage.oldstyle_zap_controls.value:
            if config.usage.volume_instead_of_channelselection.value:
                value = _("Volume up")
            else:
                value = _("Open service list")
                if not "keep" in config.usage.servicelist_cursor_behavior.value:
                    value += " " + _("and select next channel")
        else:
            value = _("Switch to next channel")
        return value

    def getKeyChannelUpHelptext(self):
        return config.usage.zap_with_ch_buttons.value and _("Switch to next channel") or _("Open service list")

    def getKeyChannelDownHelptext(self):
        return config.usage.zap_with_ch_buttons.value and _("Switch to previous channel") or _("Open service list")

    def switchChannelUp(self):
        if "keep" not in config.usage.servicelist_cursor_behavior.value:
            self.servicelist.moveUp()
        self.session.execDialog(self.servicelist)

    def switchChannelDown(self):
        if "keep" not in config.usage.servicelist_cursor_behavior.value:
            self.servicelist.moveDown()
        self.session.execDialog(self.servicelist)

    def zapUp(self):
        # Move up the list until a playable service (or the starting
        # one again) is found, then zap to it.
        if self.servicelist.inBouquet():
            prev = self.servicelist.getCurrentSelection()
            if prev:
                prev = prev.toString()
                while True:
                    if config.usage.quickzap_bouquet_change.value:
                        if self.servicelist.atBegin():
                            self.servicelist.prevBouquet()
                    self.servicelist.moveUp()
                    cur = self.servicelist.getCurrentSelection()
                    if cur:
                        if self.servicelist.dopipzap:
                            isPlayable = self.session.pip.isPlayableForPipService(cur)
                        else:
                            isPlayable = isPlayableForCur(cur)
                    if cur and (cur.toString() == prev or isPlayable):
                        break
        else:
            self.servicelist.moveUp()
        self.servicelist.zap(enable_pipzap = True)

    def zapDown(self):
        # Mirror of zapUp(), moving down / to the next bouquet.
        if self.servicelist.inBouquet():
            prev = self.servicelist.getCurrentSelection()
            if prev:
                prev = prev.toString()
                while True:
                    if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
                        self.servicelist.nextBouquet()
                    else:
                        self.servicelist.moveDown()
                    cur = self.servicelist.getCurrentSelection()
                    if cur:
                        if self.servicelist.dopipzap:
                            isPlayable = self.session.pip.isPlayableForPipService(cur)
                        else:
                            isPlayable = isPlayableForCur(cur)
                    if cur and (cur.toString() == prev or isPlayable):
                        break
        else:
            self.servicelist.moveDown()
        self.servicelist.zap(enable_pipzap = True)

    def openFavouritesList(self):
        self.servicelist.showFavourites()
        self.openServiceList()

    def openServiceList(self):
        self.session.execDialog(self.servicelist)
class InfoBarMenu:
    """ Handles a menu action, to open the (main) menu """
    def __init__(self):
        self["MenuActions"] = HelpableActionMap(self, "InfobarMenuActions",
            {
                "mainMenu": (self.mainMenu, _("Enter main menu...")),
            })
        self.session.infobar = None

    def mainMenu(self):
        print "loading mainmenu XML..."
        menu = mdom.getroot()
        assert menu.tag == "menu", "root element in menu must be 'menu'!"
        self.session.infobar = self
        # so we can access the currently active infobar from screens opened from within the mainmenu
        # at the moment used from the SubserviceSelection
        self.session.openWithCallback(self.mainMenuClosed, MainMenu, menu)

    def mainMenuClosed(self, *val):
        # Clear the back-reference set in mainMenu().
        self.session.infobar = None
class InfoBarSimpleEventView:
	""" Opens the Eventview for now/next """
	def __init__(self):
		self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
			{
				"showEventInfo": (self.openEventView, _("Show event details")),
				"showEventInfoSingleEPG": (self.openEventView, _("Show event details")),
				"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
			})
	def showEventInfoWhenNotVisible(self):
		# With the infobar visible, open the event details; otherwise just
		# bring up the infobar (returning 1 marks the action as handled).
		if self.shown:
			self.openEventView()
		else:
			self.toggleShow()
			return 1
	def openEventView(self):
		# Collect the now (event 0) and next (event 1) events of the current
		# service and open the simple event viewer on the "now" event.
		epglist = [ ]
		self.epglist = epglist
		service = self.session.nav.getCurrentService()
		ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		# Guard against no service playing: previously service.info() was
		# called unconditionally and raised AttributeError when service was
		# None. This now matches the defensive pattern of InfoBarEPG.getNowNext().
		info = service and service.info()
		ptr = info and info.getEvent(0)
		if ptr:
			epglist.append(ptr)
		ptr = info and info.getEvent(1)
		if ptr:
			epglist.append(ptr)
		if epglist:
			self.session.open(EventViewSimple, epglist[0], ServiceReference(ref), self.eventViewCallback)
	def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
		# Toggle between the "now" and "next" event on every callback.
		epglist = self.epglist
		if len(epglist) > 1:
			epglist[0], epglist[1] = epglist[1], epglist[0]
			setEvent(epglist[0])
class SimpleServicelist:
	"""Minimal cursor over a list of service objects (each with a .ref).

	Tracks the selected index in self.current; next/prev wrap around the
	ends of the list.
	"""
	def __init__(self, services):
		self.services = services
		self.length = len(services)
		self.current = 0
	def selectService(self, service):
		"""Point the cursor at *service*; return True when found.

		On an empty list the cursor becomes -1; when the service is not in
		the list the cursor ends up just past the last entry (== length).
		"""
		if not self.length:
			self.current = -1
			return False
		for index, candidate in enumerate(self.services):
			self.current = index
			if candidate.ref == service:
				return True
		self.current = self.length
		return False
	def nextService(self):
		# Advance the cursor, wrapping to the first entry at the end.
		if not self.length:
			return
		ahead = self.current + 1
		self.current = ahead if ahead < self.length else 0
	def prevService(self):
		# Step the cursor back, wrapping to the last entry at the start.
		if not self.length:
			return
		behind = self.current - 1
		self.current = behind if behind > -1 else self.length - 1
	def currentService(self):
		# Return the selected service, or None when the cursor is invalid.
		if self.length and self.current < self.length:
			return self.services[self.current]
		return None
class InfoBarEPG:
	""" EPG - Opens an EPG list when the showEPGList action fires """
	def __init__(self):
		# True while the event view shows live now/next data that must be
		# refreshed on evUpdatedEventInfo (see __evEventInfoChanged).
		self.is_now_next = False
		# Stack of EPG-related dialogs currently open (see closed()).
		self.dlg_stack = [ ]
		self.bouquetSel = None
		self.eventView = None
		self.epglist = []
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
			})
		self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
			{
				"showEventInfo": (self.showDefaultEPG, _("Show EPG...")),
				"showEventInfoSingleEPG": (self.showSingleEPG, _("Show single service EPG")),
				"showEventInfoMultiEPG": (self.showMultiEPG, _("Show multi channel EPG")),
				"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
			})
	def getEPGPluginList(self, getAll=False):
		# Collect EVENTINFO plugins that do not expect a 'selectedevent'
		# argument, then append the built-in EPG entries (only for the first
		# infobar unless getAll is set).
		pluginlist = [(p.name, boundFunction(self.runPlugin, p), p.path) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO) \
			if 'selectedevent' not in p.__call__.func_code.co_varnames] or []
		from Components.ServiceEventTracker import InfoBarCount
		if getAll or InfoBarCount == 1:
			pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG, "current_channel"))
			pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG, "multi_epg"))
			pluginlist.append((_("Current event EPG"), self.openEventView, "event_epg"))
		return pluginlist
	def showEventInfoWhenNotVisible(self):
		# With the infobar visible, open the event view; otherwise just show
		# the infobar (returning 1 marks the action as handled).
		if self.shown:
			self.openEventView()
		else:
			self.toggleShow()
			return 1
	def zapToService(self, service, preview = False, zapback = False):
		# Zap to a service chosen in an EPG screen; supports preview mode and
		# zapping back to the service that was playing before.
		if self.servicelist.startServiceRef is None:
			self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if service is not None:
			if self.servicelist.getRoot() != self.epg_bouquet: #already in correct bouquet?
				self.servicelist.clearPath()
				if self.servicelist.bouquet_root != self.epg_bouquet:
					self.servicelist.enterPath(self.servicelist.bouquet_root)
				self.servicelist.enterPath(self.epg_bouquet)
			self.servicelist.setCurrentSelection(service) #select the service in servicelist
		if not zapback or preview:
			self.servicelist.zap(enable_pipzap = True)
		if (self.servicelist.dopipzap or zapback) and not preview:
			self.servicelist.zapBack()
		if not preview:
			self.servicelist.startServiceRef = None
			self.servicelist.startRoot = None
	def getBouquetServices(self, bouquet):
		# Return the playable services of a bouquet as ServiceReference
		# objects, skipping directories and markers.
		services = [ ]
		servicelist = eServiceCenter.getInstance().list(bouquet)
		if not servicelist is None:
			while True:
				service = servicelist.getNext()
				if not service.valid(): #check if end of list
					break
				if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
					continue
				services.append(ServiceReference(service))
		return services
	def openBouquetEPG(self, bouquet, withCallback=True):
		# Open a multi-service EPG for one bouquet; with withCallback the
		# dialog is tracked on dlg_stack so closed() can unwind it.
		services = self.getBouquetServices(bouquet)
		if services:
			self.epg_bouquet = bouquet
			if withCallback:
				self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, services, self.zapToService, None, self.changeBouquetCB))
			else:
				self.session.open(EPGSelection, services, self.zapToService, None, self.changeBouquetCB)
	def changeBouquetCB(self, direction, epg):
		# Bouquet+/- pressed inside the EPG: move the bouquet selector and
		# feed the new bouquet's services into the open EPG screen.
		if self.bouquetSel:
			if direction > 0:
				self.bouquetSel.down()
			else:
				self.bouquetSel.up()
			bouquet = self.bouquetSel.getCurrent()
			services = self.getBouquetServices(bouquet)
			if services:
				self.epg_bouquet = bouquet
				epg.setServices(services)
	def closed(self, ret=False):
		# A tracked EPG dialog was closed; pop it from the stack and, when it
		# asked for it (ret), propagate the close to the dialog below.
		closedScreen = self.dlg_stack.pop()
		if self.bouquetSel and closedScreen == self.bouquetSel:
			self.bouquetSel = None
		elif self.eventView and closedScreen == self.eventView:
			self.eventView = None
		if ret:
			dlgs=len(self.dlg_stack)
			if dlgs > 0:
				self.dlg_stack[dlgs-1].close(dlgs > 1)
	def openMultiServiceEPG(self, withCallback=True):
		# Open the multi-channel EPG, either asking for a bouquet first or
		# silently using the current one, depending on configuration.
		bouquets = self.servicelist.getBouquetList()
		if bouquets is None:
			cnt = 0
		else:
			cnt = len(bouquets)
		if config.usage.multiepg_ask_bouquet.value:
			self.openMultiServiceEPGAskBouquet(bouquets, cnt, withCallback)
		else:
			self.openMultiServiceEPGSilent(bouquets, cnt, withCallback)
	def openMultiServiceEPGAskBouquet(self, bouquets, cnt, withCallback):
		if cnt > 1: # show bouquet list
			if withCallback:
				self.bouquetSel = self.session.openWithCallback(self.closed, BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
				self.dlg_stack.append(self.bouquetSel)
			else:
				self.bouquetSel = self.session.open(BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
		elif cnt == 1:
			self.openBouquetEPG(bouquets[0][1], withCallback)
	def openMultiServiceEPGSilent(self, bouquets, cnt, withCallback):
		# Without asking: find the bouquet matching the current service list
		# root and open its EPG directly.
		root = self.servicelist.getRoot()
		rootstr = root.toCompareString()
		current = 0
		for bouquet in bouquets:
			if bouquet[1].toCompareString() == rootstr:
				break
			current += 1
		if current >= cnt:
			current = 0
		if cnt > 1: # create bouquet list for bouq+/-
			self.bouquetSel = SilentBouquetSelector(bouquets, True, self.servicelist.getBouquetNumOffset(root))
		if cnt >= 1:
			self.openBouquetEPG(root, withCallback)
	def changeServiceCB(self, direction, epg):
		# Channel+/- inside a single-service EPG: step the service cursor and
		# show the new service's EPG.
		if self.serviceSel:
			if direction > 0:
				self.serviceSel.nextService()
			else:
				self.serviceSel.prevService()
			epg.setService(self.serviceSel.currentService())
	def SingleServiceEPGClosed(self, ret=False):
		self.serviceSel = None
	def openSingleServiceEPG(self):
		# Open the EPG for the currently selected service; when inside a
		# bouquet, also wire up channel+/- switching via SimpleServicelist.
		ref = self.servicelist.getCurrentSelection()
		if ref:
			if self.servicelist.getMutableList(): # bouquet in channellist
				current_path = self.servicelist.getRoot()
				services = self.getBouquetServices(current_path)
				self.serviceSel = SimpleServicelist(services)
				if self.serviceSel.selectService(ref):
					self.epg_bouquet = current_path
					self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref, self.zapToService, serviceChangeCB=self.changeServiceCB)
				else:
					self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref)
			else:
				self.session.open(EPGSelection, ref)
	def runPlugin(self, plugin):
		plugin(session = self.session, servicelist = self.servicelist)
	def showEventInfoPlugins(self):
		# Offer the EVENTINFO plugin list; with no plugins installed, fall
		# straight through to the single-service EPG.
		pluginlist = self.getEPGPluginList()
		if pluginlist:
			self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list=pluginlist, skin_name="EPGExtensionsList", reorderConfig="eventinfo_order")
		else:
			self.openSingleServiceEPG()
	def EventInfoPluginChosen(self, answer):
		if answer is not None:
			answer[1]()
	def openSimilarList(self, eventid, refstr):
		self.session.open(EPGSelection, refstr, None, eventid)
	def getNowNext(self):
		# Refresh self.epglist with the current service's now (event 0) and
		# next (event 1) events; tolerates a missing service or info.
		epglist = [ ]
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		ptr = info and info.getEvent(0)
		if ptr:
			epglist.append(ptr)
		ptr = info and info.getEvent(1)
		if ptr:
			epglist.append(ptr)
		self.epglist = epglist
	def __evEventInfoChanged(self):
		# Event data changed while the now/next event view is the only open
		# dialog: refresh it in place.
		if self.is_now_next and len(self.dlg_stack) == 1:
			self.getNowNext()
			if self.eventView and self.epglist:
				self.eventView.setEvent(self.epglist[0])
	def showDefaultEPG(self):
		self.openEventView()
	def showSingleEPG(self):
		self.openSingleServiceEPG()
	def showMultiEPG(self):
		self.openMultiServiceEPG()
	def openEventView(self):
		# Open the event viewer. On secondary infobars use live now/next data
		# only; on the primary infobar fall back to an EPG cache lookup when
		# the service has no event data, and to the multi EPG as a last resort.
		from Components.ServiceEventTracker import InfoBarCount
		if InfoBarCount > 1:
			epglist = [ ]
			self.epglist = epglist
			service = self.session.nav.getCurrentService()
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			info = service.info()
			ptr=info.getEvent(0)
			if ptr:
				epglist.append(ptr)
			ptr=info.getEvent(1)
			if ptr:
				epglist.append(ptr)
			if epglist:
				self.session.open(EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
		else:
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			self.getNowNext()
			epglist = self.epglist
			if not epglist:
				# No live event info: query the EPG cache for the event at
				# "now" and the one right after it.
				self.is_now_next = False
				epg = eEPGCache.getInstance()
				ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
				if ptr:
					epglist.append(ptr)
					ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
					if ptr:
						epglist.append(ptr)
			else:
				self.is_now_next = True
			if epglist:
				self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
				self.dlg_stack.append(self.eventView)
		if not epglist:
			print "no epg for the service avail.. so we show multiepg instead of eventinfo"
			self.openMultiServiceEPG(False)
	def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
		# Toggle between the "now" and "next" event on every callback.
		epglist = self.epglist
		if len(epglist) > 1:
			tmp = epglist[0]
			epglist[0]=epglist[1]
			epglist[1]=tmp
			setEvent(epglist[0])
class InfoBarRdsDecoder:
	"""provides RDS and Rass support/display"""
	def __init__(self):
		self.rds_display = self.session.instantiateDialog(RdsInfoDisplay)
		self.session.instantiateSummaryDialog(self.rds_display)
		# Holds the open RassInteractive screen, or None while inactive.
		self.rass_interactive = None
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evEnd: self.__serviceStopped,
				iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged
			})
		self["RdsActions"] = ActionMap(["InfobarRdsActions"],
			{
				"startRassInteractive": self.startRassInteractive
			},-1)
		# Only enabled once the service reports Rass interactive capability.
		self["RdsActions"].setEnabled(False)
		self.onLayoutFinish.append(self.rds_display.show)
		self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)
	def RassInteractivePossibilityChanged(self, state):
		self["RdsActions"].setEnabled(state)
	def RassSlidePicChanged(self):
		# A new Rass slide arrived; show it unless the interactive screen is
		# open (which renders slides itself).
		if not self.rass_interactive:
			service = self.session.nav.getCurrentService()
			decoder = service and service.rdsDecoder()
			if decoder:
				decoder.showRassSlidePicture()
	def __serviceStopped(self):
		# Close the interactive screen when the service ends; clear the
		# reference first so RassInteractiveClosed sees it is already gone.
		if self.rass_interactive is not None:
			rass_interactive = self.rass_interactive
			self.rass_interactive = None
			rass_interactive.close()
	def startRassInteractive(self):
		self.rds_display.hide()
		self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)
	def RassInteractiveClosed(self, *val):
		# Restore the normal RDS display after the interactive screen closes.
		if self.rass_interactive is not None:
			self.rass_interactive = None
			self.RassSlidePicChanged()
		self.rds_display.show()
class InfoBarSeek:
	"""handles actions like seeking, pause

	Seek states are 4-tuples (paused, fast-forward-speed, slow-motion-denominator,
	display-string); see makeStateForward/Backward/SlowMotion.
	"""
	SEEK_STATE_PLAY = (0, 0, 0, ">")
	SEEK_STATE_PAUSE = (1, 0, 0, "||")
	SEEK_STATE_EOF = (1, 0, 0, "END")
	def __init__(self, actionmap = "InfobarSeekActions"):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evEOF: self.__evEOF,
				iPlayableService.evSOF: self.__evSOF,
			})
		self.fast_winding_hint_message_showed = False
		# Action map subclass that additionally understands "seek:<pts>" and
		# "seekdef:<key>" pseudo-actions for direct relative seeking.
		class InfoBarSeekActionMap(HelpableActionMap):
			def __init__(self, screen, *args, **kwargs):
				HelpableActionMap.__init__(self, screen, *args, **kwargs)
				self.screen = screen
			def action(self, contexts, action):
				print "action:", action
				if action[:5] == "seek:":
					time = int(action[5:])
					self.screen.doSeekRelative(time * 90000)
					return 1
				elif action[:8] == "seekdef:":
					key = int(action[8:])
					# Map number keys 1/3, 4/6, 7/9 to the three configurable
					# skip distances (middle keys 2/5/8 are no-ops: False).
					time = (-config.seek.selfdefined_13.value, False, config.seek.selfdefined_13.value,
						-config.seek.selfdefined_46.value, False, config.seek.selfdefined_46.value,
						-config.seek.selfdefined_79.value, False, config.seek.selfdefined_79.value)[key-1]
					self.screen.doSeekRelative(time * 90000)
					return 1
				else:
					return HelpableActionMap.action(self, contexts, action)
		# NOTE(review): "Pauze" below looks like a typo for "Pause"; left
		# unchanged here because the string is also a translation key.
		self["SeekActions"] = InfoBarSeekActionMap(self, actionmap,
			{
				"playpauseService": (self.playpauseService, _("Pauze/Continue playback")),
				"pauseService": (self.pauseService, _("Pause playback")),
				"unPauseService": (self.unPauseService, _("Continue playback")),
				"okButton": (self.okButton, _("Continue playback")),
				"seekFwd": (self.seekFwd, _("Seek forward")),
				"seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")),
				"seekBack": (self.seekBack, _("Seek backward")),
				"seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")),
				"jumpPreviousMark": (self.seekPreviousMark, _("Jump to previous marked position")),
				"jumpNextMark": (self.seekNextMark, _("Jump to next marked position")),
			}, prio=-1)
			# give them a little more priority to win over color buttons
		self["SeekActions"].setEnabled(False)
		self.seekstate = self.SEEK_STATE_PLAY
		self.lastseekstate = self.SEEK_STATE_PLAY
		self.onPlayStateChanged = [ ]
		self.lockedBecauseOfSkipping = False
		self.__seekableStatusChanged()
	def makeStateForward(self, n):
		return (0, n, 0, ">> %dx" % n)
	def makeStateBackward(self, n):
		return (0, -n, 0, "<< %dx" % n)
	def makeStateSlowMotion(self, n):
		return (0, 0, n, "/%d" % n)
	def isStateForward(self, state):
		return state[1] > 1
	def isStateBackward(self, state):
		return state[1] < 0
	def isStateSlowMotion(self, state):
		return state[1] == 0 and state[2] > 1
	def getHigher(self, n, lst):
		# Smallest element of lst greater than n, or False.
		for x in lst:
			if x > n:
				return x
		return False
	def getLower(self, n, lst):
		# Largest element of lst smaller than n, or False.
		lst = lst[:]
		lst.reverse()
		for x in lst:
			if x < n:
				return x
		return False
	def showAfterSeek(self):
		if isinstance(self, InfoBarShowHide):
			self.doShow()
	def up(self):
		pass
	def down(self):
		pass
	def getSeek(self):
		# Return the service's seek interface, or None when the current
		# service is absent or not currently seekable.
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		seek = service.seek()
		if seek is None or not seek.isCurrentlySeekable():
			return None
		return seek
	def isSeekable(self):
		# On the standard infobar, seeking is only allowed while timeshift
		# is enabled.
		if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshiftEnabled()):
			return False
		return True
	def __seekableStatusChanged(self):
		# print "seekable status changed!"
		if not self.isSeekable():
			self["SeekActions"].setEnabled(False)
			# print "not seekable, return to play"
			self.setSeekState(self.SEEK_STATE_PLAY)
		else:
			self["SeekActions"].setEnabled(True)
			# print "seekable"
	def __serviceStarted(self):
		self.fast_winding_hint_message_showed = False
		self.setSeekState(self.SEEK_STATE_PLAY)
		self.__seekableStatusChanged()
	def setSeekState(self, state):
		# Apply a seek state to the service's pause interface. The requested
		# state may be downgraded (e.g. to PLAY/PAUSE) when the service does
		# not support fast forward or slow motion.
		service = self.session.nav.getCurrentService()
		if service is None:
			return False
		if not self.isSeekable():
			if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
				state = self.SEEK_STATE_PLAY
		pauseable = service.pause()
		if pauseable is None:
			print "not pauseable."
			state = self.SEEK_STATE_PLAY
		self.seekstate = state
		if pauseable is not None:
			if self.seekstate[0]:
				print "resolved to PAUSE"
				pauseable.pause()
			elif self.seekstate[1]:
				if not pauseable.setFastForward(self.seekstate[1]):
					print "resolved to FAST FORWARD"
				else:
					self.seekstate = self.SEEK_STATE_PLAY
					print "FAST FORWARD not possible: resolved to PLAY"
			elif self.seekstate[2]:
				if not pauseable.setSlowMotion(self.seekstate[2]):
					print "resolved to SLOW MOTION"
				else:
					self.seekstate = self.SEEK_STATE_PAUSE
					print "SLOW MOTION not possible: resolved to PAUSE"
			else:
				print "resolved to PLAY"
				pauseable.unpause()
		# Notify listeners (e.g. InfoBarPVRState) of the state change.
		for c in self.onPlayStateChanged:
			c(self.seekstate)
		self.checkSkipShowHideLock()
		if hasattr(self, "ScreenSaverTimerStart"):
			self.ScreenSaverTimerStart()
		return True
	def playpauseService(self):
		if self.seekstate != self.SEEK_STATE_PLAY:
			self.unPauseService()
		else:
			self.pauseService()
	def okButton(self):
		if self.seekstate == self.SEEK_STATE_PLAY:
			return 0
		elif self.seekstate == self.SEEK_STATE_PAUSE:
			self.pauseService()
		else:
			self.unPauseService()
	def pauseService(self):
		# While already paused, the behaviour of the pause key is configurable:
		# resume, single-frame step, or restore the pre-pause seek state.
		if self.seekstate == self.SEEK_STATE_PAUSE:
			if config.seek.on_pause.value == "play":
				self.unPauseService()
			elif config.seek.on_pause.value == "step":
				self.doSeekRelative(1)
			elif config.seek.on_pause.value == "last":
				self.setSeekState(self.lastseekstate)
				self.lastseekstate = self.SEEK_STATE_PLAY
		else:
			if self.seekstate != self.SEEK_STATE_EOF:
				self.lastseekstate = self.seekstate
			self.setSeekState(self.SEEK_STATE_PAUSE)
	def unPauseService(self):
		print "unpause"
		if self.seekstate == self.SEEK_STATE_PLAY:
			return 0
		self.setSeekState(self.SEEK_STATE_PLAY)
	def doSeek(self, pts):
		# Absolute seek to a 90kHz PTS position.
		seekable = self.getSeek()
		if seekable is None:
			return
		seekable.seekTo(pts)
	def doSeekRelative(self, pts):
		# Relative seek by pts (90kHz units); leaving EOF resumes PAUSE or PLAY.
		seekable = self.getSeek()
		if seekable is None:
			return
		prevstate = self.seekstate
		if self.seekstate == self.SEEK_STATE_EOF:
			if prevstate == self.SEEK_STATE_PAUSE:
				self.setSeekState(self.SEEK_STATE_PAUSE)
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
		if abs(pts) > 100 and config.usage.show_infobar_on_skip.value:
			self.showAfterSeek()
	def seekFwd(self):
		# Step the seek state "forward": speed up FF, slow down rewind, or
		# enter slow motion from pause, following the configured speed tables.
		seek = self.getSeek()
		if seek and not (seek.isCurrentlySeekable() & 2):
			if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
				self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
				self.fast_winding_hint_message_showed = True
				return
			return 0 # trade as unhandled action
		if self.seekstate == self.SEEK_STATE_PLAY:
			self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
		elif self.seekstate == self.SEEK_STATE_PAUSE:
			if len(config.seek.speeds_slowmotion.value):
				self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.value[-1]))
			else:
				self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
		elif self.seekstate == self.SEEK_STATE_EOF:
			pass
		elif self.isStateForward(self.seekstate):
			speed = self.seekstate[1]
			if self.seekstate[2]:
				speed /= self.seekstate[2]
			speed = self.getHigher(speed, config.seek.speeds_forward.value) or config.seek.speeds_forward.value[-1]
			self.setSeekState(self.makeStateForward(speed))
		elif self.isStateBackward(self.seekstate):
			speed = -self.seekstate[1]
			if self.seekstate[2]:
				speed /= self.seekstate[2]
			speed = self.getLower(speed, config.seek.speeds_backward.value)
			if speed:
				self.setSeekState(self.makeStateBackward(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		elif self.isStateSlowMotion(self.seekstate):
			speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.value) or config.seek.speeds_slowmotion.value[0]
			self.setSeekState(self.makeStateSlowMotion(speed))
	def seekBack(self):
		# Mirror of seekFwd(): slow down FF, speed up rewind, step back one
		# frame while paused, etc.
		seek = self.getSeek()
		if seek and not (seek.isCurrentlySeekable() & 2):
			if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
				self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
				self.fast_winding_hint_message_showed = True
				return
			return 0 # trade as unhandled action
		seekstate = self.seekstate
		if seekstate == self.SEEK_STATE_PLAY:
			self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
		elif seekstate == self.SEEK_STATE_EOF:
			self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
			self.doSeekRelative(-6)
		elif seekstate == self.SEEK_STATE_PAUSE:
			self.doSeekRelative(-1)
		elif self.isStateForward(seekstate):
			speed = seekstate[1]
			if seekstate[2]:
				speed /= seekstate[2]
			speed = self.getLower(speed, config.seek.speeds_forward.value)
			if speed:
				self.setSeekState(self.makeStateForward(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		elif self.isStateBackward(seekstate):
			speed = -seekstate[1]
			if seekstate[2]:
				speed /= seekstate[2]
			speed = self.getHigher(speed, config.seek.speeds_backward.value) or config.seek.speeds_backward.value[-1]
			self.setSeekState(self.makeStateBackward(speed))
		elif self.isStateSlowMotion(seekstate):
			speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.value)
			if speed:
				self.setSeekState(self.makeStateSlowMotion(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PAUSE)
	def seekFwdManual(self):
		self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
	def fwdSeekTo(self, minutes):
		print "Seek", minutes, "minutes forward"
		self.doSeekRelative(minutes * 60 * 90000)
	def seekBackManual(self):
		self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
	def rwdSeekTo(self, minutes):
		print "rwdSeekTo"
		self.doSeekRelative(-minutes * 60 * 90000)
	def checkSkipShowHideLock(self):
		# Keep the infobar locked visible while winding, if configured so.
		wantlock = self.seekstate != self.SEEK_STATE_PLAY
		if config.usage.show_infobar_on_skip.value:
			if self.lockedBecauseOfSkipping and not wantlock:
				self.unlockShow()
				self.lockedBecauseOfSkipping = False
			if wantlock and not self.lockedBecauseOfSkipping:
				self.lockShow()
				self.lockedBecauseOfSkipping = True
	def calcRemainingTime(self):
		# Remaining play time in seconds at the current winding speed, or
		# False when it cannot be determined.
		# NOTE(review): "len" shadows the builtin here; kept as-is.
		seekable = self.getSeek()
		if seekable is not None:
			len = seekable.getLength()
			try:
				tmp = self.cueGetEndCutPosition()
				if tmp:
					len = (False, tmp)
			except:
				pass
			pos = seekable.getPlayPosition()
			speednom = self.seekstate[1] or 1
			speedden = self.seekstate[2] or 1
			if not len[0] and not pos[0]:
				if len[1] <= pos[1]:
					return 0
				time = (len[1] - pos[1])*speedden/(90*speednom)
				return time
		return False
	def __evEOF(self):
		if self.seekstate == self.SEEK_STATE_EOF:
			return
		# if we are seeking forward, we try to end up ~1s before the end, and pause there.
		seekstate = self.seekstate
		if self.seekstate != self.SEEK_STATE_PAUSE:
			self.setSeekState(self.SEEK_STATE_EOF)
		if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking
			seekable = self.getSeek()
			if seekable is not None:
				seekable.seekTo(-1)
		if seekstate == self.SEEK_STATE_PLAY: # regular EOF
			self.doEofInternal(True)
		else:
			self.doEofInternal(False)
	def doEofInternal(self, playing):
		pass		# Defined in subclasses
	def __evSOF(self):
		# Start of file reached while rewinding: resume playback from 0.
		self.setSeekState(self.SEEK_STATE_PLAY)
		self.doSeek(0)
	# This is needed, because some Mediaplayer use InfoBarSeek but not InfoBarCueSheetSupport
	def seekPreviousMark(self):
		if isinstance(self, InfoBarCueSheetSupport):
			self.jumpPreviousMark()
	def seekNextMark(self):
		if isinstance(self, InfoBarCueSheetSupport):
			self.jumpNextMark()
from Screens.PVRState import PVRState, TimeshiftState
class InfoBarPVRState:
	"""Shows a PVR-state dialog (play/pause/FF symbol) tied to the seek state."""
	def __init__(self, screen=PVRState, force_show = False):
		self.onPlayStateChanged.append(self.__playStateChanged)
		self.pvrStateDialog = self.session.instantiateDialog(screen)
		self.onShow.append(self._mayShow)
		self.onHide.append(self.pvrStateDialog.hide)
		self.force_show = force_show
	def _mayShow(self):
		# Only show the state dialog while the infobar itself is visible and
		# playback is not in plain PLAY state.
		winding = self.shown and self.seekstate != self.SEEK_STATE_PLAY
		if winding:
			self.pvrStateDialog.show()
	def __playStateChanged(self, state):
		# state is a seek-state tuple; index 3 carries the display string.
		self.pvrStateDialog["state"].setText(state[3])
		# Back in PLAY state: hide the dialog when no infobar will be shown,
		# unless this instance was created with force_show.
		hide_wanted = (not config.usage.show_infobar_on_skip.value
			and self.seekstate == self.SEEK_STATE_PLAY
			and not self.force_show)
		if hide_wanted:
			self.pvrStateDialog.hide()
		else:
			self._mayShow()
class TimeshiftLive(Screen):
	# Bare helper screen; InfoBarTimeshiftState shows/hides it to indicate
	# that timeshift is recording while live TV is being watched.
	def __init__(self, session):
		Screen.__init__(self, session)
class InfoBarTimeshiftState(InfoBarPVRState):
	"""PVR-state display specialised for timeshift: toggles between the
	TimeshiftState dialog (while shifted) and the TimeshiftLive marker
	(while watching live with timeshift recording)."""
	def __init__(self):
		InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show = True)
		self.timeshiftLiveScreen = self.session.instantiateDialog(TimeshiftLive)
		self.onHide.append(self.timeshiftLiveScreen.hide)
		# Hide the live marker whenever the second infobar comes up.
		self.secondInfoBarScreen and self.secondInfoBarScreen.onShow.append(self.timeshiftLiveScreen.hide)
		self.timeshiftLiveScreen.hide()
		self.__hideTimer = eTimer()
		self.__hideTimer.callback.append(self.__hideTimeshiftState)
		self.onFirstExecBegin.append(self.pvrStateDialog.show)
	def _mayShow(self):
		if self.timeshiftEnabled():
			if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
				self.secondInfoBarScreen.hide()
			if self.timeshiftActivated():
				# Currently time-shifted: show the PVR state dialog.
				self.pvrStateDialog.show()
				self.timeshiftLiveScreen.hide()
			elif self.showTimeshiftState:
				# Live with timeshift recording: show the live marker once.
				self.pvrStateDialog.hide()
				self.timeshiftLiveScreen.show()
				self.showTimeshiftState = False
			# Auto-hide after the configured infobar timeout while playing.
			if self.seekstate == self.SEEK_STATE_PLAY and config.usage.infobar_timeout.index and (self.pvrStateDialog.shown or self.timeshiftLiveScreen.shown):
				self.__hideTimer.startLongTimer(config.usage.infobar_timeout.index)
		else:
			self.__hideTimeshiftState()
	def __hideTimeshiftState(self):
		self.pvrStateDialog.hide()
		self.timeshiftLiveScreen.hide()
class InfoBarShowMovies:
	# NOTE: this class is little more than an action map — it binds
	# up/down/movieList to a showMovies/up/down interface that the mixing
	# screen must provide; no movie-list behaviour is implemented here.
	def __init__(self):
		self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions",
			{
				"movieList": (self.showMovies, _("Open the movie list")),
				"up": (self.up, _("Open the movie list")),
				"down": (self.down, _("Open the movie list"))
			})
# InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE!
# Hrmf.
#
# Timeshift works the following way:
# demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions"
# - normal playback TUNER unused PLAY enable disable disable
# - user presses "yellow" button. FILE record PAUSE enable disable enable
# - user presses pause again                 FILE      record      PLAY               enable                  disable              enable
# - user fast forwards FILE record FF enable disable enable
# - end of timeshift buffer reached TUNER record PLAY enable enable disable
# - user backwards FILE record BACK # !! enable disable enable
#
# in other words:
# - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"),
# freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift")
# now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftEnableActions" are disabled.
# - the user can now PVR around
# - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled")
# the service loses its "seekable" state. It can still be paused, but just to activate timeshift right
# after!
# the seek actions will be disabled, but the timeshiftActivateActions will be enabled
# - if the user rewinds, or press pause, timeshift will be activated again
# note that a timeshift can be enabled ("recording") and
# activated (currently time-shifting).
class InfoBarTimeshift:
def __init__(self):
self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions",
{
"timeshiftStart": (self.startTimeshift, _("Start timeshift")), # the "yellow key"
"timeshiftStop": (self.stopTimeshift, _("Stop timeshift")) # currently undefined :), probably 'TV'
}, prio=1)
self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"],
{
"timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key"
"timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key"
}, prio=-1) # priority over record
self["TimeshiftActivateActions"].setEnabled(False)
self.ts_rewind_timer = eTimer()
self.ts_rewind_timer.callback.append(self.rewindService)
self.ts_start_delay_timer = eTimer()
self.ts_start_delay_timer.callback.append(self.startTimeshiftWithoutPause)
self.ts_current_event_timer = eTimer()
self.ts_current_event_timer.callback.append(self.saveTimeshiftFileForEvent)
self.save_timeshift_file = False
self.timeshift_was_activated = False
self.showTimeshiftState = False
self.save_timeshift_only_current_event = False
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
iPlayableService.evEnd: self.__serviceEnd
})
def getTimeshift(self):
service = self.session.nav.getCurrentService()
return service and service.timeshift()
def timeshiftEnabled(self):
ts = self.getTimeshift()
return ts and ts.isTimeshiftEnabled()
def timeshiftActivated(self):
ts = self.getTimeshift()
return ts and ts.isTimeshiftActive()
def startTimeshift(self, pauseService = True):
print "enable timeshift"
ts = self.getTimeshift()
if ts is None:
if not pauseService and not int(config.usage.timeshift_start_delay.value):
self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, simple = True)
print "no ts interface"
return 0
if ts.isTimeshiftEnabled():
print "hu, timeshift already enabled?"
else:
if not ts.startTimeshift():
# we remove the "relative time" for now.
#self.pvrStateDialog["timeshift"].setRelative(time.time())
if pauseService:
# PAUSE.
#self.setSeekState(self.SEEK_STATE_PAUSE)
self.activateTimeshiftEnd(False)
self.showTimeshiftState = True
else:
self.showTimeshiftState = False
# enable the "TimeshiftEnableActions", which will override
# the startTimeshift actions
self.__seekableStatusChanged()
# get current timeshift filename and calculate new
self.save_timeshift_file = False
self.save_timeshift_in_movie_dir = False
self.setCurrentEventTimer()
self.current_timeshift_filename = ts.getTimeshiftFilename()
self.new_timeshift_filename = self.generateNewTimeshiftFileName()
else:
print "timeshift failed"
def startTimeshiftWithoutPause(self):
self.startTimeshift(False)
def stopTimeshift(self):
ts = self.getTimeshift()
if ts and ts.isTimeshiftEnabled():
if int(config.usage.timeshift_start_delay.value):
ts.switchToLive()
else:
self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback)
else:
return 0
def stopTimeshiftcheckTimeshiftRunningCallback(self, answer):
ts = self.getTimeshift()
if answer and ts:
ts.stopTimeshift()
self.pvrStateDialog.hide()
self.setCurrentEventTimer()
# disable actions
self.__seekableStatusChanged()
# activates timeshift, and seeks to (almost) the end
def activateTimeshiftEnd(self, back = True):
self.showTimeshiftState = True
ts = self.getTimeshift()
print "activateTimeshiftEnd"
if ts is None:
return
if ts.isTimeshiftActive():
print "!! activate timeshift called - but shouldn't this be a normal pause?"
self.pauseService()
else:
print "play, ..."
ts.activateTimeshift() # activate timeshift will automatically pause
self.setSeekState(self.SEEK_STATE_PAUSE)
seekable = self.getSeek()
if seekable is not None:
seekable.seekTo(-90000) # seek approx. 1 sec before end
self.timeshift_was_activated = True
if back:
self.ts_rewind_timer.start(200, 1)
def rewindService(self):
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
# generates only filename without path
def generateNewTimeshiftFileName(self):
	"""Build the target filename (no path) for saving the timeshift buffer.

	Composition follows config.recording.filename_composition:
	"short", "long", or the standard "<date> - <service> - <event name>".
	"""
	name = "timeshift record"
	info = { }
	self.getProgramInfoAndEvent(info, name)
	serviceref = info["serviceref"]
	service_name = ""
	if isinstance(serviceref, eServiceReference):
		service_name = ServiceReference(serviceref).getServiceName()
	begin_date = strftime("%Y%m%d %H%M", localtime(time()))
	filename = begin_date + " - " + service_name
	if config.recording.filename_composition.value == "short":
		filename = strftime("%Y%m%d", localtime(time())) + " - " + info["name"]
	elif config.recording.filename_composition.value == "long":
		filename += " - " + info["name"] + " - " + info["description"]
	else:
		filename += " - " + info["name"] # standard
	if config.recording.ascii_filenames.value:
		# Transliterate to plain ASCII for filesystems/exports that need it.
		filename = ASCIItranslit.legacyEncode(filename)
	print "New timeshift filename: ", filename
	return filename
# same as activateTimeshiftEnd, but pauses afterwards.
def activateTimeshiftEndAndPause(self):
	"""Enter timeshift at the live edge without the rewind kick."""
	print "activateTimeshiftEndAndPause"
	#state = self.seekstate
	self.activateTimeshiftEnd(False)
def callServiceStarted(self):
	"""Public wrapper around the name-mangled __serviceStarted handler."""
	self.__serviceStarted()
def __seekableStatusChanged(self):
	"""Sync action maps with the current seekable/timeshift state.

	Timeshift-activation keys are only live while we are NOT already
	seekable; seek keys are only live while a seek interface exists.
	"""
	self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshiftEnabled())
	state = self.getSeek() is not None and self.timeshiftEnabled()
	self["SeekActions"].setEnabled(state)
	if not state:
		# No seeking possible any more -> force normal playback.
		self.setSeekState(self.SEEK_STATE_PLAY)
	self.restartSubtitle()
def __serviceStarted(self):
	"""On zap: reset PVR state and (re)arm the auto-timeshift start delay."""
	self.pvrStateDialog.hide()
	self.__seekableStatusChanged()
	if self.ts_start_delay_timer.isActive():
		self.ts_start_delay_timer.stop()
	if int(config.usage.timeshift_start_delay.value):
		# Delay is configured in seconds; timer wants milliseconds.
		self.ts_start_delay_timer.start(int(config.usage.timeshift_start_delay.value) * 1000, True)
def checkTimeshiftRunning(self, returnFunction):
	"""Ask "Stop timeshift?" if needed, else call returnFunction(True) directly.

	returnFunction -- continuation invoked with the (possibly user-chosen)
	answer once the question is resolved.
	"""
	if self.timeshiftEnabled() and config.usage.check_timeshift.value and self.timeshift_was_activated:
		message = _("Stop timeshift?")
		if not self.save_timeshift_file:
			choice = [(_("Yes"), "stop"), (_("No"), "continue"), (_("Yes and save"), "save"), (_("Yes and save in movie dir"), "save_movie")]
		else:
			# Saving already requested earlier -> only stop/continue remain.
			choice = [(_("Yes"), "stop"), (_("No"), "continue")]
			message += "\n" + _("Reminder, you have chosen to save timeshift file.")
			if self.save_timeshift_only_current_event:
				remaining = self.currentEventTime()
				if remaining > 0:
					message += "\n" + _("The %d min remaining before the end of the event.") % abs(remaining / 60)
		self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice)
	else:
		returnFunction(True)
def checkTimeshiftRunningCallback(self, returnFunction, answer):
	"""Handle the "Stop timeshift?" answer ("stop"/"continue"/"save"/"save_movie").

	Forwards a boolean to returnFunction: True for anything except "continue".
	"""
	if answer:
		if "movie" in answer:
			self.save_timeshift_in_movie_dir = True
		if "save" in answer:
			self.save_timeshift_file = True
			ts = self.getTimeshift()
			if ts:
				ts.saveTimeshiftFile()
				del ts
		if "continue" not in answer:
			self.saveTimeshiftFiles()
	returnFunction(answer and answer != "continue")
# renames/moves timeshift files if requested
def __serviceEnd(self):
	"""On service end: persist any pending timeshift files and reset state."""
	self.saveTimeshiftFiles()
	self.setCurrentEventTimer()
	self.timeshift_was_activated = False
def saveTimeshiftFiles(self):
	"""Move the current timeshift buffer (plus .sc/.cuts sidecars) to its
	final recording location, if saving was requested.

	Destination is the configured timeshift path unless the user chose the
	movie directory. Always clears the save flag and the event timer.
	"""
	if self.save_timeshift_file and self.current_timeshift_filename and self.new_timeshift_filename:
		if config.usage.timeshift_path.value and not self.save_timeshift_in_movie_dir:
			dirname = config.usage.timeshift_path.value
		else:
			dirname = defaultMoviePath()
		filename = getRecordingFilename(self.new_timeshift_filename, dirname) + ".ts"
		fileList = []
		fileList.append((self.current_timeshift_filename, filename))
		# Carry the seek-cache and cut-list sidecar files along, if present.
		if fileExists(self.current_timeshift_filename + ".sc"):
			fileList.append((self.current_timeshift_filename + ".sc", filename + ".sc"))
		if fileExists(self.current_timeshift_filename + ".cuts"):
			fileList.append((self.current_timeshift_filename + ".cuts", filename + ".cuts"))
		moveFiles(fileList)
	self.save_timeshift_file = False
	self.setCurrentEventTimer()
def currentEventTime(self):
	"""Return the number of seconds left in the current EPG event.

	Returns 0 when no service is playing or no event info is available.
	"""
	playing_ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
	if not playing_ref:
		return 0
	event = eEPGCache.getInstance().lookupEventTime(playing_ref, -1, 0)
	if not event:
		return 0
	event_end = event.getBeginTime() + event.getDuration()
	return event_end - int(time())
def saveTimeshiftFileForEvent(self):
	"""When the current event ends, ask what to do with the timeshift file
	(save/stop, save/restart, discard, or nothing). 15s dialog timeout."""
	if self.timeshiftEnabled() and self.save_timeshift_only_current_event and self.timeshift_was_activated and self.save_timeshift_file:
		message = _("Current event is over.\nSelect an option to save the timeshift file.")
		choice = [(_("Save and stop timeshift"), "save"), (_("Save and restart timeshift"), "restart"), (_("Don't save and stop timeshift"), "stop"), (_("Do nothing"), "continue")]
		self.session.openWithCallback(self.saveTimeshiftFileForEventCallback, MessageBox, message, simple = True, list = choice, timeout=15)
def saveTimeshiftFileForEventCallback(self, answer):
	"""Execute the end-of-event choice: stop timeshift, optionally save the
	buffer, and optionally restart timeshift after one second."""
	self.save_timeshift_only_current_event = False
	if answer:
		ts = self.getTimeshift()
		if ts and answer in ("save", "restart", "stop"):
			# Stop first; saving needs the buffer finalized.
			self.stopTimeshiftcheckTimeshiftRunningCallback(True)
			if answer in ("save", "restart"):
				ts.saveTimeshiftFile()
				del ts
				self.saveTimeshiftFiles()
			if answer == "restart":
				self.ts_start_delay_timer.start(1000, True)
			self.save_timeshift_file = False
			self.save_timeshift_in_movie_dir = False
def setCurrentEventTimer(self, duration=0):
	"""(Re)arm the end-of-current-event timer.

	duration <= 0 simply cancels the timer and clears the
	save-only-current-event flag.
	"""
	timer = self.ts_current_event_timer
	timer.stop()
	armed = duration > 0
	self.save_timeshift_only_current_event = armed
	if armed:
		timer.startLongTimer(duration)
from Screens.PiPSetup import PiPSetup
class InfoBarExtensions:
	"""Collects "extension" entries (from other mixins/plugins) and presents
	them in a ChoiceBox, assigning number/colour hotkeys where possible.

	Entries are registered via addExtension() either as a single extension
	(EXTENSION_SINGLE) or as a callable returning a list (EXTENSION_LIST).
	"""
	EXTENSION_SINGLE = 0
	EXTENSION_LIST = 1

	def __init__(self):
		self.list = []
		self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
			{
				"extensions": (self.showExtensionSelection, _("Show extensions...")),
			}, 1) # lower priority

	def addExtension(self, extension, key = None, type = EXTENSION_SINGLE):
		"""Register one extension; key is the preferred hotkey (may be None)."""
		self.list.append((type, extension, key))

	def updateExtension(self, extension, key = None):
		"""Add *extension* to the flat list, resolving its hotkey.

		A requested key that is already taken falls back to the first free
		key from availableKeys; extensions may end up with no key at all.
		"""
		self.extensionsList.append(extension)
		if key is not None and key in self.extensionKeys:
			key = None  # requested key already taken -> auto-assign below
		if key is None:
			for candidate in self.availableKeys:
				if candidate not in self.extensionKeys:
					key = candidate
					break
		if key is not None:
			self.extensionKeys[key] = len(self.extensionsList) - 1

	def updateExtensions(self):
		"""Rebuild extensionsList/extensionKeys from the registered entries."""
		self.extensionsList = []
		self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ]
		self.extensionKeys = {}
		for x in self.list:
			if x[0] == self.EXTENSION_SINGLE:
				self.updateExtension(x[1], x[2])
			else:
				# EXTENSION_LIST: x[1] is a callable yielding (extension, key) pairs
				for y in x[1]():
					self.updateExtension(y[0], y[1])

	def showExtensionSelection(self):
		"""Open the extension ChoiceBox; keyed entries first, the rest keyless.

		An extension is a (nameFunc, actionFunc, visibleFunc) tuple; entries
		whose visibleFunc() is falsy are dropped entirely.
		"""
		self.updateExtensions()
		extensionsList = self.extensionsList[:]
		keys = []
		choices = []
		for key in self.availableKeys:
			if key in self.extensionKeys:
				extension = self.extensionsList[self.extensionKeys[key]]
				if extension[2]():
					choices.append((extension[0](), extension))
					keys.append(key)
				# Keyed entries are handled here either way; keep them out of
				# the keyless tail below. (The original duplicated this remove
				# in both branches and computed an unused str() of the name.)
				extensionsList.remove(extension)
		choices.extend([(x[0](), x) for x in extensionsList])
		keys += [""] * len(extensionsList)
		self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list=choices, keys=keys, skin_name="ExtensionsList", reorderConfig="extension_order")

	def extensionCallback(self, answer):
		"""Run the chosen extension's action function."""
		if answer is not None:
			answer[1][1]()
from Tools.BoundFunction import boundFunction
import inspect
# depends on InfoBarExtensions
class InfoBarPlugins:
	"""Feeds EXTENSIONSMENU plugins into the extensions menu.

	Depends on InfoBarExtensions being mixed in (uses addExtension).
	"""
	def __init__(self):
		self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)
	def getPluginName(self, name):
		# Identity hook; bound via boundFunction so names resolve lazily.
		return name
	def getPluginList(self):
		"""Return extension-list entries for all extensions-menu plugins,
		sorted by plugin name."""
		l = []
		for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
			# Accept plugins taking (session) always, and (session, servicelist)
			# only when this infobar actually has a channel selection.
			args = inspect.getargspec(p.__call__)[0]
			if len(args) == 1 or len(args) == 2 and isinstance(self, InfoBarChannelSelection):
				l.append(((boundFunction(self.getPluginName, p.name), boundFunction(self.runPlugin, p), lambda: True), None, p.name))
		l.sort(key = lambda e: e[2]) # sort by name
		return l
	def runPlugin(self, plugin):
		"""Invoke a plugin, passing the servicelist when we have one."""
		if isinstance(self, InfoBarChannelSelection):
			plugin(session = self.session, servicelist = self.servicelist)
		else:
			plugin(session = self.session)
from Components.Task import job_manager
class InfoBarJobman:
	"""Shows pending background jobs (job_manager) in the extensions menu.

	Depends on InfoBarExtensions being mixed in.
	"""
	def __init__(self):
		self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)
	def getJobList(self):
		"""One extension entry per pending job."""
		return [((boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True), None) for job in job_manager.getPendingJobs()]
	def getJobName(self, job):
		# NOTE(review): divides by job.end — presumably always > 0 for a
		# pending job; verify against Components.Task.
		return "%s: %s (%d%%)" % (job.getStatustext(), job.name, int(100*job.progress/float(job.end)))
	def showJobView(self, job):
		"""Open the JobView screen, bringing the job to the foreground."""
		from Screens.TaskView import JobView
		job_manager.in_background = False
		self.session.openWithCallback(self.JobViewCB, JobView, job)
	def JobViewCB(self, in_background):
		# Persist whether the user sent the job back to the background.
		job_manager.in_background = in_background
# depends on InfoBarExtensions
class InfoBarPiP:
	"""Picture-in-Picture handling: show/hide the PiP window, swap it with
	the main picture, move it, and "pipzap" (zap inside the PiP).

	PiP visibility is tracked session-wide in self.session.pipshown; the
	PiP screen itself lives in self.session.pip while shown.
	"""
	def __init__(self):
		try:
			self.session.pipshown
		except:
			# First infobar in this session: initialize the shared flag.
			self.session.pipshown = False
		self.lastPiPService = None
		if SystemInfo["PIPAvailable"]:
			self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
				{
					"activatePiP": (self.activePiP, self.activePiPName),
				})
			if (self.allowPiP):
				self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
				self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
				self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
				self.addExtension((self.getTogglePipzapName, self.togglePipzap, lambda: True), "red")
			else:
				self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
				self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
		self.lastPiPServiceTimeoutTimer = eTimer()
		self.lastPiPServiceTimeoutTimer.callback.append(self.clearLastPiPService)
	def pipShown(self):
		"""True while the PiP window is visible."""
		return self.session.pipshown
	def pipHandles0Action(self):
		# The "0" key is redirected to PiP only when PiP is shown and the
		# user configured a non-standard zero-button behaviour.
		return self.pipShown() and config.usage.pip_zero_button.value != "standard"
	def getShowHideName(self):
		if self.session.pipshown:
			return _("Disable Picture in Picture")
		else:
			return _("Activate Picture in Picture")
	def getSwapName(self):
		return _("Swap services")
	def getMoveName(self):
		return _("Move Picture in Picture")
	def getTogglePipzapName(self):
		slist = self.servicelist
		if slist and slist.dopipzap:
			return _("Zap focus to main screen")
		return _("Zap focus to Picture in Picture")
	def togglePipzap(self):
		"""Toggle whether channel zapping targets the PiP or the main screen.

		Activates PiP first if it is not shown yet; swaps the stored
		service paths so each screen keeps its own bouquet position.
		"""
		if not self.session.pipshown:
			self.showPiP()
		slist = self.servicelist
		if slist and self.session.pipshown:
			slist.togglePipzap()
			if slist.dopipzap:
				currentServicePath = slist.getCurrentServicePath()
				self.servicelist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
				self.session.pip.servicePath = currentServicePath
	def showPiP(self):
		"""Toggle the PiP window.

		Hiding remembers the PiP service for a configurable timeout so a
		quick re-activation restores it; showing tries the remembered
		service first, then the live service, then the servicelist cursor.
		"""
		self.lastPiPServiceTimeoutTimer.stop()
		if self.session.pipshown:
			slist = self.servicelist
			if slist and slist.dopipzap:
				# Give zap focus back to the main screen before closing.
				self.togglePipzap()
			if self.session.pipshown:
				lastPiPServiceTimeout = int(config.usage.pip_last_service_timeout.value)
				if lastPiPServiceTimeout >= 0:
					self.lastPiPService = self.session.pip.getCurrentServiceReference()
					if lastPiPServiceTimeout:
						self.lastPiPServiceTimeoutTimer.startLongTimer(lastPiPServiceTimeout)
				del self.session.pip
				self.session.pipshown = False
			if hasattr(self, "ScreenSaverTimerStart"):
				self.ScreenSaverTimerStart()
		else:
			self.session.pip = self.session.instantiateDialog(PictureInPicture)
			self.session.pip.show()
			newservice = self.lastPiPService or self.session.nav.getCurrentlyPlayingServiceReference() or self.servicelist.servicelist.getCurrent()
			if self.session.pip.playService(newservice):
				self.session.pipshown = True
				self.session.pip.servicePath = self.servicelist.getCurrentServicePath()
			else:
				# Remembered service failed (e.g. gone) -> retry without it.
				newservice = self.session.nav.getCurrentlyPlayingServiceReference() or self.servicelist.servicelist.getCurrent()
				if self.session.pip.playService(newservice):
					self.session.pipshown = True
					self.session.pip.servicePath = self.servicelist.getCurrentServicePath()
				else:
					self.session.pipshown = False
					del self.session.pip
			if self.session.pipshown and hasattr(self, "screenSaverTimer"):
				self.screenSaverTimer.stop()
			self.lastPiPService = None
	def clearLastPiPService(self):
		# Timeout expired: forget the previously shown PiP service.
		self.lastPiPService = None
	def activePiP(self):
		if self.servicelist and self.servicelist.dopipzap or not self.session.pipshown:
			self.showPiP()
		else:
			self.togglePipzap()
	def activePiPName(self):
		if self.servicelist and self.servicelist.dopipzap:
			return _("Disable Picture in Picture")
		if self.session.pipshown:
			return _("Zap focus to Picture in Picture")
		else:
			return _("Activate Picture in Picture")
	def swapPiP(self):
		"""Exchange the PiP service with the main-screen service."""
		if self.pipShown():
			swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			pipref = self.session.pip.getCurrentService()
			if swapservice and pipref and pipref.toString() != swapservice.toString():
				currentServicePath = self.servicelist.getCurrentServicePath()
				currentBouquet = self.servicelist and self.servicelist.getRoot()
				self.servicelist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
				self.session.pip.playService(swapservice)
				self.session.nav.playService(pipref, checkParentalControl=False, adjust=False)
				self.session.pip.servicePath = currentServicePath
				self.session.pip.servicePath[1] = currentBouquet
				if self.servicelist.dopipzap:
					# This unfortunately won't work with subservices
					self.servicelist.setCurrentSelection(self.session.pip.getCurrentService())
	def movePiP(self):
		"""Open the PiP position/size setup screen."""
		if self.pipShown():
			self.session.open(PiPSetup, pip = self.session.pip)
	def pipDoHandle0Action(self):
		"""Dispatch the "0" key per config: swap, swap-then-close, or close."""
		use = config.usage.pip_zero_button.value
		if "swap" == use:
			self.swapPiP()
		elif "swapstop" == use:
			self.swapPiP()
			self.showPiP()
		elif "stop" == use:
			self.showPiP()
from RecordTimer import parseEvent, RecordTimerEntry
class InfoBarInstantRecord:
	"""Instant Record - handles the instantRecord action in order to
	start/stop instant records"""
	def __init__(self):
		self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
			{
				"instantRecord": (self.instantRecord, _("Instant recording...")),
			})
		self.SelectedInstantServiceRef = None
		if isStandardInfoBar(self):
			self.recording = []
		else:
			# Secondary infobars share the main InfoBar's recording list.
			from Screens.InfoBar import InfoBar
			InfoBarInstance = InfoBar.instance
			if InfoBarInstance:
				self.recording = InfoBarInstance.recording
	def moveToTrash(self, entry):
		"""Move a stopped recording's files into the trash folder."""
		print "instantRecord stop and delete recording: ", entry.name
		import Tools.Trashcan
		trash = Tools.Trashcan.createTrashFolder(entry.Filename)
		from MovieSelection import moveServiceFiles
		moveServiceFiles(entry.Filename, trash, entry.name, allowCopy=False)
	def stopCurrentRecording(self, entry = -1):
		"""Ask for confirmation, then stop (and maybe delete) recording *entry*.

		entry -- index into self.recording; -1/None means no-op.
		"""
		def confirm(answer=False):
			if answer:
				self.session.nav.RecordTimer.removeEntry(self.recording[entry])
				if self.deleteRecording:
					self.moveToTrash(self.recording[entry])
				self.recording.remove(self.recording[entry])
		if entry is not None and entry != -1:
			msg = _("Stop recording:")
			if self.deleteRecording:
				msg = _("Stop and delete recording:")
			msg += "\n"
			msg += " - " + self.recording[entry].name + "\n"
			self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
	def stopAllCurrentRecordings(self, list):
		"""Ask for confirmation, then stop (and maybe delete) every entry in *list*
		(a list of (timer, bool) pairs as built by recordQuestionCallback)."""
		def confirm(answer=False):
			if answer:
				for entry in list:
					self.session.nav.RecordTimer.removeEntry(entry[0])
					self.recording.remove(entry[0])
					if self.deleteRecording:
						self.moveToTrash(entry[0])
		msg = _("Stop recordings:")
		if self.deleteRecording:
			msg = _("Stop and delete recordings:")
		msg += "\n"
		for entry in list:
			msg += " - " + entry[0].name + "\n"
		self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
	def getProgramInfoAndEvent(self, info, name):
		"""Fill dict *info* with serviceref/event/name/description/eventid
		(and "end" when event data exists) for the service to record."""
		info["serviceref"] = hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef or self.session.nav.getCurrentlyPlayingServiceOrGroup()
		# try to get event info
		event = None
		try:
			epg = eEPGCache.getInstance()
			event = epg.lookupEventTime(info["serviceref"], -1, 0)
			if event is None:
				if hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef:
					service_info = eServiceCenter.getInstance().info(self.SelectedInstantServiceRef)
					event = service_info and service_info.getEvent(self.SelectedInstantServiceRef)
				else:
					service = self.session.nav.getCurrentService()
					event = service and service.info().getEvent(0)
		except:
			# Best effort only: missing EPG must not break recording.
			pass
		info["event"] = event
		info["name"] = name
		info["description"] = ""
		info["eventid"] = None
		if event is not None:
			curEvent = parseEvent(event)
			info["name"] = curEvent[2]
			info["description"] = curEvent[3]
			info["eventid"] = curEvent[4]
			info["end"] = curEvent[1]
	def startInstantRecording(self, limitEvent = False):
		"""Create and register a RecordTimerEntry starting now.

		limitEvent -- stop at the current event's end when event data is
		available; otherwise record with autoincrease (open-ended).
		"""
		begin = int(time())
		end = begin + 3600 # dummy
		name = "instant record"
		info = { }
		self.getProgramInfoAndEvent(info, name)
		serviceref = info["serviceref"]
		event = info["event"]
		if event is not None:
			if limitEvent:
				end = info["end"]
		else:
			if limitEvent:
				self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)
		if isinstance(serviceref, eServiceReference):
			serviceref = ServiceReference(serviceref)
		recording = RecordTimerEntry(serviceref, begin, end, info["name"], info["description"], info["eventid"], dirname = preferredInstantRecordPath())
		recording.dontSave = True
		if event is None or limitEvent == False:
			recording.autoincrease = True
			recording.setAutoincreaseEnd()
		simulTimerList = self.session.nav.RecordTimer.record(recording)
		if simulTimerList is None: # no conflict
			recording.autoincrease = False
			self.recording.append(recording)
		else:
			if len(simulTimerList) > 1: # with other recording
				name = simulTimerList[1].name
				name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
				print "[TIMER] conflicts with", name_date
				recording.autoincrease = True # start with max available length, then increment
				if recording.setAutoincreaseEnd():
					self.session.nav.RecordTimer.record(recording)
					self.recording.append(recording)
					self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
				else:
					self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
			else:
				self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
			recording.autoincrease = False
	def isInstantRecordRunning(self):
		"""True when at least one of our instant recordings is active."""
		print "self.recording:", self.recording
		if self.recording:
			for x in self.recording:
				if x.isRunning():
					return True
		return False
	def recordQuestionCallback(self, answer):
		"""Dispatch the instantRecord ChoiceBox answer (answer[1] is the tag).

		Also prunes self.recording of timers that have vanished from the
		RecordTimer, and collects running dontSave timers into *list* for
		the multi-selection screens.
		"""
		print "pre:\n", self.recording
		if answer is None or answer[1] == "no":
			return
		list = []
		recording = self.recording[:]
		for x in recording:
			if not x in self.session.nav.RecordTimer.timer_list:
				self.recording.remove(x)
			elif x.dontSave and x.isRunning():
				list.append((x, False))
		self.deleteRecording = False
		if answer[1] == "changeduration":
			if len(self.recording) == 1:
				self.changeDuration(0)
			else:
				self.session.openWithCallback(self.changeDuration, TimerSelection, list)
		elif answer[1] == "addrecordingtime":
			if len(self.recording) == 1:
				self.addRecordingTime(0)
			else:
				self.session.openWithCallback(self.addRecordingTime, TimerSelection, list)
		elif answer[1] == "changeendtime":
			if len(self.recording) == 1:
				self.setEndtime(0)
			else:
				self.session.openWithCallback(self.setEndtime, TimerSelection, list)
		elif answer[1] == "timer":
			import TimerEdit
			self.session.open(TimerEdit.TimerEditList)
		elif answer[1] == "stop":
			if len(self.recording) == 1:
				self.stopCurrentRecording(0)
			else:
				self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] == "stopdelete":
			self.deleteRecording = True
			if len(self.recording) == 1:
				self.stopCurrentRecording(0)
			else:
				self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] == "stopall":
			self.stopAllCurrentRecordings(list)
		elif answer[1] == "stopdeleteall":
			self.deleteRecording = True
			self.stopAllCurrentRecordings(list)
		elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
			self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime") or False)
			if answer[1] == "manualduration":
				self.changeDuration(len(self.recording)-1)
			elif answer[1] == "manualendtime":
				self.setEndtime(len(self.recording)-1)
		elif "timeshift" in answer[1]:
			# "timeshift", "timeshift_movie", "timeshift_event" tags.
			ts = self.getTimeshift()
			if ts:
				ts.saveTimeshiftFile()
				self.save_timeshift_file = True
				if "movie" in answer[1]:
					self.save_timeshift_in_movie_dir = True
				if "event" in answer[1]:
					remaining = self.currentEventTime()
					if remaining > 0:
						self.setCurrentEventTimer(remaining-15)
		print "after:\n", self.recording
	def setEndtime(self, entry):
		"""Open a time input dialog to change recording *entry*'s end time."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
			dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
			dlg.setTitle(_("Please change recording endtime"))
	def TimeDateInputClosed(self, ret):
		"""Apply the end time chosen in the TimeDateInput dialog."""
		if len(ret) > 1:
			if ret[0]:
				print "stopping recording at", strftime("%F %T", localtime(ret[1]))
				if self.recording[self.selectedEntry].end != ret[1]:
					# Manual end time disables autoincrease.
					self.recording[self.selectedEntry].autoincrease = False
				self.recording[self.selectedEntry].end = ret[1]
				self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])
	def changeDuration(self, entry):
		"""Ask for a total duration (minutes) for recording *entry*."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5", maxSize=False, type=Input.NUMBER)
	def addRecordingTime(self, entry):
		"""Ask for extra minutes to append to recording *entry*."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputAddRecordingTime, InputBox, title=_("How many minutes do you want add to record?"), text="5", maxSize=False, type=Input.NUMBER)
	def inputAddRecordingTime(self, value):
		"""Extend the selected recording's end time by *value* minutes."""
		if value:
			print "added", int(value), "minutes for recording."
			entry = self.recording[self.selectedEntry]
			if int(value) != 0:
				entry.autoincrease = False
				entry.end += 60 * int(value)
				self.session.nav.RecordTimer.timeChanged(entry)
	def inputCallback(self, value):
		"""Set the selected recording to stop *value* minutes from now."""
		if value:
			print "stopping recording after", int(value), "minutes."
			entry = self.recording[self.selectedEntry]
			if int(value) != 0:
				entry.autoincrease = False
				entry.end = int(time()) + 60 * int(value)
				self.session.nav.RecordTimer.timeChanged(entry)
	def isTimerRecordRunning(self):
		"""True when a scheduled (non-instant) timer recording is running."""
		identical = timers = 0
		for timer in self.session.nav.RecordTimer.timer_list:
			if timer.isRunning() and not timer.justplay:
				timers += 1
				if self.recording:
					for x in self.recording:
						if x.isRunning() and x == timer:
							identical += 1
		return timers > identical
	def instantRecord(self, serviceRef=None):
		"""Entry point for the instantRecord key: build and show the
		context-sensitive ChoiceBox of record/stop/timeshift options.

		serviceRef -- record this service instead of the playing one.
		"""
		self.SelectedInstantServiceRef = serviceRef
		pirr = preferredInstantRecordPath()
		if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
			if not pirr:
				pirr = ""
			self.session.open(MessageBox, _("Missing ") + "\n" + pirr +
						 "\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
			return
		if isStandardInfoBar(self):
			common = ((_("Add recording (stop after current event)"), "event"),
				(_("Add recording (indefinitely)"), "indefinitely"),
				(_("Add recording (enter recording duration)"), "manualduration"),
				(_("Add recording (enter recording endtime)"), "manualendtime"),)
		else:
			common = ()
		if self.isInstantRecordRunning():
			title =_("A recording is currently running.\nWhat do you want to do?")
			list = common + \
				((_("Change recording (duration)"), "changeduration"),
				(_("Change recording (add time)"), "addrecordingtime"),
				(_("Change recording (endtime)"), "changeendtime"),)
			list += ((_("Stop recording"), "stop"),)
			if config.usage.movielist_trashcan.value:
				list += ((_("Stop and delete recording"), "stopdelete"),)
			if len(self.recording) > 1:
				list += ((_("Stop all current recordings"), "stopall"),)
				if config.usage.movielist_trashcan.value:
					list += ((_("Stop and delete all current recordings"), "stopdeleteall"),)
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
			list += ((_("Do nothing"), "no"),)
		else:
			title=_("Start recording?")
			list = common
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
			if isStandardInfoBar(self):
				list += ((_("Do not record"), "no"),)
		if isStandardInfoBar(self) and self.timeshiftEnabled():
			list = list + ((_("Save timeshift file"), "timeshift"),
				(_("Save timeshift file in movie directory"), "timeshift_movie"))
			if self.currentEventTime() > 0:
				list += ((_("Save timeshift only for current event"), "timeshift_event"),)
		if list:
			self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=title, list=list)
		else:
			return 0
from Tools.ISO639 import LanguageCodes
class InfoBarAudioSelection:
	"""Binds the audio key to the AudioSelection screen."""
	def __init__(self):
		self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
			{
				"audioSelection": (self.audioSelection, _("Audio options...")),
			})
	def audioSelection(self):
		"""Open the audio track / options screen."""
		from Screens.AudioSelection import AudioSelection
		self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)
	def audioSelected(self, ret=None):
		# Selection is applied by the AudioSelection screen itself; just log.
		print "[infobar::audioSelected]", ret
class InfoBarSubserviceSelection:
	"""Sub-service (e.g. regional/alternate feeds) selection and quick-zap.

	Quick-zap keys are only enabled after the user has picked a subservice
	at least once, and are disabled again for services without subservices.
	"""
	def __init__(self):
		self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
			{
				"subserviceSelection": (self.subserviceSelection, _("Subservice list...")),
			})
		self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
			{
				"nextSubservice": (self.nextSubservice, _("Switch to next sub service")),
				"prevSubservice": (self.prevSubservice, _("Switch to previous sub service"))
			}, -1)
		self["SubserviceQuickzapAction"].setEnabled(False)
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
			})
		self.onClose.append(self.__removeNotifications)
		self.bsel = None
	def __removeNotifications(self):
		self.session.nav.event.remove(self.checkSubservicesAvail)
	def checkSubservicesAvail(self):
		"""Disable quick-zap when the playing service has no subservices."""
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		if not subservices or subservices.getNumberOfSubservices() == 0:
			self["SubserviceQuickzapAction"].setEnabled(False)
	def nextSubservice(self):
		self.changeSubservice(+1)
	def prevSubservice(self):
		self.changeSubservice(-1)
	def changeSubservice(self, direction):
		"""Zap to the neighbouring subservice (direction +1/-1, wraps around)."""
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		n = subservices and subservices.getNumberOfSubservices()
		if n and n > 0:
			selection = -1
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			idx = 0
			# Locate the currently playing subservice in the list.
			while idx < n:
				if subservices.getSubservice(idx).toString() == ref.toString():
					selection = idx
					break
				idx += 1
			if selection != -1:
				selection += direction
				if selection >= n:
					selection=0
				elif selection < 0:
					selection=n-1
				newservice = subservices.getSubservice(selection)
				if newservice.valid():
					del subservices
					del service
					self.session.nav.playService(newservice, False)
	def subserviceSelection(self):
		"""Open the ChoiceBox listing all subservices plus quick-zap /
		add-to-bouquet actions, pre-selecting the playing subservice."""
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		self.bouquets = self.servicelist.getBouquetList()
		n = subservices and subservices.getNumberOfSubservices()
		selection = 0
		if n and n > 0:
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			tlist = []
			idx = 0
			while idx < n:
				i = subservices.getSubservice(idx)
				if i.toString() == ref.toString():
					selection = idx
				tlist.append((i.getName(), i))
				idx += 1
			if self.bouquets and len(self.bouquets):
				keys = ["red", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
				if config.usage.multibouquet.value:
					tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
				else:
					tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
				# Three action rows were prepended -> shift the preselection.
				selection += 3
			else:
				tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
				keys = ["red", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
				selection += 2
			self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a sub service..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")
	def subserviceSelected(self, service):
		"""Handle the ChoiceBox result: quick-zap screen or direct zap."""
		del self.bouquets
		if not service is None:
			if isinstance(service[1], str):
				if service[1] == "quickzap":
					from Screens.SubservicesQuickzap import SubservicesQuickzap
					self.session.open(SubservicesQuickzap, service[2])
			else:
				self["SubserviceQuickzapAction"].setEnabled(True)
				self.session.nav.playService(service[1], False)
	def addSubserviceToBouquetCallback(self, service):
		"""CALLFUNC handler: add the picked subservice to a bouquet,
		asking which bouquet when more than one exists."""
		if len(service) > 1 and isinstance(service[1], eServiceReference):
			self.selectedSubservice = service
			if self.bouquets is None:
				cnt = 0
			else:
				cnt = len(self.bouquets)
			if cnt > 1: # show bouquet list
				self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
			elif cnt == 1: # add to only one existing bouquet
				self.addSubserviceToBouquet(self.bouquets[0][1])
				self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO)
	def bouquetSelClosed(self, confirmed):
		self.bsel = None
		del self.selectedSubservice
		if confirmed:
			self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO)
	def addSubserviceToBouquet(self, dest):
		"""Actually add the remembered subservice to bouquet *dest*."""
		self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
		if self.bsel:
			self.bsel.close(True)
		else:
			del self.selectedSubservice
class InfoBarRedButton:
	"""Red-button (HbbTV) handling: dispatches to registered callbacks."""
	def __init__(self):
		self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions",
			{
				"activateRedButton": (self.activateRedButton, _("Red button...")),
			})
		# Callback lists other components append to.
		self.onHBBTVActivation = [ ]
		self.onRedButtonActivation = [ ]
	def activateRedButton(self):
		"""Fire HbbTV callbacks when the service advertises an HbbTV URL."""
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
			for x in self.onHBBTVActivation:
				x()
		elif False: # TODO: other red button services
			# Dead branch kept as a placeholder for future red-button services.
			for x in self.onRedButtonActivation:
				x()
class InfoBarTimerButton:
	"""Binds the timer key to the timer list editor."""
	def __init__(self):
		self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions",
			{
				"timerSelection": (self.timerSelection, _("Timer selection...")),
			})
	def timerSelection(self):
		from Screens.TimerEdit import TimerEditList
		self.session.open(TimerEditList)
class InfoBarVmodeButton:
	"""Binds the video-mode key to the letterbox-zoom overlay."""
	def __init__(self):
		self["VmodeButtonActions"] = HelpableActionMap(self, "InfobarVmodeButtonActions",
			{
				"vmodeSelection": (self.vmodeSelection, _("Letterbox zoom")),
			})
	def vmodeSelection(self):
		self.session.open(VideoMode)
class VideoMode(Screen):
	"""Small overlay that cycles the display policy (config.av.policy_43 or
	policy_169 depending on the playing material's aspect) on each key press
	and auto-closes one second after the last press.
	"""
	def __init__(self, session):
		Screen.__init__(self, session)
		self["videomode"] = Label()
		self["actions"] = NumberActionMap( [ "InfobarVmodeButtonActions" ],
			{
				"vmodeSelection": self.selectVMode
			})
		self.Timer = eTimer()
		self.Timer.callback.append(self.quit)
		self.selectVMode()

	def selectVMode(self):
		"""Advance to the next policy value and restart the close timer."""
		policy = config.av.policy_43
		if self.isWideScreen():
			policy = config.av.policy_169
		idx = policy.choices.index(policy.value)
		idx = (idx + 1) % len(policy.choices)  # wrap around the choice list
		policy.value = policy.choices[idx]
		self["videomode"].setText(policy.value)
		self.Timer.start(1000, True)

	def isWideScreen(self):
		"""Return truthy when the playing material is widescreen.

		Fix: guard against no service playing — service.info() may be None
		(the line above already allows it), in which case the original
		raised AttributeError; now we just report non-widescreen.
		"""
		from Components.Converter.ServiceInfo import WIDESCREEN
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		return info and info.getInfo(iServiceInformation.sAspect) in WIDESCREEN

	def quit(self):
		"""Close the overlay (timer callback)."""
		self.Timer.stop()
		self.close()
class InfoBarAdditionalInfo:
	"""Exposes static Boolean sources for skins (record/timeshift/extension
	availability and key-hint flags)."""
	def __init__(self):
		# Recording/timeshift both require at least one hard disk.
		self["RecordingPossible"] = Boolean(fixed=harddiskmanager.HDDCount() > 0)
		self["TimeshiftPossible"] = self["RecordingPossible"]
		self["ExtensionsAvailable"] = Boolean(fixed=1)
		# TODO: these properties should be queried from the input device keymap
		self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
		self["ShowAudioOnYellow"] = Boolean(fixed=0)
		self["ShowRecordOnRed"] = Boolean(fixed=0)
class InfoBarNotifications:
	"""Drains the global Notifications queue whenever this screen executes,
	opening each queued notification as a dialog.

	Notification tuples are (callback, screen_class, args, kwargs, id);
	ZapError notifications get a special lightweight presentation that is
	dismissed by any key press.
	"""
	def __init__(self):
		self.onExecBegin.append(self.checkNotifications)
		Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
		self.onClose.append(self.__removeNotification)
	def __removeNotification(self):
		Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)
	def checkNotificationsIfExecing(self):
		# Only pop notifications while this screen is actually active.
		if self.execing:
			self.checkNotifications()
	def checkNotifications(self):
		"""Pop and display the oldest queued notification, if any."""
		notifications = Notifications.notifications
		if notifications:
			n = notifications[0]
			del notifications[0]
			cb = n[0]
			if n[3].has_key("onSessionOpenCallback"):
				n[3]["onSessionOpenCallback"]()
				del n[3]["onSessionOpenCallback"]
			if cb:
				dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
			elif not Notifications.current_notifications and n[4] == "ZapError":
				# ZapError: show as a borrowed dialog over the infobar,
				# closed by the next key press instead of a timeout.
				if n[3].has_key("timeout"):
					del n[3]["timeout"]
				n[3]["enable_input"] = False
				dlg = self.session.instantiateDialog(n[1], *n[2], **n[3])
				self.hide()
				dlg.show()
				self.notificationDialog = dlg
				# Bind at the highest priority so we see every key first.
				eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressNotification)
			else:
				dlg = self.session.open(n[1], *n[2], **n[3])
			# remember that this notification is currently active
			d = (n[4], dlg)
			Notifications.current_notifications.append(d)
			dlg.onClose.append(boundFunction(self.__notificationClosed, d))
	def closeNotificationInstantiateDialog(self):
		"""Tear down a ZapError-style notification dialog, if one is up."""
		if hasattr(self, "notificationDialog"):
			self.session.deleteDialog(self.notificationDialog)
			del self.notificationDialog
			eActionMap.getInstance().unbindAction('', self.keypressNotification)
	def keypressNotification(self, key, flag):
		# Any key-up/down with a truthy flag dismisses the ZapError dialog.
		if flag:
			self.closeNotificationInstantiateDialog()
	def __notificationClosed(self, d):
		Notifications.current_notifications.remove(d)
class InfoBarServiceNotifications:
def __init__(self):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEnd: self.serviceHasEnded
})
def serviceHasEnded(self):
print "service end!"
try:
self.setSeekState(self.SEEK_STATE_PLAY)
except:
pass
class InfoBarCueSheetSupport:
	"""Mixin adding cut-list ("cuesheet") support: jump marks, in/out cut
	bookkeeping and optional resume-from-last-position.  Each cut_list
	entry is a ``(pts, type)`` tuple."""
	# cut-list entry types (second element of each (pts, type) tuple)
	CUT_TYPE_IN = 0
	CUT_TYPE_OUT = 1
	CUT_TYPE_MARK = 2
	CUT_TYPE_LAST = 3
	# subclasses that play recordings enable this to offer resuming
	ENABLE_RESUME_SUPPORT = False
	def __init__(self, actionmap = "InfobarCueSheetActions"):
		self["CueSheetActions"] = HelpableActionMap(self, actionmap,
			{
				"jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
				"jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
				"toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
			}, prio=1)
		self.cut_list = [ ]
		self.is_closing = False
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evCuesheetChanged: self.downloadCuesheet,
			})
	def __serviceStarted(self):
		if self.is_closing:
			return
		print "new service started! trying to download cuts!"
		self.downloadCuesheet()
		if self.ENABLE_RESUME_SUPPORT:
			# prefer an explicit LAST mark from the cuesheet ...
			for (pts, what) in self.cut_list:
				if what == self.CUT_TYPE_LAST:
					last = pts
					break
			else:
				# ... otherwise fall back to the stored resume point
				last = getResumePoint(self.session)
			if last is None:
				return
			# only resume if at least 10 seconds ahead, or <10 seconds before the end.
			seekable = self.__getSeekable()
			if seekable is None:
				return # Should not happen?
			length = seekable.getLength() or (None,0)
			print "seekable.getLength() returns:", length
			# Hmm, this implies we don't resume if the length is unknown...
			# (900000 ticks = 10 s at the 90 kHz PTS clock, cf. l = last / 90000)
			if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
				self.resume_point = last
				l = last / 90000
				if "ask" in config.usage.on_movie_start.value or not length[1]:
					Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10, default="yes" in config.usage.on_movie_start.value)
				elif config.usage.on_movie_start.value == "resume":
					# TRANSLATORS: The string "Resuming playback" flashes for a moment
					# TRANSLATORS: at the start of a movie, when the user has selected
					# TRANSLATORS: "Resume from last position" as start behavior.
					# TRANSLATORS: The purpose is to notify the user that the movie starts
					# TRANSLATORS: in the middle somewhere and not from the beginning.
					# TRANSLATORS: (Some translators seem to have interpreted it as a
					# TRANSLATORS: question or a choice, but it is a statement.)
					Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)
	def playLastCB(self, answer):
		# callback of the "resume?" question / the resuming notification
		if answer == True:
			self.doSeek(self.resume_point)
		self.hideAfterResume()
	def hideAfterResume(self):
		if isinstance(self, InfoBarShowHide):
			self.hide()
	def __getSeekable(self):
		# seek interface of the running service, or None
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		return service.seek()
	def cueGetCurrentPosition(self):
		"""Return the current play position in PTS, or None if unseekable."""
		seek = self.__getSeekable()
		if seek is None:
			return None
		r = seek.getPlayPosition()
		if r[0]:
			# a non-zero first element signals an error
			return None
		return long(r[1])
	def cueGetEndCutPosition(self):
		"""Return the PTS of the currently effective OUT cut, else False."""
		ret = False
		isin = True
		for cp in self.cut_list:
			if cp[1] == self.CUT_TYPE_OUT:
				if isin:
					isin = False
					ret = cp[0]
			elif cp[1] == self.CUT_TYPE_IN:
				isin = True
		return ret
	def jumpPreviousNextMark(self, cmp, start=False):
		"""Seek to the nearest mark selected by ``cmp``; False if none."""
		current_pos = self.cueGetCurrentPosition()
		if current_pos is None:
			return False
		mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
		if mark is not None:
			pts = mark[0]
		else:
			return False
		self.doSeek(pts)
		return True
	def jumpPreviousMark(self):
		# we add 5 seconds, so if the play position is <5s after
		# the mark, the mark before will be used
		self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)
	def jumpNextMark(self):
		# no further mark: seek to (nearly) the end instead
		if not self.jumpPreviousNextMark(lambda x: x-90000):
			self.doSeek(-1)
	def getNearestCutPoint(self, pts, cmp=abs, start=False):
		"""Return the cut-list entry closest to ``pts`` under ``cmp``.

		``cmp`` maps a signed PTS difference to a score; negative scores
		are disregarded.  With ``start=True`` position 0 also competes.
		"""
		# can be optimized
		beforecut = True
		nearest = None
		bestdiff = -1
		instate = True
		if start:
			bestdiff = cmp(0 - pts)
			if bestdiff >= 0:
				nearest = [0, False]
		for cp in self.cut_list:
			if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
				beforecut = False
				if cp[1] == self.CUT_TYPE_IN: # Start is here, disregard previous marks
					diff = cmp(cp[0] - pts)
					if start and diff >= 0:
						nearest = cp
						bestdiff = diff
					else:
						nearest = None
						bestdiff = -1
			if cp[1] == self.CUT_TYPE_IN:
				instate = True
			elif cp[1] == self.CUT_TYPE_OUT:
				instate = False
			elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
				diff = cmp(cp[0] - pts)
				if instate and diff >= 0 and (nearest is None or bestdiff > diff):
					nearest = cp
					bestdiff = diff
		return nearest
	def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
		"""Add or remove a MARK at the current position (within tolerance)."""
		current_pos = self.cueGetCurrentPosition()
		if current_pos is None:
			print "not seekable"
			return
		nearest_cutpoint = self.getNearestCutPoint(current_pos)
		if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
			if onlyreturn:
				return nearest_cutpoint
			if not onlyadd:
				self.removeMark(nearest_cutpoint)
		elif not onlyremove and not onlyreturn:
			self.addMark((current_pos, self.CUT_TYPE_MARK))
		if onlyreturn:
			return None
	def addMark(self, point):
		# insort keeps cut_list sorted by pts
		insort(self.cut_list, point)
		self.uploadCuesheet()
		self.showAfterCuesheetOperation()
	def removeMark(self, point):
		self.cut_list.remove(point)
		self.uploadCuesheet()
		self.showAfterCuesheetOperation()
	def showAfterCuesheetOperation(self):
		if isinstance(self, InfoBarShowHide):
			self.doShow()
	def __getCuesheet(self):
		# cuesheet interface of the running service, or None
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		return service.cueSheet()
	def uploadCuesheet(self):
		"""Push the local cut_list to the service's cuesheet."""
		cue = self.__getCuesheet()
		if cue is None:
			print "upload failed, no cuesheet interface"
			return
		cue.setCutList(self.cut_list)
	def downloadCuesheet(self):
		"""Replace the local cut_list with the service's cuesheet."""
		cue = self.__getCuesheet()
		if cue is None:
			print "download failed, no cuesheet interface"
			self.cut_list = [ ]
		else:
			self.cut_list = cue.getCutList()
class InfoBarSummary(Screen):
	"""LCD summary screen for the regular infobar: clock, service name
	and current-event progress (clock blinks while recording)."""
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
			<convert type="ClockToText">WithSeconds</convert>
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
		<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
			<convert type="ServiceName">Name</convert>
		</widget>
		<widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
			<convert type="EventTime">Progress</convert>
		</widget>
	</screen>"""
	# for picon: (path="piconlcd" will use LCD picons)
	# 		<widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
	# 			<convert type="ServiceName">Reference</convert>
	# 		</widget>
class InfoBarSummarySupport:
	"""Mixin advertising InfoBarSummary as this screen's LCD summary."""
	def __init__(self):
		pass
	def createSummary(self):
		# Screen.createSummary() hook: return the summary screen class
		return InfoBarSummary
class InfoBarMoviePlayerSummary(Screen):
	"""LCD summary screen for the movie player: clock, service name and
	playback position (clock blinks while recording)."""
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
			<convert type="ClockToText">WithSeconds</convert>
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
		<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
			<convert type="ServiceName">Name</convert>
		</widget>
		<widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
			<convert type="ServicePosition">Position</convert>
		</widget>
	</screen>"""
class InfoBarMoviePlayerSummarySupport:
	"""Mixin advertising InfoBarMoviePlayerSummary as the LCD summary."""
	def __init__(self):
		pass
	def createSummary(self):
		# Screen.createSummary() hook: return the summary screen class
		return InfoBarMoviePlayerSummary
class InfoBarTeletextPlugin:
	"""Hooks an installed teletext plugin up to the TEXT key."""
	def __init__(self):
		self.teletext_plugin = None
		# if several teletext plugins are installed, the last one wins
		for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
			self.teletext_plugin = p
		if self.teletext_plugin is not None:
			self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
				{
					"startTeletext": (self.startTeletext, _("View teletext..."))
				})
		else:
			print "no teletext plugin found!"
	def startTeletext(self):
		# guard is redundant when the action map exists, but harmless
		self.teletext_plugin and self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService())
class InfoBarSubtitleSupport(object):
	"""Mixin wiring up subtitle selection and the on-screen subtitle
	window; re-enables cached subtitles when service info updates."""
	def __init__(self):
		object.__init__(self)
		self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions",
			{
				"subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")),
			})
		self.selected_subtitle = None
		if isStandardInfoBar(self):
			# the main infobar owns the single subtitle display dialog
			self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay)
		else:
			# other infobars (e.g. the movie player) reuse the main one
			from Screens.InfoBar import InfoBar
			self.subtitle_window = InfoBar.instance.subtitle_window
		self.subtitle_window.hide()
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceChanged,
				iPlayableService.evEnd: self.__serviceChanged,
				iPlayableService.evUpdatedInfo: self.__updatedInfo
			})
	def getCurrentServiceSubtitle(self):
		"""Return the current service's subtitle interface, or None."""
		service = self.session.nav.getCurrentService()
		return service and service.subtitle()
	def subtitleSelection(self):
		"""Open the subtitle selection screen if any subtitles exist."""
		subtitle = self.getCurrentServiceSubtitle()
		subtitlelist = subtitle and subtitle.getSubtitleList()
		if self.selected_subtitle or subtitlelist and len(subtitlelist)>0:
			from Screens.AudioSelection import SubtitleSelection
			self.session.open(SubtitleSelection, self)
		else:
			return 0
	def __serviceChanged(self):
		if self.selected_subtitle:
			self.selected_subtitle = None
			self.subtitle_window.hide()
	def __updatedInfo(self):
		if not self.selected_subtitle:
			subtitle = self.getCurrentServiceSubtitle()
			# BUGFIX: guard the dereference -- ``subtitle`` is None when no
			# service (or a service without subtitle support) is active, and
			# the old code raised AttributeError on getCachedSubtitle().
			cachedsubtitle = subtitle and subtitle.getCachedSubtitle()
			if cachedsubtitle:
				self.enableSubtitle(cachedsubtitle)
	def enableSubtitle(self, selectedSubtitle):
		"""Activate the given subtitle track, or hide subtitles if None."""
		subtitle = self.getCurrentServiceSubtitle()
		self.selected_subtitle = selectedSubtitle
		if subtitle and self.selected_subtitle:
			subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle)
			self.subtitle_window.show()
		else:
			if subtitle:
				subtitle.disableSubtitles(self.subtitle_window.instance)
			self.subtitle_window.hide()
	def restartSubtitle(self):
		if self.selected_subtitle:
			self.enableSubtitle(self.selected_subtitle)
class InfoBarServiceErrorPopupSupport:
	"""Shows a popup explaining why a zap/tune failed, and clears it again
	once tuning succeeds."""
	def __init__(self):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evTuneFailed: self.__tuneFailed,
				iPlayableService.evTunedIn: self.__serviceStarted,
				iPlayableService.evStart: self.__serviceStarted
			})
		self.__serviceStarted()
	def __serviceStarted(self):
		# tuned in / new service: clear any pending zap error
		self.closeNotificationInstantiateDialog()
		self.last_error = None
		Notifications.RemovePopup(id = "ZapError")
	def __tuneFailed(self):
		if not config.usage.hide_zap_errors.value or not config.usage.remote_fallback_enabled.value:
			service = self.session.nav.getCurrentService()
			info = service and service.info()
			error = info and info.getInfo(iServiceInformation.sDVBState)
			# without remote fallback, a misconfiguration / no-resources error
			# means nothing is playing any more
			if not config.usage.remote_fallback_enabled.value and (error == eDVBServicePMTHandler.eventMisconfiguration or error == eDVBServicePMTHandler.eventNoResources):
				self.session.nav.currentlyPlayingServiceReference = None
				self.session.nav.currentlyPlayingServiceOrGroup = None
			# suppress repeating the same error over and over again
			if error == self.last_error:
				error = None
			else:
				self.last_error = error
			# map the DVB state code to a user-visible message
			error = {
				eDVBServicePMTHandler.eventNoResources: _("No free tuner!"),
				eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"),
				eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"),
				eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"),
				eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"),
				eDVBServicePMTHandler.eventNewProgramInfo: None,
				eDVBServicePMTHandler.eventTuned: None,
				eDVBServicePMTHandler.eventSOF: None,
				eDVBServicePMTHandler.eventEOF: None,
				eDVBServicePMTHandler.eventMisconfiguration: _("Service unavailable!\nCheck tuner configuration!"),
			}.get(error) # returns None when the key is not in the dict
			if error and not config.usage.hide_zap_errors.value:
				self.closeNotificationInstantiateDialog()
				# only show the popup when the dish-movement dialog is idle
				if hasattr(self, "dishDialog") and not self.dishDialog.dishState():
					Notifications.AddPopup(text = error, type = MessageBox.TYPE_ERROR, timeout = 5, id = "ZapError")
class InfoBarPowersaver:
	"""Mixin implementing the inactivity timer (auto-standby) and the
	user-armed sleep timer."""
	def __init__(self):
		self.inactivityTimer = eTimer()
		self.inactivityTimer.callback.append(self.inactivityTimeout)
		self.restartInactiveTimer()
		self.sleepTimer = eTimer()
		self.sleepStartTime = 0
		self.sleepTimer.callback.append(self.sleepTimerTimeout)
		# observe every key press to reset the inactivity timer
		eActionMap.getInstance().bindAction('', -maxint - 1, self.keypress)
	def keypress(self, key, flag):
		if flag:
			self.restartInactiveTimer()
	def restartInactiveTimer(self):
		# 'time' here is seconds of allowed inactivity (0 disables the timer)
		time = abs(int(config.usage.inactivity_timer.value))
		if time:
			self.inactivityTimer.startLongTimer(time)
		else:
			self.inactivityTimer.stop()
	def inactivityTimeout(self):
		"""Inactivity elapsed: honor the configured block times, then ask
		the user (or go straight to standby when already in standby)."""
		if config.usage.inactivity_timer_blocktime.value:
			curtime = localtime(time())
			if curtime.tm_year > 1970: #check if the current time is valid
				curtime = (curtime.tm_hour, curtime.tm_min, curtime.tm_sec)
				begintime = tuple(config.usage.inactivity_timer_blocktime_begin.value)
				endtime = tuple(config.usage.inactivity_timer_blocktime_end.value)
				begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin.value)
				endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end.value)
				# each window may wrap around midnight (begin > end)
				if begintime <= endtime and (curtime >= begintime and curtime < endtime) or begintime > endtime and (curtime >= begintime or curtime < endtime) or config.usage.inactivity_timer_blocktime_extra.value and\
				(begintime_extra <= endtime_extra and (curtime >= begintime_extra and curtime < endtime_extra) or begintime_extra > endtime_extra and (curtime >= begintime_extra or curtime < endtime_extra)):
					# inside a block window: re-arm until the window ends
					duration = (endtime[0]*3600 + endtime[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
					if duration:
						if duration < 0:
							duration += 24*3600
						self.inactivityTimer.startLongTimer(duration)
						return
		if Screens.Standby.inStandby:
			self.inactivityTimeoutCallback(True)
		else:
			# NOTE(review): "will got to standby" looks like a typo for
			# "will go to standby" -- the string is a translation source
			# (_()), so fixing it also requires updating the .po catalogs.
			message = _("Your receiver will got to standby due to inactivity.") + "\n" + _("Do you want this?")
			self.session.openWithCallback(self.inactivityTimeoutCallback, MessageBox, message, timeout=60, simple=True, default=False, timeout_default=True)
	def inactivityTimeoutCallback(self, answer):
		if answer:
			self.goStandby()
		else:
			print "[InfoBarPowersaver] abort"
	def sleepTimerState(self):
		"""Return the remaining sleep time in minutes (0 when inactive)."""
		if self.sleepTimer.isActive():
			return (self.sleepStartTime - time()) / 60
		return 0
	def setSleepTimer(self, sleepTime):
		"""Arm the sleep timer for ``sleepTime`` seconds, or disable it
		when ``sleepTime`` is falsy; shows a confirmation popup."""
		print "[InfoBarPowersaver] set sleeptimer", sleepTime
		if sleepTime:
			m = abs(sleepTime / 60)
			message = _("The sleep timer has been activated.") + "\n" + _("And will put your receiver in standby over ") + ngettext("%d minute", "%d minutes", m) % m
			self.sleepTimer.startLongTimer(sleepTime)
			self.sleepStartTime = time() + sleepTime
		else:
			message = _("The sleep timer has been disabled.")
			self.sleepTimer.stop()
		Notifications.AddPopup(message, type = MessageBox.TYPE_INFO, timeout = 5)
	def sleepTimerTimeout(self):
		# ask before going to standby unless we already are in standby
		if not Screens.Standby.inStandby:
			list = [ (_("Yes"), True), (_("Extend sleeptimer 15 minutes"), "extend"), (_("No"), False) ]
			# NOTE(review): "will got to stand by" -- same typo as above.
			message = _("Your receiver will got to stand by due to the sleeptimer.")
			message += "\n" + _("Do you want this?")
			self.session.openWithCallback(self.sleepTimerTimeoutCallback, MessageBox, message, timeout=60, simple=True, list=list, default=False, timeout_default=True)
	def sleepTimerTimeoutCallback(self, answer):
		if answer == "extend":
			print "[InfoBarPowersaver] extend sleeptimer"
			self.setSleepTimer(900)
		elif answer:
			self.goStandby()
		else:
			print "[InfoBarPowersaver] abort"
			self.setSleepTimer(0)
	def goStandby(self):
		if not Screens.Standby.inStandby:
			print "[InfoBarPowersaver] goto standby"
			self.session.open(Screens.Standby.Standby)
class InfoBarHDMI:
	"""Toggle between the HDMI-in input (service type 8192) and the
	previously selected service, in PiP or in the main window."""
	def HDMIIn(self):
		slist = self.servicelist
		if slist.dopipzap:
			# PiP has the zap focus: toggle HDMI-in inside the PiP window
			curref = self.session.pip.getCurrentService()
			if curref and curref.type != 8192:
				self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
			else:
				self.session.pip.playService(slist.servicelist.getCurrent())
		else:
			curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			if curref and curref.type != 8192:
				# if a media file is playing, remember its resume point first
				if curref and curref.type != -1 and os.path.splitext(curref.toString().split(":")[10])[1].lower() in AUDIO_EXTENSIONS.union(MOVIE_EXTENSIONS, DVD_EXTENSIONS):
					setResumePoint(self.session)
				self.session.nav.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
			elif isStandardInfoBar(self):
				self.session.nav.playService(slist.servicelist.getCurrent())
			else:
				# non-standard infobar (e.g. movie player): restore its service
				self.session.nav.playService(self.cur_service)
| gpl-2.0 |
desmovalvo/virtualsib-part2bis | tools/explo2/SS_HelloWorld_reactiveNode.py | 5 | 1712 | import Node
import time
import sys
# Create a node instance.
# The programmer can give any name; the infrastructure will assign unique names(?).
node = Node.ParticipantNode("HelloWorld reactive")
# Discover Smart Spaces around you.
# Use the technologies used at the "vertical business domain",
# e.g. mDNS, UPnP, UDDI, Bluetooth SDP.
# Connect to the selected smart space.
# In this simple example we use localhost:
#ss_handle = ("X", (Node.TCPConnector, ("127.0.0.1", 10011)))
ss_handle = node.discover()
print ss_handle
if not node.join(ss_handle):
    sys.exit('Could not join to Smart Space')
print "--- Member of SS:", node.member_of
# end connenct
# Class structure to be called as subscription fires
class MsgHandler:
    """Subscription callback: prints the triples as they arrive, then
    closes the subscription, leaves the smart space and exits."""
    def __init__(self):
        self.results = []
    def handle(self, added, removed):
        # 'added'/'removed' are the triples that entered/left the space
        print "Newly created:", added
        self.results.extend(added)
        for i in self.results:
            print "State of the space:", str(i)
            print str(i[0][2])
        print "HelloWorld"
        # tear down: uses the module-level node / rs / ss_handle globals
        node.CloseSubscribeTransaction(rs)
        print "Closing reactive session"
        node.leave(ss_handle)
        sys.exit()
# Create a reactive-state (reactive) session with the
# smart space
rs = node.CreateSubscribeTransaction(ss_handle)
result = rs.subscribe_rdf([(('God', 'hasCreated', 'World'),'literal')], MsgHandler())
# a non-empty initial result means the triple already exists
if result != []:
    print "It seem The God has already done his job thus..."
    print "HelloWorld"
    node.CloseSubscribeTransaction(rs)
    print "Unsubscribed"
    node.leave(ss_handle)
inp = raw_input("Press any key if you are bored to wait\n")
node.CloseSubscribeTransaction(rs)
print "Unsubscribed"
node.leave(ss_handle)
sys.exit()
| lgpl-3.0 |
huguesv/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py | 13 | 2322 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
@six.add_metaclass(abc.ABCMeta)
class Ed448PublicKey(object):
@classmethod
def from_public_bytes(cls, data):
from cryptography.hazmat.backends.openssl.backend import backend
if not backend.ed448_supported():
raise UnsupportedAlgorithm(
"ed448 is not supported by this version of OpenSSL.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM
)
return backend.ed448_load_public_bytes(data)
@abc.abstractmethod
def public_bytes(self, encoding, format):
"""
The serialized bytes of the public key.
"""
@abc.abstractmethod
def verify(self, signature, data):
"""
Verify the signature.
"""
@six.add_metaclass(abc.ABCMeta)
class Ed448PrivateKey(object):
    """Abstract interface for an Ed448 private key."""
    @classmethod
    def generate(cls):
        # backend imported locally (NOTE(review): presumably to avoid a
        # circular import with the openssl backend -- confirm)
        from cryptography.hazmat.backends.openssl.backend import backend
        if not backend.ed448_supported():
            raise UnsupportedAlgorithm(
                "ed448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM
            )
        return backend.ed448_generate_key()
    @classmethod
    def from_private_bytes(cls, data):
        from cryptography.hazmat.backends.openssl.backend import backend
        if not backend.ed448_supported():
            raise UnsupportedAlgorithm(
                "ed448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM
            )
        return backend.ed448_load_private_bytes(data)
    @abc.abstractmethod
    def public_key(self):
        """
        The Ed448PublicKey derived from the private key.
        """
    @abc.abstractmethod
    def sign(self, data):
        """
        Signs the data.
        """
    @abc.abstractmethod
    def private_bytes(self, encoding, format, encryption_algorithm):
        """
        The serialized bytes of the private key.
        """
| apache-2.0 |
jswope00/GAI | cms/djangoapps/contentstore/features/component.py | 2 | 6265 | # pylint: disable=C0111
# pylint: disable=W0621
# Lettuce formats proposed definitions for unimplemented steps with the
# argument name "step" instead of "_step" and pylint does not like that.
# pylint: disable=W0613
from lettuce import world, step
from nose.tools import assert_true, assert_in # pylint: disable=E0611
DISPLAY_NAME = "Display Name"
@step(u'I add this type of single step component:$')
def add_a_single_step_component(step):
    """Create every single-step component (Discussion or Video) listed
    in the scenario's hash table."""
    for row in step.hashes:
        name = row['Component']
        assert_in(name, ['Discussion', 'Video'])
        category = name.lower()
        world.create_component_instance(step=step, category=category)
@step(u'I see this type of single step component:$')
def see_a_single_step_component(step):
    """Verify that each listed single-step component rendered in the unit."""
    for row in step.hashes:
        name = row['Component']
        assert_in(name, ['Discussion', 'Video'])
        selector = 'div.xmodule_{}Module'.format(name)
        assert_true(world.is_css_present(selector),
                    "{} couldn't be found".format(name))
@step(u'I add this type of( Advanced)? (HTML|Problem) component:$')
def add_a_multi_step_component(step, is_advanced, category):
    """Create each HTML/Problem component listed in the scenario table."""
    for step_hash in step.hashes:
        world.create_component_instance(
            step=step,
            category='{}'.format(category.lower()),
            component_type=step_hash['Component'],
            # is_advanced is the captured "( Advanced)?" group (None or ' Advanced')
            is_advanced=bool(is_advanced),
        )
@step(u'I see (HTML|Problem) components in this order:')
def see_a_multi_step_component(step, category):
    """Check that the rendered components match the table, in order.

    HTML components are matched by a known HTML fragment; Problem
    components by their upper-cased name in the rendered text.
    """
    # Wait for all components to finish rendering
    selector = 'li.component div.xblock-student_view'
    world.wait_for(lambda _: len(world.css_find(selector)) == len(step.hashes))
    for idx, step_hash in enumerate(step.hashes):
        if category == 'HTML':
            # expected HTML fragment per HTML component template
            html_matcher = {
                'Text':
                    '\n    \n',
                'Announcement':
                    '<p> Words of encouragement! This is a short note that most students will read. </p>',
                'Zooming Image':
                    '<h2>ZOOMING DIAGRAMS</h2>',
                'E-text Written in LaTeX':
                    '<h2>Example: E-text page</h2>',
            }
            actual_html = world.css_html(selector, index=idx)
            assert_in(html_matcher[step_hash['Component']], actual_html)
        else:
            actual_text = world.css_text(selector, index=idx)
            assert_in(step_hash['Component'].upper(), actual_text)
@step(u'I see a "([^"]*)" Problem component$')
def see_a_problem_component(step, category):
    """Verify a Problem component of the given category is displayed."""
    component_css = 'div.xmodule_CapaModule'
    assert_true(world.is_css_present(component_css),
                'No problem was added to the unit.')
    problem_css = 'li.component div.xblock-student_view'
    # problem titles are rendered upper-cased
    actual_text = world.css_text(problem_css)
    assert_in(category.upper(), actual_text)
@step(u'I add a "([^"]*)" "([^"]*)" component$')
def add_component_category(step, component, category):
    """Dispatch to the matching 'I add this type of ... component:' step.

    Consistency fix: use nose's assert_in (imported at module level and
    used by the other steps) instead of a bare assert, which also gives a
    useful failure message.
    """
    assert_in(category, ('single step', 'HTML', 'Problem', 'Advanced Problem'))
    given_string = 'I add this type of {} component:'.format(category)
    step.given('{}\n{}\n{}'.format(given_string, '|Component|', '|{}|'.format(component)))
@step(u'I delete all components$')
def delete_all_components(step):
    """Count the components in the unit and delete them all."""
    count = len(world.css_find('ol.components li.component'))
    # use str.format for consistency with the rest of this file
    step.given('I delete "{}" component'.format(count))
@step(u'I delete "([^"]*)" component$')
def delete_components(step, number):
    """Delete ``number`` components, confirming the warning prompt for each
    and waiting for the save notification to clear."""
    world.wait_for_xmodule()
    delete_btn_css = 'a.delete-button'
    prompt_css = 'div#prompt-warning'
    btn_css = '{} a.button.action-primary'.format(prompt_css)
    saving_mini_css = 'div#page-notification .wrapper-notification-mini'
    for _ in range(int(number)):
        world.css_click(delete_btn_css)
        assert_true(
            world.is_css_present('{}.is-shown'.format(prompt_css)),
            msg='Waiting for the confirmation prompt to be shown')
        # Pressing the button via css was not working reliably for the last component
        # when run in Chrome.
        # BUGFIX: compare with `==` -- `is` on a string literal only worked
        # by CPython's small-string interning accident.
        if world.browser.driver_name == 'Chrome':
            world.browser.execute_script("$('{}').click()".format(btn_css))
        else:
            world.css_click(btn_css)
        # Wait for the saving notification to pop up then disappear
        if world.is_css_present('{}.is-shown'.format(saving_mini_css)):
            world.css_find('{}.is-hiding'.format(saving_mini_css))
@step(u'I see no components')
def see_no_components(step):
    """Assert the unit contains no components.

    Consistency fix: parameter renamed from ``steps`` to ``step`` to match
    every other step definition in this file (lettuce passes it
    positionally, so callers are unaffected).
    """
    assert world.is_css_not_present('li.component')
@step(u'I delete a component')
def delete_one_component(step):
    """Click the delete button of the first component."""
    world.css_click('a.delete-button')
@step(u'I edit and save a component')
def edit_and_save_component(step):
    """Open the component editor and immediately save it."""
    world.css_click('.edit-button')
    world.css_click('.save-button')
@step(u'I duplicate the (first|second|third) component$')
def duplicated_component(step, ordinal):
    """Click the duplicate button on the component at the given ordinal."""
    position = {
        "first": 0,
        "second": 1,
        "third": 2,
    }[ordinal]
    world.css_click('a.duplicate-button', position)
@step(u'I see a Problem component with display name "([^"]*)" in position "([^"]*)"$')
def see_component_in_position(step, display_name, index):
    """Wait until the problem at ``index`` shows the given display name."""
    component_css = 'div.xmodule_CapaModule'
    def find_problem(_driver):
        # display names are rendered upper-cased
        return world.css_text(component_css, int(index)).startswith(display_name.upper())
    world.wait_for(find_problem, timeout_msg='Did not find the duplicated problem')
@step(u'I see the display name is "([^"]*)"')
def check_component_display_name(step, display_name):
    """Assert the component header shows exactly the given display name."""
    label = world.css_text(".component-header")
    assert display_name == label
@step(u'I change the display name to "([^"]*)"')
def change_display_name(step, display_name):
    """Set the Display Name setting via the component editor and save."""
    world.edit_component_and_select_settings()
    index = world.get_setting_entry_index(DISPLAY_NAME)
    world.set_field_value(index, display_name)
    world.save_component()
@step(u'I unset the display name')
def unset_display_name(step):
    """Revert the Display Name setting to its default and save."""
    world.edit_component_and_select_settings()
    world.revert_setting_entry(DISPLAY_NAME)
    world.save_component()
| agpl-3.0 |
dreibh/planetlab-lxc-plcapi | PLC/Methods/UpdateNodeGroup.py | 1 | 1428 | from PLC.Faults import *
from PLC.Method import Method
from PLC.Parameter import Parameter, Mixed
from PLC.NodeGroups import NodeGroup, NodeGroups
from PLC.Auth import Auth
can_update = lambda field_value: field_value[0] in ['groupname','value']
class UpdateNodeGroup(Method):
    """
    Updates a custom node group.

    Returns 1 if successful, faults otherwise.
    """

    roles = ['admin']

    # only the user-updatable fields are accepted as input
    # (idiom: dict() consumes the filter iterator directly -- the
    # intermediate list() materializations were redundant)
    nodegroup_fields = dict(filter(can_update, NodeGroup.fields.items()))

    accepts = [
        Auth(),
        Mixed(NodeGroup.fields['nodegroup_id'],
              NodeGroup.fields['groupname']),
        nodegroup_fields
        ]

    returns = Parameter(int, '1 if successful')

    def call(self, auth, nodegroup_id_or_name, nodegroup_fields):
        """Apply the updatable fields to the given node group.

        Raises PLCInvalidArgument if the node group does not exist.
        """
        # drop any non-updatable keys the caller may have passed
        nodegroup_fields = dict(filter(can_update, nodegroup_fields.items()))

        # Get nodegroup information
        nodegroups = NodeGroups(self.api, [nodegroup_id_or_name])
        if not nodegroups:
            raise PLCInvalidArgument("No such nodegroup %r"%nodegroup_id_or_name)
        nodegroup = nodegroups[0]

        nodegroup.update(nodegroup_fields)
        nodegroup.sync()

        # Logging variables
        self.event_objects = {'NodeGroup': [nodegroup['nodegroup_id']]}
        self.message = 'Node group %d updated: %s' % \
                (nodegroup['nodegroup_id'], ", ".join(list(nodegroup_fields.keys())))

        return 1
| bsd-3-clause |
Dino0631/RedRain-Bot | lib/youtube_dl/extractor/rmcdecouverte.py | 31 | 1704 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from .brightcove import BrightcoveLegacyIE
from ..compat import (
compat_parse_qs,
compat_urlparse,
)
class RMCDecouverteIE(InfoExtractor):
    """Extractor for RMC Decouverte replay pages (delegates playback to
    the Brightcove extractor)."""
    _VALID_URL = r'https?://rmcdecouverte\.bfmtv\.com/mediaplayer-replay.*?\bid=(?P<id>\d+)'
    _TEST = {
        'url': 'http://rmcdecouverte.bfmtv.com/mediaplayer-replay/?id=13502&title=AQUAMEN:LES%20ROIS%20DES%20AQUARIUMS%20:UN%20DELICIEUX%20PROJET',
        'info_dict': {
            'id': '5419055995001',
            'ext': 'mp4',
            'title': 'UN DELICIEUX PROJET',
            'description': 'md5:63610df7c8b1fc1698acd4d0d90ba8b5',
            'uploader_id': '1969646226001',
            'upload_date': '20170502',
            'timestamp': 1493745308,
        },
        'params': {
            'skip_download': True,
        },
        'skip': 'only available for a week',
    }
    BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/1969646226001/default_default/index.html?videoId=%s'

    def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        # prefer the legacy embed URL (carries the id in its query string);
        # otherwise fall back to the data attribute in the page markup
        legacy_url = BrightcoveLegacyIE._extract_brightcove_url(webpage)
        if not legacy_url:
            brightcove_id = self._search_regex(
                r'data-video-id=["\'](\d+)', webpage, 'brightcove id')
        else:
            query = compat_parse_qs(compat_urlparse.urlparse(legacy_url).query)
            brightcove_id = query['@videoPlayer'][0]
        return self.url_result(
            self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id, 'BrightcoveNew',
            brightcove_id)
| gpl-3.0 |
USGSDenverPychron/pychron | pychron/canvas/canvas2D/base_data_canvas.py | 1 | 7538 | # ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# =============enthought library imports=======================
from chaco.api import LinePlot, LinearMapper, DataView, ArrayDataSource
from chaco.tools.api import ZoomTool, PanTool
from enable.api import Pointer
from traits.api import Tuple, Bool, on_trait_change
# =============standard library imports ========================
from numpy import hstack
# =============local library imports ==========================
class BaseDataCanvas(DataView):
    """A chaco DataView preconfigured for use as a 2D data canvas.

    Adds convenience methods for colormap/line plots, view-range
    management, pan/zoom tools, and primitive line-drawing helpers for
    subclasses' draw hooks.
    """
    unified_draw = True

    # Data extents (min, max) seeding the hidden range-defining plot.
    x_range = Tuple
    y_range = Tuple
    # Initially visible window onto the data.
    view_x_range = Tuple
    view_y_range = Tuple

    # Cursor shapes available to subclasses.
    select_pointer = Pointer('hand')
    normal_pointer = Pointer('arrow')
    cross_pointer = Pointer('cross')

    show_axes = Bool(True)
    show_grids = Bool(True)
    use_zoom = Bool(True)
    use_pan = Bool(True)

    # Most recently created line plot (managed by line_plot/reset_plots).
    plot = None

    def cmap_plot(self, z):
        """Add an image plot of the 2D array *z* using the 'hot' colormap.

        Returns the ArrayPlotData so callers can update 'cmapdata' later.
        """
        # Imported lazily so canvas construction stays cheap when no
        # colormap plot is ever requested.
        from chaco.array_plot_data import ArrayPlotData
        from chaco.plot import Plot
        from chaco.default_colormaps import color_map_name_dict

        pd = ArrayPlotData()
        pd.set_data('cmapdata', z)

        p = Plot(pd, padding=0)
        p.img_plot('cmapdata',
                   xbounds=(-25, 25),
                   ybounds=(-25, 25),
                   colormap=color_map_name_dict['hot'])
        self.add(p)
        return pd

    def line_plot(self, x, y, new_plot=True):
        """Plot (x, y) as a new LinePlot, or append the point to the
        current plot when ``new_plot`` is False and a plot exists.

        x and y may be scalars or sequences when a new plot is created;
        scalars are expected when appending.
        """
        if self.plot is None or new_plot:
            if isinstance(x, (float, int)):
                x = [x]
            if isinstance(y, (float, int)):
                y = [y]
            self.plot = LinePlot(
                index=ArrayDataSource(x),
                value=ArrayDataSource(y),
                index_mapper=LinearMapper(range=self.index_range),
                value_mapper=LinearMapper(range=self.value_range))
            self.add(self.plot)
        else:
            # Append the new point to the existing plot's data sources.
            datax = self.plot.index.get_data()
            datay = self.plot.value.get_data()
            nx = hstack((datax, [x]))
            ny = hstack((datay, [y]))
            self.plot.index.set_data(nx)
            self.plot.value.set_data(ny)

    def reset_plots(self):
        """Forget the current line plot and remove components."""
        self.plot = None
        # NOTE(review): [:1] removes only the first component; confirm
        # whether [:] (iterate a copy of all components) was intended.
        for c in self.components[:1]:
            self.remove(c)
        self.request_redraw()

    def __init__(self, *args, **kw):
        """Set up default ranges, the hidden range-defining plot, the
        pan/zoom tools, and axis/grid visibility."""
        super(BaseDataCanvas, self).__init__(*args, **kw)
        if 'x_range' not in kw:
            self.x_range = (-25, 25)
        if 'y_range' not in kw:
            self.y_range = (-25, 25)
        if 'view_x_range' not in kw:
            self.view_x_range = (-25, 25)
        if 'view_y_range' not in kw:
            self.view_y_range = (-25, 25)

        # An invisible LinePlot whose data sources seed the canvas's
        # index/value ranges.
        plot = LinePlot
        sp = plot(index=ArrayDataSource(self.y_range),
                  value=ArrayDataSource(self.x_range),
                  index_mapper=LinearMapper(range=self.index_range),
                  value_mapper=LinearMapper(range=self.value_range))

        self.index_range.sources.append(sp.index)
        self.value_range.sources.append(sp.value)
        sp.visible = False
        self.add(sp)

        if self.use_zoom:
            self.add_zoom()
        if self.use_pan:
            self.add_pan()

        self.index_mapper.on_trait_change(self.update, 'updated')
        self.value_mapper.on_trait_change(self.update, 'updated')

        # set the view range
        self.set_mapper_limits('x', self.view_x_range)
        self.set_mapper_limits('y', self.view_y_range)

        self.value_axis.visible = self.show_axes
        self.index_axis.visible = self.show_axes
        self.x_grid.visible = self.show_grids
        self.y_grid.visible = self.show_grids

    @on_trait_change('view_x_range')
    def _update_xrange(self):
        self.set_mapper_limits('x', self.view_x_range)

    @on_trait_change('view_y_range')
    def _update_yrange(self):
        self.set_mapper_limits('y', self.view_y_range)

    @on_trait_change('show_grids')
    def change_grid_visibility(self):
        try:
            self.x_grid.visible = self.show_grids
            self.y_grid.visible = self.show_grids
            self.request_redraw()
        except AttributeError:
            # Grids may not exist yet during construction.
            pass

    def set_mapper_limits(self, mapper, limits, pad=0):
        """Set the visible (low, high) window of the 'x' or 'y' mapper,
        optionally expanded by *pad* on both sides."""
        mapper = getattr(self, '{}_mapper'.format(mapper))
        if mapper is not None:
            mapper.range.low_setting = limits[0] - pad
            mapper.range.high_setting = limits[1] + pad
            self.request_redraw()

    def get_mapper_limits(self, mapper):
        """Return the (low, high) window of the 'x' or 'y' mapper."""
        mapper = getattr(self, '{}_mapper'.format(mapper))
        return mapper.range.low, mapper.range.high

    def update(self, *args, **kw):
        """Hook invoked when either mapper fires 'updated'; subclasses
        override."""
        pass

    def add_pan(self):
        """Attach a PanTool to the canvas."""
        p = PanTool(self)
        self.tools.append(p)

    def add_zoom(self):
        """Attach a box-mode ZoomTool overlay to the canvas."""
        z = ZoomTool(component=self, always_on=False, tool_mode='box',
                     max_zoom_out_factor=1,
                     max_zoom_in_factor=10000)
        self.overlays.append(z)

    def get_wh(self, *args):
        """Public wrapper around _get_wh."""
        return self._get_wh(*args)

    def _get_wh(self, w, h):
        """Convert a (w, h) extent in data space to a screen-space
        width/height pair."""
        wh, oo = self.map_screen([(w, h), (0, 0)])
        w = wh[0] - oo[0]
        h = wh[1] - oo[1]
        return w, h

    def _vertical_line(self, gc, x, y1, y2, color=(0, 0, 0)):
        """Draw a vertical segment at *x* from *y1* to *y2*."""
        # Fix: the helper is named _line_segment; the previous
        # self.line_segment call raised AttributeError when used.
        self._line_segment(gc, (x, y1), (x, y2), color)

    def _horizontal_line(self, gc, y, x1, x2, color=(0, 0, 0)):
        """Draw a horizontal segment at *y* from *x1* to *x2*."""
        # Fix: call the defined _line_segment helper (was self.line_segment).
        self._line_segment(gc, (x1, y), (x2, y), color)

    def _line_segment(self, gc, p1, p2, color=None):
        """Stroke a straight line from p1 to p2, optionally setting the
        stroke color first."""
        if color is not None:
            gc.set_stroke_color(color)

        gc.move_to(*p1)
        gc.line_to(*p2)
        gc.draw_path()
# def _draw_underlay(self, gc, *args, **kw):
# """
# """
# pass
#
# def _draw_underlay(self, *args, **kw):
# super(BaseDataCanvas, self)._draw_underlay(*args, **kw)
# self._draw_hook(*args, **kw)
# def draw(self, *args, **kw):
# """
# """
#
# super(BaseDataCanvas, self).draw(*args, **kw)
# self._draw_hook(*args, **kw)
# ====================EOF==================
| apache-2.0 |
hfp/tensorflow-xsmm | tensorflow/compiler/tests/reduce_window_test.py | 22 | 3522 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for xla.reduce_window."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.compiler.tf2xla.python import xla
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import googletest
class ReduceWindowTest(xla_test.XLATestCase):
  """Test cases for xla.reduce_window."""

  def _reduce_window(self, operand, init, reducer, **kwargs):
    # Helper: build xla.reduce_window over a placeholder inside the XLA
    # test scope, then evaluate it with *operand* fed in and return the
    # resulting ndarray.
    with self.cached_session():
      placeholder = array_ops.placeholder(operand.dtype)
      with self.test_scope():
        output = xla.reduce_window(placeholder, init, reducer, **kwargs)
      return output.eval(feed_dict={placeholder: operand})

  def testReduceWindow(self):
    # TODO(b/77644762): float16 and float64 ReduceWindow are unimplemented.
    # Only exercise the dtypes this backend supports AND XLA implements.
    for dtype in set(self.numeric_types).intersection(
        set([dtypes.bfloat16.as_numpy_dtype, np.float32])):

      # Reducers must be Defun-wrapped so XLA receives a typed computation.
      @function.Defun(dtype, dtype)
      def sum_reducer(x, y):
        return x + y

      @function.Defun(dtype, dtype)
      def mul_reducer(x, y):
        return x * y

      # 1-D sum: window of 2, default stride 1.
      self.assertAllClose(
          np.array([3, 5, 7, 9, 11, 13], dtype=dtype),
          self._reduce_window(
              np.array([1, 2, 3, 4, 5, 6, 7], dtype=dtype),
              0.0,
              sum_reducer,
              window_dimensions=[2]))

      # 1-D sum: window of 2, stride 2 (non-overlapping windows).
      self.assertAllClose(
          np.array([3, 7, 11], dtype=dtype),
          self._reduce_window(
              np.array([1, 2, 3, 4, 5, 6, 7], dtype=dtype),
              0.0,
              sum_reducer,
              window_dimensions=[2],
              window_strides=[2]))

      # 1-D sum: window of 1, stride 3 (sub-sampling).
      self.assertAllClose(
          np.array([1, 4, 7], dtype=dtype),
          self._reduce_window(
              np.array([1, 2, 3, 4, 5, 6, 7], dtype=dtype),
              0.0,
              sum_reducer,
              window_dimensions=[1],
              window_strides=[3]))

      # 2-D product: 2x2 windows, stride 1, multiplicative identity init.
      self.assertAllClose(
          np.array([[24, 36, 24], [96, 0, 0]], dtype=dtype),
          self._reduce_window(
              np.array([[1, 2, 3, 4], [4, 3, 2, 1], [2, 4, 0, 1]], dtype=dtype),
              1.0,
              mul_reducer,
              window_dimensions=[2, 2],
              window_strides=[1, 1]))

      # 2-D sum with explicit padding on both dimensions.
      self.assertAllClose(
          np.array([[0, 0, 0], [5, 10, 5], [2, 4, 1], [0, 0, 0]], dtype=dtype),
          self._reduce_window(
              np.array([[1, 2, 3, 4], [4, 3, 2, 1], [2, 4, 0, 1]], dtype=dtype),
              0.0,
              sum_reducer,
              window_dimensions=[2, 2],
              window_strides=[2, 2],
              padding=[[2, 3], [1, 2]]))
# Standard Google-test entry point.
if __name__ == '__main__':
  googletest.main()
| apache-2.0 |
tokibito/django-csvutils | csvutils/forms.py | 1 | 2017 | #:coding=utf-8:
import csv
from django import forms
from django.forms.formsets import (
TOTAL_FORM_COUNT, INITIAL_FORM_COUNT, MAX_NUM_FORM_COUNT,
DELETION_FIELD_NAME, BaseFormSet, formset_factory
)
from django.forms.models import modelformset_factory, BaseModelFormSet
from utils import UnicodeReader
def csv_formset_factory(csvfile, form, formset=None, can_delete=False, update=False, encoding=None):
    """
    Creates a formset instance that can be used to validate the given CSV file.

    csvfile = request.FILES["uploaded_file"]
    formset_ins = csv_formset_factory(csvfile, form=MyModelForm)
    if formset_ins.is_valid():
        my_model_list = formset_ins.save()

    :param csvfile: file-like object with one CSV row per form.
    :param form: a Form or ModelForm subclass used to validate each row.
    :param formset: optional formset base class; defaults to
        BaseModelFormSet for model forms, BaseFormSet otherwise.
    :param can_delete: expect a trailing boolean "delete" column per row.
    :param update: treat rows as existing objects (initial forms) rather
        than new ones.
    :param encoding: character encoding of the CSV file.
    """
    # Plain Form subclasses have no _meta attribute, so probe defensively:
    # the previous `hasattr(form._meta, "model")` evaluated form._meta
    # first and raised AttributeError before the else branch could run.
    if hasattr(getattr(form, "_meta", None), "model"):  # ModelForm
        formset_cls = modelformset_factory(
            model=form._meta.model,
            form=form,
            formset=formset or BaseModelFormSet,
            extra=0,
            can_delete=can_delete,
        )
    else:
        formset_cls = formset_factory(
            form=form,
            formset=formset or BaseFormSet,
            extra=0,
            can_delete=can_delete
        )

    # list() keeps .append() working regardless of what keys() returns.
    fieldnames = list(form.base_fields.keys())
    if can_delete:
        # The last field is a boolean delete flag
        fieldnames.append(DELETION_FIELD_NAME)

    data = {}
    reader = csv.DictReader(csvfile, fieldnames=fieldnames)
    # Swap in UnicodeReader so cells are decoded with the requested encoding.
    reader.reader = UnicodeReader(csvfile, encoding=encoding)

    form_prefix = formset_cls.get_default_prefix()
    form_count = 0
    for index, data_dict in enumerate(reader):
        # Flatten each CSV row into formset POST-style keys:
        #   "<prefix>-<row>-<field>" -> value
        data.update(dict([
            ("%s-%d-%s" % (form_prefix, index, field_name), value)
            for field_name, value in data_dict.iteritems()
        ]))
        form_count += 1

    # Management-form bookkeeping expected by the formset machinery.
    data['%s-%s' % (form_prefix, TOTAL_FORM_COUNT)] = form_count
    data['%s-%s' % (form_prefix, INITIAL_FORM_COUNT)] = form_count if update else 0
    data['%s-%s' % (form_prefix, MAX_NUM_FORM_COUNT)] = form_count if update else 0
    return formset_cls(data)
| bsd-3-clause |
HPCGISLab/TwitterMethods | tweetproc/geo.py | 1 | 1741 | #!/usr/bin/python
"""
Copyright (c) 2014 High-Performance Computing and GIS (HPCGIS) Laboratory. All rights reserved.
Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
Authors and contributors: Eric Shook (eshook@kent.edu);
Website: http://hpcgis.geog.kent.edu
"""
import json
import codecs
from util import *
import io
def geo(infilename, outfilename):
    """Copy tweets carrying geographic information from *infilename* to
    *outfilename* (one JSON tweet per line).

    A tweet qualifies if it (or its nested 'object', for retweets) has a
    'geo' or 'location' attribute.
    """
    print("Starting: geo(", infilename, ",", outfilename, ")")
    sanitycheck(infilename, outfilename)
    # Use 'with' so both files are closed on every path; the previous
    # version closed outfile only on success and leaked it when a
    # malformed tweet raised.
    with io.open(outfilename, "w", encoding="utf-8", errors='ignore') as outfile:
        with io.open(infilename, 'r', encoding="utf-8", errors='ignore') as infile:
            for line in infile:
                # Skip empty lines
                if line.isspace():
                    continue
                try:
                    tweet = json.loads(line)
                    # Check existence of multiple geo tags in a tweet or retweet
                    if "object" in tweet and "geo" in tweet["object"]:
                        outfile.write(line)
                        continue
                    if "object" in tweet and "location" in tweet["object"]:
                        outfile.write(line)
                        continue
                    if "geo" in tweet:
                        outfile.write(line)
                        continue
                    if "location" in tweet:
                        outfile.write(line)
                        continue
                except:
                    # Deliberately broad: log context, record the error,
                    # then re-raise so the failure is never swallowed.
                    print(" [ ERROR ] Exception raised")
                    print(" [ ERROR ] geo(", infilename, ",", outfilename, ")")
                    print(" occurred on line:", line)
                    uprint(line)
                    error(infilename, outfilename, "Exception in processing tweet")
                    raise
| bsd-3-clause |
massot/odoo | addons/auth_signup/controllers/main.py | 165 | 6011 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import logging
import werkzeug
import openerp
from openerp.addons.auth_signup.res_users import SignupError
from openerp.addons.web.controllers.main import ensure_db
from openerp import http
from openerp.http import request
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class AuthSignupHome(openerp.addons.web.controllers.main.Home):
    # Extends the stock web Home controller with public signup and
    # password-reset pages, and exposes the related feature flags to the
    # login template.

    @http.route()
    def web_login(self, *args, **kw):
        # Standard login flow, augmented with the signup/reset feature
        # flags so the template can show or hide the matching links.
        ensure_db()
        response = super(AuthSignupHome, self).web_login(*args, **kw)
        response.qcontext.update(self.get_auth_signup_config())
        if request.httprequest.method == 'GET' and request.session.uid and request.params.get('redirect'):
            # Redirect if already logged in and redirect param is present
            return http.redirect_with_hash(request.params.get('redirect'))
        return response

    @http.route('/web/signup', type='http', auth='public', website=True)
    def web_auth_signup(self, *args, **kw):
        # Public signup page: reachable only with an invitation token or
        # when uninvited signup is enabled.
        qcontext = self.get_auth_signup_qcontext()

        if not qcontext.get('token') and not qcontext.get('signup_enabled'):
            raise werkzeug.exceptions.NotFound()

        if 'error' not in qcontext and request.httprequest.method == 'POST':
            try:
                self.do_signup(qcontext)
                # Account created successfully: proceed to a normal login.
                return super(AuthSignupHome, self).web_login(*args, **kw)
            except (SignupError, AssertionError), e:
                qcontext['error'] = _(e.message)

        return request.render('auth_signup.signup', qcontext)

    @http.route('/web/reset_password', type='http', auth='public', website=True)
    def web_auth_reset_password(self, *args, **kw):
        # Password-reset page: with a token it behaves like signup (sets
        # the new password); without one it emails reset credentials.
        qcontext = self.get_auth_signup_qcontext()

        if not qcontext.get('token') and not qcontext.get('reset_password_enabled'):
            raise werkzeug.exceptions.NotFound()

        if 'error' not in qcontext and request.httprequest.method == 'POST':
            try:
                if qcontext.get('token'):
                    self.do_signup(qcontext)
                    return super(AuthSignupHome, self).web_login(*args, **kw)
                else:
                    login = qcontext.get('login')
                    assert login, "No login provided."
                    res_users = request.registry.get('res.users')
                    res_users.reset_password(request.cr, openerp.SUPERUSER_ID, login)
                    qcontext['message'] = _("An email has been sent with credentials to reset your password")
            except SignupError:
                qcontext['error'] = _("Could not reset your password")
                _logger.exception('error when resetting password')
            except Exception, e:
                qcontext['error'] = _(e.message)

        return request.render('auth_signup.reset_password', qcontext)

    def get_auth_signup_config(self):
        """retrieve the module config (which features are enabled) for the login page"""
        icp = request.registry.get('ir.config_parameter')
        return {
            'signup_enabled': icp.get_param(request.cr, openerp.SUPERUSER_ID, 'auth_signup.allow_uninvited') == 'True',
            'reset_password_enabled': icp.get_param(request.cr, openerp.SUPERUSER_ID, 'auth_signup.reset_password') == 'True',
        }

    def get_auth_signup_qcontext(self):
        """ Shared helper returning the rendering context for signup and reset password """
        qcontext = request.params.copy()
        qcontext.update(self.get_auth_signup_config())
        if qcontext.get('token'):
            try:
                # retrieve the user info (name, login or email) corresponding to a signup token
                res_partner = request.registry.get('res.partner')
                token_infos = res_partner.signup_retrieve_info(request.cr, openerp.SUPERUSER_ID, qcontext.get('token'))
                for k, v in token_infos.items():
                    qcontext.setdefault(k, v)
            except:
                # Broad catch: any failure while resolving the token is
                # reported to the user as an invalid token.
                qcontext['error'] = _("Invalid signup token")
        return qcontext

    def do_signup(self, qcontext):
        """ Shared helper that creates a res.partner out of a token """
        values = dict((key, qcontext.get(key)) for key in ('login', 'name', 'password'))
        assert any([k for k in values.values()]), "The form was not properly filled in."
        assert values.get('password') == qcontext.get('confirm_password'), "Passwords do not match; please retype them."
        self._signup_with_values(qcontext.get('token'), values)
        request.cr.commit()

    def _signup_with_values(self, token, values):
        # Create the user from the signup values, then authenticate as it.
        db, login, password = request.registry['res.users'].signup(request.cr, openerp.SUPERUSER_ID, values, token)
        request.cr.commit()     # as authenticate will use its own cursor we need to commit the current transaction
        uid = request.session.authenticate(db, login, password)
        if not uid:
            raise SignupError(_('Authentification Failed.'))
# vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dims/test-infra | gubernator/filters_test.py | 14 | 6842 | #!/usr/bin/env python
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import unittest
import urllib
import filters
import jinja2
def linkify(inp, commit):
    """Render *inp* as a kubernetes/kubernetes stacktrace linked at *commit*."""
    markup = filters.do_linkify_stacktrace(inp, commit, 'kubernetes/kubernetes')
    return str(markup)
class HelperTest(unittest.TestCase):
    """Unit tests for the jinja filter helpers exposed by filters.py."""

    def test_timestamp(self):
        # Epoch seconds are rendered as a span carrying the raw epoch.
        self.assertEqual(
            '<span class="timestamp" data-epoch="1461100940">'
            '2016-04-19 21:22</span>',
            filters.do_timestamp(1461100940))

    def test_duration(self):
        # Durations collapse to the two most significant units.
        for duration, expected in {
                3.56: '3.56s',
                13.6: '13s',
                78.2: '1m18s',
                60 * 62 + 3: '1h2m',
        }.iteritems():
            self.assertEqual(expected, filters.do_duration(duration))

    def test_linkify_safe(self):
        # Non-stacktrace input passes through unmodified.
        self.assertEqual('<a>',
                         linkify('<a>', '3'))

    def test_linkify(self):
        linked = linkify(
            "/go/src/k8s.io/kubernetes/test/example.go:123", 'VERSION')
        self.assertIn('<a href="https://github.com/kubernetes/kubernetes/blob/'
                      'VERSION/test/example.go#L123">', linked)

    def test_linkify_trailing(self):
        # Leading whitespace and trailing "+0x..." offsets still linkify.
        linked = linkify(
            " /go/src/k8s.io/kubernetes/test/example.go:123 +0x1ad", 'VERSION')
        self.assertIn('github.com', linked)

    def test_linkify_unicode(self):
        # Check that Unicode characters pass through cleanly.
        linked = filters.do_linkify_stacktrace(u'\u883c', 'VERSION', '')
        self.assertEqual(linked, u'\u883c')

    def test_maybe_linkify(self):
        # Only http(s) strings become anchors; everything else is untouched.
        for inp, expected in [
                (3, 3),
                ({"a": "b"}, {"a": "b"}),
                ("", ""),
                ("whatever", "whatever"),
                ("http://example.com",
                 jinja2.Markup('<a href="http://example.com">http://example.com</a>')),
                ("http://&",
                 jinja2.Markup('<a href="http://&">http://&</a>')),
        ]:
            self.assertEqual(filters.do_maybe_linkify(inp), expected)

    def test_slugify(self):
        self.assertEqual('k8s-test-foo', filters.do_slugify('[k8s] Test Foo'))

    def test_testcmd(self):
        # Each test-name flavor maps to its reproduction command.
        for name, expected in (
                ('k8s.io/kubernetes/pkg/api/errors TestErrorNew',
                 'go test -v k8s.io/kubernetes/pkg/api/errors -run TestErrorNew$'),
                ('[k8s.io] Proxy [k8s.io] works',
                 "go run hack/e2e.go -v --test --test_args='--ginkgo.focus="
                 "Proxy\\s\\[k8s\\.io\\]\\sworks$'"),
                ('//pkg/foo/bar:go_default_test',
                 'bazel test //pkg/foo/bar:go_default_test'),
                ('verify typecheck', 'make verify WHAT=typecheck')):
            print 'test name:', name
            self.assertEqual(filters.do_testcmd(name), expected)

    def test_classify_size(self):
        # An explicit size/* label wins; otherwise size is computed from
        # the additions/deletions counts.
        self.assertEqual(filters.do_classify_size(
            {'labels': {'size/FOO': 1}}), 'FOO')
        self.assertEqual(filters.do_classify_size(
            {'labels': {}, 'additions': 70, 'deletions': 20}), 'M')

    def test_render_status_basic(self):
        payload = {'status': {'ci': ['pending', '', '']}}
        self.assertEqual(str(filters.do_render_status(payload, '')),
                         '<span class="text-pending octicon octicon-primitive-dot" title="pending tests">'
                         '</span>Pending')

    def test_render_status_complex(self):
        def expect(payload, expected, user=''):
            # strip the excess html from the result down to the text class,
            # the opticon class, and the rendered text
            result = str(filters.do_render_status(payload, user))
            result = re.sub(r'<span class="text-|octicon octicon-| title="[^"]*"|</span>',
                            '', result)
            result = result.replace('">', ' ')
            self.assertEqual(result, expected)

        # Build a status dict from a sequence of state strings.
        statuses = lambda *xs: {str(n): [x, '', ''] for n, x in enumerate(xs)}

        expect({'status': {}}, 'Pending')
        expect({'status': statuses('pending')}, 'pending primitive-dot Pending')
        expect({'status': statuses('failure')}, 'failure x Pending')
        expect({'status': statuses('success')}, 'success check Pending')
        # The "worst" state wins: pending beats success, failure beats both.
        expect({'status': statuses('pending', 'success')}, 'pending primitive-dot Pending')
        expect({'status': statuses('failure', 'pending', 'success')}, 'failure x Pending')
        # Certain contexts (Submit Queue, tide, reviewable) are ignored.
        expect({'status': {'ci': ['success', '', ''],
                           'Submit Queue': ['pending', '', 'does not have LGTM']}}, 'success check Pending')
        expect({'status': {'ci': ['success', '', ''],
                           'tide': ['pending', '', '']}}, 'success check Pending')
        expect({'status': {'ci': ['success', '', ''],
                           'code-review/reviewable': ['pending', '', '10 files left']}}, 'success check Pending')
        expect({'status': {'ci': ['success', '', '']}, 'labels': ['lgtm']}, 'success check LGTM')
        # Per-user attention messages; labels still take precedence.
        expect({'attn': {'foo': 'Needs Rebase'}}, 'Needs Rebase', user='foo')
        expect({'attn': {'foo': 'Needs Rebase'}, 'labels': {'lgtm'}}, 'LGTM', user='foo')
        expect({'author': 'u', 'labels': ['lgtm']}, 'LGTM', 'u')
        expect({'author': 'b', 'labels': ['lgtm'], 'approvers': ['u'],
                'attn': {'u': 'needs approval'}},
               'Needs Approval', 'u')

    def test_tg_url(self):
        self.assertEqual(
            filters.do_tg_url('a#b'),
            'https://testgrid.k8s.io/a#b')
        # An optional test name is added as an URL-encoded regex filter.
        self.assertEqual(
            filters.do_tg_url('a#b', '[low] test'),
            'https://testgrid.k8s.io/a#b&include-filter-by-regex=%s' %
            urllib.quote('^Overall$|\\[low\\]\\ test'))

    def test_gcs_browse_url(self):
        # A trailing slash is appended exactly once.
        self.assertEqual(
            filters.do_gcs_browse_url('/k8s/foo'),
            'https://gcsweb.k8s.io/gcs/k8s/foo/')
        self.assertEqual(
            filters.do_gcs_browse_url('/k8s/bar/'),
            'https://gcsweb.k8s.io/gcs/k8s/bar/')

    def test_pod_name(self):
        self.assertEqual(filters.do_parse_pod_name("start pod 'client-c6671' to"), 'client-c6671')
        self.assertEqual(filters.do_parse_pod_name('tripod "blah"'), '')
        # exercise pathological case
        self.assertEqual(filters.do_parse_pod_name('abcd pode ' * 10000), '')
# Standard unittest entry point.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
tangp3/gpdb | gpMgmt/bin/gppylib/system/environment.py | 20 | 3823 | #!/usr/bin/env python
#
# Copyright (c) Greenplum Inc 2010. All Rights Reserved.
#
from gppylib import gplog, pgconf
from gppylib.commands import gp
from gppylib.db import catalog, dbconn
from gppylib.utils import toNonNoneString, checkNotNone
logger = gplog.get_default_logger()
class GpMasterEnvironment:
    """
    Encapsulates information about the environment in which the script is running AND about the
    master database.

    In the future we should make it possible to build this object on segments, or when the master data directory
    has not been built.
    """

    def __init__(self, masterDataDir, readFromMasterCatalog, timeout=None, retries=None):
        """
        masterDataDir: if None then we try to find it from the system environment
        readFromMasterCatalog: if True then we will connect to the master in utility mode and fetch some more
                               data from there (like collation settings)
        timeout/retries: forwarded to the DbURL used for the catalog connection.
        """
        if masterDataDir is None:
            self.__masterDataDir = gp.get_masterdatadir()
        else: self.__masterDataDir = masterDataDir

        logger.debug("Obtaining master's port from master data directory")
        pgconf_dict = pgconf.readfile(self.__masterDataDir + "/postgresql.conf")
        self.__masterPort = pgconf_dict.int('port')
        logger.debug("Read from postgresql.conf port=%s" % self.__masterPort)
        self.__masterMaxConnections = pgconf_dict.int('max_connections')
        logger.debug("Read from postgresql.conf max_connections=%s" % self.__masterMaxConnections)

        # Local software installation info.
        self.__gpHome = gp.get_gphome()
        self.__gpVersion = gp.GpVersion.local('local GP software version check',self.__gpHome)
        logger.info("local Greenplum Version: '%s'" % self.__gpVersion)

        # read collation settings from master
        if readFromMasterCatalog:
            # Utility-mode connection to template1 on the master.
            dbUrl = dbconn.DbURL(port=self.__masterPort, dbname='template1', timeout=timeout, retries=retries)
            conn = dbconn.connect(dbUrl, utility=True)
            (self.__lcCollate, self.__lcMonetary, self.__lcNumeric) = catalog.getCollationSettings(conn)

            # MPP-13807, read/show the master's database version too
            self.__pgVersion = dbconn.execSQLForSingletonRow(conn, "select version();")[0]
            logger.info("master Greenplum Version: '%s'" % self.__pgVersion)
            conn.close()

            checkNotNone("lc_collate", self.__lcCollate)
            checkNotNone("lc_monetary", self.__lcMonetary)
            checkNotNone("lc_numeric", self.__lcNumeric)
        else:
            # Catalog not consulted: collation/version info is unavailable.
            self.__lcCollate = None
            self.__lcMonetary = None
            self.__lcNumeric = None
            self.__pgVersion = None

    # Simple accessors for values captured at construction time.
    def getGpHome(self): return self.__gpHome
    def getGpVersion(self): return self.__gpVersion
    def getPgVersion(self): return self.__pgVersion

    def getLcCollate(self):
        checkNotNone("lc_collate", self.__lcCollate) # make sure we were initialized with "readFromMasterCatalog"
        return self.__lcCollate

    def getLcMonetary(self):
        checkNotNone("lc_monetary", self.__lcMonetary) # make sure we were initialized with "readFromMasterCatalog"
        return self.__lcMonetary

    def getLcNumeric(self):
        checkNotNone("lc_numeric", self.__lcNumeric) # make sure we were initialized with "readFromMasterCatalog"
        return self.__lcNumeric

    def getLocaleData(self):
        # Colon-joined "collate:monetary:numeric" string.
        checkNotNone("lc_numeric", self.__lcNumeric) # make sure we were initialized with "readFromMasterCatalog"
        return ":".join([self.__lcCollate, self.__lcMonetary, self.__lcNumeric])

    def getMasterDataDir(self): return self.__masterDataDir
    def getMasterMaxConnections(self) : return self.__masterMaxConnections
    def getMasterPort(self) : return self.__masterPort
| apache-2.0 |
Irene-Li/Presentations | node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py | 1509 | 17165 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Handle version information related to Visual Stuio."""
import errno
import os
import re
import subprocess
import sys
import gyp
import glob
class VisualStudioVersion(object):
  """Information regarding a version of Visual Studio."""

  def __init__(self, short_name, description,
               solution_version, project_version, flat_sln, uses_vcxproj,
               path, sdk_based, default_toolset=None):
    self.short_name = short_name
    self.description = description
    self.solution_version = solution_version
    self.project_version = project_version
    self.flat_sln = flat_sln
    self.uses_vcxproj = uses_vcxproj
    self.path = path
    self.sdk_based = sdk_based
    self.default_toolset = default_toolset

  def ShortName(self):
    """The short name, e.g. '2013' or '2013e'."""
    return self.short_name

  def Description(self):
    """The full human-readable description of the version."""
    return self.description

  def SolutionVersion(self):
    """The version number written into .sln files."""
    return self.solution_version

  def ProjectVersion(self):
    """The version number written into vcproj/vcxproj files."""
    return self.project_version

  def FlatSolution(self):
    return self.flat_sln

  def UsesVcxproj(self):
    """True when this version uses MSBuild-style .vcxproj files."""
    return self.uses_vcxproj

  def ProjectExtension(self):
    """The project file extension for this version."""
    return '.vcxproj' if self.uses_vcxproj else '.vcproj'

  def Path(self):
    """The path to the Visual Studio installation."""
    return self.path

  def ToolPath(self, tool):
    """The path to a given compiler tool under VC/bin."""
    return os.path.normpath(os.path.join(self.path, "VC/bin", tool))

  def DefaultToolset(self):
    """The msbuild toolset version used in the absence of a user
    override."""
    return self.default_toolset

  def SetupScript(self, target_arch):
    """Returns a command (with arguments) that sets up the build
    environment for |target_arch| ('x86' or 'x64')."""
    assert target_arch in ('x86', 'x64')
    # When running inside the SDK command-line environment, use the
    # SDK's own setup script instead of Visual Studio's.
    sdk_dir = os.environ.get('WindowsSDKDir')
    if self.sdk_based and sdk_dir:
      return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
              '/' + target_arch]
    host_is_amd64 = (
        os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
        os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64')
    express = self.short_name[-1] == 'e'
    if target_arch == 'x86':
      if self.short_name >= '2013' and not express and host_is_amd64:
        # VS2013 and later, non-Express, on a 64-bit host: prefer the
        # x64-hosted x86 cross compiler.
        return [os.path.normpath(
            os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
      # Use vsvars32.bat directly: vcvarsall's x86 path calls vcvars32,
      # which it can only find when VS??COMNTOOLS is set — and it isn't
      # always.
      return [os.path.normpath(
          os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
    # x64 target: use the native 64-on-64 compiler only for non-Express
    # editions on a 64-bit OS; otherwise the x86-hosted cross compiler.
    vc_arg = 'amd64' if (not express and host_is_amd64) else 'x86_amd64'
    return [os.path.normpath(
        os.path.join(self.path, 'VC/vcvarsall.bat')), vc_arg]
def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key.
While ideally we might use the win32 module, we would like gyp to be
python neutral, so for instance cygwin python lacks this module.
Arguments:
sysdir: The system subdirectory to attempt to launch reg.exe from.
key: The registry key to read from.
value: The particular value to read.
Return:
stdout from reg.exe, or None for failure.
"""
# Skip if not on Windows or Python Win32 setup issue
if sys.platform not in ('win32', 'cygwin'):
return None
# Setup params to pass to and attempt to launch reg.exe
cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
'query', key]
if value:
cmd.extend(['/v', value])
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
# Note that the error text may be in [1] in some cases
text = p.communicate()[0]
# Check return code from reg.exe; officially 0==success and 1==error
if p.returncode:
return None
return text
def _RegistryQuery(key, value=None):
  r"""Use reg.exe to read a particular key through _RegistryQueryBase.

  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
  that fails, it falls back to System32. Sysnative is available on Vista and
  up and available on Windows Server 2003 and XP through KB patch 942589. Note
  that Sysnative will always fail if using 64-bit python due to it being a
  virtual directory and System32 will work correctly in the first place.

  KB 942589 - http://support.microsoft.com/kb/942589/en-us.

  Arguments:
    key: The registry key.
    value: The particular registry value to read (optional).
  Return:
    stdout from reg.exe, or None for failure.
  """
  text = None
  try:
    text = _RegistryQueryBase('Sysnative', key, value)
  except OSError, e:
    # Sysnative is missing (pre-patch OS, or a 64-bit Python where it is
    # invisible); retry from System32. Any other OS error is unexpected.
    if e.errno == errno.ENOENT:
      text = _RegistryQueryBase('System32', key, value)
    else:
      raise
  return text
def _RegistryGetValueUsingWinReg(key, value):
"""Use the _winreg module to obtain the value of a registry key.
Args:
key: The registry key.
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure. Throws
ImportError if _winreg is unavailable.
"""
import _winreg
try:
root, subkey = key.split('\\', 1)
assert root == 'HKLM' # Only need HKLM for now.
with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
return _winreg.QueryValueEx(hkey, value)[0]
except WindowsError:
return None
def _RegistryGetValue(key, value):
  """Use _winreg or reg.exe to obtain the value of a registry key.

  Using _winreg is preferable because it solves an issue on some corporate
  environments where access to reg.exe is locked down. However, we still need
  to fallback to reg.exe for the case where the _winreg module is not available
  (for example in cygwin python).

  Args:
    key: The registry key.
    value: The particular registry value to read.
  Return:
    contents of the registry key's value, or None on failure.
  """
  try:
    return _RegistryGetValueUsingWinReg(key, value)
  except ImportError:
    # _winreg is unavailable (e.g. cygwin python); shell out to reg.exe.
    pass
  output = _RegistryQuery(key, value)
  if not output:
    return None
  # reg.exe output looks like '    <name>    REG_SZ    <data>\r\n';
  # pull out the data portion.
  found = re.search(r'REG_\w+\s+([^\r]+)\r\n', output)
  return found.group(1) if found else None
def _CreateVersion(name, path, sdk_based=False):
  """Sets up MSVS project generation.

  Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
  passed in that doesn't match a value in versions python will throw a error.

  Args:
    name: version key such as '2013' or '2013e' (the 'e' suffix denotes the
      Express edition).
    path: install directory of the Visual Studio version, or None.
    sdk_based: True when only the Windows SDK (no IDE) is installed.
  Return:
    The matching VisualStudioVersion object; raises KeyError for an
    unsupported name.
  """
  if path:
    path = os.path.normpath(path)
  # Static table of every supported Visual Studio release.  Express ('e')
  # editions use a flat solution layout (no solution folders).
  versions = {
      '2015': VisualStudioVersion('2015',
                                  'Visual Studio 2015',
                                  solution_version='12.00',
                                  project_version='14.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v140'),
      '2013': VisualStudioVersion('2013',
                                  'Visual Studio 2013',
                                  solution_version='13.00',
                                  project_version='12.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v120'),
      '2013e': VisualStudioVersion('2013e',
                                   'Visual Studio 2013',
                                   solution_version='13.00',
                                   project_version='12.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v120'),
      '2012': VisualStudioVersion('2012',
                                  'Visual Studio 2012',
                                  solution_version='12.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v110'),
      '2012e': VisualStudioVersion('2012e',
                                   'Visual Studio 2012',
                                   solution_version='12.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v110'),
      '2010': VisualStudioVersion('2010',
                                  'Visual Studio 2010',
                                  solution_version='11.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based),
      '2010e': VisualStudioVersion('2010e',
                                   'Visual C++ Express 2010',
                                   solution_version='11.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based),
      # 2008 and earlier use the old .vcproj project format.
      '2008': VisualStudioVersion('2008',
                                  'Visual Studio 2008',
                                  solution_version='10.00',
                                  project_version='9.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2008e': VisualStudioVersion('2008e',
                                   'Visual Studio 2008',
                                   solution_version='10.00',
                                   project_version='9.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
      '2005': VisualStudioVersion('2005',
                                  'Visual Studio 2005',
                                  solution_version='9.00',
                                  project_version='8.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2005e': VisualStudioVersion('2005e',
                                   'Visual Studio 2005',
                                   solution_version='9.00',
                                   project_version='8.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
  }
  return versions[str(name)]
def _ConvertToCygpath(path):
  """Return *path* translated through cygpath when running under cygwin.

  On any other platform the path is returned unchanged.
  """
  if sys.platform != 'cygwin':
    return path
  proc = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
  out, _ = proc.communicate()
  return out.strip()
def _DetectVisualStudioVersions(versions_to_check, force_express):
  """Collect the list of installed visual studio versions.

  Args:
    versions_to_check: registry version numbers (e.g. '10.0') to probe for,
      in preference order.
    force_express: when True, only Express editions are considered even when
      a full devenv.exe installation exists.
  Returns:
    A list of visual studio versions installed in descending order of
    usage preference.
  Base this on the registry and a quick check if devenv.exe exists.
  Only versions 8-10 are considered.
  Possibilities are:
    2005(e) - Visual Studio 2005 (8)
    2008(e) - Visual Studio 2008 (9)
    2010(e) - Visual Studio 2010 (10)
    2012(e) - Visual Studio 2012 (11)
    2013(e) - Visual Studio 2013 (12)
    2015    - Visual Studio 2015 (14)
  Where (e) is e for express editions of MSVS and blank otherwise.
  """
  version_to_year = {
      '8.0': '2005',
      '9.0': '2008',
      '10.0': '2010',
      '11.0': '2012',
      '12.0': '2013',
      '14.0': '2015',
  }
  versions = []
  for version in versions_to_check:
    # Old method of searching for which VS version is installed
    # We don't use the 2010-encouraged-way because we also want to get the
    # path to the binaries, which it doesn't offer.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Microsoft\VCExpress\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], 'InstallDir')
      if not path:
        continue
      path = _ConvertToCygpath(path)
      # Check for full.
      full_path = os.path.join(path, 'devenv.exe')
      express_path = os.path.join(path, '*express.exe')
      if not force_express and os.path.exists(full_path):
        # Add this one.  InstallDir points at Common7/IDE, so the VS root is
        # two levels up.
        versions.append(_CreateVersion(version_to_year[version],
                                       os.path.join(path, '..', '..')))
      # Check for express.
      elif glob.glob(express_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
                                       os.path.join(path, '..', '..')))
    # The old method above does not work when only SDK is installed.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], version)
      if not path:
        continue
      path = _ConvertToCygpath(path)
      if version != '14.0':  # There is no Express edition for 2015.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
                                       os.path.join(path, '..'), sdk_based=True))
  return versions
def SelectVisualStudioVersion(version='auto', allow_fallback=True):
  """Select which version of Visual Studio projects to generate.

  Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
    allow_fallback: when False, raise ValueError instead of falling back to a
      default version when no installation is detected.
  Returns:
    An object representing a visual studio project format version.
  """
  # In auto mode, check environment variable for override.
  if version == 'auto':
    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
  # Registry version number(s) to probe for each requested version.
  version_map = {
      'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
      '2005': ('8.0',),
      '2005e': ('8.0',),
      '2008': ('9.0',),
      '2008e': ('9.0',),
      '2010': ('10.0',),
      '2010e': ('10.0',),
      '2012': ('11.0',),
      '2012e': ('11.0',),
      '2013': ('12.0',),
      '2013e': ('12.0',),
      '2015': ('14.0',),
  }
  # An explicit override path bypasses registry detection entirely, but it
  # needs GYP_MSVS_VERSION to know which version lives there.
  override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
  if override_path:
    msvs_version = os.environ.get('GYP_MSVS_VERSION')
    if not msvs_version:
      raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
                       'set to a particular version (e.g. 2010e).')
    return _CreateVersion(msvs_version, override_path, sdk_based=True)
  version = str(version)
  versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
  if not versions:
    if not allow_fallback:
      raise ValueError('Could not locate Visual Studio installation.')
    if version == 'auto':
      # Default to 2005 if we couldn't find anything
      return _CreateVersion('2005', None)
    else:
      return _CreateVersion(version, None)
  return versions[0]
| mit |
PetePriority/home-assistant | homeassistant/components/group/reproduce_state.py | 11 | 1149 | """Module that groups code required to handle state restore for component."""
from typing import Iterable, Optional
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.loader import bind_hass
@bind_hass
async def async_reproduce_states(hass: HomeAssistantType,
                                 states: Iterable[State],
                                 context: Optional[Context] = None) -> None:
    """Reproduce component states.

    Each group state is expanded into one State per member entity before
    being handed off to the generic state-reproduction helper.
    """
    from . import get_entity_ids
    from homeassistant.helpers.state import async_reproduce_state

    expanded_states = [
        State(member,
              state.state,
              state.attributes,
              last_changed=state.last_changed,
              last_updated=state.last_updated,
              context=state.context)
        for state in states
        for member in get_entity_ids(hass, state.entity_id)
    ]

    await async_reproduce_state(hass, expanded_states, blocking=True,
                                context=context)
| apache-2.0 |
felipenaselva/felipe.repository | plugin.video.streamhub/resources/lib/modules/log_utils.py | 30 | 3921 | """
tknorris shared module
Copyright (C) 2016 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import time
import cProfile
import StringIO
import pstats
import json
import xbmc
from resources.lib.modules import control
from xbmc import LOGDEBUG, LOGERROR, LOGFATAL, LOGINFO, LOGNONE, LOGNOTICE, LOGSEVERE, LOGWARNING # @UnusedImport
name = control.addonInfo('name')
def log(msg, level=LOGDEBUG):
    """Write *msg* to the Kodi log, prefixed with the addon name.

    DEBUG messages are promoted to NOTICE when the addon's own debug
    setting is enabled, so they show up without Kodi-wide debug logging.
    """
    req_level = level  # NOTE(review): saved but never read -- looks vestigial; confirm before removing.
    # override message level to force logging when addon logging turned on
    if control.setting('addon_debug') == 'true' and level == LOGDEBUG:
        level = LOGNOTICE

    try:
        if isinstance(msg, unicode):  # Python 2: encode before handing to xbmc.log
            msg = '%s (ENCODED)' % (msg.encode('utf-8'))
        xbmc.log('[%s] %s' % (name, msg), level)
    except Exception as e:
        try:
            xbmc.log('Logging Failure: %s' % (e), level)
        except:
            pass  # just give up
class Profiler(object):
    """cProfile wrapper that dumps collected stats to a file.

    Profiling only happens while Kodi debug logging is enabled (checked at
    decoration time via _is_debugging()).
    """

    def __init__(self, file_path, sort_by='time', builtins=False):
        self._profiler = cProfile.Profile(builtins=builtins)
        self.file_path = file_path  # destination for the stats dump; None = don't write
        self.sort_by = sort_by  # pstats sort key: a string or a sequence of strings

    def profile(self, f):
        """Decorator: run *f* under the profiler when debugging is on."""
        def method_profile_on(*args, **kwargs):
            try:
                self._profiler.enable()
                result = self._profiler.runcall(f, *args, **kwargs)
                self._profiler.disable()
                return result
            except Exception as e:
                # Never let profiling break the addon; fall back to a plain call.
                log('Profiler Error: %s' % (e), LOGWARNING)
                return f(*args, **kwargs)

        def method_profile_off(*args, **kwargs):
            return f(*args, **kwargs)

        if _is_debugging():
            return method_profile_on
        else:
            return method_profile_off

    def __del__(self):
        # Flush collected stats when the profiler object is garbage collected.
        self.dump_stats()

    def dump_stats(self):
        """Format the collected stats with pstats and write them to file_path."""
        if self._profiler is not None:
            s = StringIO.StringIO()  # Python 2 StringIO module
            params = (self.sort_by,) if isinstance(self.sort_by, basestring) else self.sort_by
            ps = pstats.Stats(self._profiler, stream=s).sort_stats(*params)
            ps.print_stats()
            if self.file_path is not None:
                with open(self.file_path, 'w') as f:
                    f.write(s.getvalue())
def trace(method):
    """Decorator that logs *method*'s runtime and arguments.

    The debug check happens once, at decoration time; when debugging is off
    the original method is called with no overhead beyond one wrapper frame.
    """
    def method_trace_on(*args, **kwargs):
        start = time.time()
        result = method(*args, **kwargs)
        end = time.time()
        log('{name!r} time: {time:2.4f}s args: |{args!r}| kwargs: |{kwargs!r}|'.format(name=method.__name__, time=end - start, args=args, kwargs=kwargs), LOGDEBUG)
        return result

    def method_trace_off(*args, **kwargs):
        return method(*args, **kwargs)

    if _is_debugging():
        return method_trace_on
    else:
        return method_trace_off
def _is_debugging():
    """Return Kodi's 'debug.showloginfo' setting via JSON-RPC (False on failure)."""
    command = {'jsonrpc': '2.0', 'id': 1, 'method': 'Settings.getSettings', 'params': {'filter': {'section': 'system', 'category': 'logging'}}}
    js_data = execute_jsonrpc(command)
    # Scan the returned settings list for the debug-logging flag.
    for item in js_data.get('result', {}).get('settings', {}):
        if item['id'] == 'debug.showloginfo':
            return item['value']
    return False
def execute_jsonrpc(command):
    """Send *command* (dict or JSON string) to Kodi's JSON-RPC and decode the reply."""
    if not isinstance(command, basestring):  # Python 2: accept dicts as well as strings
        command = json.dumps(command)
    response = control.jsonrpc(command)
    return json.loads(response)
| gpl-2.0 |
agaffney/ansible | hacking/build_library/build_ansible/command_plugins/file_deprecated_issues.py | 63 | 5460 | # -*- coding: utf-8 -*-
# (c) 2017, Matt Martz <matt@sivel.net>
# (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import argparse
import os
import time
from collections import defaultdict
from ansible.release import __version__ as ansible_version
# Pylint doesn't understand Python3 namespace modules.
from ..commands import Command # pylint: disable=relative-beyond-top-level
from .. import errors # pylint: disable=relative-beyond-top-level
ANSIBLE_MAJOR_VERSION = '.'.join(ansible_version.split('.')[:2])
def get_token(token_file):
    """Return a github oauth token.

    The token is read from *token_file* when one is given; otherwise the
    GITHUB_TOKEN environment variable is used.

    :arg token_file: open file object containing the token, or None.
    :returns: the token with surrounding whitespace stripped.
    :raises errors.MissingUserInput: when no token was supplied either way.
    """
    if token_file:
        return token_file.read().strip()

    # Bug fix: default to '' so a missing env var raises MissingUserInput
    # below instead of crashing with AttributeError on None.strip().
    token = os.getenv('GITHUB_TOKEN', '').strip()
    if not token:
        raise errors.MissingUserInput(
            'Please provide a file containing a github oauth token with public_repo scope'
            ' via the --github-token argument or set the GITHUB_TOKEN env var with your'
            ' github oauth token'
        )
    return token
def parse_deprecations(problems_file_handle):
    """Group pylint deprecation problems by the component they belong to.

    :arg problems_file_handle: open file of pylint output, one problem per
        line, where each line begins with the offending file's path and a colon.
    :returns: dict mapping component name to a list of dicts with ``title``,
        ``path`` and ``line`` keys.
    """
    deprecated = defaultdict(list)
    deprecation_errors = problems_file_handle.read()
    for line in deprecation_errors.splitlines():
        path = line.split(':')[0]
        if path.endswith('__init__.py'):
            # Packages are identified by their directory name.
            component = os.path.basename(os.path.dirname(path))
        else:
            # Modules are identified by their basename, minus any leading
            # underscore and the extension.
            component, dummy = os.path.splitext(os.path.basename(path).lstrip('_'))

        title = (
            '%s contains deprecated call to be removed in %s' %
            (component, ANSIBLE_MAJOR_VERSION)
        )
        deprecated[component].append(
            dict(title=title, path=path, line=line)
        )
    return deprecated
def find_project_todo_column(repo, project_name):
    """Find the 'to do' column of the named github project.

    :arg repo: github3.py repository object.
    :arg project_name: lowercase name of the project to search for.
    :returns: the project column whose name contains todo/backlog/to do.
    :raises errors.InvalidUserInput: when no project matches *project_name*.
    :raises Exception: when the matched project has no todo-like column.
    """
    project = None
    for project in repo.projects():
        if project.name.lower() == project_name:
            break
    else:
        # Loop completed without break: no project matched (or none exist).
        raise errors.InvalidUserInput('%s was an invalid project name' % project_name)

    for project_column in project.columns():
        column_name = project_column.name.lower()
        if 'todo' in column_name or 'backlog' in column_name or 'to do' in column_name:
            return project_column

    raise Exception('Unable to determine the todo column in'
                    ' project %s' % project_name)
def create_issues(deprecated, body_tmpl, repo):
    """Open one github issue per deprecated component.

    :arg deprecated: mapping of component name to problem records, as
        produced by parse_deprecations().
    :arg body_tmpl: issue body template with %(component)s, %(path)s,
        %(line)s and %(version)s placeholders.
    :arg repo: github3.py repository object to file the issues against.
    :returns: list of the created issue objects.
    """
    issues = []

    for component, items in deprecated.items():
        title = items[0]['title']
        # Deduplicate paths; keep every individual problem line.
        path = '\n'.join(set((i['path']) for i in items))
        line = '\n'.join(i['line'] for i in items)
        body = body_tmpl % dict(component=component, path=path,
                                line=line,
                                version=ANSIBLE_MAJOR_VERSION)
        issue = repo.create_issue(title, body=body, labels=['deprecated'])
        print(issue)
        issues.append(issue)
        # Sleep a little, so that the API doesn't block us
        time.sleep(0.5)
    return issues
class FileDeprecationTickets(Command):
    """Subcommand that files github tickets for deprecated-feature cleanup."""

    name = 'file-deprecation-tickets'

    @classmethod
    def init_parser(cls, add_parser):
        """Register this subcommand's argument parser."""
        parser = add_parser(cls.name, description='File tickets to cleanup deprecated features for'
                            ' the next release')
        parser.add_argument('--template', default='deprecated_issue_template.md',
                            type=argparse.FileType('r'),
                            help='Path to markdown file template to be used for issue '
                                 'body. Default: %(default)s')
        parser.add_argument('--project-name', default='', type=str,
                            help='Name of a github project to assign all issues to')
        parser.add_argument('--github-token', type=argparse.FileType('r'),
                            help='Path to file containing a github token with public_repo scope.'
                                 ' This token in this file will be used to open the deprcation'
                                 ' tickets and add them to the github project. If not given,'
                                 ' the GITHUB_TOKEN environment variable will be tried')
        parser.add_argument('problems', type=argparse.FileType('r'),
                            help='Path to file containing pylint output for the '
                                 'ansible-deprecated-version check')

    @staticmethod
    def main(args):
        """Entry point: parse the problems file, open issues, attach to project.

        :returns: 0 on success.
        :raises errors.DependencyError: when github3.py is not installed.
        """
        try:
            from github3 import GitHub
        except ImportError:
            raise errors.DependencyError(
                'This command needs the github3.py library installed to work'
            )

        token = get_token(args.github_token)
        args.github_token.close()

        deprecated = parse_deprecations(args.problems)
        args.problems.close()

        body_tmpl = args.template.read()
        args.template.close()

        project_name = args.project_name.strip().lower()

        gh_conn = GitHub(token=token)
        # NOTE(review): 'abadger' looks like a personal fork left over from
        # testing -- confirm whether this should be ('ansible', 'ansible').
        repo = gh_conn.repository('abadger', 'ansible')

        # Bug fix: project_column must be defined even when no project name
        # was given; previously the 'if project_column:' test below raised
        # NameError in that case.
        project_column = None
        if project_name:
            project_column = find_project_todo_column(repo, project_name)

        issues = create_issues(deprecated, body_tmpl, repo)

        if project_column:
            for issue in issues:
                project_column.create_card_with_issue(issue)
                # Throttle so the API doesn't block us.
                time.sleep(0.5)
        return 0
| gpl-3.0 |
2014c2g4/2015cda_g7 | static/Brython3.1.0-20150301-090019/Lib/sys.py | 109 | 4959 | # hack to return special attributes
from _sys import *
from javascript import JSObject
has_local_storage=__BRYTHON__.has_local_storage
has_session_storage = __BRYTHON__.has_session_storage
has_json=__BRYTHON__.has_json
argv = ['__main__']
base_exec_prefix = __BRYTHON__.brython_path
base_prefix = __BRYTHON__.brython_path
builtin_module_names=__BRYTHON__.builtin_module_names
byteorder='little'
def exc_info():
    """Return (type, value, traceback) for the exception currently being handled."""
    exc = __BRYTHON__.exception_stack[-1]
    return (exc.__class__, exc, exc.traceback)
# Brython's install location stands in for the CPython prefix/executable.
exec_prefix = __BRYTHON__.brython_path
executable = __BRYTHON__.brython_path + '/brython.js'
def exit(i=None):
    """Raise SystemExit, mirroring CPython's sys.exit().

    Bug fix: the optional argument is now attached to the exception (it
    becomes its ``code`` attribute) instead of being silently discarded by a
    hard-coded ``SystemExit('')``.
    """
    raise SystemExit(i)
class flag_class:
    """Mutable stand-in for CPython's ``sys.flags`` with default values."""

    def __init__(self):
        # Every flag defaults to off; hash randomization is reported as on,
        # matching the original hard-coded values.
        for flag_name in ('debug', 'inspect', 'interactive', 'optimize',
                          'dont_write_bytecode', 'no_user_site', 'no_site',
                          'ignore_environment', 'verbose', 'bytes_warning',
                          'quiet'):
            setattr(self, flag_name, 0)
        self.hash_randomization = 1

flags = flag_class()
def getfilesystemencoding(*args, **kw):
    """getfilesystemencoding() -> string
    Return the encoding used to convert Unicode filenames in
    operating system filenames."""
    # NOTE(review): an argument-less getfilesystemencoding() defined later in
    # this module shadows this definition at import time.
    return 'utf-8'
maxsize = 2147483647  # 2**31 - 1: Brython reports a 32-bit maxsize
maxunicode = 1114111  # 0x10FFFF, the highest Unicode code point
path = __BRYTHON__.path
#path_hooks = list(JSObject(__BRYTHON__.path_hooks))
meta_path = __BRYTHON__.meta_path
platform = "brython"
prefix = __BRYTHON__.brython_path
# Human-readable version banner built from the runtime's version tuple.
version = '.'.join(str(x) for x in __BRYTHON__.version_info[:3])
version += " (default, %s) \n[Javascript 1.5] on Brython" % __BRYTHON__.compiled_date
hexversion = 0x03000000  # python 3.0
class __version_info(object):
    """Tuple-like wrapper around Brython's version_info sequence.

    Exposes major/minor/micro/releaselevel/serial attributes, indexing, and
    tuple comparisons like CPython's sys.version_info.
    """

    def __init__(self, version_info):
        self.version_info = version_info
        self.major = version_info[0]
        self.minor = version_info[1]
        self.micro = version_info[2]
        self.releaselevel = version_info[3]
        self.serial = version_info[4]

    def __getitem__(self, index):
        # Nested lists are exposed as tuples, like a real version_info.
        if isinstance(self.version_info[index], list):
            return tuple(self.version_info[index])
        return self.version_info[index]

    def hexversion(self):
        # Bug fix: the original used ``try: return ... finally: return ...``
        # and a return inside a finally block always wins, so the detailed
        # '0M0m0u' string was never returned.  Fall back only when formatting
        # actually fails (e.g. a non-integer micro such as 'rc1').
        try:
            return '0%d0%d0%d' % (self.major, self.minor, self.micro)
        except TypeError:
            return '0%d0000' % (self.major)

    def __str__(self):
        _s = "sys.version(major=%d, minor=%d, micro=%d, releaselevel='%s', serial=%d)"
        return _s % (self.major, self.minor, self.micro,
                     self.releaselevel, self.serial)

    # Comparisons only support tuples; anything else raises.  Bug fix: the
    # original raised the undefined name ``Error`` (a NameError in practice);
    # TypeError is the conventional exception for unsupported comparisons.
    def __eq__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) == other
        raise TypeError("Error! I don't know how to compare!")

    def __ge__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) >= other
        raise TypeError("Error! I don't know how to compare!")

    def __gt__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) > other
        raise TypeError("Error! I don't know how to compare!")

    def __le__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) <= other
        raise TypeError("Error! I don't know how to compare!")

    def __lt__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) < other
        raise TypeError("Error! I don't know how to compare!")

    def __ne__(self, other):
        if isinstance(other, tuple):
            return (self.major, self.minor, self.micro) != other
        raise TypeError("Error! I don't know how to compare!")

#eventually this needs to be the real python version such as 3.0, 3.1, etc
version_info = __version_info(__BRYTHON__.version_info)
class _implementation:
    """Stand-in for the ``sys.implementation`` namespace object."""

    def __init__(self):
        self.name = 'brython'
        self.version = __version_info(__BRYTHON__.implementation)
        self.hexversion = self.version.hexversion()
        self.cache_tag = None  # no bytecode cache in the browser

    def __repr__(self):
        return "namespace(name='%s' version=%s hexversion='%s')" % (self.name, self.version, self.hexversion)

    def __str__(self):
        # Same text as __repr__, mimicking types.SimpleNamespace output.
        return "namespace(name='%s' version=%s hexversion='%s')" % (self.name, self.version, self.hexversion)

implementation = _implementation()
class _hash_info:
    """Static stand-in for CPython's ``sys.hash_info`` struct sequence."""

    def __init__(self):
        # Bug fix: the original wrote ``self.width=32,`` -- the trailing comma
        # made ``width`` the tuple ``(32,)`` instead of the integer 32.
        self.width = 32
        self.modulus = 2147483647
        self.inf = 314159
        self.nan = 0
        self.imag = 1000003
        self.algorithm = 'siphash24'
        self.hash_bits = 64
        self.seed_bits = 128

    # Class attribute, as in the original.
    cutoff = 0

    def __repr__(self):
        # Bug fix: the method was named ``__repr`` (name-mangled, never used
        # by repr()).  The text mirrors the attribute values set in __init__.
        return "sys.hash_info(width=32, modulus=2147483647, inf=314159, nan=0, imag=1000003, algorithm='siphash24', hash_bits=64, seed_bits=128, cutoff=0)"

hash_info = _hash_info()
warnoptions=[]
def getfilesystemencoding():
    """Return the filename encoding; Brython always uses UTF-8."""
    return 'utf-8'
#delete objects not in python sys module namespace
del JSObject
del _implementation
| gpl-3.0 |
ccomb/OpenUpgrade | addons/board/controllers.py | 44 | 1981 | # -*- coding: utf-8 -*-
from xml.etree import ElementTree
from openerp.addons.web.controllers.main import load_actions_from_ir_values
from openerp.addons.web.http import Controller, route, request
class Board(Controller):
    """Web controller for OpenERP dashboards (board.board)."""

    @route('/board/add_to_dashboard', type='json', auth='user')
    def add_to_dashboard(self, menu_id, action_id, context_to_save, domain, view_mode, name=''):
        """Prepend *action_id* as a new <action> element in the user's dashboard.

        Returns the id of the created ir.ui.view.custom record, or False when
        the menu does not point at a dashboard form view or the view arch has
        no column to insert into.
        """
        # FIXME move this method to board.board model
        dashboard_action = load_actions_from_ir_values('action', 'tree_but_open',
                                                       [('ir.ui.menu', menu_id)], False)

        if dashboard_action:
            action = dashboard_action[0][2]
            if action['res_model'] == 'board.board' and action['views'][0][1] == 'form':
                # Maybe should check the content instead of model board.board ?
                view_id = action['views'][0][0]
                board = request.session.model(action['res_model']).fields_view_get(view_id, 'form')
                if board and 'arch' in board:
                    xml = ElementTree.fromstring(board['arch'])
                    column = xml.find('./board/column')
                    if column is not None:
                        # Build the new <action> node; ElementTree escapes the
                        # attribute values when serializing.
                        new_action = ElementTree.Element('action', {
                            'name': str(action_id),
                            'string': name,
                            'view_mode': view_mode,
                            'context': str(context_to_save),
                            'domain': str(domain)
                        })
                        column.insert(0, new_action)
                        arch = ElementTree.tostring(xml, 'utf-8')
                        # Persist the modified arch as a per-user customized view.
                        return request.session.model('ir.ui.view.custom').create({
                            'user_id': request.session.uid,
                            'ref_id': view_id,
                            'arch': arch
                        }, request.context)

        return False
| agpl-3.0 |
edulramirez/nova | nova/tests/functional/db/test_cell_mapping.py | 43 | 3091 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from nova import context
from nova import exception
from nova.objects import cell_mapping
from nova import test
from nova.tests import fixtures
class CellMappingTestCase(test.NoDBTestCase):
    """Exercise CellMapping's private DB helpers against the api database."""

    def setUp(self):
        super(CellMappingTestCase, self).setUp()
        self.useFixture(fixtures.Database(database='api'))
        self.context = context.RequestContext('fake-user', 'fake-project')
        self.mapping_obj = cell_mapping.CellMapping()
        self.uuid = uuidutils.generate_uuid()

    # Class-level template for mapping records; the empty uuid is filled in
    # (or overridden) by _create_mapping().
    sample_mapping = {'uuid': '',
                      'name': 'fake-cell',
                      'transport_url': 'rabbit:///',
                      'database_connection': 'mysql+pymysql:///'}

    def _create_mapping(self, **kwargs):
        """Insert a mapping row built from sample_mapping plus overrides."""
        args = self.sample_mapping.copy()
        if 'uuid' not in kwargs:
            args['uuid'] = self.uuid
        args.update(kwargs)
        return self.mapping_obj._create_in_db(self.context, args)

    def test_get_by_uuid(self):
        mapping = self._create_mapping()
        db_mapping = self.mapping_obj._get_by_uuid_from_db(self.context,
                                                           mapping['uuid'])
        for key in self.mapping_obj.fields.keys():
            self.assertEqual(db_mapping[key], mapping[key])

    def test_get_by_uuid_not_found(self):
        self.assertRaises(exception.CellMappingNotFound,
                          self.mapping_obj._get_by_uuid_from_db, self.context, self.uuid)

    def test_save_in_db(self):
        mapping = self._create_mapping()
        self.mapping_obj._save_in_db(self.context, mapping['uuid'],
                                     {'name': 'meow'})
        db_mapping = self.mapping_obj._get_by_uuid_from_db(self.context,
                                                           mapping['uuid'])
        # Only 'name' (and the bookkeeping column 'updated_at') should change.
        self.assertNotEqual(db_mapping['name'], mapping['name'])
        for key in [key for key in self.mapping_obj.fields.keys()
                    if key not in ['name', 'updated_at']]:
            self.assertEqual(db_mapping[key], mapping[key])

    def test_destroy_in_db(self):
        mapping = self._create_mapping()
        # Verify the row exists before destroying it.
        self.mapping_obj._get_by_uuid_from_db(self.context, mapping['uuid'])
        self.mapping_obj._destroy_in_db(self.context, mapping['uuid'])
        self.assertRaises(exception.CellMappingNotFound,
                          self.mapping_obj._get_by_uuid_from_db, self.context,
                          mapping['uuid'])

    def test_destroy_in_db_not_found(self):
        self.assertRaises(exception.CellMappingNotFound,
                          self.mapping_obj._destroy_in_db, self.context, self.uuid)
| apache-2.0 |
atosorigin/ansible | test/units/module_utils/basic/test__log_invocation.py | 169 | 1797 | # -*- coding: utf-8 -*-
# (c) 2016, James Cammarata <jimi@sngx.net>
# (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
ARGS = dict(foo=False, bar=[1, 2, 3], bam="bam", baz=u'baz')
ARGUMENT_SPEC = dict(
foo=dict(default=True, type='bool'),
bar=dict(default=[], type='list'),
bam=dict(default="bam"),
baz=dict(default=u"baz"),
password=dict(default=True),
no_log=dict(default="you shouldn't see me", no_log=True),
)
@pytest.mark.parametrize('am, stdin', [(ARGUMENT_SPEC, ARGS)], indirect=['am', 'stdin'])
def test_module_utils_basic__log_invocation(am, mocker):
    """Verify _log_invocation logs every arg, masking password and no_log values."""

    am.log = mocker.MagicMock()
    am._log_invocation()

    # Message is generated from a dict so it will be in an unknown order.
    # have to check this manually rather than with assert_called_with()
    args = am.log.call_args[0]
    assert len(args) == 1
    message = args[0]

    # Length check plus per-fragment membership checks together prove the
    # message contains exactly the expected key=value pairs, in any order.
    assert len(message) == \
        len('Invoked with bam=bam bar=[1, 2, 3] foo=False baz=baz no_log=NOT_LOGGING_PARAMETER password=NOT_LOGGING_PASSWORD')

    assert message.startswith('Invoked with ')
    assert ' bam=bam' in message
    assert ' bar=[1, 2, 3]' in message
    assert ' foo=False' in message
    assert ' baz=baz' in message
    assert ' no_log=NOT_LOGGING_PARAMETER' in message
    assert ' password=NOT_LOGGING_PASSWORD' in message

    kwargs = am.log.call_args[1]
    assert kwargs == \
        dict(log_args={
            'foo': 'False',
            'bar': '[1, 2, 3]',
            'bam': 'bam',
            'baz': 'baz',
            'password': 'NOT_LOGGING_PASSWORD',
            'no_log': 'NOT_LOGGING_PARAMETER',
        })
| gpl-3.0 |
nelmiux/CarnotKE | jyhton/Lib/test/test_zlib_jy.py | 23 | 1039 | """Misc zlib tests
Made for Jython.
"""
import unittest
import zlib
from array import array
from test import test_support
class ArrayTestCase(unittest.TestCase):
    """zlib should accept array.array objects wherever it accepts str (Jython)."""

    def test_array(self):
        # One-shot module-level compress/decompress functions.
        self._test_array(zlib.compress, zlib.decompress)

    def test_array_compressobj(self):
        # Streaming compressobj/decompressobj interface.
        def compress(value):
            co = zlib.compressobj()
            return co.compress(value) + co.flush()

        def decompress(value):
            dco = zlib.decompressobj()
            return dco.decompress(value) + dco.flush()

        self._test_array(compress, decompress)

    def _test_array(self, compress, decompress):
        """Check arrays compress like, and decompress to, their string form."""
        # Python 2 'c' typecode: a char array must behave like its string.
        self.assertEqual(compress(array('c', 'jython')), compress('jython'))
        intarray = array('i', range(5))
        self.assertEqual(compress(intarray), compress(intarray.tostring()))
        compressed = array('c', compress('jython'))
        self.assertEqual('jython', decompress(compressed))
def test_main():
    """Run this module's test cases through the test_support harness."""
    test_support.run_unittest(ArrayTestCase)
if __name__ == '__main__':
test_main()
| apache-2.0 |
cogeorg/BlackRhino | examples/Georg2012/networkx/algorithms/centrality/load.py | 13 | 6019 | """
Load centrality.
"""
# Copyright (C) 2004-2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)',
'Pieter Swart (swart@lanl.gov)',
'Sasha Gutfraind (ag362@cornell.edu)'])
__all__ = ['load_centrality',
'edge_load']
import networkx as nx
def newman_betweenness_centrality(G, v=None, cutoff=None,
                                  normalized=True,
                                  weight=None):
    """Compute load centrality for nodes.

    The load centrality of a node is the fraction of all shortest
    paths that pass through that node.

    Parameters
    ----------
    G : graph
      A networkx graph

    v : node, optional
      If specified, return only the load of this single node.

    normalized : bool, optional
      If True the betweenness values are normalized by b=b/(n-1)(n-2) where
      n is the number of nodes in G.

    weight : None or string, optional
      If None, edge weights are ignored.
      Otherwise holds the name of the edge attribute used as weight.

    cutoff : bool, optional
      If specified, only consider paths of length <= cutoff.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with centrality as the value.

    See Also
    --------
    betweenness_centrality()

    Notes
    -----
    Load centrality is slightly different than betweenness.
    For this load algorithm see the reference
    Scientific collaboration networks: II.
    Shortest paths, weighted networks, and centrality,
    M. E. J. Newman, Phys. Rev. E 64, 016132 (2001).
    """
    if v is not None:   # only one node
        betweenness = 0.0
        # Sum this node's contribution over shortest paths from every source.
        for source in G:
            ubetween = _node_betweenness(G, source, cutoff, normalized, weight)
            betweenness += ubetween[v]
        return betweenness
    else:
        betweenness = {}.fromkeys(G, 0.0)
        # Accumulate unnormalized contributions from every source, then
        # normalize once at the end.
        for source in betweenness:
            ubetween = _node_betweenness(G, source, cutoff, False, weight)
            for vk in ubetween:
                betweenness[vk] += ubetween[vk]
        if normalized:
            order = len(betweenness)
            if order <= 2:
                return betweenness  # no normalization b=0 for all nodes
            scale = 1.0 / ((order - 1) * (order - 2))
            for v in betweenness:
                betweenness[v] *= scale
        return betweenness  # all nodes
def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None):
    """Node betweenness helper:
    see betweenness_centrality for what you probably want.

    This actually computes "load" and not betweenness.
    See https://networkx.lanl.gov/ticket/103

    This calculates the load of each node for paths from a single source.
    (The fraction of number of shortests paths from source that go
    through each node.)

    To get the load for a node you need to do all-pairs shortest paths.

    If weight is not None then use Dijkstra for finding shortest paths.
    In this case a cutoff is not implemented and so is ignored.
    """
    # get the predecessor and path length data
    if weight is None:
        (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    else:
        (pred, length) = nx.dijkstra_predecessor_and_distance(G, source, weight=weight)

    # order the nodes by path length
    onodes = [(l, vert) for (vert, l) in length.items()]
    onodes.sort()
    onodes[:] = [vert for (l, vert) in onodes if l > 0]

    # intialize betweenness
    between = {}.fromkeys(length, 1.0)

    # Process nodes farthest-first, pushing each node's accumulated load back
    # onto its predecessors, split evenly among the shortest paths.
    while onodes:
        v = onodes.pop()
        if v in pred:
            num_paths = len(pred[v])   # Discount betweenness if more than
            for x in pred[v]:          # one shortest path.
                if x == source:   # stop if hit source because all remaining v
                    break         # also have pred[v]==[source]
                between[x] += between[v] / float(num_paths)
    #  remove source
    for v in between:
        between[v] -= 1
    # rescale to be between 0 and 1
    if normalized:
        l = len(between)
        if l > 2:
            scale = 1.0 / float((l - 1) * (l - 2))  # 1/the number of possible paths
            for v in between:
                between[v] *= scale
    return between

# "load" is the historical networkx name for this centrality measure.
load_centrality = newman_betweenness_centrality
def edge_load(G, nodes=None, cutoff=False):
    """Compute edge load.

    WARNING:
    This module is for demonstration and testing purposes.

    Sums, over every source node, the fraction of shortest paths from that
    source passing through each edge (both orientations are keyed separately).
    """
    betweenness = {}
    if not nodes:         # find betweenness for every node  in graph
        nodes = G.nodes()   # that probably is what you want...
    for source in nodes:
        ubetween = _edge_betweenness(G, source, nodes, cutoff=cutoff)
        for v in ubetween.keys():
            b = betweenness.setdefault(v, 0)  # get or set default
            betweenness[v] = ubetween[v] + b    # cumulative total
    return betweenness
def _edge_betweenness(G, source, nodes, cutoff=False):
    """
    Edge betweenness helper.

    Computes, for a single source, each edge's share of shortest paths; keys
    are (u, v) tuples with both orientations present.
    """
    between = {}
    # get the predecessor data
    #(pred,length)=_fast_predecessor(G,source,cutoff=cutoff)
    (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    # order the nodes by path length
    onodes = [nn for dd, nn in sorted((dist, n) for n, dist in length.items())]
    # intialize betweenness, doesn't account for any edge weights
    for u, v in G.edges(nodes):
        between[(u, v)] = 1.0
        between[(v, u)] = 1.0

    while onodes:           # work through all paths
        v = onodes.pop()
        if v in pred:
            num_paths = len(pred[v])   # Discount betweenness if more than
            for w in pred[v]:          # one shortest path.
                if w in pred:
                    # NOTE(review): num_paths is reassigned here for the
                    # predecessor step; mirrors the historical networkx code.
                    num_paths = len(pred[w])  # Discount betweenness, mult path
                    for x in pred[w]:
                        between[(w, x)] += between[(v, w)] / num_paths
                        between[(x, w)] += between[(w, v)] / num_paths
    return between
| gpl-3.0 |
BonexGu/Blik2D-SDK | Blik2D/addon/tensorflow-1.2.1_for_blik/tensorflow/contrib/factorization/python/ops/gmm.py | 47 | 5877 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of Gaussian mixture model (GMM) clustering using tf.Learn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib import framework
from tensorflow.contrib.factorization.python.ops import gmm_ops
from tensorflow.contrib.framework.python.framework import checkpoint_utils
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import model_fn as model_fn_lib
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops.control_flow_ops import with_dependencies
def _streaming_sum(scalar_tensor):
  """Build a streaming-sum metric over `scalar_tensor`.

  Returns:
    A (metric, update_op) pair: `metric` is a local variable holding the
    running total and `update_op` adds `scalar_tensor` into it.
  """
  total = framework.local_variable(constant_op.constant(0.0))
  update_op = total.assign_add(scalar_tensor)
  return total, update_op
class GMM(estimator.Estimator):
  """An estimator for GMM clustering."""

  # Keys used in the predictions / eval-metrics dictionaries.
  SCORES = 'scores'
  ASSIGNMENTS = 'assignments'
  ALL_SCORES = 'all_scores'

  def __init__(self,
               num_clusters,
               model_dir=None,
               random_seed=0,
               params='wmc',
               initial_clusters='random',
               covariance_type='full',
               config=None):
    """Creates a model for running GMM training and inference.

    Args:
      num_clusters: number of clusters to train.
      model_dir: the directory to save the model results and log files.
      random_seed: Python integer. Seed for PRNG used to initialize centers.
      params: Controls which parameters are updated in the training process.
        Can contain any combination of "w" for weights, "m" for means,
        and "c" for covars.
      initial_clusters: specifies how to initialize the clusters for training.
        See gmm_ops.gmm for the possible values.
      covariance_type: one of "full", "diag".
      config: See Estimator
    """
    self._num_clusters = num_clusters
    self._params = params
    self._training_initial_clusters = initial_clusters
    self._covariance_type = covariance_type
    self._training_graph = None
    self._random_seed = random_seed
    super(GMM, self).__init__(
        model_fn=self._model_builder(), model_dir=model_dir, config=config)

  def predict_assignments(self, input_fn=None, batch_size=None, outputs=None):
    """See BaseEstimator.predict."""
    # Stream cluster assignments out of the full prediction dicts.
    for prediction in self.predict(input_fn=input_fn,
                                   batch_size=batch_size,
                                   outputs=outputs):
      yield prediction[GMM.ASSIGNMENTS]

  def score(self, input_fn=None, batch_size=None, steps=None):
    """Predict total sum of distances to nearest clusters.

    Note that this function is different from the corresponding one in sklearn
    which returns the negative of the sum of distances.

    Args:
      input_fn: see predict.
      batch_size: see predict.
      steps: see predict.

    Returns:
      Total sum of distances to nearest clusters.
    """
    eval_results = self.evaluate(input_fn=input_fn,
                                 batch_size=batch_size,
                                 steps=steps)
    return np.sum(eval_results[GMM.SCORES])

  def weights(self):
    """Returns the cluster weights."""
    return checkpoint_utils.load_variable(
        self.model_dir, gmm_ops.GmmAlgorithm.CLUSTERS_WEIGHT)

  def clusters(self):
    """Returns cluster centers."""
    centers = checkpoint_utils.load_variable(
        self.model_dir, gmm_ops.GmmAlgorithm.CLUSTERS_VARIABLE)
    # Drop the singleton leading axis stored in the checkpoint.
    return np.squeeze(centers, 1)

  def covariances(self):
    """Returns the covariances."""
    return checkpoint_utils.load_variable(
        self.model_dir, gmm_ops.GmmAlgorithm.CLUSTERS_COVS_VARIABLE)

  def _parse_tensor_or_dict(self, features):
    """Concatenate dict-valued features into a single tensor."""
    if not isinstance(features, dict):
      return features
    # Sort the keys so the column layout is deterministic.
    columns = [features[key] for key in sorted(features.keys())]
    return array_ops.concat(columns, 1)

  def _model_builder(self):
    """Creates a model function."""

    def _model_fn(features, labels, mode):
      """Model function."""
      # Clustering is unsupervised; no labels are expected.
      assert labels is None, labels
      (all_scores, model_predictions, losses, training_op) = gmm_ops.gmm(
          self._parse_tensor_or_dict(features),
          self._training_initial_clusters, self._num_clusters,
          self._random_seed, self._covariance_type, self._params)
      step_incr = state_ops.assign_add(variables.get_global_step(), 1)
      loss = math_ops.reduce_sum(losses)
      # Bump the global step after each training op runs.
      training_op = with_dependencies([training_op, step_incr], loss)
      predictions = {
          GMM.ALL_SCORES: all_scores[0],
          GMM.ASSIGNMENTS: model_predictions[0][0],
      }
      eval_metric_ops = {
          GMM.SCORES: _streaming_sum(loss),
      }
      return model_fn_lib.ModelFnOps(
          mode=mode,
          predictions=predictions,
          eval_metric_ops=eval_metric_ops,
          loss=loss,
          train_op=training_op)

    return _model_fn
| mit |
vmturbo/nova | nova/tests/unit/test_conf.py | 5 | 3768 | # Copyright 2016 HPE, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import tempfile
import fixtures
import mock
from oslo_config import cfg
import nova.conf.compute
from nova import config
from nova import test
class ConfTest(test.NoDBTestCase):
    """This is a test and pattern for parsing tricky options."""

    class TestConfigOpts(cfg.ConfigOpts):
        # Thin wrapper fixing prog/version/usage so tests only supply args
        # and config files.
        def __call__(self, args=None, default_config_files=None):
            if default_config_files is None:
                default_config_files = []
            return cfg.ConfigOpts.__call__(
                self,
                args=args,
                prog='test',
                version='1.0',
                usage='%(prog)s FOO BAR',
                default_config_files=default_config_files,
                validate_default_values=True)

    def setUp(self):
        super(ConfTest, self).setUp()
        # All temp files created below are cleaned up by this fixture.
        self.useFixture(fixtures.NestedTempfile())
        self.conf = self.TestConfigOpts()
        self.tempdirs = []

    def create_tempfiles(self, files, ext='.conf'):
        """Write (basename, contents) pairs to temp files, return paths."""
        paths = []
        for basename, contents in files:
            if os.path.isabs(basename):
                path = basename + ext
                fd = os.open(path, os.O_CREAT | os.O_WRONLY)
            else:
                (fd, path) = tempfile.mkstemp(prefix=basename, suffix=ext)
            paths.append(path)
            try:
                os.write(fd, contents.encode('utf-8'))
            finally:
                os.close(fd)
        return paths

    def test_reserved_huge_page(self):
        nova.conf.compute.register_opts(self.conf)
        paths = self.create_tempfiles(
            [('1',
              '[DEFAULT]\n'
              'reserved_huge_pages = node:0,size:2048,count:64\n')])
        self.conf(['--config-file', paths[0]])
        # NOTE(sdague): In oslo.config if you specify a parameter
        # incorrectly, it silently drops it from the conf, so the attr
        # would not exist at all.  Check an unrelated default-valued
        # boolean first as a control that *anything* parsed, then the
        # tricky option itself.
        self.assertTrue(hasattr(self.conf, 'force_raw_images'))
        self.assertTrue(hasattr(self.conf, 'reserved_huge_pages'),
                        "Parse error with reserved_huge_pages")
        # NOTE(sdague): Yes, this actually parses as an array holding
        # a dict.
        expected = [{'node': '0', 'size': '2048', 'count': '64'}]
        self.assertEqual(expected, self.conf.reserved_huge_pages)
class TestParseArgs(test.NoDBTestCase):
    """Checks how parse_args maps the glance debug flag to log levels."""

    @mock.patch.object(config.log, 'register_options')
    def test_parse_args_glance_debug_false(self, mock_register_options):
        # glance debug off -> glanceclient logging capped at WARN.
        self.flags(debug=False, group='glance')
        config.parse_args([], configure_db=False, init_rpc=False)
        self.assertIn('glanceclient=WARN', config.CONF.default_log_levels)

    @mock.patch.object(config.log, 'register_options')
    def test_parse_args_glance_debug_true(self, mock_register_options):
        # glance debug on -> glanceclient logging raised to DEBUG.
        self.flags(debug=True, group='glance')
        config.parse_args([], configure_db=False, init_rpc=False)
        self.assertIn('glanceclient=DEBUG', config.CONF.default_log_levels)
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.