"""Embeds Xwalk user data files in C++ code."""
import optparse
import os
import sys
import xwalk_paths
import cpp_source
sys.path.insert(0, os.path.join(xwalk_paths.GetSrc(), 'build', 'util'))
import lastchange
def main():
parser = optparse.OptionParser()
parser.add_option('', '--version-file')
parser.add_option(
'', '--directory', type='string', default='.',
help='Path to directory where the cc/h file should be created')
options, args = parser.parse_args()
version = open(options.version_file, 'r').read().strip()
revision = lastchange.FetchVersionInfo(None).revision
if revision:
version += '.' + revision.strip()
global_string_map = {
'kXwalkDriverVersion': version
}
cpp_source.WriteSource('version',
'xwalk/test/xwalkdriver',
options.directory, global_string_map)
if __name__ == '__main__':
sys.exit(main())
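# Example invocation (an illustrative sketch; flag names come from the
# option definitions above, paths are hypothetical):
#   python embed_version_in_cpp.py --version-file VERSION --directory out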
|
{
"content_hash": "bd31117307609b8ae31d91f394a0d443",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 71,
"avg_line_length": 24.945945945945947,
"alnum_prop": 0.6370530877573131,
"repo_name": "chenhengjie123/crosswalk-web-driver",
"id": "c8fe19fceca27108e309fe5431ad8153c2034cca",
"size": "1108",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "xwalkdriver/embed_version_in_cpp.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1229"
},
{
"name": "C++",
"bytes": "715807"
},
{
"name": "HTML",
"bytes": "21359"
},
{
"name": "JavaScript",
"bytes": "21381"
},
{
"name": "Objective-C++",
"bytes": "4361"
},
{
"name": "Python",
"bytes": "33613"
}
],
"symlink_target": ""
}
|
import csv
import datetime
import json
import logging
import urlparse
from cStringIO import StringIO
from django.conf import settings
from django.core.exceptions import ValidationError as DjangoValidationError
from django.core.urlresolvers import reverse
from django.core.validators import validate_ipv4_address, validate_ipv46_address
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from mongoengine.base import ValidationError
from crits.campaigns.forms import CampaignForm
from crits.campaigns.campaign import Campaign
from crits.config.config import CRITsConfig
from crits.core import form_consts
from crits.core.class_mapper import class_from_id
from crits.core.crits_mongoengine import EmbeddedSource, EmbeddedCampaign
from crits.core.crits_mongoengine import EmbeddedTicket, json_handler
from crits.core.forms import SourceForm, DownloadFileForm
from crits.core.handlers import build_jtable, csv_export
from crits.core.handlers import jtable_ajax_list, jtable_ajax_delete
from crits.core.user_tools import is_admin, user_sources
from crits.core.user_tools import is_user_subscribed, is_user_favorite
from crits.domains.domain import Domain
from crits.domains.handlers import upsert_domain, get_valid_root_domain
from crits.events.event import Event
from crits.indicators.forms import IndicatorActionsForm
from crits.indicators.forms import IndicatorActivityForm
from crits.indicators.indicator import IndicatorAction
from crits.indicators.indicator import Indicator
from crits.indicators.indicator import EmbeddedConfidence, EmbeddedImpact
from crits.ips.handlers import ip_add_update, validate_and_normalize_ip
from crits.ips.ip import IP
from crits.notifications.handlers import remove_user_from_notification
from crits.objects.object_type import ObjectType
from crits.services.handlers import run_triage, get_supported_services
logger = logging.getLogger(__name__)
def generate_indicator_csv(request):
"""
Generate a CSV file of the Indicator information
:param request: The request for this CSV.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
response = csv_export(request, Indicator)
return response
def generate_indicator_jtable(request, option):
"""
Generate the jtable data for rendering in the list template.
:param request: The request for this jtable.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
obj_type = Indicator
type_ = "indicator"
mapper = obj_type._meta['jtable_opts']
if option == "jtlist":
# Sets display url
details_url = mapper['details_url']
details_url_key = mapper['details_url_key']
fields = mapper['fields']
response = jtable_ajax_list(obj_type,
details_url,
details_url_key,
request,
includes=fields)
return HttpResponse(json.dumps(response,
default=json_handler),
content_type="application/json")
if option == "jtdelete":
response = {"Result": "ERROR"}
if jtable_ajax_delete(obj_type, request):
response = {"Result": "OK"}
return HttpResponse(json.dumps(response,
default=json_handler),
content_type="application/json")
jtopts = {
'title': "Indicators",
'default_sort': mapper['default_sort'],
'listurl': reverse('crits.%ss.views.%ss_listing' % (type_,
type_),
args=('jtlist',)),
'deleteurl': reverse('crits.%ss.views.%ss_listing' % (type_,
type_),
args=('jtdelete',)),
'searchurl': reverse(mapper['searchurl']),
'fields': list(mapper['jtopts_fields']),
'hidden_fields': mapper['hidden_fields'],
'linked_fields': mapper['linked_fields'],
'details_link': mapper['details_link'],
'no_sort': mapper['no_sort']
}
config = CRITsConfig.objects().first()
if not config.splunk_search_url:
del jtopts['fields'][1]
jtable = build_jtable(jtopts, request)
jtable['toolbar'] = [
{
'tooltip': "'All Indicators'",
'text': "'All'",
'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'New Indicators'",
'text': "'New'",
'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'New'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'In Progress Indicators'",
'text': "'In Progress'",
'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'In Progress'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Analyzed Indicators'",
'text': "'Analyzed'",
'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'Analyzed'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Deprecated Indicators'",
'text': "'Deprecated'",
'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'Deprecated'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Add Indicator'",
'text': "'Add Indicator'",
'click': "function () {$('#new-indicator').click()}",
},
]
if config.splunk_search_url:
for field in jtable['fields']:
if field['fieldname'].startswith("'splunk"):
field['display'] = """ function (data) {
return '<a href="%s' + data.record.value + '"><img src="/new_images/splunk.png" /></a>';
}
""" % config.splunk_search_url
if option == "inline":
return render_to_response("jtable.html",
{'jtable': jtable,
'jtid': '%s_listing' % type_,
'button': '%ss_tab' % type_},
RequestContext(request))
else:
return render_to_response("%s_listing.html" % type_,
{'jtable': jtable,
'jtid': '%s_listing' % type_},
RequestContext(request))
def get_indicator_details(indicator_id, analyst):
"""
Generate the data to render the Indicator details template.
:param indicator_id: The ObjectId of the Indicator to get details for.
:type indicator_id: str
:param analyst: The user requesting this information.
:type analyst: str
:returns: template (str), arguments (dict)
"""
template = None
users_sources = user_sources(analyst)
indicator = Indicator.objects(id=indicator_id,
source__name__in=users_sources).first()
if not indicator:
error = ("Either this indicator does not exist or you do "
"not have permission to view it.")
template = "error.html"
args = {'error': error}
return template, args
forms = {}
forms['new_action'] = IndicatorActionsForm(initial={'analyst': analyst,
'active': "off",
'date': datetime.datetime.now()})
forms['new_activity'] = IndicatorActivityForm(initial={'analyst': analyst,
'date': datetime.datetime.now()})
    forms['new_campaign'] = CampaignForm()
forms['new_source'] = SourceForm(analyst, initial={'date': datetime.datetime.now()})
forms['download_form'] = DownloadFileForm(initial={"obj_type": 'Indicator',
"obj_id": indicator_id})
indicator.sanitize("%s" % analyst)
# remove pending notifications for user
remove_user_from_notification("%s" % analyst, indicator_id, 'Indicator')
# subscription
subscription = {
'type': 'Indicator',
'id': indicator_id,
'subscribed': is_user_subscribed("%s" % analyst,
'Indicator',
indicator_id),
}
# relationship
relationship = {
'type': 'Indicator',
'value': indicator_id,
}
#objects
objects = indicator.sort_objects()
#relationships
relationships = indicator.sort_relationships("%s" % analyst, meta=True)
#comments
comments = {'comments': indicator.get_comments(),
'url_key': indicator_id}
#screenshots
screenshots = indicator.get_screenshots(analyst)
# favorites
favorite = is_user_favorite("%s" % analyst, 'Indicator', indicator.id)
# services
service_list = get_supported_services('Indicator')
# analysis results
service_results = indicator.get_analysis_results()
args = {'objects': objects,
'relationships': relationships,
'comments': comments,
'relationship': relationship,
'subscription': subscription,
"indicator": indicator,
"forms": forms,
"indicator_id": indicator_id,
'screenshots': screenshots,
'service_list': service_list,
'service_results': service_results,
'favorite': favorite,
'rt_url': settings.RT_URL}
return template, args
def get_indicator_type_value_pair(field):
"""
Extracts the type/value pair from a generic field. This is generally used on
fields that can become indicators such as objects or email fields.
The type/value pairs are used in indicator relationships
since indicators are uniquely identified via their type/value pair.
This function can be used in conjunction with:
crits.indicators.handlers.does_indicator_relationship_exist
Args:
field: The input field containing a type/value pair. This field is
generally from custom dictionaries such as from Django templates.
    Returns:
        A (type, value) tuple extracted from the field, or (None, None) if
        the type/value pair cannot be extracted from this kind of field.
"""
# this is an object
if field.get("name") != None and field.get("type") != None and field.get("value") != None:
name = field.get("name")
type = field.get("type")
value = field.get("value").lower().strip()
full_type = type
if type != name:
full_type = type + " - " + name
return (full_type, value)
# this is an email field
if field.get("field_type") != None and field.get("field_value") != None:
return (field.get("field_type"), field.get("field_value").lower().strip())
# otherwise the logic to extract the type/value pair from this
# specific field type is not supported
return (None, None)
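# Illustrative sketch of the two field shapes handled above (values are
# hypothetical, not from the original source):
#   get_indicator_type_value_pair(
#       {'name': 'Domain', 'type': 'URI', 'value': 'EXAMPLE.com '})
#   returns ('URI - Domain', 'example.com')
#   get_indicator_type_value_pair(
#       {'field_type': 'Address - e-mail', 'field_value': 'A@B.com'})
#   returns ('Address - e-mail', 'a@b.com')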
def get_verified_field(data, valid_values, field=None, default=None):
"""
Validate and correct string value(s) in a dictionary key or list,
or a string by itself.
:param data: The data to be verified and corrected.
:type data: dict, list of strings, or str
:param valid_values: Key with simplified string, value with actual string
:type valid_values: dict
:param field: The dictionary key containing the data.
:type field: str
:param default: A value to use if an invalid item cannot be corrected
:type default: str
:returns: the validated/corrected value(str), list of values(list) or ''
"""
if isinstance(data, dict):
data = data.get(field, '')
if isinstance(data, list):
value_list = data
else:
value_list = [data]
for i, item in enumerate(value_list):
if isinstance(item, basestring):
item = item.lower().strip().replace(' - ', '-')
if item in valid_values:
value_list[i] = valid_values[item]
continue
if default is not None:
            value_list[i] = default
continue
return ''
if isinstance(data, list):
return value_list
else:
return value_list[0]
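# Minimal usage sketch, assuming the valid_ratings map built in
# handle_indicator_csv below (inputs are hypothetical):
#   get_verified_field({'Confidence': ' High '}, valid_ratings, 'Confidence')
#   returns 'high'
#   get_verified_field('bogus', valid_ratings, default='unknown')
#   returns 'unknown'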
def handle_indicator_csv(csv_data, source, method, reference, ctype, username,
add_domain=False):
"""
Handle adding Indicators in CSV format (file or blob).
:param csv_data: The CSV data.
:type csv_data: str or file handle
:param source: The name of the source for these indicators.
:type source: str
:param method: The method of acquisition of this indicator.
:type method: str
:param reference: The reference to this data.
:type reference: str
:param ctype: The CSV type.
:type ctype: str ("file" or "blob")
:param username: The user adding these indicators.
:type username: str
:param add_domain: If the indicators being added are also other top-level
objects, add those too.
:type add_domain: boolean
:returns: dict with keys "success" (boolean) and "message" (str)
"""
if ctype == "file":
cdata = csv_data.read()
else:
cdata = csv_data.encode('ascii')
data = csv.DictReader(StringIO(cdata), skipinitialspace=True)
result = {'success': True}
result_message = ""
# Compute permitted values in CSV
valid_ratings = {
'unknown': 'unknown',
'benign': 'benign',
'low': 'low',
'medium': 'medium',
'high': 'high'}
valid_campaign_confidence = {
'low': 'low',
'medium': 'medium',
'high': 'high'}
valid_campaigns = {}
for c in Campaign.objects(active='on'):
valid_campaigns[c['name'].lower().replace(' - ', '-')] = c['name']
valid_actions = {}
for a in IndicatorAction.objects(active='on'):
valid_actions[a['name'].lower().replace(' - ', '-')] = a['name']
valid_ind_types = {}
for obj in ObjectType.objects(datatype__enum__exists=False, datatype__file__exists=False):
if obj['object_type'] == obj['name']:
name = obj['object_type']
else:
name = "%s - %s" % (obj['object_type'], obj['name'])
valid_ind_types[name.lower().replace(' - ', '-')] = name
# Start line-by-line import
    added = 0
    processed = 0
for processed, d in enumerate(data, 1):
ind = {}
ind['value'] = d.get('Indicator', '').lower().strip()
ind['type'] = get_verified_field(d, valid_ind_types, 'Type')
if not ind['value'] or not ind['type']:
# Mandatory value missing or malformed, cannot process csv row
i = ""
result['success'] = False
if not ind['value']:
i += "No valid Indicator value "
if not ind['type']:
i += "No valid Indicator type "
result_message += "Cannot process row %s: %s<br />" % (processed, i)
continue
campaign = get_verified_field(d, valid_campaigns, 'Campaign')
if campaign:
ind['campaign'] = campaign
ind['campaign_confidence'] = get_verified_field(d, valid_campaign_confidence,
'Campaign Confidence',
default='low')
actions = d.get('Action', '')
if actions:
actions = get_verified_field(actions.split(','), valid_actions)
if not actions:
result['success'] = False
result_message += "Cannot process row %s: Invalid Action<br />" % processed
continue
ind['confidence'] = get_verified_field(d, valid_ratings, 'Confidence',
default='unknown')
ind['impact'] = get_verified_field(d, valid_ratings, 'Impact',
default='unknown')
ind[form_consts.Common.BUCKET_LIST_VARIABLE_NAME] = d.get(form_consts.Common.BUCKET_LIST, '')
ind[form_consts.Common.TICKET_VARIABLE_NAME] = d.get(form_consts.Common.TICKET, '')
try:
response = handle_indicator_insert(ind, source, reference, analyst=username,
method=method, add_domain=add_domain)
except Exception, e:
result['success'] = False
result_message += "Failure processing row %s: %s<br />" % (processed, str(e))
continue
if response['success']:
if actions:
action = {'active': 'on',
'analyst': username,
'begin_date': '',
'end_date': '',
'performed_date': '',
'reason': '',
'date': datetime.datetime.now()}
for action_type in actions:
action['action_type'] = action_type
action_add(response.get('objectid'), action)
else:
result['success'] = False
result_message += "Failure processing row %s: %s<br />" % (processed, response['message'])
continue
added += 1
if processed < 1:
result['success'] = False
result_message = "Could not find any valid CSV rows to parse!"
result['message'] = "Successfully added %s Indicator(s).<br />%s" % (added, result_message)
return result
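# Hedged illustration of a CSV blob this handler accepts. The column names
# come from the d.get() lookups above; the bucket-list and ticket headers
# live in form_consts and are not spelled out here. Values are hypothetical
# and must match configured campaigns/actions/types:
#   Indicator,Type,Campaign,Campaign Confidence,Confidence,Impact,Action
#   example.com,URI - Domain Name,SomeCampaign,low,medium,high,Block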
def handle_indicator_ind(value, source, ctype, analyst, method='', reference='',
add_domain=False, add_relationship=False, campaign=None,
campaign_confidence=None, confidence=None, impact=None,
bucket_list=None, ticket=None, cache={}):
"""
Handle adding an individual indicator.
:param value: The indicator value.
:type value: str
:param source: The name of the source for this indicator.
:type source: str
:param ctype: The indicator type.
:type ctype: str
:param analyst: The user adding this indicator.
:type analyst: str
:param method: The method of acquisition of this indicator.
:type method: str
:param reference: The reference to this data.
:type reference: str
:param add_domain: If the indicators being added are also other top-level
objects, add those too.
:type add_domain: boolean
:param add_relationship: If a relationship can be made, create it.
:type add_relationship: boolean
:param campaign: Campaign to attribute to this indicator.
:type campaign: str
:param campaign_confidence: Confidence of this campaign.
:type campaign_confidence: str
:param confidence: Indicator confidence.
:type confidence: str
:param impact: Indicator impact.
:type impact: str
:param bucket_list: The bucket(s) to assign to this indicator.
:type bucket_list: str
:param ticket: Ticket to associate with this indicator.
:type ticket: str
:param cache: Cached data, typically for performance enhancements
                  during bulk operations.
:type cache: dict
:returns: dict with keys "success" (boolean) and "message" (str)
"""
result = None
if not source:
return {"success" : False, "message" : "Missing source information."}
if value == None or value.strip() == "":
result = {'success': False,
'message': "Can't create indicator with an empty value field"}
elif ctype == None or ctype.strip() == "":
result = {'success': False,
'message': "Can't create indicator with an empty type field"}
else:
ind = {}
ind['type'] = ctype.strip()
ind['value'] = value.lower().strip()
if campaign:
ind['campaign'] = campaign
if campaign_confidence and campaign_confidence in ('low', 'medium', 'high'):
ind['campaign_confidence'] = campaign_confidence
if confidence and confidence in ('unknown', 'benign', 'low', 'medium',
'high'):
ind['confidence'] = confidence
if impact and impact in ('unknown', 'benign', 'low', 'medium', 'high'):
ind['impact'] = impact
if bucket_list:
ind[form_consts.Common.BUCKET_LIST_VARIABLE_NAME] = bucket_list
if ticket:
ind[form_consts.Common.TICKET_VARIABLE_NAME] = ticket
try:
return handle_indicator_insert(ind, source, reference, analyst,
method, add_domain, add_relationship, cache=cache)
except Exception, e:
return {'success': False, 'message': repr(e)}
return result
def handle_indicator_insert(ind, source, reference='', analyst='', method='',
add_domain=False, add_relationship=False, cache={}):
"""
Insert an individual indicator into the database.
NOTE: Setting add_domain to True will always create a relationship as well.
However, to create a relationship with an object that already exists before
this function was called, set add_relationship to True. This will assume
that the domain or IP object to create the relationship with already exists
    and will avoid infinite mutual calls between, for example, ip_add_update
    and this function, which would otherwise re-add the domain/IP objects.
:param ind: Information about the indicator.
:type ind: dict
:param source: The source for this indicator.
:type source: list, str, :class:`crits.core.crits_mongoengine.EmbeddedSource`
:param reference: The reference to the data.
:type reference: str
:param analyst: The user adding this indicator.
:type analyst: str
:param method: Method of acquiring this indicator.
:type method: str
:param add_domain: If this indicator is also a top-level object, try to add
it.
:type add_domain: boolean
:param add_relationship: Attempt to add relationships if applicable.
:type add_relationship: boolean
:param cache: Cached data, typically for performance enhancements
                  during bulk operations.
:type cache: dict
:returns: dict with keys:
"success" (boolean),
"message" (str) if failed,
"objectid" (str) if successful,
"is_new_indicator" (boolean) if successful.
"""
(ind['value'], error) = validate_indicator_value(ind['value'], ind['type'])
if error:
return {"success": False, "message": error}
is_new_indicator = False
dmain = None
ip = None
rank = {
'unknown': 0,
'benign': 1,
'low': 2,
'medium': 3,
'high': 4,
}
indicator = Indicator.objects(ind_type=ind['type'],
value=ind['value']).first()
if not indicator:
indicator = Indicator()
indicator.ind_type = ind['type']
indicator.value = ind['value']
indicator.created = datetime.datetime.now()
indicator.confidence = EmbeddedConfidence(analyst=analyst)
indicator.impact = EmbeddedImpact(analyst=analyst)
is_new_indicator = True
if 'campaign' in ind:
if isinstance(ind['campaign'], basestring) and len(ind['campaign']) > 0:
confidence = ind.get('campaign_confidence', 'low')
ind['campaign'] = EmbeddedCampaign(name=ind['campaign'],
confidence=confidence,
description="",
analyst=analyst,
date=datetime.datetime.now())
if isinstance(ind['campaign'], EmbeddedCampaign):
indicator.add_campaign(ind['campaign'])
elif isinstance(ind['campaign'], list):
for campaign in ind['campaign']:
if isinstance(campaign, EmbeddedCampaign):
indicator.add_campaign(campaign)
if 'confidence' in ind and rank.get(ind['confidence'], 0) > rank.get(indicator.confidence.rating, 0):
indicator.confidence.rating = ind['confidence']
indicator.confidence.analyst = analyst
if 'impact' in ind and rank.get(ind['impact'], 0) > rank.get(indicator.impact.rating, 0):
indicator.impact.rating = ind['impact']
indicator.impact.analyst = analyst
bucket_list = None
if form_consts.Common.BUCKET_LIST_VARIABLE_NAME in ind:
bucket_list = ind[form_consts.Common.BUCKET_LIST_VARIABLE_NAME]
if bucket_list:
indicator.add_bucket_list(bucket_list, analyst)
ticket = None
if form_consts.Common.TICKET_VARIABLE_NAME in ind:
ticket = ind[form_consts.Common.TICKET_VARIABLE_NAME]
if ticket:
indicator.add_ticket(ticket, analyst)
if isinstance(source, list):
for s in source:
indicator.add_source(source_item=s, method=method, reference=reference)
elif isinstance(source, EmbeddedSource):
indicator.add_source(source_item=source, method=method, reference=reference)
elif isinstance(source, basestring):
s = EmbeddedSource()
s.name = source
instance = EmbeddedSource.SourceInstance()
instance.reference = reference
instance.method = method
instance.analyst = analyst
instance.date = datetime.datetime.now()
s.instances = [instance]
indicator.add_source(s)
if add_domain or add_relationship:
ind_type = indicator.ind_type
ind_value = indicator.value
url_contains_ip = False
if ind_type in ("URI - Domain Name", "URI - URL"):
if ind_type == "URI - URL":
domain_or_ip = urlparse.urlparse(ind_value).hostname
try:
validate_ipv46_address(domain_or_ip)
url_contains_ip = True
except DjangoValidationError:
pass
else:
domain_or_ip = ind_value
if not url_contains_ip:
success = None
if add_domain:
success = upsert_domain(domain_or_ip, indicator.source, '%s' % analyst,
None, bucket_list=bucket_list, cache=cache)
if not success['success']:
return {'success': False, 'message': success['message']}
if not success or not 'object' in success:
dmain = Domain.objects(domain=domain_or_ip).first()
else:
dmain = success['object']
if ind_type.startswith("Address - ip") or ind_type == "Address - cidr" or url_contains_ip:
if url_contains_ip:
ind_value = domain_or_ip
try:
validate_ipv4_address(domain_or_ip)
ind_type = 'Address - ipv4-addr'
except DjangoValidationError:
ind_type = 'Address - ipv6-addr'
success = None
if add_domain:
success = ip_add_update(ind_value,
ind_type,
source=indicator.source,
campaign=indicator.campaign,
analyst=analyst,
bucket_list=bucket_list,
ticket=ticket,
indicator_reference=reference,
cache=cache)
if not success['success']:
return {'success': False, 'message': success['message']}
if not success or not 'object' in success:
ip = IP.objects(ip=indicator.value).first()
else:
ip = success['object']
indicator.save(username=analyst)
if dmain:
dmain.add_relationship(indicator,
'Related_To',
analyst="%s" % analyst,
get_rels=False)
dmain.save(username=analyst)
if ip:
ip.add_relationship(indicator,
'Related_To',
analyst="%s" % analyst,
get_rels=False)
ip.save(username=analyst)
# run indicator triage
if is_new_indicator:
indicator.reload()
run_triage(indicator, analyst)
return {'success': True, 'objectid': str(indicator.id),
'is_new_indicator': is_new_indicator, 'object': indicator}
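# Minimal usage sketch of the contract documented above (source name and
# value are hypothetical):
#   result = handle_indicator_insert({'type': 'URI - Domain Name',
#                                     'value': 'example.com'},
#                                    'SomeSource', analyst='admin',
#                                    add_domain=True)
#   on success: {'success': True, 'objectid': '...',
#                'is_new_indicator': True, 'object': <Indicator>}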
def does_indicator_relationship_exist(field, indicator_relationships):
"""
Checks if the input field's values already have an indicator
by cross checking against the list of indicator relationships. The input
field already has an associated indicator created if the input field's
"type" and "value" pairs exist -- since indicators are uniquely identified
by their type/value pair.
Args:
field: The generic input field containing a type/value pair. This is
checked against a list of indicators relationships to see if a
corresponding indicator already exists. This field is generally
from custom dictionaries such as from Django templates.
indicator_relationships: The list of indicator relationships
to cross reference the input field against.
Returns:
Returns true if the input field already has an indicator associated
with its values. Returns false otherwise.
"""
type, value = get_indicator_type_value_pair(field)
if indicator_relationships != None:
if type != None and value != None:
for indicator_relationship in indicator_relationships:
if indicator_relationship == None:
logger.error('Indicator relationship is not valid: ' +
str(indicator_relationship))
continue
if type == indicator_relationship.get('ind_type') and value == indicator_relationship.get('ind_value'):
return True
else:
        logger.error('Could not extract type/value pair of input field; ' +
                     'type: ' + str(type) +
                     ', value: ' + (value.encode("utf-8") if value else str(value)) +
                     ', indicator_relationships: ' + str(indicator_relationships))
return False
def ci_search(itype, confidence, impact, actions):
"""
Find indicators based on type, confidence, impact, and/or actions.
:param itype: The indicator type to search for.
:type itype: str
:param confidence: The confidence level(s) to search for.
:type confidence: str
:param impact: The impact level(s) to search for.
:type impact: str
:param actions: The action(s) to search for.
:type actions: str
:returns: :class:`crits.core.crits_mongoengine.CritsQuerySet`
"""
query = {}
if confidence:
item_list = confidence.replace(' ', '').split(',')
query["confidence.rating"] = {"$in": item_list}
if impact:
item_list = impact.replace(' ', '').split(',')
query["impact.rating"] = {"$in": item_list}
if actions:
item_list = actions.split(',')
query["actions.action_type"] = {"$in": item_list}
query["type"] = "%s" % itype.strip()
result_filter = ('type', 'value', 'confidence', 'impact', 'actions')
results = Indicator.objects(__raw__=query).only(*result_filter)
return results
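# Example of the raw MongoDB query this builds (action name hypothetical):
#   ci_search('URI - Domain Name', 'medium, high', '', 'Block') queries with
#   {'confidence.rating': {'$in': ['medium', 'high']},
#    'actions.action_type': {'$in': ['Block']},
#    'type': 'URI - Domain Name'}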
def set_indicator_type(indicator_id, itype, username):
"""
Set the Indicator type.
:param indicator_id: The ObjectId of the indicator to update.
:type indicator_id: str
:param itype: The new indicator type.
:type itype: str
:param username: The user updating the indicator.
:type username: str
:returns: dict with key "success" (boolean)
"""
# check to ensure we're not duping an existing indicator
indicator = Indicator.objects(id=indicator_id).first()
value = indicator.value
ind_check = Indicator.objects(ind_type=itype, value=value).first()
if ind_check:
# we found a dupe
return {'success': False}
else:
try:
indicator.ind_type = itype
indicator.save(username=username)
return {'success': True}
except ValidationError:
return {'success': False}
def add_new_indicator_action(action, analyst):
"""
Add a new indicator action to CRITs.
:param action: The action to add to CRITs.
:type action: str
:param analyst: The user adding this action.
:returns: True, False
"""
action = action.strip()
try:
idb_action = IndicatorAction.objects(name=action).first()
if idb_action:
return False
idb_action = IndicatorAction()
idb_action.name = action
idb_action.save(username=analyst)
return True
except ValidationError:
return False
def indicator_remove(_id, username):
"""
Remove an Indicator from CRITs.
:param _id: The ObjectId of the indicator to remove.
:type _id: str
:param username: The user removing the indicator.
:type username: str
:returns: dict with keys "success" (boolean) and "message" (list) if failed.
"""
if is_admin(username):
indicator = Indicator.objects(id=_id).first()
if indicator:
indicator.delete(username=username)
return {'success': True}
else:
return {'success': False, 'message': ['Cannot find Indicator']}
else:
return {'success': False, 'message': ['Must be an admin to delete']}
def action_add(indicator_id, action):
"""
Add an action to an indicator.
:param indicator_id: The ObjectId of the indicator to update.
:type indicator_id: str
:param action: The information about the action.
:type action: dict
:returns: dict with keys:
"success" (boolean),
"message" (str) if failed,
"object" (dict) if successful.
"""
sources = user_sources(action['analyst'])
indicator = Indicator.objects(id=indicator_id,
source__name__in=sources).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
try:
indicator.add_action(action['action_type'],
action['active'],
action['analyst'],
action['begin_date'],
action['end_date'],
action['performed_date'],
action['reason'],
action['date'])
indicator.save(username=action['analyst'])
return {'success': True, 'object': action}
except ValidationError, e:
return {'success': False, 'message': e}
def action_update(indicator_id, action):
"""
Update an action for an indicator.
:param indicator_id: The ObjectId of the indicator to update.
:type indicator_id: str
:param action: The information about the action.
:type action: dict
:returns: dict with keys:
"success" (boolean),
"message" (str) if failed,
"object" (dict) if successful.
"""
sources = user_sources(action['analyst'])
indicator = Indicator.objects(id=indicator_id,
source__name__in=sources).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
try:
indicator.edit_action(action['action_type'],
action['active'],
action['analyst'],
action['begin_date'],
action['end_date'],
action['performed_date'],
action['reason'],
action['date'])
indicator.save(username=action['analyst'])
return {'success': True, 'object': action}
except ValidationError, e:
return {'success': False, 'message': e}
def action_remove(indicator_id, date, analyst):
"""
Remove an action from an indicator.
:param indicator_id: The ObjectId of the indicator to update.
:type indicator_id: str
:param date: The date of the action to remove.
:type date: datetime.datetime
:param analyst: The user removing the action.
:type analyst: str
:returns: dict with keys "success" (boolean) and "message" (str) if failed.
"""
indicator = Indicator.objects(id=indicator_id).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
try:
indicator.delete_action(date)
indicator.save(username=analyst)
return {'success': True}
except ValidationError, e:
return {'success': False, 'message': e}
def activity_add(indicator_id, activity):
"""
Add activity to an Indicator.
:param indicator_id: The ObjectId of the indicator to update.
:type indicator_id: str
:param activity: The activity information.
:type activity: dict
:returns: dict with keys:
"success" (boolean),
"message" (str) if failed,
"object" (dict) if successful.
"""
sources = user_sources(activity['analyst'])
indicator = Indicator.objects(id=indicator_id,
source__name__in=sources).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
try:
indicator.add_activity(activity['analyst'],
activity['start_date'],
activity['end_date'],
activity['description'],
activity['date'])
indicator.save(username=activity['analyst'])
return {'success': True, 'object': activity,
'id': str(indicator.id)}
except ValidationError, e:
return {'success': False, 'message': e,
'id': str(indicator.id)}
def activity_update(indicator_id, activity):
"""
Update activity for an Indicator.
:param indicator_id: The ObjectId of the indicator to update.
:type indicator_id: str
:param activity: The activity information.
:type activity: dict
:returns: dict with keys:
"success" (boolean),
"message" (str) if failed,
"object" (dict) if successful.
"""
sources = user_sources(activity['analyst'])
indicator = Indicator.objects(id=indicator_id,
source__name__in=sources).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
try:
indicator.edit_activity(activity['analyst'],
activity['start_date'],
activity['end_date'],
activity['description'],
activity['date'])
indicator.save(username=activity['analyst'])
return {'success': True, 'object': activity}
except ValidationError, e:
return {'success': False, 'message': e}
def activity_remove(indicator_id, date, analyst):
"""
Remove activity from an Indicator.
:param indicator_id: The ObjectId of the indicator to update.
:type indicator_id: str
:param date: The date of the activity to remove.
:type date: datetime.datetime
:param analyst: The user removing this activity.
:type analyst: str
:returns: dict with keys "success" (boolean) and "message" (str) if failed.
"""
indicator = Indicator.objects(id=indicator_id).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
try:
indicator.delete_activity(date)
indicator.save(username=analyst)
return {'success': True}
except ValidationError, e:
return {'success': False, 'message': e}
def ci_update(indicator_id, ci_type, value, analyst):
"""
Update confidence or impact for an indicator.
:param indicator_id: The ObjectId of the indicator to update.
:type indicator_id: str
:param ci_type: What we are updating.
:type ci_type: str ("confidence" or "impact")
:param value: The value to set.
:type value: str ("unknown", "benign", "low", "medium", "high")
:param analyst: The user updating this indicator.
:type analyst: str
:returns: dict with keys "success" (boolean) and "message" (str) if failed.
"""
indicator = Indicator.objects(id=indicator_id).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
if ci_type == "confidence" or ci_type == "impact":
try:
if ci_type == "confidence":
indicator.set_confidence(analyst, value)
else:
indicator.set_impact(analyst, value)
indicator.save(username=analyst)
return {'success': True}
except ValidationError, e:
return {'success': False, "message": e}
else:
return {'success': False, 'message': 'Invalid CI type'}
def create_indicator_and_ip(type_, id_, ip, analyst):
"""
Add indicators for an IP address.
:param type_: The CRITs top-level object we are getting this IP from.
:type type_: class which inherits from
:class:`crits.core.crits_mongoengine.CritsBaseAttributes`
:param id_: The ObjectId of the top-level object to search for.
:type id_: str
:param ip: The IP address to generate an indicator out of.
:type ip: str
:param analyst: The user adding this indicator.
:type analyst: str
:returns: dict with keys:
"success" (boolean),
"message" (str),
"value" (str)
"""
obj_class = class_from_id(type_, id_)
if obj_class:
ip_class = IP.objects(ip=ip).first()
ind_type = "Address - ipv4-addr"
ind_class = Indicator.objects(ind_type=ind_type, value=ip).first()
# setup IP
if ip_class:
ip_class.add_relationship(obj_class,
"Related_To",
analyst=analyst)
else:
ip_class = IP()
ip_class.ip = ip
ip_class.source = obj_class.source
ip_class.save(username=analyst)
ip_class.add_relationship(obj_class,
"Related_To",
analyst=analyst)
# setup Indicator
message = ""
if ind_class:
message = ind_class.add_relationship(obj_class,
"Related_To",
analyst=analyst)
ind_class.add_relationship(ip_class,
"Related_To",
analyst=analyst)
else:
ind_class = Indicator()
ind_class.source = obj_class.source
ind_class.ind_type = ind_type
ind_class.value = ip
ind_class.save(username=analyst)
message = ind_class.add_relationship(obj_class,
"Related_To",
analyst=analyst)
ind_class.add_relationship(ip_class,
"Related_To",
analyst=analyst)
# save
try:
obj_class.save(username=analyst)
ip_class.save(username=analyst)
ind_class.save(username=analyst)
if message['success']:
rels = obj_class.sort_relationships("%s" % analyst, meta=True)
return {'success': True, 'message': rels, 'value': obj_class.id}
else:
return {'success': False, 'message': message['message']}
except Exception, e:
return {'success': False, 'message': e}
else:
return {'success': False,
'message': "Could not find %s to add relationships" % type_}
def create_indicator_from_tlo(tlo_type, tlo, analyst, source_name=None,
tlo_id=None, ind_type=None, value=None,
update_existing=True, add_domain=True):
"""
Create an indicator from a Top-Level Object (TLO).
:param tlo_type: The CRITs type of the parent TLO.
:type tlo_type: str
:param tlo: A CRITs parent TLO class object
:type tlo: class - some CRITs TLO
:param analyst: The user creating this indicator.
:type analyst: str
:param source_name: The source name for the new source instance that
records this indicator being added.
:type source_name: str
:param tlo_id: The ObjectId of the parent TLO.
:type tlo_id: str
:param ind_type: The indicator type, if TLO is not Domain or IP.
:type ind_type: str
:param value: The value of the indicator, if TLO is not Domain or IP.
:type value: str
:param update_existing: If Indicator already exists, update it
:type update_existing: boolean
:param add_domain: If new indicator contains a domain/ip, add a
matching Domain or IP TLO
:type add_domain: boolean
:returns: dict with keys:
"success" (boolean),
"message" (str),
"value" (str),
"indicator" :class:`crits.indicators.indicator.Indicator`
"""
if not tlo:
tlo = class_from_id(tlo_type, tlo_id)
if not tlo:
return {'success': False,
'message': "Could not find source %s" % obj_type}
source = tlo.source
campaign = tlo.campaign
bucket_list = tlo.bucket_list
tickets = tlo.tickets
# If value and ind_type provided, use them instead of defaults
if tlo_type == "Domain":
value = value or tlo.domain
ind_type = ind_type or "URI - Domain Name"
elif tlo_type == "IP":
value = value or tlo.ip
ind_type = ind_type or tlo.ip_type
elif tlo_type == "Indicator":
value = value or tlo.value
ind_type = ind_type or tlo.ind_type
if not value or not ind_type: # if not provided & no default
return {'success': False,
'message': "Indicator value & type must be provided"
"for TLO of type %s" % obj_type}
#check if indicator already exists
if Indicator.objects(ind_type=ind_type,
value=value).first() and not update_existing:
return {'success': False, 'message': "Indicator already exists"}
result = handle_indicator_ind(value, source,
ctype=ind_type,
analyst=analyst,
add_domain=add_domain,
add_relationship=True,
campaign=campaign,
bucket_list=bucket_list,
ticket=tickets)
if result['success']:
ind = Indicator.objects(id=result['objectid']).first()
if ind:
if source_name:
# add source to show when indicator was created/updated
ind.add_source(source=source_name,
method= 'Indicator created/updated ' \
'from %s with ID %s' % (tlo_type, tlo.id),
date=datetime.datetime.now(),
analyst = analyst)
tlo.add_relationship(ind,
"Related_To",
analyst=analyst)
tlo.save(username=analyst)
for rel in tlo.relationships:
if rel.rel_type == "Event":
# Get event object to pass in.
rel_item = Event.objects(id=rel.object_id).first()
if rel_item:
ind.add_relationship(rel_item,
"Related_To",
analyst=analyst)
ind.save(username=analyst)
tlo.reload()
rels = tlo.sort_relationships("%s" % analyst, meta=True)
return {'success': True, 'message': rels,
'value': tlo.id, 'indicator': ind}
else:
return {'success': False, 'message': "Failed to create Indicator"}
else:
return result
def validate_indicator_value(value, ind_type):
"""
Check that a given value is valid for a particular Indicator type.
:param value: The value to be validated
:type value: str
:param ind_type: The indicator type to validate against
:type ind_type: str
:returns: tuple: (Valid value, Error message)
"""
value = value.strip()
domain = ""
# URL
if ind_type == "URI - URL":
if "://" not in value.split('.')[0]:
return ("", "URI - URL must contain protocol "
"prefix (e.g. http://, https://, ftp://) ")
domain_or_ip = urlparse.urlparse(value).hostname
try:
validate_ipv46_address(domain_or_ip)
return (value, "")
except DjangoValidationError:
domain = domain_or_ip
# Email address
if ind_type == "Address - e-mail":
if '@' not in value:
return ("", "Email address must contain an '@'")
domain_or_ip = value.split('@')[-1]
if domain_or_ip[0] == '[' and domain_or_ip[-1] == ']':
try:
validate_ipv46_address(domain_or_ip[1:-1])
return (value, "")
except DjangoValidationError:
return ("", "Email address does not contain a valid IP")
else:
domain = domain_or_ip
# IPs
if "Address - ipv" in ind_type or "cidr" in ind_type:
(ip_address, error) = validate_and_normalize_ip(value, ind_type)
if error:
return ("", error)
else:
return (ip_address, "")
# Domains
if ind_type == "URI - Domain Name" or domain:
(root, domain, error) = get_valid_root_domain(domain or value)
if error:
return ("", error)
else:
return (value, "")
return (value, "")
|
{
"content_hash": "17a5294018b4e836f93a1755fd9d9444",
"timestamp": "",
"source": "github",
"line_count": 1319,
"max_line_length": 122,
"avg_line_length": 39.25549658832449,
"alnum_prop": 0.5615705511993511,
"repo_name": "0x3a/crits",
"id": "a00a599d25e9b3118547c1698a04d199627b9816",
"size": "51778",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "crits/indicators/handlers.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "8694"
},
{
"name": "CSS",
"bytes": "362142"
},
{
"name": "HTML",
"bytes": "460692"
},
{
"name": "JavaScript",
"bytes": "2036364"
},
{
"name": "Python",
"bytes": "1983831"
},
{
"name": "SaltStack",
"bytes": "3165"
},
{
"name": "Shell",
"bytes": "10825"
}
],
"symlink_target": ""
}
|
"""
Michael duPont
Primary routing controller
"""
import functools
import os
from json import load
from flask import jsonify
from TutorMatch import app
import TutorMatch.dbconn as db
##--- Custom Decorators ---##
def output_str(func):
"""This decorator adds the 'raw' boolean keyword to functions.
When applied, the function's output will be converted to a JSON
string unless the function call includes raw=True
"""
#This decorator allows us to use our custom decs alongside Flask's routing decs
@functools.wraps(func)
def wrapper(*args, raw: bool=False, **kwargs):
"""jsonify the output of a function if not 'raw'"""
ret = func(*args, **kwargs)
if not raw:
ret = jsonify(ret)
return ret
return wrapper
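# Usage note (a sketch): Flask clients get the jsonify'd Response from the
# routes below, while internal callers can bypass serialization, e.g.
#   users(raw=True)  # plain [dict] instead of a flask Response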
##--- Query Vars ---##
QUERIES = load(open(os.path.dirname(os.path.abspath(__file__))+'/queries.json'))
USER_COLS = 'u.id, u.name, u.utype AS "type"'
SKILL_COLS = 's.name, s.display_name'
COUNT = 'count(*)'
##--- Helper Functions ---##
def skills_for_user(user_id: int) -> [str]:
"""Returns a list of skills names for a given user_id"""
query = QUERIES['skills']['for-user'].format('s.name')
return [r[0] for r in db.query(query, (user_id,))]
def get_skills_for_user_dict(udict: dict, remove_id: bool=False):
"""Adds skills to a list of user dicts"""
for i, user in enumerate(udict):
udict[i]['skills'] = list(skills_for_user(user['id']))
if remove_id:
del udict[i]['id']
return udict
def get_users(subquery: str, args: tuple=None, ext: str='', one: bool=False) -> [dict]:
"""Returns a full list of users utilizing a given subquery key and value args"""
query = QUERIES['users'][subquery].format(USER_COLS) + ext
base = db.query(query, args, return_type='dict')
ret = get_skills_for_user_dict(base)
return ret[0] if one and ret else ret
def get_count(table: str, subquery: str, args: tuple=None) -> int:
"""Returns the number of elements in a given table matching a given subquery key"""
query = QUERIES[table][subquery].format(COUNT)
return db.query(query, args, one=True)[0]
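# Hedged sketch of the queries.json shape these helpers assume (the real
# file is not shown here; each entry is a SQL template whose '{}' is filled
# with a column list via str.format):
#   {"users": {"base": "SELECT {} FROM users u",
#              "type": "SELECT {} FROM users u WHERE u.utype=?"},
#    "skills": {"for-user": "SELECT {} FROM skills s ... WHERE ...=?"}}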
##--- Routing ---##
@app.route('/')
def home() -> str:
return "Hello"
# Users
@app.route('/users')
@output_str
def users() -> [dict]:
"""Returns details on all users"""
return get_users('base')
@app.route('/users/count')
@output_str
def user_count() -> int:
"""Returns the total number of users"""
return get_count('users', 'base')
@app.route('/users/<int:uid>')
@output_str
def user_by_id(uid: int) -> dict:
"""Returns a single user by id"""
return get_users('base', (uid,), ext=' WHERE u.id=?', one=True)
@app.route('/users/<string:utype>')
@output_str
def users_by_type(utype: str) -> [dict]:
"""Returns details on users matching a given type"""
return get_users('type', (utype.lower(),))
@app.route('/users/<string:utype>/count')
@output_str
def user_count_by_type(utype: str) -> int:
"""Returns the total number of users that match a given type"""
return get_count('users', 'type', (utype.lower(),))
@app.route('/users/with/<string:skill>')
@output_str
def users_by_skill(skill: str) -> [dict]:
"""Returns details on users matching a given skill"""
return get_users('skill', (skill.lower(),))
@app.route('/users/with/<string:skill>/count')
@output_str
def user_count_by_skill(skill: str) -> int:
"""Returns the total number of users with a given skill"""
return get_count('users', 'skill', (skill.lower(),))
@app.route('/users/<string:utype>/with/<string:skill>')
@output_str
def users_by_type_and_skill(utype: str, skill: str):
"""Returns details on user that match both a given type and skill"""
return get_users('type-skill', (utype.lower(), skill.lower(),))
@app.route('/users/<string:utype>/with/<string:skill>/count')
@output_str
def user_count_by_type_and_skill(utype: str, skill: str) -> int:
"""Returns the total number of users that match both a given type and skill"""
return get_count('users', 'type-skill', (utype.lower(), skill.lower(),))
# Skills
@app.route('/skills')
@output_str
def skills() -> [dict]:
"""Returns a list of dicts containing all skill details"""
query = QUERIES['skills']['base'].format(SKILL_COLS)
return db.query(query, return_type='dict')
@app.route('/skills/count')
@output_str
def skill_count() -> int:
"""Returns the total number of skills"""
return get_count('skills', 'base')
@app.route('/skills/<int:sid>')
@output_str
def skill_by_id(sid: int) -> dict:
"""Returns a single skill by id"""
query = QUERIES['skills']['base'].format(SKILL_COLS) + ' WHERE s.id=?'
return db.query(query, (sid,), return_type='dict', one=True) or {}
@app.route('/skills/<string:name>')
@output_str
def skill_by_name(name: str):
"""Returns a single skill by name"""
query = QUERIES['skills']['base'].format(SKILL_COLS) + ' WHERE s.name=?'
return db.query(query, (name,), return_type='dict', one=True) or {}
|
{
"content_hash": "92803159faaa4e12bae7a3d9b3c09f19",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 87,
"avg_line_length": 32.58441558441559,
"alnum_prop": 0.6478676763650857,
"repo_name": "flyinactor91/Tutor-Match",
"id": "f9cf7e9fa5dee564a9ce2e58b1da42ffdf175842",
"size": "5018",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TutorMatch/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7755"
}
],
"symlink_target": ""
}
|
import numpy as np
from sklearn import preprocessing
from datetime import datetime
def _corr(C):
R=np.empty_like(C)
#compute correlation from covariance
for i,ci in enumerate(C):
for j,cij in enumerate(ci):
R[i,j] = cij / np.sqrt(C[i,i] * C[j,j])
return R
def pcaFun(x, whiten=False,e=0, type='cov', method='svd',
center=True,normalize=False):
# x : n x m numpy.array of n points and m dimensions
# whiten : boolean parameter - whiten data or not
# e : normalization parameter for whitening data
n,d = x.shape
oX=x
# normalize
if normalize:
        x = preprocessing.normalize(x, axis=0)
# center data
if center:
avg=np.mean(x,axis=0)
x=x-avg
if method=='eig':
# compute covariance matrix
if type=='cov':
C=x.T.dot(x)
C /= n
elif type=='corr':
#C=np.corrcoef(x,rowvar=0, bias=1)
C=x.T.dot(x)
C /= n
C=_corr(C)
else:
raise Exception('Incompatible argument value \
\'type='+str(type)+'\'')
# compute eig
eigVals,eigVect=np.linalg.eig(C)
#sort eigenthings
        eigValOrder=eigVals.argsort()[::-1] #descending eigen indices
sortedEigVect=np.zeros(eigVect.shape)
sortedEigVal=np.zeros(eigVals.shape)
for i,j in enumerate(eigValOrder):
sortedEigVect[:,i]=eigVect[:,j]
sortedEigVal[i]=eigVals[j]
comps = sortedEigVect
eigs = sortedEigVal
elif method=='svd':
U,S,V = np.linalg.svd(x)
comps=V.T
eigs= (S**2) / n
else:
raise Exception('Incompatible argument value \
\'method='+str(method)+'\'')
# project data
projX=x.dot(comps)
if whiten is True:
whiten_vect = np.sqrt((eigs + e))
projX = projX / whiten_vect
return projX, comps, eigs
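# Minimal usage sketch (random data; parameter values are illustrative):
#   X = np.random.rand(200, 5)
#   projX, comps, eigs = pcaFun(X, whiten=True, method='svd')
#   projX holds the projected (whitened) data, comps the principal axes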
# function graddesc(xyData,q,[steps])
# purpose: performing quantum clustering by moving the
# data points down the potential gradient
# input: xyData - the data vectors
# q=a parameter for the Parzen window variance (q=1/(2*sigma^2))
# sigma=parameter for the Parzen window variance (choose q or sigma)
# steps=number of gradient descent steps (default=50)
# eta=gradient descent step size
# output: D=location of data points after GD
def graddesc(xyData,**kwargs):
"""
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Argument treatment
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
"""
argKeys = kwargs.keys()
if 'steps' in argKeys:
steps = kwargs['steps']
else:
steps = 50
if 'q' in argKeys:
q = kwargs['q']
elif 'sigma' in argKeys:
sigma = kwargs['sigma']
q = 1 / (2 * pow(sigma,2))
else:
sigma=0.1
q = 1 / (2 * pow(sigma,2))
if 'r' in argKeys:
D = kwargs['r']
else:
D = xyData
if 'eta' in argKeys:
eta = kwargs['eta']
else:
eta = 0.1
if 'all_square' in argKeys and kwargs['all_square'] is not False:
if xyData.shape[1]>2:
raise Exception('all_square should not be used in data > 2 dims')
points=kwargs['all_square']
totalPoints=pow(kwargs['all_square'],2)
a=np.linspace(-1,1,points)
D=[(x,y) for x in a for y in a]
D=np.array(D)
else:
D=xyData
if 'return_eta' in argKeys:
return_eta=kwargs['return_eta']
else:
return_eta=False
if 'timelapse' in argKeys:
timelapse=kwargs['timelapse']
if timelapse:
tD=list()
timelapse_count=0
if 'timelapse_list' in argKeys:
timelapse_list=kwargs['timelapse_list']
elif 'timelapse_percent' in argKeys:
timelapse_percent=kwargs['timelapse_percent']
list_inc=int(steps/(steps*timelapse_percent))
if list_inc == 0:
list_inc = 1
timelapse_list=range(steps)[::list_inc]
else:
timelapse_percent=0.25
list_inc=int(steps/(steps*timelapse_percent))
if list_inc == 0:
list_inc = 1
timelapse_list=range(steps)[::list_inc]
else:
timelapse=False
if 'timeit' in argKeys:
timeit=kwargs['timeit']
#timings=np.zeros(steps+1) #+1 for the total time
timings=datetime.now()
else:
timeit=False
# add more states to timelapse list
if timelapse:
if timelapse_count in timelapse_list:
tD.append(D)
timelapse_count += 1
"""
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Algorithm starts here
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
"""
# first run
V,P,E,dV = qc(xyData,q=q,r=D)
for j in range(4):
for i in range(steps/4):
# normalize potential gradient
dV = preprocessing.normalize(dV)
# gradient descent
D = D - eta*dV
# add more states to timelapse list
if timelapse:
if timelapse_count in timelapse_list:
tD.append(D)
timelapse_count += 1
"""
if timeit:
start_time=datetime.now()"""
# perform Quantum Clustering
V,P,E,dV = qc(xyData,q=q,r=D)
"""
if timeit:
timeings[i*4]=(datetime.now() - start).total_seconds()"""
eta*=0.5
"""
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Algorithm ends here
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
"""
if timeit:
timings=(datetime.now() - timings).total_seconds()
if timelapse:
tD.append(D)
D=tD
returnList=[D,V,E]
if return_eta:
returnList.append(eta)
if timeit:
returnList.append(timings)
#returnList.append(timelapse_list)
return returnList
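# Minimal usage sketch (synthetic 2-D data; sigma and steps values are
# illustrative, not prescribed by the module):
#   data = np.random.rand(100, 2)
#   D, V, E = graddesc(data, sigma=0.2, steps=40)
#   D: final point locations, V: potential, E: ground-state energy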
# function qc
# purpose: performing quantum clustering in n dimensions
# input:
# ri - a vector of points in n dimensions
# q - the factor q which determines the clustering width
# r - the vector of points to calculate the potential for. equals ri if not specified
# output:
# V - the potential
# P - the wave function
# E - the energy
# dV - the gradient of V
# example (MATLAB syntax): [V,P,E,dV] = qc ([1,1;1,3;3,3],5,[0.5,1,1.5]);
# see also: qc2d
def qc(ri,**kwargs):
argKeys=kwargs.keys()
if 'q' in argKeys:
q=kwargs['q']
elif 'sigma' in argKeys:
sigma=kwargs['sigma']
q = 1 / (2 * pow(sigma,2))
else:
sigma=0.1
q = 1 / (2 * pow(sigma,2))
if 'r' in argKeys:
r=kwargs['r']
else:
r=ri
pointsNum,dims = ri.shape
calculatedNum = r.shape[0]
# prepare the potential
V=np.zeros(calculatedNum)
dP2=np.zeros(calculatedNum)
# prepare P
P=np.zeros(calculatedNum)
singledV1=np.zeros((calculatedNum,dims))
singledV2=np.zeros((calculatedNum,dims))
dV1=np.zeros((calculatedNum,dims))
dV2=np.zeros((calculatedNum,dims))
dV=np.zeros((calculatedNum,dims))
# prevent division by zero
# calculate V
# run over all the points and calculate for each the P and dP2
for point in range(calculatedNum):
# compute ||x-xi||^2
# axis=1 will sum rows instead of columns
D2 = np.sum(pow(r[point]-ri,2),axis=1)
# compute gaussian
singlePoint = np.exp(-q*D2)
# compute Laplacian of gaussian = ||x-xi||^2 * exp(...)
singleLaplace = D2 * singlePoint
#compute gradient components
aux = r[point] - ri
for d in range(dims):
singledV1[:,d] = aux[:,d] * singleLaplace
singledV2[:,d] = aux[:,d] * singlePoint
P[point] = np.sum(singlePoint)
dP2[point] = np.sum(singleLaplace)
dV1[point] = np.sum(singledV1,axis=0)
dV2[point] = np.sum(singledV2,axis=0)
# if there are points with 0 probability,
# assigned them the lowest probability of any point
P=np.where(P==0,np.min(np.extract((P!=0),P)),P)
# compute ground state energy
V = -dims/2 + q*dP2 / P
E = -min(V)
# compute potential on points
V += E
# compute gradient of V
for d in range(dims):
dV[:,d] = -q * dV1[:,d] + (V-E+(dims+2)/2) * dV2[:,d]
return V,P,E,dV
# clust=fineCluster(xyData,minD) cluster xyData points when closer than minD
# output: clust=vector of the cluster index that is assigned to each data point
# (its cluster serial #)
def fineCluster(xyData,minD,potential=None,timeit=False):
if potential is not None:
usePotential=True
else:
usePotential=False
n = xyData.shape[0]
clust = np.zeros(n)
if timeit:
timings=datetime.now()
if usePotential:
# index of points sorted by potential
sortedUnclust=potential.argsort()
        # index of unclustered point with lowest potential
i=sortedUnclust[0]
else:
i=0
    # first cluster index is 1
clustInd=1
while np.min(clust)==0:
x=xyData[i]
# euclidean distance from ith point to others
D = np.sum(pow(xyData-x,2),axis=1)
D = pow(D,0.5)
clust = np.where(D<minD,clustInd,clust)
# index of non clustered points
# unclust=[x for x in clust if x == 0]
clusted= clust.nonzero()[0]
if usePotential:
# sorted index of non clustered points
sortedUnclust=[x for x in sortedUnclust if x not in clusted]
if len(sortedUnclust) == 0:
break
#index of unclustered point with lowest potential
i=sortedUnclust[0]
else:
#index of first unclustered datapoint
i=np.argmin(clust)
clustInd += 1
if timeit:
timings=(datetime.now()-timings).total_seconds()
returnList=[clust]
if timeit:
return clust,timings
return clust
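# Hedged end-to-end sketch combining graddesc and fineCluster (minD value
# is illustrative):
#   D, V, E = graddesc(xyData, sigma=0.2, steps=40)
#   labels = fineCluster(D, 0.05, potential=V)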
|
{
"content_hash": "eb203333c666f974a9142dae4116da54",
"timestamp": "",
"source": "github",
"line_count": 390,
"max_line_length": 91,
"avg_line_length": 22.79230769230769,
"alnum_prop": 0.6270671616604793,
"repo_name": "Chiroptera/QCThesis",
"id": "7ecc13d3eec24f28aa8c95e4724d7b62446f2af5",
"size": "8889",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "MyML/cluster/Horn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "8821"
},
{
"name": "Cuda",
"bytes": "11299"
},
{
"name": "Makefile",
"bytes": "7676"
},
{
"name": "Matlab",
"bytes": "87391"
},
{
"name": "Python",
"bytes": "426550"
}
],
"symlink_target": ""
}
|
from scipy.integrate import cumtrapz
from scipy.interpolate import griddata
from scipy.stats import lognorm
from scipy.optimize import curve_fit
import numpy as np
def fitLogNormParticleDistribution(D10, D50, D90):
'''
Fitting function to get the mu and sigma -parameters of the Log-normal
distribution from cumulative particle distribution values D10, D50 and D90.
The DXX are values that the cumulative particle distribution function gets
at those points. For example D10 means that 10% of particle are smaller
than this size.
d10 = 7.3
d50 = 12.0
d90 = 18.3
(mu, sigma) = fitLogNormParticleDistribution(d10, d50, d90)
print(mu, sigma)
'''
mu = np.log(D50) # fixed by definition
def errfunc(mu_, sig_):
N = lognorm(sig_, scale=np.exp(mu_))
        # minimize the cdf's deviation from 0.1 at D10 and 0.9 at D90;
        # weight the D10 term more heavily (factor 2)
zero = 2 * np.abs(0.1 - N.cdf(D10)) + np.abs(0.9 - N.cdf(D90))
return(zero)
sigma, pcov = curve_fit(errfunc, [mu], [0])
print(sigma)
return(mu, sigma[0])
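# Sanity-check sketch (hypothetical numbers, reusing the docstring example):
# the fitted parameters should reproduce the cumulative targets.
#
#   mu, sigma = fitLogNormParticleDistribution(7.3, 12.0, 18.3)
#   N = lognorm(sigma, scale=np.exp(mu))
#   print(N.cdf(7.3), N.cdf(12.0), N.cdf(18.3))  # expect roughly 0.1, 0.5, 0.9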
def particlesInVolumeLogNormWeight(weight_frac, density_p, density_host,
mu, sigma, particle_diameters):
'''
    Function that calculates particle densities in a volume element for a
    given weight fraction.
Presumes LogNormal particle distribution
'''
print('Weight fraction is %.1f %%' % (weight_frac * 100))
w = weight_frac
vol_frac = w * density_host / density_p / (1 + w * (
density_host / density_p - 1))
print('Volume fraction is %.1f %%' % (vol_frac * 100))
return(particlesInVolumeLogNorm(vol_frac, mu, sigma, particle_diameters))
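# Worked example of the weight-to-volume conversion above (numbers
# hypothetical): for weight_frac = 0.24, density_p = 5.0, density_host = 1.1:
#   vol_frac = 0.24 * (1.1 / 5.0) / (1 + 0.24 * (1.1 / 5.0 - 1))
#            = 0.0528 / 0.8128
#            ~ 0.065
# i.e. a 24 % weight fraction corresponds to roughly a 6.5 % volume fraction.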
def particlesInVolumeLogNormWeightTotal(weight_frac, density_p, density_host,
mu, sigma, particle_diameters):
'''
IF ONLY 1 PARTICLE TYPE IN SILICONE!
Returns the total number of particles in volume element.
Presumes LogNormal particle distribution
'''
print('Weight fraction is %.1f %%' % (weight_frac * 100))
w = weight_frac
vol_frac = w * density_host / density_p / (1 + w * (
density_host / density_p - 1))
print('Volume fraction is %.1f %%' % (vol_frac * 100))
return(vol_frac, particlesInVolumeLogNormTotal(vol_frac, mu, sigma,
particle_diameters))
def particlesInVolumeLogNormWeightTotal2(weight_frac1, weight_frac2, dens_p1, dens_p2, dens_host, mu, sigma, particle_diameters):
'''
IF 2 PARTICLE TYPES IN SILICONE!
Returns the total number of particles in volume element.
Presumes LogNormal particle distribution
'''
print('Weight fraction is %.1f %%' % (weight_frac1 * 100))
w_p1 = weight_frac1
w_p2 = weight_frac2
w_s = 1.0 - w_p1 - w_p2
vol_frac = (dens_host * dens_p2 * w_p1) / (w_s * dens_p1 * dens_p2 + w_p1 *dens_host * dens_p2 + w_p2 * dens_host * dens_p1)
print('Volume fraction is %.1f %%' % (vol_frac * 100))
return(vol_frac, particlesInVolumeLogNormTotal(vol_frac, mu, sigma,
particle_diameters))
def particlesInVolumeLogNorm(vol_frac, mu, sigma, particle_diameters):
'''
Function that calculates particle densities in a volume element.
    The particle diameters are log-normally distributed (sigma, mu)
    and the particles occupy a given volume fraction.
'''
D = particle_diameters
# Calculate particle density(particles per um ^ 3)
N = lognorm(sigma, scale=np.exp(mu))
# Weight factors of each particle size
pdf = N.pdf(D)
    # Volume of a sphere with diameter D [um ^ 3]
Vsph = 4.0 / 3.0 * np.pi * (D / 2.0) ** 3.0
    # Particle volumes multiplied with weight factors => volume distribution
WV = pdf * Vsph
# Total volume of the volume distribution
Vtot = np.trapz(WV, D)
# Number of particles in um ^ 3
n_part = vol_frac / Vtot
print('Number of particles in cubic micrometer = %.18f' % n_part)
# Check, should give the volume fraction in %
print("Volume fraction was: %.1f %%" %
(np.trapz(n_part * pdf * Vsph, D) * 100))
bins = pdf * (D[1] - D[0])
# print(bins.sum())
return(n_part * bins)
def particlesInVolumeLogNormTotal(vol_frac, mu, sigma, particle_diameters):
n = particlesInVolumeLogNorm(vol_frac, mu, sigma, particle_diameters)
print("n=", n)
print("n.sum()=", n.sum())
return(n.sum())
def rayleighScatteringCrossSection(wavelengths,
particle_refractive_index,
particle_diameter):
d = particle_diameter
n = particle_refractive_index
l = wavelengths
cross = ((2.0 * (np.pi ** 5.0) * d ** 6.0) / (3 * l ** 4.0) *
(((n ** 2.0) - 1.0) / ((n ** 2.0) + 2.0)) ** 2.0)
return(cross)
def rayleighScatteringPhaseFunction(cosTheta):
return(3.0 / 4.0 * (1 + cosTheta ** 2))
def henyeyGreensteinPhaseFunction(cosTheta, asymmetry_factor):
g = asymmetry_factor
p = 0.5 * (1.0 - g ** 2) / (1 + g ** 2 - 2 * g * cosTheta) ** (3.0 / 2.0)
return(p)
def cumulativeDistribution(phaseFunction, cosTheta):
return(-0.5 * cumtrapz(phaseFunction, cosTheta, initial=0))
def cumulativeDistributionTheta(phaseFunction, theta):
return(cumtrapz(phaseFunction * np.sin(theta), theta, initial=0))
def invertNiceFunction(x, y, yi):
new_y = griddata(y, x, yi)
if np.isnan(new_y[0]):
new_y[0] = x[0]
if np.isnan(new_y[-1]):
new_y[-1] = x[-1]
return(new_y)
'''
th = np.arange(0, 180, 0.5)
th = np.radians(th)
rv = np.linspace(0, 1, 1000)
phase = rayleighScatteringPhaseFunction(np.cos(th))
phase = henyeyGreensteinPhaseFunction(np.cos(th), -0.6)
cumul = cumulativeDistribution(phase, np.cos(th))
invers = invertNiceFunction(np.cos(th), cumul, rv)
plt.plot(rv, np.degrees(np.arccos(invers)))
plt.show()
print(np.degrees(np.arccos(invers)))
'''
'''
if __name__ == '__main__':
particlesInVolumeLogNormWeightTotal(weight_frac=0.24,
density_p=5.0,
density_host=1.1,
mu=1, sigma=1,
particle_diameters=np.array([1, 2]))
particlesInVolumeLogNormWeight(weight_frac=0.24,
density_p=5.0,
density_host=1.1,
mu=1, sigma=1,
particle_diameters=np.array([1, 2]))
'''
|
{
"content_hash": "2cd8c73d04b53bf0e6c7792fba03bcc6",
"timestamp": "",
"source": "github",
"line_count": 198,
"max_line_length": 129,
"avg_line_length": 33.71212121212121,
"alnum_prop": 0.5941573033707865,
"repo_name": "ollitapa/MMP-TracerApi",
"id": "ab83042019f17b51c429a84a2a525cf15584ad2d",
"size": "7283",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tests/optimization/scatteringTools.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "210900"
}
],
"symlink_target": ""
}
|
__docformat__ = "restructuredtext"
# Let users know if they're missing any of our hard dependencies
hard_dependencies = ("numpy", "pytz", "dateutil")
missing_dependencies = []
for dependency in hard_dependencies:
try:
__import__(dependency)
except ImportError as e:
missing_dependencies.append(f"{dependency}: {e}")
if missing_dependencies:
raise ImportError(
"Unable to import required dependencies:\n" + "\n".join(missing_dependencies)
)
del hard_dependencies, dependency, missing_dependencies
# numpy compat
from pandas.compat import is_numpy_dev as _is_numpy_dev
try:
from pandas._libs import hashtable as _hashtable, lib as _lib, tslib as _tslib
except ImportError as e: # pragma: no cover
module = e.name
raise ImportError(
f"C extension: {module} not built. If you want to import "
"pandas from the source directory, you may need to run "
"'python setup.py build_ext --force' to build the C extensions first."
) from e
from pandas._config import (
get_option,
set_option,
reset_option,
describe_option,
option_context,
options,
)
# let init-time option registration happen
import pandas.core.config_init
from pandas.core.api import (
# dtype
Int8Dtype,
Int16Dtype,
Int32Dtype,
Int64Dtype,
UInt8Dtype,
UInt16Dtype,
UInt32Dtype,
UInt64Dtype,
Float32Dtype,
Float64Dtype,
CategoricalDtype,
PeriodDtype,
IntervalDtype,
DatetimeTZDtype,
StringDtype,
BooleanDtype,
# missing
NA,
isna,
isnull,
notna,
notnull,
# indexes
Index,
CategoricalIndex,
RangeIndex,
NumericIndex,
MultiIndex,
IntervalIndex,
TimedeltaIndex,
DatetimeIndex,
PeriodIndex,
IndexSlice,
# tseries
NaT,
Period,
period_range,
Timedelta,
timedelta_range,
Timestamp,
date_range,
bdate_range,
Interval,
interval_range,
DateOffset,
# conversion
to_numeric,
to_datetime,
to_timedelta,
# misc
Flags,
Grouper,
factorize,
unique,
value_counts,
NamedAgg,
array,
Categorical,
set_eng_float_format,
Series,
DataFrame,
)
from pandas.core.arrays.sparse import SparseDtype
from pandas.tseries.api import infer_freq
from pandas.tseries import offsets
from pandas.core.computation.api import eval
from pandas.core.reshape.api import (
concat,
lreshape,
melt,
wide_to_long,
merge,
merge_asof,
merge_ordered,
crosstab,
pivot,
pivot_table,
get_dummies,
cut,
qcut,
)
from pandas import api, arrays, errors, io, plotting, testing, tseries
from pandas.util._print_versions import show_versions
from pandas.io.api import (
# excel
ExcelFile,
ExcelWriter,
read_excel,
# parsers
read_csv,
read_fwf,
read_table,
# pickle
read_pickle,
to_pickle,
# pytables
HDFStore,
read_hdf,
# sql
read_sql,
read_sql_query,
read_sql_table,
# misc
read_clipboard,
read_parquet,
read_orc,
read_feather,
read_gbq,
read_html,
read_xml,
read_json,
read_stata,
read_sas,
read_spss,
)
from pandas.io.json import _json_normalize as json_normalize
from pandas.util._tester import test
# use the closest tagged version if possible
from pandas._version import get_versions
v = get_versions()
__version__ = v.get("closest-tag", v["version"])
__git_version__ = v.get("full-revisionid")
del get_versions, v
# GH 27101
__deprecated_num_index_names = ["Float64Index", "Int64Index", "UInt64Index"]
def __dir__():
# GH43028
# Int64Index etc. are deprecated, but we still want them to be available in the dir.
# Remove in Pandas 2.0, when we remove Int64Index etc. from the code base.
return list(globals().keys()) + __deprecated_num_index_names
def __getattr__(name):
import warnings
if name in __deprecated_num_index_names:
warnings.warn(
f"pandas.{name} is deprecated "
"and will be removed from pandas in a future version. "
"Use pandas.NumericIndex with the appropriate dtype instead.",
FutureWarning,
stacklevel=2,
)
from pandas.core.api import Float64Index, Int64Index, UInt64Index
return {
"Float64Index": Float64Index,
"Int64Index": Int64Index,
"UInt64Index": UInt64Index,
}[name]
elif name == "datetime":
warnings.warn(
"The pandas.datetime class is deprecated "
"and will be removed from pandas in a future version. "
"Import from datetime module instead.",
FutureWarning,
stacklevel=2,
)
from datetime import datetime as dt
return dt
elif name == "np":
warnings.warn(
"The pandas.np module is deprecated "
"and will be removed from pandas in a future version. "
"Import numpy directly instead.",
FutureWarning,
stacklevel=2,
)
import numpy as np
return np
elif name in {"SparseSeries", "SparseDataFrame"}:
warnings.warn(
f"The {name} class is removed from pandas. Accessing it from "
"the top-level namespace will also be removed in the next version.",
FutureWarning,
stacklevel=2,
)
return type(name, (), {})
elif name == "SparseArray":
warnings.warn(
"The pandas.SparseArray class is deprecated "
"and will be removed from pandas in a future version. "
"Use pandas.arrays.SparseArray instead.",
FutureWarning,
stacklevel=2,
)
from pandas.core.arrays.sparse import SparseArray as _SparseArray
return _SparseArray
raise AttributeError(f"module 'pandas' has no attribute '{name}'")
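# Illustration of the shim above (not executed here): accessing a deprecated
# name is resolved through __getattr__ and emits a FutureWarning, e.g.
#
#   import pandas as pd
#   idx_cls = pd.Int64Index  # FutureWarning; returns the class from core.api
#   dt_cls = pd.datetime     # FutureWarning; returns datetime.datetime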
# module level doc-string
__doc__ = """
pandas - a powerful data analysis and manipulation library for Python
=====================================================================
**pandas** is a Python package providing fast, flexible, and expressive data
structures designed to make working with "relational" or "labeled" data both
easy and intuitive. It aims to be the fundamental high-level building block for
doing practical, **real world** data analysis in Python. Additionally, it has
the broader goal of becoming **the most powerful and flexible open source data
analysis / manipulation tool available in any language**. It is already well on
its way toward this goal.
Main Features
-------------
Here are just a few of the things that pandas does well:
- Easy handling of missing data in floating point as well as non-floating
point data.
- Size mutability: columns can be inserted and deleted from DataFrame and
higher dimensional objects
- Automatic and explicit data alignment: objects can be explicitly aligned
to a set of labels, or the user can simply ignore the labels and let
`Series`, `DataFrame`, etc. automatically align the data for you in
computations.
- Powerful, flexible group by functionality to perform split-apply-combine
operations on data sets, for both aggregating and transforming data.
- Make it easy to convert ragged, differently-indexed data in other Python
and NumPy data structures into DataFrame objects.
- Intelligent label-based slicing, fancy indexing, and subsetting of large
data sets.
- Intuitive merging and joining data sets.
- Flexible reshaping and pivoting of data sets.
- Hierarchical labeling of axes (possible to have multiple labels per tick).
- Robust IO tools for loading data from flat files (CSV and delimited),
Excel files, databases, and saving/loading data from the ultrafast HDF5
format.
- Time series-specific functionality: date range generation and frequency
conversion, moving window statistics, date shifting and lagging.
"""
# Use __all__ to let type checkers know what is part of the public API.
# Pandas is not (yet) a py.typed library: the public API is determined
# based on the documentation.
__all__ = [
"BooleanDtype",
"Categorical",
"CategoricalDtype",
"CategoricalIndex",
"DataFrame",
"DateOffset",
"DatetimeIndex",
"DatetimeTZDtype",
"ExcelFile",
"ExcelWriter",
"Flags",
"Float32Dtype",
"Float64Dtype",
"Grouper",
"HDFStore",
"Index",
"IndexSlice",
"Int16Dtype",
"Int32Dtype",
"Int64Dtype",
"Int8Dtype",
"Interval",
"IntervalDtype",
"IntervalIndex",
"MultiIndex",
"NA",
"NaT",
"NamedAgg",
"NumericIndex",
"Period",
"PeriodDtype",
"PeriodIndex",
"RangeIndex",
"Series",
"SparseDtype",
"StringDtype",
"Timedelta",
"TimedeltaIndex",
"Timestamp",
"UInt16Dtype",
"UInt32Dtype",
"UInt64Dtype",
"UInt8Dtype",
"api",
"array",
"arrays",
"bdate_range",
"concat",
"crosstab",
"cut",
"date_range",
"describe_option",
"errors",
"eval",
"factorize",
"get_dummies",
"get_option",
"infer_freq",
"interval_range",
"io",
"isna",
"isnull",
"json_normalize",
"lreshape",
"melt",
"merge",
"merge_asof",
"merge_ordered",
"notna",
"notnull",
"offsets",
"option_context",
"options",
"period_range",
"pivot",
"pivot_table",
"plotting",
"qcut",
"read_clipboard",
"read_csv",
"read_excel",
"read_feather",
"read_fwf",
"read_gbq",
"read_hdf",
"read_html",
"read_json",
"read_orc",
"read_parquet",
"read_pickle",
"read_sas",
"read_spss",
"read_sql",
"read_sql_query",
"read_sql_table",
"read_stata",
"read_table",
"read_xml",
"reset_option",
"set_eng_float_format",
"set_option",
"show_versions",
"test",
"testing",
"timedelta_range",
"to_datetime",
"to_numeric",
"to_pickle",
"to_timedelta",
"tseries",
"unique",
"value_counts",
"wide_to_long",
]
|
{
"content_hash": "ee1a2592bb7dcc80b46845b0ddc56d51",
"timestamp": "",
"source": "github",
"line_count": 419,
"max_line_length": 88,
"avg_line_length": 24.35799522673031,
"alnum_prop": 0.6286498138349991,
"repo_name": "rs2/pandas",
"id": "9505d0481ee197b190c5c911d9c04c6913bc6755",
"size": "10222",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pandas/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "127"
},
{
"name": "C",
"bytes": "360253"
},
{
"name": "CSS",
"bytes": "1438"
},
{
"name": "Cython",
"bytes": "1081551"
},
{
"name": "Dockerfile",
"bytes": "1690"
},
{
"name": "HTML",
"bytes": "456275"
},
{
"name": "Makefile",
"bytes": "507"
},
{
"name": "Python",
"bytes": "17393243"
},
{
"name": "Shell",
"bytes": "10872"
},
{
"name": "Smarty",
"bytes": "7820"
},
{
"name": "XSLT",
"bytes": "1196"
}
],
"symlink_target": ""
}
|
"""Data analysus
:copyright: Copyright (c) 2018-2020 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
from pykern import pkcollections
from pykern import pkconfig
from pykern.pkcollections import PKDict
from pykern.pkdebug import pkdc, pkdlog, pkdp
import numpy
import scipy
import scipy.optimize
import sympy
import sirepo.feature_config
import sirepo.template
def get_fft(t_vals, y_vals):
import scipy.fftpack
import scipy.signal
# fft takes the y data only and assumes it corresponds to equally-spaced x values.
fft_out = scipy.fftpack.fft(y_vals)
num_samples = len(y_vals)
half_num_samples = num_samples // 2
# should all be the same - this will normalize the frequencies
sample_period = abs(t_vals[1] - t_vals[0])
# the first half of the fft data (taking abs() folds in the imaginary part)
y = 2.0 / num_samples * numpy.abs(fft_out[0:half_num_samples])
# get the frequencies found
# fftfreq just generates an array of equally-spaced values that represent the x-axis
# of the fft of data of a given length. It includes negative values
freqs = scipy.fftpack.fftfreq(len(fft_out), d=sample_period) # / sample_period
w = 2.0 * numpy.pi * freqs[0:half_num_samples]
coefs = (2.0 / num_samples) * numpy.abs(fft_out[0:half_num_samples])
peaks, props = scipy.signal.find_peaks(coefs)
found_freqs = [v for v in zip(peaks, numpy.around(w[peaks], 3))]
bin_spread = 10
min_bin = max(0, peaks[0] - bin_spread) if len(peaks) > 0 else 0
max_bin = (
min(half_num_samples, peaks[-1] + bin_spread)
if len(peaks) > 0
else half_num_samples
)
yy = 2.0 / num_samples * numpy.abs(fft_out[min_bin:max_bin])
max_yy = numpy.max(yy)
yy_norm = yy / (max_yy if max_yy != 0 else 1)
ww = 2.0 * numpy.pi * freqs[min_bin:max_bin]
max_y = numpy.max(y)
y_norm = y / (max_y if max_y != 0 else 1)
return w.tolist(), y_norm.tolist()
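# Usage sketch for get_fft (hypothetical signal), assuming numpy as imported
# above: for an equally-spaced sine wave the normalized spectrum should peak
# at the angular frequency of the input.
#
#   t = numpy.linspace(0, 10, 1000)
#   y = numpy.sin(2 * numpy.pi * 1.5 * t)  # 1.5 Hz -> w ~ 9.42 rad/s
#   w, amp = get_fft(t, y)                 # amp is normalized to a max of 1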
def fit_to_equation(x, y, equation, var, params):
sym_curve = sympy.sympify(equation)
sym_str = f"{var} {' '.join(params)}"
syms = sympy.symbols(sym_str)
sym_curve_l = sympy.lambdify(syms, sym_curve, "numpy")
p_vals, pcov = scipy.optimize.curve_fit(sym_curve_l, x, y, maxfev=500000)
sigma = numpy.sqrt(numpy.diagonal(pcov))
p_subs = []
p_subs_min = []
p_subs_max = []
# exclude the symbol of the variable when subbing
for sidx, p in enumerate(p_vals, 1):
sig = sigma[sidx - 1]
p_min = p - 2 * sig
p_max = p + 2 * sig
s = syms[sidx]
p_subs.append((s, p))
p_subs_min.append((s, p_min))
p_subs_max.append((s, p_max))
y_fit = sym_curve.subs(p_subs)
y_fit_min = sym_curve.subs(p_subs_min)
y_fit_max = sym_curve.subs(p_subs_max)
y_fit_l = sympy.lambdify(var, y_fit, "numpy")
y_fit_min_l = sympy.lambdify(var, y_fit_min, "numpy")
y_fit_max_l = sympy.lambdify(var, y_fit_max, "numpy")
x_uniform = numpy.linspace(numpy.min(x), numpy.max(x), 100)
return (
x_uniform,
y_fit_l(x_uniform),
y_fit_min_l(x_uniform),
y_fit_max_l(x_uniform),
p_vals,
sigma,
)
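# Usage sketch for fit_to_equation (hypothetical model and data): fit
# y = a * exp(-b * x) to noisy samples.
#
#   x = numpy.linspace(0, 5, 50)
#   y = 3.0 * numpy.exp(-0.7 * x) + numpy.random.normal(0, 0.05, 50)
#   x_u, y_fit, y_min, y_max, p_vals, sigma = fit_to_equation(
#       x, y, 'a * exp(-b * x)', 'x', ['a', 'b'])
#   # p_vals should come out near [3.0, 0.7]; y_min/y_max bracket the
#   # 2-sigma confidence band evaluated on x_u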
def _init():
pass
_init()
|
{
"content_hash": "de83ec1768173cffb40422a373fed210",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 88,
"avg_line_length": 30.685185185185187,
"alnum_prop": 0.6270368135184068,
"repo_name": "radiasoft/sirepo",
"id": "0c9d684696f0b22121240a906fcfa913a09314ca",
"size": "3338",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sirepo/analysis/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "152"
},
{
"name": "CSS",
"bytes": "65716"
},
{
"name": "HTML",
"bytes": "144600"
},
{
"name": "JavaScript",
"bytes": "3855752"
},
{
"name": "Jinja",
"bytes": "190763"
},
{
"name": "Jupyter Notebook",
"bytes": "1262"
},
{
"name": "Opal",
"bytes": "61806"
},
{
"name": "Perl",
"bytes": "31089"
},
{
"name": "Python",
"bytes": "3022923"
},
{
"name": "SCSS",
"bytes": "29855"
},
{
"name": "Shell",
"bytes": "21259"
}
],
"symlink_target": ""
}
|
'''@file leaky_dbgru.py
contains the LeakyDBGRU class'''
import tensorflow as tf
import model
from nabu.neuralnetworks.components import layer
class LeakyDBGRU(model.Model):
'''A deep bidirectional GRU classifier with memory leakage'''
def _get_outputs(self, inputs, input_seq_length, is_training):
'''
Create the variables and do the forward computation
Args:
inputs: the inputs to the neural network, this is a list of
[batch_size x time x ...] tensors
input_seq_length: The sequence lengths of the input utterances, this
is a [batch_size] vector
is_training: whether or not the network is in training mode
Returns:
- output, which is a [batch_size x time x ...] tensors
'''
#the bgru layer
num_units = int(self.conf['num_units'])
if 'activation_fn' in self.conf:
if self.conf['activation_fn'] == 'tanh':
activation_fn = tf.nn.tanh
elif self.conf['activation_fn'] == 'relu':
activation_fn = tf.nn.relu
            else:
                raise Exception(
                    'Undefined activation function: %s'
                    % self.conf['activation_fn'])
else:
activation_fn = tf.nn.tanh
bgru = layer.LeakyBGRULayer(
num_units=num_units,
leak_factor=float(self.conf['leak_factor']),
activation_fn=activation_fn)
        # code not available for multiple inputs!!
        if len(inputs) > 1:
            raise Exception(
                'The implementation of DBGRU expects 1 input and not %d'
                % len(inputs))
        inputs = inputs[0]
with tf.variable_scope(self.scope):
if is_training and float(self.conf['input_noise']) > 0:
inputs = inputs + tf.random_normal(
tf.shape(inputs),
stddev=float(self.conf['input_noise']))
logits = inputs
for l in range(int(self.conf['num_layers'])):
logits = bgru(logits, input_seq_length,
'layer' + str(l))
if is_training and float(self.conf['dropout']) < 1:
logits = tf.nn.dropout(logits, float(self.conf['dropout']))
output = logits
return output
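# Summary of the configuration keys consumed above (a reference note, not
# part of the original file): 'num_units', 'leak_factor', 'num_layers',
# 'dropout', 'input_noise', and optionally 'activation_fn' ('tanh' or
# 'relu', defaulting to tanh).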
|
{
"content_hash": "3b24212a9a076d1cdc4689fd9409a7df",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 80,
"avg_line_length": 30.895522388059703,
"alnum_prop": 0.618840579710145,
"repo_name": "JeroenZegers/Nabu-MSSS",
"id": "648503496d6f2a4af808e2754973ca364957a348",
"size": "2070",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nabu/neuralnetworks/models/leaky_dbgru.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "981104"
},
{
"name": "Shell",
"bytes": "4125"
}
],
"symlink_target": ""
}
|
"""Unit tests for scripts/pylint_extensions."""
import os
import sys
import tempfile
import unittest
_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4')
sys.path.insert(0, _PYLINT_PATH)
# Since these modules need to be imported after adding the Pylint path,
# we need to disable isort for the lines below to prevent import
# order errors.
# pylint: disable=wrong-import-position
# pylint: disable=relative-import
import astroid # isort:skip
import pylint_extensions # isort:skip
from pylint import testutils # isort:skip
# pylint: enable=wrong-import-position
# pylint: enable=relative-import
class ExplicitKeywordArgsCheckerTests(unittest.TestCase):
def test_finds_non_explicit_keyword_args(self):
checker_test_object = testutils.CheckerTestCase()
checker_test_object.CHECKER_CLASS = (
pylint_extensions.ExplicitKeywordArgsChecker)
checker_test_object.setup_method()
func_call_node_one, func_call_node_two, func_call_node_three = (
astroid.extract_node("""
def test(test_var_one, test_var_two=4, test_var_three=5, test_var_four="test_checker"):
test_var_five = test_var_two + test_var_three
return test_var_five
test(2, 5, test_var_three=6) #@
test(2) #@
test(2, 6, test_var_two=5, test_var_four="test_checker") #@
"""))
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='non-explicit-keyword-args',
node=func_call_node_one,
args=(
'\'test_var_two\'',
'function',
'test'
)
),
):
checker_test_object.checker.visit_call(
func_call_node_one)
with checker_test_object.assertNoMessages():
checker_test_object.checker.visit_call(
func_call_node_two)
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='non-explicit-keyword-args',
node=func_call_node_three,
args=(
'\'test_var_three\'',
'function',
'test'
)
)
):
checker_test_object.checker.visit_call(
func_call_node_three)
class HangingIndentCheckerTests(unittest.TestCase):
def test_finds_hanging_indent(self):
checker_test_object = testutils.CheckerTestCase()
checker_test_object.CHECKER_CLASS = (
pylint_extensions.HangingIndentChecker)
checker_test_object.setup_method()
node_break_after_hanging_indent = astroid.scoped_nodes.Module(
name='test',
doc='Custom test')
temp_file = tempfile.NamedTemporaryFile()
filename = temp_file.name
with open(filename, 'w') as tmp:
tmp.write(
"""self.post_json('/ml/trainedclassifierhandler',
self.payload, expect_errors=True, expected_status_int=401)
""")
node_break_after_hanging_indent.file = filename
node_break_after_hanging_indent.path = filename
checker_test_object.checker.process_module(
node_break_after_hanging_indent)
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='no-break-after-hanging-indent',
line=1
),
):
temp_file.close()
node_no_err_message = astroid.scoped_nodes.Module(
name='test',
doc='Custom test')
temp_file = tempfile.NamedTemporaryFile()
filename = temp_file.name
with open(filename, 'w') as tmp:
tmp.write(
"""master_translation_dict = json.loads(
utils.get_file_contents(os.path.join(
os.getcwd(), 'assets', 'i18n', 'en.json')))
""")
node_no_err_message.file = filename
node_no_err_message.path = filename
checker_test_object.checker.process_module(node_no_err_message)
with checker_test_object.assertNoMessages():
temp_file.close()
class DocstringParameterCheckerTests(unittest.TestCase):
def test_finds_docstring_parameter(self):
checker_test_object = testutils.CheckerTestCase()
checker_test_object.CHECKER_CLASS = (
pylint_extensions.DocstringParameterChecker)
checker_test_object.setup_method()
func_node = astroid.extract_node("""
def test(test_var_one, test_var_two): #@
\"\"\"Function to test docstring parameters.
Args:
test_var_one: int. First test variable.
test_var_two: str. Second test variable.
Returns:
int. The test result.
\"\"\"
result = test_var_one + test_var_two
return result
""")
with checker_test_object.assertNoMessages():
checker_test_object.checker.visit_functiondef(func_node)
class ImportOnlyModulesCheckerTests(unittest.TestCase):
def test_finds_import_from(self):
checker_test_object = testutils.CheckerTestCase()
checker_test_object.CHECKER_CLASS = (
pylint_extensions.ImportOnlyModulesChecker)
checker_test_object.setup_method()
importfrom_node1 = astroid.extract_node("""
from os import path #@
import sys
""")
with checker_test_object.assertNoMessages():
checker_test_object.checker.visit_importfrom(importfrom_node1)
importfrom_node2 = astroid.extract_node("""
from os import error #@
import sys
""")
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='import-only-modules',
node=importfrom_node2,
args=('error', 'os')
),
):
checker_test_object.checker.visit_importfrom(
importfrom_node2)
class BackslashContinuationCheckerTests(unittest.TestCase):
def test_finds_backslash_continuation(self):
checker_test_object = testutils.CheckerTestCase()
checker_test_object.CHECKER_CLASS = (
pylint_extensions.BackslashContinuationChecker)
checker_test_object.setup_method()
node = astroid.scoped_nodes.Module(name='test', doc='Custom test')
temp_file = tempfile.NamedTemporaryFile()
filename = temp_file.name
with open(filename, 'w') as tmp:
tmp.write(
"""message1 = 'abc'\\\n""" # pylint: disable=backslash-continuation
"""'cde'\\\n""" # pylint: disable=backslash-continuation
"""'xyz'
message2 = 'abc\\\\'
message3 = (
'abc\\\\'
'xyz\\\\'
)
""")
node.file = filename
node.path = filename
checker_test_object.checker.process_module(node)
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='backslash-continuation',
line=1
),
testutils.Message(
msg_id='backslash-continuation',
line=2
),
):
temp_file.close()
class FunctionArgsOrderCheckerTests(unittest.TestCase):
def test_finds_function_def(self):
checker_test_object = testutils.CheckerTestCase()
checker_test_object.CHECKER_CLASS = (
pylint_extensions.FunctionArgsOrderChecker)
checker_test_object.setup_method()
functiondef_node1 = astroid.extract_node("""
def test(self,test_var_one, test_var_two): #@
result = test_var_one + test_var_two
return result
""")
with checker_test_object.assertNoMessages():
checker_test_object.checker.visit_functiondef(functiondef_node1)
functiondef_node2 = astroid.extract_node("""
def test(test_var_one, test_var_two, self): #@
result = test_var_one + test_var_two
return result
""")
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='function-args-order-self',
node=functiondef_node2
),
):
checker_test_object.checker.visit_functiondef(functiondef_node2)
class RestrictedImportCheckerTests(unittest.TestCase):
def test_detect_restricted_import(self):
checker_test_object = testutils.CheckerTestCase()
checker_test_object.CHECKER_CLASS = (
pylint_extensions.RestrictedImportChecker)
checker_test_object.setup_method()
# Tests the case wherein storage layer imports domain layer
# in import statements.
node_err_import = astroid.extract_node("""
import core.domain.activity_domain #@
""")
node_err_import.root().name = 'oppia.core.storage.topic'
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='invalid-import',
node=node_err_import,
args=('domain', 'storage'),
),
):
checker_test_object.checker.visit_import(node_err_import)
# Tests the case wherein storage layer does not import domain layer
# in import statements.
node_no_err_import = astroid.extract_node("""
import core.platform.email.gae_email_services #@
""")
node_no_err_import.root().name = 'oppia.core.storage.topic'
with checker_test_object.assertNoMessages():
checker_test_object.checker.visit_import(node_no_err_import)
# Tests the case wherein storage layer imports domain layer
# in import-from statements.
node_err_importfrom = astroid.extract_node("""
from core.domain import activity_domain #@
""")
node_err_importfrom.root().name = 'oppia.core.storage.topic'
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='invalid-import',
node=node_err_importfrom,
args=('domain', 'storage'),
)
):
checker_test_object.checker.visit_importfrom(node_err_importfrom)
# Tests the case wherein storage layer does not import domain layer
# in import-from statements.
node_no_err_importfrom = astroid.extract_node("""
from core.platform.email import gae_email_services #@
""")
node_no_err_importfrom.root().name = 'oppia.core.storage.topicl'
with checker_test_object.assertNoMessages():
checker_test_object.checker.visit_importfrom(node_no_err_importfrom)
# Tests the case wherein domain layer imports controller layer
# in import statements.
node_err_import = astroid.extract_node("""
import core.controllers.acl_decorators #@
""")
node_err_import.root().name = 'oppia.core.domain'
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='invalid-import',
node=node_err_import,
args=('controller', 'domain'),
),
):
checker_test_object.checker.visit_import(node_err_import)
# Tests the case wherein domain layer does not import controller layer
# in import statements.
node_no_err_import = astroid.extract_node("""
import core.platform.email.gae_email_services_test #@
""")
node_no_err_import.root().name = 'oppia.core.domain'
with checker_test_object.assertNoMessages():
checker_test_object.checker.visit_import(node_no_err_import)
# Tests the case wherein domain layer imports controller layer
# in import-from statements.
node_err_importfrom = astroid.extract_node("""
from core.controllers import acl_decorators #@
""")
node_err_importfrom.root().name = 'oppia.core.domain'
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='invalid-import',
node=node_err_importfrom,
args=('controller', 'domain'),
)
):
checker_test_object.checker.visit_importfrom(node_err_importfrom)
# Tests the case wherein domain layer does not import controller layer
# in import-from statements.
node_no_err_importfrom = astroid.extract_node("""
from core.platform.email import gae_email_services_test #@
""")
node_no_err_importfrom.root().name = 'oppia.core.domain'
with checker_test_object.assertNoMessages():
checker_test_object.checker.visit_importfrom(node_no_err_importfrom)
class SingleCharAndNewlineAtEOFCheckerTests(unittest.TestCase):
def test_checks_single_char_and_newline_eof(self):
checker_test_object = testutils.CheckerTestCase()
checker_test_object.CHECKER_CLASS = (
pylint_extensions.SingleCharAndNewlineAtEOFChecker)
checker_test_object.setup_method()
node_missing_newline_at_eof = astroid.scoped_nodes.Module(
name='test',
doc='Custom test')
temp_file = tempfile.NamedTemporaryFile()
filename = temp_file.name
with open(filename, 'w') as tmp:
tmp.write(
"""c = 'something dummy'
""")
node_missing_newline_at_eof.file = filename
node_missing_newline_at_eof.path = filename
checker_test_object.checker.process_module(node_missing_newline_at_eof)
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='newline-at-eof',
line=2
),
):
temp_file.close()
node_single_char_file = astroid.scoped_nodes.Module(
name='test',
doc='Custom test')
temp_file = tempfile.NamedTemporaryFile()
filename = temp_file.name
with open(filename, 'w') as tmp:
tmp.write("""1""")
node_single_char_file.file = filename
node_single_char_file.path = filename
checker_test_object.checker.process_module(node_single_char_file)
with checker_test_object.assertAddsMessages(
testutils.Message(
msg_id='only-one-character',
line=1
),
):
temp_file.close()
node_no_err_message = astroid.scoped_nodes.Module(
name='test',
doc='Custom test')
temp_file = tempfile.NamedTemporaryFile()
filename = temp_file.name
with open(filename, 'w') as tmp:
tmp.write("""x = 'something dummy'""")
node_no_err_message.file = filename
node_no_err_message.path = filename
checker_test_object.checker.process_module(node_no_err_message)
with checker_test_object.assertNoMessages():
temp_file.close()
|
{
"content_hash": "9eb1ad056841a7fdd7d8ef35d4e5f054",
"timestamp": "",
"source": "github",
"line_count": 420,
"max_line_length": 95,
"avg_line_length": 36.57142857142857,
"alnum_prop": 0.5879557291666667,
"repo_name": "souravbadami/oppia",
"id": "5445d35f4a59ff33ce7aadf76b169deda9caea5b",
"size": "16106",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "scripts/pylint_extensions_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "90864"
},
{
"name": "HTML",
"bytes": "1044569"
},
{
"name": "JavaScript",
"bytes": "606331"
},
{
"name": "Python",
"bytes": "7870122"
},
{
"name": "Shell",
"bytes": "54930"
},
{
"name": "TypeScript",
"bytes": "4922933"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('conferences', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('approvals', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Submission',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('node_id', models.CharField(max_length=10)),
('date_created', models.DateTimeField(auto_now_add=True)),
('title', models.CharField(max_length=100)),
('description', models.TextField()),
('file_id', models.CharField(max_length=100, null=True)),
('file_url', models.URLField(blank=True, null=True)),
('approval', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='approvals.Approval')),
('conference', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='conferences.Conference')),
('contributor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='submissions_contributors', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('date_created',),
'permissions': (('can_set_contributor', 'Can set the contributor for a submission'), ('view_submission', 'Can view submission')),
},
),
]
|
{
"content_hash": "6a635113f1ae17ff31459e7083193065",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 166,
"avg_line_length": 44.63157894736842,
"alnum_prop": 0.6079009433962265,
"repo_name": "jnayak1/osf-meetings",
"id": "1b452994037c1e36902de076ea556ce6aa8cdb81",
"size": "1768",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "meetings/submissions/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "24181"
},
{
"name": "HTML",
"bytes": "50941"
},
{
"name": "JavaScript",
"bytes": "60462"
},
{
"name": "Python",
"bytes": "84489"
},
{
"name": "Shell",
"bytes": "654"
}
],
"symlink_target": ""
}
|
"""
Cloudbreak API
    Cloudbreak is a powerful left surf that breaks over a coral reef, a mile off the southwest of the island of Tavarua, Fiji. Cloudbreak is a cloud agnostic Hadoop as a Service API. It abstracts the provisioning and eases the management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing you to spin up Hadoop clusters of arbitrary sizes on different cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of the cloud providers' APIs (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a>
OpenAPI spec version: 2.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class StackV2Request(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'general': 'GeneralSettings',
'placement': 'PlacementSettings',
'platform_variant': 'str',
'ambari_version': 'str',
'hdp_version': 'str',
'parameters': 'dict(str, object)',
'inputs': 'dict(str, object)',
'custom_domain': 'CustomDomainSettings',
'tags': 'Tags',
'instance_groups': 'list[InstanceGroupsV2]',
'failure_policy': 'FailurePolicyRequest',
'stack_authentication': 'StackAuthentication',
'network': 'NetworkV2Request',
'image_settings': 'ImageSettings',
'flex_id': 'int',
'cluster': 'ClusterV2Request',
'gateway_port': 'int'
}
attribute_map = {
'general': 'general',
'placement': 'placement',
'platform_variant': 'platformVariant',
'ambari_version': 'ambariVersion',
'hdp_version': 'hdpVersion',
'parameters': 'parameters',
'inputs': 'inputs',
'custom_domain': 'customDomain',
'tags': 'tags',
'instance_groups': 'instanceGroups',
'failure_policy': 'failurePolicy',
'stack_authentication': 'stackAuthentication',
'network': 'network',
'image_settings': 'imageSettings',
'flex_id': 'flexId',
'cluster': 'cluster',
'gateway_port': 'gatewayPort'
}
def __init__(self, general=None, placement=None, platform_variant=None, ambari_version=None, hdp_version=None, parameters=None, inputs=None, custom_domain=None, tags=None, instance_groups=None, failure_policy=None, stack_authentication=None, network=None, image_settings=None, flex_id=None, cluster=None, gateway_port=None):
"""
StackV2Request - a model defined in Swagger
"""
self._general = None
self._placement = None
self._platform_variant = None
self._ambari_version = None
self._hdp_version = None
self._parameters = None
self._inputs = None
self._custom_domain = None
self._tags = None
self._instance_groups = None
self._failure_policy = None
self._stack_authentication = None
self._network = None
self._image_settings = None
self._flex_id = None
self._cluster = None
self._gateway_port = None
self.general = general
if placement is not None:
self.placement = placement
if platform_variant is not None:
self.platform_variant = platform_variant
if ambari_version is not None:
self.ambari_version = ambari_version
if hdp_version is not None:
self.hdp_version = hdp_version
if parameters is not None:
self.parameters = parameters
if inputs is not None:
self.inputs = inputs
if custom_domain is not None:
self.custom_domain = custom_domain
if tags is not None:
self.tags = tags
self.instance_groups = instance_groups
if failure_policy is not None:
self.failure_policy = failure_policy
if stack_authentication is not None:
self.stack_authentication = stack_authentication
if network is not None:
self.network = network
if image_settings is not None:
self.image_settings = image_settings
if flex_id is not None:
self.flex_id = flex_id
if cluster is not None:
self.cluster = cluster
if gateway_port is not None:
self.gateway_port = gateway_port
@property
def general(self):
"""
Gets the general of this StackV2Request.
general configuration parameters for a cluster (e.g. 'name', 'credentialname')
:return: The general of this StackV2Request.
:rtype: GeneralSettings
"""
return self._general
@general.setter
def general(self, general):
"""
Sets the general of this StackV2Request.
general configuration parameters for a cluster (e.g. 'name', 'credentialname')
:param general: The general of this StackV2Request.
:type: GeneralSettings
"""
if general is None:
raise ValueError("Invalid value for `general`, must not be `None`")
self._general = general
@property
def placement(self):
"""
Gets the placement of this StackV2Request.
placement configuration parameters for a cluster (e.g. 'region', 'availabilityZone')
:return: The placement of this StackV2Request.
:rtype: PlacementSettings
"""
return self._placement
@placement.setter
def placement(self, placement):
"""
Sets the placement of this StackV2Request.
placement configuration parameters for a cluster (e.g. 'region', 'availabilityZone')
:param placement: The placement of this StackV2Request.
:type: PlacementSettings
"""
self._placement = placement
@property
def platform_variant(self):
"""
Gets the platform_variant of this StackV2Request.
cloud provider api variant
:return: The platform_variant of this StackV2Request.
:rtype: str
"""
return self._platform_variant
@platform_variant.setter
def platform_variant(self, platform_variant):
"""
Sets the platform_variant of this StackV2Request.
cloud provider api variant
:param platform_variant: The platform_variant of this StackV2Request.
:type: str
"""
self._platform_variant = platform_variant
@property
def ambari_version(self):
"""
Gets the ambari_version of this StackV2Request.
specific version of ambari
:return: The ambari_version of this StackV2Request.
:rtype: str
"""
return self._ambari_version
@ambari_version.setter
def ambari_version(self, ambari_version):
"""
Sets the ambari_version of this StackV2Request.
specific version of ambari
:param ambari_version: The ambari_version of this StackV2Request.
:type: str
"""
self._ambari_version = ambari_version
@property
def hdp_version(self):
"""
Gets the hdp_version of this StackV2Request.
specific version of HDP
:return: The hdp_version of this StackV2Request.
:rtype: str
"""
return self._hdp_version
@hdp_version.setter
def hdp_version(self, hdp_version):
"""
Sets the hdp_version of this StackV2Request.
specific version of HDP
:param hdp_version: The hdp_version of this StackV2Request.
:type: str
"""
self._hdp_version = hdp_version
@property
def parameters(self):
"""
Gets the parameters of this StackV2Request.
additional cloud specific parameters for stack
:return: The parameters of this StackV2Request.
:rtype: dict(str, object)
"""
return self._parameters
@parameters.setter
def parameters(self, parameters):
"""
Sets the parameters of this StackV2Request.
additional cloud specific parameters for stack
:param parameters: The parameters of this StackV2Request.
:type: dict(str, object)
"""
self._parameters = parameters
@property
def inputs(self):
"""
Gets the inputs of this StackV2Request.
dynamic properties
:return: The inputs of this StackV2Request.
:rtype: dict(str, object)
"""
return self._inputs
@inputs.setter
def inputs(self, inputs):
"""
Sets the inputs of this StackV2Request.
dynamic properties
:param inputs: The inputs of this StackV2Request.
:type: dict(str, object)
"""
self._inputs = inputs
@property
def custom_domain(self):
"""
Gets the custom_domain of this StackV2Request.
settings related to custom domain names
:return: The custom_domain of this StackV2Request.
:rtype: CustomDomainSettings
"""
return self._custom_domain
@custom_domain.setter
def custom_domain(self, custom_domain):
"""
Sets the custom_domain of this StackV2Request.
settings related to custom domain names
:param custom_domain: The custom_domain of this StackV2Request.
:type: CustomDomainSettings
"""
self._custom_domain = custom_domain
@property
def tags(self):
"""
Gets the tags of this StackV2Request.
stack related tags
:return: The tags of this StackV2Request.
:rtype: Tags
"""
return self._tags
@tags.setter
def tags(self, tags):
"""
Sets the tags of this StackV2Request.
stack related tags
:param tags: The tags of this StackV2Request.
:type: Tags
"""
self._tags = tags
@property
def instance_groups(self):
"""
Gets the instance_groups of this StackV2Request.
        collection of instance groups
:return: The instance_groups of this StackV2Request.
:rtype: list[InstanceGroupsV2]
"""
return self._instance_groups
@instance_groups.setter
def instance_groups(self, instance_groups):
"""
Sets the instance_groups of this StackV2Request.
        collection of instance groups
:param instance_groups: The instance_groups of this StackV2Request.
:type: list[InstanceGroupsV2]
"""
if instance_groups is None:
raise ValueError("Invalid value for `instance_groups`, must not be `None`")
self._instance_groups = instance_groups
@property
def failure_policy(self):
"""
Gets the failure_policy of this StackV2Request.
failure policy in case of failures
:return: The failure_policy of this StackV2Request.
:rtype: FailurePolicyRequest
"""
return self._failure_policy
@failure_policy.setter
def failure_policy(self, failure_policy):
"""
Sets the failure_policy of this StackV2Request.
failure policy in case of failures
:param failure_policy: The failure_policy of this StackV2Request.
:type: FailurePolicyRequest
"""
self._failure_policy = failure_policy
@property
def stack_authentication(self):
"""
Gets the stack_authentication of this StackV2Request.
stack related authentication
:return: The stack_authentication of this StackV2Request.
:rtype: StackAuthentication
"""
return self._stack_authentication
@stack_authentication.setter
def stack_authentication(self, stack_authentication):
"""
Sets the stack_authentication of this StackV2Request.
stack related authentication
:param stack_authentication: The stack_authentication of this StackV2Request.
:type: StackAuthentication
"""
self._stack_authentication = stack_authentication
@property
def network(self):
"""
Gets the network of this StackV2Request.
stack related network
:return: The network of this StackV2Request.
:rtype: NetworkV2Request
"""
return self._network
@network.setter
def network(self, network):
"""
Sets the network of this StackV2Request.
stack related network
:param network: The network of this StackV2Request.
:type: NetworkV2Request
"""
self._network = network
@property
def image_settings(self):
"""
Gets the image_settings of this StackV2Request.
settings for custom images
:return: The image_settings of this StackV2Request.
:rtype: ImageSettings
"""
return self._image_settings
@image_settings.setter
def image_settings(self, image_settings):
"""
Sets the image_settings of this StackV2Request.
settings for custom images
:param image_settings: The image_settings of this StackV2Request.
:type: ImageSettings
"""
self._image_settings = image_settings
@property
def flex_id(self):
"""
Gets the flex_id of this StackV2Request.
id of the related flex subscription
:return: The flex_id of this StackV2Request.
:rtype: int
"""
return self._flex_id
@flex_id.setter
def flex_id(self, flex_id):
"""
Sets the flex_id of this StackV2Request.
id of the related flex subscription
:param flex_id: The flex_id of this StackV2Request.
:type: int
"""
self._flex_id = flex_id
@property
def cluster(self):
"""
Gets the cluster of this StackV2Request.
cluster request object on stack
:return: The cluster of this StackV2Request.
:rtype: ClusterV2Request
"""
return self._cluster
@cluster.setter
def cluster(self, cluster):
"""
Sets the cluster of this StackV2Request.
cluster request object on stack
:param cluster: The cluster of this StackV2Request.
:type: ClusterV2Request
"""
self._cluster = cluster
@property
def gateway_port(self):
"""
Gets the gateway_port of this StackV2Request.
port of the gateway secured proxy
:return: The gateway_port of this StackV2Request.
:rtype: int
"""
return self._gateway_port
@gateway_port.setter
def gateway_port(self, gateway_port):
"""
Sets the gateway_port of this StackV2Request.
port of the gateway secured proxy
:param gateway_port: The gateway_port of this StackV2Request.
:type: int
"""
if gateway_port is not None and gateway_port > 65535:
raise ValueError("Invalid value for `gateway_port`, must be a value less than or equal to `65535`")
if gateway_port is not None and gateway_port < 1025:
raise ValueError("Invalid value for `gateway_port`, must be a value greater than or equal to `1025`")
self._gateway_port = gateway_port
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, StackV2Request):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
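# Usage sketch (all values hypothetical): 'general' and 'instance_groups'
# are the only attributes whose setters reject None; everything else is
# optional.
#
#   req = StackV2Request(general=my_general_settings,
#                        instance_groups=[my_instance_group_v2],
#                        gateway_port=9443)
#   payload = req.to_dict()  # nested dict keyed by the python attribute names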
|
{
"content_hash": "e95d6704375953b9a2b168be8f48a2e0",
"timestamp": "",
"source": "github",
"line_count": 577,
"max_line_length": 984,
"avg_line_length": 30.386481802426342,
"alnum_prop": 0.60554383163178,
"repo_name": "Chaffelson/whoville",
"id": "4d2f26a539a50e60a90a2ec729d5c96419e2b995",
"size": "17550",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "whoville/cloudbreak/models/stack_v2_request.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "6961"
},
{
"name": "HTML",
"bytes": "72038"
},
{
"name": "Python",
"bytes": "3729355"
},
{
"name": "Shell",
"bytes": "95963"
},
{
"name": "TSQL",
"bytes": "345"
}
],
"symlink_target": ""
}
|
'''
NAPALM CLI Tools: validate
===========================
Validating deployments from the shell.
'''
# Python3 support
from __future__ import print_function
from __future__ import unicode_literals
# import helpers
from napalm_base import get_network_driver
from napalm_base.clitools.helpers import build_help
from napalm_base.clitools.helpers import configure_logging
from napalm_base.clitools.helpers import parse_optional_args
from napalm_base.clitools.helpers import warning
# stdlib
import sys
import json
import logging
logger = logging.getLogger('cl_napalm_validate.py')
warning()
def main():
args = build_help(validate=True)
configure_logging(logger, args.debug)
logger.debug('Getting driver for OS "{driver}"'.format(driver=args.vendor))
driver = get_network_driver(args.vendor)
optional_args = parse_optional_args(args.optional_args)
logger.debug('Connecting to device "{}" with user "{}" and optional_args={}'.format(
args.hostname, args.user, optional_args))
with driver(args.hostname, args.user, args.password, optional_args=optional_args) as device:
logger.debug('Generating compliance report')
print(json.dumps(device.compliance_report(args.validation_file), indent=4))
logger.debug('Closing session')
sys.exit(0)
if __name__ == '__main__':
main()
|
{
"content_hash": "f5e0b8e8f52a86981b5933daa4578ae2",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 96,
"avg_line_length": 28.76595744680851,
"alnum_prop": 0.7085798816568047,
"repo_name": "napalm-automation/napalm-base",
"id": "825d79d729d55e1ef174c7a892a52aab7fe6490e",
"size": "1376",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "napalm_base/clitools/cl_napalm_validate.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "213606"
},
{
"name": "Roff",
"bytes": "928"
},
{
"name": "Ruby",
"bytes": "3465"
},
{
"name": "Smarty",
"bytes": "1306"
}
],
"symlink_target": ""
}
|
"""
neighbor/__init__.py
Created by Thomas Mangin on 2015-06-04.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
# import sys
import socket
from copy import deepcopy
from exabgp.protocol.family import AFI
from exabgp.protocol.family import SAFI
from exabgp.bgp.neighbor import Neighbor
from exabgp.bgp.message import OUT
from exabgp.bgp.message.open.holdtime import HoldTime
from exabgp.bgp.message.update.nlri.flow import NLRI
from exabgp.configuration.core import Section
from exabgp.configuration.neighbor.api import ParseAPI
from exabgp.configuration.family import ParseFamily
from exabgp.configuration.parser import boolean
from exabgp.configuration.parser import ip
from exabgp.configuration.parser import asn
from exabgp.configuration.parser import port
from exabgp.configuration.neighbor.parser import ttl
from exabgp.configuration.neighbor.parser import md5
from exabgp.configuration.neighbor.parser import hold_time
from exabgp.configuration.neighbor.parser import router_id
from exabgp.configuration.neighbor.parser import hostname
from exabgp.configuration.neighbor.parser import domainname
from exabgp.configuration.neighbor.parser import description
from exabgp.configuration.neighbor.parser import inherit
def _hostname ():
value = socket.gethostname()
if not value:
return 'localhost'
return value.split('.')[0]
def _domainname ():
value = socket.gethostname()
if not value:
return 'localdomain'
return ''.join(value.split('.')[1:])
class ParseNeighbor (Section):
TTL_SECURITY = 255
syntax = ''
known = {
'inherit': inherit,
'description': description,
'hostname': hostname,
'domainname': domainname,
'router-id': router_id,
'hold-time': hold_time,
'local-address': ip,
'peer-address': ip,
'local-as': asn,
'peer-as': asn,
'passive': boolean,
'listen': port,
'ttl-security': ttl,
'md5': md5,
'group-updates': boolean,
'auto-flush': boolean,
'adj-rib-out': boolean,
'manual-eor': boolean,
}
action = {
'inherit': 'set-command',
'description': 'set-command',
'hostname': 'set-command',
'domainname': 'set-command',
'router-id': 'set-command',
'hold-time': 'set-command',
'local-address': 'set-command',
'peer-address': 'set-command',
'local-as': 'set-command',
'peer-as': 'set-command',
'passive': 'set-command',
'listen': 'set-command',
'ttl-security': 'set-command',
'md5': 'set-command',
'group-updates': 'set-command',
'auto-flush': 'set-command',
'adj-rib-out': 'set-command',
'manual-eor': 'set-command',
'route': 'append-name',
}
default = {
'passive': False,
'group-updates': True,
'auto-flush': True,
'adj-rib-out': False,
'manual-eor': False,
}
name = 'neighbor'
def __init__ (self, tokeniser, scope, error, logger):
Section.__init__(self,tokeniser,scope,error,logger)
self._neighbors = []
self.neighbors = {}
def clear (self):
self._neighbors = []
self.neighbors = {}
def pre (self):
self.scope.to_context()
return self.parse(self.name,'peer-address')
def post (self):
local = self.scope.pop_context(self.name)
neighbor = Neighbor()
# XXX: use the right class for the data type
# XXX: we can use the scope.nlri interface ( and rename it ) to set some values
neighbor.router_id = local.get('router-id',None)
neighbor.peer_address = local.get('peer-address',None)
neighbor.local_address = local.get('local-address',None)
neighbor.local_as = local.get('local-as',None)
neighbor.peer_as = local.get('peer-as',None)
neighbor.passive = local.get('passive',False)
neighbor.listen = local.get('listen',0)
neighbor.hold_time = local.get('hold-time',HoldTime(180))
neighbor.host_name = local.get('host-name',_hostname())
neighbor.domain_name = local.get('domain-name',_domainname())
neighbor.md5 = local.get('md5',None)
neighbor.description = local.get('description','')
neighbor.flush = local.get('auto-flush',True)
neighbor.adjribout = local.get('adj-rib-out',True)
neighbor.aigp = local.get('aigp',None)
neighbor.ttl = local.get('ttl-security',None)
neighbor.group_updates = local.get('group-updates',True)
neighbor.manual_eor = local.get('manual-eor', False)
neighbor.api = ParseAPI.extract()
# capabilities
capability = local.get('capability',{})
neighbor.add_path = capability.get('add-path',0)
neighbor.asn4 = capability.get('asn4',True)
neighbor.multisession = capability.get('multi-session',False)
neighbor.operational = capability.get('operational',False)
neighbor.route_refresh = capability.get('route-refresh',0)
if capability.get('graceful-restart',False) is not False:
neighbor.graceful_restart = capability.get('graceful-restart',0) or int(neighbor.hold_time)
families = []
for family in ParseFamily.convert.keys():
for pair in local.get('family',{}).get(family,[]):
families.append(pair)
families = families or NLRI.known_families()
if (AFI.ipv4,SAFI.unicast) not in families:
families.append((AFI(AFI.ipv4),SAFI(SAFI.unicast)))
for family in families:
neighbor.add_family(family)
neighbor.changes = []
for section in ('static','l2vpn','flow'):
routes = local.get(section,{}).get('routes',[])
for route in routes:
route.nlri.action = OUT.ANNOUNCE
neighbor.changes.extend(routes)
messages = local.get('operational',{}).get('routes',[])
if not neighbor.router_id:
neighbor.router_id = neighbor.local_address
if neighbor.route_refresh:
if not neighbor.adjribout:
self.logger.configuration('route-refresh requested, enabling adj-rib-out')
neighbor.adjribout = True
missing = neighbor.missing()
if missing:
return self.error.set('incomplete neighbor, missing %s' % missing)
if neighbor.local_address.afi != neighbor.peer_address.afi:
return self.error.set('local-address and peer-address must be of the same family')
if neighbor.peer_address.top() in self._neighbors:
return self.error.set('duplicate peer definition %s' % neighbor.peer_address.top())
self._neighbors.append(neighbor.peer_address.top())
# check we are not trying to announce routes without the right MP announcement
for change in neighbor.changes:
family = change.nlri.family()
if family not in families and family != (AFI.ipv4,SAFI.unicast):
return self.error.set('Trying to announce a route of type %s,%s when we are not announcing the family to our peer' % change.nlri.family())
def _init_neighbor (neighbor):
families = neighbor.families()
for change in neighbor.changes:
if change.nlri.family() in families:
# insert the route into the outgoing RIB (only for negotiated families)
neighbor.rib.outgoing.insert_announced_watchdog(change)
for message in messages:
if message.family() in families:
if message.name == 'ASM':
neighbor.asm[message.family()] = message
else:
neighbor.messages.append(message)
self.neighbors[neighbor.name()] = neighbor
# create one neighbor object per family for multisession
if neighbor.multisession and len(neighbor.families()) > 1:
for family in neighbor.families():
# XXX: FIXME: Ok, it works but it takes LOTS of memory ..
m_neighbor = deepcopy(neighbor)
m_neighbor.make_rib()
m_neighbor.rib.outgoing.families = [family]
_init_neighbor(m_neighbor)
else:
neighbor.make_rib()
_init_neighbor(neighbor)
return True
# display configuration
# for line in str(neighbor).split('\n'):
# self.logger.configuration(line)
# self.logger.configuration("\n")
|
{
"content_hash": "9bd9d5d5874a185515a75f5ec19a2d89",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 142,
"avg_line_length": 32.14049586776859,
"alnum_prop": 0.6799948572897917,
"repo_name": "blablacar/exabgp",
"id": "bbf5e7418e390463e55a6dc04948b3f62783b7ac",
"size": "7796",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/exabgp/configuration/neighbor/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Perl",
"bytes": "1516"
},
{
"name": "Python",
"bytes": "1191461"
},
{
"name": "Shell",
"bytes": "17891"
}
],
"symlink_target": ""
}
|
import os
import argparse
import multiprocessing
import itertools
from datetime import datetime
import xarray as xr
from scipy.stats import moment
def get_stats(obs_dataarray):
noise_var = obs_dataarray['noise_var'].values
obs_arr = obs_dataarray['obs'].stack(s=['y', 'x']).values
m2_biased = moment(obs_arr, moment=2, axis=-1, nan_policy='omit')
m3_biased = moment(obs_arr, moment=3, axis=-1, nan_policy='omit')
m4_biased = moment(obs_arr, moment=4, axis=-1, nan_policy='omit')
m2_unbiased = m2_biased - noise_var
m3_unbiased = m3_biased
m4_unbiased = m4_biased - (6 * m2_unbiased * noise_var) - \
(3 * noise_var ** 2)
v_biased = m2_biased
s_biased = m3_biased / m2_biased ** (3 / 2)
k_biased = (m4_biased / m2_biased ** 2) - 3
v_unbiased = m2_unbiased
s_unbiased = m3_unbiased / m2_unbiased ** (3 / 2)
k_unbiased = (m4_unbiased / m2_unbiased ** 2) - 3
return xr.Dataset(
{'m2_biased': (['f'], m2_biased), 'm3_biased': (['f'], m3_biased),
'm4_biased': (['f'], m4_biased), 'm2_unbiased': (['f'], m2_unbiased),
'm3_unbiased': (['f'], m3_unbiased),
'm4_unbiased': (['f'], m4_unbiased), 'v_biased': (['f'], v_biased),
's_biased': (['f'], s_biased), 'k_biased': (['f'], k_biased),
'v_unbiased': (['f'], v_unbiased), 's_unbiased': (['f'], s_unbiased),
'k_unbiased': (['f'], k_unbiased)},
coords={'f': obs_dataarray.coords['f']}
)
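# Why the corrections above work (sketch, assuming the noise is independent,
# zero-mean and Gaussian with variance noise_var): for obs = signal + noise,
#   m2(obs) = m2(sig) + noise_var
#   m3(obs) = m3(sig)                          (odd noise moments vanish)
#   m4(obs) = m4(sig) + 6*m2(sig)*noise_var + 3*noise_var**2
# Inverting these gives the m*_unbiased expressions; variance, skewness and
# kurtosis are then recomputed from the corrected moments.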
def run(params):
start_time = datetime.now()
bin_width, filter_bandwidth, theta, shift, \
signal_field, noise_field, noise_multiplier = params
# Get file paths
signal_dir = '/scratch/pkittiwi/fg1p/signal_map/bin{:.2f}/' \
'fbw{:.2f}/theta{:.1f}/shift{:d}' \
.format(bin_width, filter_bandwidth, theta, shift)
noise_dir = '/scratch/pkittiwi/fg1p/noise_map/bin{:.2f}/' \
'fbw{:.2f}/theta{:.1f}/shift{:d}' \
.format(bin_width, filter_bandwidth, theta, shift)
output_dir = '/scratch/pkittiwi/fg1p/stats_mc/obsn{:.1f}/bin{:.2f}/' \
'fbw{:.2f}/theta{:.1f}/shift{:d}/s{:03d}' \
.format(noise_multiplier, bin_width, filter_bandwidth, theta,
shift, signal_field)
signal_file = '{:s}/signal_map_bin{:.2f}_fbw{:.2f}_' \
'theta{:.1f}_shift{:d}_{:03d}.nc'\
.format(signal_dir, bin_width, filter_bandwidth,
theta, shift, signal_field)
noise_file = '{:s}/noise_map_bin{:.2f}_fbw{:.2f}_' \
'theta{:.1f}_shift{:d}_{:03d}.nc'\
.format(noise_dir, bin_width, filter_bandwidth,
theta, shift, noise_field)
output_file = '{:s}/stats_mc_obsn{:.1f}_bin{:.2f}_fbw{:.2f}_' \
'theta{:.1f}_shift{:d}_{:03d}_{:03d}.nc' \
.format(output_dir, noise_multiplier, bin_width, filter_bandwidth,
theta, shift, signal_field, noise_field)
mask_file = '/scratch/pkittiwi/fg1p/hera331_fov_mask.nc'
obs_dir = '/scratch/pkittiwi/fg1p/obs_map/obsn{:.1f}/bin{:.2f}/' \
'fbw{:.2f}/theta{:.1f}/shift{:d}/s{:03d}' \
.format(noise_multiplier, bin_width, filter_bandwidth, theta,
shift, signal_field)
obs_file = '{:s}/obs_map_obsn{:.1f}_bin{:.2f}_fbw{:.2f}_' \
'theta{:.1f}_shift{:d}_{:03d}_{:03d}.nc' \
.format(obs_dir, noise_multiplier, bin_width, filter_bandwidth,
theta, shift, signal_field, noise_field)
# Load data to memory and align coordinates
with xr.open_dataarray(signal_file) as da:
signal = da.load()
with xr.open_dataarray(noise_file) as da:
noise = da.load()
with xr.open_dataarray(mask_file) as da:
mask = da.load()
for key, values in noise.coords.items():
signal.coords[key] = values
mask.coords[key] = values
signal, noise, mask = xr.align(signal, noise, mask)
# Make observation
signal = signal.where(mask == 1)
noise = noise.where(mask == 1) * noise_multiplier
obs = signal + noise
obs.name = 'obs'
obs.attrs = {'signal_field': signal_field, 'noise_field': noise_field,
'noise_multiplier': noise_multiplier, 'bin_width': bin_width,
'filter_bandwidth': filter_bandwidth, 'theta': theta,
'shift': shift}
# Calculate noise variance
noise_var = noise.var(dim=['y', 'x'])
noise_var.name = 'noise_var'
noise_var.attrs = {
'noise_field': noise_field, 'noise_multiplier': noise_multiplier,
'bin_width': bin_width, 'filter_bandwidth': filter_bandwidth,
'theta': theta, 'shift': shift
}
# Save observation and noise_variance
os.makedirs(obs_dir, exist_ok=True)
obs = xr.merge([obs, noise_var])
obs.to_netcdf(obs_file)
del signal
del noise
del mask
# Calculate statistic
out = get_stats(obs)
out.attrs = {'signal_field': signal_field, 'noise_field': noise_field,
'noise_multiplier': noise_multiplier, 'bin_width': bin_width,
'filter_bandwidth': filter_bandwidth, 'theta': theta,
'shift': shift}
os.makedirs(output_dir, exist_ok=True)
out.to_netcdf(output_file)
out.close()
print(
'Finish. signal_file = {:s}. noise_file = {:s}. output_file = {:s}. '
'Time spent {:.5f} sec.'
.format(signal_file, noise_file, output_file,
(datetime.now() - start_time).total_seconds())
)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
'--bin_width', nargs='+', type=float, required=True,
help='Frequency binning bin width [MHz]'
)
parser.add_argument(
'--filter_bandwidth', nargs='+', type=float, required=True,
help='Filter bandwidth [MHz]'
)
parser.add_argument(
'--theta', nargs='+', type=float, required=True,
help='Instrument max FoV from the field center. [deg]'
)
parser.add_argument(
'--shift', nargs='+', type=int, required=True,
help='Wedge shift in kz [pixel]'
)
parser.add_argument(
'--signal_field', nargs='+', type=int, required=True,
help='Signal field number [0, 20]'
)
parser.add_argument(
'--noise_field', nargs='+', type=int, required=True,
help='Noise field number [0, 499]'
)
parser.add_argument(
'--noise_multiplier', nargs='+', type=float, default=[1.0],
help='Multiply noise map by this value before calculation'
)
parser.add_argument(
'--nprocs', type=int, default=1, help='Processes to spawn'
)
args = parser.parse_args()
iter_params = itertools.product(
args.bin_width, args.filter_bandwidth, args.theta, args.shift,
args.signal_field, args.noise_field, args.noise_multiplier
)
pool = multiprocessing.Pool(args.nprocs)
pool.map(run, iter_params)
pool.close()
pool.join()
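# Example invocation (argument values are illustrative; the hard-coded paths
# above must exist on the cluster):
#   python cal_mc_stats_mp.py --bin_width 0.08 --filter_bandwidth 8.0 \
#       --theta 10.0 --shift 0 --signal_field 0 --noise_field 0 1 2 \
#       --noise_multiplier 1.0 --nprocs 4
# This expands to one task per parameter combination via itertools.product
# and fans the tasks out over 4 worker processes.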
|
{
"content_hash": "0cf654151188c43eb5a93630f15bb7f6",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 78,
"avg_line_length": 38.13812154696133,
"alnum_prop": 0.581776039403158,
"repo_name": "piyanatk/sim",
"id": "7adec985e68d9a084f8ddab1581ba9252017970b",
"size": "6903",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/fg1p/cal_mc_stats_mp.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
from panda3d.core import NodePath
from direct.interval.IntervalGlobal import *
from direct.showbase import PythonUtil
from toontown.battle.BattleProps import globalPropPool
from direct.directnotify import DirectNotifyGlobal
SFX = PythonUtil.Enum('poof, magic')
SFXPATHS = {SFX.poof: 'phase_4/audio/sfx/firework_distance_02.ogg',
SFX.magic: 'phase_4/audio/sfx/SZ_DD_treasure.ogg'}
class DustCloud(NodePath):
dustCloudCount = 0
sounds = {}
notify = DirectNotifyGlobal.directNotify.newCategory('DustCloud')
def __init__(self, parent = hidden, fBillboard = 1, wantSound = 0):
NodePath.__init__(self)
self.assign(globalPropPool.getProp('suit_explosion_dust'))
if fBillboard:
self.setBillboardAxis()
self.reparentTo(parent)
self.seqNode = self.find('**/+SequenceNode').node()
self.seqNode.setFrameRate(0)
self.wantSound = wantSound
if self.wantSound and not DustCloud.sounds:
DustCloud.sounds[SFX.poof] = loader.loadSfx(SFXPATHS[SFX.poof])
self.track = None
self.trackId = DustCloud.dustCloudCount
DustCloud.dustCloudCount += 1
self.setBin('fixed', 100, 1)
self.hide()
return
def createTrack(self, rate = 24):
def getSoundFuncIfAble(soundId):
sound = DustCloud.sounds.get(soundId)
if self.wantSound and sound:
return sound.play
else:
def dummy():
pass
return dummy
tflipDuration = self.seqNode.getNumChildren() / float(rate)
self.track = Sequence(
Func(self.show),
Func(self.messaging),
Func(self.seqNode.play, 0, self.seqNode.getNumFrames() - 1),
Func(self.seqNode.setFrameRate, rate),
Func(getSoundFuncIfAble(SFX.poof)),
Wait(tflipDuration),
Func(self._resetTrack),
name='dustCloud-track-%d' % self.trackId)
def _resetTrack(self):
self.seqNode.setFrameRate(0)
self.hide()
def messaging(self):
self.notify.debug('CREATING TRACK ID: %s' % self.trackId)
def isPlaying(self):
return self.track is not None and self.track.isPlaying()
def play(self, rate = 24):
self.stop()
self.createTrack(rate)
self.track.start()
def loop(self, rate = 24):
self.stop()
self.createTrack(rate)
self.track.loop()
def stop(self):
if self.track:
self.track.finish()
self.track.clearToInitial()
def destroy(self):
self.notify.debug('DESTROYING TRACK ID: %s' % self.trackId)
if self.track:
self._resetTrack()
self.track.clearToInitial()
del self.track
del self.seqNode
self.removeNode()
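# Minimal usage sketch (assumes a running ShowBase instance; 'render' and
# the phase_* art/sound assets come from the game itself):
#   cloud = DustCloud(parent=render, wantSound=1)
#   cloud.setPos(0, 10, 0)
#   cloud.play(rate=24)   # one-shot poof; loop(rate) repeats it
#   ...
#   cloud.destroy()       # destroy() when done to free the node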
|
{
"content_hash": "a7ce255f5b846d943389a2aeab9001ec",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 291,
"avg_line_length": 33.94252873563219,
"alnum_prop": 0.5997290890619709,
"repo_name": "DedMemez/ODS-August-2017",
"id": "e4eec4891fa06d691ff83db004c0caaf1a5a5d4e",
"size": "3040",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "effects/DustCloud.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "10152014"
},
{
"name": "Shell",
"bytes": "707"
}
],
"symlink_target": ""
}
|
import os
import tqdm
# Clean these folders and save parsed versions of them.
clean_folders = ["bnwiki", "arwiki", "ruwiki", "ptwiki", "idwiki"]
for i in range(len(clean_folders)):
clean_folder = clean_folders[i]
output_folder = clean_folders[i]+"_parsed"
os.mkdir(output_folder)
for folder in tqdm.tqdm(os.listdir(clean_folder)):
path = os.path.join(clean_folder, folder)
os.mkdir(os.path.join(output_folder, folder))
for file in os.listdir(path):
article = []
with open(os.path.join(path, file)) as f:
for line in f:
if line.startswith("</doc>") or line.startswith("<doc"):
continue
else:
article.append(line)
with open(os.path.join(output_folder, folder, file), "w+") as f:
for line in article:
f.write(line+"\n")
|
{
"content_hash": "5f643e7fe269fa3d8bfebc895fbb8aea",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 76,
"avg_line_length": 38.958333333333336,
"alnum_prop": 0.5508021390374331,
"repo_name": "keras-team/keras-nlp",
"id": "1caddcf8e3afbd1df8f33b1322baa6b2f0771fed",
"size": "935",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/pretrained_tokenizers/word_piece_cleaning_script.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "288"
},
{
"name": "Jsonnet",
"bytes": "779"
},
{
"name": "Jupyter Notebook",
"bytes": "464150"
},
{
"name": "Python",
"bytes": "730841"
},
{
"name": "Shell",
"bytes": "1279"
}
],
"symlink_target": ""
}
|
from django.dispatch import Signal
user_created = Signal(providing_args=['user'])
user_updated = Signal(providing_args=['user'])
user_deleted = Signal(providing_args=['user'])
user_groups_updated = Signal(providing_args=['user'])
user_confirmed = Signal(providing_args=['request', 'user'])
user_rejected = Signal(providing_args=['request', 'user'])
user_activated = Signal(providing_args=['request', 'user'])
user_disabled = Signal(providing_args=['request', 'user'])
user_enabled = Signal(providing_args=['request', 'user'])
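# Example receiver (illustrative, not part of this module):
#
#   from django.dispatch import receiver
#   from daiquiri.auth.signals import user_confirmed
#
#   @receiver(user_confirmed)
#   def notify_admin(sender, request, user, **kwargs):
#       ...  # e.g. mail an admin that `user` was confirmed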
|
{
"content_hash": "e8ee137b321d40238f34034ae34aebe9",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 59,
"avg_line_length": 44,
"alnum_prop": 0.7253787878787878,
"repo_name": "aipescience/django-daiquiri",
"id": "4f4cdc776e318b94fd2ace7c409a5e2076840584",
"size": "528",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "daiquiri/auth/signals.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "28598"
},
{
"name": "HTML",
"bytes": "236579"
},
{
"name": "JavaScript",
"bytes": "97087"
},
{
"name": "Python",
"bytes": "602159"
}
],
"symlink_target": ""
}
|
from kuai import Kuai, set_backend
set_backend('signal')
def hello(name):
print(f"Hello, {name}")
signal = Kuai.on('signal', hello)
signal("cymrow")
|
{
"content_hash": "5b66cbd4e3a007c113663d1a3c4c7666",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 34,
"avg_line_length": 15.7,
"alnum_prop": 0.6751592356687898,
"repo_name": "Duroktar/Kuai",
"id": "975aeafb0c7d6ed50502f326bd5469b9110160e4",
"size": "157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/signal-backend-demo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2262"
},
{
"name": "Python",
"bytes": "17883"
}
],
"symlink_target": ""
}
|
# [START texttospeech_v1beta1_generated_TextToSpeech_SynthesizeSpeech_sync]
from google.cloud import texttospeech_v1beta1
def sample_synthesize_speech():
# Create a client
client = texttospeech_v1beta1.TextToSpeechClient()
# Initialize request argument(s)
input = texttospeech_v1beta1.SynthesisInput()
input.text = "text_value"
voice = texttospeech_v1beta1.VoiceSelectionParams()
voice.language_code = "language_code_value"
audio_config = texttospeech_v1beta1.AudioConfig()
audio_config.audio_encoding = "ALAW"
request = texttospeech_v1beta1.SynthesizeSpeechRequest(
input=input,
voice=voice,
audio_config=audio_config,
)
# Make the request
response = client.synthesize_speech(request=request)
# Handle the response
print(response)
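# The response carries the synthesized bytes in `audio_content`; for the
# ALAW encoding configured above they can be written straight to disk
# (file name is illustrative):
#   with open("output.alaw", "wb") as out:
#       out.write(response.audio_content)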
# [END texttospeech_v1beta1_generated_TextToSpeech_SynthesizeSpeech_sync]
|
{
"content_hash": "f5412f33a574c3b9e7b98fb57cf9d40a",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 73,
"avg_line_length": 27.433333333333334,
"alnum_prop": 0.7193195625759417,
"repo_name": "googleapis/python-texttospeech",
"id": "a82e3d2b59bfe1ebcae416532d27baac06b2b189",
"size": "2228",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/generated_samples/texttospeech_v1beta1_generated_text_to_speech_synthesize_speech_sync.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "368339"
},
{
"name": "Shell",
"bytes": "30678"
}
],
"symlink_target": ""
}
|
import copy
from unittest import mock
import netaddr
from oslo_serialization import jsonutils
from oslo_utils.fixture import uuidsentinel as uuids
from webob import exc
from nova.api.openstack.compute import hypervisors as hypervisors_v21
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
CPU_INFO = """
{"arch": "x86_64",
"vendor": "fake",
"topology": {"cores": 1, "threads": 1, "sockets": 1},
"features": [],
"model": ""}"""
TEST_HYPERS = [
dict(id=1,
uuid=uuids.hyper1,
service_id=1,
host="compute1",
vcpus=4,
memory_mb=10 * 1024,
local_gb=250,
vcpus_used=2,
memory_mb_used=5 * 1024,
local_gb_used=125,
hypervisor_type="xen",
hypervisor_version=3,
hypervisor_hostname="hyper1",
free_ram_mb=5 * 1024,
free_disk_gb=125,
current_workload=2,
running_vms=2,
cpu_info=CPU_INFO,
disk_available_least=100,
host_ip=netaddr.IPAddress('1.1.1.1')),
dict(id=2,
uuid=uuids.hyper2,
service_id=2,
host="compute2",
vcpus=4,
memory_mb=10 * 1024,
local_gb=250,
vcpus_used=2,
memory_mb_used=5 * 1024,
local_gb_used=125,
hypervisor_type="xen",
hypervisor_version=3,
hypervisor_hostname="hyper2",
free_ram_mb=5 * 1024,
free_disk_gb=125,
current_workload=2,
running_vms=2,
cpu_info=CPU_INFO,
disk_available_least=100,
host_ip=netaddr.IPAddress('2.2.2.2'))]
TEST_SERVICES = [
objects.Service(id=1,
uuid=uuids.service1,
host="compute1",
binary="nova-compute",
topic="compute_topic",
report_count=5,
disabled=False,
disabled_reason=None,
availability_zone="nova"),
objects.Service(id=2,
uuid=uuids.service2,
host="compute2",
binary="nova-compute",
topic="compute_topic",
report_count=5,
disabled=False,
disabled_reason=None,
availability_zone="nova"),
]
TEST_HYPERS_OBJ = [objects.ComputeNode(**hyper_dct)
for hyper_dct in TEST_HYPERS]
TEST_HYPERS[0].update({'service': TEST_SERVICES[0]})
TEST_HYPERS[1].update({'service': TEST_SERVICES[1]})
TEST_SERVERS = [dict(name="inst1", uuid=uuids.instance_1, host="compute1"),
dict(name="inst2", uuid=uuids.instance_2, host="compute2"),
dict(name="inst3", uuid=uuids.instance_3, host="compute1"),
dict(name="inst4", uuid=uuids.instance_4, host="compute2")]
def fake_compute_node_get_all(context, limit=None, marker=None):
if marker in ['99999', uuids.invalid_marker]:
raise exception.MarkerNotFound(marker)
marker_found = marker is None
output = []
for hyper in TEST_HYPERS_OBJ:
# Starting with the 2.53 microversion, the marker is a uuid.
if not marker_found and marker in (str(hyper.id), hyper.uuid):
marker_found = True
elif marker_found:
if limit is None or len(output) < int(limit):
output.append(hyper)
return output
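# Illustrative walk-through: with marker=str(TEST_HYPERS_OBJ[0].id) and
# limit=1, the loop skips the first node (it *is* the marker) and returns
# only the second -- mirroring how the real DB API pages past the marker row.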
def fake_compute_node_search_by_hypervisor(context, hypervisor_re):
return TEST_HYPERS_OBJ
def fake_compute_node_get(context, compute_id):
for hyper in TEST_HYPERS_OBJ:
if hyper.uuid == compute_id:
return hyper
if (
(isinstance(compute_id, int) or compute_id.isdigit()) and
hyper.id == int(compute_id)
):
return hyper
raise exception.ComputeHostNotFound(host=compute_id)
def fake_service_get_by_compute_host(context, host):
for service in TEST_SERVICES:
if service.host == host:
return service
def fake_compute_node_statistics(context):
result = dict(
count=0,
vcpus=0,
memory_mb=0,
local_gb=0,
vcpus_used=0,
memory_mb_used=0,
local_gb_used=0,
free_ram_mb=0,
free_disk_gb=0,
current_workload=0,
running_vms=0,
disk_available_least=0,
)
for hyper in TEST_HYPERS_OBJ:
for key in result:
if key == 'count':
result[key] += 1
else:
result[key] += getattr(hyper, key)
return result
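# With the two TEST_HYPERS above this yields count=2, vcpus=8,
# memory_mb=20480, local_gb=500, vcpus_used=4, ... -- exactly the totals
# asserted in test_statistics below.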
def fake_instance_get_all_by_host(context, host):
results = []
for inst in TEST_SERVERS:
if inst['host'] == host:
inst_obj = fake_instance.fake_instance_obj(context, **inst)
results.append(inst_obj)
return results
class HypervisorsTestV21(test.NoDBTestCase):
api_version = '2.1'
# Allow subclasses to override if the id value in the response is the
# compute node primary key integer id or the uuid.
expect_uuid_for_id = False
# TODO(stephenfin): These should just be defined here
TEST_HYPERS_OBJ = copy.deepcopy(TEST_HYPERS_OBJ)
TEST_SERVICES = copy.deepcopy(TEST_SERVICES)
TEST_SERVERS = copy.deepcopy(TEST_SERVERS)
DETAIL_HYPERS_DICTS = copy.deepcopy(TEST_HYPERS)
del DETAIL_HYPERS_DICTS[0]['service_id']
del DETAIL_HYPERS_DICTS[1]['service_id']
del DETAIL_HYPERS_DICTS[0]['host']
del DETAIL_HYPERS_DICTS[1]['host']
del DETAIL_HYPERS_DICTS[0]['uuid']
del DETAIL_HYPERS_DICTS[1]['uuid']
DETAIL_HYPERS_DICTS[0].update({'state': 'up',
'status': 'enabled',
'service': dict(id=1, host='compute1',
disabled_reason=None)})
DETAIL_HYPERS_DICTS[1].update({'state': 'up',
'status': 'enabled',
'service': dict(id=2, host='compute2',
disabled_reason=None)})
INDEX_HYPER_DICTS = [
dict(id=1, hypervisor_hostname="hyper1",
state='up', status='enabled'),
dict(id=2, hypervisor_hostname="hyper2",
state='up', status='enabled')]
DETAIL_NULL_CPUINFO_DICT = {'': '', None: None}
def _get_request(self, use_admin_context, url='', version=None):
return fakes.HTTPRequest.blank(
url,
use_admin_context=use_admin_context,
version=version or self.api_version)
def _get_hyper_id(self):
"""Helper function to get the proper hypervisor id for a request
:returns: The first hypervisor's uuid for microversions that expect a
uuid for the id, otherwise the hypervisor's id primary key
"""
return (self.TEST_HYPERS_OBJ[0].uuid if self.expect_uuid_for_id
else self.TEST_HYPERS_OBJ[0].id)
def setUp(self):
super(HypervisorsTestV21, self).setUp()
self.controller = hypervisors_v21.HypervisorsController()
self.controller.servicegroup_api.service_is_up = mock.MagicMock(
return_value=True)
host_api = self.controller.host_api
host_api.compute_node_get_all = mock.MagicMock(
side_effect=fake_compute_node_get_all)
host_api.service_get_by_compute_host = mock.MagicMock(
side_effect=fake_service_get_by_compute_host)
host_api.compute_node_search_by_hypervisor = mock.MagicMock(
side_effect=fake_compute_node_search_by_hypervisor)
host_api.compute_node_get = mock.MagicMock(
side_effect=fake_compute_node_get)
self.stub_out('nova.db.main.api.compute_node_statistics',
fake_compute_node_statistics)
def test_view_hypervisor_nodetail_noservers(self):
req = self._get_request(True)
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], self.TEST_SERVICES[0], False, req)
self.assertEqual(self.INDEX_HYPER_DICTS[0], result)
def test_view_hypervisor_detail_noservers(self):
req = self._get_request(True)
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], self.TEST_SERVICES[0], True, req)
self.assertEqual(self.DETAIL_HYPERS_DICTS[0], result)
def test_view_hypervisor_nodetail_service_down(self):
self.controller.servicegroup_api.service_is_up.return_value = False
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS[0])
expected_dict['state'] = 'down'
req = self._get_request(True)
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], self.TEST_SERVICES[0], False, req)
self.assertEqual(expected_dict, result)
def test_view_hypervisor_detail_service_down(self):
self.controller.servicegroup_api.service_is_up.return_value = False
expected_dict = copy.deepcopy(self.DETAIL_HYPERS_DICTS[0])
expected_dict['state'] = 'down'
req = self._get_request(True)
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], self.TEST_SERVICES[0], True, req)
self.assertEqual(expected_dict, result)
def test_view_hypervisor_nodetail_service_disabled(self):
service = copy.deepcopy(TEST_SERVICES[0])
service.disabled = True
req = self._get_request(True)
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], service, False, req)
self.assertEqual('disabled', result['status'])
def test_view_hypervisor_detail_service_disabled(self):
service = copy.deepcopy(TEST_SERVICES[0])
service.disabled = True
req = self._get_request(True)
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], service, True, req)
self.assertEqual('disabled', result['status'])
def test_view_hypervisor_servers(self):
req = self._get_request(True)
result = self.controller._view_hypervisor(self.TEST_HYPERS_OBJ[0],
self.TEST_SERVICES[0],
False, req,
self.TEST_SERVERS)
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS[0])
expected_dict.update({'servers': [
dict(name="inst1", uuid=uuids.instance_1),
dict(name="inst2", uuid=uuids.instance_2),
dict(name="inst3", uuid=uuids.instance_3),
dict(name="inst4", uuid=uuids.instance_4)]})
self.assertEqual(expected_dict, result)
def _test_view_hypervisor_detail_cpuinfo_null(self, cpu_info):
req = self._get_request(True)
test_hypervisor_obj = copy.deepcopy(self.TEST_HYPERS_OBJ[0])
test_hypervisor_obj.cpu_info = cpu_info
result = self.controller._view_hypervisor(test_hypervisor_obj,
self.TEST_SERVICES[0],
True, req)
expected_dict = copy.deepcopy(self.DETAIL_HYPERS_DICTS[0])
expected_dict.update({'cpu_info':
self.DETAIL_NULL_CPUINFO_DICT[cpu_info]})
self.assertEqual(result, expected_dict)
def test_view_hypervisor_detail_cpuinfo_empty_string(self):
self._test_view_hypervisor_detail_cpuinfo_null('')
def test_view_hypervisor_detail_cpuinfo_none(self):
self._test_view_hypervisor_detail_cpuinfo_null(None)
def test_index(self):
req = self._get_request(True)
result = self.controller.index(req)
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
def test_index_compute_host_not_found(self):
"""Tests that if a service is deleted but the compute node is not we
don't fail when listing hypervisors.
"""
# two computes, a matching service only exists for the first one
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.ComputeHostNotFound(host=host)
m_get = self.controller.host_api.compute_node_get_all
m_get.side_effect = None
m_get.return_value = compute_nodes
self.controller.host_api.service_get_by_compute_host.side_effect = (
fake_service_get_by_compute_host)
req = self._get_request(True)
result = self.controller.index(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].uuid if self.expect_uuid_for_id
else compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
self.assertDictEqual(expected, result['hypervisors'][0])
def test_index_compute_host_not_mapped(self):
"""Tests that we don't fail index if a host is not mapped."""
# two computes, a matching service only exists for the first one
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.HostMappingNotFound(name=host)
self.controller.host_api.compute_node_get_all.return_value = (
compute_nodes)
self.controller.host_api.service_get_by_compute_host = (
fake_service_get_by_compute_host)
req = self._get_request(True)
result = self.controller.index(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].uuid if self.expect_uuid_for_id
else compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
self.assertDictEqual(expected, result['hypervisors'][0])
def test_detail(self):
req = self._get_request(True)
result = self.controller.detail(req)
self.assertEqual(dict(hypervisors=self.DETAIL_HYPERS_DICTS), result)
def test_detail_compute_host_not_found(self):
"""Tests that if a service is deleted but the compute node is not we
don't fail when listing hypervisors.
"""
# two computes, a matching service only exists for the first one
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.ComputeHostNotFound(host=host)
m_get = self.controller.host_api.compute_node_get_all
m_get.side_effect = None
m_get.return_value = compute_nodes
self.controller.host_api.service_get_by_compute_host.side_effect = (
fake_service_get_by_compute_host)
req = self._get_request(True)
result = self.controller.detail(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
# we don't care about all of the details, just make sure we get
# the subset we care about and there are more keys than what index
# would return
hypervisor = result['hypervisors'][0]
self.assertTrue(
set(expected.keys()).issubset(set(hypervisor.keys())))
self.assertGreater(len(hypervisor.keys()), len(expected.keys()))
self.assertEqual(compute_nodes[0].hypervisor_hostname,
hypervisor['hypervisor_hostname'])
def test_detail_compute_host_not_mapped(self):
"""Tests that if a service is deleted but the compute node is not we
don't fail when listing hypervisors.
"""
# two computes, a matching service only exists for the first one
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.HostMappingNotFound(name=host)
self.controller.host_api.service_get_by_compute_host.side_effect = (
fake_service_get_by_compute_host)
self.controller.host_api.compute_node_get_all.return_value = (
compute_nodes)
req = self._get_request(True)
result = self.controller.detail(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
# we don't care about all of the details, just make sure we get
# the subset we care about and there are more keys than what index
# would return
hypervisor = result['hypervisors'][0]
self.assertTrue(
set(expected.keys()).issubset(set(hypervisor.keys())))
self.assertGreater(len(hypervisor.keys()), len(expected.keys()))
self.assertEqual(compute_nodes[0].hypervisor_hostname,
hypervisor['hypervisor_hostname'])
def test_show(self):
req = self._get_request(True)
hyper_id = self._get_hyper_id()
result = self.controller.show(req, hyper_id)
self.assertEqual({'hypervisor': self.DETAIL_HYPERS_DICTS[0]}, result)
def test_show_compute_host_not_mapped(self):
"""Tests that if a service is deleted but the compute node is not we
don't fail when listing hypervisors.
"""
self.controller.host_api.service_get_by_compute_host.side_effect = (
exception.HostMappingNotFound(name='foo'))
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(
exc.HTTPNotFound, self.controller.show, req, hyper_id)
self.assertTrue(
self.controller.host_api.service_get_by_compute_host.called)
self.controller.host_api.compute_node_get.assert_called_once_with(
mock.ANY, hyper_id)
def test_show_noid(self):
req = self._get_request(True)
hyperid = uuids.hyper3 if self.expect_uuid_for_id else '3'
self.assertRaises(exc.HTTPNotFound, self.controller.show, req, hyperid)
def test_show_non_integer_id(self):
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.show, req, 'abc')
def test_uptime(self):
with mock.patch.object(
self.controller.host_api, 'get_host_uptime',
return_value='fake uptime',
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
result = self.controller.uptime(req, hyper_id)
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS[0])
expected_dict.update({'uptime': "fake uptime"})
self.assertEqual(dict(hypervisor=expected_dict), result)
self.assertEqual(1, mock_get_uptime.call_count)
def test_uptime_noid(self):
req = self._get_request(True)
hyper_id = uuids.hyper3 if self.expect_uuid_for_id else '3'
self.assertRaises(exc.HTTPNotFound, self.controller.uptime, req,
hyper_id)
def test_uptime_not_implemented(self):
with mock.patch.object(
self.controller.host_api, 'get_host_uptime',
side_effect=NotImplementedError,
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(
exc.HTTPNotImplemented,
self.controller.uptime, req, hyper_id)
self.assertEqual(1, mock_get_uptime.call_count)
def test_uptime_host_not_found(self):
with mock.patch.object(
self.controller.host_api, 'get_host_uptime',
side_effect=exception.HostNotFound('foo'),
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(
exc.HTTPBadRequest,
self.controller.uptime, req, hyper_id)
self.assertEqual(1, mock_get_uptime.call_count)
def test_uptime_non_integer_id(self):
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.uptime, req, 'abc')
def test_uptime_hypervisor_down(self):
with mock.patch.object(self.controller.host_api, 'get_host_uptime',
side_effect=exception.ComputeServiceUnavailable(host='dummy')
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPBadRequest,
self.controller.uptime, req, hyper_id)
mock_get_uptime.assert_called_once_with(
mock.ANY, self.TEST_HYPERS_OBJ[0].host)
def test_uptime_hypervisor_not_mapped_service_get(self):
self.controller.host_api.service_get_by_compute_host.side_effect = (
exception.HostMappingNotFound(name='dummy'))
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPNotFound,
self.controller.uptime, req, hyper_id)
self.assertTrue(
self.controller.host_api.service_get_by_compute_host.called)
def test_uptime_hypervisor_not_mapped(self):
with mock.patch.object(self.controller.host_api, 'get_host_uptime',
side_effect=exception.HostMappingNotFound(name='dummy')
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPNotFound,
self.controller.uptime, req, hyper_id)
mock_get_uptime.assert_called_once_with(
mock.ANY, self.TEST_HYPERS_OBJ[0].host)
def test_search(self):
req = self._get_request(True)
result = self.controller.search(req, 'hyper')
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
def test_search_non_exist(self):
m_search = self.controller.host_api.compute_node_search_by_hypervisor
m_search.side_effect = None
m_search.return_value = []
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.search, req, 'a')
self.assertEqual(1, m_search.call_count)
def test_search_unmapped(self):
m_search = self.controller.host_api.compute_node_search_by_hypervisor
m_search.side_effect = None
m_search.return_value = [mock.MagicMock()]
self.controller.host_api.service_get_by_compute_host.side_effect = (
exception.HostMappingNotFound(name='foo'))
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.search, req, 'a')
self.assertTrue(
self.controller.host_api.service_get_by_compute_host.called)
@mock.patch.object(objects.InstanceList, 'get_by_host',
side_effect=fake_instance_get_all_by_host)
def test_servers(self, mock_get):
req = self._get_request(True)
result = self.controller.servers(req, 'hyper')
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS)
expected_dict[0].update({'servers': [
dict(uuid=uuids.instance_1),
dict(uuid=uuids.instance_3)]})
expected_dict[1].update({'servers': [
dict(uuid=uuids.instance_2),
dict(uuid=uuids.instance_4)]})
for output in result['hypervisors']:
servers = output['servers']
for server in servers:
del server['name']
self.assertEqual(dict(hypervisors=expected_dict), result)
def test_servers_not_mapped(self):
req = self._get_request(True)
with mock.patch.object(
self.controller.host_api, 'instance_get_all_by_host',
side_effect=exception.HostMappingNotFound(name='something'),
):
self.assertRaises(
exc.HTTPNotFound,
self.controller.servers, req, 'hyper')
def test_servers_compute_host_not_found(self):
req = self._get_request(True)
self.controller.host_api.service_get_by_compute_host.side_effect = (
exception.ComputeHostNotFound(host='foo'))
with mock.patch.object(
self.controller.host_api,
'instance_get_all_by_host',
side_effect=fake_instance_get_all_by_host,
):
# The result should be empty since every attempt to fetch the
# service for a hypervisor "failed"
result = self.controller.servers(req, 'hyper')
self.assertEqual({'hypervisors': []}, result)
def test_servers_non_id(self):
m_search = self.controller.host_api.compute_node_search_by_hypervisor
m_search.side_effect = None
m_search.return_value = []
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound,
self.controller.servers,
req, '115')
self.assertEqual(1, m_search.call_count)
def test_servers_with_non_integer_hypervisor_id(self):
m_search = self.controller.host_api.compute_node_search_by_hypervisor
m_search.side_effect = None
m_search.return_value = []
req = self._get_request(True)
self.assertRaises(
exc.HTTPNotFound, self.controller.servers, req, 'abc')
self.assertEqual(1, m_search.call_count)
def test_servers_with_no_servers(self):
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host',
return_value=[]) as mock_inst_get_all:
req = self._get_request(True)
result = self.controller.servers(req, self.TEST_HYPERS_OBJ[0].id)
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
self.assertTrue(mock_inst_get_all.called)
def test_statistics(self):
req = self._get_request(True)
result = self.controller.statistics(req)
self.assertEqual(dict(hypervisor_statistics=dict(
count=2,
vcpus=8,
memory_mb=20 * 1024,
local_gb=500,
vcpus_used=4,
memory_mb_used=10 * 1024,
local_gb_used=250,
free_ram_mb=10 * 1024,
free_disk_gb=250,
current_workload=4,
running_vms=4,
disk_available_least=200)), result)
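# The subclasses below follow nova's usual microversion-test pattern: each
# class bumps api_version and overrides only the expectations or tests that
# changed at that microversion, inheriting everything else from its parent.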
class HypervisorsTestV228(HypervisorsTestV21):
api_version = '2.28'
DETAIL_HYPERS_DICTS = copy.deepcopy(HypervisorsTestV21.DETAIL_HYPERS_DICTS)
DETAIL_HYPERS_DICTS[0]['cpu_info'] = jsonutils.loads(CPU_INFO)
DETAIL_HYPERS_DICTS[1]['cpu_info'] = jsonutils.loads(CPU_INFO)
DETAIL_NULL_CPUINFO_DICT = {'': {}, None: {}}
class HypervisorsTestV233(HypervisorsTestV228):
api_version = '2.33'
def test_index_pagination(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors?limit=1&marker=1')
result = self.controller.index(req)
expected = {
'hypervisors': [
{'hypervisor_hostname': 'hyper2',
'id': 2,
'state': 'up',
'status': 'enabled'}
],
'hypervisors_links': [
{'href': 'http://localhost/v2/os-hypervisors?limit=1&marker=2',
'rel': 'next'}
]
}
self.assertEqual(expected, result)
def test_index_pagination_with_invalid_marker(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors?marker=99999')
self.assertRaises(exc.HTTPBadRequest,
self.controller.index, req)
def test_index_pagination_with_invalid_non_int_limit(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors?limit=-9')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_pagination_with_invalid_string_limit(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors?limit=abc')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_duplicate_query_parameters_with_invalid_string_limit(self):
req = self._get_request(
True,
'/v2/1234/os-hypervisors/?limit=1&limit=abc')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_duplicate_query_parameters_validation(self):
expected = [{
'hypervisor_hostname': 'hyper2',
'id': 2,
'state': 'up',
'status': 'enabled'}
]
params = {
'limit': 1,
'marker': 1,
}
for param, value in params.items():
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?marker=1&%s=%s&%s=%s' %
(param, value, param, value))
result = self.controller.index(req)
self.assertEqual(expected, result['hypervisors'])
def test_index_pagination_with_additional_filter(self):
expected = {
'hypervisors': [
{'hypervisor_hostname': 'hyper2',
'id': 2,
'state': 'up',
'status': 'enabled'}
],
'hypervisors_links': [
{'href': 'http://localhost/v2/os-hypervisors?limit=1&marker=2',
'rel': 'next'}
]
}
req = self._get_request(
True, '/v2/1234/os-hypervisors?limit=1&marker=1&additional=3')
result = self.controller.index(req)
self.assertEqual(expected, result)
def test_detail_pagination(self):
req = self._get_request(
True, '/v2/1234/os-hypervisors/detail?limit=1&marker=1')
result = self.controller.detail(req)
link = 'http://localhost/v2/os-hypervisors/detail?limit=1&marker=2'
expected = {
'hypervisors': [
{'cpu_info': {'arch': 'x86_64',
'features': [],
'model': '',
'topology': {'cores': 1,
'sockets': 1,
'threads': 1},
'vendor': 'fake'},
'current_workload': 2,
'disk_available_least': 100,
'free_disk_gb': 125,
'free_ram_mb': 5120,
'host_ip': netaddr.IPAddress('2.2.2.2'),
'hypervisor_hostname': 'hyper2',
'hypervisor_type': 'xen',
'hypervisor_version': 3,
'id': 2,
'local_gb': 250,
'local_gb_used': 125,
'memory_mb': 10240,
'memory_mb_used': 5120,
'running_vms': 2,
'service': {'disabled_reason': None,
'host': 'compute2',
'id': 2},
'state': 'up',
'status': 'enabled',
'vcpus': 4,
'vcpus_used': 2}
],
'hypervisors_links': [{'href': link, 'rel': 'next'}]
}
self.assertEqual(expected, result)
def test_detail_pagination_with_invalid_marker(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors/detail?marker=99999')
self.assertRaises(exc.HTTPBadRequest,
self.controller.detail, req)
def test_detail_pagination_with_invalid_string_limit(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors/detail?limit=abc')
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_detail_duplicate_query_parameters_with_invalid_string_limit(self):
req = self._get_request(
True,
'/v2/1234/os-hypervisors/detail?limit=1&limit=abc')
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_detail_duplicate_query_parameters_validation(self):
expected = [
{'cpu_info': {'arch': 'x86_64',
'features': [],
'model': '',
'topology': {'cores': 1,
'sockets': 1,
'threads': 1},
'vendor': 'fake'},
'current_workload': 2,
'disk_available_least': 100,
'free_disk_gb': 125,
'free_ram_mb': 5120,
'host_ip': netaddr.IPAddress('2.2.2.2'),
'hypervisor_hostname': 'hyper2',
'hypervisor_type': 'xen',
'hypervisor_version': 3,
'id': 2,
'local_gb': 250,
'local_gb_used': 125,
'memory_mb': 10240,
'memory_mb_used': 5120,
'running_vms': 2,
'service': {'disabled_reason': None,
'host': 'compute2',
'id': 2},
'state': 'up',
'status': 'enabled',
'vcpus': 4,
'vcpus_used': 2}
]
params = {
'limit': 1,
'marker': 1,
}
for param, value in params.items():
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/detail?marker=1&%s=%s&%s=%s' %
(param, value, param, value))
result = self.controller.detail(req)
self.assertEqual(expected, result['hypervisors'])
def test_detail_pagination_with_additional_filter(self):
link = 'http://localhost/v2/os-hypervisors/detail?limit=1&marker=2'
expected = {
'hypervisors': [
{'cpu_info': {'arch': 'x86_64',
'features': [],
'model': '',
'topology': {'cores': 1,
'sockets': 1,
'threads': 1},
'vendor': 'fake'},
'current_workload': 2,
'disk_available_least': 100,
'free_disk_gb': 125,
'free_ram_mb': 5120,
'host_ip': netaddr.IPAddress('2.2.2.2'),
'hypervisor_hostname': 'hyper2',
'hypervisor_type': 'xen',
'hypervisor_version': 3,
'id': 2,
'local_gb': 250,
'local_gb_used': 125,
'memory_mb': 10240,
'memory_mb_used': 5120,
'running_vms': 2,
'service': {'disabled_reason': None,
'host': 'compute2',
'id': 2},
'state': 'up',
'status': 'enabled',
'vcpus': 4,
'vcpus_used': 2}
],
'hypervisors_links': [{
'href': link,
'rel': 'next'}]
}
req = self._get_request(
True, '/v2/1234/os-hypervisors/detail?limit=1&marker=1&unknown=2')
result = self.controller.detail(req)
self.assertEqual(expected, result)
class HypervisorsTestV252(HypervisorsTestV233):
"""This is a boundary test to make sure 2.52 works like 2.33."""
api_version = '2.52'
class HypervisorsTestV253(HypervisorsTestV252):
api_version = hypervisors_v21.UUID_FOR_ID_MIN_VERSION
expect_uuid_for_id = True
# This is an expected response for index().
INDEX_HYPER_DICTS = [
dict(id=uuids.hyper1, hypervisor_hostname="hyper1",
state='up', status='enabled'),
dict(id=uuids.hyper2, hypervisor_hostname="hyper2",
state='up', status='enabled')]
def setUp(self):
super(HypervisorsTestV253, self).setUp()
# This is an expected response for detail().
for index, detail_hyper_dict in enumerate(self.DETAIL_HYPERS_DICTS):
detail_hyper_dict['id'] = TEST_HYPERS[index]['uuid']
detail_hyper_dict['service']['id'] = TEST_SERVICES[index].uuid
def test_servers(self):
"""Asserts that calling the servers route after 2.52 fails."""
self.assertRaises(exception.VersionNotFoundForAPIMethod,
self.controller.servers,
self._get_request(True), 'hyper')
def test_servers_not_mapped(self):
# the separate 'servers' API has been removed, so skip this test
pass
def test_servers_compute_host_not_found(self):
# the separate 'servers' API has been removed, so skip this test
pass
def test_servers_non_id(self):
# the separate 'servers' API has been removed, so skip this test
pass
def test_servers_with_non_integer_hypervisor_id(self):
# the separate 'servers' API has been removed, so skip this test
pass
def test_servers_with_no_servers(self):
# the separate 'servers' API has been removed, so skip this test
pass
def test_index_with_no_servers(self):
"""Tests GET /os-hypervisors?with_servers=1 when there are no
instances on the given host.
"""
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host',
return_value=[]) as mock_inst_get_all:
req = self._get_request(use_admin_context=True,
url='/os-hypervisors?with_servers=1')
result = self.controller.index(req)
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
# instance_get_all_by_host is called for each hypervisor
self.assertEqual(2, mock_inst_get_all.call_count)
mock_inst_get_all.assert_has_calls((
mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[0].host),
mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[1].host)))
def test_index_with_servers_not_mapped(self):
"""Tests that instance_get_all_by_host fails with HostMappingNotFound.
"""
req = self._get_request(use_admin_context=True,
url='/os-hypervisors?with_servers=1')
with mock.patch.object(
self.controller.host_api, 'instance_get_all_by_host',
side_effect=exception.HostMappingNotFound(name='something')):
result = self.controller.index(req)
self.assertEqual(dict(hypervisors=[]), result)
def test_index_with_servers_compute_host_not_found(self):
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?with_servers=1')
self.controller.host_api.service_get_by_compute_host.side_effect = (
exception.ComputeHostNotFound(host='foo'))
with mock.patch.object(
self.controller.host_api,
"instance_get_all_by_host",
side_effect=fake_instance_get_all_by_host,
):
# The result should be empty since every attempt to fetch the
# service for a hypervisor "failed"
result = self.controller.index(req)
self.assertEqual({'hypervisors': []}, result)
def test_index_with_servers(self):
"""Tests GET /os-hypervisors?with_servers=True"""
instances = [
objects.InstanceList(objects=[objects.Instance(
id=1, uuid=uuids.hyper1_instance1)]),
objects.InstanceList(objects=[objects.Instance(
id=2, uuid=uuids.hyper2_instance1)])]
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host',
side_effect=instances) as mock_inst_get_all:
req = self._get_request(use_admin_context=True,
url='/os-hypervisors?with_servers=True')
result = self.controller.index(req)
index_with_servers = copy.deepcopy(self.INDEX_HYPER_DICTS)
index_with_servers[0]['servers'] = [
{'name': 'instance-00000001', 'uuid': uuids.hyper1_instance1}]
index_with_servers[1]['servers'] = [
{'name': 'instance-00000002', 'uuid': uuids.hyper2_instance1}]
self.assertEqual(dict(hypervisors=index_with_servers), result)
# instance_get_all_by_host is called for each hypervisor
self.assertEqual(2, mock_inst_get_all.call_count)
mock_inst_get_all.assert_has_calls((
mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[0].host),
mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[1].host)))
def test_index_with_servers_invalid_parameter(self):
"""Tests using an invalid with_servers query parameter."""
req = self._get_request(use_admin_context=True,
url='/os-hypervisors?with_servers=invalid')
self.assertRaises(
exception.ValidationError, self.controller.index, req)
def test_index_with_hostname_pattern_and_paging_parameters(self):
"""This is a negative test to validate that trying to list hypervisors
with a hostname pattern and paging parameters results in a 400 error.
"""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?hypervisor_hostname_pattern=foo&'
'limit=1&marker=%s' % uuids.marker)
ex = self.assertRaises(exc.HTTPBadRequest, self.controller.index, req)
self.assertIn('Paging over hypervisors with the '
'hypervisor_hostname_pattern query parameter is not '
'supported.', str(ex))
def test_index_with_hostname_pattern_no_match(self):
"""This is a poorly named test, it's really checking the 404 case where
there is no match for the hostname pattern.
"""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?with_servers=yes&'
'hypervisor_hostname_pattern=shenzhen')
m_search = self.controller.host_api.compute_node_search_by_hypervisor
m_search.side_effect = None
m_search.return_value = objects.ComputeNodeList()
self.assertRaises(exc.HTTPNotFound, self.controller.index, req)
m_search.assert_called_once_with(
req.environ['nova.context'], 'shenzhen')
def test_detail_with_hostname_pattern(self):
"""Test listing hypervisors with details and using the
hypervisor_hostname_pattern query string.
"""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?hypervisor_hostname_pattern=shenzhen')
m_search = self.controller.host_api.compute_node_search_by_hypervisor
m_search.side_effect = None
m_search.return_value = objects.ComputeNodeList(
objects=[TEST_HYPERS_OBJ[0]])
result = self.controller.detail(req)
m_search.assert_called_once_with(
req.environ['nova.context'], 'shenzhen')
expected = {'hypervisors': [self.DETAIL_HYPERS_DICTS[0]]}
# There are no links when using the hypervisor_hostname_pattern
# query string since we can't page using a pattern matcher.
self.assertNotIn('hypervisors_links', result)
self.assertDictEqual(expected, result)
def test_detail_invalid_hostname_pattern_parameter(self):
"""Tests passing an invalid hypervisor_hostname_pattern query
parameter.
"""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?hypervisor_hostname_pattern=invalid~host')
self.assertRaises(
exception.ValidationError, self.controller.detail, req)
def test_search(self):
"""Asserts that calling the search route after 2.52 fails."""
self.assertRaises(exception.VersionNotFoundForAPIMethod,
self.controller.search,
self._get_request(True), 'hyper')
def test_search_non_exist(self):
"""This is a duplicate of test_servers_with_non_integer_hypervisor_id.
"""
pass
def test_search_unmapped(self):
"""This is already tested with test_index_compute_host_not_mapped."""
pass
def test_show_non_integer_id(self):
"""There is no reason to test this for 2.53 since 2.53 requires a
non-integer id (requires a uuid).
"""
pass
def test_show_integer_id(self):
"""Tests that we get a 400 if passed a hypervisor integer id to show().
"""
req = self._get_request(True)
ex = self.assertRaises(exc.HTTPBadRequest,
self.controller.show, req, '1')
self.assertIn('Invalid uuid 1', str(ex))
def test_show_with_servers_invalid_parameter(self):
"""Tests passing an invalid value for the with_servers query parameter
to the show() method to make sure the query parameter is validated.
"""
hyper_id = self._get_hyper_id()
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/%s?with_servers=invalid' % hyper_id)
ex = self.assertRaises(
exception.ValidationError, self.controller.show, req, hyper_id)
self.assertIn('with_servers', str(ex))
def test_show_with_servers_host_mapping_not_found(self):
"""Tests that a 404 is returned if instance_get_all_by_host raises
HostMappingNotFound.
"""
hyper_id = self._get_hyper_id()
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/%s?with_servers=true' % hyper_id)
with mock.patch.object(
self.controller.host_api, 'instance_get_all_by_host',
side_effect=exception.HostMappingNotFound(name=hyper_id)):
self.assertRaises(exc.HTTPNotFound, self.controller.show,
req, hyper_id)
def test_show_with_servers(self):
"""Tests the show() result when servers are included in the output."""
instances = objects.InstanceList(objects=[objects.Instance(
id=1, uuid=uuids.hyper1_instance1)])
hyper_id = self._get_hyper_id()
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/%s?with_servers=on' % hyper_id)
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host',
return_value=instances) as mock_inst_get_all:
result = self.controller.show(req, hyper_id)
show_with_servers = copy.deepcopy(self.DETAIL_HYPERS_DICTS[0])
show_with_servers['servers'] = [
{'name': 'instance-00000001', 'uuid': uuids.hyper1_instance1}]
self.assertDictEqual(dict(hypervisor=show_with_servers), result)
# instance_get_all_by_host is called
mock_inst_get_all.assert_called_once_with(
req.environ['nova.context'], TEST_HYPERS_OBJ[0].host)
def test_show_duplicate_query_parameters_validation(self):
"""Tests that the show query parameter schema enforces only a single
entry for any query parameter.
"""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/%s?with_servers=1&with_servers=1' %
uuids.hyper1)
self.assertRaises(exception.ValidationError,
self.controller.show, req, uuids.hyper1)
def test_uptime_non_integer_id(self):
"""There is no reason to test this for 2.53 since 2.53 requires a
non-integer id (requires a uuid).
"""
pass
def test_uptime_integer_id(self):
"""Tests that we get a 400 if passed a hypervisor integer id to
uptime().
"""
req = self._get_request(True)
ex = self.assertRaises(exc.HTTPBadRequest,
self.controller.uptime, req, '1')
self.assertIn('Invalid uuid 1', str(ex))
def test_detail_pagination(self):
"""Tests details paging with uuid markers."""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/detail?limit=1&marker=%s' %
TEST_HYPERS_OBJ[0].uuid)
result = self.controller.detail(req)
link = ('http://localhost/v2/os-hypervisors/detail?limit=1&marker=%s' %
TEST_HYPERS_OBJ[1].uuid)
expected = {
'hypervisors': [self.DETAIL_HYPERS_DICTS[1]],
'hypervisors_links': [{'href': link, 'rel': 'next'}]
}
self.assertEqual(expected, result)
def test_detail_pagination_with_invalid_marker(self):
"""Tests detail paging with an invalid marker (not found)."""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/detail?marker=%s' % uuids.invalid_marker)
self.assertRaises(exc.HTTPBadRequest,
self.controller.detail, req)
def test_detail_pagination_with_additional_filter(self):
req = self._get_request(
True, '/v2/1234/os-hypervisors/detail?limit=1&marker=9&unknown=2')
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_detail_duplicate_query_parameters_validation(self):
"""Tests that the list Detail query parameter schema enforces only a
single entry for any query parameter.
"""
params = {
'limit': 1,
'marker': uuids.marker,
'hypervisor_hostname_pattern': 'foo',
'with_servers': 'true'
}
for param, value in params.items():
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/detail?%s=%s&%s=%s' %
(param, value, param, value))
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_index_pagination(self):
"""Tests index paging with uuid markers."""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?limit=1&marker=%s' %
TEST_HYPERS_OBJ[0].uuid)
result = self.controller.index(req)
link = ('http://localhost/v2/os-hypervisors?limit=1&marker=%s' %
TEST_HYPERS_OBJ[1].uuid)
expected = {
'hypervisors': [{
'hypervisor_hostname': 'hyper2',
'id': TEST_HYPERS_OBJ[1].uuid,
'state': 'up',
'status': 'enabled'
}],
'hypervisors_links': [{'href': link, 'rel': 'next'}]
}
self.assertEqual(expected, result)
def test_index_pagination_with_invalid_marker(self):
"""Tests index paging with an invalid marker (not found)."""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?marker=%s' % uuids.invalid_marker)
self.assertRaises(exc.HTTPBadRequest,
self.controller.index, req)
def test_index_pagination_with_additional_filter(self):
req = self._get_request(
True, '/v2/1234/os-hypervisors/?limit=1&marker=9&unknown=2')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_duplicate_query_parameters_validation(self):
"""Tests that the list query parameter schema enforces only a single
entry for any query parameter.
"""
params = {
'limit': 1,
'marker': uuids.marker,
'hypervisor_hostname_pattern': 'foo',
'with_servers': 'true'
}
for param, value in params.items():
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?%s=%s&%s=%s' %
(param, value, param, value))
self.assertRaises(exception.ValidationError,
self.controller.index, req)
class HypervisorsTestV275(HypervisorsTestV253):
api_version = '2.75'
def _test_servers_with_no_servers(self, func, version=None, **kwargs):
"""Tests GET APIs return 'servers' field in response even
no servers on hypervisors.
"""
with mock.patch.object(
self.controller.host_api,
'instance_get_all_by_host',
return_value=[],
):
req = self._get_request(
url='/os-hypervisors?with_servers=1',
use_admin_context=True,
version=version)
result = func(req, **kwargs)
return result
def test_index_with_no_servers(self):
result = self._test_servers_with_no_servers(self.controller.index)
for hyper in result['hypervisors']:
self.assertEqual(0, len(hyper['servers']))
def test_index_with_no_servers_old_version(self):
result = self._test_servers_with_no_servers(
self.controller.index, version='2.74')
for hyper in result['hypervisors']:
self.assertNotIn('servers', hyper)
def test_detail_with_no_servers(self):
result = self._test_servers_with_no_servers(self.controller.detail)
for hyper in result['hypervisors']:
self.assertEqual(0, len(hyper['servers']))
def test_detail_with_no_servers_old_version(self):
result = self._test_servers_with_no_servers(
self.controller.detail, version='2.74')
for hyper in result['hypervisors']:
self.assertNotIn('servers', hyper)
def test_show_with_no_servers(self):
result = self._test_servers_with_no_servers(
self.controller.show, id=uuids.hyper1)
self.assertEqual(0, len(result['hypervisor']['servers']))
def test_show_with_no_servers_old_version(self):
result = self._test_servers_with_no_servers(
self.controller.show, version='2.74', id=uuids.hyper1)
self.assertNotIn('servers', result['hypervisor'])
class HypervisorsTestV288(HypervisorsTestV275):
api_version = '2.88'
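    # Microversion 2.88 removed the inventory-style fields (cpu_info, vcpus,
    # memory/disk usage, running_vms, ...) from the hypervisor responses and
    # moved uptime into the show/detail payloads, so the expected dicts below
    # drop those keys and gain an 'uptime' entry.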
DETAIL_HYPERS_DICTS = copy.deepcopy(HypervisorsTestV21.DETAIL_HYPERS_DICTS)
for hypervisor in DETAIL_HYPERS_DICTS:
for key in (
'cpu_info', 'current_workload', 'disk_available_least',
'free_disk_gb', 'free_ram_mb', 'local_gb', 'local_gb_used',
'memory_mb', 'memory_mb_used', 'running_vms', 'vcpus',
'vcpus_used',
):
del hypervisor[key]
hypervisor['uptime'] = 'fake uptime'
def setUp(self):
super().setUp()
self.controller.host_api.get_host_uptime = mock.MagicMock(
return_value='fake uptime')
def test_view_hypervisor_detail_cpuinfo_empty_string(self):
# cpu_info is no longer included in the response, so skip this test
pass
def test_view_hypervisor_detail_cpuinfo_none(self):
# cpu_info is no longer included in the response, so skip this test
pass
def test_uptime(self):
req = self._get_request(True)
self.assertRaises(
exception.VersionNotFoundForAPIMethod,
self.controller.uptime, req)
def test_uptime_old_version(self):
req = self._get_request(use_admin_context=True, version='2.87')
hyper_id = self._get_hyper_id()
# no exception == pass
self.controller.uptime(req, hyper_id)
def test_uptime_noid(self):
# the separate 'uptime' API has been removed, so skip this test
pass
def test_uptime_not_implemented(self):
# the separate 'uptime' API has been removed, so skip this test
pass
def test_uptime_implemented(self):
# the separate 'uptime' API has been removed, so skip this test
pass
def test_uptime_integer_id(self):
# the separate 'uptime' API has been removed, so skip this test
pass
def test_uptime_host_not_found(self):
# the separate 'uptime' API has been removed, so skip this test
pass
def test_uptime_hypervisor_down(self):
# the separate 'uptime' API has been removed, so skip this test
pass
def test_uptime_hypervisor_not_mapped_service_get(self):
# the separate 'uptime' API has been removed, so skip this test
pass
def test_uptime_hypervisor_not_mapped(self):
# the separate 'uptime' API has been removed, so skip this test
pass
def test_show_with_uptime_notimplemented(self):
self.controller.host_api.get_host_uptime.side_effect = (
NotImplementedError())
req = self._get_request(use_admin_context=True)
hyper_id = self._get_hyper_id()
result = self.controller.show(req, hyper_id)
expected_dict = copy.deepcopy(self.DETAIL_HYPERS_DICTS[0])
expected_dict.update({'uptime': None})
self.assertEqual({'hypervisor': expected_dict}, result)
self.assertEqual(
1, self.controller.host_api.get_host_uptime.call_count)
def test_show_with_uptime_hypervisor_down(self):
self.controller.host_api.get_host_uptime.side_effect = (
exception.ComputeServiceUnavailable(host='dummy'))
req = self._get_request(use_admin_context=True)
hyper_id = self._get_hyper_id()
result = self.controller.show(req, hyper_id)
expected_dict = copy.deepcopy(self.DETAIL_HYPERS_DICTS[0])
expected_dict.update({'uptime': None})
self.assertEqual({'hypervisor': expected_dict}, result)
self.assertEqual(
1,
self.controller.host_api.get_host_uptime.call_count
)
def test_show_old_version(self):
        # 'uptime' is only included in the show response starting with 2.88
req = self._get_request(use_admin_context=True, version='2.87')
hyper_id = self._get_hyper_id()
result = self.controller.show(req, hyper_id)
self.assertNotIn('uptime', result)
def test_statistics(self):
req = self._get_request(use_admin_context=True)
self.assertRaises(
exception.VersionNotFoundForAPIMethod,
self.controller.statistics, req)
def test_statistics_old_version(self):
req = self._get_request(use_admin_context=True, version='2.87')
# no exception == pass
self.controller.statistics(req)
|
{
"content_hash": "4547c9e709db783c6ebe1b2a254d1ae0",
"timestamp": "",
"source": "github",
"line_count": 1533,
"max_line_length": 79,
"avg_line_length": 39.69341161121983,
"alnum_prop": 0.5717830731306491,
"repo_name": "openstack/nova",
"id": "a908988811f5d792e89a96aefd1b3d250bc0c1a6",
"size": "61490",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/tests/unit/api/openstack/compute/test_hypervisors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "3545"
},
{
"name": "Mako",
"bytes": "1952"
},
{
"name": "Python",
"bytes": "23261880"
},
{
"name": "Shell",
"bytes": "28113"
},
{
"name": "Smarty",
"bytes": "507244"
}
],
"symlink_target": ""
}
|
import urllib2
import json
import argparse
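# Prints, for every change currently in the given Zuul pipeline, an
# "ssh ... gerrit review" command that would post the given comment.
# Hypothetical invocation (the host and pipeline names are illustrative
# only):
#   python zuul-changes.py http://zuul.example.org gate "recheck"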
parser = argparse.ArgumentParser()
parser.add_argument('url', help='The URL of the running Zuul instance')
parser.add_argument('pipeline_name', help='The name of the Zuul pipeline')
parser.add_argument('comment', help='The text of the Gerrit comment')
parser.add_argument('--review-host', default='review',
help='The Gerrit hostname')
options = parser.parse_args()
data = urllib2.urlopen('%s/status.json' % options.url).read()
data = json.loads(data)
for pipeline in data['pipelines']:
if pipeline['name'] != options.pipeline_name:
continue
for queue in pipeline['change_queues']:
for head in queue['heads']:
for change in head:
print 'ssh %s gerrit review %s --message \\"%s\\"' % (
options.review_host,
change['id'],
options.comment)
|
{
"content_hash": "88dce55ca1350152ec264732cb703344",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 74,
"avg_line_length": 36.56,
"alnum_prop": 0.6258205689277899,
"repo_name": "devdattakulkarni/zuul_messaging",
"id": "7fc541b1efc02351e50a9afcaa002999f99e8f4e",
"size": "1612",
"binary": false,
"copies": "1",
"ref": "refs/heads/solum_messaging_trigger",
"path": "tools/zuul-changes.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "9486"
},
{
"name": "Python",
"bytes": "352158"
},
{
"name": "Shell",
"bytes": "594"
}
],
"symlink_target": ""
}
|
"""This script handles all of the processing for versioning packages.
package_version.py manages all of the various operations done between
packages, including archiving, extracting, uploading, and downloading
packages. For a list of options and commands, see the help for the script.
Glossary:
Package: A list of archives, such as "nacl_x86_glibc" or "nacl_x86_newlib".
Package Archive: An archive (usually a tar file) that is part of a package.
Package Target: Package targets consist of packages. Each package target
has its own version of a package. An example of a package target would
be something such as "win_x86_nacl_x86" or "mac_x86_nacl_x86". In that case,
"win_x86_nacl_x86" and "mac_x86_nacl_x86" would each have their own version
of "nacl_x86_glibc" and "nacl_x86_newlib" for windows and mac respectively.
Revision: The revision identifier of a sanctioned version.
This is used to synchronize packages to sanctioned versions.
JSON Files:
Packages File - A file which describes the various package targets for each
platform/architecture along with the packages associated with each package
target.
[Default file: build/package_version/standard_packages.json].
Package File - A file which contains the list of package archives within
a package.
[Default file: toolchain/.tars/$PACKAGE_TARGET/$PACKAGE.json]
Archive File - A file which describes an archive within a package. Each
archive description file will contain information about an archive such
as name, URL to download from, and hash.
[Default File: toolchain/.tars/$PACKAGE_TARGET/$PACKAGE/$ARCHIVE.json]
Revision File - A file which describes the sanctioned version of package
for each of the package targets associated with it.
[Default file: toolchain_revisions/$PACKAGE.json]
"""
from __future__ import print_function
import argparse
import collections
import logging
import os
import shutil
import sys
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.dirname(SCRIPT_DIR))
import cygtar
sys.path.append(os.path.dirname(os.path.dirname(SCRIPT_DIR)))
import pynacl.file_tools
import pynacl.gsd_storage
import pynacl.log_tools
import pynacl.platform
import pynacl.working_directory
import archive_info
import error
import package_info
import package_locations
import packages_info
import revision_info
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_DIR = os.path.dirname(CURRENT_DIR)
NACL_DIR = os.path.dirname(BUILD_DIR)
TEMP_SUFFIX = '.tmp'
DEFAULT_PACKAGES_JSON = os.path.join(CURRENT_DIR, 'standard_packages.json')
DEFAULT_REVISIONS_DIR = os.path.join(NACL_DIR, 'toolchain_revisions')
DEFAULT_DEST_DIR = os.path.join(NACL_DIR, 'toolchain')
DEFAULT_CLOUD_BUCKET = 'nativeclient-archive2'
#
# These are helper functions that help each command.
#
def CleanTempFiles(directory):
"""Cleans up all temporary files ending with TEMP_SUFFIX in a directory."""
for root, dirs, files in os.walk(directory):
for file_name in files:
if file_name.endswith(TEMP_SUFFIX):
file_path = os.path.join(root, file_name)
os.unlink(file_path)
def GetPackageTargetPackages(custom_package_name, package_target_packages):
"""Returns a list of package target packages given a custom package name.
A custom package name can either have a specified package target attached
to it (IE. $PACKAGE_TARGET/PACKAGE_NAME) or be extracted out of a default
list of package targets along with their packages.
Args:
custom_package_name: Package name with an optional package target.
package_target_packages: List of tuples (package_target, package).
Returns:
List of package target tuples matching the package name.
"""
package_path = custom_package_name.replace('\\', os.path.sep)
package_path = package_path.replace('/', os.path.sep)
if os.path.sep in package_path:
# Package target is part of the custom package name, just return it.
package_target, package_name = package_path.split(os.path.sep, 1)
return [(package_target, package_name)]
# Package target is not part of the package name, filter from list of passed
# in package targets.
return [
(package_target, package)
for package_target, package in package_target_packages
if package == custom_package_name
]
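# For example (behavior derived from the logic above): a fully qualified
# name keeps its explicit target,
#   GetPackageTargetPackages('win_x86/nacl_x86_glibc', [])
#     -> [('win_x86', 'nacl_x86_glibc')]
# while a bare name is matched against the supplied (package_target, package)
# pairs,
#   GetPackageTargetPackages('nacl_x86_glibc',
#                            [('win_x86', 'nacl_x86_glibc')])
#     -> [('win_x86', 'nacl_x86_glibc')]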
def DownloadPackageArchives(tar_dir, package_target, package_name, package_desc,
downloader=None, revision_num=None,
include_logs=False):
"""Downloads package archives from the cloud to the tar directory.
Args:
tar_dir: Root tar directory where archives will be downloaded to.
package_target: Package target of the package to download.
package_name: Package name of the package to download.
package_desc: package_info object of the package to download.
    downloader: function which takes a url and a file path for downloading.
    revision_num: Revision identifier used for logging purposes.
    include_logs: Whether to also download the log next to each archive.
Returns:
The list of files that were downloaded.
"""
downloaded_files = []
if downloader is None:
downloader = pynacl.gsd_storage.HttpDownload
local_package_file = package_locations.GetLocalPackageFile(tar_dir,
package_target,
package_name)
  # Download the package information file along with each of the package
  # archives described in it. Also keep track of which new package names
  # match old package names; stale package names will have to be deleted
  # once we are finished.
update_archives = []
for archive_obj in package_desc.GetArchiveList():
archive_desc = archive_obj.GetArchiveData()
local_archive_file = package_locations.GetLocalPackageArchiveFile(
tar_dir,
archive_desc.name,
archive_desc.hash)
old_hash = archive_info.GetArchiveHash(local_archive_file)
if archive_desc.hash == old_hash:
logging.debug('Skipping matching archive: %s', archive_desc.name)
continue
archive_tuple = (local_archive_file, archive_obj.GetArchiveData())
update_archives.append(archive_tuple)
if update_archives:
logging.info('--Syncing %s to revision %s--' % (package_name, revision_num))
num_archives = len(update_archives)
for index, archive_tuple in enumerate(update_archives):
local_archive_file, archive_desc = archive_tuple
pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)
if archive_desc.url is None:
raise error.Error('Error, no URL for archive: %s' % archive_desc.name)
logging.info('Downloading package archive: %s (%d/%d)' %
(archive_desc.name, index+1, num_archives))
try:
downloader(archive_desc.url, local_archive_file)
except Exception as e:
raise error.Error('Could not download URL (%s): %s' %
(archive_desc.url, e))
# Delete any stale log files
local_archive_log = package_locations.GetLocalPackageArchiveLogFile(
local_archive_file)
if os.path.isfile(local_archive_log):
os.unlink(local_archive_log)
verified_hash = archive_info.GetArchiveHash(local_archive_file)
if verified_hash != archive_desc.hash:
raise error.Error('Package hash check failed: %s != %s' %
(verified_hash, archive_desc.hash))
downloaded_files.append(local_archive_file)
# Download any logs if include_logs is True.
if include_logs:
download_logs = []
for archive_obj in package_desc.GetArchiveList():
archive_desc = archive_obj.GetArchiveData()
if archive_desc.log_url:
local_archive_file = package_locations.GetLocalPackageArchiveFile(
tar_dir,
archive_desc.name,
archive_desc.hash)
local_archive_log = package_locations.GetLocalPackageArchiveLogFile(
local_archive_file)
if not os.path.isfile(local_archive_log):
download_log_tuple = (archive_desc.name,
archive_desc.log_url,
local_archive_log)
download_logs.append(download_log_tuple)
if download_logs:
logging.info('--Syncing %s Logs--' % (package_name))
num_logs = len(download_logs)
for index, download_log_tuple in enumerate(download_logs):
name, log_url, local_log_file = download_log_tuple
logging.info('Downloading archive log: %s (%d/%d)' %
(name, index+1, num_logs))
try:
downloader(log_url, local_log_file)
except Exception as e:
          raise error.Error('Could not download log URL (%s): %s' %
                            (log_url, e))
# Save the package file so we know what we currently have.
if not update_archives and os.path.isfile(local_package_file):
try:
local_package_desc = package_info.PackageInfo(local_package_file)
if local_package_desc == package_desc:
return downloaded_files
    except Exception:
# Something is wrong with our package file, just resave it.
pass
package_desc.SavePackageFile(local_package_file)
return downloaded_files
def ArchivePackageArchives(tar_dir, package_target, package_name, archives,
extra_archives=[]):
"""Archives local package archives to the tar directory.
Args:
tar_dir: Root tar directory where archives live.
package_target: Package target of the package to archive.
package_name: Package name of the package to archive.
archives: List of archive file paths where archives currently live.
extra_archives: Extra archives that are expected to be build elsewhere.
Returns:
Returns the local package file that was archived.
"""
local_package_file = package_locations.GetLocalPackageFile(tar_dir,
package_target,
package_name)
valid_archive_files = set()
archive_list = []
package_desc = package_info.PackageInfo()
package_archives = ([(archive, False) for archive in archives] +
[(archive, True) for archive in extra_archives])
for archive, skip_missing in package_archives:
archive_url = None
archive_log_url = None
if '@' in archive:
archive, archive_url = archive.split('@', 1)
if ',' in archive_url:
archive_url, archive_log_url = archive_url.split(',', 1)
extract_param = ''
tar_src_dir = ''
extract_dir = ''
if ',' in archive:
archive, extract_param = archive.split(',', 1)
if ':' in extract_param:
tar_src_dir, extract_dir = extract_param.split(':', 1)
else:
tar_src_dir = extract_param
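    # To illustrate the spec format parsed above (values are hypothetical):
    #   'gcc.tgz,gcc_src:gcc@http://x/gcc.tgz,http://x/gcc.log'
    # yields archive='gcc.tgz', tar_src_dir='gcc_src', extract_dir='gcc',
    # archive_url='http://x/gcc.tgz' and archive_log_url='http://x/gcc.log'.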
archive_hash = archive_info.GetArchiveHash(archive)
archive_name = os.path.basename(archive)
archive_desc = archive_info.ArchiveInfo(name=archive_name,
hash=archive_hash,
url=archive_url,
tar_src_dir=tar_src_dir,
extract_dir=extract_dir,
log_url=archive_log_url)
package_desc.AppendArchive(archive_desc)
if archive_hash is None:
if skip_missing:
logging.info('Skipping archival of missing file: %s', archive)
continue
raise error.Error('Invalid package: %s.' % archive)
archive_list.append(archive)
archive_basename = os.path.basename(archive)
archive_json = archive_basename + '.json'
valid_archive_files.update([archive_basename, archive_json])
# We do not need to archive the package if it already matches. But if the
# local package file is invalid or does not match, then we should recreate
# the json file.
if os.path.isfile(local_package_file):
try:
current_package_desc = package_info.PackageInfo(local_package_file,
skip_missing=True)
if current_package_desc == package_desc:
        return local_package_file
except ValueError:
pass
# Copy each of the packages over to the tar directory first.
for archive_file in archive_list:
archive_name = os.path.basename(archive_file)
archive_hash = archive_info.GetArchiveHash(archive_file)
local_archive_file = package_locations.GetLocalPackageArchiveFile(
tar_dir,
archive_name,
archive_hash
)
if archive_hash == archive_info.GetArchiveHash(local_archive_file):
logging.info('Skipping archive of duplicate file: %s', archive_file)
else:
logging.info('Archiving file: %s', archive_file)
pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)
shutil.copyfile(archive_file, local_archive_file)
# Once all the copying is completed, update the local packages file.
logging.info('Package "%s" archived: %s', package_name, local_package_file)
pynacl.file_tools.MakeParentDirectoryIfAbsent(local_package_file)
package_desc.SavePackageFile(local_package_file)
return local_package_file
def UploadPackage(storage, revision, tar_dir, package_target, package_name,
is_shared_package, annotate=False, skip_missing=False,
custom_package_file=None):
"""Uploads a local package file to the supplied cloud storage object.
By default local package files are expected to be found in the standardized
  location within the tar directory; however, a custom package file may be
  specified to upload from a different location. Package archives that do not
  have their URL field set will automatically be uploaded so that
someone accessing the package file from the cloud storage will also have
access to the package archives.
Args:
storage: Cloud storage object which supports PutFile and GetFile.
revision: Revision identifier the package should be associated with.
tar_dir: Root tar directory where archives live.
package_target: Package target of the package to archive.
package_name: Package name of the package to archive.
is_shared_package: Is this package shared among all package targets?
annotate: Print annotations for build bots?
skip_missing: Skip missing package archive files?
custom_package_file: File location for a custom package file.
Returns:
Returns remote download key for the uploaded package file.
"""
if custom_package_file is not None:
local_package_file = custom_package_file
else:
local_package_file = package_locations.GetLocalPackageFile(
tar_dir,
package_target,
package_name)
# Upload the package file and also upload any local package archives so
# that they are downloadable.
package_desc = package_info.PackageInfo(local_package_file,
skip_missing=skip_missing)
upload_package_desc = package_info.PackageInfo()
for archive_obj in package_desc.GetArchiveList():
archive_desc = archive_obj.GetArchiveData()
url = archive_desc.url
if archive_desc.hash and url is None:
if annotate:
print('@@@BUILD_STEP Archive:%s (upload)@@@' % archive_desc.name)
archive_file = package_locations.GetLocalPackageArchiveFile(
tar_dir,
archive_desc.name,
archive_desc.hash)
archive_hash = archive_info.GetArchiveHash(archive_file)
if archive_hash is None:
raise error.Error('Missing Archive File: %s' % archive_file)
elif archive_hash != archive_desc.hash:
raise error.Error(
'Archive hash does not match package hash: %s' % archive_file
+ '\n Archive Hash: %s' % archive_hash
+ '\n Package Hash: %s' % archive_desc.hash)
logging.warn('Missing archive URL: %s', archive_desc.name)
      logging.warn('Uploading archive to be publicly available...')
remote_archive_key = package_locations.GetRemotePackageArchiveKey(
archive_desc.name,
archive_desc.hash)
url = storage.PutFile(archive_file, remote_archive_key, clobber=True)
if annotate:
print('@@@STEP_LINK@download@%s@@@' % url)
updated_archive_obj = archive_obj.Copy(url=url)
upload_package_desc.AppendArchive(updated_archive_obj)
upload_package_file = local_package_file + '.upload'
pynacl.file_tools.MakeParentDirectoryIfAbsent(upload_package_file)
upload_package_desc.SavePackageFile(upload_package_file)
logging.info('Uploading package information: %s', package_name)
remote_package_key = package_locations.GetRemotePackageKey(
is_shared_package,
revision,
package_target,
package_name)
package_info.UploadPackageInfoFiles(storage, package_target, package_name,
remote_package_key, upload_package_file,
skip_missing=skip_missing,
annotate=annotate)
return remote_package_key
def ExtractPackageTargets(package_target_packages, tar_dir, dest_dir,
downloader=None, skip_missing=False,
overlay_tar_dir=None, quiet=False):
"""Extracts package targets from the tar directory to the destination.
Each package archive within a package will be verified before being
extracted. If a package archive does not exist or does not match the hash
stored within the package file, it will be re-downloaded before being
extracted.
Args:
package_target_packages: List of tuples of package target and package names.
tar_dir: Source tar directory where package archives live.
dest_dir: Root destination directory where packages will be extracted to.
    downloader: function which takes a url and a file path for downloading.
    skip_missing: Skip missing archive files rather than raising an error.
    overlay_tar_dir: Optional tar directory whose packages take precedence.
    quiet: Suppress tar extraction output.
"""
if downloader is None:
downloader = pynacl.gsd_storage.HttpDownload
for package_target, package_name in package_target_packages:
package_file = package_locations.GetLocalPackageFile(tar_dir,
package_target,
package_name)
package_desc = package_info.PackageInfo(package_file,
skip_missing=skip_missing)
dest_package_dir = package_locations.GetFullDestDir(dest_dir,
package_target,
package_name)
dest_package_file = package_locations.GetDestPackageFile(dest_dir,
package_target,
package_name)
# Get a list of overlay archives.
overlaid_archives = set()
if overlay_tar_dir:
overlay_file = package_locations.GetLocalPackageFile(overlay_tar_dir,
package_target,
package_name)
logging.debug('Checking overlaid package file: %s', overlay_file)
if os.path.isfile(overlay_file):
logging.info('Found overlaid package file: %s', overlay_file)
overlay_package_desc = package_info.PackageInfo(overlay_file,
skip_missing=True)
combined_archives = dict([(archive_obj.GetArchiveData().name,
archive_obj)
for archive_obj
in package_desc.GetArchiveList()])
for archive_obj in overlay_package_desc.GetArchiveList():
archive_desc = archive_obj.GetArchiveData()
if archive_desc.hash:
overlaid_archives.add(archive_desc.name)
combined_archives[archive_desc.name] = archive_obj
package_desc = package_info.PackageInfo()
for archive_name, archive_obj in combined_archives.iteritems():
package_desc.AppendArchive(archive_obj)
# Only do the extraction if the extract packages do not match.
if os.path.isfile(dest_package_file):
try:
dest_package_desc = package_info.PackageInfo(dest_package_file)
if dest_package_desc == package_desc:
logging.debug('Skipping extraction for package (%s)', package_name)
continue
      except Exception:
# Destination package file cannot be trusted, if invalid re-extract.
pass
# Delete the old package file before we extract.
os.unlink(dest_package_file)
if os.path.isdir(dest_package_dir):
logging.debug('Deleting old package directory: %s', dest_package_dir)
pynacl.file_tools.RemoveDirectoryIfPresent(dest_package_dir)
logging.info('Extracting package (%s) to directory: %s',
package_name, dest_package_dir)
archive_list = package_desc.GetArchiveList()
num_archives = len(archive_list)
for index, archive_obj in enumerate(archive_list):
archive_desc = archive_obj.GetArchiveData()
archive_file = None
if archive_desc.name in overlaid_archives:
archive_file = package_locations.GetLocalPackageArchiveFile(
overlay_tar_dir,
archive_desc.name,
archive_desc.hash)
logging.info('Using overlaid tar: %s', archive_file)
else:
archive_file = package_locations.GetLocalPackageArchiveFile(
tar_dir,
archive_desc.name,
archive_desc.hash)
      # Some archives may not have been downloaded yet (or may be stale), so
      # check the hash of each archive and re-download it if the hash does
      # not match.
archive_hash = archive_info.GetArchiveHash(archive_file)
if archive_hash != archive_desc.hash:
if archive_desc.url is None:
if skip_missing:
logging.info('Skipping extraction of missing archive: %s' %
archive_file)
continue
raise error.Error('Invalid archive file and URL: %s' % archive_file)
logging.warn('Archive missing, downloading: %s', archive_desc.name)
logging.info('Downloading %s: %s', archive_desc.name, archive_desc.url)
pynacl.file_tools.MakeParentDirectoryIfAbsent(archive_file)
downloader(archive_desc.url, archive_file)
archive_hash = archive_info.GetArchiveHash(archive_file)
if archive_hash != archive_desc.hash:
raise error.Error('Downloaded archive file does not match hash.'
' [%s] Expected %s, received %s.' %
(archive_file, archive_desc.hash, archive_hash))
destination_dir = os.path.join(dest_package_dir, archive_desc.extract_dir)
logging.info('Extracting %s (%d/%d)' %
(archive_desc.name, index+1, num_archives))
temp_dir = os.path.join(destination_dir, '.tmp')
pynacl.file_tools.RemoveDirectoryIfPresent(temp_dir)
os.makedirs(temp_dir)
tar_output = not quiet
tar = cygtar.CygTar(archive_file, 'r:*', verbose=tar_output)
curdir = os.getcwd()
os.chdir(temp_dir)
try:
tar.Extract()
tar.Close()
finally:
os.chdir(curdir)
temp_src_dir = os.path.join(temp_dir, archive_desc.tar_src_dir)
pynacl.file_tools.MoveAndMergeDirTree(temp_src_dir, destination_dir)
pynacl.file_tools.RemoveDirectoryIfPresent(temp_dir)
pynacl.file_tools.MakeParentDirectoryIfAbsent(dest_package_file)
package_desc.SavePackageFile(dest_package_file)
def CleanupTarDirectory(tar_dir):
"""Deletes any files within the tar directory that are not referenced.
Files such as package archives are shared between packages and therefore
non-trivial to delete. Package files may also change so old packages may
stay on the local hard drive even though they are not read anymore. This
function will walk through the tar directory and cleanup any stale files
it does not recognize.
Args:
tar_dir: Source tar directory where package archives live.
"""
# Keep track of the names of all known files and directories. Because of
# case insensitive file systems, we should lowercase all the paths so
# that we do not accidentally delete any files.
known_directories = set()
known_files = set()
for package_target, package_list in package_locations.WalkPackages(tar_dir):
for package_name in package_list:
package_file = package_locations.GetLocalPackageFile(tar_dir,
package_target,
package_name)
try:
package_desc = package_info.PackageInfo(package_file, skip_missing=True)
      except Exception:
continue
for archive_obj in package_desc.GetArchiveList():
archive_desc = archive_obj.GetArchiveData()
if not archive_desc.hash:
continue
archive_file = package_locations.GetLocalPackageArchiveFile(
tar_dir,
archive_desc.name,
archive_desc.hash)
log_file = package_locations.GetLocalPackageArchiveLogFile(archive_file)
known_files.add(archive_file.lower())
known_files.add(log_file.lower())
package_name = package_info.GetLocalPackageName(package_file)
package_directory = os.path.join(os.path.dirname(package_file),
package_name)
known_files.add(package_file.lower())
known_directories.add(package_directory.lower())
# We are going to be deleting all files that do not match any known files,
# so do a sanity check that this is an actual tar directory. If we have no
# known files or directories, we probably do not have a valid tar directory.
if not known_directories or not known_files:
raise error.Error('No packages found for tar directory: %s' % tar_dir)
for dirpath, dirnames, filenames in os.walk(tar_dir, topdown=False):
if dirpath.lower() in known_directories:
continue
for filename in filenames:
full_path = os.path.join(dirpath, filename)
if full_path.lower() in known_files:
continue
logging.debug('Removing stale file: %s', full_path)
os.unlink(full_path)
if not os.listdir(dirpath):
logging.debug('Removing stale directory: %s', dirpath)
os.rmdir(dirpath)
#
# Each command has 2 functions that describe it:
# 1. A parser function which specifies the extra command options each command
# will have.
# 2. An execution function which is called when a user actually executes
# the command.
#
def _ListCmdArgParser(subparser):
subparser.description = 'Lists package information.'
def _DoListCmd(arguments):
package_targets = collections.defaultdict(list)
for package_target, package in arguments.package_target_packages:
package_targets[package_target].append(package)
modes_dict = arguments.packages_desc.GetPackageModes()
if not modes_dict:
print('No Package Modes Found.')
else:
print('Listing Modes:')
for mode, package_list in modes_dict.iteritems():
print(' [%s]' % mode)
for package in package_list:
print(' ', package)
if arguments.mode:
print()
print('Current Mode Selected:', arguments.mode)
print()
print('Listing Package Targets and Packages:')
for package_target, packages in package_targets.iteritems():
print(' [%s]:' % package_target)
for package in sorted(packages):
print(' ', package)
def _ArchiveCmdArgParser(subparser):
subparser.description = 'Archive package archives to tar directory.'
subparser.add_argument(
'--archive-package', metavar='NAME', dest='archive__package',
required=True,
help='Package name archives will be packaged into.')
subparser.add_argument(
'--extra-archive', metavar='ARCHIVE', dest='archive__extra_archive',
action='append', default=[],
help='Extra archives that are expected to be built elsewhere.')
subparser.add_argument(
metavar='TAR(,SRCDIR(:EXTRACTDIR))(@URL,LOGURL)', dest='archive__archives',
nargs='+',
help='Package archive with an optional tar information and url.'
' SRCDIR is the root directory where files live inside of the tar.'
' EXTRACTDIR is the directory to extract files to relative to the'
' destination directory. The URL is where the package can be'
' downloaded from.')
subparser.add_argument(
'-x', '--extract', dest='archive__extract',
action='store_true', default=False,
help='Extract package archives after they have been archived.')
def _DoArchiveCmd(arguments):
package_target_packages = GetPackageTargetPackages(
arguments.archive__package,
arguments.package_target_packages
)
if not package_target_packages:
raise error.Error('Unknown package: %s.' % arguments.archive__package
+ ' Did you forget to add "$PACKAGE_TARGET/"?')
for package_target, package_name in package_target_packages:
ArchivePackageArchives(arguments.tar_dir,
package_target,
package_name,
arguments.archive__archives,
extra_archives=arguments.archive__extra_archive)
if arguments.archive__extract:
ExtractPackageTargets([(package_target, package_name)],
arguments.tar_dir,
arguments.dest_dir,
skip_missing=True,
quiet=arguments.quiet)
def _ExtractCmdArgParser(subparser):
subparser.description = 'Extract packages from tar directory.'
subparser.add_argument(
'--skip-missing', dest='extract__skip_missing',
action='store_true', default=False,
help='Skip missing archive files when extracting rather than erroring out.')
subparser.add_argument(
'--overlay-tar-dir', dest='overlay_tar_dir',
default=None,
help='Extracts tar directories as usual, except uses any packages' +
' found within the overlay tar directory first.')
def _DoExtractCmd(arguments):
ExtractPackageTargets(
arguments.package_target_packages,
arguments.tar_dir,
arguments.dest_dir,
skip_missing=arguments.extract__skip_missing,
overlay_tar_dir=arguments.overlay_tar_dir,
quiet=arguments.quiet)
def _UploadCmdArgParser(subparser):
subparser.description = 'Upload a package file.'
subparser.add_argument(
'--upload-package', metavar='NAME', dest='upload__package', required=True,
help='Package to upload.')
subparser.add_argument(
'--revision', metavar='ID', dest='upload__revision', required=True,
help='Revision of the package to upload.')
subparser.add_argument(
'--package-file', metavar='FILE', dest='upload__file',
default=None,
help='Use custom package file instead of standard package file found'
' in the tar directory.')
subparser.add_argument(
'--skip-missing', dest='upload__skip_missing',
action='store_true', default=False,
help='Skip missing archive files when uploading package archives.')
def _DoUploadCmd(arguments):
package_target_packages = GetPackageTargetPackages(
arguments.upload__package,
arguments.package_target_packages
)
if not package_target_packages:
raise error.Error('Unknown package: %s.' % arguments.upload__package
+ ' Did you forget to add "$PACKAGE_TARGET/"?')
for package_target, package_name in package_target_packages:
UploadPackage(
arguments.gsd_store,
arguments.upload__revision,
arguments.tar_dir,
package_target,
package_name,
arguments.packages_desc.IsSharedPackage(package_name),
annotate=arguments.annotate,
skip_missing=arguments.upload__skip_missing,
custom_package_file=arguments.upload__file
)
def _SyncCmdArgParser(subparser):
subparser.description = 'Download package archives to the tar directory.'
subparser.add_argument(
'--revision', metavar='ID', dest='sync__revision',
default=None,
help='Revision identifier of the packages to download.')
subparser.add_argument(
'--include-logs', dest='sync__include_logs',
action='store_true', default=False,
help='Also download logs next to each archive if available.')
subparser.add_argument(
'-x', '--extract', dest='sync__extract',
action='store_true', default=False,
help='Extract package archives after they have been downloaded.')
def _DoSyncCmd(arguments):
for package_target, package_name in arguments.package_target_packages:
if arguments.sync__revision is None:
# When the sync revision number is not specified, use the set
# revision number found in the revision directory.
revision_file = package_locations.GetRevisionFile(
arguments.revisions_dir,
package_name)
revision_desc = revision_info.RevisionInfo(
arguments.packages_desc,
revision_file)
package_desc = revision_desc.GetPackageInfo(package_target)
revision_num = revision_desc.GetRevisionNumber()
else:
# When the sync revision number is specified, find the package to
# download remotely using the revision.
revision_num = arguments.sync__revision
remote_package_key = package_locations.GetRemotePackageKey(
arguments.packages_desc.IsSharedPackage(package_name),
arguments.sync__revision,
package_target,
package_name)
with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
temp_package_file = os.path.join(
work_dir,
os.path.basename(remote_package_key) + TEMP_SUFFIX)
package_info.DownloadPackageInfoFiles(
temp_package_file,
remote_package_key,
downloader=arguments.gsd_store.GetFile)
package_desc = package_info.PackageInfo(temp_package_file)
DownloadPackageArchives(
arguments.tar_dir,
package_target,
package_name,
package_desc,
revision_num=revision_num,
include_logs=arguments.sync__include_logs)
CleanTempFiles(arguments.tar_dir)
if arguments.sync__extract:
ExtractPackageTargets(
arguments.package_target_packages,
arguments.tar_dir,
arguments.dest_dir,
quiet=arguments.quiet)
def _SetRevisionCmdArgParser(subparser):
subparser.description = 'Specify the revision of a package.'
subparser.add_argument(
'--revision-package', metavar='NAME', dest='setrevision__package',
action='append', default=[],
help='Package name to set revision of.')
subparser.add_argument(
'--revision-set', metavar='SET-NAME', dest='setrevision__revset',
action='append', default=[],
help='Revision set to set revision for.')
subparser.add_argument(
'--revision', metavar='ID', dest='setrevision__revision',
required=True,
help='Revision identifier of the package to set.')
def _DoSetRevisionCmd(arguments):
packages_list = arguments.setrevision__package
revision_sets = arguments.setrevision__revset
revision_num = arguments.setrevision__revision
for revision_set in revision_sets:
set_packages = arguments.packages_desc.GetRevisionSet(revision_set)
if set_packages is None:
raise error.Error('Invalid Revision Set: %s' % revision_set)
packages_list.extend(set_packages)
if not packages_list:
raise error.Error('No revision packages have been supplied.')
for package_name in packages_list:
revision_desc = revision_info.RevisionInfo(arguments.packages_desc)
revision_desc.SetRevisionNumber(revision_num)
custom_package_targets = GetPackageTargetPackages(package_name, [])
if not custom_package_targets:
package_targets = arguments.packages_desc.GetPackageTargetsForPackage(
package_name
)
else:
package_targets = [target[0] for target in custom_package_targets]
first_target = custom_package_targets[0]
package_name = first_target[1]
for package_target in package_targets:
with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
remote_package_key = package_locations.GetRemotePackageKey(
arguments.packages_desc.IsSharedPackage(package_name),
revision_num,
package_target,
package_name)
temp_package_file = os.path.join(
work_dir,
os.path.basename(remote_package_key) + TEMP_SUFFIX)
package_info.DownloadPackageInfoFiles(
temp_package_file,
remote_package_key,
downloader=arguments.gsd_store.GetFile)
package_desc = package_info.PackageInfo(temp_package_file)
logging.info('Setting %s:%s to revision %s',
package_target, package_name, revision_num)
revision_desc.SetTargetRevision(
package_name,
package_target,
package_desc)
revision_file = package_locations.GetRevisionFile(
arguments.revisions_dir,
package_name)
pynacl.file_tools.MakeParentDirectoryIfAbsent(revision_file)
revision_desc.SaveRevisionFile(revision_file)
CleanTempFiles(arguments.revisions_dir)
def _GetRevisionCmdArgParser(subparser):
subparser.description = 'Get the revision of a package.'
subparser.add_argument(
'--revision-package', metavar='NAME', dest='getrevision__packages',
action='append', default=[],
help='Package name to get revision of.')
subparser.add_argument(
'--revision-set', metavar='SET-NAME', dest='getrevision__revset',
action='append', default=[],
      help='Revision set to get the revision for.')
def _DoGetRevisionCmd(arguments):
packages_list = arguments.getrevision__packages
revision_sets = arguments.getrevision__revset
for revision_set in revision_sets:
set_packages = arguments.packages_desc.GetRevisionSet(revision_set)
if set_packages is None:
raise error.Error('Invalid Revision Set: %s' % revision_set)
packages_list.extend(set_packages)
if not packages_list:
raise error.Error('No revision packages have been supplied.')
revision_number = None
for package_name in packages_list:
custom_package_targets = GetPackageTargetPackages(package_name, [])
if custom_package_targets:
custom_target, package_name = custom_package_targets[0]
revision_file = package_locations.GetRevisionFile(arguments.revisions_dir,
package_name)
if not os.path.isfile(revision_file):
raise error.Error('No revision set for package: %s.' % package_name)
revision_desc = revision_info.RevisionInfo(arguments.packages_desc,
revision_file)
package_revision = revision_desc.GetRevisionNumber()
if revision_number is None:
revision_number = package_revision
elif revision_number != package_revision:
logging.error('Listing Get Revision Packages:')
for package in packages_list:
logging.error(' %s', package)
raise error.Error('Package revisions are not set to the same revision.')
print(revision_number)
def _RevPackagesCmdArgParser(subparser):
subparser.description = 'Prints list of packages for a revision set name.'
subparser.add_argument(
'--revision-set', metavar='NAME', dest='revpackages__name',
required=True,
help='Name of the package or revision set.')
def _DoRevPackagesCmd(arguments):
revision_package = arguments.revpackages__name
packages_list = [revision_package]
# Check if the package_name is a revision set.
revision_set = arguments.packages_desc.GetRevisionSet(revision_package)
if revision_set is not None:
packages_list = revision_set
for package_name in packages_list:
print(package_name)
def _FillEmptyTarsParser(subparser):
subparser.description = 'Fill missing archives with empty ones in a package.'
subparser.add_argument(
'--fill-package', metavar='NAME', dest='fillemptytars_package',
required=True,
help='Package name to fill empty archives of.')
def _DoFillEmptyTarsCmd(arguments):
package_target_packages = GetPackageTargetPackages(
arguments.fillemptytars_package,
arguments.package_target_packages
)
if not package_target_packages:
raise error.Error('Unknown package: %s.' % arguments.fillemptytars_package
+ ' Did you forget to add "$PACKAGE_TARGET/"?')
for package_target, package_name in package_target_packages:
package_path = package_locations.GetLocalPackageFile(arguments.tar_dir,
package_target,
package_name)
package_desc = package_info.PackageInfo(package_path, skip_missing=True)
output_package_desc = package_info.PackageInfo()
for archive in package_desc.GetArchiveList():
# If archive does not exist, fill it with an empty one.
archive_data = archive.GetArchiveData()
if archive_data.hash:
output_package_desc.AppendArchive(archive)
else:
logging.info('Filling missing archive: %s.', archive_data.name)
if (archive_data.name.endswith('.tar.gz') or
archive_data.name.endswith('.tgz')):
mode = 'w:gz'
elif archive_data.name.endswith('.bz2'):
mode = 'w:bz2'
elif archive_data.name.endswith('.tar'):
mode = 'w:'
else:
raise error.Error('Unknown archive type: %s.' % archive_data.name)
temp_archive_file = os.path.join(arguments.tar_dir, archive_data.name)
tar_file = cygtar.CygTar(temp_archive_file, mode)
tar_file.Close()
tar_hash = archive_info.GetArchiveHash(temp_archive_file)
archive_file = package_locations.GetLocalPackageArchiveFile(
arguments.tar_dir,
archive_data.name,
tar_hash)
pynacl.file_tools.MakeParentDirectoryIfAbsent(archive_file)
os.rename(temp_archive_file, archive_file)
empty_archive = archive_info.ArchiveInfo(name=archive_data.name,
hash=tar_hash)
output_package_desc.AppendArchive(empty_archive)
output_package_desc.SavePackageFile(package_path)
def _RecalcRevsParser(subparser):
subparser.description = 'Recalculates hashes for files in revision directory.'
def _DoRecalcRevsCmd(arguments):
for json_file in os.listdir(arguments.revisions_dir):
if json_file.endswith('.json'):
revision_file = os.path.join(arguments.revisions_dir, json_file)
revision_desc = revision_info.RevisionInfo(arguments.packages_desc)
revision_desc.LoadRevisionFile(revision_file, skip_hash_verify=True)
revision_desc.SaveRevisionFile(revision_file)
def _CleanupParser(subparser):
subparser.description = 'Cleans up any unused package archives files.'
def _DoCleanupCmd(arguments):
CleanupTarDirectory(arguments.tar_dir)
CommandFuncs = collections.namedtuple(
'CommandFuncs',
['parse_func', 'do_cmd_func'])
COMMANDS = {
'list': CommandFuncs(_ListCmdArgParser, _DoListCmd),
'archive': CommandFuncs(_ArchiveCmdArgParser, _DoArchiveCmd),
'extract': CommandFuncs(_ExtractCmdArgParser, _DoExtractCmd),
'upload': CommandFuncs(_UploadCmdArgParser, _DoUploadCmd),
'sync': CommandFuncs(_SyncCmdArgParser, _DoSyncCmd),
'setrevision': CommandFuncs(_SetRevisionCmdArgParser, _DoSetRevisionCmd),
'getrevision': CommandFuncs(_GetRevisionCmdArgParser, _DoGetRevisionCmd),
'revpackages': CommandFuncs(_RevPackagesCmdArgParser, _DoRevPackagesCmd),
'fillemptytars': CommandFuncs(_FillEmptyTarsParser, _DoFillEmptyTarsCmd),
'recalcrevisions': CommandFuncs(_RecalcRevsParser, _DoRecalcRevsCmd),
'cleanup': CommandFuncs(_CleanupParser, _DoCleanupCmd),
}
def ParseArgs(args):
parser = argparse.ArgumentParser()
host_platform = pynacl.platform.GetOS()
host_arch = pynacl.platform.GetArch3264()
# List out global options for all commands.
parser.add_argument(
'-v', '--verbose', dest='verbose',
action='store_true', default=False,
help='Verbose output')
parser.add_argument(
'-q', '--quiet', dest='quiet',
action='store_true', default=False,
help='Quiet output')
parser.add_argument(
'--platform', dest='host_platform',
default=host_platform,
help='Custom platform other than the current (%s).' % host_platform)
parser.add_argument(
'--arch', dest='host_arch',
default=host_arch,
help='Custom architecture other than the current (%s).' % host_arch)
parser.add_argument(
'--package-targets', dest='package_targets',
default=None,
      help='Custom package targets specified as comma separated names. Defaults'
' to package targets defined for host platform and architecture inside'
' of the packages json file.')
parser.add_argument(
'--mode', dest='mode',
default=None,
help='Specify a package mode to filter by, modes are specified within'
' the packages json file. For a list of modes use the "list" command.')
parser.add_argument(
'--packages', dest='packages',
default=None,
help='Custom packages specified as comma separated package names. Custom'
' packages not defined by the packages json file must be prefixed by'
' the package_target directory (IE. $PACKAGE_TARGET/$PACKAGE).')
parser.add_argument(
'--append', metavar='PACKAGE', dest='append_packages',
action='append', default=[],
help='Append extra package to current list of packages.')
parser.add_argument(
'--exclude', metavar='PACKAGE', dest='exclude_packages',
action='append', default=[],
help='Exclude package from current list of packages.')
parser.add_argument(
'--packages-json', dest='packages_json',
default=DEFAULT_PACKAGES_JSON, type=argparse.FileType('rt'),
help='Packages description file.'
' [Default: %s]' % DEFAULT_PACKAGES_JSON)
parser.add_argument(
'--revisions-dir', dest='revisions_dir',
default=DEFAULT_REVISIONS_DIR,
help='Revisions directory where packages revisions will be found.')
parser.add_argument(
'--dest-dir', dest='dest_dir',
default=DEFAULT_DEST_DIR,
help='Destination directory where all the packages will be extracted to.')
parser.add_argument(
'--tar-dir', dest='tar_dir',
default=None,
help='Directory for package archive files. Defaults to "$DEST-DIR/.tars".')
parser.add_argument(
'--annotate', dest='annotate',
action='store_true', default=False,
help='Print out build bot annotations.')
parser.add_argument(
'--cloud-bucket', dest='cloud_bucket',
default=DEFAULT_CLOUD_BUCKET,
help='Google storage cloud bucket name.'
' [Default: %s]' % DEFAULT_CLOUD_BUCKET)
# Add subparsers for all commands. These are flags for specific commands,
# IE. [options] command [command-options]
command_parser = parser.add_subparsers(title='command', dest='command')
for command, cmd_funcs in COMMANDS.items():
sub_parser = command_parser.add_parser(command)
cmd_funcs.parse_func(sub_parser)
arguments = parser.parse_args(args)
pynacl.log_tools.SetupLogging(
verbose=arguments.verbose, quiet=arguments.quiet)
if arguments.tar_dir is None:
arguments.tar_dir = os.path.join(arguments.dest_dir, '.tars')
# Parse the package description up front and store it into the arguments
# object. Almost all the commands need to use this information.
packages_desc = packages_info.PackagesInfo(arguments.packages_json)
arguments.packages_desc = packages_desc
# Based on the host platform and host architecture, we can determine the set
# of package targets used from the packages description. Minimize platform
# and architecture errors by standardizing the names using pynacl.platform.
if arguments.package_targets is None:
package_targets = packages_desc.GetPackageTargets(
pynacl.platform.GetOS(arguments.host_platform),
pynacl.platform.GetArch3264(arguments.host_arch))
else:
package_targets = arguments.package_targets.split(',')
  # If the packages argument was not set, use the default list of packages
# for each package target.
packages_set = set()
if arguments.packages is None:
for package_target in package_targets:
packages = packages_desc.GetPackages(package_target)
if packages is None:
raise error.Error('No packages defined for Package Target: %s.' %
package_target)
packages_set.update(packages)
else:
packages_set.update(arguments.packages.split(','))
# If a mode was set, only use packages listed in the mode.
if arguments.mode:
modes_dict = packages_desc.GetPackageModes()
if arguments.mode not in modes_dict:
logging.info('Valid Package Modes:')
for mode in modes_dict:
logging.info(' %s', mode)
raise error.Error('Invalid Package Mode: %s.' % arguments.mode)
packages_set.intersection_update(modes_dict[arguments.mode])
# Append/exclude any extra packages that were specified.
packages_set.update(arguments.append_packages)
packages_set.difference_update(arguments.exclude_packages)
  # Build a dictionary that maps each package to its respective package
  # targets. Packages may exist in multiple package targets, so the key is
  # a package name and the value is a list of package targets.
package_targets_dict = collections.defaultdict(list)
for package_target in package_targets:
for package in packages_desc.GetPackages(package_target):
package_targets_dict[package].append(package_target)
# Use the list of packages to determine the set of package target packages
# we are operating on, custom package targets will have the package target
# inside of the name of the package name (see help for "--packages" argument).
# The package_target_packages is a list of tuples (package_target, package),
# for every package along with the associated package target.
package_target_packages = []
for package in sorted(packages_set):
package_targets = package_targets_dict.get(package, None)
if package_targets is None:
custom_package_targets = GetPackageTargetPackages(package, [])
if not custom_package_targets:
raise error.Error('Invalid custom package: "%s".'
' Expected $PACKAGE_TARGET/$PACKAGE' % package)
package_target_packages.extend(custom_package_targets)
else:
for package_target in package_targets:
package_target_packages.append((package_target, package))
arguments.package_target_packages = package_target_packages
# Create a GSD Storage object for those who need it.
cloud_bucket = arguments.cloud_bucket
gsd_store = pynacl.gsd_storage.GSDStorage(cloud_bucket, [cloud_bucket])
arguments.gsd_store = gsd_store
return arguments
def main(args):
# If verbose is on, do not catch error.Error() exceptions separately but
# allow python to catch the errors and print out the entire callstack.
# Note that we cannot rely on ParseArgs() to parse if verbose is on, because
# ParseArgs() could throw an exception.
if '-v' in args or '--verbose' in args:
arguments = ParseArgs(args)
return COMMANDS[arguments.command].do_cmd_func(arguments)
else:
try:
arguments = ParseArgs(args)
return COMMANDS[arguments.command].do_cmd_func(arguments)
except error.Error as e:
sys.stderr.write('package_version: ' + str(e) + '\n')
return 1
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
{
"content_hash": "af8a6d6c845234af814c3cba416d16a0",
"timestamp": "",
"source": "github",
"line_count": 1293,
"max_line_length": 80,
"avg_line_length": 40.124516627996904,
"alnum_prop": 0.6715560609857173,
"repo_name": "endlessm/chromium-browser",
"id": "4a96086ca1621722c639aede0072547a750fa600",
"size": "52071",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "native_client/build/package_version/package_version.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from django.shortcuts import render
from django.http import HttpResponse
from django.shortcuts import render_to_response
# Create your views here.
def index(request):
return render(request,'account/login.html')
|
{
"content_hash": "7c4d9a65162871d6d931a9efb7f5a7c5",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 47,
"avg_line_length": 30.857142857142858,
"alnum_prop": 0.8009259259259259,
"repo_name": "cmput404wi16/metablog",
"id": "4029d3b58b979f3aac8ee175576901dc7da9fe42",
"size": "216",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "account/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "35"
},
{
"name": "HTML",
"bytes": "1089"
},
{
"name": "Python",
"bytes": "15307"
},
{
"name": "Shell",
"bytes": "1564"
}
],
"symlink_target": ""
}
|
"""Utilities for computing accuracy metrics on retrieved neighbors."""
import collections
import json
import os
from absl import flags
import numpy as np
from sklearn.metrics import average_precision_score
import tensorflow.compat.v1 as tf
flags.DEFINE_integer("max_examples", 1000, "Max num of candidates in records.")
FLAGS = flags.FLAGS
RawResult = collections.namedtuple(
"RawResult", ["q_id", "cand_num", "prob", "label_id", "prior_score"])
def results_file_outputs(output_dir, global_step, eval_name):
return os.path.join(output_dir, eval_name + "_" + str(global_step) + ".tsv")
def results_file_metrics(output_dir, global_step, eval_name):
return os.path.join(output_dir,
eval_name + "_" + str(global_step) + "_metrics.tsv")
def rr(y_true):
"""Return the reciprical rank of the first positive candidate."""
for index in range(0, len(y_true)):
if y_true[index]:
return 1.0 / (index + 1)
return 0.0
def bounded_rr(y_true, k):
rr_full = rr(y_true)
if rr_full >= 1 / k:
return rr_full
return 0.0
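# Quick sanity checks for the two helpers above (examples added for
# clarity, derived from the definitions): with y_true = [0, 0, 1] the
# first positive sits at rank 3, so rr([0, 0, 1]) == 1.0 / 3;
# bounded_rr([0, 0, 1], 2) == 0.0 because the hit falls outside the top 2,
# while bounded_rr([0, 0, 1], 5) == 1.0 / 3.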
def rank_metrics(raw_results_list,
output_dir,
global_step,
eval_name,
save_raw=True):
"""Computes recall at N and other metrics for the ranking model."""
to_return = {}
predictions_per_query = collections.OrderedDict()
n_queries = 0
max_examples = FLAGS.max_examples
total_reciprocal_rank = 0.0
total_average_precision = 0.0
for elem in raw_results_list:
if elem.q_id not in predictions_per_query:
predictions_per_query[elem.q_id] = []
predictions_per_query[elem.q_id].append(elem)
correct_top = np.zeros(max_examples)
mrr_s = {}
map_s = {}
ks_of_interest = [10, 20, 100, 200, 1000]
for k in ks_of_interest:
mrr_s[k] = 0
map_s[k] = 0
for _, pred_q in predictions_per_query.items():
sorted_pred = sorted(pred_q, key=lambda x: (x.prob), reverse=True)
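    # Deduplicate by candidate id, keeping only the highest-scoring
    # occurrence of each candidate (the list is already sorted by
    # descending probability).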
unique_sorted_pred = []
set_cands = set()
for cand in sorted_pred:
if cand.cand_num in set_cands:
continue
set_cands.add(cand.cand_num)
unique_sorted_pred.append(cand)
sorted_pred = unique_sorted_pred
cands = [x.cand_num for x in pred_q]
s_cands = set(cands)
if len(unique_sorted_pred) < len(s_cands):
print("Multiple passages from document.")
if len(s_cands) > max_examples:
print("Too many examples!")
correct_example_index = max_examples
for i in range(len(sorted_pred)):
if sorted_pred[i].label_id == 1:
correct_example_index = i
break
# collect labels and scores for average precision
y_true = []
y_scores = []
for elem in sorted_pred:
y_true.append(elem.label_id)
y_scores.append(elem.prob)
mrr_increment = rr(y_true)
precision_increment = 0.0
if mrr_increment > 0:
precision_increment = average_precision_score(y_true, y_scores)
total_reciprocal_rank += mrr_increment
total_average_precision += precision_increment
# get all mrr and map quantities of interest
for k in ks_of_interest:
mrr_k = bounded_rr(y_true, k)
mrr_s[k] += mrr_k
if mrr_k > 0:
map_s[k] += average_precision(y_true, y_scores, k)
j = correct_example_index
n_queries += 1
while j < max_examples:
correct_top[j] += 1
j += 1
for i in range(max_examples):
correct_top[i] = correct_top[i] / n_queries
print("Queries: %d", n_queries)
print(correct_top)
to_return["recall_at_1"] = correct_top[0]
to_return["recall_at_3"] = correct_top[2]
to_return["recall_at_5"] = correct_top[4]
to_return["recall_at_10"] = correct_top[9]
to_return["recall_at_100"] = correct_top[99]
to_return["recall_at_200"] = correct_top[199]
to_return["recall_at_" + str(max_examples)] = correct_top[max_examples - 1]
to_return["map"] = total_average_precision / n_queries
to_return["mrr"] = total_reciprocal_rank / n_queries
for k in ks_of_interest:
to_return["mrr_" + str(k)] = mrr_s[k] / n_queries
to_return["map_" + str(k)] = map_s[k] / n_queries
out_predictions = results_file_outputs(output_dir, global_step, eval_name)
out_metrics = results_file_metrics(output_dir, global_step, eval_name)
if save_raw:
write_predictions(raw_results_list, out_predictions)
write_metrics(to_return, out_metrics)
print(to_return)
print("max examples " + str(max_examples))
return to_return
def write_predictions(raw_results_list, out_file):
"""Writing out the predictions to a file."""
print("writing predictions to " + out_file)
with tf.io.gfile.GFile(out_file, "w") as writer:
writer.write("QueryID\tDocId\tModelScore\tLabel\tPriorScore\n")
for raw_result in raw_results_list:
writer.write(
str(raw_result.q_id) + "\t" + str(raw_result.cand_num) + "\t" +
str(raw_result.prob) + "\t" + str(raw_result.label_id) + "\t" +
str(raw_result.prior_score) + "\n")
def average_precision(y_true, y_scores, k):
rr_k = bounded_rr(y_true, k)
if rr_k > 0:
return average_precision_score(y_true[:k], y_scores[:k])
else:
return 0.0
def mean_squared_error(pred_list):
  """Despite its name, returns the mean *absolute* difference between the
  model probability and the prior score."""
  to_return = 0.0
  for elem in pred_list:
    to_return += abs(elem.prob - elem.prior_score)
  return to_return / len(pred_list)
def write_metrics(results, out_file):
"""Writing out metrics to a file."""
print("writing stats to " + out_file)
with tf.io.gfile.GFile(out_file, "w") as writer:
writer.write(json.dumps(results, indent=2, sort_keys=True) + "\n")
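# --- Hedged usage sketch (editor's addition, not part of the original
# module): runs rank_metrics on two tiny queries. The temporary output
# directory and the FLAGS bootstrap call are assumptions for illustration.
if __name__ == "__main__":
  import tempfile
  FLAGS(["metrics_demo"])  # parse absl flags so FLAGS.max_examples is set
  demo = [
      RawResult(q_id=0, cand_num=0, prob=0.9, label_id=1, prior_score=0.5),
      RawResult(q_id=0, cand_num=1, prob=0.4, label_id=0, prior_score=0.2),
      RawResult(q_id=1, cand_num=0, prob=0.3, label_id=0, prior_score=0.1),
      RawResult(q_id=1, cand_num=1, prob=0.8, label_id=1, prior_score=0.7),
  ]
  rank_metrics(demo, tempfile.mkdtemp(), global_step=0, eval_name="demo")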
|
{
"content_hash": "1585cd75fa7530771a1728669320b6e9",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 79,
"avg_line_length": 30.227027027027027,
"alnum_prop": 0.6389484978540773,
"repo_name": "google-research/language",
"id": "8cdb0a77571c9ce86ba748125a93c2c04f132f65",
"size": "6207",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "language/multivec/models/metrics.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "9834"
},
{
"name": "CSS",
"bytes": "602"
},
{
"name": "HTML",
"bytes": "25162"
},
{
"name": "JavaScript",
"bytes": "8857"
},
{
"name": "Jupyter Notebook",
"bytes": "1505066"
},
{
"name": "Python",
"bytes": "7139472"
},
{
"name": "Shell",
"bytes": "183709"
}
],
"symlink_target": ""
}
|
import sys
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class RedisEnterpriseManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
"""Configuration for RedisEnterpriseManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The ID of the target subscription. Required.
:type subscription_id: str
:keyword api_version: Api Version. Default value is "2022-01-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
super(RedisEnterpriseManagementClientConfiguration, self).__init__(**kwargs)
api_version: Literal["2022-01-01"] = kwargs.pop("api_version", "2022-01-01")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
self.credential = credential
self.subscription_id = subscription_id
self.api_version = api_version
self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
kwargs.setdefault("sdk_moniker", "mgmt-redisenterprise/{}".format(VERSION))
self._configure(**kwargs)
def _configure(self, **kwargs: Any) -> None:
self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get("authentication_policy")
if self.credential and not self.authentication_policy:
self.authentication_policy = ARMChallengeAuthenticationPolicy(
self.credential, *self.credential_scopes, **kwargs
)
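# --- Hedged usage sketch (editor's addition, not part of the generated
# file). DefaultAzureCredential lives in the separate azure-identity
# package, and the subscription id below is a placeholder:
# from azure.identity import DefaultAzureCredential
# config = RedisEnterpriseManagementClientConfiguration(
#     credential=DefaultAzureCredential(),
#     subscription_id="00000000-0000-0000-0000-000000000000",
# )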
|
{
"content_hash": "2ff49598cb7f9d6454c827ffd9c6fa55",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 114,
"avg_line_length": 51.59375,
"alnum_prop": 0.7201695941853422,
"repo_name": "Azure/azure-sdk-for-python",
"id": "057dc74d9708363de1bc864613da62efdce82dde",
"size": "3770",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/redisenterprise/azure-mgmt-redisenterprise/azure/mgmt/redisenterprise/_configuration.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
from numpy import array, zeros, zeros_like, sqrt, diagflat, repeat
from .parsers import PdbParser
class UniverseFactory(object):
"""docstring for UniverseFactory"""
def __init__(self):
self.atoms = []
self.pdb_parser = PdbParser()
def add_atom(self, atom):
"""docstring"""
if atom.serial_num is None:
atom.serial_num = len(self.atoms)
self.atoms.append(atom)
def create_universe(self):
return Universe(self.atoms)
def load_atoms_from_file(self, filename):
if filename.endswith(".pdb"):
for a in self.pdb_parser.iter_atoms_in_pdb_file(filename):
self.add_atom(a)
else:
print "Expected a .pdb file, got %s" % filename
return
class Universe(object):
"""docstring for Universe"""
def __init__(self, atoms):
self.atoms = atoms
self.initialize_matrices()
def __len__(self):
return len(self.atoms)
def __iter__(self):
for a in self.atoms:
yield a
def initialize_matrices(self):
masses = [a.mass for a in self.atoms]
coords = [(a.x, a.y, a.z) for a in self.atoms]
charges = [a.charge for a in self.atoms]
radii = [a.radius for a in self.atoms]
self.mass_array = array(masses)
self.coord_array = array(coords)
self.charge_array = array(charges)
self.radius_array = array(radii)
inv_sqrt_diag = 1./sqrt( repeat(self.mass_array, 3) )
self.M = diagflat(inv_sqrt_diag)
def get_inv_sqrt_mass_matrix(self):
return self.M
def get_coords(self):
return self.coord_array
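# --- Hedged usage sketch (editor's addition): assumes a .pdb file on
# disk; Atom objects come from PdbParser in .parsers, not shown here.
# factory = UniverseFactory()
# factory.load_atoms_from_file("structure.pdb")
# universe = factory.create_universe()
# print len(universe), universe.get_coords().shape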
|
{
"content_hash": "38269a73ea913496961cd55900401923",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 70,
"avg_line_length": 29.45614035087719,
"alnum_prop": 0.5848719475878499,
"repo_name": "grollins/sardine",
"id": "ba6432e55f2aa3ae4452079ce88ae2643549df2f",
"size": "1679",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sardine/universe.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "56239"
},
{
"name": "Shell",
"bytes": "324"
}
],
"symlink_target": ""
}
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "elevendance.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
{
"content_hash": "dc136c46893e98266b328f3394cb9751",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 75,
"avg_line_length": 25.77777777777778,
"alnum_prop": 0.7155172413793104,
"repo_name": "paulmand3l/elevendance",
"id": "d65dfe9a62dcb5868277f45503a5c2e062a7d176",
"size": "254",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "3617"
},
{
"name": "Python",
"bytes": "17733"
}
],
"symlink_target": ""
}
|
"""
Created on Mon Apr 24 12:25:17 2017
@author: dongyu
"""
import os
import utm
import glob
import shutil
import time
import lib
import lib.SUNTANS
import hydro_wrapper
import oilspill_wrapper
from DownloadTool import downloadROMS
from FileTool import increaseT2
from WindTool import NCEP_wind, TAMU_NCEP_wind
from Probability_map import Pmap
from Blended_model import blend
import pdb
class upper_wrapper(object):
"""
The uppermost level of wrapper
"""
# Switches to different models
hydro_model='BLENDED' # hydrodynamic model options: 'SUNTANS', 'ROMS', 'BLENDED'
hydro_run=True
# ROMS data source
ROMS_datasource='online'
# General options
ishindcast=True
runGNOME=False
runTracPy=False
## GNOME settings
gnome_subset=False
gnome_bbox=None
probability_map=False
google_earth=False
mpl = 8
number = 10 # number of ensembles
interval = 10800 # seconds
OBC_opt = 'file' # Type 3 boundary condition option: 'constant',
#'file','OTIS', 'ROMS', 'ROMSOTIS','ROMSFILE', 'ROMSOTISFILE'
IC_opt = 'SUNTANS'
def __init__(self,**kwargs):
self.__dict__.update(kwargs)
def __call__(self,starttime, endtime, starttime2, period, init_latlon=[28.353786, -95.315109]):
## hydrodynamic time
self.starttime = starttime
self.endtime = endtime
## oil spill time
self.starttime2 = starttime2
self.period = period
self.init_latlon = init_latlon
if self.hydro_model == 'SUNTANS':
self.run_suntans()
elif self.hydro_model == 'ROMS':
self.run_roms()
elif self.hydro_model == 'BLENDED':
self.run_blended()
else:
raise Exception, 'Need to set the hydro_model parameter !!'
def run_suntans(self):
"""
use SUNTANS velocity to run GNOME
"""
(utm_x,utm_y)=utm.from_latlon(self.init_latlon[0], self.init_latlon[1])[0:2]
for i in range(self.number):
start = time.time()
print "Running simulation #%s !!\n"%str(i)
if self.hydro_run:
#### run SUNTANS ####
hydro_wrapper.runSUNTANS(self.starttime, self.endtime, self.OBC_opt, self.IC_opt, ROMS_datasource=self.ROMS_datasource)
if self.runGNOME:
## Collect SUNTANS file
basedir = os.getcwd()
os.chdir(basedir+'/SUNTANS/rundata')
ncfiles = []
for ff in glob.glob("GalvCoarse_0*"):
ncfiles.append(ff)
os.chdir(basedir)
SUNTANS_file = []
for f in ncfiles:
SUNTANS_file.append('%s/%s'%(basedir+'/SUNTANS/rundata', f))
## Prepare for GNOME run
GNOME_dir = "GNOME/%s"%str(i)
if os.path.exists(GNOME_dir):
shutil.rmtree(GNOME_dir)
os.makedirs(GNOME_dir)
SUNTANS_out = '%s/txsuntans.nc'%GNOME_dir
oilspill_wrapper.init_model(i, opt='SUNTANS')
oilspill_wrapper.Tx_SUNTANS(SUNTANS_file, SUNTANS_out)
print "Forecast simulation, downloading TAMU-NCEP wind !!!\n"
subset_wind = True
TNW = TAMU_NCEP_wind(subset_wind)
TNW.writeGNOME('%s/wind.nc'%GNOME_dir)
print 'running GNOME !!!\n'
oilspill_wrapper.run_mul_GNOME(i, utm_x, utm_y, self.starttime2, self.period, 900, opt='SUNTANS')
oilspill_wrapper.GNOME_GM_visualization(i, opt='SUNTANS')
oilspill_wrapper.GNOME_GE_animation(i, self.starttime2, opt='SUNTANS')
#### pause a while for new data to be available ####
end = time.time()
timer(start, end, i, self.number, self.interval)
if self.runGNOME and self.probability_map:
oilspill_wrapper.ensemble_combination(self.number, opt='SUNTANS')
print 'creating probability map!!!\n'
bbox=[-95.22,-94.44,28.80,29.85] # the map range
Pmap('GNOME_combined.nc', 400, 400, self.starttime2, bbox, self.mpl, self.google_earth)
def run_roms(self):
"""
use ROMS velocity to run GNOME
A good testing date that the particles will hit SUNTANS domain is 2014-08-22~2014-08-29
A good testing initial location is 28.353786, -95.315109
"""
(utm_x,utm_y)=utm.from_latlon(self.init_latlon[0], self.init_latlon[1])[0:2]
for i in range(self.number):
start = time.time()
print "Running simulation #%s !!\n"%str(i)
if self.hydro_run:
#### download ROMS ####
downloadROMS(self.starttime, self.endtime, self.ROMS_datasource, ROMSsrc='forecast')
if self.runGNOME:
## Prepare for GNOME run
GNOME_dir = "GNOME/%s"%str(i)
if os.path.exists(GNOME_dir):
shutil.rmtree(GNOME_dir)
os.makedirs(GNOME_dir)
ROMS_file='DATA/txla_subset_HIS.nc'
ROMS_out = '%s/hiroms_ss_rho.nc'%GNOME_dir
oilspill_wrapper.init_model(i, opt='ROMS')
oilspill_wrapper.HIROMS(ROMS_file, ROMS_out)
#### wind ####
print "Forecast simulation, downloading TAMU-NCEP wind !!!\n"
subset_wind = False
TNW = TAMU_NCEP_wind(subset_wind)
TNW.writeGNOME('%s/wind.nc'%GNOME_dir)
print 'running GNOME !!!\n'
oilspill_wrapper.run_mul_GNOME(i, utm_x, utm_y, self.starttime2, self.period, 900, opt='ROMS')
oilspill_wrapper.GNOME_GM_visualization(i, opt='ROMS')
oilspill_wrapper.GNOME_GE_animation(i, self.starttime2, opt='ROMS')
#### run TracPy ####
if self.runTracPy:
## create TracPy directory
TRACPY_dir = 'TRACPY/%s'%str(i)
if os.path.exists(TRACPY_dir):
shutil.rmtree(TRACPY_dir)
os.makedirs(TRACPY_dir)
## move files
oilspill_wrapper.init_tracpy(i)
## run TracPy
print 'running TracPy !!!\n'
oilspill_wrapper.TRACPY(i, utm_x, utm_y, self.starttime2, self.period, opt='ROMS')
oilspill_wrapper.TRACPY_GM_visualization(i, opt='ROMS')
oilspill_wrapper.TRACPY_GE_animation(i, self.starttime2, opt='ROMS')
##### pause a while for new data to be available ####
## timer
end = time.time()
timer(start, end, i, self.number, self.interval)
#### probability map ####
if self.runGNOME and self.probability_map:
oilspill_wrapper.ensemble_combination(self.number, opt='ROMS')
print 'creating probability map!!!\n'
bbox=[-95.97,-94.025,27.24,29.89] # the map range
Pmap('GNOME_combined.nc', 400, 400, self.starttime2, bbox, self.mpl, self.google_earth)
def run_blended(self):
"""
use blended model velocity to run GNOME
"""
(utm_x,utm_y)=utm.from_latlon(self.init_latlon[0], self.init_latlon[1])[0:2]
for i in range(self.number):
start = time.time()
print "Running simulation #%s !!\n"%str(i)
if self.hydro_run:
## Step One: run SUNTANS
hydro_wrapper.runSUNTANS(self.starttime, self.endtime, 'ROMSFILE', 'ROMS', ROMS_datasource=self.ROMS_datasource)
## Step Two: Blend SUNTANS and ROMS
BL = blend(self.starttime, self.endtime)
BL.model_velocity()
## Step Three: run GNOME
if self.runGNOME:
## Prepare for GNOME run
GNOME_dir = "GNOME/%s"%str(i)
if os.path.exists(GNOME_dir):
shutil.rmtree(GNOME_dir)
os.makedirs(GNOME_dir)
blended_file = 'DATA/blended_uv.nc'
blended_out = '%s/hiroms_ss_rho.nc'%GNOME_dir
oilspill_wrapper.init_model(i, opt='blended')
oilspill_wrapper.HIROMS(blended_file, blended_out, subset=self.gnome_subset, bbox=self.gnome_bbox)
## GNOME wind
print "Forecast simulation, downloading TAMU-NCEP wind !!!\n"
subset_wind = False
TNW = TAMU_NCEP_wind(subset_wind)
TNW.writeGNOME('%s/wind.nc'%GNOME_dir)
print 'running GNOME !!!\n'
oilspill_wrapper.run_mul_GNOME(i, utm_x, utm_y, self.starttime2, self.period, 900, opt='blended')
oilspill_wrapper.GNOME_GM_visualization(i, opt='blended')
oilspill_wrapper.GNOME_GE_animation(i, self.starttime2, opt='blended')
#### Step Four: run TracPy ####
if self.runTracPy:
## create TracPy directory
TRACPY_dir = 'TRACPY/%s'%str(i)
if os.path.exists(TRACPY_dir):
shutil.rmtree(TRACPY_dir)
os.makedirs(TRACPY_dir)
## move files
oilspill_wrapper.init_tracpy(i)
## run TracPy
print 'running TracPy !!!\n'
oilspill_wrapper.TRACPY(i, utm_x, utm_y, self.starttime2, self.period, opt='blended')
oilspill_wrapper.TRACPY_GM_visualization(i, opt='blended')
oilspill_wrapper.TRACPY_GE_animation(i, self.starttime2, opt='blended')
#### pause a while for new data to be available ####
## timer
end = time.time()
timer(start, end, i, self.number, self.interval)
if self.runGNOME and self.probability_map:
oilspill_wrapper.ensemble_combination(self.number, opt='blended')
print 'creating probability map!!!\n'
bbox=[-95.97,-94.025,27.24,29.89] # the map range
Pmap('GNOME_combined.nc', 400, 400, self.starttime2, bbox, self.mpl, self.google_earth)
def timer(start, end, i, number, interval):
"""
pause time
"""
simulation_time = int(end-start)
#### Start pausing ####
if i!= number-1:
sleep_time = interval - simulation_time
for j in xrange(int(sleep_time/60.)*60, 0, -60):
mm = j/60
print 'Starting new simulation in %d minutes ...'%mm
time.sleep(60)
#### For testing only
if __name__ == "__main__":
starttime='2016-03-15-00'
    #endtime='2016-03-19-00'
    endtime='2016-03-16-00'
#starttime='2017-03-15-00'
#endtime='2017-03-19-00'
UW = upper_wrapper()
    # starttime2 (the oil spill start) is assumed equal to the hydro start
    UW(starttime, endtime, starttime, 20, init_latlon=[28.353786, -95.315109]) #ROMS domain
    #UW(starttime, endtime, starttime, 90, init_latlon=[29.463089, -94.843460]) #SUNTANS domain
|
{
"content_hash": "f8cdbce6d1aa123bbf290e7403f2145a",
"timestamp": "",
"source": "github",
"line_count": 323,
"max_line_length": 135,
"avg_line_length": 34.411764705882355,
"alnum_prop": 0.5493477282950967,
"repo_name": "UT-CWE/Hyospy",
"id": "ef72809a074b652ab2bd20c68a02550118279837",
"size": "11139",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Hyospy_ensemble/upper_wrapper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5513"
},
{
"name": "C++",
"bytes": "3170"
},
{
"name": "Makefile",
"bytes": "2822"
},
{
"name": "Python",
"bytes": "1221903"
},
{
"name": "Shell",
"bytes": "648"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from xml.dom.minidom import parseString
from xml.etree.ElementTree import tostring, SubElement, Element
from datetime import datetime
from dateutil.parser import parse
from decimal import Decimal
import requests
from six.moves.urllib.parse import parse_qs
import six
from .constants import XERO_API_URL
from .exceptions import *
def isplural(word):
return word[-1].lower() == 's'
def singular(word):
if isplural(word):
return word[:-1]
return word
class Manager(object):
DECORATED_METHODS = (
'get',
'save',
'filter',
'all',
'put',
'get_attachments',
'get_attachment_data',
'put_attachment_data',
)
DATETIME_FIELDS = (
'UpdatedDateUTC',
'Updated',
'FullyPaidOnDate',
'DateTimeUTC',
'CreatedDateUTC',
)
DATE_FIELDS = (
'DueDate',
'Date',
'PaymentDate',
'StartDate',
'EndDate',
'PeriodLockDate',
'DateOfBirth',
'OpeningBalanceDate',
)
BOOLEAN_FIELDS = (
'IsSupplier',
'IsCustomer',
'IsDemoCompany',
'PaysTax',
'IsAuthorisedToApproveTimesheets',
'IsAuthorisedToApproveLeave',
'HasHELPDebt',
'AustralianResidentForTaxPurposes',
'TaxFreeThresholdClaimed',
'HasSFSSDebt',
'EligibleToReceiveLeaveLoading',
'IsExemptFromTax',
'IsExemptFromSuper',
'SentToContact',
)
DECIMAL_FIELDS = ('Hours', 'NumberOfUnit')
INTEGER_FIELDS = ('FinancialYearEndDay', 'FinancialYearEndMonth')
PLURAL_EXCEPTIONS = {'Addresse': 'Address'}
NO_SEND_FIELDS = ('UpdatedDateUTC',)
def __init__(self, name, credentials):
self.credentials = credentials
self.name = name
self.base_url = credentials.base_url + XERO_API_URL
# setup our singular variants of the name
# only if the name ends in 's'
if name[-1] == "s":
self.singular = name[:len(name)-1]
else:
self.singular = name
for method_name in self.DECORATED_METHODS:
method = getattr(self, '_%s' % method_name)
setattr(self, method_name, self._get_data(method))
def walk_dom(self, dom):
tree_list = tuple()
for node in dom.childNodes:
tagName = getattr(node, 'tagName', None)
if tagName:
tree_list += (tagName, self.walk_dom(node),)
else:
data = node.data.strip()
if data:
tree_list += (node.data.strip(),)
return tree_list
def convert_to_dict(self, deep_list):
out = {}
if len(deep_list) > 2:
lists = [l for l in deep_list if isinstance(l, tuple)]
keys = [l for l in deep_list if isinstance(l, six.string_types)]
if len(keys) > 1 and len(set(keys)) == 1:
# This is a collection... all of the keys are the same.
return [self.convert_to_dict(data) for data in lists]
for key, data in zip(keys, lists):
if not data:
# Skip things that are empty tags?
continue
if len(data) == 1:
# we're setting a value
# check to see if we need to apply any special
# formatting to the value
val = data[0]
if key in self.DECIMAL_FIELDS:
val = Decimal(val)
elif key in self.BOOLEAN_FIELDS:
val = True if val.lower() == 'true' else False
elif key in self.DATETIME_FIELDS:
val = parse(val)
elif key in self.DATE_FIELDS:
if val.isdigit():
val = int(val)
else:
val = parse(val).date()
elif key in self.INTEGER_FIELDS:
val = int(val)
data = val
else:
# We have a deeper data structure, that we need
# to recursively process.
data = self.convert_to_dict(data)
# Which may itself be a collection. Quick, check!
                    if isinstance(data, dict) and isplural(key) and list(data.keys()) == [singular(key)]:
data = [data[singular(key)]]
out[key] = data
elif len(deep_list) == 2:
key = deep_list[0]
data = self.convert_to_dict(deep_list[1])
# If our key is repeated in our child object, but in singular
# form (and is the only key), then this object is a collection.
            if isplural(key) and list(data.keys()) == [singular(key)]:
data = [data[singular(key)]]
out[key] = data
else:
out = deep_list[0]
return out
def dict_to_xml(self, root_elm, data):
for key in data.keys():
# Xero will complain if we send back these fields.
if key in self.NO_SEND_FIELDS:
continue
sub_data = data[key]
elm = SubElement(root_elm, key)
is_list = isinstance(sub_data, list) or isinstance(sub_data, tuple)
is_plural = key[len(key)-1] == "s"
plural_name = key[:len(key)-1]
# Key references a dict. Unroll the dict
# as it's own XML node with subnodes
if isinstance(sub_data, dict):
self.dict_to_xml(elm, sub_data)
# Key references a list/tuple
elif is_list:
# key name is a plural. This means each item
# in the list needs to be wrapped in an XML
# node that is a singular version of the list name.
if is_plural:
for d in sub_data:
plural_name = self.PLURAL_EXCEPTIONS.get(plural_name, plural_name)
self.dict_to_xml(SubElement(elm, plural_name), d)
# key name isn't a plural. Just insert the content
# as an XML node with subnodes
else:
for d in sub_data:
self.dict_to_xml(elm, d)
# Normal element - just insert the data.
else:
if key in self.BOOLEAN_FIELDS:
val = 'true' if sub_data else 'false'
else:
val = six.text_type(sub_data)
elm.text = val
return root_elm
def _prepare_data_for_save(self, data):
if isinstance(data, list) or isinstance(data, tuple):
root_elm = Element(self.name)
for d in data:
sub_elm = SubElement(root_elm, self.singular)
self.dict_to_xml(sub_elm, d)
else:
root_elm = self.dict_to_xml(Element(self.singular), data)
return tostring(root_elm)
def _get_results(self, data):
response = data['Response']
if self.name in response:
result = response[self.name]
elif 'Attachments' in response:
result = response['Attachments']
else:
return None
if isinstance(result, tuple) or isinstance(result, list):
return result
if isinstance(result, dict) and self.singular in result:
return result[self.singular]
def _get_data(self, func):
""" This is the decorator for our DECORATED_METHODS.
Each of the decorated methods must return:
uri, params, method, body, headers, singleobject
"""
def wrapper(*args, **kwargs):
uri, params, method, body, headers, singleobject = func(*args, **kwargs)
cert = getattr(self.credentials, 'client_cert', None)
response = getattr(requests, method)(
uri, data=body, headers=headers, auth=self.credentials.oauth,
params=params, cert=cert)
if response.status_code == 200:
if not response.headers['content-type'].startswith('text/xml'):
# return a byte string without doing any Unicode conversions
return response.content
# parseString takes byte content, not unicode.
dom = parseString(response.text.encode(response.encoding))
data = self.convert_to_dict(self.walk_dom(dom))
results = self._get_results(data)
# If we're dealing with Manager.get, return a single object.
if singleobject and isinstance(results, list):
return results[0]
return results
elif response.status_code == 400:
raise XeroBadRequest(response)
elif response.status_code == 401:
raise XeroUnauthorized(response)
elif response.status_code == 403:
raise XeroForbidden(response)
elif response.status_code == 404:
raise XeroNotFound(response)
elif response.status_code == 500:
raise XeroInternalError(response)
elif response.status_code == 501:
raise XeroNotImplemented(response)
elif response.status_code == 503:
# Two 503 responses are possible. Rate limit errors
# return encoded content; offline errors don't.
# If you parse the response text and there's nothing
# encoded, it must be a not-available error.
payload = parse_qs(response.text)
if payload:
raise XeroRateLimitExceeded(response, payload)
else:
raise XeroNotAvailable(response)
else:
raise XeroExceptionUnknown(response)
return wrapper
def _get(self, id, headers=None):
uri = '/'.join([self.base_url, self.name, id])
return uri, {}, 'get', None, headers, True
def _get_attachments(self, id):
"""Retrieve a list of attachments associated with this Xero object."""
uri = '/'.join([self.base_url, self.name, id, 'Attachments']) + '/'
return uri, {}, 'get', None, None, False
def _get_attachment_data(self, id, filename):
"""
Retrieve the contents of a specific attachment (identified by filename).
"""
uri = '/'.join([self.base_url, self.name, id, 'Attachments', filename])
return uri, {}, 'get', None, None, False
def get_attachment(self, id, filename, file):
"""
Retrieve the contents of a specific attachment (identified by filename).
Writes data to file object, returns length of data written.
"""
data = self.get_attachment_data(id, filename)
file.write(data)
return len(data)
def save_or_put(self, data, method='post', headers=None, summarize_errors=True):
uri = '/'.join([self.base_url, self.name])
body = {'xml': self._prepare_data_for_save(data)}
if summarize_errors:
params = {}
else:
params = {'summarizeErrors': 'false'}
return uri, params, method, body, headers, False
def _save(self, data):
return self.save_or_put(data, method='post')
def _put(self, data, summarize_errors=True):
return self.save_or_put(data, method='put', summarize_errors=summarize_errors)
def _put_attachment_data(self, id, filename, data, content_type, include_online=False):
"""Upload an attachment to the Xero object."""
uri = '/'.join([self.base_url, self.name, id, 'Attachments', filename])
params = {'IncludeOnline': 'true'} if include_online else {}
        headers = {'Content-Type': content_type, 'Content-Length': str(len(data))}
return uri, params, 'put', data, headers, False
def put_attachment(self, id, filename, file, content_type, include_online=False):
"""Upload an attachment to the Xero object (from file object)."""
self.put_attachment_data(id, filename, file.read(), content_type,
include_online=include_online)
def prepare_filtering_date(self, val):
if isinstance(val, datetime):
val = val.strftime('%a, %d %b %Y %H:%M:%S GMT')
else:
val = '"%s"' % val
return {'If-Modified-Since': val}
def _filter(self, **kwargs):
params = {}
headers = None
uri = '/'.join([self.base_url, self.name])
if kwargs:
if 'since' in kwargs:
val = kwargs['since']
headers = self.prepare_filtering_date(val)
del kwargs['since']
def get_filter_params(key, value):
last_key = key.split('_')[-1]
if last_key.upper().endswith('ID'):
return 'Guid("%s")' % six.text_type(value)
if key in self.BOOLEAN_FIELDS:
return 'true' if value else 'false'
elif key in self.DATETIME_FIELDS:
return value.isoformat()
else:
return '"%s"' % six.text_type(value)
def generate_param(key, value):
parts = key.split("__")
field = key.replace('_', '.')
fmt = '%s==%s'
if len(parts) == 2:
# support filters:
# Name__Contains=John becomes Name.Contains("John")
if parts[1] in ["contains", "startswith", "endswith"]:
field = parts[0]
fmt = ''.join(['%s.', parts[1], '(%s)'])
elif parts[1] in ["isnull"]:
sign = '=' if value else '!'
return '%s%s=null' % (parts[0], sign)
return fmt % (
field,
get_filter_params(key, value)
)
# Move any known parameter names to the query string
KNOWN_PARAMETERS = ['order', 'offset', 'page']
for param in KNOWN_PARAMETERS:
if param in kwargs:
params[param] = kwargs.pop(param)
# Treat any remaining arguments as filter predicates
# Xero will break if you search without a check for null in the first position:
# http://developer.xero.com/documentation/getting-started/http-requests-and-responses/#title3
sortedkwargs = sorted(six.iteritems(kwargs),
key=lambda item: -1 if 'isnull' in item[0] else 0)
filter_params = [generate_param(key, value) for key, value in sortedkwargs]
if filter_params:
params['where'] = '&&'.join(filter_params)
return uri, params, 'get', None, headers, False
def _all(self):
uri = '/'.join([self.base_url, self.name])
return uri, {}, 'get', None, None, False
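# --- Hedged usage sketch (editor's addition, not part of the original
# module). PrivateCredentials and the key values are placeholders from
# pyxero's auth layer, shown here only to illustrate Manager:
# from xero.auth import PrivateCredentials
# credentials = PrivateCredentials('<consumer_key>', '<rsa_key>')
# contacts = Manager('Contacts', credentials)
# recent = contacts.filter(since=datetime(2014, 1, 1), order='UpdatedDateUTC')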
|
{
"content_hash": "4b08e1047a9aac127118ebe9f7dcfd6c",
"timestamp": "",
"source": "github",
"line_count": 408,
"max_line_length": 105,
"avg_line_length": 37.39950980392157,
"alnum_prop": 0.5284749983616226,
"repo_name": "jacobg/pyxero",
"id": "f588fe4190d6f5b3f6f8feb2e66e29381c745716",
"size": "15259",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "xero/manager.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "70296"
}
],
"symlink_target": ""
}
|
from antlr4.RuleContext import RuleContext
from antlr4.Token import Token
from antlr4.error.ErrorListener import ProxyErrorListener, ConsoleErrorListener
# need forward declaration
RecognitionException = None
class Recognizer(object):
tokenTypeMapCache = dict()
ruleIndexMapCache = dict()
def __init__(self):
self._listeners = [ ConsoleErrorListener.INSTANCE ]
self._interp = None
self._stateNumber = -1
def extractVersion(self, version):
pos = version.find(".")
major = version[0:pos]
version = version[pos+1:]
pos = version.find(".")
if pos==-1:
pos = version.find("-")
if pos==-1:
pos = len(version)
minor = version[0:pos]
return major, minor
def checkVersion(self, toolVersion):
runtimeVersion = "4.7.2"
rvmajor, rvminor = self.extractVersion(runtimeVersion)
tvmajor, tvminor = self.extractVersion(toolVersion)
if rvmajor!=tvmajor or rvminor!=tvminor:
print("ANTLR runtime and generated code versions disagree: "+runtimeVersion+"!="+toolVersion)
def addErrorListener(self, listener):
self._listeners.append(listener)
def removeErrorListener(self, listener):
self._listeners.remove(listener)
def removeErrorListeners(self):
self._listeners = []
def getTokenTypeMap(self):
tokenNames = self.getTokenNames()
if tokenNames is None:
from antlr4.error.Errors import UnsupportedOperationException
raise UnsupportedOperationException("The current recognizer does not provide a list of token names.")
result = self.tokenTypeMapCache.get(tokenNames, None)
if result is None:
            result = dict(zip(tokenNames, range(0, len(tokenNames))))
result["EOF"] = Token.EOF
self.tokenTypeMapCache[tokenNames] = result
return result
# Get a map from rule names to rule indexes.
#
# <p>Used for XPath and tree pattern compilation.</p>
#
def getRuleIndexMap(self):
ruleNames = self.getRuleNames()
if ruleNames is None:
from antlr4.error.Errors import UnsupportedOperationException
raise UnsupportedOperationException("The current recognizer does not provide a list of rule names.")
result = self.ruleIndexMapCache.get(ruleNames, None)
if result is None:
            result = dict(zip(ruleNames, range(0, len(ruleNames))))
self.ruleIndexMapCache[ruleNames] = result
return result
def getTokenType(self, tokenName:str):
ttype = self.getTokenTypeMap().get(tokenName, None)
if ttype is not None:
return ttype
else:
return Token.INVALID_TYPE
# What is the error header, normally line/character position information?#
def getErrorHeader(self, e:RecognitionException):
line = e.getOffendingToken().line
column = e.getOffendingToken().column
return "line "+line+":"+column
# How should a token be displayed in an error message? The default
# is to display just the text, but during development you might
# want to have a lot of information spit out. Override in that case
# to use t.toString() (which, for CommonToken, dumps everything about
# the token). This is better than forcing you to override a method in
# your token objects because you don't have to go modify your lexer
# so that it creates a new Java type.
#
# @deprecated This method is not called by the ANTLR 4 Runtime. Specific
# implementations of {@link ANTLRErrorStrategy} may provide a similar
# feature when necessary. For example, see
# {@link DefaultErrorStrategy#getTokenErrorDisplay}.
#
def getTokenErrorDisplay(self, t:Token):
if t is None:
return "<no token>"
s = t.text
if s is None:
if t.type==Token.EOF:
s = "<EOF>"
else:
s = "<" + str(t.type) + ">"
s = s.replace("\n","\\n")
s = s.replace("\r","\\r")
s = s.replace("\t","\\t")
return "'" + s + "'"
def getErrorListenerDispatch(self):
return ProxyErrorListener(self._listeners)
# subclass needs to override these if there are sempreds or actions
# that the ATN interp needs to execute
def sempred(self, localctx:RuleContext, ruleIndex:int, actionIndex:int):
return True
def precpred(self, localctx:RuleContext , precedence:int):
return True
@property
def state(self):
return self._stateNumber
# Indicate that the recognizer has changed internal state that is
# consistent with the ATN state passed in. This way we always know
# where we are in the ATN as the parser goes along. The rule
# context objects form a stack that lets us see the stack of
# invoking rules. Combine this and we have complete ATN
# configuration information.
@state.setter
def state(self, atnState:int):
self._stateNumber = atnState
del RecognitionException
import unittest
class Test(unittest.TestCase):
def testVersion(self):
major, minor = Recognizer().extractVersion("1.2")
self.assertEqual("1", major)
self.assertEqual("2", minor)
major, minor = Recognizer().extractVersion("1.2.3")
self.assertEqual("1", major)
self.assertEqual("2", minor)
major, minor = Recognizer().extractVersion("1.2-snapshot")
self.assertEqual("1", major)
self.assertEqual("2", minor)
|
{
"content_hash": "06920ea85cb4e95b9773531df708ec81",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 113,
"avg_line_length": 36.21935483870968,
"alnum_prop": 0.6442821517634485,
"repo_name": "damorim/compilers-cin",
"id": "01017351f62a7646e8a78e87a5c7ca2f11e0f35e",
"size": "5808",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "2020_3/projeto3/antlr4-python3-runtime-4.7.2/src/antlr4/Recognizer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ANTLR",
"bytes": "11266"
},
{
"name": "C",
"bytes": "118657"
},
{
"name": "C++",
"bytes": "13919"
},
{
"name": "Java",
"bytes": "137247"
},
{
"name": "Makefile",
"bytes": "8093"
},
{
"name": "Python",
"bytes": "930934"
}
],
"symlink_target": ""
}
|
import time
# paths to the test and training datasets
test_dataset = "/home/admin/compresion/dataset/test_set_tweets.txt"
training_dataset = "/home/admin/compresion/dataset/training_set_tweets.txt"
tweetstest = []
tweetstrai = []
#open files
try:
t1 = time.clock()
#output filename
w1 = open("todostweets", "a")
f1 = open(test_dataset, "r")
lines = f1.readlines()
for line in lines:
lst = line.split()[2:-2]
joined = " ".join(lst)
tweetstest.append(joined)
for tweet in tweetstest:
w1.write(tweet + "\n")
f1.close()
t2 = time.clock()
print("tiempo para tweets de test", t2 - t1)
f2 = open(training_dataset, "r")
lines = f2.readlines()
for line in lines:
lst = line.split()[2:-2]
joined = " ".join(lst)
tweetstrai.append(joined)
for tweet in tweetstrai:
w1.write(tweet + "\n")
f2.close()
t3 = time.clock()
print("tiempo para tweets de training", t3 - t2)
w1.close()
except IOError as mess:
print "Error in files"
print(mess)
|
{
"content_hash": "7266313b4445fcaaea002e7b702aeb86",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 75,
"avg_line_length": 22.4,
"alnum_prop": 0.6180555555555556,
"repo_name": "pasilvagh/U",
"id": "8142c7386d009c30155c59b5e58ee40a3c4f69b3",
"size": "1029",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "compresionTexto/trabajo/pruebas.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "18415"
},
{
"name": "Makefile",
"bytes": "621"
},
{
"name": "Python",
"bytes": "63822"
}
],
"symlink_target": ""
}
|
"""Utils for manipulating program data.
"""
from datetime import date
from soc.models.document import Document
from soc.models.org_app_survey import OrgAppSurvey
from soc.models.site import Site
from soc.models.sponsor import Sponsor
from soc.models.user import User
from soc.modules.gci.models.organization import GCIOrganization
from soc.modules.gci.models.program import GCIProgram
from soc.modules.gci.models.timeline import GCITimeline
from soc.modules.gsoc.models.organization import GSoCOrganization
from soc.modules.gsoc.models.program import GSoCProgram
from soc.modules.gsoc.models.timeline import GSoCTimeline
from soc.modules.seeder.logic.providers.string import DocumentKeyNameProvider
from soc.modules.seeder.logic.seeder import logic as seeder_logic
class ProgramHelper(object):
"""Helper class to aid in manipulating program data.
"""
def __init__(self):
"""Initializes the ProgramHelper.
Args:
program: a program
org_app: an organization application
org: an organization
site: a site
"""
self.founder = None
self.sponsor = None
self.program = None
self.org_app = None
self.org = None
self.site = None
self.createOrg = self.createOrUpdateOrg
def seed(self, model, properties,
auto_seed_optional_properties=True):
return seeder_logic.seed(model, properties, recurse=False,
auto_seed_optional_properties=auto_seed_optional_properties)
def seedn(self, model, properties, n,
auto_seed_optional_properties=True):
return seeder_logic.seedn(model, n, properties, recurse=False,
auto_seed_optional_properties=auto_seed_optional_properties)
def createFounder(self, override={}):
"""Creates a founder for the defined properties.
"""
if self.founder:
return self.founder
properties = {}
properties.update(override)
self.founder = self.seed(User, properties)
return self.founder
def createSponsor(self, override={}):
"""Creates a sponsor for the defined properties.
"""
if self.sponsor:
return self.sponsor
if self.founder is None:
self.createFounder()
properties = {'founder': self.founder, 'home': None}
properties.update(override)
self.sponsor = self.seed(Sponsor, properties)
return self.sponsor
def createProgram(self, override={}):
"""Creates a program for the defined properties.
"""
if self.sponsor is None:
self.createSponsor()
def createOrgApp(self, override={}):
"""Creates an organization application for the defined properties.
"""
if self.org_app:
return self.org_app
if self.program is None:
self.createProgram()
# TODO (Madhu): Remove scope and author fields once the data
# conversion is done.
properties = {
'key_name': 'gci_program/%s/orgapp' % self.program.key().name(),
'scope': self.program, 'program': self.program,
'modified_by': self.founder,
'created_by': self.founder,
'author': self.founder,
'schema': ('[["item"],{"item":{"field_type":"input_text",'
'"required":false, "label":"test"}}]'),
'survey_content': None,
}
properties.update(override)
self.org_app = self.seed(OrgAppSurvey, properties)
return self.org_app
def _updateEntity(self, entity, override):
"""Updates self.<entity> with override.
"""
properties = entity.properties()
for name, value in override.iteritems():
properties[name].__set__(entity, value)
entity.put()
return entity
def _updateOrg(self, override):
"""Updates self.org with override.
"""
return self._updateEntity(self.org, override)
def createOrUpdateOrg(self, override={}):
"""Creates or updates an org (self.org) for the defined properties.
"""
if self.org:
if not override:
return self.org
else:
return self._updateOrg(override)
self.org = self.createNewOrg(override)
return self.org
def createNewOrg(self, override={}):
"""Creates a new organization for the defined properties.
This new organization will not be stored in self.org but returned.
"""
if self.program is None:
self.createProgram()
def createSite(self, override={}):
"""Creates a site for the defined properties.
"""
if self.program is None:
self.createProgram()
properties = {'key_name': 'site', 'link_id': 'site',
'active_program': self.program}
properties.update(override)
self.site = Site(**properties)
self.site.put()
return self.site
class GSoCProgramHelper(ProgramHelper):
"""Helper class to aid in manipulating GSoC program data.
"""
def __init__(self):
"""Initializes the GSoCProgramHelper.
"""
super(GSoCProgramHelper, self).__init__()
def createProgram(self, override={}):
"""Creates a program for the defined properties.
"""
if self.program:
return self.program
super(GSoCProgramHelper, self).createProgram()
properties = {'scope': self.sponsor}
self.program_timeline = self.seed(GSoCTimeline, properties)
properties = {'timeline': self.program_timeline,
'status': 'visible', 'apps_tasks_limit': 20,
'scope': self.sponsor,
'student_agreement': None, 'events_page': None,
'help_page': None, 'connect_with_us_page': None,
'mentor_agreement': None, 'org_admin_agreement': None,
'terms_and_conditions': None,
'home': None, 'about_page': None}
properties.update(override)
self.program = self.seed(GSoCProgram, properties)
properties = {
'prefix': 'gsoc_program', 'scope': self.program,
'read_access': 'public', 'key_name': DocumentKeyNameProvider(),
'modified_by': self.founder, 'author': self.founder,
'home_for': None,
}
document = self.seed(Document, properties=properties)
self.program.about_page = document
self.program.events_page = document
self.program.help_page = document
self.program.connect_with_us_page = document
self.program.privacy_policy = document
self.program.put()
return self.program
def createNewOrg(self, override={}):
"""Creates a new organization for the defined properties.
This new organization will not be stored in self.org but returned.
"""
super(GSoCProgramHelper, self).createNewOrg(override)
properties = {'scope': self.program, 'status': 'active',
'scoring_disabled': False, 'max_score': 5,
'founder': self.founder, 'home': None,}
properties.update(override)
return self.seed(GSoCOrganization, properties)
class GCIProgramHelper(ProgramHelper):
"""Helper class to aid in manipulating GCI program data.
"""
def __init__(self):
"""Initializes the GCIProgramHelper.
"""
super(GCIProgramHelper, self).__init__()
def createProgram(self, override={}):
"""Creates a program for the defined properties.
"""
if self.program:
return self.program
super(GCIProgramHelper, self).createProgram()
properties = {'scope': self.sponsor}
self.program_timeline = self.seed(GCITimeline, properties)
properties = {
'timeline': self.program_timeline,
'status': 'visible',
'scope': self.sponsor,
'student_agreement': None, 'events_page': None,
'help_page': None, 'connect_with_us_page': None,
'mentor_agreement': None, 'org_admin_agreement': None,
'terms_and_conditions': None, 'home': None, 'about_page': None,
'nr_simultaneous_tasks': 5,
'student_min_age': 13, 'student_max_age': 17,
'student_min_age_as_of': date.today(),
'task_types': ['code', 'documentation', 'design'],
}
properties.update(override)
self.program = self.seed(GCIProgram, properties)
properties = {
'prefix': 'gci_program', 'scope': self.program,
'read_access': 'public', 'key_name': DocumentKeyNameProvider(),
'modified_by': self.founder, 'author': self.founder,
'home_for': None,
}
document = self.seed(Document, properties=properties)
self.program.about_page = document
self.program.events_page = document
self.program.help_page = document
self.program.connect_with_us_page = document
self.program.privacy_policy = document
self.program.put()
return self.program
def createNewOrg(self, override={}):
"""Creates a new organization for the defined properties.
This new organization will not be stored in self.org but returned.
"""
super(GCIProgramHelper, self).createNewOrg(override)
properties = {'scope': self.program, 'status': 'active',
'founder': self.founder,
'home': None,
'task_quota_limit': 100}
properties.update(override)
return self.seed(GCIOrganization, properties)
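# --- Hedged usage sketch (editor's addition, not part of the original
# module): assumes an initialized App Engine testbed/datastore, as in the
# surrounding test suite.
# helper = GSoCProgramHelper()
# program = helper.createProgram()
# org = helper.createOrUpdateOrg({'name': 'Example Org'})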
|
{
"content_hash": "32d163e08308adfa173ee2bc3233a534",
"timestamp": "",
"source": "github",
"line_count": 270,
"max_line_length": 77,
"avg_line_length": 33.34444444444444,
"alnum_prop": 0.6551149616794402,
"repo_name": "adviti/melange",
"id": "ded354c6397f8d13b3d6e473b441c0f36e78b3c3",
"size": "9614",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/program_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
def make_it_proper_to_grammar(transitions_str):
"""
:param transitions_str: {string} grammar
"""
grammar = transitions_str.split(' ')
initial_simbol = grammar[0]
non_terminals = set()
terminals = set()
temporary_alfa = ''
temporary_beta = set()
productionDict = dict()
grammar_length = len(grammar)
for i in range(0, grammar_length):
if grammar[i].isupper():
non_terminals.add(grammar[i])
if not grammar[i] == '|' and not grammar[i] == '->' and not i == 0 and not grammar[i].isupper():
temporary_beta.add(grammar[i])
terminals.add(grammar[i][0])
if grammar[i] == '->' or i == grammar_length - 1:
if len(temporary_beta) > 0:
productionDict[temporary_alfa] = temporary_beta
temporary_beta = set()
temporary_alfa = grammar[i-1]
return regular_grammar(non_terminals, terminals, productionDict, initial_simbol)
class regular_grammar:
def __init__(self, non_terminals, terminals, dict_of_productions, initial_simbol):
self.non_terminals = non_terminals
self.terminals = terminals
self.productions = dict_of_productions
self.initial_simbol = initial_simbol
def to_automata(self):
"""
:return: productions of automata
"""
states = self.non_terminals
states.add('qAccept')
delta = dict()
temp = dict()
for _alfa in self.productions:
for _beta in self.productions[_alfa]:
if _beta[0] in temp:
if len(_beta) == 2:
temp[_beta[0]].add(_beta[1])
else:
temp[_beta[0]].add('qAccept')
else:
if len(_beta) == 2:
temp[_beta[0]] = {_beta[1]}
else:
temp[_beta[0]] = {'qAccept'}
delta[_alfa] = temp
temp = dict()
return delta
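# --- Hedged usage sketch (editor's addition, not part of the original
# module). The grammar string format is inferred from the parser above:
# space-separated tokens using '->' and '|'.
if __name__ == '__main__':
    g = make_it_proper_to_grammar('S -> aS | b')
    print(g.to_automata())  # e.g. {'S': {'a': {'S'}, 'b': {'qAccept'}}}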
|
{
"content_hash": "85133f09ce5ecfc284a9ed30761f26d2",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 108,
"avg_line_length": 31.514705882352942,
"alnum_prop": 0.49416705552963136,
"repo_name": "w2srobinho/formais",
"id": "351d34d57c9a8e63b2cd23c0b7c800fd93ac3f76",
"size": "2145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "regular_sets/regular_grammar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20988"
}
],
"symlink_target": ""
}
|
import os
import sys
from pyqode.core.api import CodeEdit
from pyqode.core.backend import NotRunning
from pyqode.qt import QtWidgets
import pytest
from pyqode.qt.QtTest import QTest
from pyqode.core import backend
from pyqode.core.managers.backend import BackendManager
from ..helpers import cwd_at, python2_path, server_path, wait_for_connected
from ..helpers import editor_open, ensure_connected
@editor_open(__file__)
@ensure_connected
def test_exit_code(editor):
assert editor.backend.running
assert editor.backend.exit_code is None
editor.backend.stop()
assert not editor.backend.running
backend_manager = None
def _on_receive(results):
"""
    Assert the received data is the same as the data we sent: a string
    containing 'some data'.
"""
assert results == 'some data'
app = QtWidgets.QApplication.instance()
app.exit(0)
def _send_request():
"""
Sends a request to the server. The request data is a simple string which
contains 'some data'.
"""
global backend_manager
backend_manager.send_request(backend.echo_worker, 'some data',
on_receive=_on_receive)
@cwd_at('test')
def test_client_server():
"""
Checks that the client-server works as expected. We will send
    a request using the echo worker and assert it returns the same data we sent,
providing assurance that the client-server communication and protocol is
OK.
Once the result has been received we quit the qt app.
"""
global backend_manager
win = QtWidgets.QMainWindow()
backend_manager = BackendManager(win)
with pytest.raises(NotRunning):
backend_manager.send_request(
backend.echo_worker, 'some data', on_receive=_on_receive)
backend_manager.start(os.path.join(os.getcwd(), 'server.py'))
backend_manager._process.started.connect(_send_request)
QTest.qWait(1000)
backend_manager.stop()
del backend_manager
del win
def test_client_server_py2(editor):
"""
Test client/server with a python2 server.
"""
editor.backend.stop()
with pytest.raises(NotRunning):
editor.backend.send_request(
backend.echo_worker, 'some data', on_receive=_on_receive)
if os.path.exists(python2_path()):
editor.backend.start(server_path(), interpreter=python2_path())
wait_for_connected(editor)
editor.backend.send_request(
backend.echo_worker, 'some data', on_receive=_on_receive)
QTest.qWait(500)
editor.backend.stop()
editor.backend.start(server_path())
def test_frozen_server():
global backend_manager
win = QtWidgets.QMainWindow()
backend_manager = BackendManager(win)
with pytest.raises(NotRunning):
backend_manager.send_request(
backend.echo_worker, 'some data', on_receive=_on_receive)
backend_manager.start('server.exe')
|
{
"content_hash": "2e91c0b626d70c47758bc3a866102d9d",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 81,
"avg_line_length": 30.02061855670103,
"alnum_prop": 0.6854395604395604,
"repo_name": "pyQode/pyqode.core",
"id": "cf354b14e764ef4cad9cb4aba1cc7a167d69ca29",
"size": "2912",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/test_managers/test_backend.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1998256"
},
{
"name": "Shell",
"bytes": "1139"
}
],
"symlink_target": ""
}
|
import sys, os
sys.path.insert(0, os.path.normpath(os.path.join(os.getcwd(), '..', '..')))
import synctools
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'synctools'
copyright = u'2013, Grant Garcia'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.'.join(synctools.__version__.split('.')[:2])
# The full version, including alpha/beta/rc tags.
release = synctools.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'synctoolsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'synctools.tex', u'synctools Documentation',
u'Grant Garcia', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'synctools', u'synctools Documentation',
[u'Grant Garcia'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'synctools', u'synctools Documentation',
u'Grant Garcia', 'synctools', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
{
"content_hash": "8808b8891f8d37b08c4739a829609471",
"timestamp": "",
"source": "github",
"line_count": 239,
"max_line_length": 80,
"avg_line_length": 32.35983263598327,
"alnum_prop": 0.7020946470131885,
"repo_name": "garcia/synctools",
"id": "04fee73aefb3d681154d027e6487485f31c85512",
"size": "8154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "8456"
},
{
"name": "Python",
"bytes": "53560"
},
{
"name": "Shell",
"bytes": "2865"
},
{
"name": "VBScript",
"bytes": "80"
}
],
"symlink_target": ""
}
|
'''OpenGL extension VERSION.GL_1_3_DEPRECATED
This module customises the behaviour of the
OpenGL.raw.GL.VERSION.GL_1_3_DEPRECATED to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/VERSION/GL_1_3_DEPRECATED.txt
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.VERSION.GL_1_3_DEPRECATED import *
### END AUTOGENERATED SECTION
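# A minimal usage sketch (an assumption, not part of this module): with a GL
# context already created elsewhere (e.g. by GLUT or pygame), the re-exported
# GL 1.3 deprecated entry points can be called directly:
#
# from OpenGL.GL import GL_TEXTURE0, glClientActiveTexture
# glClientActiveTexture(GL_TEXTURE0)  # select the client texture unit (GL 1.3)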
|
{
"content_hash": "1b240b349d50db4373f0bc1520453eb5",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 66,
"avg_line_length": 35,
"alnum_prop": 0.8,
"repo_name": "frederica07/Dragon_Programming_Process",
"id": "fedb384f3fc511676ffe261914e8c9aaa366bdb9",
"size": "525",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "PyOpenGL-3.0.2/OpenGL/GL/VERSION/GL_1_3_DEPRECATED.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "1548"
},
{
"name": "Python",
"bytes": "2558317"
}
],
"symlink_target": ""
}
|
import unittest
from dedupe import predicates
from future.builtins import str
class TestPuncStrip(unittest.TestCase):
def test_sevenchar(self) :
s1 = predicates.StringPredicate(predicates.sameSevenCharStartPredicate,
'foo')
assert s1({'foo' : u'fo,18v*1vaad80'}) == s1({'foo' : u'fo18v1vaad80'})
def test_set(self) :
s1 = predicates.SimplePredicate(predicates.wholeSetPredicate,
'foo')
colors = set(['red', 'blue', 'green'])
assert s1({'foo' : colors}) == (str(colors),)
class TestMetaphone(unittest.TestCase):
def test_metaphone_token(self) :
block_val = predicates.metaphoneToken('9301 S. State St. ')
assert block_val == set([u'STT', u'SS'])
class TestWholeSet(unittest.TestCase):
def setUp(self):
self.s1 = set(['red', 'blue', 'green'])
def test_full_set(self):
block_val = predicates.wholeSetPredicate(self.s1)
self.assertEqual(block_val, (str(self.s1),))
class TestSetElement(unittest.TestCase):
def setUp(self):
self.s1 = set(['red', 'blue', 'green'])
def test_long_set(self):
block_val = predicates.commonSetElementPredicate(self.s1)
self.assertEqual(set(block_val), set(('blue', 'green', 'red')))
def test_empty_set(self):
block_val = predicates.commonSetElementPredicate(set())
self.assertEqual(block_val, tuple())
def test_first_last(self) :
block_val = predicates.lastSetElementPredicate(self.s1)
assert block_val == ('red',)
block_val = predicates.firstSetElementPredicate(self.s1)
assert block_val == ('blue',)
def test_magnitude(self) :
block_val = predicates.magnitudeOfCardinality(self.s1)
assert block_val == (u'0', )
block_val = predicates.magnitudeOfCardinality(())
assert block_val == ()
class TestLatLongGrid(unittest.TestCase):
def setUp(self):
self.latlong1 = (42.535, -5.012)
def test_precise_latlong(self):
block_val = predicates.latLongGridPredicate(self.latlong1)
assert block_val == (u'[42.5, -5.0]',)
block_val = predicates.latLongGridPredicate((0,0))
assert block_val == ()
def test_exists(self) :
block_val = predicates.existsPredicate(self.latlong1)
assert block_val == (u'1',)
block_val = predicates.existsPredicate((0,0))
assert block_val == (u'0',)
class TestNumericPredicates(unittest.TestCase) :
def test_order_of_magnitude(self) :
assert predicates.orderOfMagnitude(10) == (u'1',)
assert predicates.orderOfMagnitude(9) == (u'1',)
assert predicates.orderOfMagnitude(2) == (u'0',)
assert predicates.orderOfMagnitude(-2) == ()
def test_round_to_1(self) :
assert predicates.roundTo1(22315) == (u'20000',)
assert predicates.roundTo1(-22315) == (u'-20000',)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "bb7516627ee7730546f6311c1b0af976",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 79,
"avg_line_length": 34.13636363636363,
"alnum_prop": 0.6168442077230359,
"repo_name": "01-/dedupe",
"id": "6d81e1abf1a195beb4831a85e4688394dd3dd0af",
"size": "3004",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_predicates.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "186833"
},
{
"name": "Shell",
"bytes": "194"
}
],
"symlink_target": ""
}
|
from django.utils.encoding import smart_str
from hashlib import sha1
class Generator(object):
def generate_version(self, key, url, content):
return sha1(smart_str(content)).hexdigest()
def get_output(self):
"""
Generates content for production mode.
Yields tuples of the form:
key, url, content
Here, key must be the same as for get_dev_output_names().
"""
        for key, url, version_hash in self.get_dev_output_names():
yield key, url, self.get_dev_output(url)[0]
def get_dev_output(self, name):
"""
Generates content for dev mode.
Yields tuples of the form:
content, mimetype
"""
raise NotImplementedError()
def get_dev_output_names(self):
"""
Generates file names for dev mode.
Yields tuples of the form:
key, url, version_hash
        Here, key must be the same as for get_output().
"""
raise NotImplementedError()
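# A hedged sketch of a concrete subclass (hypothetical names; not part of this
# module) showing how the contract between get_output(), get_dev_output() and
# get_dev_output_names() fits together:
class StaticFileGenerator(Generator):
    FILES = {'app.css': 'body { color: black; }'}
    def get_dev_output(self, name):
        # name is the url yielded by get_dev_output_names()
        return self.FILES[name], 'text/css'
    def get_dev_output_names(self):
        for key, content in self.FILES.items():
            # here the key doubles as the url
            yield key, key, self.generate_version(key, key, content)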
|
{
"content_hash": "7254d0aa2cffcd7f2bdb6f06a79b9bf0",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 65,
"avg_line_length": 27.55263157894737,
"alnum_prop": 0.5740210124164279,
"repo_name": "brunogamacatao/portalsaladeaula",
"id": "fe039cc182344df3ecc036db2980fd1090282c79",
"size": "1047",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mediagenerator/base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "84537"
},
{
"name": "JavaScript",
"bytes": "616811"
},
{
"name": "Python",
"bytes": "4545655"
},
{
"name": "Ruby",
"bytes": "2070"
},
{
"name": "Shell",
"bytes": "53"
}
],
"symlink_target": ""
}
|
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("SGDClassifier" , "digits" , "oracle")
|
{
"content_hash": "9d3e1bcc0e19ca463b86a16ebfbe6151",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 72,
"avg_line_length": 33.75,
"alnum_prop": 0.7777777777777778,
"repo_name": "antoinecarme/sklearn2sql_heroku",
"id": "312db9304b937fb63b458879fc6f3cab46577749",
"size": "135",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/classification/digits/ws_digits_SGDClassifier_oracle_code_gen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "507043"
},
{
"name": "Procfile",
"bytes": "37"
},
{
"name": "Python",
"bytes": "1021137"
},
{
"name": "R",
"bytes": "2521"
}
],
"symlink_target": ""
}
|
__author__ = 'CubexX'
import hashlib
import time
from sqlalchemy import Boolean, Column, Integer, Text
from config import CONFIG
from confstat import cache
from main import make_db_session
from . import Base
class User(Base):
__tablename__ = 'users'
id = Column('id', Integer, primary_key=True)
uid = Column('uid', Integer)
username = Column('username', Text)
fullname = Column('fullname', Text)
public = Column('public', Boolean, default=False)
def __init__(self, id=None, uid=None, username=None, fullname=None, public=None):
self.id = id
self.uid = uid
self.username = username
self.fullname = fullname
self.public = public
def __repr__(self):
return "<User('{}', '{}')>".format(self.uid, self.fullname)
@make_db_session
def add(self, uid, username, fullname, db):
user = self.get(uid)
update = {}
if user:
if user.username != username:
update['username'] = username
if user.fullname != fullname:
update['fullname'] = fullname
if update:
self.update(uid, update)
else:
db.add(User(uid=uid,
username=username,
fullname=fullname))
db.commit()
        cache.set('user_{}'.format(uid), User(uid=uid,
                                              username=username,
                                              fullname=fullname))
cache.delete('web_user_{}'.format(uid))
@staticmethod
@make_db_session
def get(uid, db):
cached = cache.get('user_{}'.format(uid))
if cached:
return cached
else:
q = db.query(User) \
.filter(User.uid == uid) \
.limit(1) \
.all()
if q:
cache.set('user_{}'.format(uid), q[0])
cache.delete('web_user_{}'.format(uid))
return q[0]
else:
return False
@staticmethod
@make_db_session
def update(uid, update, db):
user = db.query(User).filter(User.uid == uid)
user.update(update)
db.commit()
@staticmethod
def generate_token(uid):
salt = str(CONFIG['salt']).encode('utf-8')
current_time = str(time.time()).encode('utf-8')
uid = str(uid).encode('utf-8')
t = hashlib.md5(uid + salt + current_time)
return t.hexdigest()[:8]
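# A minimal usage sketch (illustrative values; assumes a configured database
# session and cache from this package):
#
# user = User.get(123456)
# if not user:
#     User().add(uid=123456, username='alice', fullname='Alice Example')
# token = User.generate_token(123456)  # 8-character md5-derived token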
|
{
"content_hash": "b683aa455ba7de154197bcfe40739ad4",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 85,
"avg_line_length": 27.096774193548388,
"alnum_prop": 0.515079365079365,
"repo_name": "CubexX/confstat-bot",
"id": "96bdc8f69e3c70f35672852eb73a7be8a8c3463c",
"size": "2544",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "confstat/models/user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "30685"
}
],
"symlink_target": ""
}
|
import snowflake.connector
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from airflow.hooks.dbapi_hook import DbApiHook
class SnowflakeHook(DbApiHook):
"""
Interact with Snowflake.
    get_sqlalchemy_engine() depends on snowflake-sqlalchemy.
"""
conn_name_attr = 'snowflake_conn_id'
default_conn_name = 'snowflake_default'
supports_autocommit = True
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.account = kwargs.pop("account", None)
self.warehouse = kwargs.pop("warehouse", None)
self.database = kwargs.pop("database", None)
self.region = kwargs.pop("region", None)
self.role = kwargs.pop("role", None)
def _get_conn_params(self):
"""
        One method to fetch connection params as a dict,
        used in get_uri() and get_conn().
"""
conn = self.get_connection(self.snowflake_conn_id)
account = conn.extra_dejson.get('account', None)
warehouse = conn.extra_dejson.get('warehouse', None)
database = conn.extra_dejson.get('database', None)
region = conn.extra_dejson.get("region", None)
role = conn.extra_dejson.get('role', None)
conn_config = {
"user": conn.login,
"password": conn.password or '',
"schema": conn.schema or '',
"database": self.database or database or '',
"account": self.account or account or '',
"warehouse": self.warehouse or warehouse or '',
"region": self.region or region or '',
"role": self.role or role or '',
}
"""
If private_key_file is specified in the extra json, load the contents of the file as a private
key and specify that in the connection configuration. The connection password then becomes the
passphrase for the private key. If your private key file is not encrypted (not recommended), then
leave the password empty.
"""
private_key_file = conn.extra_dejson.get('private_key_file', None)
if private_key_file:
with open(private_key_file, "rb") as key:
passphrase = None
if conn.password:
passphrase = conn.password.strip().encode()
p_key = serialization.load_pem_private_key(
key.read(),
password=passphrase,
backend=default_backend()
)
pkb = p_key.private_bytes(encoding=serialization.Encoding.DER,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption())
conn_config['private_key'] = pkb
conn_config.pop('password', None)
return conn_config
def get_uri(self):
"""
        Override DbApiHook's get_uri() method for get_sqlalchemy_engine().
"""
conn_config = self._get_conn_params()
uri = 'snowflake://{user}:{password}@{account}/{database}/'
uri += '{schema}?warehouse={warehouse}&role={role}'
return uri.format(**conn_config)
def get_conn(self):
"""
        Returns a snowflake.connector connection object.
"""
conn_config = self._get_conn_params()
conn = snowflake.connector.connect(**conn_config)
return conn
def _get_aws_credentials(self):
"""
        Returns aws_access_key_id, aws_secret_access_key from the connection
        extra; intended to be used by external import and export statements.
"""
if self.snowflake_conn_id:
connection_object = self.get_connection(self.snowflake_conn_id)
if 'aws_secret_access_key' in connection_object.extra_dejson:
aws_access_key_id = connection_object.extra_dejson.get(
'aws_access_key_id')
aws_secret_access_key = connection_object.extra_dejson.get(
'aws_secret_access_key')
return aws_access_key_id, aws_secret_access_key
def set_autocommit(self, conn, autocommit):
conn.autocommit(autocommit)
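# A hedged sketch of the Airflow connection "extra" JSON this hook reads
# (the field values below are illustrative assumptions):
#
# {
#     "account": "xy12345",
#     "warehouse": "COMPUTE_WH",
#     "database": "ANALYTICS",
#     "region": "us-east-1",
#     "role": "SYSADMIN",
#     "private_key_file": "/path/to/rsa_key.p8"
# }
#
# With such a connection defined, the hook is used like:
#
# hook = SnowflakeHook(snowflake_conn_id='snowflake_default')
# records = hook.get_records('SELECT CURRENT_VERSION()')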
|
{
"content_hash": "e4924e6ff2b987916f1ce325e5144789",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 105,
"avg_line_length": 37.60176991150443,
"alnum_prop": 0.5900211814544599,
"repo_name": "r39132/airflow",
"id": "685d94c347e7ff23af667e82a91f5842bce05e6e",
"size": "5061",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airflow/contrib/hooks/snowflake_hook.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12126"
},
{
"name": "Dockerfile",
"bytes": "4111"
},
{
"name": "HTML",
"bytes": "128531"
},
{
"name": "JavaScript",
"bytes": "22118"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "5928206"
},
{
"name": "Shell",
"bytes": "41869"
}
],
"symlink_target": ""
}
|
from .aws import Action as BaseAction
from .aws import BaseARN
service_name = "Amazon Connect"
prefix = "connect"
class Action(BaseAction):
def __init__(self, action: str = None) -> None:
super().__init__(prefix, action)
class ARN(BaseARN):
def __init__(self, resource: str = "", region: str = "", account: str = "") -> None:
super().__init__(
service=prefix, resource=resource, region=region, account=account
)
AssociateApprovedOrigin = Action("AssociateApprovedOrigin")
AssociateBot = Action("AssociateBot")
AssociateCustomerProfilesDomain = Action("AssociateCustomerProfilesDomain")
AssociateDefaultVocabulary = Action("AssociateDefaultVocabulary")
AssociateInstanceStorageConfig = Action("AssociateInstanceStorageConfig")
AssociateLambdaFunction = Action("AssociateLambdaFunction")
AssociateLexBot = Action("AssociateLexBot")
AssociatePhoneNumberContactFlow = Action("AssociatePhoneNumberContactFlow")
AssociateQueueQuickConnects = Action("AssociateQueueQuickConnects")
AssociateRoutingProfileQueues = Action("AssociateRoutingProfileQueues")
AssociateSecurityKey = Action("AssociateSecurityKey")
BatchAssociateAnalyticsDataSet = Action("BatchAssociateAnalyticsDataSet")
BatchDisassociateAnalyticsDataSet = Action("BatchDisassociateAnalyticsDataSet")
ClaimPhoneNumber = Action("ClaimPhoneNumber")
CreateAgentStatus = Action("CreateAgentStatus")
CreateContactFlow = Action("CreateContactFlow")
CreateContactFlowModule = Action("CreateContactFlowModule")
CreateHoursOfOperation = Action("CreateHoursOfOperation")
CreateInstance = Action("CreateInstance")
CreateIntegrationAssociation = Action("CreateIntegrationAssociation")
CreateQueue = Action("CreateQueue")
CreateQuickConnect = Action("CreateQuickConnect")
CreateRoutingProfile = Action("CreateRoutingProfile")
CreateSecurityProfile = Action("CreateSecurityProfile")
CreateTaskTemplate = Action("CreateTaskTemplate")
CreateTrafficDistributionGroup = Action("CreateTrafficDistributionGroup")
CreateUseCase = Action("CreateUseCase")
CreateUser = Action("CreateUser")
CreateUserHierarchyGroup = Action("CreateUserHierarchyGroup")
CreateVocabulary = Action("CreateVocabulary")
DeleteContactFlow = Action("DeleteContactFlow")
DeleteContactFlowModule = Action("DeleteContactFlowModule")
DeleteHoursOfOperation = Action("DeleteHoursOfOperation")
DeleteInstance = Action("DeleteInstance")
DeleteIntegrationAssociation = Action("DeleteIntegrationAssociation")
DeleteQuickConnect = Action("DeleteQuickConnect")
DeleteSecurityProfile = Action("DeleteSecurityProfile")
DeleteTaskTemplate = Action("DeleteTaskTemplate")
DeleteTrafficDistributionGroup = Action("DeleteTrafficDistributionGroup")
DeleteUseCase = Action("DeleteUseCase")
DeleteUser = Action("DeleteUser")
DeleteUserHierarchyGroup = Action("DeleteUserHierarchyGroup")
DeleteVocabulary = Action("DeleteVocabulary")
DescribeAgentStatus = Action("DescribeAgentStatus")
DescribeContact = Action("DescribeContact")
DescribeContactFlow = Action("DescribeContactFlow")
DescribeContactFlowModule = Action("DescribeContactFlowModule")
DescribeForecastingPlanningSchedulingIntegration = Action(
"DescribeForecastingPlanningSchedulingIntegration"
)
DescribeHoursOfOperation = Action("DescribeHoursOfOperation")
DescribeInstance = Action("DescribeInstance")
DescribeInstanceAttribute = Action("DescribeInstanceAttribute")
DescribeInstanceStorageConfig = Action("DescribeInstanceStorageConfig")
DescribePhoneNumber = Action("DescribePhoneNumber")
DescribeQueue = Action("DescribeQueue")
DescribeQuickConnect = Action("DescribeQuickConnect")
DescribeRoutingProfile = Action("DescribeRoutingProfile")
DescribeSecurityProfile = Action("DescribeSecurityProfile")
DescribeTrafficDistributionGroup = Action("DescribeTrafficDistributionGroup")
DescribeUser = Action("DescribeUser")
DescribeUserHierarchyGroup = Action("DescribeUserHierarchyGroup")
DescribeUserHierarchyStructure = Action("DescribeUserHierarchyStructure")
DescribeVocabulary = Action("DescribeVocabulary")
DestroyInstance = Action("DestroyInstance")
DisassociateApprovedOrigin = Action("DisassociateApprovedOrigin")
DisassociateBot = Action("DisassociateBot")
DisassociateCustomerProfilesDomain = Action("DisassociateCustomerProfilesDomain")
DisassociateInstanceStorageConfig = Action("DisassociateInstanceStorageConfig")
DisassociateLambdaFunction = Action("DisassociateLambdaFunction")
DisassociateLexBot = Action("DisassociateLexBot")
DisassociatePhoneNumberContactFlow = Action("DisassociatePhoneNumberContactFlow")
DisassociateQueueQuickConnects = Action("DisassociateQueueQuickConnects")
DisassociateRoutingProfileQueues = Action("DisassociateRoutingProfileQueues")
DisassociateSecurityKey = Action("DisassociateSecurityKey")
DismissUserContact = Action("DismissUserContact")
GetContactAttributes = Action("GetContactAttributes")
GetCurrentMetricData = Action("GetCurrentMetricData")
GetCurrentUserData = Action("GetCurrentUserData")
GetFederationToken = Action("GetFederationToken")
GetFederationTokens = Action("GetFederationTokens")
GetMetricData = Action("GetMetricData")
GetTaskTemplate = Action("GetTaskTemplate")
GetTrafficDistribution = Action("GetTrafficDistribution")
ListAgentStatuses = Action("ListAgentStatuses")
ListApprovedOrigins = Action("ListApprovedOrigins")
ListBots = Action("ListBots")
ListContactFlowModules = Action("ListContactFlowModules")
ListContactFlows = Action("ListContactFlows")
ListContactReferences = Action("ListContactReferences")
ListDefaultVocabularies = Action("ListDefaultVocabularies")
ListHoursOfOperations = Action("ListHoursOfOperations")
ListInstanceAttributes = Action("ListInstanceAttributes")
ListInstanceStorageConfigs = Action("ListInstanceStorageConfigs")
ListInstances = Action("ListInstances")
ListIntegrationAssociations = Action("ListIntegrationAssociations")
ListLambdaFunctions = Action("ListLambdaFunctions")
ListLexBots = Action("ListLexBots")
ListPhoneNumbers = Action("ListPhoneNumbers")
ListPhoneNumbersV2 = Action("ListPhoneNumbersV2")
ListPrompts = Action("ListPrompts")
ListQueueQuickConnects = Action("ListQueueQuickConnects")
ListQueues = Action("ListQueues")
ListQuickConnects = Action("ListQuickConnects")
ListRealtimeContactAnalysisSegments = Action("ListRealtimeContactAnalysisSegments")
ListRoutingProfileQueues = Action("ListRoutingProfileQueues")
ListRoutingProfiles = Action("ListRoutingProfiles")
ListSecurityKeys = Action("ListSecurityKeys")
ListSecurityProfilePermissions = Action("ListSecurityProfilePermissions")
ListSecurityProfiles = Action("ListSecurityProfiles")
ListTagsForResource = Action("ListTagsForResource")
ListTaskTemplates = Action("ListTaskTemplates")
ListTrafficDistributionGroups = Action("ListTrafficDistributionGroups")
ListUseCases = Action("ListUseCases")
ListUserHierarchyGroups = Action("ListUserHierarchyGroups")
ListUsers = Action("ListUsers")
ModifyInstance = Action("ModifyInstance")
PutUserStatus = Action("PutUserStatus")
ReleasePhoneNumber = Action("ReleasePhoneNumber")
ReplicateInstance = Action("ReplicateInstance")
ResumeContactRecording = Action("ResumeContactRecording")
SearchAvailablePhoneNumbers = Action("SearchAvailablePhoneNumbers")
SearchQueues = Action("SearchQueues")
SearchRoutingProfiles = Action("SearchRoutingProfiles")
SearchSecurityProfiles = Action("SearchSecurityProfiles")
SearchUsers = Action("SearchUsers")
SearchVocabularies = Action("SearchVocabularies")
StartChatContact = Action("StartChatContact")
StartContactRecording = Action("StartContactRecording")
StartContactStreaming = Action("StartContactStreaming")
StartForecastingPlanningSchedulingIntegration = Action(
"StartForecastingPlanningSchedulingIntegration"
)
StartOutboundVoiceContact = Action("StartOutboundVoiceContact")
StartTaskContact = Action("StartTaskContact")
StopContact = Action("StopContact")
StopContactRecording = Action("StopContactRecording")
StopContactStreaming = Action("StopContactStreaming")
StopForecastingPlanningSchedulingIntegration = Action(
"StopForecastingPlanningSchedulingIntegration"
)
SuspendContactRecording = Action("SuspendContactRecording")
TagResource = Action("TagResource")
TransferContact = Action("TransferContact")
UntagResource = Action("UntagResource")
UpdateAgentStatus = Action("UpdateAgentStatus")
UpdateContact = Action("UpdateContact")
UpdateContactAttributes = Action("UpdateContactAttributes")
UpdateContactFlowContent = Action("UpdateContactFlowContent")
UpdateContactFlowMetadata = Action("UpdateContactFlowMetadata")
UpdateContactFlowModuleContent = Action("UpdateContactFlowModuleContent")
UpdateContactFlowModuleMetadata = Action("UpdateContactFlowModuleMetadata")
UpdateContactFlowName = Action("UpdateContactFlowName")
UpdateContactSchedule = Action("UpdateContactSchedule")
UpdateHoursOfOperation = Action("UpdateHoursOfOperation")
UpdateInstanceAttribute = Action("UpdateInstanceAttribute")
UpdateInstanceStorageConfig = Action("UpdateInstanceStorageConfig")
UpdatePhoneNumber = Action("UpdatePhoneNumber")
UpdateQueueHoursOfOperation = Action("UpdateQueueHoursOfOperation")
UpdateQueueMaxContacts = Action("UpdateQueueMaxContacts")
UpdateQueueName = Action("UpdateQueueName")
UpdateQueueOutboundCallerConfig = Action("UpdateQueueOutboundCallerConfig")
UpdateQueueStatus = Action("UpdateQueueStatus")
UpdateQuickConnectConfig = Action("UpdateQuickConnectConfig")
UpdateQuickConnectName = Action("UpdateQuickConnectName")
UpdateRoutingProfileConcurrency = Action("UpdateRoutingProfileConcurrency")
UpdateRoutingProfileDefaultOutboundQueue = Action(
"UpdateRoutingProfileDefaultOutboundQueue"
)
UpdateRoutingProfileName = Action("UpdateRoutingProfileName")
UpdateRoutingProfileQueues = Action("UpdateRoutingProfileQueues")
UpdateSecurityProfile = Action("UpdateSecurityProfile")
UpdateTaskTemplate = Action("UpdateTaskTemplate")
UpdateTrafficDistribution = Action("UpdateTrafficDistribution")
UpdateUserHierarchy = Action("UpdateUserHierarchy")
UpdateUserHierarchyGroupName = Action("UpdateUserHierarchyGroupName")
UpdateUserHierarchyStructure = Action("UpdateUserHierarchyStructure")
UpdateUserIdentityInfo = Action("UpdateUserIdentityInfo")
UpdateUserPhoneConfig = Action("UpdateUserPhoneConfig")
UpdateUserRoutingProfile = Action("UpdateUserRoutingProfile")
UpdateUserSecurityProfiles = Action("UpdateUserSecurityProfiles")
UpdatedescribeContent = Action("UpdatedescribeContent")
|
{
"content_hash": "8627c8be190853d76755b94ee2633ed2",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 88,
"avg_line_length": 51.55721393034826,
"alnum_prop": 0.8458940461256393,
"repo_name": "cloudtools/awacs",
"id": "53fab37fbfcc58bc196075ea1e5bcffd9f8705c4",
"size": "10479",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "awacs/connect.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "343"
},
{
"name": "Python",
"bytes": "963483"
}
],
"symlink_target": ""
}
|
from __future__ import division, unicode_literals
"""
This is essentially just a clone of the MPRester object in pymatgen with
slight modifications to work with MaterialsWeb.
This module provides classes to interface with the MaterialsWeb REST
API v2 to enable the creation of data structures and pymatgen objects using
MaterialsWeb data.
"""
import json
import warnings
from monty.json import MontyDecoder
from pymatgen.core.structure import Structure
__author__ = "Michael Ashton"
__copyright__ = "Copyright 2017, Henniggroup"
__maintainer__ = "Joshua J. Gabriel"
__email__ = "joshgabriel92@gmail.com"
__status__ = "Production"
__date__ = "March 3, 2017"
class MWRester(object):
"""
A class to conveniently interface with the MaterialsWeb REST
interface. The recommended way to use MWRester is with the "with" context
manager to ensure that sessions are properly closed after usage::
with MWRester("API_KEY") as m:
do_something
MWRester uses the "requests" package, which provides for HTTP connection
pooling. All connections are made via https for security.
Args:
api_key (str): A String API key for accessing the MaterialsWeb
REST interface. Please obtain your API key at
https://www.materialsweb.org.
endpoint (str): Url of endpoint to access the MaterialsWeb REST
interface. Defaults to the standard MaterialsWeb REST
address, but can be changed to other urls implementing a similar
interface.
"""
supported_properties = ("energy", "energy_per_atom", "volume",
"formation_energy_per_atom", "nsites",
"unit_cell_formula", "pretty_formula",
"is_hubbard", "elements", "nelements",
"e_above_hull", "hubbards", "is_compatible",
"spacegroup", "task_ids", "band_gap", "density",
"icsd_id", "icsd_ids", "cif", "total_magnetization",
"material_id", "oxide_type", "tags", "elasticity")
supported_task_properties = ("energy", "energy_per_atom", "volume",
"formation_energy_per_atom", "nsites",
"unit_cell_formula", "pretty_formula",
"is_hubbard",
"elements", "nelements", "e_above_hull",
"hubbards",
"is_compatible", "spacegroup",
"band_gap", "density", "icsd_id", "cif")
def __init__(self, api_key=None,
endpoint="https://2dmaterialsweb.org/rest"):
if api_key is not None:
self.api_key = api_key
else:
self.api_key = ""
self.preamble = endpoint
import requests
self.session = requests.Session()
self.session.headers = {"x-api-key": self.api_key}
def __enter__(self):
"""
Support for "with" context.
"""
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""
Support for "with" context.
"""
self.session.close()
def _make_request(self, sub_url, payload=None, method="GET",
mp_decode=True):
response = None
url = self.preamble + sub_url + "/" + self.api_key
try:
if method == "POST":
response = self.session.post(url, data=payload, verify=True)
else:
response = self.session.get(url, params=payload, verify=True)
if response.status_code in [200, 400]:
if mp_decode:
data = json.loads(response.text, cls=MontyDecoder)
else:
data = json.loads(response.text)
if data["valid_response"]:
if data.get("warning"):
warnings.warn(data["warning"])
return data["response"]
else:
raise MWRestError(data["error"])
raise MWRestError("REST query returned with error status code {}"
.format(response.status_code))
except Exception as ex:
msg = "{}. Content: {}".format(str(ex), response.content)\
if hasattr(response, "content") else str(ex)
raise MWRestError(msg)
def get_data(self, query, prop=""):
"""
Flexible method to get any data using the MaterialsWeb REST
interface. Generally used by other methods for more specific queries.
        Format of REST return is *always* a list of dicts (regardless of the
        number of pieces of data returned). The general format is as follows:
[{"material_id": material_id, "property_name" : value}, ...]
Args:
query (str): A chemical system (e.g., Li-Fe-O),
or formula (e.g., Fe2O3) or materials_id (e.g., mp-1234).
prop (str): Property to be obtained. Should be one of the
MWRester.supported_task_properties. Leave as empty string for a
general list of useful properties.
"""
sub_url = "/materials/%s" % (query)
if prop:
sub_url += "/" + prop
return self._make_request(sub_url)
def get_entry_by_material_id(self, material_id):
"""
"""
data = self.get_data(material_id)
return data
def get_structure_by_material_id(self, material_id, final=True):
"""
Get a Structure corresponding to a material_id.
Args:
material_id (str): MaterialsWeb material_id (a string,
e.g., mp-1234).
final (bool): Whether to get the final structure, or the initial
(pre-relaxation) structure. Defaults to True.
Returns:
Structure object.
"""
prop = "final_structure" if final else "initial_structure"
data = self.get_data(material_id)
return Structure.from_str(data[0][prop], fmt="json")
def get_all_structures(self, final=True):
"""
Download all MaterialsWeb structures as pymatgen Structure
objects.
Args:
final (bool): Whether to get the final structures, or the initial
(pre-relaxation) structures. Defaults to True.
Returns:
List of Structure objects.
"""
prop = "final_structure" if final else "initial_structure"
data = self.get_data("all")
structures = [Structure.from_str(data[i][prop], fmt="json") for i in
range(len(data))]
return structures
def get_all_material_ids(self):
"""
Return a list of all active material ID's.
Returns:
material ID's (list).
"""
data = self.get_data(query="all", prop="ids")
return data
def get_maximum_material_id(self):
"""
Return the highest material ID currently in the database.
Useful for indexing new materials to add.
Returns:
material ID (str): e.g. mw-745.
"""
data = self.get_data(query="all", prop="ids")
return data[-1]
class MWRestError(Exception):
"""
Exception class for MWRestAdaptor.
Raised when the query has problems, e.g., bad query format.
"""
pass
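# A minimal usage sketch (the API key and material id are illustrative; the
# calls hit the MaterialsWeb endpoint over the network):
#
# with MWRester("MY_API_KEY") as mw:
#     structure = mw.get_structure_by_material_id("mw-745")
#     material_ids = mw.get_all_material_ids()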
|
{
"content_hash": "24dfd3653ef21940786f68d3a1921108",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 80,
"avg_line_length": 36.6,
"alnum_prop": 0.5568439290950287,
"repo_name": "joshgabriel/MPInterfaces",
"id": "2c599b3d4bb757ebbb85ba2a62a2331595135a73",
"size": "7503",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mpinterfaces/rest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "514956"
},
{
"name": "Shell",
"bytes": "3252"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
from functools import partial
from django.http import Http404
from waffle import switch_is_active, flag_is_active, sample_is_active
class BaseWaffleMixin:
def validate_waffle(self, waffle, func):
if waffle.startswith('!'):
active = not func(waffle[1:])
else:
active = func(waffle)
return active
def invalid_waffle(self):
raise Http404('Inactive waffle')
class WaffleFlagMixin(BaseWaffleMixin):
"""
    Checks that a flag is active, or raises a 404. Operates like the FBV
    decorator waffle_flag.
"""
waffle_flag: str | None = None
def dispatch(self, request, *args, **kwargs):
func = partial(flag_is_active, request)
active = self.validate_waffle(self.waffle_flag, func)
if not active:
return self.invalid_waffle()
return super().dispatch(request, *args, **kwargs)
class WaffleSampleMixin(BaseWaffleMixin):
"""
    Checks that a sample is active, or raises a 404. Operates like the FBV
    decorator waffle_sample.
"""
waffle_sample: str | None = None
def dispatch(self, request, *args, **kwargs):
active = self.validate_waffle(self.waffle_sample, sample_is_active)
if not active:
return self.invalid_waffle()
return super().dispatch(request, *args, **kwargs)
class WaffleSwitchMixin(BaseWaffleMixin):
"""
    Checks that a switch is active, or raises a 404. Operates like the FBV
    decorator waffle_switch.
"""
waffle_switch: str | None = None
def dispatch(self, request, *args, **kwargs):
active = self.validate_waffle(self.waffle_switch, switch_is_active)
if not active:
return self.invalid_waffle()
return super().dispatch(request, *args, **kwargs)
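# A hedged usage sketch (the view, flag name and template below are
# illustrative assumptions, not part of this module):
from django.views.generic import TemplateView
class BetaDashboardView(WaffleFlagMixin, TemplateView):
    # Prefix the flag name with '!' to invert the check (404 when active).
    waffle_flag = 'beta-dashboard'
    template_name = 'dashboard.html'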
|
{
"content_hash": "5319d8947a43388e42ae08073a70b8f9",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 76,
"avg_line_length": 25.27777777777778,
"alnum_prop": 0.6434065934065935,
"repo_name": "rsalmaso/django-waffle",
"id": "1896ca3e762025d98642c86585f12d73bcdb9112",
"size": "1820",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "waffle/mixins.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "929"
},
{
"name": "JavaScript",
"bytes": "1250"
},
{
"name": "Python",
"bytes": "171291"
},
{
"name": "Shell",
"bytes": "1130"
}
],
"symlink_target": ""
}
|
"""
workplane
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from OpenGL import GL
from mcedit2.rendering.scenegraph import scenenode
from mcedit2.rendering.scenegraph.matrix import TranslateNode
from mcedit2.rendering.scenegraph.vertex_array import VertexNode
from mcedit2.rendering.vertexarraybuffer import VertexArrayBuffer
log = logging.getLogger(__name__)
class WorkplaneNode(scenenode.Node):
def __init__(self):
super(WorkplaneNode, self).__init__()
self.translateNode = TranslateNode()
self.addChild(self.translateNode)
self.axis = 1
vertexNode = None
_axis = 1
@property
def axis(self):
return self._axis
@axis.setter
def axis(self, axis):
self._axis = axis
self.dirty = True
gridSize = 64
left = -gridSize//2
right = gridSize//2
gridArrayBuffer = VertexArrayBuffer((gridSize * 4,),
GL.GL_LINES, textures=False, lights=False)
gridArrayBuffer.rgba[:] = 255, 255, 255, 100
        # plane sits at 0 along its axis; reposition it by translating
gridArrayBuffer.vertex[:, axis] = 0
axis1 = (axis-1) % 3
axis2 = (axis+1) % 3
# left edge
gridArrayBuffer.vertex[0:gridSize*2:2, axis2] = left
gridArrayBuffer.vertex[0:gridSize*2:2, axis1] = range(left, right)
# right edge
gridArrayBuffer.vertex[1:gridSize*2:2, axis2] = right-1
gridArrayBuffer.vertex[1:gridSize*2:2, axis1] = range(left, right)
# bottom edge
gridArrayBuffer.vertex[gridSize*2::2, axis1] = left
gridArrayBuffer.vertex[gridSize*2::2, axis2] = range(left, right)
# top edge
gridArrayBuffer.vertex[gridSize*2+1::2, axis1] = right-1
gridArrayBuffer.vertex[gridSize*2+1::2, axis2] = range(left, right)
if self.vertexNode:
self.translateNode.removeChild(self.vertexNode)
self.vertexNode = VertexNode([gridArrayBuffer])
self.translateNode.addChild(self.vertexNode)
@property
def position(self):
return self.translateNode.translateOffset
@position.setter
def position(self, value):
self.translateNode.translateOffset = value
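# A minimal usage sketch (the parent scene node is a hypothetical assumption,
# not part of this module):
#
# plane = WorkplaneNode()
# plane.axis = 0               # rebuild the 64x64 grid perpendicular to x
# plane.position = (0, 64, 0)  # slide the grid via the embedded TranslateNode
# parentNode.addChild(plane)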
|
{
"content_hash": "3dcadff7a415a0f77c3bfd2a490ae55c",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 86,
"avg_line_length": 28.22222222222222,
"alnum_prop": 0.6408573928258967,
"repo_name": "vorburger/mcedit2",
"id": "886cd08e38a57b1c642009401611a813495ffd69",
"size": "2286",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/mcedit2/rendering/workplane.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "8578"
},
{
"name": "Makefile",
"bytes": "156"
},
{
"name": "Python",
"bytes": "1639144"
}
],
"symlink_target": ""
}
|
"""Tests for swift.obj.server"""
import six.moves.cPickle as pickle
import datetime
import json
import errno
import operator
import os
import mock
import six
from six import StringIO
import unittest
import math
import random
from shutil import rmtree
from time import gmtime, strftime, time, struct_time
from tempfile import mkdtemp
from hashlib import md5
from collections import defaultdict
from contextlib import contextmanager
from textwrap import dedent
from eventlet import sleep, spawn, wsgi, Timeout, tpool, greenthread
from eventlet.green import httplib
from swift import __version__ as swift_version
from swift.common.http import is_success
from test import listen_zero
from test.unit import FakeLogger, debug_logger, mocked_http_conn, \
make_timestamp_iter, DEFAULT_TEST_EC_TYPE, skip_if_no_xattrs, \
connect_tcp, readuntil2crlfs, patch_policies, encode_frag_archive_bodies, \
mock_check_drive
from swift.obj import server as object_server
from swift.obj import updater
from swift.obj import diskfile
from swift.common import utils, bufferedhttp
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \
NullLogger, storage_directory, public, replication, encode_timestamps, \
Timestamp
from swift.common import constraints
from swift.common.swob import Request, WsgiBytesIO
from swift.common.splice import splice
from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy,
POLICIES, EC_POLICY)
from swift.common.exceptions import DiskFileDeviceUnavailable, \
DiskFileNoSpace, DiskFileQuarantined
from swift.common.wsgi import init_request_processor
def mock_time(*args, **kwargs):
return 5000.0
test_policies = [
StoragePolicy(0, name='zero', is_default=True),
ECStoragePolicy(1, name='one', ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=10, ec_nparity=4),
]
@contextmanager
def fake_spawn():
"""
Spawn and capture the result so we can later wait on it. This means we can
test code executing in a greenthread but still wait() on the result to
ensure that the method has completed.
"""
greenlets = []
def _inner_fake_spawn(func, *a, **kw):
gt = greenthread.spawn(func, *a, **kw)
greenlets.append(gt)
return gt
object_server.spawn = _inner_fake_spawn
with mock.patch('swift.obj.server.spawn', _inner_fake_spawn):
try:
yield
finally:
for gt in greenlets:
gt.wait()
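# A minimal usage sketch (illustrative): a test drives code that spawns
# greenthreads, and exiting the context wait()s on each captured greenthread
# before any assertions run:
#
# with fake_spawn():
#     resp = req.get_response(self.object_controller)
# self.assertEqual(resp.status_int, 201)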
class TestTpoolSize(unittest.TestCase):
def test_default_config(self):
with mock.patch('eventlet.tpool.set_num_threads') as mock_snt:
object_server.ObjectController({})
self.assertEqual([], mock_snt.mock_calls)
def test_explicit_setting(self):
conf = {'eventlet_tpool_num_threads': '17'}
with mock.patch('eventlet.tpool.set_num_threads') as mock_snt:
object_server.ObjectController(conf)
self.assertEqual([mock.call(17)], mock_snt.mock_calls)
def test_servers_per_port_no_explicit_setting(self):
conf = {'servers_per_port': '3'}
with mock.patch('eventlet.tpool.set_num_threads') as mock_snt:
object_server.ObjectController(conf)
self.assertEqual([mock.call(1)], mock_snt.mock_calls)
def test_servers_per_port_with_explicit_setting(self):
conf = {'eventlet_tpool_num_threads': '17',
'servers_per_port': '3'}
with mock.patch('eventlet.tpool.set_num_threads') as mock_snt:
object_server.ObjectController(conf)
self.assertEqual([mock.call(17)], mock_snt.mock_calls)
def test_servers_per_port_empty(self):
# run_wsgi is robust to this, so we should be too
conf = {'servers_per_port': ''}
with mock.patch('eventlet.tpool.set_num_threads') as mock_snt:
object_server.ObjectController(conf)
self.assertEqual([], mock_snt.mock_calls)
@patch_policies(test_policies)
class TestObjectController(unittest.TestCase):
"""Test swift.obj.server.ObjectController"""
def setUp(self):
"""Set up for testing swift.object.server.ObjectController"""
skip_if_no_xattrs()
utils.HASH_PATH_SUFFIX = 'endcap'
utils.HASH_PATH_PREFIX = 'startcap'
self.tmpdir = mkdtemp()
self.testdir = os.path.join(self.tmpdir,
'tmp_test_object_server_ObjectController')
mkdirs(os.path.join(self.testdir, 'sda1'))
self.conf = {'devices': self.testdir, 'mount_check': 'false',
'container_update_timeout': 0.0}
self.object_controller = object_server.ObjectController(
self.conf, logger=debug_logger())
self.object_controller.bytes_per_sync = 1
self._orig_tpool_exc = tpool.execute
tpool.execute = lambda f, *args, **kwargs: f(*args, **kwargs)
self.df_mgr = diskfile.DiskFileManager(self.conf,
self.object_controller.logger)
self.logger = debug_logger('test-object-controller')
self.ts = make_timestamp_iter()
self.ec_policies = [p for p in POLICIES if p.policy_type == EC_POLICY]
def tearDown(self):
"""Tear down for testing swift.object.server.ObjectController"""
rmtree(self.tmpdir)
tpool.execute = self._orig_tpool_exc
def _stage_tmp_dir(self, policy):
mkdirs(os.path.join(self.testdir, 'sda1',
diskfile.get_tmp_dir(policy)))
def iter_policies(self):
for policy in POLICIES:
self.policy = policy
yield policy
def check_all_api_methods(self, obj_name='o', alt_res=None):
path = '/sda1/p/a/c/%s' % obj_name
body = 'SPECIAL_STRING'
op_table = {
"PUT": (body, alt_res or 201, ''), # create one
"GET": ('', alt_res or 200, body), # check it
"POST": ('', alt_res or 202, ''), # update it
"HEAD": ('', alt_res or 200, ''), # head it
"DELETE": ('', alt_res or 204, '') # delete it
}
for method in ["PUT", "GET", "POST", "HEAD", "DELETE"]:
in_body, res, out_body = op_table[method]
timestamp = normalize_timestamp(time())
req = Request.blank(
path, environ={'REQUEST_METHOD': method},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test'})
req.body = in_body
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, res)
if out_body and (200 <= res < 300):
self.assertEqual(resp.body, out_body)
def test_REQUEST_SPECIAL_CHARS(self):
obj = 'special昆%20/%'
self.check_all_api_methods(obj)
def test_device_unavailable(self):
def raise_disk_unavail(*args, **kwargs):
raise DiskFileDeviceUnavailable()
self.object_controller.get_diskfile = raise_disk_unavail
self.check_all_api_methods(alt_res=507)
def test_allowed_headers(self):
dah = ['content-disposition', 'content-encoding', 'x-delete-at',
'x-object-manifest', 'x-static-large-object']
conf = {'devices': self.testdir, 'mount_check': 'false',
'allowed_headers': ','.join(['content-length'] + dah)}
self.object_controller = object_server.ObjectController(
conf, logger=debug_logger())
self.assertEqual(self.object_controller.allowed_headers, set(dah))
def test_POST_update_meta(self):
# Test swift.obj.server.ObjectController.POST
original_headers = self.object_controller.allowed_headers
test_headers = 'content-encoding foo bar'.split()
self.object_controller.allowed_headers = set(test_headers)
put_timestamp = normalize_timestamp(time())
headers = {'X-Timestamp': put_timestamp,
'Content-Type': 'application/x-test',
'Foo': 'fooheader',
'Baz': 'bazheader',
'X-Object-Sysmeta-Color': 'blue',
'X-Object-Transient-Sysmeta-Shape': 'circle',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'}
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
req.body = 'VERIFY'
etag = '"%s"' % md5('VERIFY').hexdigest()
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertEqual(dict(resp.headers), {
'Content-Type': 'text/html; charset=UTF-8',
'Content-Length': str(len(resp.body)),
'Etag': etag,
})
post_timestamp = normalize_timestamp(time())
headers = {'X-Timestamp': post_timestamp,
'X-Object-Meta-3': 'Three',
'X-Object-Meta-4': 'Four',
'Content-Encoding': 'gzip',
'Foo': 'fooheader',
'Bar': 'barheader',
'Content-Type': 'application/x-test'}
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers=headers)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(dict(resp.headers), {
'Content-Type': 'text/html; charset=UTF-8',
'Content-Length': str(len(resp.body)),
'X-Object-Sysmeta-Color': 'blue',
})
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
expected_headers = {
'Content-Type': 'application/x-test',
'Content-Length': '6',
'Etag': etag,
'X-Object-Sysmeta-Color': 'blue',
'X-Object-Meta-3': 'Three',
'X-Object-Meta-4': 'Four',
'Foo': 'fooheader',
'Bar': 'barheader',
'Content-Encoding': 'gzip',
'X-Backend-Timestamp': post_timestamp,
'X-Timestamp': post_timestamp,
'X-Backend-Data-Timestamp': put_timestamp,
'X-Backend-Durable-Timestamp': put_timestamp,
'Last-Modified': strftime(
'%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(post_timestamp)))),
}
self.assertEqual(dict(resp.headers), expected_headers)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(dict(resp.headers), expected_headers)
post_timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': post_timestamp,
'X-Object-Sysmeta-Color': 'red',
'Content-Type': 'application/x-test'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(dict(resp.headers), {
'Content-Type': 'text/html; charset=UTF-8',
'Content-Length': str(len(resp.body)),
'X-Object-Sysmeta-Color': 'blue',
})
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
self.assertEqual(dict(resp.headers), {
'Content-Type': 'application/x-test',
'Content-Length': '6',
'Etag': etag,
'X-Object-Sysmeta-Color': 'blue',
'X-Backend-Timestamp': post_timestamp,
'X-Timestamp': post_timestamp,
'X-Backend-Data-Timestamp': put_timestamp,
'X-Backend-Durable-Timestamp': put_timestamp,
'Last-Modified': strftime(
'%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(post_timestamp)))),
})
# test defaults
self.object_controller.allowed_headers = original_headers
put_timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': put_timestamp,
'Content-Type': 'application/x-test',
'Foo': 'fooheader',
'X-Object-Sysmeta-Color': 'red',
'X-Object-Meta-1': 'One',
'X-Object-Manifest': 'c/bar',
'Content-Encoding': 'gzip',
'Content-Disposition': 'bar',
'X-Static-Large-Object': 'True',
})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertEqual(dict(resp.headers), {
'Content-Type': 'text/html; charset=UTF-8',
'Content-Length': str(len(resp.body)),
'Etag': etag,
})
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(dict(resp.headers), {
'Content-Type': 'application/x-test',
'Content-Length': '6',
'Etag': etag,
'X-Object-Sysmeta-Color': 'red',
'X-Object-Meta-1': 'One',
'Content-Encoding': 'gzip',
'X-Object-Manifest': 'c/bar',
'Content-Disposition': 'bar',
'X-Static-Large-Object': 'True',
'X-Backend-Timestamp': put_timestamp,
'X-Timestamp': put_timestamp,
'X-Backend-Data-Timestamp': put_timestamp,
'X-Backend-Durable-Timestamp': put_timestamp,
'Last-Modified': strftime(
'%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(put_timestamp)))),
})
post_timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': post_timestamp,
'X-Object-Meta-3': 'Three',
'Foo': 'fooheader',
'Content-Type': 'application/x-test'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(dict(resp.headers), {
'Content-Type': 'text/html; charset=UTF-8',
'Content-Length': str(len(resp.body)),
'X-Object-Sysmeta-Color': 'red',
})
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
self.assertEqual(dict(resp.headers), {
'Content-Type': 'application/x-test',
'Content-Length': '6',
'Etag': etag,
'X-Object-Sysmeta-Color': 'red',
'X-Object-Meta-3': 'Three',
'X-Static-Large-Object': 'True',
'X-Backend-Timestamp': post_timestamp,
'X-Timestamp': post_timestamp,
'X-Backend-Data-Timestamp': put_timestamp,
'X-Backend-Durable-Timestamp': put_timestamp,
'Last-Modified': strftime(
'%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(post_timestamp)))),
})
# Test for empty metadata
post_timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': post_timestamp,
'Content-Type': 'application/x-test',
'X-Object-Meta-3': ''})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(dict(resp.headers), {
'Content-Type': 'text/html; charset=UTF-8',
'Content-Length': str(len(resp.body)),
'X-Object-Sysmeta-Color': 'red',
})
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
self.assertEqual(dict(resp.headers), {
'Content-Type': 'application/x-test',
'Content-Length': '6',
'Etag': etag,
'X-Object-Sysmeta-Color': 'red',
'X-Object-Meta-3': '',
'X-Static-Large-Object': 'True',
'X-Backend-Timestamp': post_timestamp,
'X-Timestamp': post_timestamp,
'X-Backend-Data-Timestamp': put_timestamp,
'X-Backend-Durable-Timestamp': put_timestamp,
'Last-Modified': strftime(
'%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(post_timestamp)))),
})
def test_POST_old_timestamp(self):
ts = time()
orig_timestamp = utils.Timestamp(ts).internal
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': orig_timestamp,
'Content-Type': 'application/x-test',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# Same timestamp should result in 409
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': orig_timestamp,
'X-Object-Meta-3': 'Three',
'X-Object-Meta-4': 'Four',
'Content-Encoding': 'gzip',
'Content-Type': 'application/x-test'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
# Earlier timestamp should result in 409
timestamp = normalize_timestamp(ts - 1)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp,
'X-Object-Meta-5': 'Five',
'X-Object-Meta-6': 'Six',
'Content-Encoding': 'gzip',
'Content-Type': 'application/x-test'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
def test_POST_conflicts_with_later_POST(self):
t_put = next(self.ts).internal
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': t_put,
'Content-Length': 0,
'Content-Type': 'plain/text'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
t_post1 = next(self.ts).internal
t_post2 = next(self.ts).internal
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': t_post2})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': t_post1})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
obj_dir = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')))
ts_file = os.path.join(obj_dir, t_post2 + '.meta')
self.assertTrue(os.path.isfile(ts_file))
meta_file = os.path.join(obj_dir, t_post1 + '.meta')
self.assertFalse(os.path.isfile(meta_file))
def test_POST_not_exist(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/fail',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp,
'X-Object-Meta-1': 'One',
'X-Object-Meta-2': 'Two',
'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
def test_POST_invalid_path(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp,
'X-Object-Meta-1': 'One',
'X-Object-Meta-2': 'Two',
'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_POST_no_timestamp(self):
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Object-Meta-1': 'One',
'X-Object-Meta-2': 'Two',
'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_POST_bad_timestamp(self):
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': 'bad',
'X-Object-Meta-1': 'One',
'X-Object-Meta-2': 'Two',
'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_POST_container_connection(self):
# Test that POST does call container_update and returns success
        # whether the update to the container server succeeds or fails
def mock_http_connect(calls, response, with_exc=False):
class FakeConn(object):
def __init__(self, calls, status, with_exc):
self.calls = calls
self.status = status
self.reason = 'Fake'
self.host = '1.2.3.4'
self.port = '1234'
self.with_exc = with_exc
def getresponse(self):
calls[0] += 1
if self.with_exc:
raise Exception('test')
return self
def read(self, amt=None):
return ''
return lambda *args, **kwargs: FakeConn(calls, response, with_exc)
ts = time()
timestamp = normalize_timestamp(ts)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Content-Length': '0'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(ts + 1),
'X-Container-Host': '1.2.3.4:0',
'X-Container-Partition': '3',
'X-Container-Device': 'sda1',
'X-Container-Timestamp': '1',
'Content-Type': 'application/new1'})
calls = [0]
with mock.patch.object(object_server, 'http_connect',
mock_http_connect(calls, 202)):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(ts + 2),
'X-Container-Host': '1.2.3.4:0',
'X-Container-Partition': '3',
'X-Container-Device': 'sda1',
'X-Container-Timestamp': '1',
'Content-Type': 'application/new1'})
calls = [0]
with mock.patch.object(object_server, 'http_connect',
mock_http_connect(calls, 202, with_exc=True)):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(ts + 3),
'X-Container-Host': '1.2.3.4:0',
'X-Container-Partition': '3',
'X-Container-Device': 'sda1',
'X-Container-Timestamp': '1',
'Content-Type': 'application/new2'})
calls = [0]
with mock.patch.object(object_server, 'http_connect',
mock_http_connect(calls, 500)):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
def _test_POST_container_updates(self, policy, update_etag=None):
# Test that POST requests result in correct calls to container_update
t = [next(self.ts) for _ in range(0, 5)]
calls_made = []
update_etag = update_etag or '098f6bcd4621d373cade4e832627b4f6'
def mock_container_update(ctlr, op, account, container, obj, request,
headers_out, objdevice, policy):
calls_made.append((headers_out, policy))
body = 'test'
headers = {
'X-Timestamp': t[1].internal,
'Content-Type': 'application/octet-stream;swift_bytes=123456789',
'X-Backend-Storage-Policy-Index': int(policy)}
if policy.policy_type == EC_POLICY:
# EC fragments will typically have a different size to the body and
# for small bodies the fragments may be longer. For this test all
# that matters is that the fragment and body lengths differ.
body = body + 'ec_overhead'
headers['X-Backend-Container-Update-Override-Etag'] = update_etag
headers['X-Backend-Container-Update-Override-Size'] = '4'
headers['X-Object-Sysmeta-Ec-Etag'] = update_etag
headers['X-Object-Sysmeta-Ec-Content-Length'] = '4'
headers['X-Object-Sysmeta-Ec-Frag-Index'] = 2
headers['Content-Length'] = str(len(body))
req = Request.blank('/sda1/p/a/c/o', body=body,
environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
with mock.patch('swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertEqual(1, len(calls_made))
expected_headers = HeaderKeyDict({
'x-size': '4',
'x-content-type': 'application/octet-stream;swift_bytes=123456789',
'x-timestamp': t[1].internal,
'x-etag': update_etag})
self.assertDictEqual(expected_headers, calls_made[0][0])
self.assertEqual(policy, calls_made[0][1])
# POST with no metadata newer than the data should return 409,
# container update not expected
calls_made = []
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': t[0].internal,
'X-Backend-Storage-Policy-Index': int(policy)})
with mock.patch('swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(resp.headers['x-backend-timestamp'],
t[1].internal)
self.assertEqual(0, len(calls_made))
# POST with newer metadata returns success and container update
# is expected
calls_made = []
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': t[3].internal,
'X-Backend-Storage-Policy-Index': int(policy)})
with mock.patch('swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(1, len(calls_made))
expected_headers = HeaderKeyDict({
'x-size': '4',
'x-content-type': 'application/octet-stream;swift_bytes=123456789',
'x-timestamp': t[1].internal,
'x-content-type-timestamp': t[1].internal,
'x-meta-timestamp': t[3].internal,
'x-etag': update_etag})
self.assertDictEqual(expected_headers, calls_made[0][0])
self.assertEqual(policy, calls_made[0][1])
# POST with no metadata newer than existing metadata should return
# 409, container update not expected
calls_made = []
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': t[2].internal,
'X-Backend-Storage-Policy-Index': int(policy)})
with mock.patch('swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(resp.headers['x-backend-timestamp'],
t[3].internal)
self.assertEqual(0, len(calls_made))
        # POST with newer content-type but older metadata returns success
        # and container update is expected; the newer content-type should
        # have the existing swift_bytes parameter appended
calls_made = []
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={
'X-Timestamp': t[2].internal,
'Content-Type': 'text/plain',
'Content-Type-Timestamp': t[2].internal,
'X-Backend-Storage-Policy-Index': int(policy)
})
with mock.patch('swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(1, len(calls_made))
expected_headers = HeaderKeyDict({
'x-size': '4',
'x-content-type': 'text/plain;swift_bytes=123456789',
'x-timestamp': t[1].internal,
'x-content-type-timestamp': t[2].internal,
'x-meta-timestamp': t[3].internal,
'x-etag': update_etag})
self.assertDictEqual(expected_headers, calls_made[0][0])
self.assertEqual(policy, calls_made[0][1])
# POST with older content-type but newer metadata returns success
# and container update is expected
calls_made = []
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={
'X-Timestamp': t[4].internal,
'Content-Type': 'older',
'Content-Type-Timestamp': t[1].internal,
'X-Backend-Storage-Policy-Index': int(policy)
})
with mock.patch('swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(1, len(calls_made))
expected_headers = HeaderKeyDict({
'x-size': '4',
'x-content-type': 'text/plain;swift_bytes=123456789',
'x-timestamp': t[1].internal,
'x-content-type-timestamp': t[2].internal,
'x-meta-timestamp': t[4].internal,
'x-etag': update_etag})
self.assertDictEqual(expected_headers, calls_made[0][0])
self.assertEqual(policy, calls_made[0][1])
# POST with same-time content-type and metadata returns 409
# and no container update is expected
calls_made = []
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={
'X-Timestamp': t[4].internal,
'Content-Type': 'ignored',
'Content-Type-Timestamp': t[2].internal,
'X-Backend-Storage-Policy-Index': int(policy)
})
with mock.patch('swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(0, len(calls_made))
# POST with implicit newer content-type but older metadata
# returns success and container update is expected,
# update reports existing metadata timestamp
calls_made = []
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={
'X-Timestamp': t[3].internal,
'Content-Type': 'text/newer',
'X-Backend-Storage-Policy-Index': int(policy)
})
with mock.patch('swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(1, len(calls_made))
expected_headers = HeaderKeyDict({
'x-size': '4',
'x-content-type': 'text/newer;swift_bytes=123456789',
'x-timestamp': t[1].internal,
'x-content-type-timestamp': t[3].internal,
'x-meta-timestamp': t[4].internal,
'x-etag': update_etag})
self.assertDictEqual(expected_headers, calls_made[0][0])
self.assertEqual(policy, calls_made[0][1])
def test_POST_container_updates_with_replication_policy(self):
self._test_POST_container_updates(POLICIES[0])
def test_POST_container_updates_with_EC_policy(self):
self._test_POST_container_updates(
POLICIES[1], update_etag='override_etag')
def test_POST_container_updates_precedence(self):
# Verify correct etag and size being sent with container updates for a
# PUT and for a subsequent POST.
def do_test(body, headers, policy):
def mock_container_update(ctlr, op, account, container, obj, req,
headers_out, objdevice, policy):
calls_made.append((headers_out, policy))
calls_made = []
ts_put = next(self.ts)
# make PUT with given headers and verify correct etag is sent in
# container update
headers.update({
'Content-Type':
'application/octet-stream;swift_bytes=123456789',
'X-Backend-Storage-Policy-Index': int(policy),
'X-Object-Sysmeta-Ec-Frag-Index': 2,
'X-Timestamp': ts_put.internal,
'Content-Length': len(body)})
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers=headers, body=body)
with mock.patch(
'swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertEqual(1, len(calls_made))
expected_headers = HeaderKeyDict({
'x-size': '4',
'x-content-type':
'application/octet-stream;swift_bytes=123456789',
'x-timestamp': ts_put.internal,
'x-etag': 'expected'})
self.assertDictEqual(expected_headers, calls_made[0][0])
self.assertEqual(policy, calls_made[0][1])
# make a POST and verify container update has the same etag
calls_made = []
ts_post = next(self.ts)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': ts_post.internal,
'X-Backend-Storage-Policy-Index': int(policy)})
with mock.patch(
'swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(1, len(calls_made))
expected_headers.update({
'x-content-type-timestamp': ts_put.internal,
'x-meta-timestamp': ts_post.internal})
self.assertDictEqual(expected_headers, calls_made[0][0])
self.assertEqual(policy, calls_made[0][1])
# sanity check - EC headers are ok
headers = {
'X-Backend-Container-Update-Override-Etag': 'expected',
'X-Backend-Container-Update-Override-Size': '4',
'X-Object-Sysmeta-Ec-Etag': 'expected',
'X-Object-Sysmeta-Ec-Content-Length': '4'}
do_test('test ec frag longer than 4', headers, POLICIES[1])
# middleware overrides take precedence over EC/older overrides
headers = {
'X-Backend-Container-Update-Override-Etag': 'unexpected',
'X-Backend-Container-Update-Override-Size': '3',
'X-Object-Sysmeta-Ec-Etag': 'unexpected',
'X-Object-Sysmeta-Ec-Content-Length': '3',
'X-Object-Sysmeta-Container-Update-Override-Etag': 'expected',
'X-Object-Sysmeta-Container-Update-Override-Size': '4'}
do_test('test ec frag longer than 4', headers, POLICIES[1])
# overrides with replication policy
headers = {
'X-Object-Sysmeta-Container-Update-Override-Etag': 'expected',
'X-Object-Sysmeta-Container-Update-Override-Size': '4'}
do_test('longer than 4', headers, POLICIES[0])
# middleware overrides take precedence over EC/older overrides with
# replication policy
headers = {
'X-Backend-Container-Update-Override-Etag': 'unexpected',
'X-Backend-Container-Update-Override-Size': '3',
'X-Object-Sysmeta-Container-Update-Override-Etag': 'expected',
'X-Object-Sysmeta-Container-Update-Override-Size': '4'}
do_test('longer than 4', headers, POLICIES[0])
def _test_PUT_then_POST_async_pendings(self, policy, update_etag=None):
# Test that PUT and POST requests result in distinct async pending
# files when sync container update fails.
def fake_http_connect(*args):
raise Exception('test')
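        # http_connect raising here makes the synchronous container update
        # fail, so the object server should fall back to writing an
        # async_pending file for each request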
device_dir = os.path.join(self.testdir, 'sda1')
t_put = next(self.ts)
update_etag = update_etag or '098f6bcd4621d373cade4e832627b4f6'
put_headers = {
'X-Trans-Id': 'put_trans_id',
'X-Timestamp': t_put.internal,
'Content-Type': 'application/octet-stream;swift_bytes=123456789',
'Content-Length': '4',
'X-Backend-Storage-Policy-Index': int(policy),
'X-Container-Host': 'chost:cport',
'X-Container-Partition': 'cpartition',
'X-Container-Device': 'cdevice'}
if policy.policy_type == EC_POLICY:
put_headers.update({
'X-Object-Sysmeta-Ec-Frag-Index': '2',
'X-Backend-Container-Update-Override-Etag': update_etag,
'X-Object-Sysmeta-Ec-Etag': update_etag})
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers=put_headers, body='test')
with mock.patch('swift.obj.server.http_connect', fake_http_connect), \
mock.patch('swift.common.utils.HASH_PATH_PREFIX', ''), \
fake_spawn():
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
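        # async pending files are written under
        # <device>/<async dir>/<suffix>/<object hash>-<timestamp>; the hash
        # below corresponds to hash_path('a', 'c', 'o') with
        # HASH_PATH_PREFIX mocked to '' above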
async_pending_file_put = os.path.join(
device_dir, diskfile.get_async_dir(policy), 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-%s' % t_put.internal)
self.assertTrue(os.path.isfile(async_pending_file_put),
'Expected %s to be a file but it is not.'
% async_pending_file_put)
expected_put_headers = {
'Referer': 'PUT http://localhost/sda1/p/a/c/o',
'X-Trans-Id': 'put_trans_id',
'X-Timestamp': t_put.internal,
'X-Content-Type': 'application/octet-stream;swift_bytes=123456789',
'X-Size': '4',
'X-Etag': '098f6bcd4621d373cade4e832627b4f6',
'User-Agent': 'object-server %s' % os.getpid(),
'X-Backend-Storage-Policy-Index': '%d' % int(policy)}
if policy.policy_type == EC_POLICY:
expected_put_headers['X-Etag'] = update_etag
self.assertDictEqual(
pickle.load(open(async_pending_file_put)),
{'headers': expected_put_headers,
'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
# POST with newer metadata returns success and container update
# is expected
t_post = next(self.ts)
post_headers = {
'X-Trans-Id': 'post_trans_id',
'X-Timestamp': t_post.internal,
'Content-Type': 'application/other',
'X-Backend-Storage-Policy-Index': int(policy),
'X-Container-Host': 'chost:cport',
'X-Container-Partition': 'cpartition',
'X-Container-Device': 'cdevice'}
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers=post_headers)
with mock.patch('swift.obj.server.http_connect', fake_http_connect), \
mock.patch('swift.common.utils.HASH_PATH_PREFIX', ''), \
fake_spawn():
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.maxDiff = None
# check async pending file for PUT is still intact
self.assertDictEqual(
pickle.load(open(async_pending_file_put)),
{'headers': expected_put_headers,
'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
# check distinct async pending file for POST
async_pending_file_post = os.path.join(
device_dir, diskfile.get_async_dir(policy), 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-%s' % t_post.internal)
self.assertTrue(os.path.isfile(async_pending_file_post),
'Expected %s to be a file but it is not.'
% async_pending_file_post)
expected_post_headers = {
'Referer': 'POST http://localhost/sda1/p/a/c/o',
'X-Trans-Id': 'post_trans_id',
'X-Timestamp': t_put.internal,
'X-Content-Type': 'application/other;swift_bytes=123456789',
'X-Size': '4',
'X-Etag': '098f6bcd4621d373cade4e832627b4f6',
'User-Agent': 'object-server %s' % os.getpid(),
'X-Backend-Storage-Policy-Index': '%d' % int(policy),
'X-Meta-Timestamp': t_post.internal,
'X-Content-Type-Timestamp': t_post.internal,
}
if policy.policy_type == EC_POLICY:
expected_post_headers['X-Etag'] = update_etag
self.assertDictEqual(
pickle.load(open(async_pending_file_post)),
{'headers': expected_post_headers,
'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
# verify that only the POST (most recent) async update gets sent by the
# object updater, and that both update files are deleted
with mock.patch(
'swift.obj.updater.ObjectUpdater.object_update') as mock_update, \
mock.patch('swift.obj.updater.dump_recon_cache'):
object_updater = updater.ObjectUpdater(
{'devices': self.testdir,
'mount_check': 'false'}, logger=debug_logger())
node = {'id': 1}
mock_ring = mock.MagicMock()
mock_ring.get_nodes.return_value = (99, [node])
object_updater.container_ring = mock_ring
            mock_update.return_value = (True, 1)
object_updater.run_once()
self.assertEqual(1, mock_update.call_count)
self.assertEqual((node, 99, 'PUT', '/a/c/o'),
mock_update.call_args_list[0][0][0:4])
actual_headers = mock_update.call_args_list[0][0][4]
# User-Agent is updated.
expected_post_headers['User-Agent'] = 'object-updater %s' % os.getpid()
self.assertDictEqual(expected_post_headers, actual_headers)
self.assertFalse(
os.listdir(os.path.join(
device_dir, diskfile.get_async_dir(policy))))
def test_PUT_then_POST_async_pendings_with_repl_policy(self):
self._test_PUT_then_POST_async_pendings(POLICIES[0])
def test_PUT_then_POST_async_pendings_with_EC_policy(self):
self._test_PUT_then_POST_async_pendings(
POLICIES[1], update_etag='override_etag')
def test_POST_quarantine_zbyte(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
policy=POLICIES.legacy)
objfile.open()
file_name = os.path.basename(objfile._data_file)
with open(objfile._data_file) as fp:
metadata = diskfile.read_metadata(fp)
os.unlink(objfile._data_file)
with open(objfile._data_file, 'w') as fp:
diskfile.write_metadata(fp, metadata)
self.assertEqual(os.listdir(objfile._datadir)[0], file_name)
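        # the .data file now contains metadata but zero bytes of data, so
        # its size no longer matches the stored Content-Length; opening it
        # during the POST should quarantine the file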
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(time())})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
quar_dir = os.path.join(
self.testdir, 'sda1', 'quarantined', 'objects',
os.path.basename(os.path.dirname(objfile._data_file)))
self.assertEqual(os.listdir(quar_dir)[0], file_name)
def test_PUT_invalid_path(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_no_timestamp(self):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT',
'CONTENT_LENGTH': '0'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_bad_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 'bad'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_no_content_type(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '6'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_invalid_content_type(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '6',
'Content-Type': '\xff\xff'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
        self.assertIn('Content-Type', resp.body)
def test_PUT_no_content_length(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
del req.headers['Content-Length']
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 411)
def test_PUT_zero_content_length(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'})
req.body = ''
self.assertEqual(req.headers['Content-Length'], '0')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_bad_transfer_encoding(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
req.headers['Transfer-Encoding'] = 'bad'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_if_none_match_star(self):
# First PUT should succeed
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': next(self.ts).normal,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
        # the object now exists so the second If-None-Match PUT should fail
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': next(self.ts).normal,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': next(self.ts).normal})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': next(self.ts).normal,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_if_none_match(self):
# PUT with if-none-match set and nothing there should succeed
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': 'notthere'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# PUT with if-none-match of the object etag should fail
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '0b4c12d7e0a73840c1c4f148fda3b037'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
def test_PUT_if_none_match_but_expired(self):
        initial_put = next(self.ts)
put_before_expire = next(self.ts)
delete_at_timestamp = int(next(self.ts))
time_after_expire = next(self.ts)
put_after_expire = next(self.ts)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
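        # the expiring-objects container name is the delete-at timestamp
        # rounded down (integer division) to a multiple of the configured
        # divisor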
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': initial_put.normal,
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# PUT again before object has expired should fail
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': put_before_expire.normal,
'Content-Length': '4',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'TEST'
with mock.patch("swift.obj.server.time.time",
lambda: float(put_before_expire.normal)):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
# PUT again after object has expired should succeed
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': put_after_expire.normal,
'Content-Length': '4',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'TEST'
with mock.patch("swift.obj.server.time.time",
lambda: float(time_after_expire.normal)):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_common(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'x-object-meta-test': 'one',
'Custom-Header': '*',
'X-Backend-Replication-Headers':
'Content-Type Content-Length'})
req.body = 'VERIFY'
with mock.patch.object(self.object_controller, 'allowed_headers',
['Custom-Header']):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]),
'p', hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY')
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': utils.Timestamp(timestamp).internal,
'Content-Length': '6',
'ETag': '0b4c12d7e0a73840c1c4f148fda3b037',
'Content-Type': 'application/octet-stream',
'name': '/a/c/o',
'X-Object-Meta-Test': 'one',
'Custom-Header': '*'})
def test_PUT_overwrite(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
sleep(.00001)
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Content-Encoding': 'gzip'})
req.body = 'VERIFY TWO'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY TWO')
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': utils.Timestamp(timestamp).internal,
'Content-Length': '10',
'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
'Content-Type': 'text/plain',
'name': '/a/c/o',
'Content-Encoding': 'gzip'})
def test_PUT_overwrite_to_older_ts_success(self):
old_timestamp = next(self.ts)
new_timestamp = next(self.ts)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': old_timestamp.normal,
'Content-Length': '0',
'Content-Type': 'application/octet-stream'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': new_timestamp.normal,
'Content-Type': 'text/plain',
'Content-Encoding': 'gzip'})
req.body = 'VERIFY TWO'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
new_timestamp.internal + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY TWO')
self.assertEqual(
diskfile.read_metadata(objfile),
{'X-Timestamp': new_timestamp.internal,
'Content-Length': '10',
'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
'Content-Type': 'text/plain',
'name': '/a/c/o',
'Content-Encoding': 'gzip'})
def test_PUT_overwrite_to_newer_ts_failed(self):
old_timestamp = next(self.ts)
new_timestamp = next(self.ts)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': new_timestamp.normal,
'Content-Length': '0',
'Content-Type': 'application/octet-stream'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': old_timestamp.normal,
'Content-Type': 'text/plain',
'Content-Encoding': 'gzip'})
req.body = 'VERIFY TWO'
with mock.patch(
'swift.obj.diskfile.BaseDiskFile.create') as mock_create:
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(mock_create.call_count, 0)
        # the data file should not exist (sanity check: if a .data file
        # had been written unexpectedly it would be removed by
        # cleanup_ondisk_files)
datafile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
old_timestamp.internal + '.data')
self.assertFalse(os.path.exists(datafile))
        # the .ts (tombstone) file should still exist
tsfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
new_timestamp.internal + '.ts')
self.assertTrue(os.path.isfile(tsfile))
def test_PUT_overwrite_w_delete_at(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'X-Delete-At': 9999999999,
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
sleep(.00001)
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Content-Encoding': 'gzip'})
req.body = 'VERIFY TWO'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY TWO')
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': utils.Timestamp(timestamp).internal,
'Content-Length': '10',
'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
'Content-Type': 'text/plain',
'name': '/a/c/o',
'Content-Encoding': 'gzip'})
def test_PUT_old_timestamp(self):
ts = time()
orig_timestamp = utils.Timestamp(ts).internal
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': orig_timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(ts),
'Content-Type': 'text/plain',
'Content-Encoding': 'gzip'})
req.body = 'VERIFY TWO'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(ts - 1),
'Content-Type': 'text/plain',
'Content-Encoding': 'gzip'})
req.body = 'VERIFY THREE'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
def test_PUT_new_object_really_old_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '-1', # 1969-12-31 23:59:59
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '1', # 1970-01-01 00:00:01
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_object_really_new_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '9999999999', # 2286-11-20 17:46:40
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# roll over to 11 digits before the decimal
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '10000000000',
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_no_etag(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'text/plain'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_invalid_etag(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'text/plain',
'ETag': 'invalid'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 422)
def test_PUT_user_metadata(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
req.body = 'VERIFY THREE'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY THREE')
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': utils.Timestamp(timestamp).internal,
'Content-Length': '12',
'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
'Content-Type': 'text/plain',
'name': '/a/c/o',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
def test_PUT_etag_in_footer(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
'Etag': 'other-etag',
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
environ={'REQUEST_METHOD': 'PUT'})
obj_etag = md5("obj data").hexdigest()
footer_meta = json.dumps({"Etag": obj_etag})
footer_meta_cksum = md5(footer_meta).hexdigest()
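        # the footer MIME document must carry a Content-MD5 that matches
        # the md5 of the footer JSON; the missing and mismatching checksum
        # cases are covered by separate tests below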
req.body = "\r\n".join((
"--boundary",
"",
"obj data",
"--boundary",
"Content-MD5: " + footer_meta_cksum,
"",
footer_meta,
"--boundary--",
))
req.headers.pop("Content-Length", None)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.etag, obj_etag)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
with open(objfile) as fh:
self.assertEqual(fh.read(), "obj data")
def _check_container_override_etag_preference(self, override_headers,
override_footers):
def mock_container_update(ctlr, op, account, container, obj, req,
headers_out, objdevice, policy):
calls_made.append((headers_out, policy))
calls_made = []
ts_put = next(self.ts)
headers = {
'X-Timestamp': ts_put.internal,
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
'Etag': 'other-etag',
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'}
headers.update(override_headers)
req = Request.blank(
'/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'PUT'})
obj_etag = md5("obj data").hexdigest()
footers = {'Etag': obj_etag}
footers.update(override_footers)
footer_meta = json.dumps(footers)
footer_meta_cksum = md5(footer_meta).hexdigest()
req.body = "\r\n".join((
"--boundary",
"",
"obj data",
"--boundary",
"Content-MD5: " + footer_meta_cksum,
"",
footer_meta,
"--boundary--",
))
req.headers.pop("Content-Length", None)
with mock.patch(
'swift.obj.server.ObjectController.container_update',
mock_container_update):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.etag, obj_etag)
self.assertEqual(resp.status_int, 201)
self.assertEqual(1, len(calls_made))
self.assertEqual({
'X-Size': str(len('obj data')),
'X-Etag': 'update-etag',
'X-Content-Type': 'text/plain',
'X-Timestamp': ts_put.internal,
}, calls_made[0][0])
self.assertEqual(POLICIES[0], calls_made[0][1])
def test_override_etag_lone_header_footer(self):
self._check_container_override_etag_preference(
{'X-Backend-Container-Update-Override-Etag': 'update-etag'}, {})
self._check_container_override_etag_preference(
{}, {'X-Backend-Container-Update-Override-Etag': 'update-etag'})
self._check_container_override_etag_preference(
{'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'}, {})
self._check_container_override_etag_preference(
{}, {'X-Object-Sysmeta-Container-Update-Override-Etag':
                 'update-etag'})
def test_override_etag_footer_trumps_header(self):
self._check_container_override_etag_preference(
{'X-Backend-Container-Update-Override-Etag': 'ignored-etag'},
{'X-Backend-Container-Update-Override-Etag': 'update-etag'})
self._check_container_override_etag_preference(
{'X-Object-Sysmeta-Container-Update-Override-Etag':
'ignored-etag'},
{'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'})
def test_override_etag_sysmeta_trumps_backend(self):
self._check_container_override_etag_preference(
{'X-Backend-Container-Update-Override-Etag': 'ignored-etag',
'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'}, {})
self._check_container_override_etag_preference(
{}, {'X-Backend-Container-Update-Override-Etag': 'ignored-etag',
'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'})
def test_override_etag_sysmeta_header_trumps_backend_footer(self):
headers = {'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'}
footers = {'X-Backend-Container-Update-Override-Etag':
'ignored-etag'}
self._check_container_override_etag_preference(headers, footers)
def test_override_etag_sysmeta_footer_trumps_backend_header(self):
headers = {'X-Backend-Container-Update-Override-Etag':
'ignored-etag'}
footers = {'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'}
self._check_container_override_etag_preference(headers, footers)
def test_PUT_etag_in_footer_mismatch(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
environ={'REQUEST_METHOD': 'PUT'})
footer_meta = json.dumps({"Etag": md5("green").hexdigest()})
footer_meta_cksum = md5(footer_meta).hexdigest()
req.body = "\r\n".join((
"--boundary",
"",
"blue",
"--boundary",
"Content-MD5: " + footer_meta_cksum,
"",
footer_meta,
"--boundary--",
))
req.headers.pop("Content-Length", None)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 422)
def test_PUT_meta_in_footer(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
'X-Object-Meta-X': 'Z',
'X-Object-Sysmeta-X': 'Z',
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
environ={'REQUEST_METHOD': 'PUT'})
footer_meta = json.dumps({
'X-Object-Meta-X': 'Y',
'X-Object-Sysmeta-X': 'Y',
})
footer_meta_cksum = md5(footer_meta).hexdigest()
req.body = "\r\n".join((
"--boundary",
"",
"stuff stuff stuff",
"--boundary",
"Content-MD5: " + footer_meta_cksum,
"",
footer_meta,
"--boundary--",
))
req.headers.pop("Content-Length", None)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
headers={'X-Timestamp': timestamp},
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.headers.get('X-Object-Meta-X'), 'Y')
self.assertEqual(resp.headers.get('X-Object-Sysmeta-X'), 'Y')
def test_PUT_missing_footer_checksum(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
environ={'REQUEST_METHOD': 'PUT'})
footer_meta = json.dumps({"Etag": md5("obj data").hexdigest()})
req.body = "\r\n".join((
"--boundary",
"",
"obj data",
"--boundary",
# no Content-MD5
"",
footer_meta,
"--boundary--",
))
req.headers.pop("Content-Length", None)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_bad_footer_checksum(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
environ={'REQUEST_METHOD': 'PUT'})
footer_meta = json.dumps({"Etag": md5("obj data").hexdigest()})
bad_footer_meta_cksum = md5(footer_meta + "bad").hexdigest()
req.body = "\r\n".join((
"--boundary",
"",
"obj data",
"--boundary",
"Content-MD5: " + bad_footer_meta_cksum,
"",
footer_meta,
"--boundary--",
))
req.headers.pop("Content-Length", None)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 422)
def test_PUT_bad_footer_json(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
environ={'REQUEST_METHOD': 'PUT'})
footer_meta = "{{{[[{{[{[[{[{[[{{{[{{{{[[{{[{["
footer_meta_cksum = md5(footer_meta).hexdigest()
req.body = "\r\n".join((
"--boundary",
"",
"obj data",
"--boundary",
"Content-MD5: " + footer_meta_cksum,
"",
footer_meta,
"--boundary--",
))
req.headers.pop("Content-Length", None)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_extra_mime_docs_ignored(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
environ={'REQUEST_METHOD': 'PUT'})
footer_meta = json.dumps({'X-Object-Meta-Mint': 'pepper'})
footer_meta_cksum = md5(footer_meta).hexdigest()
req.body = "\r\n".join((
"--boundary",
"",
"obj data",
"--boundary",
"Content-MD5: " + footer_meta_cksum,
"",
footer_meta,
"--boundary",
"This-Document-Is-Useless: yes",
"",
"blah blah I take up space",
"--boundary--"
))
req.headers.pop("Content-Length", None)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# swob made this into a StringIO for us
wsgi_input = req.environ['wsgi.input']
self.assertEqual(wsgi_input.tell(), len(wsgi_input.getvalue()))
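        # having read to EOF shows the server consumed (and ignored) the
        # extra trailing MIME document rather than leaving it on the wire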
def test_PUT_user_metadata_no_xattr(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
req.body = 'VERIFY THREE'
def mock_get_and_setxattr(*args, **kargs):
error_num = errno.ENOTSUP if hasattr(errno, 'ENOTSUP') else \
errno.EOPNOTSUPP
raise IOError(error_num, 'Operation not supported')
with mock.patch('xattr.getxattr', mock_get_and_setxattr):
with mock.patch('xattr.setxattr', mock_get_and_setxattr):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 507)
def test_PUT_client_timeout(self):
class FakeTimeout(BaseException):
def __enter__(self):
raise self
def __exit__(self, typ, value, tb):
pass
# This is just so the test fails when run on older object server code
# instead of exploding.
if not hasattr(object_server, 'ChunkReadTimeout'):
object_server.ChunkReadTimeout = None
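        # FakeTimeout raises itself on __enter__, so any body read wrapped
        # in ChunkReadTimeout immediately simulates a client read timeout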
with mock.patch.object(object_server, 'ChunkReadTimeout', FakeTimeout):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Content-Length': '6'})
req.environ['wsgi.input'] = WsgiBytesIO(b'VERIFY')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 408)
def test_PUT_client_closed_connection(self):
class fake_input(object):
def read(self, *a, **kw):
# On client disconnect during a chunked transfer, eventlet
# may raise a ValueError (or ChunkReadError, following
# https://github.com/eventlet/eventlet/commit/c3ce3ee -- but
# that inherits from ValueError)
raise ValueError
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'Content-Length': '6'})
req.environ['wsgi.input'] = fake_input()
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 499)
def test_PUT_system_metadata(self):
# check that sysmeta is stored in diskfile
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'X-Object-Meta-1': 'One',
'X-Object-Sysmeta-1': 'One',
'X-Object-Sysmeta-Two': 'Two',
'X-Object-Transient-Sysmeta-Foo': 'Bar'})
req.body = 'VERIFY SYSMETA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
timestamp + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': timestamp,
'Content-Length': '14',
'Content-Type': 'text/plain',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'name': '/a/c/o',
'X-Object-Meta-1': 'One',
'X-Object-Sysmeta-1': 'One',
'X-Object-Sysmeta-Two': 'Two',
'X-Object-Transient-Sysmeta-Foo': 'Bar'})
def test_PUT_succeeds_with_later_POST(self):
t_put = next(self.ts).internal
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': t_put,
'Content-Length': 0,
'Content-Type': 'plain/text'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
t_put2 = next(self.ts).internal
t_post = next(self.ts).internal
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': t_post})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': t_put2,
'Content-Length': 0,
'Content-Type': 'plain/text'},
)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
obj_dir = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')))
        data_file = os.path.join(obj_dir, t_put2 + '.data')
        self.assertTrue(os.path.isfile(data_file))
meta_file = os.path.join(obj_dir, t_post + '.meta')
self.assertTrue(os.path.isfile(meta_file))
def test_POST_system_metadata(self):
# check that diskfile sysmeta is not changed by a POST
timestamp1 = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp1,
'Content-Type': 'text/plain',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'X-Object-Meta-1': 'One',
'X-Object-Sysmeta-1': 'One',
'X-Object-Sysmeta-Two': 'Two'})
req.body = 'VERIFY SYSMETA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
timestamp2 = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp2,
'X-Object-Meta-1': 'Not One',
'X-Object-Sysmeta-1': 'Not One',
'X-Object-Sysmeta-Two': 'Not Two'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
# original .data file metadata should be unchanged
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
timestamp1 + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': timestamp1,
'Content-Length': '14',
'Content-Type': 'text/plain',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'name': '/a/c/o',
'X-Object-Meta-1': 'One',
'X-Object-Sysmeta-1': 'One',
'X-Object-Sysmeta-Two': 'Two'})
# .meta file metadata should have only user meta items
metafile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
timestamp2 + '.meta')
self.assertTrue(os.path.isfile(metafile))
self.assertEqual(diskfile.read_metadata(metafile),
{'X-Timestamp': timestamp2,
'name': '/a/c/o',
'X-Object-Meta-1': 'Not One'})
def test_POST_then_fetch_content_type(self):
# check that content_type is updated by a POST
timestamp1 = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp1,
'Content-Type': 'text/plain',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'X-Object-Meta-1': 'One'})
req.body = 'VERIFY SYSMETA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
timestamp2 = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp2,
'X-Object-Meta-1': 'Not One',
'Content-Type': 'text/html'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
# original .data file metadata should be unchanged
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')),
timestamp1 + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': timestamp1,
'Content-Length': '14',
'Content-Type': 'text/plain',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'name': '/a/c/o',
'X-Object-Meta-1': 'One'})
# .meta file metadata should have updated content-type
metafile_name = encode_timestamps(Timestamp(timestamp2),
Timestamp(timestamp2),
explicit=True)
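        # when a POST changes the content-type, encode_timestamps() embeds
        # the content-type timestamp in the .meta file name (explicit=True
        # forces the two-part encoding even though both timestamps are
        # equal here)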
metafile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')),
metafile_name + '.meta')
self.assertTrue(os.path.isfile(metafile))
self.assertEqual(diskfile.read_metadata(metafile),
{'X-Timestamp': timestamp2,
'name': '/a/c/o',
'Content-Type': 'text/html',
'Content-Type-Timestamp': timestamp2,
'X-Object-Meta-1': 'Not One'})
def check_response(resp):
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 14)
self.assertEqual(resp.content_type, 'text/html')
self.assertEqual(resp.headers['content-type'], 'text/html')
self.assertEqual(
resp.headers['last-modified'],
strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp2)))))
self.assertEqual(resp.headers['etag'],
'"1000d172764c9dbc3a5798a67ec5bb76"')
self.assertEqual(resp.headers['x-object-meta-1'], 'Not One')
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
check_response(resp)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
check_response(resp)
def test_POST_transient_sysmeta(self):
# check that diskfile transient system meta is changed by a POST
timestamp1 = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp1,
'Content-Type': 'text/plain',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'X-Object-Meta-1': 'One',
'X-Object-Sysmeta-1': 'One',
'X-Object-Transient-Sysmeta-Foo': 'Bar'})
req.body = 'VERIFY SYSMETA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
timestamp2 = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp2,
'X-Object-Meta-1': 'Not One',
'X-Object-Sysmeta-1': 'Not One',
'X-Object-Transient-Sysmeta-Foo': 'Not Bar'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
# original .data file metadata should be unchanged
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')),
timestamp1 + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
self.assertDictEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': timestamp1,
'Content-Length': '14',
'Content-Type': 'text/plain',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'name': '/a/c/o',
'X-Object-Meta-1': 'One',
'X-Object-Sysmeta-1': 'One',
'X-Object-Transient-Sysmeta-Foo': 'Bar'})
        # .meta file metadata should have user meta and transient sysmeta
        # items, but no persistent sysmeta
metafile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')),
timestamp2 + '.meta')
self.assertTrue(os.path.isfile(metafile))
self.assertDictEqual(diskfile.read_metadata(metafile),
{'X-Timestamp': timestamp2,
'name': '/a/c/o',
'X-Object-Meta-1': 'Not One',
'X-Object-Transient-Sysmeta-Foo': 'Not Bar'})
def test_PUT_then_fetch_system_metadata(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'X-Object-Meta-1': 'One',
'X-Object-Sysmeta-1': 'One',
'X-Object-Sysmeta-Two': 'Two',
'X-Object-Transient-Sysmeta-Foo': 'Bar'})
req.body = 'VERIFY SYSMETA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def check_response(resp):
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 14)
self.assertEqual(resp.content_type, 'text/plain')
self.assertEqual(resp.headers['content-type'], 'text/plain')
self.assertEqual(
resp.headers['last-modified'],
strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)))))
self.assertEqual(resp.headers['etag'],
'"1000d172764c9dbc3a5798a67ec5bb76"')
self.assertEqual(resp.headers['x-object-meta-1'], 'One')
self.assertEqual(resp.headers['x-object-sysmeta-1'], 'One')
self.assertEqual(resp.headers['x-object-sysmeta-two'], 'Two')
self.assertEqual(resp.headers['x-object-transient-sysmeta-foo'],
'Bar')
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
check_response(resp)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
check_response(resp)
def test_PUT_then_POST_then_fetch_system_metadata(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'X-Object-Meta-0': 'deleted by post',
'X-Object-Sysmeta-0': 'Zero',
'X-Object-Transient-Sysmeta-0': 'deleted by post',
'X-Object-Meta-1': 'One',
'X-Object-Sysmeta-1': 'One',
'X-Object-Sysmeta-Two': 'Two',
'X-Object-Transient-Sysmeta-Foo': 'Bar'})
req.body = 'VERIFY SYSMETA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
timestamp2 = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp2,
'X-Object-Meta-1': 'Not One',
'X-Object-Sysmeta-1': 'Not One',
'X-Object-Sysmeta-Two': 'Not Two',
'X-Object-Transient-Sysmeta-Foo': 'Not Bar'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
def check_response(resp):
# user meta should be updated but not sysmeta
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 14)
self.assertEqual(resp.content_type, 'text/plain')
self.assertEqual(resp.headers['content-type'], 'text/plain')
self.assertEqual(
resp.headers['last-modified'],
strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp2)))))
self.assertEqual(resp.headers['etag'],
'"1000d172764c9dbc3a5798a67ec5bb76"')
self.assertEqual(resp.headers['x-object-meta-1'], 'Not One')
self.assertEqual(resp.headers['x-object-sysmeta-0'], 'Zero')
self.assertEqual(resp.headers['x-object-sysmeta-1'], 'One')
self.assertEqual(resp.headers['x-object-sysmeta-two'], 'Two')
self.assertEqual(resp.headers['x-object-transient-sysmeta-foo'],
'Not Bar')
self.assertNotIn('x-object-meta-0', resp.headers)
self.assertNotIn('x-object-transient-sysmeta-0', resp.headers)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
check_response(resp)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
check_response(resp)
def test_PUT_with_replication_headers(self):
# check that otherwise disallowed headers are accepted when specified
# by X-Backend-Replication-Headers
# first PUT object
timestamp1 = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp1,
'Content-Type': 'text/plain',
'Content-Length': '14',
'Etag': '1000d172764c9dbc3a5798a67ec5bb76',
'Custom-Header': 'custom1',
'X-Object-Meta-1': 'meta1',
'X-Static-Large-Object': 'False'})
req.body = 'VERIFY SYSMETA'
# restrict set of allowed headers on this server
with mock.patch.object(self.object_controller, 'allowed_headers',
['Custom-Header']):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')),
timestamp1 + '.data')
# X-Static-Large-Object is disallowed.
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': timestamp1,
'Content-Type': 'text/plain',
'Content-Length': '14',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'name': '/a/c/o',
'Custom-Header': 'custom1',
'X-Object-Meta-1': 'meta1'})
# PUT object again with X-Backend-Replication-Headers
timestamp2 = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp2,
'Content-Type': 'text/plain',
'Content-Length': '14',
'Etag': '1000d172764c9dbc3a5798a67ec5bb76',
'Custom-Header': 'custom1',
'X-Object-Meta-1': 'meta1',
'X-Static-Large-Object': 'False',
'X-Backend-Replication-Headers':
'X-Static-Large-Object'})
req.body = 'VERIFY SYSMETA'
with mock.patch.object(self.object_controller, 'allowed_headers',
['Custom-Header']):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')),
timestamp2 + '.data')
# X-Static-Large-Object should be copied since it is now allowed by
# replication headers.
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': timestamp2,
'Content-Type': 'text/plain',
'Content-Length': '14',
'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
'name': '/a/c/o',
'Custom-Header': 'custom1',
'X-Object-Meta-1': 'meta1',
'X-Static-Large-Object': 'False'})
def test_PUT_container_connection(self):
def mock_http_connect(response, with_exc=False):
class FakeConn(object):
def __init__(self, status, with_exc):
self.status = status
self.reason = 'Fake'
self.host = '1.2.3.4'
self.port = '1234'
self.with_exc = with_exc
def getresponse(self):
if self.with_exc:
raise Exception('test')
return self
def read(self, amt=None):
return ''
return lambda *args, **kwargs: FakeConn(response, with_exc)
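        # All three PUTs below should return 201 even though the mocked
        # container update gets a 201, a 500, or an exception: a failed
        # container update is deferred to an async pending rather than
        # surfaced to the client.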
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'X-Container-Host': '1.2.3.4:0',
'X-Container-Partition': '3',
'X-Container-Device': 'sda1',
'X-Container-Timestamp': '1',
'Content-Type': 'application/new1',
'Content-Length': '0'})
with mock.patch.object(
object_server, 'http_connect', mock_http_connect(201)):
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'X-Container-Host': '1.2.3.4:0',
'X-Container-Partition': '3',
'X-Container-Device': 'sda1',
'X-Container-Timestamp': '1',
'Content-Type': 'application/new1',
'Content-Length': '0'})
with mock.patch.object(
object_server, 'http_connect', mock_http_connect(500)):
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'X-Container-Host': '1.2.3.4:0',
'X-Container-Partition': '3',
'X-Container-Device': 'sda1',
'X-Container-Timestamp': '1',
'Content-Type': 'application/new1',
'Content-Length': '0'})
with mock.patch.object(
object_server, 'http_connect',
mock_http_connect(500, with_exc=True)):
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_EC_GET_PUT_data(self):
for policy in self.ec_policies:
raw_data = ('VERIFY' * policy.ec_segment_size)[:-432]
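            # trim the body so it is not an exact multiple of the segment
            # size (presumably to exercise a short final segment)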
frag_archives = encode_frag_archive_bodies(policy, raw_data)
frag_index = random.randint(0, len(frag_archives) - 1)
# put EC frag archive
req = Request.blank('/sda1/p/a/c/o', method='PUT', headers={
'X-Timestamp': next(self.ts).internal,
'Content-Type': 'application/verify',
'Content-Length': len(frag_archives[frag_index]),
'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
'X-Backend-Storage-Policy-Index': int(policy),
})
req.body = frag_archives[frag_index]
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# get EC frag archive
req = Request.blank('/sda1/p/a/c/o', headers={
'X-Backend-Storage-Policy-Index': int(policy),
})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, frag_archives[frag_index])
def test_EC_GET_quarantine_invalid_frag_archive(self):
policy = random.choice(self.ec_policies)
raw_data = ('VERIFY' * policy.ec_segment_size)[:-432]
frag_archives = encode_frag_archive_bodies(policy, raw_data)
frag_index = random.randint(0, len(frag_archives) - 1)
content_length = len(frag_archives[frag_index])
# put EC frag archive
req = Request.blank('/sda1/p/a/c/o', method='PUT', headers={
'X-Timestamp': next(self.ts).internal,
'Content-Type': 'application/verify',
'Content-Length': content_length,
'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
'X-Backend-Storage-Policy-Index': int(policy),
})
corrupt = 'garbage' + frag_archives[frag_index]
req.body = corrupt[:content_length]
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# get EC frag archive
req = Request.blank('/sda1/p/a/c/o', headers={
'X-Backend-Storage-Policy-Index': int(policy),
})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
with self.assertRaises(DiskFileQuarantined) as ctx:
resp.body
self.assertIn("Invalid EC metadata", str(ctx.exception))
# nothing is logged on *our* loggers
errors = self.object_controller.logger.get_lines_for_level('error')
self.assertEqual(errors, [])
# get EC frag archive - it's gone
req = Request.blank('/sda1/p/a/c/o', headers={
'X-Backend-Storage-Policy-Index': int(policy),
})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
def test_PUT_ssync_multi_frag(self):
timestamp = utils.Timestamp.now().internal
def put_with_index(expected_rsp, frag_index, node_index=None):
data_file_tail = '#%d#d.data' % frag_index
headers = {'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'X-Backend-Ssync-Frag-Index': node_index,
'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
'X-Backend-Storage-Policy-Index': int(policy)}
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(
resp.status_int, expected_rsp,
'got %s != %s for frag_index=%s node_index=%s' % (
resp.status_int, expected_rsp,
frag_index, node_index))
if expected_rsp == 409:
return
obj_dir = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(int(policy)),
'p', hash_path('a', 'c', 'o')))
data_file = os.path.join(obj_dir, timestamp) + data_file_tail
self.assertTrue(os.path.isfile(data_file),
'Expected file %r not found in %r for policy %r'
% (data_file, os.listdir(obj_dir), int(policy)))
for policy in POLICIES:
if policy.policy_type == EC_POLICY:
# upload with a ec-frag-index
put_with_index(201, 3)
# same timestamp will conflict a different ec-frag-index
put_with_index(409, 2)
# but with the ssync-frag-index (primary node) it will just
# save both!
put_with_index(201, 2, 2)
# but even with the ssync-frag-index we can still get a
                # timestamp collision if the file already exists
put_with_index(409, 3, 3)
                # FWIW, ssync will never send inconsistent indexes - but if
                # something else did, from the object server perspective ...
                # ... the ssync-frag-index is canonical on the
                # read/pre-existence check
put_with_index(409, 7, 2)
                # ... but the ec-frag-index is canonical when it comes to the
                # on-disk file name
put_with_index(201, 7, 6)
def test_PUT_commits_data(self):
for policy in POLICIES:
timestamp = utils.Timestamp(int(time())).internal
data_file_tail = '.data'
headers = {'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'X-Backend-Storage-Policy-Index': int(policy)}
if policy.policy_type == EC_POLICY:
# commit renames data file
headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
data_file_tail = '#2#d.data'
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
obj_dir = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(int(policy)),
'p', hash_path('a', 'c', 'o')))
data_file = os.path.join(obj_dir, timestamp) + data_file_tail
self.assertTrue(os.path.isfile(data_file),
'Expected file %r not found in %r for policy %r'
% (data_file, os.listdir(obj_dir), int(policy)))
rmtree(obj_dir)
def test_HEAD(self):
# Test swift.obj.server.ObjectController.HEAD
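        # a path without an object name is invalid for the object server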
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
        self.assertNotIn('X-Backend-Timestamp', resp.headers)
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 6)
self.assertEqual(resp.content_type, 'application/x-test')
self.assertEqual(resp.headers['content-type'], 'application/x-test')
self.assertEqual(
resp.headers['last-modified'],
strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)))))
self.assertEqual(resp.headers['etag'],
'"0b4c12d7e0a73840c1c4f148fda3b037"')
self.assertEqual(resp.headers['x-object-meta-1'], 'One')
self.assertEqual(resp.headers['x-object-meta-two'], 'Two')
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
os.unlink(objfile)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
sleep(.00001)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
                                'Content-Length': '6'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
sleep(.00001)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
utils.Timestamp(timestamp).internal)
def test_HEAD_quarantine_zbyte(self):
        # Test swift.obj.server.ObjectController.HEAD
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
policy=POLICIES.legacy)
disk_file.open()
file_name = os.path.basename(disk_file._data_file)
with open(disk_file._data_file) as fp:
metadata = diskfile.read_metadata(fp)
os.unlink(disk_file._data_file)
with open(disk_file._data_file, 'w') as fp:
diskfile.write_metadata(fp, metadata)
file_name = os.path.basename(disk_file._data_file)
self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
quar_dir = os.path.join(
self.testdir, 'sda1', 'quarantined', 'objects',
os.path.basename(os.path.dirname(disk_file._data_file)))
self.assertEqual(os.listdir(quar_dir)[0], file_name)
def test_OPTIONS(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
server_handler = object_server.ObjectController(
conf, logger=debug_logger())
req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = server_handler.OPTIONS(req)
self.assertEqual(200, resp.status_int)
for verb in 'OPTIONS GET POST PUT DELETE HEAD REPLICATE \
SSYNC'.split():
            self.assertIn(verb, resp.headers['Allow'].split(', '))
self.assertEqual(len(resp.headers['Allow'].split(', ')), 8)
self.assertEqual(resp.headers['Server'],
(server_handler.server_type + '/' + swift_version))
def test_insufficient_storage_mount_check_true(self):
conf = {'devices': self.testdir, 'mount_check': 'true'}
object_controller = object_server.ObjectController(conf)
for policy in POLICIES:
mgr = object_controller._diskfile_router[policy]
self.assertTrue(mgr.mount_check)
for method in object_controller.allowed_methods:
if method in ('OPTIONS', 'SSYNC'):
continue
path = '/sda1/p/'
if method == 'REPLICATE':
path += 'suff'
else:
path += 'a/c/o'
req = Request.blank(path, method=method,
headers={'x-timestamp': '1',
'content-type': 'app/test',
'content-length': 0})
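            # every method should 507 while the drive appears unmounted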
with mock_check_drive() as mocks:
try:
resp = req.get_response(object_controller)
self.assertEqual(resp.status_int, 507)
mocks['ismount'].return_value = True
resp = req.get_response(object_controller)
self.assertNotEqual(resp.status_int, 507)
# feel free to rip out this last assertion...
expected = 2 if method in ('PUT', 'REPLICATE') else 4
self.assertEqual(resp.status_int // 100, expected)
except AssertionError as e:
self.fail('%s for %s' % (e, method))
def test_insufficient_storage_mount_check_false(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
object_controller = object_server.ObjectController(conf)
for policy in POLICIES:
mgr = object_controller._diskfile_router[policy]
self.assertFalse(mgr.mount_check)
for method in object_controller.allowed_methods:
if method in ('OPTIONS', 'SSYNC'):
continue
path = '/sda1/p/'
if method == 'REPLICATE':
path += 'suff'
else:
path += 'a/c/o'
req = Request.blank(path, method=method,
headers={'x-timestamp': '1',
'content-type': 'app/test',
'content-length': 0})
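            # with mount_check off, the drive-root isdir() check gates 507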
with mock_check_drive() as mocks:
try:
resp = req.get_response(object_controller)
self.assertEqual(resp.status_int, 507)
mocks['isdir'].return_value = True
resp = req.get_response(object_controller)
self.assertNotEqual(resp.status_int, 507)
# feel free to rip out this last assertion...
expected = 2 if method in ('PUT', 'REPLICATE') else 4
self.assertEqual(resp.status_int // 100, expected)
except AssertionError as e:
self.fail('%s for %s' % (e, method))
def test_GET(self):
# Test swift.obj.server.ObjectController.GET
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
        self.assertNotIn('X-Backend-Timestamp', resp.headers)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, 'VERIFY')
self.assertEqual(resp.content_length, 6)
self.assertEqual(resp.content_type, 'application/x-test')
self.assertEqual(resp.headers['content-length'], '6')
self.assertEqual(resp.headers['content-type'], 'application/x-test')
self.assertEqual(
resp.headers['last-modified'],
strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)))))
self.assertEqual(resp.headers['etag'],
'"0b4c12d7e0a73840c1c4f148fda3b037"')
self.assertEqual(resp.headers['x-object-meta-1'], 'One')
self.assertEqual(resp.headers['x-object-meta-two'], 'Two')
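        # ranged GETs: 'bytes=M-N' is inclusive, and 'bytes=-K' is a suffix
        # range counting from the end of the body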
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
req.range = 'bytes=1-3'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 206)
self.assertEqual(resp.body, 'ERI')
self.assertEqual(resp.headers['content-length'], '3')
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
req.range = 'bytes=1-'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 206)
self.assertEqual(resp.body, 'ERIFY')
self.assertEqual(resp.headers['content-length'], '5')
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
req.range = 'bytes=-2'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 206)
self.assertEqual(resp.body, 'FY')
self.assertEqual(resp.headers['content-length'], '2')
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
os.unlink(objfile)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
sleep(.00001)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
'Content-Length': '6'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
sleep(.00001)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
utils.Timestamp(timestamp).internal)
def test_GET_if_match(self):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
etag = resp.etag
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': '"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Match': '"11111111111111111111111111111111"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
self.assertIn(
'"HEAD /sda1/p/a/c/o" 412 - ',
self.object_controller.logger.get_lines_for_level('info')[-1])
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': '"11111111111111111111111111111111"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
self.assertIn(
'"GET /sda1/p/a/c/o" 412 - ',
self.object_controller.logger.get_lines_for_level('info')[-1])
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={
'If-Match': '"11111111111111111111111111111111", "%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={
'If-Match':
'"11111111111111111111111111111111", '
'"22222222222222222222222222222222"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
def test_GET_if_match_etag_is_at(self):
headers = {
'X-Timestamp': utils.Timestamp.now().internal,
'Content-Type': 'application/octet-stream',
'X-Object-Meta-Xtag': 'madeup',
'X-Object-Sysmeta-Xtag': 'alternate madeup',
}
req = Request.blank('/sda1/p/a/c/o', method='PUT',
headers=headers)
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
real_etag = resp.etag
# match x-backend-etag-is-at
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'madeup',
'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# match x-backend-etag-is-at, using first in list of alternates
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'madeup',
'X-Backend-Etag-Is-At':
'X-Object-Meta-Xtag,X-Object-Sysmeta-Z'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# match x-backend-etag-is-at, using second in list of alternates
alts = 'X-Object-Sysmeta-Y,X-Object-Meta-Xtag,X-Object-Sysmeta-Z'
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'madeup',
'X-Backend-Etag-Is-At': alts})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# match x-backend-etag-is-at, choosing first of multiple alternates
alts = 'X-Object-Sysmeta-Y,X-Object-Meta-Xtag,X-Object-Sysmeta-Xtag'
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'madeup',
'X-Backend-Etag-Is-At': alts})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# match x-backend-etag-is-at, choosing first of multiple alternates
# (switches order of second two alternates from previous assertion)
alts = 'X-Object-Sysmeta-Y,X-Object-Sysmeta-Xtag,X-Object-Meta-Xtag'
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'alternate madeup',
'X-Backend-Etag-Is-At': alts})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# no match x-backend-etag-is-at
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': real_etag,
'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
# etag-is-at metadata doesn't exist, default to real etag
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': real_etag,
'X-Backend-Etag-Is-At': 'X-Object-Meta-Missing'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# sanity no-match with no etag-is-at
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'madeup'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
# sanity match with no etag-is-at
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': real_etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# sanity with no if-match
req = Request.blank('/sda1/p/a/c/o', headers={
'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
def test_HEAD_if_match(self):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
etag = resp.etag
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Match': '"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Match': '"11111111111111111111111111111111"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={
'If-Match': '"11111111111111111111111111111111", "%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={
'If-Match':
'"11111111111111111111111111111111", '
'"22222222222222222222222222222222"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
def test_GET_if_none_match(self):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'X-Object-Meta-Soup': 'gazpacho',
'Content-Type': 'application/fizzbuzz',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
etag = resp.etag
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
self.assertEqual(resp.headers['Content-Type'], 'application/fizzbuzz')
self.assertEqual(resp.headers['X-Object-Meta-Soup'], 'gazpacho')
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': '"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': '"11111111111111111111111111111111"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match':
'"11111111111111111111111111111111", '
'"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
def test_HEAD_if_none_match(self):
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
etag = resp.etag
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match': '"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match': '"11111111111111111111111111111111"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match':
'"11111111111111111111111111111111", '
'"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
def test_GET_if_modified_since(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(float(timestamp) + 1))
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
since = \
strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 1))
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = \
strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1))
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
since = resp.headers['Last-Modified']
self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)))))
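        # Last-Modified is rounded up to whole seconds, so using it as
        # If-Modified-Since must yield 304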
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
timestamp = normalize_timestamp(int(time()))
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(float(timestamp)))
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
def test_HEAD_if_modified_since(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(float(timestamp) + 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
since = \
strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = \
strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
since = resp.headers['Last-Modified']
self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)))))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
timestamp = normalize_timestamp(int(time()))
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(float(timestamp)))
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
def test_GET_if_unmodified_since(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'X-Object-Meta-Burr': 'ito',
'Content-Type': 'application/cat-picture',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(float(timestamp) + 1))
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = \
strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 9))
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
self.assertEqual(resp.headers['Content-Type'],
'application/cat-picture')
self.assertEqual(resp.headers['X-Object-Meta-Burr'], 'ito')
since = \
strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 9))
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
since = resp.headers['Last-Modified']
self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)))))
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
def test_HEAD_if_unmodified_since(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)) + 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp))))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)) - 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
def assertECBodyEqual(self, resp, expected):
        # pull the policy index from the request headers since it is not
        # echoed back in the response headers
policy_index = int(
resp.request.headers['X-Backend-Storage-Policy-Index'])
policy = POLICIES[policy_index]
frags = encode_frag_archive_bodies(policy, expected)
frag_index = int(resp.headers['X-Object-Sysmeta-Ec-Frag-Index'])
self.assertEqual(resp.body, frags[frag_index])
def _create_ondisk_fragments(self, policy):
# Create some on disk files...
ts_iter = make_timestamp_iter()
# PUT at ts_0
ts_0 = next(ts_iter)
body = 'OLDER'
headers = {'X-Timestamp': ts_0.internal,
'Content-Length': '5',
'Content-Type': 'application/octet-stream',
'X-Backend-Storage-Policy-Index': int(policy)}
if policy.policy_type == EC_POLICY:
body = encode_frag_archive_bodies(policy, body)[0]
headers.update({
'X-Object-Sysmeta-Ec-Frag-Index': '0',
'Content-Length': len(body),
})
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
req.body = body
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# POST at ts_1
ts_1 = next(ts_iter)
headers = {'X-Timestamp': ts_1.internal,
'X-Backend-Storage-Policy-Index': int(policy)}
headers['X-Object-Meta-Test'] = 'abc'
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers=headers)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
# PUT again at ts_2 but without making the data file durable
ts_2 = next(ts_iter)
body = 'NEWER'
headers = {'X-Timestamp': ts_2.internal,
'Content-Length': '5',
'Content-Type': 'application/octet-stream',
'X-Backend-Storage-Policy-Index': int(policy)}
if policy.policy_type == EC_POLICY:
body = encode_frag_archive_bodies(policy, body)[2]
headers.update({
'X-Object-Sysmeta-Ec-Frag-Index': '2',
'Content-Length': len(body),
})
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
req.body = body
# patch the commit method to do nothing so EC object is non-durable
with mock.patch('swift.obj.diskfile.ECDiskFileWriter.commit'):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
return ts_0, ts_1, ts_2
def test_GET_HEAD_with_fragment_preferences(self):
for policy in POLICIES:
ts_0, ts_1, ts_2 = self._create_ondisk_fragments(policy)
backend_frags = {ts_0.internal: [0], ts_2.internal: [2]}
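            # expected X-Backend-Fragments map: frag 0 (durable) at ts_0 and
            # frag 2 (non-durable) at ts_2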
def _assert_frag_0_at_ts_0(resp):
expect = {
'X-Timestamp': ts_1.normal,
'X-Backend-Timestamp': ts_1.internal,
'X-Backend-Data-Timestamp': ts_0.internal,
'X-Backend-Durable-Timestamp': ts_0.internal,
'X-Object-Sysmeta-Ec-Frag-Index': '0',
'X-Object-Meta-Test': 'abc'}
self.assertDictContainsSubset(expect, resp.headers)
self.assertEqual(backend_frags, json.loads(
resp.headers['X-Backend-Fragments']))
def _assert_repl_data_at_ts_2():
self.assertIn(resp.status_int, (200, 202))
expect = {
'X-Timestamp': ts_2.normal,
'X-Backend-Timestamp': ts_2.internal,
'X-Backend-Data-Timestamp': ts_2.internal,
'X-Backend-Durable-Timestamp': ts_2.internal}
self.assertDictContainsSubset(expect, resp.headers)
self.assertNotIn('X-Object-Meta-Test', resp.headers)
# Sanity check: Request with no preferences should default to the
# durable frag
headers = {'X-Backend-Storage-Policy-Index': int(policy)}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_0_at_ts_0(resp)
self.assertECBodyEqual(resp, 'OLDER')
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
if policy.policy_type == EC_POLICY:
_assert_frag_0_at_ts_0(resp)
else:
_assert_repl_data_at_ts_2()
# Request with preferences can select the older frag
prefs = json.dumps(
[{'timestamp': ts_0.internal, 'exclude': [1, 3]}])
headers = {'X-Backend-Storage-Policy-Index': int(policy),
'X-Backend-Fragment-Preferences': prefs}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_0_at_ts_0(resp)
self.assertECBodyEqual(resp, 'OLDER')
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_0_at_ts_0(resp)
else:
_assert_repl_data_at_ts_2()
def _assert_frag_2_at_ts_2(resp):
self.assertIn(resp.status_int, (200, 202))
# do not expect meta file to be included since it is older
expect = {
'X-Timestamp': ts_2.normal,
'X-Backend-Timestamp': ts_2.internal,
'X-Backend-Data-Timestamp': ts_2.internal,
'X-Backend-Durable-Timestamp': ts_0.internal,
'X-Object-Sysmeta-Ec-Frag-Index': '2'}
self.assertDictContainsSubset(expect, resp.headers)
self.assertEqual(backend_frags, json.loads(
resp.headers['X-Backend-Fragments']))
self.assertNotIn('X-Object-Meta-Test', resp.headers)
# Request with preferences can select the newer non-durable frag
prefs = json.dumps(
[{'timestamp': ts_2.internal, 'exclude': [1, 3]}])
headers = {'X-Backend-Storage-Policy-Index': int(policy),
'X-Backend-Fragment-Preferences': prefs}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
self.assertECBodyEqual(resp, 'NEWER')
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
else:
_assert_repl_data_at_ts_2()
# Request with preference for ts_0 but excludes index 0 will
# default to newest frag
prefs = json.dumps(
[{'timestamp': ts_0.internal, 'exclude': [0]}])
headers = {'X-Backend-Storage-Policy-Index': int(policy),
'X-Backend-Fragment-Preferences': prefs}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
self.assertECBodyEqual(resp, 'NEWER')
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
else:
_assert_repl_data_at_ts_2()
# Request with preferences that exclude all frags get nothing
prefs = json.dumps(
[{'timestamp': ts_0.internal, 'exclude': [0]},
{'timestamp': ts_2.internal, 'exclude': [2]}])
headers = {'X-Backend-Storage-Policy-Index': int(policy),
'X-Backend-Fragment-Preferences': prefs}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
self.assertEqual(resp.status_int, 404)
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
self.assertEqual(resp.status_int, 404)
else:
_assert_repl_data_at_ts_2()
            # Request with empty preferences will get the newest
            # (non-durable) frag
prefs = json.dumps([])
headers = {'X-Backend-Storage-Policy-Index': int(policy),
'X-Backend-Fragment-Preferences': prefs}
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
self.assertECBodyEqual(resp, 'NEWER')
else:
_assert_repl_data_at_ts_2()
self.assertEqual(resp.body, 'NEWER')
req = Request.blank('/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
if policy.policy_type == EC_POLICY:
_assert_frag_2_at_ts_2(resp)
else:
_assert_repl_data_at_ts_2()
def test_GET_quarantine(self):
# Test swift.obj.server.ObjectController.GET
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
policy=POLICIES.legacy)
disk_file.open()
file_name = os.path.basename(disk_file._data_file)
etag = md5()
etag.update('VERIF')
etag = etag.hexdigest()
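        # rewrite the on-disk metadata with an etag computed over 'VERIF'
        # (one byte short) so a full read will detect corruption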
metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o',
'Content-Length': 6, 'ETag': etag}
diskfile.write_metadata(disk_file._fp, metadata)
self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
quar_dir = os.path.join(
self.testdir, 'sda1', 'quarantined', 'objects',
os.path.basename(os.path.dirname(disk_file._data_file)))
self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
body = resp.body # actually does quarantining
self.assertEqual(body, 'VERIFY')
self.assertEqual(os.listdir(quar_dir)[0], file_name)
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
def test_GET_quarantine_zbyte(self):
# Test swift.obj.server.ObjectController.GET
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
policy=POLICIES.legacy)
disk_file.open()
file_name = os.path.basename(disk_file._data_file)
with open(disk_file._data_file) as fp:
metadata = diskfile.read_metadata(fp)
os.unlink(disk_file._data_file)
with open(disk_file._data_file, 'w') as fp:
diskfile.write_metadata(fp, metadata)
self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
quar_dir = os.path.join(
self.testdir, 'sda1', 'quarantined', 'objects',
os.path.basename(os.path.dirname(disk_file._data_file)))
self.assertEqual(os.listdir(quar_dir)[0], file_name)
def test_GET_quarantine_range(self):
# Test swift.obj.server.ObjectController.GET
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
policy=POLICIES.legacy)
disk_file.open()
file_name = os.path.basename(disk_file._data_file)
etag = md5()
etag.update('VERIF')
etag = etag.hexdigest()
metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o',
'Content-Length': 6, 'ETag': etag}
diskfile.write_metadata(disk_file._fp, metadata)
self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
req = Request.blank('/sda1/p/a/c/o')
req.range = 'bytes=0-4' # partial
resp = req.get_response(self.object_controller)
quar_dir = os.path.join(
self.testdir, 'sda1', 'quarantined', 'objects',
os.path.basename(os.path.dirname(disk_file._data_file)))
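        # consume the body; a partial read skips etag verification, so
        # nothing is quarantined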
resp.body
self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
self.assertFalse(os.path.isdir(quar_dir))
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
req = Request.blank('/sda1/p/a/c/o')
req.range = 'bytes=1-6' # partial
resp = req.get_response(self.object_controller)
quar_dir = os.path.join(
self.testdir, 'sda1', 'quarantined', 'objects',
os.path.basename(os.path.dirname(disk_file._data_file)))
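        # consume the body; still a partial read, still no quarantine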
resp.body
self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
self.assertFalse(os.path.isdir(quar_dir))
req = Request.blank('/sda1/p/a/c/o')
req.range = 'bytes=0-14' # full
resp = req.get_response(self.object_controller)
quar_dir = os.path.join(
self.testdir, 'sda1', 'quarantined', 'objects',
os.path.basename(os.path.dirname(disk_file._data_file)))
self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
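        # a full read verifies the etag, detects the mismatch and
        # quarantines the object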
resp.body
self.assertTrue(os.path.isdir(quar_dir))
req = Request.blank('/sda1/p/a/c/o')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
@mock.patch("time.time", mock_time)
def test_DELETE(self):
# Test swift.obj.server.ObjectController.DELETE
req = Request.blank('/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
# The following should have created a tombstone file
timestamp = normalize_timestamp(1000)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
ts_1000_file = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertTrue(os.path.isfile(ts_1000_file))
# There should now be a 1000 ts file.
self.assertEqual(len(os.listdir(os.path.dirname(ts_1000_file))), 1)
# The following should *not* have created a tombstone file.
timestamp = normalize_timestamp(999)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
ts_999_file = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertFalse(os.path.isfile(ts_999_file))
self.assertTrue(os.path.isfile(ts_1000_file))
self.assertEqual(len(os.listdir(os.path.dirname(ts_1000_file))), 1)
orig_timestamp = utils.Timestamp(1002).internal
headers = {'X-Timestamp': orig_timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'}
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
        # The PUT should have cleaned up the 1000 tombstone; only the 1002
        # data file should remain.
data_1002_file = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
orig_timestamp + '.data')
self.assertTrue(os.path.isfile(data_1002_file))
self.assertEqual(len(os.listdir(os.path.dirname(data_1002_file))), 1)
# The following should *not* have created a tombstone file.
timestamp = normalize_timestamp(1001)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
ts_1001_file = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertFalse(os.path.isfile(ts_1001_file))
self.assertTrue(os.path.isfile(data_1002_file))
self.assertEqual(len(os.listdir(os.path.dirname(ts_1001_file))), 1)
timestamp = normalize_timestamp(1003)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
ts_1003_file = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertTrue(os.path.isfile(ts_1003_file))
self.assertEqual(len(os.listdir(os.path.dirname(ts_1003_file))), 1)
def test_DELETE_bad_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': 'bad'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_DELETE_succeeds_with_later_POST(self):
t_put = next(self.ts).internal
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': t_put,
'Content-Length': 0,
                                     'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
t_delete = next(self.ts).internal
t_post = next(self.ts).internal
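        # note t_delete < t_post: DELETE is compared against the data file
        # timestamp, not the later .meta timestamp, so it still succeeds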
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': t_post})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': t_delete},
)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
obj_dir = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')))
ts_file = os.path.join(obj_dir, t_delete + '.ts')
self.assertTrue(os.path.isfile(ts_file))
meta_file = os.path.join(obj_dir, t_post + '.meta')
self.assertTrue(os.path.isfile(meta_file))
def test_DELETE_container_updates(self):
# Test swift.obj.server.ObjectController.DELETE and container
# updates, making sure container update is called in the correct
# state.
start = time()
orig_timestamp = utils.Timestamp(start)
headers = {'X-Timestamp': orig_timestamp.internal,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'}
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
calls_made = [0]
def our_container_update(*args, **kwargs):
calls_made[0] += 1
orig_cu = self.object_controller.container_update
self.object_controller.container_update = our_container_update
try:
# The following request should return 409 (HTTP Conflict). A
# tombstone file should not have been created with this timestamp.
timestamp = utils.Timestamp(start - 0.00001)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp.internal})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 409)
self.assertEqual(resp.headers['x-backend-timestamp'],
orig_timestamp.internal)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertFalse(os.path.isfile(objfile))
self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
self.assertEqual(0, calls_made[0])
# The following request should return 204, and the object should
# be truly deleted (container update is performed) because this
# timestamp is newer. A tombstone file should have been created
# with this timestamp.
timestamp = utils.Timestamp(start + 0.00001)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp.internal})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(1, calls_made[0])
self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
# The following request should return a 404, as the object should
# already have been deleted, but it should have also performed a
# container update because the timestamp is newer, and a tombstone
# file should also exist with this timestamp.
timestamp = utils.Timestamp(start + 0.00002)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp.internal})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(2, calls_made[0])
self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
# The following request should return a 404, as the object should
# already have been deleted. It should not have performed a
# container update, because the timestamp is older, nor created a
# tombstone file with this timestamp.
timestamp = utils.Timestamp(start + 0.00001)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp.internal})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.ts')
self.assertFalse(os.path.isfile(objfile))
self.assertEqual(2, calls_made[0])
self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
finally:
self.object_controller.container_update = orig_cu
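# If the diskfile layer raises DiskFileNoSpace while writing the
# tombstone, the server should answer 507 Insufficient Storage.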
def test_DELETE_full_drive(self):
def mock_diskfile_delete(self, timestamp):
raise DiskFileNoSpace()
t_put = utils.Timestamp.now()
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': t_put.internal,
'Content-Length': 0,
'Content-Type': 'plain/text'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
with mock.patch('swift.obj.diskfile.BaseDiskFile.delete',
mock_diskfile_delete):
t_delete = utils.Timestamp.now()
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': t_delete.internal})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 507)
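# Internal timestamps may carry an offset so that writes within the
# same wall-clock timestamp can still be ordered; the following test
# walks a PUT/overwrite/DELETE sequence and checks that container
# updates and read-back responses report the offset timestamps.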
def test_object_update_with_offset(self):
container_updates = []
def capture_updates(ip, port, method, path, headers, *args, **kwargs):
container_updates.append((ip, port, method, path, headers))
# create a new object
create_timestamp = next(self.ts).internal
req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test1',
headers={'X-Timestamp': create_timestamp,
'X-Container-Host': '10.0.0.1:8080',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p',
'Content-Type': 'text/plain'})
with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(1, len(container_updates))
for update in container_updates:
ip, port, method, path, headers = update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '8080')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Size': len('test1'),
'X-Etag': md5('test1').hexdigest(),
'X-Content-Type': 'text/plain',
'X-Timestamp': create_timestamp,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
container_updates = [] # reset
# read back object
req = Request.blank('/sda1/p/a/c/o', method='GET')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['X-Timestamp'],
utils.Timestamp(create_timestamp).normal)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
create_timestamp)
self.assertEqual(resp.body, 'test1')
# send an update with an offset
offset_timestamp = utils.Timestamp(
create_timestamp, offset=1).internal
req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test2',
headers={'X-Timestamp': offset_timestamp,
'X-Container-Host': '10.0.0.1:8080',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p',
'Content-Type': 'text/html'})
with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(1, len(container_updates))
for update in container_updates:
ip, port, method, path, headers = update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '8080')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Size': len('test2'),
'X-Etag': md5('test2').hexdigest(),
'X-Content-Type': 'text/html',
'X-Timestamp': offset_timestamp,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
container_updates = [] # reset
# read back new offset
req = Request.blank('/sda1/p/a/c/o', method='GET')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['X-Timestamp'],
utils.Timestamp(offset_timestamp).normal)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
offset_timestamp)
self.assertEqual(resp.body, 'test2')
# now overwrite with a newer time
overwrite_timestamp = next(self.ts).internal
req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test3',
headers={'X-Timestamp': overwrite_timestamp,
'X-Container-Host': '10.0.0.1:8080',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p',
'Content-Type': 'text/enriched'})
with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(1, len(container_updates))
for update in container_updates:
ip, port, method, path, headers = update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '8080')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Size': len('test3'),
'X-Etag': md5('test3').hexdigest(),
'X-Content-Type': 'text/enriched',
'X-Timestamp': overwrite_timestamp,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
container_updates = [] # reset
# read back overwrite
req = Request.blank('/sda1/p/a/c/o', method='GET')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['X-Timestamp'],
utils.Timestamp(overwrite_timestamp).normal)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
overwrite_timestamp)
self.assertEqual(resp.body, 'test3')
# delete with an offset
offset_delete = utils.Timestamp(overwrite_timestamp,
offset=1).internal
req = Request.blank('/sda1/p/a/c/o', method='DELETE',
headers={'X-Timestamp': offset_delete,
'X-Container-Host': '10.0.0.1:8080',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p'})
with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 204)
self.assertEqual(1, len(container_updates))
for update in container_updates:
ip, port, method, path, headers = update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '8080')
self.assertEqual(method, 'DELETE')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Timestamp': offset_delete,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
container_updates = [] # reset
# read back offset delete
req = Request.blank('/sda1/p/a/c/o', method='GET')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertIsNone(resp.headers['X-Timestamp'])
self.assertEqual(resp.headers['X-Backend-Timestamp'], offset_delete)
# and one more delete with a newer timestamp
delete_timestamp = next(self.ts).internal
req = Request.blank('/sda1/p/a/c/o', method='DELETE',
headers={'X-Timestamp': delete_timestamp,
'X-Container-Host': '10.0.0.1:8080',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p'})
with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 404)
self.assertEqual(1, len(container_updates))
for update in container_updates:
ip, port, method, path, headers = update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '8080')
self.assertEqual(method, 'DELETE')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Timestamp': delete_timestamp,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
container_updates = [] # reset
# read back delete
req = Request.blank('/sda1/p/a/c/o', method='GET')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertIsNone(resp.headers['X-Timestamp'])
self.assertEqual(resp.headers['X-Backend-Timestamp'], delete_timestamp)
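# The next few tests drive the server as a bare WSGI callable and
# check the status line handed to start_response.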
def test_call_bad_request(self):
# Test swift.obj.server.ObjectController.__call__
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': 'PUT',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '400 ')
def test_call_not_found(self):
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '404 ')
def test_call_bad_method(self):
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': 'INVALID',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
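# With hash_path mocked so every object name hashes identically, a
# second PUT under a different name must be refused (403) rather
# than silently sharing the first object's on-disk directory.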
def test_call_name_collision(self):
def my_check(*args):
return False
def my_hash_path(*args):
return md5('collide').hexdigest()
with mock.patch("swift.obj.diskfile.hash_path", my_hash_path):
with mock.patch("swift.obj.server.check_object_creation",
my_check):
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({
'REQUEST_METHOD': 'PUT',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'CONTENT_TYPE': 'text/html',
'HTTP_X_TIMESTAMP': normalize_timestamp(1.2),
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '201 ')
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({
'REQUEST_METHOD': 'PUT',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/b/d/x',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'CONTENT_TYPE': 'text/html',
'HTTP_X_TIMESTAMP': normalize_timestamp(1.3),
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '403 ')
def test_invalid_method_doesnt_exist(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.object_controller.__call__({
'REQUEST_METHOD': 'method_doesnt_exist',
'PATH_INFO': '/sda1/p/a/c/o'},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_invalid_method_is_not_public(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': '__init__',
'PATH_INFO': '/sda1/p/a/c/o'},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
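# Runs a real eventlet wsgi server on an ephemeral port to verify
# that a Transfer-Encoding: chunked upload is reassembled correctly.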
def test_chunked_put(self):
listener = listen_zero()
port = listener.getsockname()[1]
killer = spawn(wsgi.server, listener, self.object_controller,
NullLogger())
sock = connect_tcp(('localhost', port))
fd = sock.makefile()
fd.write('PUT /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
'Content-Type: text/plain\r\n'
'Connection: close\r\nX-Timestamp: %s\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n' % normalize_timestamp(
1.0))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', port))
fd = sock.makefile()
fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
response = fd.read()
self.assertEqual(response, 'oh hai')
killer.kill()
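# When a request carries both Content-Length: 0 and chunked transfer
# encoding, the transfer encoding takes precedence and the chunked
# body is what gets stored.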
def test_chunked_content_length_mismatch_zero(self):
listener = listen_zero()
port = listener.getsockname()[1]
killer = spawn(wsgi.server, listener, self.object_controller,
NullLogger())
sock = connect_tcp(('localhost', port))
fd = sock.makefile()
fd.write('PUT /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
'Content-Type: text/plain\r\n'
'Connection: close\r\nX-Timestamp: %s\r\n'
'Content-Length: 0\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n' % normalize_timestamp(
1.0))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', port))
fd = sock.makefile()
fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
response = fd.read()
self.assertEqual(response, 'oh hai')
killer.kill()
def test_max_object_name_length(self):
timestamp = normalize_timestamp(time())
max_name_len = constraints.MAX_OBJECT_NAME_LENGTH
req = Request.blank(
'/sda1/p/a/c/' + ('1' * max_name_len),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'DATA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/' + ('2' * (max_name_len + 1)),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'DATA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
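# A body that arrives more slowly than max_upload_time allows should
# abort the PUT with 408 Request Timeout.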
def test_max_upload_time(self):
class SlowBody(object):
def __init__(self):
self.sent = 0
def read(self, size=-1):
if self.sent < 4:
sleep(0.1)
self.sent += 1
return ' '
return ''
def set_hundred_continue_response_headers(*a, **kw):
pass
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4', 'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.object_controller.max_upload_time = 0.1
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4', 'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 408)
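# A body shorter than the advertised Content-Length is reported as
# 499 Client Disconnect.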
def test_short_body(self):
class ShortBody(object):
def __init__(self):
self.sent = False
def read(self, size=-1):
if not self.sent:
self.sent = True
return ' '
return ''
def set_hundred_continue_response_headers(*a, **kw):
pass
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': ShortBody()},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4', 'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 499)
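# Unparseable or out-of-range conditional date headers are ignored
# rather than treated as errors, so the GET still returns 200.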
def test_bad_sinces(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4', 'Content-Type': 'text/plain'},
body=' ')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Unmodified-Since': 'Not a valid date'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Modified-Since': 'Not a valid date'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
too_big_date_list = list(datetime.datetime.max.timetuple())
too_big_date_list[0] += 1 # bump up the year
too_big_date = strftime(
"%a, %d %b %Y %H:%M:%S UTC", struct_time(too_big_date_list))
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Unmodified-Since': too_big_date})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
def test_content_encoding(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4', 'Content-Type': 'text/plain',
'Content-Encoding': 'gzip'},
body=' ')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['content-encoding'], 'gzip')
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['content-encoding'], 'gzip')
def test_async_update_http_connect(self):
policy = random.choice(list(POLICIES))
self._stage_tmp_dir(policy)
given_args = []
def fake_http_connect(*args):
given_args.extend(args)
raise Exception('test')
orig_http_connect = object_server.http_connect
try:
object_server.http_connect = fake_http_connect
self.object_controller.async_update(
'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
{'x-timestamp': '1', 'x-out': 'set',
'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
policy)
finally:
object_server.http_connect = orig_http_connect
self.assertEqual(
given_args,
['127.0.0.1', '1234', 'sdc1', 1, 'PUT', '/a/c/o', {
'x-timestamp': '1', 'x-out': 'set',
'user-agent': 'object-server %s' % os.getpid(),
'X-Backend-Storage-Policy-Index': int(policy)}])
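# With X-Delete-At-Host naming two servers, a PUT should fan the
# expirer-container update out to both (as policy-0 requests) in
# addition to the regular container update.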
@patch_policies([StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one'),
StoragePolicy(37, 'fantastico')])
def test_updating_multiple_delete_at_container_servers(self):
# Rebuild the diskfile router so it picks up the patched policies.
self.object_controller._diskfile_router = diskfile.DiskFileRouter(
self.conf, self.object_controller.logger)
policy = random.choice(list(POLICIES))
self.object_controller.expiring_objects_account = 'exp'
self.object_controller.expiring_objects_container_divisor = 60
http_connect_args = []
def fake_http_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None, ssl=False):
class SuccessfulFakeConn(object):
@property
def status(self):
return 200
def getresponse(self):
return self
def read(self):
return ''
captured_args = {'ipaddr': ipaddr, 'port': port,
'device': device, 'partition': partition,
'method': method, 'path': path, 'ssl': ssl,
'headers': headers, 'query_string': query_string}
http_connect_args.append(
dict((k, v) for k, v in captured_args.items()
if v is not None))
return SuccessfulFakeConn()
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '12345',
'Content-Type': 'application/burrito',
'Content-Length': '0',
'X-Backend-Storage-Policy-Index': int(policy),
'X-Container-Partition': '20',
'X-Container-Host': '1.2.3.4:5',
'X-Container-Device': 'sdb1',
'X-Delete-At': 9999999999,
'X-Delete-At-Container': '9999999960',
'X-Delete-At-Host': "10.1.1.1:6201,10.2.2.2:6202",
'X-Delete-At-Partition': '6237',
'X-Delete-At-Device': 'sdp,sdq'})
with mock.patch.object(
object_server, 'http_connect', fake_http_connect):
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
http_connect_args.sort(key=operator.itemgetter('ipaddr'))
self.assertEqual(len(http_connect_args), 3)
self.assertEqual(
http_connect_args[0],
{'ipaddr': '1.2.3.4',
'port': '5',
'path': '/a/c/o',
'device': 'sdb1',
'partition': '20',
'method': 'PUT',
'ssl': False,
'headers': HeaderKeyDict({
'x-content-type': 'application/burrito',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-size': '0',
'x-timestamp': utils.Timestamp('12345').internal,
'referer': 'PUT http://localhost/sda1/p/a/c/o',
'user-agent': 'object-server %d' % os.getpid(),
'X-Backend-Storage-Policy-Index': int(policy),
'x-trans-id': '-'})})
self.assertEqual(
http_connect_args[1],
{'ipaddr': '10.1.1.1',
'port': '6201',
'path': '/exp/9999999960/9999999999-a/c/o',
'device': 'sdp',
'partition': '6237',
'method': 'PUT',
'ssl': False,
'headers': HeaderKeyDict({
'x-content-type': 'text/plain',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-size': '0',
'x-timestamp': utils.Timestamp('12345').internal,
'referer': 'PUT http://localhost/sda1/p/a/c/o',
'user-agent': 'object-server %d' % os.getpid(),
# system account storage policy is 0
'X-Backend-Storage-Policy-Index': 0,
'x-trans-id': '-'})})
self.assertEqual(
http_connect_args[2],
{'ipaddr': '10.2.2.2',
'port': '6202',
'path': '/exp/9999999960/9999999999-a/c/o',
'device': 'sdq',
'partition': '6237',
'method': 'PUT',
'ssl': False,
'headers': HeaderKeyDict({
'x-content-type': 'text/plain',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-size': '0',
'x-timestamp': utils.Timestamp('12345').internal,
'referer': 'PUT http://localhost/sda1/p/a/c/o',
'user-agent': 'object-server %d' % os.getpid(),
# system account storage policy is 0
'X-Backend-Storage-Policy-Index': 0,
'x-trans-id': '-'})})
@patch_policies([StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one'),
StoragePolicy(26, 'twice-thirteen')])
def test_updating_multiple_container_servers(self):
# Rebuild the diskfile router so it picks up the patched policies.
self.object_controller._diskfile_router = diskfile.DiskFileRouter(
self.conf, self.object_controller.logger)
http_connect_args = []
def fake_http_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None, ssl=False):
class SuccessfulFakeConn(object):
@property
def status(self):
return 200
def getresponse(self):
return self
def read(self):
return ''
captured_args = {'ipaddr': ipaddr, 'port': port,
'device': device, 'partition': partition,
'method': method, 'path': path, 'ssl': ssl,
'headers': headers, 'query_string': query_string}
http_connect_args.append(
dict((k, v) for k, v in captured_args.items()
if v is not None))
return SuccessfulFakeConn()
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '12345',
'Content-Type': 'application/burrito',
'Content-Length': '0',
'X-Backend-Storage-Policy-Index': '26',
'X-Container-Partition': '20',
'X-Container-Host': '1.2.3.4:5, 6.7.8.9:10',
'X-Container-Device': 'sdb1, sdf1'})
with mock.patch.object(
object_server, 'http_connect', fake_http_connect):
with fake_spawn():
req.get_response(self.object_controller)
http_connect_args.sort(key=operator.itemgetter('ipaddr'))
self.assertEqual(len(http_connect_args), 2)
self.assertEqual(
http_connect_args[0],
{'ipaddr': '1.2.3.4',
'port': '5',
'path': '/a/c/o',
'device': 'sdb1',
'partition': '20',
'method': 'PUT',
'ssl': False,
'headers': HeaderKeyDict({
'x-content-type': 'application/burrito',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-size': '0',
'x-timestamp': utils.Timestamp('12345').internal,
'X-Backend-Storage-Policy-Index': '26',
'referer': 'PUT http://localhost/sda1/p/a/c/o',
'user-agent': 'object-server %d' % os.getpid(),
'x-trans-id': '-'})})
self.assertEqual(
http_connect_args[1],
{'ipaddr': '6.7.8.9',
'port': '10',
'path': '/a/c/o',
'device': 'sdf1',
'partition': '20',
'method': 'PUT',
'ssl': False,
'headers': HeaderKeyDict({
'x-content-type': 'application/burrito',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-size': '0',
'x-timestamp': utils.Timestamp('12345').internal,
'X-Backend-Storage-Policy-Index': '26',
'referer': 'PUT http://localhost/sda1/p/a/c/o',
'user-agent': 'object-server %d' % os.getpid(),
'x-trans-id': '-'})})
def test_object_delete_at_async_update(self):
policy = random.choice(list(POLICIES))
container_updates = []
def capture_updates(ip, port, method, path, headers, *args, **kwargs):
container_updates.append((ip, port, method, path, headers))
# put everything in the future; otherwise setting X-Delete-At may fail
self.ts = make_timestamp_iter(10)
put_timestamp = next(self.ts).internal
delete_at_timestamp = utils.normalize_delete_at_timestamp(
next(self.ts).normal)
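# Expirer updates go to a container named by rounding the delete-at
# time down to a multiple of expiring_objects_container_divisor.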
delete_at_container = (
int(delete_at_timestamp) /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
headers = {
'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp,
'X-Container-Host': '10.0.0.1:6201',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p',
'X-Delete-At': delete_at_timestamp,
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': 'p',
'X-Delete-At-Host': '10.0.0.2:6202',
'X-Delete-At-Device': 'sda1',
'X-Backend-Storage-Policy-Index': int(policy)}
if policy.policy_type == EC_POLICY:
headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
req = Request.blank(
'/sda1/p/a/c/o', method='PUT', body='', headers=headers)
with mocked_http_conn(
500, 500, give_connect=capture_updates) as fake_conn:
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertEqual(201, resp.status_int, resp.body)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(2, len(container_updates))
delete_at_update, container_update = container_updates
# delete_at_update
ip, port, method, path, headers = delete_at_update
self.assertEqual(ip, '10.0.0.2')
self.assertEqual(port, '6202')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/sda1/p/.expiring_objects/%s/%s-a/c/o' %
(delete_at_container, delete_at_timestamp))
expected = {
'X-Timestamp': put_timestamp,
# system account storage policy is 0
'X-Backend-Storage-Policy-Index': 0,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
# container_update
ip, port, method, path, headers = container_update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '6201')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Timestamp': put_timestamp,
'X-Backend-Storage-Policy-Index': int(policy),
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
# check async pendings
async_dir = os.path.join(self.testdir, 'sda1',
diskfile.get_async_dir(policy))
found_files = []
for root, dirs, files in os.walk(async_dir):
for f in files:
async_file = os.path.join(root, f)
found_files.append(async_file)
data = pickle.load(open(async_file))
if data['account'] == 'a':
self.assertEqual(
int(data['headers']
['X-Backend-Storage-Policy-Index']), int(policy))
elif data['account'] == '.expiring_objects':
self.assertEqual(
int(data['headers']
['X-Backend-Storage-Policy-Index']), 0)
else:
self.fail('unexpected async pending data')
self.assertEqual(2, len(found_files))
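# When connecting to the container server raises, the update must be
# pickled into async_pending; HASH_PATH_PREFIX is pinned to '' so
# the expected on-disk path is deterministic.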
def test_async_update_saves_on_exception(self):
policy = random.choice(list(POLICIES))
self._stage_tmp_dir(policy)
_prefix = utils.HASH_PATH_PREFIX
utils.HASH_PATH_PREFIX = ''
def fake_http_connect(*args):
raise Exception('test')
orig_http_connect = object_server.http_connect
try:
object_server.http_connect = fake_http_connect
self.object_controller.async_update(
'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
{'x-timestamp': '1', 'x-out': 'set',
'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
policy)
finally:
object_server.http_connect = orig_http_connect
utils.HASH_PATH_PREFIX = _prefix
async_dir = diskfile.get_async_dir(policy)
self.assertEqual(
pickle.load(open(os.path.join(
self.testdir, 'sda1', async_dir, 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
utils.Timestamp(1).internal))),
{'headers': {'x-timestamp': '1', 'x-out': 'set',
'user-agent': 'object-server %s' % os.getpid(),
'X-Backend-Storage-Policy-Index': int(policy)},
'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
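# Non-2xx container responses (1xx, 3xx, 5xx) must likewise leave an
# async_pending pickle behind.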
def test_async_update_saves_on_non_2xx(self):
policy = random.choice(list(POLICIES))
self._stage_tmp_dir(policy)
_prefix = utils.HASH_PATH_PREFIX
utils.HASH_PATH_PREFIX = ''
def fake_http_connect(status):
class FakeConn(object):
def __init__(self, status):
self.status = status
def getresponse(self):
return self
def read(self):
return ''
return lambda *args: FakeConn(status)
orig_http_connect = object_server.http_connect
try:
for status in (199, 300, 503):
object_server.http_connect = fake_http_connect(status)
self.object_controller.async_update(
'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
{'x-timestamp': '1', 'x-out': str(status),
'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
policy)
async_dir = diskfile.get_async_dir(policy)
self.assertEqual(
pickle.load(open(os.path.join(
self.testdir, 'sda1', async_dir, 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
utils.Timestamp(1).internal))),
{'headers': {'x-timestamp': '1', 'x-out': str(status),
'user-agent':
'object-server %s' % os.getpid(),
'X-Backend-Storage-Policy-Index':
int(policy)},
'account': 'a', 'container': 'c', 'obj': 'o',
'op': 'PUT'})
finally:
object_server.http_connect = orig_http_connect
utils.HASH_PATH_PREFIX = _prefix
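# Successful (2xx) container responses must not create an
# async_pending entry.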
def test_async_update_does_not_save_on_2xx(self):
_prefix = utils.HASH_PATH_PREFIX
utils.HASH_PATH_PREFIX = ''
def fake_http_connect(status):
class FakeConn(object):
def __init__(self, status):
self.status = status
def getresponse(self):
return self
def read(self):
return ''
return lambda *args: FakeConn(status)
orig_http_connect = object_server.http_connect
try:
for status in (200, 299):
object_server.http_connect = fake_http_connect(status)
self.object_controller.async_update(
'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
{'x-timestamp': '1', 'x-out': str(status)}, 'sda1', 0)
self.assertFalse(
os.path.exists(os.path.join(
self.testdir, 'sda1', 'async_pending', 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-0000000001.00000')))
finally:
object_server.http_connect = orig_http_connect
utils.HASH_PATH_PREFIX = _prefix
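# A container server that hangs past node_timeout counts as a
# failure, so the update is saved to async_pending.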
def test_async_update_saves_on_timeout(self):
policy = random.choice(list(POLICIES))
self._stage_tmp_dir(policy)
_prefix = utils.HASH_PATH_PREFIX
utils.HASH_PATH_PREFIX = ''
def fake_http_connect():
class FakeConn(object):
def getresponse(self):
return sleep(1)
return lambda *args: FakeConn()
orig_http_connect = object_server.http_connect
try:
for status in (200, 299):
object_server.http_connect = fake_http_connect()
self.object_controller.node_timeout = 0.001
self.object_controller.async_update(
'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
{'x-timestamp': '1', 'x-out': str(status)}, 'sda1',
policy)
async_dir = diskfile.get_async_dir(policy)
self.assertTrue(
os.path.exists(os.path.join(
self.testdir, 'sda1', async_dir, 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
utils.Timestamp(1).internal)))
finally:
object_server.http_connect = orig_http_connect
utils.HASH_PATH_PREFIX = _prefix
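# Without X-Container-Host/-Partition/-Device hints on the request,
# container_update should be a no-op and never reach async_update.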
def test_container_update_no_async_update(self):
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.container_update(
'PUT', 'a', 'c', 'o', req, {
'x-size': '0', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain', 'x-timestamp': '1'},
'sda1', policy)
self.assertEqual(given_args, [])
def test_container_update_success(self):
container_updates = []
def capture_updates(ip, port, method, path, headers, *args, **kwargs):
container_updates.append((ip, port, method, path, headers))
req = Request.blank(
'/sda1/0/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '123',
'X-Container-Host': 'chost:cport',
'X-Container-Partition': 'cpartition',
'X-Container-Device': 'cdevice',
'Content-Type': 'text/plain'}, body='')
with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(len(container_updates), 1)
ip, port, method, path, headers = container_updates[0]
self.assertEqual(ip, 'chost')
self.assertEqual(port, 'cport')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/cdevice/cpartition/a/c/o')
self.assertEqual(headers, HeaderKeyDict({
'user-agent': 'object-server %s' % os.getpid(),
'x-size': '0',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain',
'x-timestamp': utils.Timestamp(1).internal,
'X-Backend-Storage-Policy-Index': '0', # default when not given
'x-trans-id': '123',
'referer': 'PUT http://localhost/sda1/0/a/c/o'}))
def test_PUT_container_update_overrides(self):
def do_test(override_headers):
container_updates = []
def capture_updates(
ip, port, method, path, headers, *args, **kwargs):
container_updates.append((ip, port, method, path, headers))
ts_put = next(self.ts)
headers = {
'X-Timestamp': ts_put.internal,
'X-Trans-Id': '123',
'X-Container-Host': 'chost:cport',
'X-Container-Partition': 'cpartition',
'X-Container-Device': 'cdevice',
'Content-Type': 'text/plain',
}
headers.update(override_headers)
req = Request.blank('/sda1/0/a/c/o', method='PUT',
headers=headers, body='')
with mocked_http_conn(
200, give_connect=capture_updates) as fake_conn:
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(len(container_updates), 1)
ip, port, method, path, headers = container_updates[0]
self.assertEqual(ip, 'chost')
self.assertEqual(port, 'cport')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/cdevice/cpartition/a/c/o')
self.assertEqual(headers, HeaderKeyDict({
'user-agent': 'object-server %s' % os.getpid(),
'x-size': '0',
'x-etag': 'override_etag',
'x-content-type': 'override_val',
'x-timestamp': ts_put.internal,
'X-Backend-Storage-Policy-Index': '0', # default
'x-trans-id': '123',
'referer': 'PUT http://localhost/sda1/0/a/c/o',
'x-foo': 'bar'}))
# EC policy override headers
do_test({
'X-Backend-Container-Update-Override-Etag': 'override_etag',
'X-Backend-Container-Update-Override-Content-Type': 'override_val',
'X-Backend-Container-Update-Override-Foo': 'bar',
'X-Backend-Container-Ignored': 'ignored'})
# middleware override headers
do_test({
'X-Object-Sysmeta-Container-Update-Override-Etag': 'override_etag',
'X-Object-Sysmeta-Container-Update-Override-Content-Type':
'override_val',
'X-Object-Sysmeta-Container-Update-Override-Foo': 'bar',
'X-Object-Sysmeta-Ignored': 'ignored'})
# middleware override headers take precedence over EC policy headers
do_test({
'X-Object-Sysmeta-Container-Update-Override-Etag': 'override_etag',
'X-Object-Sysmeta-Container-Update-Override-Content-Type':
'override_val',
'X-Object-Sysmeta-Container-Update-Override-Foo': 'bar',
'X-Backend-Container-Update-Override-Etag': 'ignored',
'X-Backend-Container-Update-Override-Content-Type': 'ignored',
'X-Backend-Container-Update-Override-Foo': 'ignored'})
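# A 500 from the container server should fall back to
# pickle_async_update on the policy's diskfile manager.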
def test_container_update_async(self):
policy = random.choice(list(POLICIES))
req = Request.blank(
'/sda1/0/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '123',
'X-Container-Host': 'chost:cport',
'X-Container-Partition': 'cpartition',
'X-Container-Device': 'cdevice',
'Content-Type': 'text/plain',
'X-Object-Sysmeta-Ec-Frag-Index': 0,
'X-Backend-Storage-Policy-Index': int(policy)}, body='')
given_args = []
def fake_pickle_async_update(*args):
given_args[:] = args
diskfile_mgr = self.object_controller._diskfile_router[policy]
diskfile_mgr.pickle_async_update = fake_pickle_async_update
with mocked_http_conn(500) as fake_conn, fake_spawn():
resp = req.get_response(self.object_controller)
# fake_spawn() above waits on greenthreads to finish;
# don't start making assertions until then
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(len(given_args), 7)
(objdevice, account, container, obj, data, timestamp,
 given_policy) = given_args
self.assertEqual(objdevice, 'sda1')
self.assertEqual(account, 'a')
self.assertEqual(container, 'c')
self.assertEqual(obj, 'o')
self.assertEqual(timestamp, utils.Timestamp(1).internal)
# unpack into a distinct name so this assertion is not a tautology
self.assertEqual(given_policy, policy)
self.assertEqual(data, {
'headers': HeaderKeyDict({
'X-Size': '0',
'User-Agent': 'object-server %s' % os.getpid(),
'X-Content-Type': 'text/plain',
'X-Timestamp': utils.Timestamp(1).internal,
'X-Trans-Id': '123',
'Referer': 'PUT http://localhost/sda1/0/a/c/o',
'X-Backend-Storage-Policy-Index': int(policy),
'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e'}),
'obj': 'o',
'account': 'a',
'container': 'c',
'op': 'PUT'})
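# Container updates are spawned as greenthreads; capture the spawn
# calls and run them by hand to verify the deferred async_update
# arguments.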
def test_container_update_as_greenthread(self):
greenthreads = []
saved_spawn_calls = []
called_async_update_args = []
def local_fake_spawn(func, *a, **kw):
saved_spawn_calls.append((func, a, kw))
return mock.MagicMock()
def local_fake_async_update(*a, **kw):
# just capture the args to see that we would have called
called_async_update_args.append([a, kw])
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '12345',
'Content-Type': 'application/burrito',
'Content-Length': '0',
'X-Backend-Storage-Policy-Index': 0,
'X-Container-Partition': '20',
'X-Container-Host': '1.2.3.4:5',
'X-Container-Device': 'sdb1'})
with mock.patch.object(object_server, 'spawn',
local_fake_spawn):
with mock.patch.object(self.object_controller,
'async_update',
local_fake_async_update):
resp = req.get_response(self.object_controller)
# check the response is completed and successful
self.assertEqual(resp.status_int, 201)
# check that async_update hasn't been called
self.assertFalse(len(called_async_update_args))
# now do the work in greenthreads
for func, a, kw in saved_spawn_calls:
gt = spawn(func, *a, **kw)
greenthreads.append(gt)
# wait for the greenthreads to finish
for gt in greenthreads:
gt.wait()
# check that the calls to async_update have happened
headers_out = {'X-Size': '0',
'X-Content-Type': 'application/burrito',
'X-Timestamp': '0000012345.00000',
'X-Trans-Id': '-',
'Referer': 'PUT http://localhost/sda1/p/a/c/o',
'X-Backend-Storage-Policy-Index': '0',
'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e'}
expected = [('PUT', 'a', 'c', 'o', '1.2.3.4:5', '20', 'sdb1',
headers_out, 'sda1', POLICIES[0]),
{'logger_thread_locals': (None, None)}]
self.assertEqual(called_async_update_args, [expected])
def test_container_update_as_greenthread_with_timeout(self):
'''
Give it one container to update (for only one greenthread),
fake the greenthread so it will raise a timeout, and
test that the right message is logged and the method returns None.
'''
called_async_update_args = []
def local_fake_spawn(func, *a, **kw):
m = mock.MagicMock()
def wait_with_error():
raise Timeout()
m.wait = wait_with_error # because raise can't be in a lambda
return m
def local_fake_async_update(*a, **kw):
# just capture the args to see that we would have called
called_async_update_args.append([a, kw])
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '12345',
'Content-Type': 'application/burrito',
'Content-Length': '0',
'X-Backend-Storage-Policy-Index': 0,
'X-Container-Partition': '20',
'X-Container-Host': '1.2.3.4:5',
'X-Container-Device': 'sdb1'})
with mock.patch.object(object_server, 'spawn',
local_fake_spawn):
with mock.patch.object(self.object_controller,
'container_update_timeout',
1.414213562):
resp = req.get_response(self.object_controller)
# check the response is completed and successful
self.assertEqual(resp.status_int, 201)
# check that the timeout was logged
expected_logged_error = "Container update timeout (1.4142s) " \
"waiting for [('1.2.3.4:5', 'sdb1')]"
self.assertTrue(
expected_logged_error in
self.object_controller.logger.get_lines_for_level('debug'))
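# Mismatched counts of container hosts and devices should log an
# error and skip the update entirely.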
def test_container_update_bad_args(self):
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '123',
'X-Container-Host': 'chost,badhost',
'X-Container-Partition': 'cpartition',
'X-Container-Device': 'cdevice',
'X-Backend-Storage-Policy-Index': int(policy)})
with mock.patch.object(self.object_controller, 'async_update',
fake_async_update):
self.object_controller.container_update(
'PUT', 'a', 'c', 'o', req, {
'x-size': '0',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain', 'x-timestamp': '1'},
'sda1', policy)
self.assertEqual(given_args, [])
errors = self.object_controller.logger.get_lines_for_level('error')
self.assertEqual(len(errors), 1)
msg = errors[0]
self.assertTrue('Container update failed' in msg)
self.assertTrue('different numbers of hosts and devices' in msg)
self.assertTrue('chost,badhost' in msg)
self.assertTrue('cdevice' in msg)
def test_delete_at_update_on_put(self):
# Test how delete_at_update works when issued a delete for old
# expiration info after a new put with no new expiration info.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '123',
'X-Backend-Storage-Policy-Index': int(policy)})
with mock.patch.object(self.object_controller, 'async_update',
fake_async_update):
self.object_controller.delete_at_update(
'DELETE', 2, 'a', 'c', 'o', req, 'sda1', policy)
self.assertEqual(
given_args, [
'DELETE', '.expiring_objects', '0000000000',
'0000000002-a/c/o', None, None, None,
HeaderKeyDict({
'X-Backend-Storage-Policy-Index': 0,
'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '123',
'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', policy])
def test_delete_at_negative(self):
# Test how delete_at_update works when issued a delete for old
# expiration info after a new put with no new expiration info.
# A negative delete-at value should be reset to 0.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234', 'X-Backend-Storage-Policy-Index':
int(policy)})
self.object_controller.delete_at_update(
'DELETE', -2, 'a', 'c', 'o', req, 'sda1', policy)
self.assertEqual(given_args, [
'DELETE', '.expiring_objects', '0000000000', '0000000000-a/c/o',
None, None, None,
HeaderKeyDict({
# the expiring objects account is always 0
'X-Backend-Storage-Policy-Index': 0,
'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234',
'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', policy])
def test_delete_at_cap(self):
# Test how delete_at_update works when issued a delete for old
# expiration info after a new put with no new expiration info.
# A delete-at value past the cap should be reset to the cap.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.delete_at_update(
'DELETE', 12345678901, 'a', 'c', 'o', req, 'sda1', policy)
expiring_obj_container = given_args.pop(2)
expected_exp_cont = utils.get_expirer_container(
utils.normalize_delete_at_timestamp(12345678901),
86400, 'a', 'c', 'o')
self.assertEqual(expiring_obj_container, expected_exp_cont)
self.assertEqual(given_args, [
'DELETE', '.expiring_objects', '9999999999-a/c/o',
None, None, None,
HeaderKeyDict({
'X-Backend-Storage-Policy-Index': 0,
'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234',
'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', policy])
def test_delete_at_update_put_with_info(self):
# Keep the next test,
# test_delete_at_update_put_with_info_but_missing_container, in sync
# with this one; it differs only in omitting the
# X-Delete-At-Container header.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Delete-At-Container': '0',
'X-Delete-At-Host': '127.0.0.1:1234',
'X-Delete-At-Partition': '3',
'X-Delete-At-Device': 'sdc1',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o',
req, 'sda1', policy)
self.assertEqual(
given_args, [
'PUT', '.expiring_objects', '0000000000', '0000000002-a/c/o',
'127.0.0.1:1234',
'3', 'sdc1', HeaderKeyDict({
# the .expiring_objects account is always policy-0
'X-Backend-Storage-Policy-Index': 0,
'x-size': '0',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain',
'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234',
'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', policy])
def test_delete_at_update_put_with_info_but_missing_container(self):
# Same as previous test, test_delete_at_update_put_with_info, but just
# missing the X-Delete-At-Container header.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
self.object_controller.logger = self.logger
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Delete-At-Host': '127.0.0.1:1234',
'X-Delete-At-Partition': '3',
'X-Delete-At-Device': 'sdc1',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o',
req, 'sda1', policy)
self.assertEqual(
self.logger.get_lines_for_level('warning'),
['X-Delete-At-Container header must be specified for expiring '
'objects background PUT to work properly. Making best guess as '
'to the container name for now.'])
def test_delete_at_update_delete(self):
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.delete_at_update('DELETE', 2, 'a', 'c', 'o',
req, 'sda1', policy)
self.assertEqual(
given_args, [
'DELETE', '.expiring_objects', '0000000000',
'0000000002-a/c/o', None, None,
None, HeaderKeyDict({
'X-Backend-Storage-Policy-Index': 0,
'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234',
'referer': 'DELETE http://localhost/v1/a/c/o'}),
'sda1', policy])
def test_delete_backend_replication(self):
# If X-Backend-Replication: True delete_at_update should completely
# short-circuit.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Backend-Replication': 'True',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.delete_at_update(
'DELETE', -2, 'a', 'c', 'o', req, 'sda1', policy)
self.assertEqual(given_args, [])
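# A POST that sets a new X-Delete-At must enqueue a PUT for the new
# expirer entry and a DELETE for the previously recorded one.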
def test_POST_calls_delete_at(self):
policy = random.choice(list(POLICIES))
given_args = []
def fake_delete_at_update(*args):
given_args.extend(args)
self.object_controller.delete_at_update = fake_delete_at_update
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4',
'Content-Type': 'application/octet-stream',
'X-Backend-Storage-Policy-Index': int(policy),
'X-Object-Sysmeta-Ec-Frag-Index': 2})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertEqual(given_args, [])
sleep(.00001)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/x-test',
'X-Backend-Storage-Policy-Index': int(policy)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(given_args, [])
sleep(.00001)
timestamp1 = normalize_timestamp(time())
delete_at_timestamp1 = str(int(time() + 1000))
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp1,
'Content-Type': 'application/x-test',
'X-Delete-At': delete_at_timestamp1,
'X-Backend-Storage-Policy-Index': int(policy)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(
given_args, [
'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
given_args[5], 'sda1', policy])
while given_args:
given_args.pop()
sleep(.00001)
timestamp2 = normalize_timestamp(time())
delete_at_timestamp2 = str(int(time() + 2000))
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp2,
'Content-Type': 'application/x-test',
'X-Delete-At': delete_at_timestamp2,
'X-Backend-Storage-Policy-Index': int(policy)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
self.assertEqual(
given_args, [
'PUT', int(delete_at_timestamp2), 'a', 'c', 'o',
given_args[5], 'sda1', policy,
'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
given_args[5], 'sda1', policy])
def test_PUT_calls_delete_at(self):
policy = random.choice(list(POLICIES))
given_args = []
def fake_delete_at_update(*args):
given_args.extend(args)
self.object_controller.delete_at_update = fake_delete_at_update
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4',
'Content-Type': 'application/octet-stream',
'X-Backend-Storage-Policy-Index': int(policy),
'X-Object-Sysmeta-Ec-Frag-Index': 4})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertEqual(given_args, [])
sleep(.00001)
timestamp1 = normalize_timestamp(time())
delete_at_timestamp1 = str(int(time() + 1000))
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp1,
'Content-Length': '4',
'Content-Type': 'application/octet-stream',
'X-Delete-At': delete_at_timestamp1,
'X-Backend-Storage-Policy-Index': int(policy),
'X-Object-Sysmeta-Ec-Frag-Index': 3})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertEqual(
given_args, [
'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
given_args[5], 'sda1', policy])
while given_args:
given_args.pop()
sleep(.00001)
timestamp2 = normalize_timestamp(time())
delete_at_timestamp2 = str(int(time() + 2000))
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp2,
'Content-Length': '4',
'Content-Type': 'application/octet-stream',
'X-Delete-At': delete_at_timestamp2,
'X-Backend-Storage-Policy-Index': int(policy),
'X-Object-Sysmeta-Ec-Frag-Index': 3})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertEqual(
given_args, [
'PUT', int(delete_at_timestamp2), 'a', 'c', 'o',
given_args[5], 'sda1', policy,
'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
given_args[5], 'sda1', policy])
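# Once the delete-at time passes, a plain GET must 404 (still
# exposing X-Backend-Timestamp), while X-Backend-Replication
# requests can continue to read the data.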
def test_GET_but_expired(self):
now = time()
test_time = now + 10000
delete_at_timestamp = int(test_time + 100)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'X-Timestamp': normalize_timestamp(test_time)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
delete_at_timestamp = int(now + 1)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
put_timestamp = normalize_timestamp(test_time - 1000)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': put_timestamp,
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
# fix server time to now: delete-at is in future, verify GET is ok
with mock.patch('swift.obj.server.time.time', return_value=now):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'GET'},
headers={'X-Timestamp': normalize_timestamp(test_time)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# fix server time to now + 2: delete-at is in past, verify GET fails...
with mock.patch('swift.obj.server.time.time', return_value=now + 2):
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'GET'},
headers={'X-Timestamp': normalize_timestamp(now + 2)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
utils.Timestamp(put_timestamp))
# ...unless X-Backend-Replication is sent
expected = {
'GET': 'TEST',
'HEAD': '',
}
for meth, expected_body in expected.items():
req = Request.blank(
'/sda1/p/a/c/o', method=meth,
headers={'X-Timestamp': normalize_timestamp(now + 2),
'X-Backend-Replication': 'True'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(expected_body, resp.body)
def test_HEAD_but_expired(self):
test_time = time() + 10000
delete_at_timestamp = int(test_time + 100)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'X-Timestamp': normalize_timestamp(test_time)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
        # fix server time to now: delete-at is in future, verify HEAD is ok
now = time()
with mock.patch('swift.obj.server.time.time', return_value=now):
delete_at_timestamp = int(now + 1)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
put_timestamp = normalize_timestamp(test_time - 1000)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': put_timestamp,
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'X-Timestamp': normalize_timestamp(test_time)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
with mock.patch('swift.obj.server.time.time', return_value=now + 2):
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'X-Timestamp': normalize_timestamp(now + 2)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
utils.Timestamp(put_timestamp))
def test_POST_but_expired(self):
test_time = time() + 10000
delete_at_timestamp = int(test_time + 100)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(test_time - 1500)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
delete_at_timestamp = int(time() + 2)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(test_time - 1000),
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
orig_time = object_server.time.time
try:
t = time() + 3
object_server.time.time = lambda: t
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(time())})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
finally:
object_server.time.time = orig_time
def test_DELETE_but_expired(self):
test_time = time() + 10000
delete_at_timestamp = int(test_time + 100)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
orig_time = object_server.time.time
try:
t = test_time + 100
object_server.time.time = lambda: float(t)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': normalize_timestamp(time())})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
finally:
object_server.time.time = orig_time
def test_DELETE_if_delete_at_expired_still_deletes(self):
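        # X-If-Delete-At makes the DELETE conditional: it only succeeds when
        # it matches the stored X-Delete-At (412 otherwise), so the expirer
        # can't reap data that was overwritten after its job was queued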
test_time = time() + 10
test_timestamp = normalize_timestamp(test_time)
delete_at_time = int(test_time + 10)
delete_at_timestamp = str(delete_at_time)
delete_at_container = str(
delete_at_time /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': test_timestamp,
'X-Delete-At': delete_at_timestamp,
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# sanity
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'X-Timestamp': test_timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, 'TEST')
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(test_timestamp).internal + '.data')
self.assertTrue(os.path.isfile(objfile))
        # move time past expiry
with mock.patch('swift.obj.diskfile.time') as mock_time:
mock_time.time.return_value = test_time + 100
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'X-Timestamp': test_timestamp})
resp = req.get_response(self.object_controller)
# request will 404
self.assertEqual(resp.status_int, 404)
# but file still exists
self.assertTrue(os.path.isfile(objfile))
        # make the x-if-delete-at request with some wrong bits
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': delete_at_timestamp,
'X-If-Delete-At': int(time() + 1)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
self.assertTrue(os.path.isfile(objfile))
        # make the x-if-delete-at request with all the right bits
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': delete_at_timestamp,
'X-If-Delete-At': delete_at_timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
self.assertFalse(os.path.isfile(objfile))
        # make the x-if-delete-at request with all the right bits (again)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': delete_at_timestamp,
'X-If-Delete-At': delete_at_timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
self.assertFalse(os.path.isfile(objfile))
        # make the x-if-delete-at request for an object that's not found
req = Request.blank(
'/sda1/p/a/c/o-not-found',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': delete_at_timestamp,
'X-If-Delete-At': delete_at_timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
def test_DELETE_if_delete_at(self):
test_time = time() + 10000
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(test_time - 99),
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': normalize_timestamp(test_time - 98)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
delete_at_timestamp = int(test_time - 1)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(test_time - 97),
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': normalize_timestamp(test_time - 95),
'X-If-Delete-At': str(int(test_time))})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': normalize_timestamp(test_time - 95)})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
delete_at_timestamp = int(test_time - 1)
delete_at_container = str(
delete_at_timestamp /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(test_time - 94),
'X-Delete-At': str(delete_at_timestamp),
'X-Delete-At-Container': delete_at_container,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': normalize_timestamp(test_time - 92),
'X-If-Delete-At': str(int(test_time))})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': normalize_timestamp(test_time - 92),
'X-If-Delete-At': delete_at_timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': normalize_timestamp(test_time - 92),
'X-If-Delete-At': 'abc'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_DELETE_calls_delete_at(self):
given_args = []
def fake_delete_at_update(*args):
given_args.extend(args)
self.object_controller.delete_at_update = fake_delete_at_update
timestamp1 = normalize_timestamp(time())
delete_at_timestamp1 = int(time() + 1000)
delete_at_container1 = str(
delete_at_timestamp1 /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp1,
'Content-Length': '4',
'Content-Type': 'application/octet-stream',
'X-Delete-At': str(delete_at_timestamp1),
'X-Delete-At-Container': delete_at_container1})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertEqual(given_args, [
'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
given_args[5], 'sda1', POLICIES[0]])
while given_args:
given_args.pop()
sleep(.00001)
timestamp2 = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp2,
'Content-Type': 'application/octet-stream'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
self.assertEqual(given_args, [
'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
given_args[5], 'sda1', POLICIES[0]])
def test_PUT_delete_at_in_past(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'X-Delete-At': str(int(time() - 1)),
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
self.assertTrue('X-Delete-At in past' in resp.body)
def test_POST_delete_at_in_past(self):
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(time() + 1),
'X-Delete-At': str(int(time() - 1))})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
self.assertTrue('X-Delete-At in past' in resp.body)
def test_REPLICATE_works(self):
def fake_get_hashes(*args, **kwargs):
return 0, {1: 2}
def my_tpool_execute(func, *args, **kwargs):
return func(*args, **kwargs)
was_get_hashes = diskfile.DiskFileManager._get_hashes
was_tpool_exe = tpool.execute
try:
diskfile.DiskFileManager._get_hashes = fake_get_hashes
tpool.execute = my_tpool_execute
req = Request.blank('/sda1/p/suff',
environ={'REQUEST_METHOD': 'REPLICATE'},
headers={})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
p_data = pickle.loads(resp.body)
self.assertEqual(p_data, {1: 2})
finally:
tpool.execute = was_tpool_exe
diskfile.DiskFileManager._get_hashes = was_get_hashes
def test_REPLICATE_timeout(self):
def fake_get_hashes(*args, **kwargs):
raise Timeout()
def my_tpool_execute(func, *args, **kwargs):
return func(*args, **kwargs)
was_get_hashes = diskfile.DiskFileManager._get_hashes
was_tpool_exe = tpool.execute
try:
diskfile.DiskFileManager._get_hashes = fake_get_hashes
tpool.execute = my_tpool_execute
req = Request.blank('/sda1/p/suff',
environ={'REQUEST_METHOD': 'REPLICATE'},
headers={})
self.assertRaises(Timeout, self.object_controller.REPLICATE, req)
finally:
tpool.execute = was_tpool_exe
diskfile.DiskFileManager._get_hashes = was_get_hashes
def test_REPLICATE_reclaims_tombstones(self):
conf = {'devices': self.testdir, 'mount_check': False,
'reclaim_age': 100}
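        # reclaim_age=100 means tombstones more than 100s old may be removed
        # the next time their suffix gets rehashed (e.g. via REPLICATE)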
self.object_controller = object_server.ObjectController(
conf, logger=self.logger)
for policy in self.iter_policies():
# create a tombstone
ts = next(self.ts)
delete_request = Request.blank(
'/sda1/0/a/c/o', method='DELETE',
headers={
'x-backend-storage-policy-index': int(policy),
'x-timestamp': ts.internal,
})
resp = delete_request.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
objfile = self.df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o',
policy=policy)
tombstone_file = os.path.join(objfile._datadir,
'%s.ts' % ts.internal)
self.assertTrue(os.path.exists(tombstone_file))
# REPLICATE will hash it
req = Request.blank(
'/sda1/0', method='REPLICATE',
headers={
'x-backend-storage-policy-index': int(policy),
})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
suffix = pickle.loads(resp.body).keys()[0]
self.assertEqual(suffix, os.path.basename(
os.path.dirname(objfile._datadir)))
# tombstone still exists
self.assertTrue(os.path.exists(tombstone_file))
# after reclaim REPLICATE will rehash
replicate_request = Request.blank(
'/sda1/0/%s' % suffix, method='REPLICATE',
headers={
'x-backend-storage-policy-index': int(policy),
})
the_future = time() + 200
with mock.patch('swift.obj.diskfile.time.time') as mock_time:
mock_time.return_value = the_future
resp = replicate_request.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual({}, pickle.loads(resp.body))
# and tombstone is reaped!
self.assertFalse(os.path.exists(tombstone_file))
# N.B. with a small reclaim age like this - if proxy clocks get far
# enough out of whack ...
with mock.patch('swift.obj.diskfile.time.time') as mock_time:
mock_time.return_value = the_future
resp = delete_request.get_response(self.object_controller)
# we won't even create the tombstone
self.assertFalse(os.path.exists(tombstone_file))
                # hashdir sticks around though
self.assertTrue(os.path.exists(objfile._datadir))
# REPLICATE will clean it all up
resp = replicate_request.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual({}, pickle.loads(resp.body))
self.assertFalse(os.path.exists(objfile._datadir))
def test_SSYNC_can_be_called(self):
req = Request.blank('/sda1/0',
environ={'REQUEST_METHOD': 'SSYNC'},
headers={})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
def test_PUT_with_full_drive(self):
class IgnoredBody(object):
def __init__(self):
self.read_called = False
def read(self, size=-1):
if not self.read_called:
self.read_called = True
return 'VERIFY'
return ''
def fake_fallocate(fd, size):
raise OSError(errno.ENOSPC, os.strerror(errno.ENOSPC))
orig_fallocate = diskfile.fallocate
try:
diskfile.fallocate = fake_fallocate
timestamp = normalize_timestamp(time())
body_reader = IgnoredBody()
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': body_reader},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'Expect': '100-continue'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 507)
self.assertFalse(body_reader.read_called)
finally:
diskfile.fallocate = orig_fallocate
def test_global_conf_callback_does_nothing(self):
preloaded_app_conf = {}
global_conf = {}
object_server.global_conf_callback(preloaded_app_conf, global_conf)
self.assertEqual(preloaded_app_conf, {})
self.assertEqual(global_conf.keys(), ['replication_semaphore'])
try:
value = global_conf['replication_semaphore'][0].get_value()
except NotImplementedError:
# On some operating systems (at a minimum, OS X) it's not possible
# to introspect the value of a semaphore
raise unittest.SkipTest
else:
self.assertEqual(value, 4)
def test_global_conf_callback_replication_semaphore(self):
preloaded_app_conf = {'replication_concurrency': 123}
global_conf = {}
with mock.patch.object(
object_server.multiprocessing, 'BoundedSemaphore',
return_value='test1') as mocked_Semaphore:
object_server.global_conf_callback(preloaded_app_conf, global_conf)
self.assertEqual(preloaded_app_conf, {'replication_concurrency': 123})
self.assertEqual(global_conf, {'replication_semaphore': ['test1']})
mocked_Semaphore.assert_called_once_with(123)
def test_handling_of_replication_semaphore_config(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
objsrv = object_server.ObjectController(conf)
self.assertTrue(objsrv.replication_semaphore is None)
conf['replication_semaphore'] = ['sema']
objsrv = object_server.ObjectController(conf)
self.assertEqual(objsrv.replication_semaphore, 'sema')
def test_serv_reserv(self):
# Test replication_server flag was set from configuration file.
conf = {'devices': self.testdir, 'mount_check': 'false'}
self.assertEqual(
object_server.ObjectController(conf).replication_server, None)
for val in [True, '1', 'True', 'true']:
conf['replication_server'] = val
self.assertTrue(
object_server.ObjectController(conf).replication_server)
for val in [False, 0, '0', 'False', 'false', 'test_string']:
conf['replication_server'] = val
self.assertFalse(
object_server.ObjectController(conf).replication_server)
def test_list_allowed_methods(self):
# Test list of allowed_methods
obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST']
repl_methods = ['REPLICATE', 'SSYNC']
for method_name in obj_methods:
method = getattr(self.object_controller, method_name)
self.assertFalse(hasattr(method, 'replication'))
for method_name in repl_methods:
method = getattr(self.object_controller, method_name)
self.assertEqual(method.replication, True)
def test_correct_allowed_method(self):
        # Test that an allowed method is dispatched correctly via
        # swift.obj.server.ObjectController.__call__
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
self.object_controller = object_server.app_factory(
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'})
def start_response(*args):
# Sends args to outbuf
outbuf.writelines(args)
method = 'PUT'
env = {'REQUEST_METHOD': method,
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False}
method_res = mock.MagicMock()
mock_method = public(lambda x:
mock.MagicMock(return_value=method_res))
with mock.patch.object(self.object_controller, method,
new=mock_method):
response = self.object_controller(env, start_response)
self.assertEqual(response, method_res)
def test_not_allowed_method(self):
        # Test that a replication-only (not allowed) method is rejected by
        # swift.obj.server.ObjectController.__call__
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
self.object_controller = object_server.ObjectController(
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'}, logger=self.logger)
def start_response(*args):
# Sends args to outbuf
outbuf.writelines(args)
method = 'PUT'
env = {'REQUEST_METHOD': method,
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False}
answer = ['<html><h1>Method Not Allowed</h1><p>The method is not '
'allowed for this resource.</p></html>']
mock_method = replication(public(lambda x: mock.MagicMock()))
with mock.patch.object(self.object_controller, method,
new=mock_method):
mock_method.replication = True
with mock.patch('time.gmtime',
mock.MagicMock(side_effect=[gmtime(10001.0)])):
with mock.patch('time.time',
mock.MagicMock(side_effect=[10000.0,
10001.0])):
with mock.patch('os.getpid',
mock.MagicMock(return_value=1234)):
response = self.object_controller.__call__(
env, start_response)
self.assertEqual(response, answer)
self.assertEqual(
self.logger.get_lines_for_level('info'),
['None - - [01/Jan/1970:02:46:41 +0000] "PUT'
' /sda1/p/a/c/o" 405 91 "-" "-" "-" 1.0000 "-"'
' 1234 -'])
def test_call_incorrect_replication_method(self):
inbuf = StringIO()
errbuf = StringIO()
outbuf = StringIO()
self.object_controller = object_server.ObjectController(
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'true'}, logger=FakeLogger())
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST', 'OPTIONS']
for method in obj_methods:
env = {'REQUEST_METHOD': method,
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False}
self.object_controller(env, start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_not_utf8_and_not_logging_requests(self):
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
self.object_controller = object_server.ObjectController(
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false', 'log_requests': 'false'},
logger=FakeLogger())
def start_response(*args):
# Sends args to outbuf
outbuf.writelines(args)
method = 'PUT'
env = {'REQUEST_METHOD': method,
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/\x00%20/%',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False}
answer = ['Invalid UTF8 or contains NULL']
mock_method = public(lambda x: mock.MagicMock())
with mock.patch.object(self.object_controller, method,
new=mock_method):
response = self.object_controller.__call__(env, start_response)
self.assertEqual(response, answer)
self.assertEqual(self.logger.get_lines_for_level('info'), [])
def test__call__returns_500(self):
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
self.logger = debug_logger('test')
self.object_controller = object_server.ObjectController(
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false', 'log_requests': 'false'},
logger=self.logger)
def start_response(*args):
# Sends args to outbuf
outbuf.writelines(args)
method = 'PUT'
env = {'REQUEST_METHOD': method,
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False}
@public
def mock_put_method(*args, **kwargs):
raise Exception()
with mock.patch.object(self.object_controller, method,
new=mock_put_method):
response = self.object_controller.__call__(env, start_response)
self.assertTrue(response[0].startswith(
'Traceback (most recent call last):'))
self.assertEqual(self.logger.get_lines_for_level('error'), [
'ERROR __call__ error with %(method)s %(path)s : ' % {
'method': 'PUT', 'path': '/sda1/p/a/c/o'},
])
self.assertEqual(self.logger.get_lines_for_level('info'), [])
def test_PUT_slow(self):
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
self.object_controller = object_server.ObjectController(
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false', 'log_requests': 'false',
'slow': '10'},
logger=self.logger)
def start_response(*args):
# Sends args to outbuf
outbuf.writelines(args)
method = 'PUT'
env = {'REQUEST_METHOD': method,
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False}
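        # with slow=10 and a mocked elapsed time of 1s (10001.0 - 10000.0),
        # the server should pad the request by sleeping the remaining 9s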
mock_method = public(lambda x: mock.MagicMock())
with mock.patch.object(self.object_controller, method,
new=mock_method):
with mock.patch('time.time',
mock.MagicMock(side_effect=[10000.0,
10001.0])):
with mock.patch('swift.obj.server.sleep',
mock.MagicMock()) as ms:
self.object_controller.__call__(env, start_response)
ms.assert_called_with(9)
self.assertEqual(self.logger.get_lines_for_level('info'),
[])
def test_log_line_format(self):
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD', 'REMOTE_ADDR': '1.2.3.4'})
self.object_controller.logger = self.logger
with mock.patch(
'time.gmtime', mock.MagicMock(side_effect=[gmtime(10001.0)])):
with mock.patch(
'time.time',
mock.MagicMock(side_effect=[10000.0, 10001.0, 10002.0])):
with mock.patch(
'os.getpid', mock.MagicMock(return_value=1234)):
req.get_response(self.object_controller)
self.assertEqual(
self.logger.get_lines_for_level('info'),
['1.2.3.4 - - [01/Jan/1970:02:46:41 +0000] "HEAD /sda1/p/a/c/o" '
'404 - "-" "-" "-" 2.0000 "-" 1234 -'])
@patch_policies([StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one', False)])
def test_dynamic_datadir(self):
        # rebuild the diskfile router so it picks up the patched policies
self.object_controller._diskfile_router = diskfile.DiskFileRouter(
self.conf, self.object_controller.logger)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test',
'Foo': 'fooheader',
'Baz': 'bazheader',
'X-Backend-Storage-Policy-Index': 1,
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
req.body = 'VERIFY'
object_dir = self.testdir + "/sda1/objects-1"
self.assertFalse(os.path.isdir(object_dir))
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertTrue(os.path.isdir(object_dir))
        # make sure a request with no policy index header falls back to
        # the policy 0 data_dir
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test',
'Foo': 'fooheader',
'Baz': 'bazheader',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
req.body = 'VERIFY'
object_dir = self.testdir + "/sda1/objects"
self.assertFalse(os.path.isdir(object_dir))
with mock.patch.object(POLICIES, 'get_by_index',
lambda _: True):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
self.assertTrue(os.path.isdir(object_dir))
def test_storage_policy_index_is_validated(self):
# sanity check that index for existing policy is ok
methods = ('PUT', 'POST', 'GET', 'HEAD', 'REPLICATE', 'DELETE')
valid_indices = sorted([int(policy) for policy in POLICIES])
for index in valid_indices:
object_dir = self.testdir + "/sda1/objects"
if index > 0:
object_dir = "%s-%s" % (object_dir, index)
self.assertFalse(os.path.isdir(object_dir))
for method in methods:
headers = {
'X-Timestamp': next(self.ts).internal,
'Content-Type': 'application/x-test',
'X-Backend-Storage-Policy-Index': index}
if POLICIES[index].policy_type == EC_POLICY:
headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': method},
headers=headers)
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertTrue(is_success(resp.status_int),
'%s method failed: %r' % (method, resp.status))
# index for non-existent policy should return 503
index = valid_indices[-1] + 1
for method in methods:
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': method},
headers={
'X-Timestamp': next(self.ts).internal,
'Content-Type': 'application/x-test',
'X-Backend-Storage-Policy-Index': index})
req.body = 'VERIFY'
object_dir = self.testdir + "/sda1/objects-%s" % index
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 503)
self.assertFalse(os.path.isdir(object_dir))
def test_race_doesnt_quarantine(self):
existing_timestamp = normalize_timestamp(time())
delete_timestamp = normalize_timestamp(time() + 1)
put_timestamp = normalize_timestamp(time() + 2)
# make a .ts
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': existing_timestamp})
req.get_response(self.object_controller)
# force a PUT between the listdir and read_metadata of a DELETE
put_once = [False]
orig_listdir = os.listdir
def mock_listdir(path):
listing = orig_listdir(path)
if not put_once[0]:
put_once[0] = True
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': put_timestamp,
'Content-Length': '9',
'Content-Type': 'application/octet-stream'})
req.body = 'some data'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
return listing
with mock.patch('os.listdir', mock_listdir):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': delete_timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
qdir = os.path.join(self.testdir, 'sda1', 'quarantined')
self.assertFalse(os.path.exists(qdir))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['X-Timestamp'], put_timestamp)
def test_multiphase_put_draining(self):
# We want to ensure that we read the whole response body even if
        # it's multipart MIME and there are document parts that we don't
# expect or understand. This'll help save our bacon if we ever jam
# more stuff in there.
in_a_timeout = [False]
# inherit from BaseException so we get a stack trace when the test
# fails instead of just a 500
class NotInATimeout(BaseException):
pass
class FakeTimeout(BaseException):
def __enter__(self):
in_a_timeout[0] = True
def __exit__(self, typ, value, tb):
in_a_timeout[0] = False
class PickyWsgiBytesIO(WsgiBytesIO):
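            # any read outside the server's ChunkReadTimeout context is a
            # bug; this stand-in wsgi.input raises so the test fails loudly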
def read(self, *a, **kw):
if not in_a_timeout[0]:
raise NotInATimeout()
return WsgiBytesIO.read(self, *a, **kw)
def readline(self, *a, **kw):
if not in_a_timeout[0]:
raise NotInATimeout()
return WsgiBytesIO.readline(self, *a, **kw)
test_data = 'obj data'
footer_meta = {
"X-Object-Sysmeta-Ec-Frag-Index": "7",
"Etag": md5(test_data).hexdigest(),
}
footer_json = json.dumps(footer_meta)
footer_meta_cksum = md5(footer_json).hexdigest()
test_doc = "\r\n".join((
"--boundary123",
"X-Document: object body",
"",
test_data,
"--boundary123",
"X-Document: object metadata",
"Content-MD5: " + footer_meta_cksum,
"",
footer_json,
"--boundary123",
"X-Document: we got cleverer",
"",
"stuff stuff meaningless stuuuuuuuuuuff",
"--boundary123",
"X-Document: we got even cleverer; can you believe it?",
"Waneshaft: ambifacient lunar",
"Casing: malleable logarithmic",
"",
"potato potato potato potato potato potato potato",
"--boundary123--"
))
if six.PY3:
test_doc = test_doc.encode('utf-8')
# phase1 - PUT request with object metadata in footer and
# multiphase commit conversation
put_timestamp = utils.Timestamp.now().internal
headers = {
'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp,
'Transfer-Encoding': 'chunked',
'Expect': '100-continue',
'X-Backend-Storage-Policy-Index': '1',
'X-Backend-Obj-Content-Length': len(test_data),
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
}
wsgi_input = PickyWsgiBytesIO(test_doc)
req = Request.blank(
"/sda1/0/a/c/o",
environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': wsgi_input},
headers=headers)
app = object_server.ObjectController(self.conf, logger=self.logger)
with mock.patch('swift.obj.server.ChunkReadTimeout', FakeTimeout):
resp = req.get_response(app)
self.assertEqual(resp.status_int, 201) # sanity check
in_a_timeout[0] = True # so we can check without an exception
self.assertEqual(wsgi_input.read(), '') # we read all the bytes
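# The multiphase PUT tests above hand-roll the same wire format over and
# over: a MIME document carried inside HTTP/1.1 chunked-transfer frames.
# The sketch below is illustrative only and is not used by the suite;
# ``_example_build_mime_put_body`` is a hypothetical helper showing, under
# the same assumptions as the tests, how the object body, the JSON footer
# and its Content-MD5 checksum are framed.
def _example_build_mime_put_body(test_data, footer_meta,
                                 boundary='boundary123'):
    """Return (mime_doc, chunked_frame) for a footer-bearing PUT body."""
    footer_json = json.dumps(footer_meta)
    mime_doc = "\r\n".join((
        "--" + boundary,
        "X-Document: object body",
        "",
        test_data,
        "--" + boundary,
        "X-Document: object metadata",
        "Content-MD5: " + md5(footer_json).hexdigest(),
        "",
        footer_json,
        "--" + boundary,  # bare boundary: footers done, commit phase follows
    ))
    # one chunked-transfer frame: hex payload length, CRLF, payload, CRLF
    chunked_frame = "%x\r\n%s\r\n" % (len(mime_doc), mime_doc)
    return mime_doc, chunked_frame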
@patch_policies(test_policies)
class TestObjectServer(unittest.TestCase):
def setUp(self):
skip_if_no_xattrs()
# dirs
self.tmpdir = mkdtemp()
self.tempdir = os.path.join(self.tmpdir, 'tmp_test_obj_server')
self.devices = os.path.join(self.tempdir, 'srv/node')
for device in ('sda1', 'sdb1'):
os.makedirs(os.path.join(self.devices, device))
self.conf = {
'devices': self.devices,
'swift_dir': self.tempdir,
'mount_check': 'false',
}
self.logger = debug_logger('test-object-server')
self.app = object_server.ObjectController(
self.conf, logger=self.logger)
sock = listen_zero()
self.server = spawn(wsgi.server, sock, self.app, utils.NullLogger())
self.port = sock.getsockname()[1]
def tearDown(self):
rmtree(self.tmpdir)
def test_not_found(self):
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'GET', '/a/c/o')
resp = conn.getresponse()
self.assertEqual(resp.status, 404)
resp.read()
resp.close()
def test_expect_on_put(self):
test_body = 'test'
headers = {
'Expect': '100-continue',
'Content-Length': len(test_body),
'Content-Type': 'application/test',
'X-Timestamp': utils.Timestamp.now().internal,
}
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
conn.send(test_body)
resp = conn.getresponse()
self.assertEqual(resp.status, 201)
resp.read()
resp.close()
def test_expect_on_put_footer(self):
test_body = 'test'
headers = {
'Expect': '100-continue',
'Content-Length': len(test_body),
'Content-Type': 'application/test',
'X-Timestamp': utils.Timestamp.now().internal,
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
}
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
headers = HeaderKeyDict(resp.getheaders())
self.assertEqual(headers['X-Obj-Metadata-Footer'], 'yes')
resp.close()
def test_expect_on_put_conflict(self):
test_body = 'test'
put_timestamp = utils.Timestamp.now()
headers = {
'Expect': '100-continue',
'Content-Length': len(test_body),
'Content-Type': 'application/test',
'X-Timestamp': put_timestamp.internal,
}
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
conn.send(test_body)
resp = conn.getresponse()
self.assertEqual(resp.status, 201)
resp.read()
resp.close()
# and again with same timestamp
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 409)
headers = HeaderKeyDict(resp.getheaders())
self.assertEqual(headers['X-Backend-Timestamp'], put_timestamp)
resp.read()
resp.close()
def test_multiphase_put_no_mime_boundary(self):
test_data = 'obj data'
put_timestamp = utils.Timestamp.now().internal
headers = {
'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp,
'Transfer-Encoding': 'chunked',
'Expect': '100-continue',
'X-Backend-Obj-Content-Length': len(test_data),
'X-Backend-Obj-Multiphase-Commit': 'yes',
}
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 400)
resp.read()
resp.close()
    def test_expect_on_multiphase_put_disconnect(self):
put_timestamp = utils.Timestamp.now().internal
headers = {
'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp,
'Transfer-Encoding': 'chunked',
'Expect': '100-continue',
'X-Backend-Obj-Content-Length': 0,
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
'X-Backend-Obj-Multiphase-Commit': 'yes',
}
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
headers = HeaderKeyDict(resp.getheaders())
self.assertEqual(headers['X-Obj-Multiphase-Commit'], 'yes')
conn.send('c\r\n--boundary123\r\n')
# disconnect client
conn.sock.fd._sock.close()
for i in range(2):
sleep(0)
self.assertFalse(self.logger.get_lines_for_level('error'))
for line in self.logger.get_lines_for_level('info'):
self.assertIn(' 499 ', line)
def find_files(self):
ignore_files = {'.lock', 'hashes.invalid'}
found_files = defaultdict(list)
for root, dirs, files in os.walk(self.devices):
for filename in files:
if filename in ignore_files:
continue
_name, ext = os.path.splitext(filename)
file_path = os.path.join(root, filename)
found_files[ext].append(file_path)
return found_files
@contextmanager
def _check_multiphase_put_commit_handling(self,
test_doc=None,
headers=None,
finish_body=True):
"""
This helper will setup a multiphase chunked PUT request and yield at
the context at the commit phase (after getting the second expect-100
continue response.
It can setup a reasonable stub request, but you can over-ride some
characteristics of the request via kwargs.
:param test_doc: first part of the mime conversation before the object
server will send the 100-continue, this includes the
object body
:param headers: headers to send along with the initial request; some
object-metadata (e.g. X-Backend-Obj-Content-Length)
is generally expected to match the test_doc)
:param finish_body: boolean, if true send "0\r\n\r\n" after test_doc
and wait for 100-continue before yielding context
"""
test_data = encode_frag_archive_bodies(POLICIES[1], 'obj data')[0]
footer_meta = {
"X-Object-Sysmeta-Ec-Frag-Index": "2",
"Etag": md5(test_data).hexdigest(),
}
footer_json = json.dumps(footer_meta)
footer_meta_cksum = md5(footer_json).hexdigest()
test_doc = test_doc or "\r\n".join((
"--boundary123",
"X-Document: object body",
"",
test_data,
"--boundary123",
"X-Document: object metadata",
"Content-MD5: " + footer_meta_cksum,
"",
footer_json,
"--boundary123",
))
# phase1 - PUT request with object metadata in footer and
# multiphase commit conversation
headers = headers or {
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
'Expect': '100-continue',
'X-Backend-Storage-Policy-Index': '1',
'X-Backend-Obj-Content-Length': len(test_data),
'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
'X-Backend-Obj-Multiphase-Commit': 'yes',
}
put_timestamp = utils.Timestamp(headers.setdefault(
'X-Timestamp', utils.Timestamp.now().internal))
container_update = \
'swift.obj.server.ObjectController.container_update'
with mock.patch(container_update) as _container_update:
conn = bufferedhttp.http_connect(
'127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers)
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
expect_headers = HeaderKeyDict(resp.getheaders())
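            # frame the MIME doc as one HTTP chunk: hex length, CRLF,
            # payload, CRLF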
to_send = "%x\r\n%s\r\n" % (len(test_doc), test_doc)
conn.send(to_send)
if finish_body:
conn.send("0\r\n\r\n")
# verify 100-continue response to mark end of phase1
resp = conn.getexpect()
self.assertEqual(resp.status, 100)
# yield relevant context for test
yield {
'conn': conn,
'expect_headers': expect_headers,
'put_timestamp': put_timestamp,
'mock_container_update': _container_update,
}
# give the object server a little time to trampoline enough to
            # recognize the request has finished or the socket has closed
sleep(0.1)
def test_multiphase_put_client_disconnect_right_before_commit(self):
with self._check_multiphase_put_commit_handling() as context:
conn = context['conn']
            # just bail straight out
conn.sock.fd._sock.close()
put_timestamp = context['put_timestamp']
_container_update = context['mock_container_update']
# and make sure it demonstrates the client disconnect
log_lines = self.logger.get_lines_for_level('info')
self.assertEqual(len(log_lines), 1)
self.assertIn(' 499 ', log_lines[0])
# verify successful object data file write
found_files = self.find_files()
# non durable .data file is there
self.assertEqual(len(found_files['.data']), 1)
obj_datafile = found_files['.data'][0]
self.assertEqual("%s#2.data" % put_timestamp.internal,
os.path.basename(obj_datafile))
# but no other files
self.assertFalse(found_files['.data'][1:])
found_files.pop('.data')
self.assertFalse(found_files)
# And no container update
self.assertFalse(_container_update.called)
def test_multiphase_put_client_disconnect_in_the_middle_of_commit(self):
with self._check_multiphase_put_commit_handling() as context:
conn = context['conn']
# start commit confirmation to start phase2
commit_confirmation_doc = "\r\n".join((
"X-Document: put commit",
"",
"commit_confirmation",
"--boundary123--",
))
            # but don't quite finish the commit body
to_send = "%x\r\n%s" % \
(len(commit_confirmation_doc), commit_confirmation_doc[:-1])
conn.send(to_send)
# and then bail out
conn.sock.fd._sock.close()
put_timestamp = context['put_timestamp']
_container_update = context['mock_container_update']
# and make sure it demonstrates the client disconnect
log_lines = self.logger.get_lines_for_level('info')
self.assertEqual(len(log_lines), 1)
self.assertIn(' 499 ', log_lines[0])
# verify successful object data file write
found_files = self.find_files()
# non durable .data file is there
self.assertEqual(len(found_files['.data']), 1)
obj_datafile = found_files['.data'][0]
self.assertEqual("%s#2.data" % put_timestamp.internal,
os.path.basename(obj_datafile))
# but no other files
self.assertFalse(found_files['.data'][1:])
found_files.pop('.data')
self.assertFalse(found_files)
# And no container update
self.assertFalse(_container_update.called)
def test_multiphase_put_no_metadata_replicated(self):
test_data = 'obj data'
test_doc = "\r\n".join((
"--boundary123",
"X-Document: object body",
"",
test_data,
"--boundary123",
))
put_timestamp = utils.Timestamp.now().internal
headers = {
'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp,
'Transfer-Encoding': 'chunked',
'Expect': '100-continue',
'X-Backend-Obj-Content-Length': len(test_data),
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
'X-Backend-Obj-Multiphase-Commit': 'yes',
}
with self._check_multiphase_put_commit_handling(
test_doc=test_doc, headers=headers) as context:
expect_headers = context['expect_headers']
self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
# N.B. no X-Obj-Metadata-Footer header
self.assertNotIn('X-Obj-Metadata-Footer', expect_headers)
conn = context['conn']
# send commit confirmation to start phase2
commit_confirmation_doc = "\r\n".join((
"X-Document: put commit",
"",
"commit_confirmation",
"--boundary123--",
))
to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
(len(commit_confirmation_doc), commit_confirmation_doc)
conn.send(to_send)
            # verify success (2xx) to mark the end of phase2
resp = conn.getresponse()
self.assertEqual(resp.status, 201)
resp.read()
resp.close()
# verify successful object data file write
put_timestamp = context['put_timestamp']
found_files = self.find_files()
# .data file is there
self.assertEqual(len(found_files['.data']), 1)
obj_datafile = found_files['.data'][0]
self.assertEqual("%s.data" % put_timestamp.internal,
os.path.basename(obj_datafile))
# but no other files
self.assertFalse(found_files['.data'][1:])
found_files.pop('.data')
self.assertFalse(found_files)
# And container update was called
self.assertTrue(context['mock_container_update'].called)
def test_multiphase_put_metadata_footer(self):
with self._check_multiphase_put_commit_handling() as context:
expect_headers = context['expect_headers']
self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
self.assertEqual(expect_headers['X-Obj-Metadata-Footer'], 'yes')
conn = context['conn']
# send commit confirmation to start phase2
commit_confirmation_doc = "\r\n".join((
"X-Document: put commit",
"",
"commit_confirmation",
"--boundary123--",
))
to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
(len(commit_confirmation_doc), commit_confirmation_doc)
conn.send(to_send)
            # verify success (2xx) to mark the end of phase2
resp = conn.getresponse()
self.assertEqual(resp.status, 201)
resp.read()
resp.close()
# verify successful object data and durable state file write
put_timestamp = context['put_timestamp']
found_files = self.find_files()
# .data file is there
self.assertEqual(len(found_files['.data']), 1)
obj_datafile = found_files['.data'][0]
self.assertEqual("%s#2#d.data" % put_timestamp.internal,
os.path.basename(obj_datafile))
with open(obj_datafile) as fd:
actual_meta = diskfile.read_metadata(fd)
expected_meta = {'Content-Length': '82',
'name': '/a/c/o',
'X-Object-Sysmeta-Ec-Frag-Index': '2',
'X-Timestamp': put_timestamp.normal,
'Content-Type': 'text/plain'}
for k, v in actual_meta.items():
self.assertIsInstance(k, six.binary_type)
self.assertIsInstance(v, six.binary_type)
self.assertIsNotNone(actual_meta.pop('ETag', None))
self.assertEqual(expected_meta, actual_meta)
# but no other files
self.assertFalse(found_files['.data'][1:])
found_files.pop('.data')
self.assertFalse(found_files)
# And container update was called
self.assertTrue(context['mock_container_update'].called)
def test_multiphase_put_metadata_footer_disconnect(self):
test_data = 'obj data'
test_doc = "\r\n".join((
"--boundary123",
"X-Document: object body",
"",
test_data,
"--boundary123",
))
# eventlet.wsgi won't return < network_chunk_size from a chunked read
self.app.network_chunk_size = 16
with self._check_multiphase_put_commit_handling(
test_doc=test_doc, finish_body=False) as context:
conn = context['conn']
# make footer doc
footer_meta = {
"X-Object-Sysmeta-Ec-Frag-Index": "2",
"Etag": md5(test_data).hexdigest(),
}
footer_json = json.dumps(footer_meta)
footer_meta_cksum = md5(footer_json).hexdigest()
# send most of the footer doc
footer_doc = "\r\n".join((
"X-Document: object metadata",
"Content-MD5: " + footer_meta_cksum,
"",
footer_json,
))
# but don't send final boundary nor last chunk
to_send = "%x\r\n%s\r\n" % \
(len(footer_doc), footer_doc)
conn.send(to_send)
# and then bail out
conn.sock.fd._sock.close()
# and make sure it demonstrates the client disconnect
log_lines = self.logger.get_lines_for_level('info')
self.assertEqual(len(log_lines), 1)
self.assertIn(' 499 ', log_lines[0])
# no artifacts left on disk
found_files = self.find_files()
self.assertFalse(found_files)
# ... and no container update
_container_update = context['mock_container_update']
self.assertFalse(_container_update.called)
def test_multiphase_put_ec_fragment_in_headers_no_footers(self):
test_data = 'obj data'
test_doc = "\r\n".join((
"--boundary123",
"X-Document: object body",
"",
test_data,
"--boundary123",
))
# phase1 - PUT request with multiphase commit conversation
# no object metadata in footer
put_timestamp = utils.Timestamp.now().internal
headers = {
'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp,
'Transfer-Encoding': 'chunked',
'Expect': '100-continue',
# normally the frag index gets sent in the MIME footer (which this
# test doesn't have, see `test_multiphase_put_metadata_footer`),
# but the proxy *could* send the frag index in the headers and
# this test verifies that would work.
'X-Object-Sysmeta-Ec-Frag-Index': '2',
'X-Backend-Storage-Policy-Index': '1',
'X-Backend-Obj-Content-Length': len(test_data),
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
'X-Backend-Obj-Multiphase-Commit': 'yes',
}
with self._check_multiphase_put_commit_handling(
test_doc=test_doc, headers=headers) as context:
expect_headers = context['expect_headers']
self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
# N.B. no X-Obj-Metadata-Footer header
self.assertNotIn('X-Obj-Metadata-Footer', expect_headers)
conn = context['conn']
# send commit confirmation to start phase2
commit_confirmation_doc = "\r\n".join((
"X-Document: put commit",
"",
"commit_confirmation",
"--boundary123--",
))
to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
(len(commit_confirmation_doc), commit_confirmation_doc)
conn.send(to_send)
            # verify success (2xx) to mark the end of phase2
resp = conn.getresponse()
self.assertEqual(resp.status, 201)
resp.read()
resp.close()
# verify successful object data and durable state file write
put_timestamp = context['put_timestamp']
found_files = self.find_files()
# .data file is there
self.assertEqual(len(found_files['.data']), 1)
obj_datafile = found_files['.data'][0]
self.assertEqual("%s#2#d.data" % put_timestamp.internal,
os.path.basename(obj_datafile))
# but no other files
self.assertFalse(found_files['.data'][1:])
found_files.pop('.data')
self.assertFalse(found_files)
# And container update was called
self.assertTrue(context['mock_container_update'].called)
def test_multiphase_put_bad_commit_message(self):
with self._check_multiphase_put_commit_handling() as context:
conn = context['conn']
# send commit confirmation to start phase2
commit_confirmation_doc = "\r\n".join((
"junkjunk",
"--boundary123--",
))
to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
(len(commit_confirmation_doc), commit_confirmation_doc)
conn.send(to_send)
resp = conn.getresponse()
self.assertEqual(resp.status, 500)
resp.read()
resp.close()
put_timestamp = context['put_timestamp']
_container_update = context['mock_container_update']
# verify that durable data file was NOT created
found_files = self.find_files()
# non durable .data file is there
self.assertEqual(len(found_files['.data']), 1)
obj_datafile = found_files['.data'][0]
self.assertEqual("%s#2.data" % put_timestamp.internal,
os.path.basename(obj_datafile))
# but no other files
self.assertFalse(found_files['.data'][1:])
found_files.pop('.data')
self.assertFalse(found_files)
# And no container update
self.assertFalse(_container_update.called)
def test_multiphase_put_drains_extra_commit_junk(self):
with self._check_multiphase_put_commit_handling() as context:
conn = context['conn']
# send commit confirmation to start phase2
commit_confirmation_doc = "\r\n".join((
"X-Document: put commit",
"",
"commit_confirmation",
"--boundary123",
"X-Document: we got cleverer",
"",
"stuff stuff meaningless stuuuuuuuuuuff",
"--boundary123",
"X-Document: we got even cleverer; can you believe it?",
"Waneshaft: ambifacient lunar",
"Casing: malleable logarithmic",
"",
"potato potato potato potato potato potato potato",
"--boundary123--",
))
to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
(len(commit_confirmation_doc), commit_confirmation_doc)
conn.send(to_send)
            # verify success (2xx) to mark the end of phase2
resp = conn.getresponse()
self.assertEqual(resp.status, 201)
resp.read()
# make another request to validate the HTTP protocol state
conn.putrequest('GET', '/sda1/0/a/c/o')
conn.putheader('X-Backend-Storage-Policy-Index', '1')
conn.endheaders()
resp = conn.getresponse()
self.assertEqual(resp.status, 200)
resp.read()
resp.close()
# verify successful object data and durable state file write
put_timestamp = context['put_timestamp']
found_files = self.find_files()
# .data file is there
self.assertEqual(len(found_files['.data']), 1)
obj_datafile = found_files['.data'][0]
self.assertEqual("%s#2#d.data" % put_timestamp.internal,
os.path.basename(obj_datafile))
# but no other files
self.assertFalse(found_files['.data'][1:])
found_files.pop('.data')
self.assertFalse(found_files)
# And container update was called
self.assertTrue(context['mock_container_update'].called)
def test_multiphase_put_drains_extra_commit_junk_disconnect(self):
commit_confirmation_doc = "\r\n".join((
"X-Document: put commit",
"",
"commit_confirmation",
"--boundary123",
"X-Document: we got cleverer",
"",
"stuff stuff meaningless stuuuuuuuuuuff",
"--boundary123",
"X-Document: we got even cleverer; can you believe it?",
"Waneshaft: ambifacient lunar",
"Casing: malleable logarithmic",
"",
"potato potato potato potato potato potato potato",
))
# eventlet.wsgi won't return < network_chunk_size from a chunked read
self.app.network_chunk_size = 16
with self._check_multiphase_put_commit_handling() as context:
conn = context['conn']
# send commit confirmation and some other stuff
# but don't send final boundary or last chunk
to_send = "%x\r\n%s\r\n" % \
(len(commit_confirmation_doc), commit_confirmation_doc)
conn.send(to_send)
# and then bail out
conn.sock.fd._sock.close()
# and make sure it demonstrates the client disconnect
log_lines = self.logger.get_lines_for_level('info')
self.assertEqual(len(log_lines), 1)
self.assertIn(' 499 ', log_lines[0])
# verify successful object data and durable state file write
put_timestamp = context['put_timestamp']
found_files = self.find_files()
# .data file is there
self.assertEqual(len(found_files['.data']), 1)
obj_datafile = found_files['.data'][0]
self.assertEqual("%s#2#d.data" % put_timestamp.internal,
os.path.basename(obj_datafile))
# but no other files
self.assertFalse(found_files['.data'][1:])
found_files.pop('.data')
self.assertFalse(found_files)
# but no container update
self.assertFalse(context['mock_container_update'].called)
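# The multiphase tests above assert against three on-disk .data filename
# shapes. ``_example_data_filename`` is a hypothetical formatter, not part
# of the suite, capturing the convention those assertions rely on.
def _example_data_filename(timestamp, frag_index=None, durable=False):
    """EC data files are <ts>#<frag>.data while non-durable and gain a #d
    marker (<ts>#<frag>#d.data) once committed; replicated policies use a
    plain <ts>.data."""
    if frag_index is None:
        return '%s.data' % timestamp.internal
    name = '%s#%s' % (timestamp.internal, frag_index)
    if durable:
        name += '#d'
    return name + '.data'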
@patch_policies
class TestZeroCopy(unittest.TestCase):
"""Test the object server's zero-copy functionality"""
def _system_can_zero_copy(self):
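        # zero-copy GETs need both a working splice() and an MD5 hash
        # socket (so data can be checksummed without copying to userspace)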
if not splice.available:
return False
try:
utils.get_md5_socket()
except IOError:
return False
return True
def setUp(self):
skip_if_no_xattrs()
if not self._system_can_zero_copy():
raise unittest.SkipTest("zero-copy support is missing")
self.testdir = mkdtemp(suffix="obj_server_zero_copy")
mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
conf = {'devices': self.testdir,
'mount_check': 'false',
'splice': 'yes',
'disk_chunk_size': '4096'}
self.object_controller = object_server.ObjectController(
conf, logger=debug_logger())
self.df_mgr = diskfile.DiskFileManager(
conf, self.object_controller.logger)
listener = listen_zero()
port = listener.getsockname()[1]
self.wsgi_greenlet = spawn(
wsgi.server, listener, self.object_controller, NullLogger())
self.http_conn = httplib.HTTPConnection('127.0.0.1', port)
self.http_conn.connect()
def tearDown(self):
"""Tear down for testing swift.object.server.ObjectController"""
self.wsgi_greenlet.kill()
rmtree(self.testdir)
def test_GET(self):
url_path = '/sda1/2100/a/c/o'
self.http_conn.request('PUT', url_path, 'obj contents',
{'X-Timestamp': '127082564.24709',
'Content-Type': 'application/test'})
response = self.http_conn.getresponse()
self.assertEqual(response.status, 201)
response.read()
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200)
contents = response.read()
self.assertEqual(contents, 'obj contents')
def test_GET_big(self):
# Test with a large-ish object to make sure we handle full socket
# buffers correctly.
obj_contents = 'A' * 4 * 1024 * 1024 # 4 MiB
url_path = '/sda1/2100/a/c/o'
self.http_conn.request('PUT', url_path, obj_contents,
{'X-Timestamp': '1402600322.52126',
'Content-Type': 'application/test'})
response = self.http_conn.getresponse()
self.assertEqual(response.status, 201)
response.read()
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200)
contents = response.read()
self.assertEqual(contents, obj_contents)
def test_quarantine(self):
obj_hash = hash_path('a', 'c', 'o')
url_path = '/sda1/2100/a/c/o'
ts = '1402601849.47475'
self.http_conn.request('PUT', url_path, 'obj contents',
{'X-Timestamp': ts,
'Content-Type': 'application/test'})
response = self.http_conn.getresponse()
self.assertEqual(response.status, 201)
response.read()
# go goof up the file on disk
fname = os.path.join(self.testdir, 'sda1', 'objects', '2100',
obj_hash[-3:], obj_hash, ts + '.data')
with open(fname, 'rb+') as fh:
fh.write('XYZ')
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200)
contents = response.read()
self.assertEqual(contents, 'XYZ contents')
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
# it was quarantined by the previous request
self.assertEqual(response.status, 404)
response.read()
def test_quarantine_on_well_formed_zero_byte_file(self):
# Make sure we work around an oddity in Linux's hash sockets
url_path = '/sda1/2100/a/c/o'
ts = '1402700497.71333'
self.http_conn.request(
'PUT', url_path, '',
{'X-Timestamp': ts, 'Content-Length': '0',
'Content-Type': 'application/test'})
response = self.http_conn.getresponse()
self.assertEqual(response.status, 201)
response.read()
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200)
contents = response.read()
self.assertEqual(contents, '')
self.http_conn.request('GET', url_path)
response = self.http_conn.getresponse()
self.assertEqual(response.status, 200) # still there
contents = response.read()
self.assertEqual(contents, '')
class TestConfigOptionHandling(unittest.TestCase):
def setUp(self):
self.tmpdir = mkdtemp()
def tearDown(self):
rmtree(self.tmpdir)
def _app_config(self, config):
contents = dedent(config)
conf_file = os.path.join(self.tmpdir, 'object-server.conf')
with open(conf_file, 'w') as f:
f.write(contents)
return init_request_processor(conf_file, 'object-server')[:2]
def test_default(self):
config = """
[DEFAULT]
[pipeline:main]
pipeline = object-server
[app:object-server]
use = egg:swift#object
"""
app, config = self._app_config(config)
self.assertNotIn('reclaim_age', config)
for policy in POLICIES:
self.assertEqual(app._diskfile_router[policy].reclaim_age, 604800)
def test_option_in_app(self):
config = """
[DEFAULT]
[pipeline:main]
pipeline = object-server
[app:object-server]
use = egg:swift#object
reclaim_age = 100
"""
app, config = self._app_config(config)
self.assertEqual(config['reclaim_age'], '100')
for policy in POLICIES:
self.assertEqual(app._diskfile_router[policy].reclaim_age, 100)
def test_option_in_default(self):
config = """
[DEFAULT]
reclaim_age = 200
[pipeline:main]
pipeline = object-server
[app:object-server]
use = egg:swift#object
"""
app, config = self._app_config(config)
self.assertEqual(config['reclaim_age'], '200')
for policy in POLICIES:
self.assertEqual(app._diskfile_router[policy].reclaim_age, 200)
def test_option_in_both(self):
config = """
[DEFAULT]
reclaim_age = 300
[pipeline:main]
pipeline = object-server
[app:object-server]
use = egg:swift#object
reclaim_age = 400
"""
app, config = self._app_config(config)
self.assertEqual(config['reclaim_age'], '300')
for policy in POLICIES:
self.assertEqual(app._diskfile_router[policy].reclaim_age, 300)
# use paste "set" syntax to override global config value
config = """
[DEFAULT]
reclaim_age = 500
[pipeline:main]
pipeline = object-server
[app:object-server]
use = egg:swift#object
set reclaim_age = 600
"""
app, config = self._app_config(config)
self.assertEqual(config['reclaim_age'], '600')
for policy in POLICIES:
self.assertEqual(app._diskfile_router[policy].reclaim_age, 600)
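        # Note on the precedence demonstrated above: a plain option set in
        # [app:object-server] loses to a [DEFAULT] value when both are
        # present, while the paste "set" syntax overrides [DEFAULT].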
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "306e11300b309a20bc45ad963a101f84",
"timestamp": "",
"source": "github",
"line_count": 7838,
"max_line_length": 79,
"avg_line_length": 44.178744577698396,
"alnum_prop": 0.5278118709804114,
"repo_name": "nadeemsyed/swift",
"id": "a692a27b72f98d517420ac452729d2fa6062b936",
"size": "346886",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/unit/obj/test_server.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "248"
},
{
"name": "PHP",
"bytes": "377"
},
{
"name": "Python",
"bytes": "8547418"
},
{
"name": "Shell",
"bytes": "1804"
}
],
"symlink_target": ""
}
|
import logging
import peewee
from tornado.options import options
from src.models.vote import Vote, PlaylistVote
from src.models.media_item import MediaItem
from src.models.playlist_item import PlaylistItem
__all__ = (
    'MediaItem', 'PlaylistItem', 'Vote', 'PlaylistVote'
)
models = [MediaItem, PlaylistItem, Vote, PlaylistVote]
if options.create_tables:
for model in models:
try:
model.create_table()
except peewee.OperationalError as e:
logging.warning("Create tables: Operational error: %s" % e)
except peewee.IntegrityError as e:
logging.error("Create tables: %s" % e)
except peewee.InternalError as e:
logging.error("Create tables: Internal error: %s" % e)
|
{
"content_hash": "40a5bd26a45919ecbcb916194678cea9",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 71,
"avg_line_length": 31.608695652173914,
"alnum_prop": 0.6781292984869326,
"repo_name": "cthit/playIT-python",
"id": "e91b2c26c05cdefa0fa3178c13531beb74ad8758",
"size": "727",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "backend/src/models/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8767"
},
{
"name": "HTML",
"bytes": "456"
},
{
"name": "JavaScript",
"bytes": "57813"
},
{
"name": "Python",
"bytes": "70048"
},
{
"name": "Shell",
"bytes": "445"
}
],
"symlink_target": ""
}
|
import aiohttp
from .settings import TELEGRAM_BOT_KEY
async def send_tg_message(chat_id, message):
async with aiohttp.ClientSession() as session:
async with session.post(
f'https://api.telegram.org/bot{TELEGRAM_BOT_KEY}/sendMessage',
json={"chat_id": chat_id, "text": message}
) as resp:
return await resp.json()
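# Hedged usage sketch, not part of the original module (the chat id is a
# placeholder and TELEGRAM_BOT_KEY must be configured in settings):
#
#   import asyncio
#   asyncio.get_event_loop().run_until_complete(
#       send_tg_message(123456789, "deploy finished"))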
|
{
"content_hash": "7437843a38665864faeafea1cc490422",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 74,
"avg_line_length": 27.5,
"alnum_prop": 0.6415584415584416,
"repo_name": "sirmaahe/juan-the-manager",
"id": "9de7e2647a29afcccc19163739d48c825cabc9c5",
"size": "385",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2078"
},
{
"name": "JavaScript",
"bytes": "6114"
},
{
"name": "Python",
"bytes": "5805"
}
],
"symlink_target": ""
}
|
"""
Builder for Nordic nRF51 series ARM microcontrollers.
"""
from os.path import join
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild, Default,
DefaultEnvironment, SConscript)
env = DefaultEnvironment()
SConscript(env.subst(join("$PIOBUILDER_DIR", "scripts", "basearm.py")))
#
# Target: Build executable and linkable firmware
#
target_elf = env.BuildFirmware()
#
# Target: Build the .bin file
#
if "uploadlazy" in COMMAND_LINE_TARGETS:
target_firm = join("$BUILD_DIR", "firmware.bin")
else:
target_firm = env.ElfToBin(join("$BUILD_DIR", "firmware"), target_elf)
#
# Target: Print binary size
#
target_size = env.Alias("size", target_elf, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Upload by default .bin file
#
upload = env.Alias(["upload", "uploadlazy"], target_firm, env.UploadToDisk)
AlwaysBuild(upload)
#
# Target: Define targets
#
Default([target_firm, target_size])
|
{
"content_hash": "133c66b1747ef45866b2213539e37d2c",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 75,
"avg_line_length": 20.170212765957448,
"alnum_prop": 0.6951476793248945,
"repo_name": "bkudria/platformio",
"id": "15e67cc23f5eeb08a1ccc6d0a7dbbcf0b9505993",
"size": "1023",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "platformio/builder/scripts/nordicnrf51.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "21232"
},
{
"name": "Batchfile",
"bytes": "1897"
},
{
"name": "C",
"bytes": "15376"
},
{
"name": "C++",
"bytes": "57106"
},
{
"name": "JavaScript",
"bytes": "465"
},
{
"name": "PowerShell",
"bytes": "2904"
},
{
"name": "Processing",
"bytes": "1536"
},
{
"name": "Python",
"bytes": "229587"
},
{
"name": "QMake",
"bytes": "365"
},
{
"name": "Smarty",
"bytes": "29742"
}
],
"symlink_target": ""
}
|
"""Support for exposing a templated binary sensor."""
import logging
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASSES_SCHEMA,
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
CONF_DEVICE_CLASS,
CONF_ENTITY_PICTURE_TEMPLATE,
CONF_ICON_TEMPLATE,
CONF_SENSORS,
CONF_VALUE_TEMPLATE,
EVENT_HOMEASSISTANT_START,
MATCH_ALL,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.event import async_track_same_state, async_track_state_change
from . import extract_entities, initialise_templates
from .const import CONF_AVAILABILITY_TEMPLATE
_LOGGER = logging.getLogger(__name__)
CONF_DELAY_ON = "delay_on"
CONF_DELAY_OFF = "delay_off"
CONF_ATTRIBUTE_TEMPLATES = "attribute_templates"
SENSOR_SCHEMA = vol.Schema(
{
vol.Required(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_ICON_TEMPLATE): cv.template,
vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template,
vol.Optional(CONF_ATTRIBUTE_TEMPLATES): vol.Schema({cv.string: cv.template}),
vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_DELAY_ON): vol.All(cv.time_period, cv.positive_timedelta),
vol.Optional(CONF_DELAY_OFF): vol.All(cv.time_period, cv.positive_timedelta),
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(SENSOR_SCHEMA)}
)
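# Example configuration accepted by the schema above (a hedged sketch; the
# sensor name, entity and template are illustrative only):
#
#   binary_sensor:
#     - platform: template
#       sensors:
#         workday_morning:
#           friendly_name: "Workday morning"
#           value_template: "{{ is_state('binary_sensor.workday', 'on') }}"
#           delay_on: "00:05:00"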
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up template binary sensors."""
sensors = []
for device, device_config in config[CONF_SENSORS].items():
value_template = device_config[CONF_VALUE_TEMPLATE]
icon_template = device_config.get(CONF_ICON_TEMPLATE)
entity_picture_template = device_config.get(CONF_ENTITY_PICTURE_TEMPLATE)
availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE)
attribute_templates = device_config.get(CONF_ATTRIBUTE_TEMPLATES, {})
friendly_name = device_config.get(ATTR_FRIENDLY_NAME, device)
device_class = device_config.get(CONF_DEVICE_CLASS)
delay_on = device_config.get(CONF_DELAY_ON)
delay_off = device_config.get(CONF_DELAY_OFF)
templates = {
CONF_VALUE_TEMPLATE: value_template,
CONF_ICON_TEMPLATE: icon_template,
CONF_ENTITY_PICTURE_TEMPLATE: entity_picture_template,
CONF_AVAILABILITY_TEMPLATE: availability_template,
}
initialise_templates(hass, templates, attribute_templates)
entity_ids = extract_entities(
device,
"binary sensor",
device_config.get(ATTR_ENTITY_ID),
templates,
attribute_templates,
)
sensors.append(
BinarySensorTemplate(
hass,
device,
friendly_name,
device_class,
value_template,
icon_template,
entity_picture_template,
availability_template,
entity_ids,
delay_on,
delay_off,
attribute_templates,
)
)
async_add_entities(sensors)
class BinarySensorTemplate(BinarySensorEntity):
"""A virtual binary sensor that triggers from another sensor."""
def __init__(
self,
hass,
device,
friendly_name,
device_class,
value_template,
icon_template,
entity_picture_template,
availability_template,
entity_ids,
delay_on,
delay_off,
attribute_templates,
):
"""Initialize the Template binary sensor."""
self.hass = hass
self.entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device, hass=hass)
self._name = friendly_name
self._device_class = device_class
self._template = value_template
self._state = None
self._icon_template = icon_template
self._availability_template = availability_template
self._entity_picture_template = entity_picture_template
self._icon = None
self._entity_picture = None
self._entities = entity_ids
self._delay_on = delay_on
self._delay_off = delay_off
self._available = True
self._attribute_templates = attribute_templates
self._attributes = {}
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def template_bsensor_state_listener(entity, old_state, new_state):
"""Handle the target device state changes."""
self.async_check_state()
@callback
def template_bsensor_startup(event):
"""Update template on startup."""
if self._entities != MATCH_ALL:
# Track state change only for valid templates
async_track_state_change(
self.hass, self._entities, template_bsensor_state_listener
)
self.async_check_state()
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, template_bsensor_startup
)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return self._icon
@property
def entity_picture(self):
"""Return the entity_picture to use in the frontend, if any."""
return self._entity_picture
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state
@property
def device_class(self):
"""Return the sensor class of the sensor."""
return self._device_class
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def available(self):
"""Availability indicator."""
return self._available
@callback
def _async_render(self):
"""Get the state of template."""
state = None
try:
state = self._template.async_render().lower() == "true"
except TemplateError as ex:
if ex.args and ex.args[0].startswith(
"UndefinedError: 'None' has no attribute"
):
# Common during HA startup - so just a warning
_LOGGER.warning(
"Could not render template %s, the state is unknown", self._name
)
return
_LOGGER.error("Could not render template %s: %s", self._name, ex)
attrs = {}
if self._attribute_templates is not None:
for key, value in self._attribute_templates.items():
try:
attrs[key] = value.async_render()
except TemplateError as err:
_LOGGER.error("Error rendering attribute %s: %s", key, err)
self._attributes = attrs
templates = {
"_icon": self._icon_template,
"_entity_picture": self._entity_picture_template,
"_available": self._availability_template,
}
for property_name, template in templates.items():
if template is None:
continue
try:
value = template.async_render()
if property_name == "_available":
value = value.lower() == "true"
setattr(self, property_name, value)
except TemplateError as ex:
friendly_property_name = property_name[1:].replace("_", " ")
if ex.args and ex.args[0].startswith(
"UndefinedError: 'None' has no attribute"
):
# Common during HA startup - so just a warning
_LOGGER.warning(
"Could not render %s template %s, the state is unknown.",
friendly_property_name,
self._name,
)
else:
_LOGGER.error(
"Could not render %s template %s: %s",
friendly_property_name,
self._name,
ex,
)
return state
return state
@callback
def async_check_state(self):
"""Update the state from the template."""
state = self._async_render()
        # return if the state doesn't change or is invalid
if state is None or state == self.state:
return
@callback
def set_state():
"""Set state of template binary sensor."""
self._state = state
self.async_write_ha_state()
# state without delay
if (state and not self._delay_on) or (not state and not self._delay_off):
set_state()
return
period = self._delay_on if state else self._delay_off
async_track_same_state(
self.hass,
period,
set_state,
entity_ids=self._entities,
async_check_same_func=lambda *args: self._async_render() == state,
)
async def async_update(self):
"""Force update of the state from the template."""
self.async_check_state()
|
{
"content_hash": "b4fb758fef7bab748c4011cde2629529",
"timestamp": "",
"source": "github",
"line_count": 304,
"max_line_length": 88,
"avg_line_length": 32.91776315789474,
"alnum_prop": 0.5786949135605076,
"repo_name": "robbiet480/home-assistant",
"id": "94d0f9d597bcb4f46affb2fc6addf488a7ffaba1",
"size": "10007",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/template/binary_sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18837456"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import hashlib
from django.db import migrations, models
def setPassword(user, password):
    user.password = hashlib.sha256(password.encode('utf-8')).hexdigest()
    user.save()
def load_users(app, schema_editor):
    User = app.get_model('User', 'User')
    u = User(username='admin', nick_name='Admin', superuser=True)
    u.save()
    setPassword(u, 'adm1n')
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Session',
fields=[
('token', models.CharField(max_length=64, serialize=False, primary_key=True)),
('last_hearbeat', models.DateTimeField()),
('created', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='User',
fields=[
('username', models.CharField(max_length=40, serialize=False, primary_key=True)),
('password', models.CharField(max_length=64, editable=False)),
('nick_name', models.CharField(max_length=100, blank=True, null=True)),
('can_promote', models.BooleanField(default=False)),
('can_create', models.BooleanField(default=False)),
('superuser', models.BooleanField(default=False, editable=False)),
('updated', models.DateTimeField(auto_now=True)),
('created', models.DateTimeField(auto_now_add=True)),
],
),
migrations.AddField(
model_name='session',
name='user',
field=models.ForeignKey(to='User.User'),
),
migrations.RunPython( load_users ),
]
|
{
"content_hash": "93345da19e1a24d40b667913fdc5b56b",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 97,
"avg_line_length": 33.15094339622642,
"alnum_prop": 0.5737051792828686,
"repo_name": "Virtustream-OSS/packrat",
"id": "8fbb0678143c858fc4ac92a9eb8a1bfef13d8b3c",
"size": "1781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packrat/User/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "30"
},
{
"name": "HTML",
"bytes": "1170"
},
{
"name": "JavaScript",
"bytes": "48676"
},
{
"name": "Makefile",
"bytes": "2449"
},
{
"name": "Python",
"bytes": "57019"
},
{
"name": "Shell",
"bytes": "1725"
}
],
"symlink_target": ""
}
|
"""
https://github.com/imlucas/flask-virtualenv
"""
from setuptools import setup
setup(
name='Flask-Virtualenv',
version='0.2.0',
url='http://imlucas.com',
license='BSD',
author='Lucas Hrabovsky',
author_email='hrabovsky.lucas@gmail.com',
description='Manage a virtualenv for your flask app.',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'virtualenv>=1.5.1'
],
test_suite='nose.collector',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
{
"content_hash": "c9f148f5b2b992484d0613b9acb34c21",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 70,
"avg_line_length": 28.771428571428572,
"alnum_prop": 0.6067527308838133,
"repo_name": "marchon/flask-virtualenv",
"id": "aeef0823ee712dce0bd735f13a903bba37565c03",
"size": "1029",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from cactus.deployment.auth import BaseKeyringCredentialsManager
class CloudFilesCredentialsManager(BaseKeyringCredentialsManager):
_username_config_entry = "cloudfiles-username"
_password_display_name = "API Key"
_keyring_service = "cactus/cloudfiles"
|
{
"content_hash": "3ece156141f98efdce0bfcdb585bdf92",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 66,
"avg_line_length": 38.142857142857146,
"alnum_prop": 0.797752808988764,
"repo_name": "koenbok/Cactus",
"id": "fc2c1f2127b70bddd78ee083acc068d8da76dfd2",
"size": "281",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "cactus/deployment/cloudfiles/auth.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "261"
},
{
"name": "HTML",
"bytes": "8133"
},
{
"name": "JavaScript",
"bytes": "60"
},
{
"name": "Makefile",
"bytes": "448"
},
{
"name": "Python",
"bytes": "238893"
}
],
"symlink_target": ""
}
|
import numpy
import os.path
import glob
import re
import config
import util
import split
def components(arr):
    '''Finds the connected components of a 2d array. Returns (mask, component_number) where:
    mask: 2d array of the same size, where each non-empty cell is marked with its component number
    component_number: the number of connected components'''
def empty(k):
return k == 1
def dfs(x,y):
#check if (x,y) is inside array
if(not (0 <= x and x < arr.shape[0] and 0 <= y and y < arr.shape[1])):
return
#check if (x,y) should be visited
if(empty(arr[x][y])):
return
#check if (x,y) was not already visited
if(mask[x][y] != 0):
return
#visit (x,y)
mask[x][y] = component_number
dfs(x-1,y)
dfs(x+1,y)
dfs(x,y+1)
dfs(x,y-1)
#dfs(x-1,y+1)
#dfs(x+1,y+1)
#dfs(x-1,y-1)
#dfs(x+1,y-1)
mask = numpy.zeros(shape=arr.shape,dtype=numpy.uint8)
component_number = 0
for y in range(arr.shape[1]):
for x in range(arr.shape[0]):
if( (not empty(arr[x][y])) and (mask[x][y] == 0)):
component_number += 1
dfs(x,y)
return (mask,component_number)
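# Tiny usage sketch for components(): cells equal to 1 are treated as empty,
# everything else as filled (see empty() above).
#
#   example = numpy.ones((4, 5), dtype=numpy.uint8)
#   example[0:2, 0:2] = 0   # first blob
#   example[3, 3:5] = 0     # second blob
#   m, n = components(example)
#   # n == 2; m labels the first blob with 1s and the second with 2s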
def box_coordinates(mask,num_of_components):
    '''Finds the bounding rectangle of each connected component.
    Returns a list of 4-tuples in (x1,x2,y1,y2) format'''
def x1(m):
for x in range(mask.shape[0]):
for y in range(mask.shape[1]):
if(mask[x][y] == m):
return x
def x2(m):
for x in reversed(range(mask.shape[0])):
for y in range(0,mask.shape[1]):
if(mask[x][y] == m):
return x
def y1(m):
for y in range(mask.shape[1]):
for x in range(mask.shape[0]):
if(mask[x][y] == m):
return y
def y2(m):
for y in reversed(range(mask.shape[1])):
for x in range(mask.shape[0]):
if(mask[x][y] == m):
return y
boxes = []
for m in range(1,num_of_components+1):
boxes.append((x1(m),x2(m),y1(m),y2(m)))
return boxes
def cut_boxes(mask,boxes):
    '''Cuts out of the 2d array a rectangle containing only the elements of the corresponding connected component'''
cutted = []
for c in range(len(boxes)):
x1,x2,y1,y2 = boxes[c]
box = numpy.zeros(shape=(x2-x1+1,y2-y1+1),dtype=numpy.uint8)
for x in range(x1,x2+1):
for y in range(y1,y2+1):
#c-th box contains (c+1)th component
if(mask[x][y] == c+1):
box[x-x1][y-y1] = 1
cutted.append(box)
return cutted
def filter_dots(shape,box_coord):
    '''Filters out dots over "i" and "j" letters by replacing them with an empty box'''
def is_dot(z):
return (z[1]-z[0])+1 <= config.dot_size and z[1] < shape[0]*0.5
return list(map(lambda z: (0,0,0,0) if is_dot(z) else z,box_coord))
def filter_nulls(segments):
return list(filter(lambda x: x.shape[0] > 1,segments))
def segment_image(image):
'''Segments image into rectangular parts, each containing connected component'''
mask,num_of_components = components(image)
box_coord = box_coordinates(mask,num_of_components)
box_coord = filter_dots(image.shape,box_coord)
segments = cut_boxes(mask,box_coord)
segments = filter_nulls(segments)
return segments
def var_to_fixed(v_sgm):
    '''Converts a variable-size 2d character segment into a centered, fixed-size 1d segment'''
h_v = v_sgm.shape[0]
w_v = v_sgm.shape[1]
assert(config.sample_h > h_v)
assert(config.sample_w > w_v)
shift_h = int((config.sample_h - h_v)/2)
shift_w = int((config.sample_w - w_v)/2)
f_sgm = numpy.zeros(shape=(config.sample_h,config.sample_w),dtype=numpy.uint8)
for i in range(h_v):
for j in range(w_v):
f_sgm[i+shift_h][j+shift_w]=v_sgm[i][j]
return f_sgm.flatten()
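# Worked example (hedged; assumes config.sample_h == config.sample_w == 8,
# larger than the segment as asserted above): a 2x3 glyph is shifted by
# (int((8-2)/2), int((8-3)/2)) == (3, 2), so the character ends up centered
# in the flattened fixed-size feature vector.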
def image_to_features(image,captcha):
'''Extracts feature and label matrix from image.'''
image_segments = segment_image(image)
if(len(captcha) != len(image_segments)):
return (numpy.zeros((0,config.sample_h*config.sample_w),dtype=numpy.uint8),numpy.array([],dtype=numpy.uint8))
X=numpy.array(list(map(var_to_fixed,image_segments)))
y = numpy.array(list(map(lambda c:ord(c)-ord(config.first_character),captcha)))
return (X,y)
def extract_features():
'''Extract features from all labeled images.'''
image_dir = util.get_image_dir()
images = glob.glob(image_dir+"/*.gif")
characters = []
var_segments = []
def extract_single(image_file):
captcha = re.match("(.*)\.gif",os.path.basename(image_file)).group(1)
image = util.read_grey_image(image_file)
return image_to_features(image,captcha)
X,y = list(zip(*list(map(extract_single,images))))
#return X,y
X = numpy.concatenate(X,axis=0)
y = numpy.concatenate(y,axis=0)
return (X,y)
def split_segments(image_segments):
    '''Splits crossing (joined) characters.
    image_segments: list of character segments
    return value: list of split characters'''
image_segments_2 = []
for segment in image_segments:
if(segment.shape[1] >= config.double_character_width):
splitted = split.split_joint(segment, letters = 2)
image_segments_2 += splitted
else:
image_segments_2.append(segment)
return image_segments_2
|
{
"content_hash": "e26a8376eee0712df9e70514f429cddd",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 117,
"avg_line_length": 34.8159509202454,
"alnum_prop": 0.5806167400881057,
"repo_name": "nekomiko/wkcaptcha",
"id": "fc456ca3838efefaf533db8db3db5a6119facd69",
"size": "5675",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "segment.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Perl6",
"bytes": "6398"
},
{
"name": "Python",
"bytes": "30363"
}
],
"symlink_target": ""
}
|
from test_collective_base import TestCollectiveRunnerBase, runtime_main
import paddle
import paddle.fluid as fluid
import paddle.fluid.layers as layers
from paddle.fluid import core
paddle.enable_static()
class TestCollectiveReduce(TestCollectiveRunnerBase):
def __init__(self):
self.global_ring_id = 0
def get_model(self, main_prog, startup_program):
ring_id = 0
rootid = 1
with fluid.program_guard(main_prog, startup_program):
tindata = layers.data(
name="tindata", shape=[10, 1000], dtype='float32'
)
toutdata = main_prog.current_block().create_var(
name="outofreduce",
dtype='float32',
type=core.VarDesc.VarType.LOD_TENSOR,
persistable=False,
stop_gradient=False,
)
main_prog.global_block().append_op(
type="c_reduce_sum",
inputs={'X': tindata},
attrs={
'ring_id': ring_id,
'use_calc_stream': True,
'root_id': rootid,
},
outputs={'Out': toutdata},
)
main_prog.global_block().append_op(
type="c_sync_comm_stream",
inputs={'X': toutdata},
outputs={'Out': toutdata},
attrs={'ring_id': ring_id},
)
return toutdata
if __name__ == "__main__":
runtime_main(TestCollectiveReduce, "reduce", 0)
|
{
"content_hash": "232adb7380092b88973efe3a348db45b",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 71,
"avg_line_length": 31.591836734693878,
"alnum_prop": 0.5167958656330749,
"repo_name": "PaddlePaddle/Paddle",
"id": "7d122764f5d4da6b9a094faa4bbc9760e36c3774",
"size": "2159",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/collective/collective_reduce_op_calc_stream.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36848680"
},
{
"name": "CMake",
"bytes": "902619"
},
{
"name": "Cuda",
"bytes": "5227207"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36203874"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553177"
}
],
"symlink_target": ""
}
|
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import password_reset_confirm, password_reset
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render, redirect
from mysite.settings import HOME_PAGE
from portal.forms import UserForm, LoginForm
# Create your views here.
def register(request):
# Marker to see if user is registered
registered = False
    # Only process the form data if this is a POST request
if request.method == 'POST':
# Grab data from request
user_form = UserForm(data=request.POST)
        # If the form is valid
if user_form.is_valid():
# Save the user's form data to the database.
user = user_form.save()
# Hash protect the user pass
# Once hashed, save
user.set_password(user.password)
user.username = user.email
user.save()
            # Registration was successful
            registered = True
# Invalid form or forms - mistakes or something else?
# Print problems to the terminal.
# They'll also be shown to the user.
else:
print(user_form.errors)
else:
user_form = UserForm()
return render(request,
'portal/register.html',
{'user_form': user_form, 'registered':registered})
def user_login(request):
if request.method == 'POST':
# Read the data provided
login_form = LoginForm(data=request.POST)
email = request.POST.get('email')
password = request.POST.get('password')
if login_form.is_valid():
# Use Django's system to authenticate the credentials
user = authenticate(username=email, password=password)
# If the user is valid
if user:
if user.is_active:
# Login and redirect
login(request, user)
return HttpResponseRedirect(HOME_PAGE)
else:
return HttpResponse("This account has been disabled")
else:
# Incorrect log in credentials
print(login_form.errors)
else:
print(login_form.errors)
else: # In case of get method
login_form = LoginForm()
return render(request, 'portal/login.html', {'login_form':login_form})
@login_required
def user_logout(request):
logout(request)
return HttpResponseRedirect(HOME_PAGE)
def index(request):
return redirect('register')
@login_required
def restricted(request):
return HttpResponse("open")
def reset_confirm(request, uidb36=None, token=None): # Call django's password reset function
return password_reset_confirm(request, template_name='portal/reset_confirm.html',
uidb36=uidb36, token=token, post_reset_redirect=reverse('portal:login'))
def reset(request): # Sends the template
return password_reset(request, template_name='portal/reset.html',
email_template_name='portal/reset_email.html',
subject_template_name='portal/reset_subject.txt',
post_reset_redirect=reverse('portal:login'))
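# Hedged wiring sketch, not part of this module: with Django 1.x-style URL
# configuration these views could be hooked up roughly as follows; the regexes
# and the 'portal' namespace are assumptions based on the reverse('portal:login')
# calls above.
#
#   from django.conf.urls import url
#   from portal import views
#
#   urlpatterns = [
#       url(r'^$', views.index, name='index'),
#       url(r'^register/$', views.register, name='register'),
#       url(r'^login/$', views.user_login, name='login'),
#       url(r'^logout/$', views.user_logout, name='logout'),
#   ]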
|
{
"content_hash": "88a4f620dc7143b4506dd92f8803cb22",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 92,
"avg_line_length": 35.02105263157895,
"alnum_prop": 0.6299969942891493,
"repo_name": "MajinBui/django-portal",
"id": "9037b3c39d594f85a5d29612e237caf27d8af27f",
"size": "3327",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "portal/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3689"
},
{
"name": "Python",
"bytes": "8008"
}
],
"symlink_target": ""
}
|
from runner.koan import *
class AboutStringManipulation(Koan):
def test_use_format_to_interpolate_variables(self):
value1 = 'one'
value2 = 2
string = "The values are {0} and {1}".format(value1, value2)
self.assertEqual("The values are one and 2", string)
def test_formatted_values_can_be_shown_in_any_order_or_be_repeated(self):
value1 = 'doh'
value2 = 'DOH'
string = "The values are {1}, {0}, {0} and {1}!".format(value1, value2)
self.assertEqual("The values are DOH, doh, doh and DOH!", string)
def test_any_python_expression_may_be_interpolated(self):
import math # import a standard python module with math functions
decimal_places = 4
string = "The square root of 5 is {0:.{1}f}".format(math.sqrt(5), \
decimal_places)
self.assertEqual("The square root of 5 is 2.2361", string)
def test_you_can_get_a_substring_from_a_string(self):
string = "Bacon, lettuce and tomato"
self.assertEqual("let", string[7:10])
def test_you_can_get_a_single_character_from_a_string(self):
string = "Bacon, lettuce and tomato"
self.assertEqual("a", string[1])
def test_single_characters_can_be_represented_by_integers(self):
self.assertEqual(97, ord('a'))
self.assertEqual(True, ord('b') == (ord('a') + 1))
def test_strings_can_be_split(self):
string = "Sausage Egg Cheese"
words = string.split()
self.assertEqual(["Sausage", "Egg", "Cheese"], words)
def test_strings_can_be_split_with_different_patterns(self):
import re # import python regular expression library
string = "the,rain;in,spain"
pattern = re.compile(',|;')
words = pattern.split(string)
self.assertEqual(["the", "rain", "in", "spain"], words)
# `pattern` is a Python regular expression pattern which matches
# ',' or ';'
def test_raw_strings_do_not_interpret_escape_characters(self):
string = r'\n'
self.assertNotEqual('\n', string)
self.assertEqual(r"\n", string)
self.assertEqual(2, len(string))
# Useful in regular expressions, file paths, URLs, etc.
def test_strings_can_be_joined(self):
words = ["Now", "is", "the", "time"]
self.assertEqual("Now is the time", ' '.join(words))
def test_strings_can_change_case(self):
self.assertEqual("Guido", 'guido'.capitalize())
self.assertEqual("GUIDO", 'guido'.upper())
self.assertEqual("timbot", 'TimBot'.lower())
self.assertEqual("Guido Van Rossum", 'guido van rossum'.title())
self.assertEqual("tOtAlLy AwEsOmE", 'ToTaLlY aWeSoMe'.swapcase())
|
{
"content_hash": "99706824374dc7bdf64f6bbc363f15f5",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 79,
"avg_line_length": 37.45205479452055,
"alnum_prop": 0.6185076810534016,
"repo_name": "klintwood/python_koans",
"id": "c9b5e8672a8ce806c6d09218a8041506b60f19e1",
"size": "2781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python2/koans/about_string_manipulation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "329916"
},
{
"name": "Shell",
"bytes": "1615"
}
],
"symlink_target": ""
}
|
import errno
import rospy
def wait_for(test, timeout=1.0, raise_on_error=True, rate=100,
timeout_msg="timeout expired", body=None):
"""
Waits until some condition evaluates to true.
@param test: zero param function to be evaluated
@param timeout: max amount of time to wait. negative/inf for indefinitely
@param raise_on_error: raise or just return False
@param rate: the rate at which to check
    @param timeout_msg: message to supply to the timeout exception
@param body: optional function to execute while waiting
"""
end_time = rospy.get_time() + timeout
rate = rospy.Rate(rate)
notimeout = (timeout < 0.0) or timeout == float("inf")
while not test():
if rospy.is_shutdown():
if raise_on_error:
raise OSError(errno.ESHUTDOWN, "ROS Shutdown")
return False
elif (not notimeout) and (rospy.get_time() >= end_time):
if raise_on_error:
raise OSError(errno.ETIMEDOUT, timeout_msg)
return False
if callable(body):
body()
rate.sleep()
return True
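# Hedged usage sketch (assumes a ROS node has been initialised elsewhere;
# gripper_ready is a hypothetical zero-argument predicate):
#
#   wait_for(lambda: gripper_ready(),
#            timeout=5.0,
#            timeout_msg="gripper never became ready")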
|
{
"content_hash": "e407b470f877dbf447fa35fe734cabdf",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 77,
"avg_line_length": 34.42424242424242,
"alnum_prop": 0.6214788732394366,
"repo_name": "UCRoboticsLab/BaxterTictactoe",
"id": "d3676d6014a1e83050429963623bd912879b5226",
"size": "2690",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/baxter_interface/src/baxter_dataflow/wait_for.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1667815"
},
{
"name": "C++",
"bytes": "768325"
},
{
"name": "CMake",
"bytes": "49275"
},
{
"name": "CSS",
"bytes": "11972"
},
{
"name": "HTML",
"bytes": "21143"
},
{
"name": "JavaScript",
"bytes": "31105"
},
{
"name": "Python",
"bytes": "586863"
},
{
"name": "Shell",
"bytes": "12242"
}
],
"symlink_target": ""
}
|
import ansible.runner
import ansible.inventory
import sys
import os
class AnsibleRunner(object):
def __init__(self, host=None, remote_user=None, remote_pass=None):
self.host_list = [host]
self.remote_user = remote_user
self.remote_pass = remote_pass
self.inventory = ansible.inventory.Inventory(self.host_list)
def do_reboot(self):
module_name = 'command'
module_args = 'reboot -f'
out = self._exec(module_name, module_args)
error_message = out['dark'].get(self.host_list[0], {}).get('msg')
failed = out['dark'].get(self.host_list[0], {}).get('failed')
if error_message and failed:
sys.stderr.write('Error, {}\n'.format(error_message))
raise Exception(error_message)
return out
@staticmethod
def execute_on_remote():
yml = os.getcwd() + os.sep + 'configs' + os.sep + 'jump.yaml'
out = os.system('ansible-playbook %s' % yml)
return out
def copy(self, filename, src, dest):
module_name = 'copy'
module_args = 'src=%s%s dest=%s' % (src, filename, dest)
return self._exec(module_name, module_args)
def fetch(self, filename, src, dest, flat='yes'):
module_name = 'fetch'
module_args = 'src=%s%s dest=%s flat=%s' % (src, filename, dest, flat)
return self._exec(module_name, module_args)
def shell(self, command):
module_name = 'shell'
module_args = command
return self._exec(module_name, module_args)
def _exec(self, module_name, module_args):
runner = ansible.runner.Runner(
module_name=module_name,
module_args=module_args,
remote_user=self.remote_user,
remote_pass=self.remote_pass,
inventory=self.inventory,
)
out = runner.run()
return out
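# Hedged usage sketch (assumes the legacy Ansible 1.x runner API that this
# class wraps; the host and credentials are placeholders):
#
#   runner = AnsibleRunner(host='10.0.0.5', remote_user='cloud',
#                          remote_pass='secret')
#   result = runner.shell('uptime')
#   print(result['contacted'].get('10.0.0.5', {}).get('stdout'))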
|
{
"content_hash": "fa8435cb83d04f9cf65e81a9d46a1ad8",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 78,
"avg_line_length": 33.55357142857143,
"alnum_prop": 0.591271953166578,
"repo_name": "cisco-oss-eng/Cloud99",
"id": "059e00e54830a6a2204f34aafb2a7e67397818a7",
"size": "2482",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloud99/utils/ansible_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "76798"
}
],
"symlink_target": ""
}
|
from oneview_redfish_toolkit.api.redfish_json_validator \
import RedfishJsonValidator
class VLanNetworkInterface(RedfishJsonValidator):
"""Creates a VLanNetworkInterface Redfish dict
Populates self.redfish with ethernet network data retrieved
from OneView
"""
SCHEMA_NAME = 'VLanNetworkInterface'
def __init__(self, ethernet_network, endpoint):
"""VLanNetworkInterface constructor
Populates self.redfish with some hardcoded VLanNetworkInterface
values and with Ethernet Network data retrieved from OneView.
Args:
            ethernet_network: Ethernet network dict from OneView
endpoint: endpoint uri from original REST
"""
super().__init__(self.SCHEMA_NAME)
ethernet_network_id = \
ethernet_network["uri"].split("/")[-1]
self.redfish["@odata.type"] = self.get_odata_type()
self.redfish["Id"] = ethernet_network_id
self.redfish["Name"] = ethernet_network["name"]
self.redfish["VLANEnable"] = True
self.redfish["VLANId"] = ethernet_network["vlanId"]
self.redfish["@odata.context"] = \
"/redfish/v1/$metadata#VLanNetworkInterface"
self.redfish["@odata.id"] = endpoint
self._validate()
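# Hedged usage sketch: a minimal OneView-style ethernet network dict (the
# field values and the Redfish endpoint below are illustrative only).
#
#   network = {
#       "uri": "/rest/ethernet-networks/abc-123",
#       "name": "corp-vlan",
#       "vlanId": 100,
#   }
#   vlan = VLanNetworkInterface(
#       network,
#       "/redfish/v1/Systems/uuid/EthernetInterfaces/1/VLANs/abc-123")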
|
{
"content_hash": "968b2d09d11a423b09fa766521e8ff8b",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 75,
"avg_line_length": 33.256410256410255,
"alnum_prop": 0.6368542791056284,
"repo_name": "HewlettPackard/oneview-redfish-toolkit",
"id": "646fa97403998d169cc8417ddf56769b21f2880c",
"size": "1926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oneview_redfish_toolkit/api/vlan_network_interface.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "284"
},
{
"name": "Python",
"bytes": "979438"
},
{
"name": "Shell",
"bytes": "866"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import copy
import os
import sys
import time
import unittest
from nose.plugins.skip import SkipTest
from nose.tools import assert_raises
import numpy
from six.moves import xrange
import theano
from theano import tensor, config
from theano.sandbox import rng_mrg
from theano.sandbox.rng_mrg import MRG_RandomStreams
from theano.sandbox.cuda import cuda_available
if cuda_available:
from theano.sandbox.cuda import float32_shared_constructor
from theano.tests import unittest_tools as utt
from theano.tests.unittest_tools import attr
# TODO: test gpu
# Done in test_consistency_GPU_{serial,parallel}
# TODO: test MRG_RandomStreams
# Partly done in test_consistency_randomstreams
# TODO: test optimizer mrg_random_make_inplace
# TODO: make tests work when no flags are given. Currently requires:
# THEANO_FLAGS=device=gpu0,floatX=float32
# Partly done, in test_consistency_GPU_{serial,parallel}
mode = config.mode
mode_with_gpu = theano.compile.mode.get_default_mode().including('gpu')
utt.seed_rng()
# Results generated by Java code using L'Ecuyer et al.'s code, with:
# main seed: [12345]*6 (default)
# 12 streams
# 7 substreams for each stream
# 5 samples drawn from each substream
java_samples = numpy.loadtxt(os.path.join(os.path.split(theano.__file__)[0],
'sandbox',
'samples_MRG31k3p_12_7_5.txt'))
def test_deterministic():
seed = utt.fetch_seed()
sample_size = (10, 20)
test_use_cuda = [False]
if cuda_available:
test_use_cuda.append(True)
for use_cuda in test_use_cuda:
# print 'use_cuda =', use_cuda
R = MRG_RandomStreams(seed=seed, use_cuda=use_cuda)
u = R.uniform(size=sample_size)
f = theano.function([], u)
fsample1 = f()
fsample2 = f()
assert not numpy.allclose(fsample1, fsample2)
R2 = MRG_RandomStreams(seed=seed, use_cuda=use_cuda)
u2 = R2.uniform(size=sample_size)
g = theano.function([], u2)
gsample1 = g()
gsample2 = g()
assert numpy.allclose(fsample1, gsample1)
assert numpy.allclose(fsample2, gsample2)
def test_consistency_randomstreams():
"""
Verify that the random numbers generated by MRG_RandomStreams
are the same as the reference (Java) implementation by L'Ecuyer et al.
"""
seed = 12345
n_samples = 5
n_streams = 12
n_substreams = 7
test_use_cuda = [False]
if cuda_available:
test_use_cuda.append(True)
for use_cuda in test_use_cuda:
# print 'use_cuda =', use_cuda
samples = []
rng = MRG_RandomStreams(seed=seed, use_cuda=use_cuda)
for i in range(n_streams):
stream_samples = []
u = rng.uniform(size=(n_substreams,), nstreams=n_substreams)
f = theano.function([], u)
for j in range(n_samples):
s = f()
stream_samples.append(s)
stream_samples = numpy.array(stream_samples)
stream_samples = stream_samples.T.flatten()
samples.append(stream_samples)
samples = numpy.array(samples).flatten()
assert(numpy.allclose(samples, java_samples))
def test_consistency_cpu_serial():
"""
Verify that the random numbers generated by mrg_uniform, serially,
are the same as the reference (Java) implementation by L'Ecuyer et al.
"""
seed = 12345
n_samples = 5
n_streams = 12
n_substreams = 7
samples = []
curr_rstate = numpy.array([seed] * 6, dtype='int32')
for i in range(n_streams):
stream_rstate = curr_rstate.copy()
for j in range(n_substreams):
rstate = theano.shared(numpy.array([stream_rstate.copy()],
dtype='int32'))
new_rstate, sample = rng_mrg.mrg_uniform.new(rstate, ndim=None,
dtype=config.floatX,
size=(1,))
# Not really necessary, just mimicking
# rng_mrg.MRG_RandomStreams' behavior
sample.rstate = rstate
sample.update = (rstate, new_rstate)
rstate.default_update = new_rstate
f = theano.function([], sample)
for k in range(n_samples):
s = f()
samples.append(s)
# next substream
stream_rstate = rng_mrg.ff_2p72(stream_rstate)
# next stream
curr_rstate = rng_mrg.ff_2p134(curr_rstate)
samples = numpy.array(samples).flatten()
assert(numpy.allclose(samples, java_samples))
def test_consistency_cpu_parallel():
"""
Verify that the random numbers generated by mrg_uniform, in parallel,
are the same as the reference (Java) implementation by L'Ecuyer et al.
"""
seed = 12345
n_samples = 5
n_streams = 12
n_substreams = 7 # 7 samples will be drawn in parallel
samples = []
curr_rstate = numpy.array([seed] * 6, dtype='int32')
for i in range(n_streams):
stream_samples = []
rstate = [curr_rstate.copy()]
for j in range(1, n_substreams):
rstate.append(rng_mrg.ff_2p72(rstate[-1]))
rstate = numpy.asarray(rstate)
rstate = theano.shared(rstate)
new_rstate, sample = rng_mrg.mrg_uniform.new(rstate, ndim=None,
dtype=config.floatX,
size=(n_substreams,))
# Not really necessary, just mimicking
# rng_mrg.MRG_RandomStreams' behavior
sample.rstate = rstate
sample.update = (rstate, new_rstate)
rstate.default_update = new_rstate
f = theano.function([], sample)
for k in range(n_samples):
s = f()
stream_samples.append(s)
samples.append(numpy.array(stream_samples).T.flatten())
# next stream
curr_rstate = rng_mrg.ff_2p134(curr_rstate)
samples = numpy.array(samples).flatten()
assert(numpy.allclose(samples, java_samples))
def test_consistency_GPU_serial():
"""
Verify that the random numbers generated by GPU_mrg_uniform, serially,
are the same as the reference (Java) implementation by L'Ecuyer et al.
"""
if not cuda_available:
raise SkipTest('Optional package cuda not available')
if config.mode == 'FAST_COMPILE':
mode = 'FAST_RUN'
else:
mode = config.mode
seed = 12345
n_samples = 5
n_streams = 12
n_substreams = 7
samples = []
curr_rstate = numpy.array([seed] * 6, dtype='int32')
for i in range(n_streams):
stream_rstate = curr_rstate.copy()
for j in range(n_substreams):
substream_rstate = numpy.array(stream_rstate.copy(), dtype='int32')
# HACK - we transfer these int32 to the GPU memory as float32
# (reinterpret_cast)
tmp_float_buf = numpy.frombuffer(substream_rstate.data,
dtype='float32')
# Transfer to device
rstate = float32_shared_constructor(tmp_float_buf)
new_rstate, sample = rng_mrg.GPU_mrg_uniform.new(rstate, ndim=None,
dtype='float32',
size=(1,))
rstate.default_update = new_rstate
# Not really necessary, just mimicking
# rng_mrg.MRG_RandomStreams' behavior
sample.rstate = rstate
sample.update = (rstate, new_rstate)
# We need the sample back in the main memory
cpu_sample = tensor.as_tensor_variable(sample)
f = theano.function([], cpu_sample, mode=mode)
for k in range(n_samples):
s = f()
samples.append(s)
# next substream
stream_rstate = rng_mrg.ff_2p72(stream_rstate)
# next stream
curr_rstate = rng_mrg.ff_2p134(curr_rstate)
samples = numpy.array(samples).flatten()
assert(numpy.allclose(samples, java_samples))
def test_consistency_GPU_parallel():
"""
Verify that the random numbers generated by GPU_mrg_uniform, in
parallel, are the same as the reference (Java) implementation by
L'Ecuyer et al.
"""
if not cuda_available:
raise SkipTest('Optional package cuda not available')
if config.mode == 'FAST_COMPILE':
mode = 'FAST_RUN'
else:
mode = config.mode
seed = 12345
n_samples = 5
n_streams = 12
n_substreams = 7 # 7 samples will be drawn in parallel
samples = []
curr_rstate = numpy.array([seed] * 6, dtype='int32')
for i in range(n_streams):
stream_samples = []
rstate = [curr_rstate.copy()]
for j in range(1, n_substreams):
rstate.append(rng_mrg.ff_2p72(rstate[-1]))
rstate = numpy.asarray(rstate).flatten()
# HACK - transfer these int32 to the GPU memory as float32
# (reinterpret_cast)
tmp_float_buf = numpy.frombuffer(rstate.data, dtype='float32')
# Transfer to device
rstate = float32_shared_constructor(tmp_float_buf)
new_rstate, sample = rng_mrg.GPU_mrg_uniform.new(rstate, ndim=None,
dtype='float32',
size=(n_substreams,))
rstate.default_update = new_rstate
# Not really necessary, just mimicking
# rng_mrg.MRG_RandomStreams' behavior
sample.rstate = rstate
sample.update = (rstate, new_rstate)
# We need the sample back in the main memory
cpu_sample = tensor.as_tensor_variable(sample)
f = theano.function([], cpu_sample, mode=mode)
for k in range(n_samples):
s = f()
stream_samples.append(s)
samples.append(numpy.array(stream_samples).T.flatten())
# next stream
curr_rstate = rng_mrg.ff_2p134(curr_rstate)
samples = numpy.array(samples).flatten()
assert(numpy.allclose(samples, java_samples))
def test_GPU_nstreams_limit():
"""
Verify that a ValueError is raised when n_streams
is greater than 2**20 on GPU. This is the value of
(NUM_VECTOR_OP_THREADS_PER_BLOCK * NUM_VECTOR_OP_BLOCKS).
"""
if not cuda_available:
raise SkipTest('Optional package cuda not available')
seed = 12345
R = MRG_RandomStreams(seed=seed, use_cuda=True)
def eval_uniform(size, nstreams):
if theano.config.mode == "FAST_COMPILE":
mode = "FAST_RUN"
else:
mode = copy.copy(theano.compile.get_default_mode())
mode.check_py_code = False
out = R.uniform(size=size, nstreams=nstreams, dtype='float32')
f = theano.function([], out, mode=mode)
return f()
eval_uniform((10,), 2**20)
assert_raises(ValueError, eval_uniform, (10,), 2**20 + 1)
def test_consistency_GPUA_serial():
"""
Verify that the random numbers generated by GPUA_mrg_uniform, serially,
are the same as the reference (Java) implementation by L'Ecuyer et al.
"""
from theano.sandbox.gpuarray.tests.test_basic_ops import \
mode_with_gpu as mode
from theano.sandbox.gpuarray.type import gpuarray_shared_constructor
seed = 12345
n_samples = 5
n_streams = 12
n_substreams = 7
samples = []
curr_rstate = numpy.array([seed] * 6, dtype='int32')
for i in range(n_streams):
stream_rstate = curr_rstate.copy()
for j in range(n_substreams):
substream_rstate = numpy.array([stream_rstate.copy()],
dtype='int32')
# Transfer to device
rstate = gpuarray_shared_constructor(substream_rstate)
new_rstate, sample = rng_mrg.GPUA_mrg_uniform.new(rstate,
ndim=None,
dtype='float32',
size=(1,))
rstate.default_update = new_rstate
# Not really necessary, just mimicking
# rng_mrg.MRG_RandomStreams' behavior
sample.rstate = rstate
sample.update = (rstate, new_rstate)
# We need the sample back in the main memory
cpu_sample = tensor.as_tensor_variable(sample)
f = theano.function([], cpu_sample, mode=mode)
for k in range(n_samples):
s = f()
samples.append(s)
# next substream
stream_rstate = rng_mrg.ff_2p72(stream_rstate)
# next stream
curr_rstate = rng_mrg.ff_2p134(curr_rstate)
samples = numpy.array(samples).flatten()
assert(numpy.allclose(samples, java_samples))
def test_consistency_GPUA_parallel():
"""
Verify that the random numbers generated by GPUA_mrg_uniform, in
parallel, are the same as the reference (Java) implementation by
L'Ecuyer et al.
"""
from theano.sandbox.gpuarray.tests.test_basic_ops import \
mode_with_gpu as mode
from theano.sandbox.gpuarray.type import gpuarray_shared_constructor
seed = 12345
n_samples = 5
n_streams = 12
n_substreams = 7 # 7 samples will be drawn in parallel
samples = []
curr_rstate = numpy.array([seed] * 6, dtype='int32')
for i in range(n_streams):
stream_samples = []
rstate = [curr_rstate.copy()]
for j in range(1, n_substreams):
rstate.append(rng_mrg.ff_2p72(rstate[-1]))
rstate = numpy.asarray(rstate)
rstate = gpuarray_shared_constructor(rstate)
new_rstate, sample = rng_mrg.GPUA_mrg_uniform.new(rstate, ndim=None,
dtype='float32',
size=(n_substreams,))
rstate.default_update = new_rstate
# Not really necessary, just mimicking
# rng_mrg.MRG_RandomStreams' behavior
sample.rstate = rstate
sample.update = (rstate, new_rstate)
# We need the sample back in the main memory
cpu_sample = tensor.as_tensor_variable(sample)
f = theano.function([], cpu_sample, mode=mode)
for k in range(n_samples):
s = f()
stream_samples.append(s)
samples.append(numpy.array(stream_samples).T.flatten())
# next stream
curr_rstate = rng_mrg.ff_2p134(curr_rstate)
samples = numpy.array(samples).flatten()
assert(numpy.allclose(samples, java_samples))
def basictest(f, steps, sample_size, prefix="", allow_01=False, inputs=None,
target_avg=0.5, target_std=None, mean_rtol=0.01, std_tol=0.01):
if inputs is None:
inputs = []
dt = 0.0
avg_var = 0.0
for i in xrange(steps):
t0 = time.time()
ival = f(*inputs)
assert ival.shape == sample_size
dt += time.time() - t0
ival = numpy.asarray(ival)
if i == 0:
mean = numpy.array(ival, copy=True)
avg_var = numpy.mean((ival - target_avg) ** 2)
min_ = ival.min()
max_ = ival.max()
else:
alpha = 1.0 / (1 + i)
mean = alpha * ival + (1 - alpha) * mean
avg_var = (alpha * numpy.mean((ival - target_avg) ** 2)
+ (1 - alpha) * avg_var)
min_ = min(min_, ival.min())
max_ = max(max_, ival.max())
if not allow_01:
assert min_ > 0
assert max_ < 1
    if hasattr(target_avg, 'shape'):  # check whether target_avg is an array
diff = numpy.mean(abs(mean - target_avg))
# print prefix, 'mean diff with mean', diff
assert numpy.all(diff < mean_rtol * (1 + abs(target_avg))), (
'bad mean? %s %s' % (mean, target_avg))
else: # if target_avg is a scalar, then we can do the mean of
# `mean` to get something more precise
mean = numpy.mean(mean)
# print prefix, 'mean', mean
assert abs(mean - target_avg) < mean_rtol * (1 + abs(target_avg)), (
'bad mean? %f %f' % (mean, target_avg))
std = numpy.sqrt(avg_var)
# print prefix, 'var', avg_var
# print prefix, 'std', std
if target_std is not None:
assert abs(std - target_std) < std_tol * (1 + abs(target_std)), (
'bad std? %f %f %f' % (std, target_std, std_tol))
# print prefix, 'time', dt
# print prefix, 'elements', steps * sample_size[0] * sample_size[1]
# print prefix, 'samples/sec', steps * sample_size[0] * sample_size[1] / dt
# print prefix, 'min', min_, 'max', max_
def test_uniform():
# TODO: test param low, high
# TODO: test size=None
# TODO: test ndim!=size.ndim
# TODO: test bad seed
# TODO: test size=Var, with shape that change from call to call
if (mode in ['DEBUG_MODE', 'DebugMode', 'FAST_COMPILE'] or
mode == 'Mode' and config.linker in ['py']):
sample_size = (10, 100)
steps = 50
else:
sample_size = (500, 50)
steps = int(1e3)
x = tensor.matrix()
for size, const_size, var_input, input in [
(sample_size, sample_size, [], []),
(x.shape, sample_size, [x],
[numpy.zeros(sample_size, dtype=config.floatX)]),
((x.shape[0], sample_size[1]), sample_size, [x],
[numpy.zeros(sample_size, dtype=config.floatX)]),
# test empty size (scalar)
((), (), [], []),
]:
#### TEST CPU IMPLEMENTATION ####
# The python and C implementation are tested with DebugMode
# print ''
# print 'ON CPU with size=(%s):' % str(size)
x = tensor.matrix()
R = MRG_RandomStreams(234, use_cuda=False)
# Note: we specify `nstreams` to avoid a warning.
# TODO Look for all occurrences of `guess_n_streams` and `30 * 256`
# for such situations: it would be better to instead filter the
# warning using the warning module.
u = R.uniform(size=size,
nstreams=rng_mrg.guess_n_streams(size, warn=False))
f = theano.function(var_input, u, mode=mode)
assert any([isinstance(node.op, theano.sandbox.rng_mrg.mrg_uniform)
for node in f.maker.fgraph.toposort()])
# theano.printing.debugprint(f)
cpu_out = f(*input)
# print 'CPU: random?[:10], random?[-10:]'
# print cpu_out[0, 0:10]
# print cpu_out[-1, -10:]
# Increase the number of steps if sizes implies only a few samples
if numpy.prod(const_size) < 10:
steps_ = steps * 100
else:
steps_ = steps
basictest(f, steps_, const_size, prefix='mrg cpu', inputs=input)
if mode != 'FAST_COMPILE' and cuda_available:
# print ''
# print 'ON GPU with size=(%s):' % str(size)
R = MRG_RandomStreams(234, use_cuda=True)
u = R.uniform(size=size, dtype='float32',
nstreams=rng_mrg.guess_n_streams(size, warn=False))
            # really, this test doesn't make sense on the GPU otherwise
assert u.dtype == 'float32'
f = theano.function(var_input, theano.Out(
theano.sandbox.cuda.basic_ops.gpu_from_host(u),
borrow=True), mode=mode_with_gpu)
assert any([isinstance(node.op,
theano.sandbox.rng_mrg.GPU_mrg_uniform)
for node in f.maker.fgraph.toposort()])
# theano.printing.debugprint(f)
gpu_out = numpy.asarray(f(*input))
# print 'GPU: random?[:10], random?[-10:]'
# print gpu_out[0, 0:10]
# print gpu_out[-1, -10:]
basictest(f, steps_, const_size, prefix='mrg gpu', inputs=input)
numpy.testing.assert_array_almost_equal(cpu_out, gpu_out,
decimal=6)
# print ''
# print 'ON CPU w Numpy with size=(%s):' % str(size)
RR = theano.tensor.shared_randomstreams.RandomStreams(234)
uu = RR.uniform(size=size)
ff = theano.function(var_input, uu, mode=mode)
# It's not our problem if numpy generates 0 or 1
basictest(ff, steps_, const_size, prefix='numpy',
allow_01=True, inputs=input)
@attr('slow')
def test_binomial():
# TODO: test size=None, ndim=X
# TODO: test size=X, ndim!=X.ndim
# TODO: test random seed in legal value(!=0 and other)
# TODO: test sample_size not a multiple of guessed #streams
# TODO: test size=Var, with shape that change from call to call
    # We test `size` both as a tuple of ints and as a tensor.shape.
    # We test the parameter `p` with scalar values.
if (mode in ['DEBUG_MODE', 'DebugMode', 'FAST_COMPILE'] or
mode == 'Mode' and config.linker in ['py']):
sample_size = (10, 50)
steps = 50
rtol = 0.02
else:
sample_size = (500, 50)
steps = int(1e3)
rtol = 0.01
x = tensor.matrix()
v = tensor.vector()
for mean in [0.1, 0.5]:
for size, const_size, var_input, input in [
(sample_size, sample_size, [], []),
(x.shape, sample_size, [x],
[numpy.zeros(sample_size, dtype=config.floatX)]),
((x.shape[0], sample_size[1]), sample_size, [x],
[numpy.zeros(sample_size, dtype=config.floatX)]),
# test empty size (scalar)
((), (), [], []),
]:
yield (t_binomial, mean, size, const_size, var_input, input,
steps, rtol)
def t_binomial(mean, size, const_size, var_input, input, steps, rtol):
R = MRG_RandomStreams(234, use_cuda=False)
u = R.binomial(size=size, p=mean)
f = theano.function(var_input, u, mode=mode)
out = f(*input)
    # Increase the number of steps if the size implies only a few samples
if numpy.prod(const_size) < 10:
steps_ = steps * 100
else:
steps_ = steps
basictest(f, steps_, const_size, prefix='mrg cpu',
inputs=input, allow_01=True,
target_avg=mean, mean_rtol=rtol)
if mode != 'FAST_COMPILE' and cuda_available:
R = MRG_RandomStreams(234, use_cuda=True)
u = R.binomial(size=size, p=mean, dtype='float32')
        # well, it's really that this test with the GPU doesn't make sense otherwise
assert u.dtype == 'float32'
f = theano.function(var_input, theano.Out(
theano.sandbox.cuda.basic_ops.gpu_from_host(u),
borrow=True), mode=mode_with_gpu)
gpu_out = numpy.asarray(f(*input))
basictest(f, steps_, const_size, prefix='mrg gpu',
inputs=input, allow_01=True,
target_avg=mean, mean_rtol=rtol)
numpy.testing.assert_array_almost_equal(out, gpu_out,
decimal=6)
RR = theano.tensor.shared_randomstreams.RandomStreams(234)
uu = RR.binomial(size=size, p=mean)
ff = theano.function(var_input, uu, mode=mode)
# It's not our problem if numpy generates 0 or 1
basictest(ff, steps_, const_size, prefix='numpy', allow_01=True,
inputs=input, target_avg=mean, mean_rtol=rtol)
@attr('slow')
def test_normal0():
steps = 50
std = 2.
if (mode in ['DEBUG_MODE', 'DebugMode', 'FAST_COMPILE'] or
mode == 'Mode' and config.linker in ['py']):
sample_size = (25, 30)
default_rtol = .02
else:
sample_size = (999, 50)
default_rtol = .01
sample_size_odd = (sample_size[0], sample_size[1] - 1)
x = tensor.matrix()
for size, const_size, var_input, input, avg, rtol, std_tol in [
(sample_size, sample_size, [], [], -5., default_rtol, default_rtol),
(x.shape, sample_size, [x],
[numpy.zeros(sample_size, dtype=config.floatX)],
-5., default_rtol, default_rtol),
((x.shape[0], sample_size[1]), sample_size, [x],
[numpy.zeros(sample_size, dtype=config.floatX)],
-5., default_rtol, default_rtol),
# test odd value
(sample_size_odd, sample_size_odd, [], [], -5.,
default_rtol, default_rtol),
# test odd value
(x.shape, sample_size_odd, [x],
[numpy.zeros(sample_size_odd, dtype=config.floatX)],
-5., default_rtol, default_rtol),
(sample_size, sample_size, [], [],
numpy.arange(numpy.prod(sample_size),
dtype='float32').reshape(sample_size),
10. * std / numpy.sqrt(steps), default_rtol),
# test empty size (scalar)
((), (), [], [], -5., default_rtol, 0.02),
# test with few samples at the same time
((1,), (1,), [], [], -5., default_rtol, 0.02),
((2,), (2,), [], [], -5., default_rtol, 0.02),
((3,), (3,), [], [], -5., default_rtol, 0.02),
]:
# print ''
# print 'ON CPU:'
R = MRG_RandomStreams(234, use_cuda=False)
# Note: we specify `nstreams` to avoid a warning.
n = R.normal(size=size, avg=avg, std=std,
nstreams=rng_mrg.guess_n_streams(size, warn=False))
f = theano.function(var_input, n, mode=mode)
# theano.printing.debugprint(f)
out = f(*input)
# print 'random?[:10]\n', out[0, 0:10]
# Increase the number of steps if size implies only a few samples
if numpy.prod(const_size) < 10:
steps_ = steps * 50
else:
steps_ = steps
basictest(f, steps_, const_size, target_avg=avg, target_std=std,
prefix='mrg ', allow_01=True, inputs=input,
mean_rtol=rtol, std_tol=std_tol)
sys.stdout.flush()
if mode != 'FAST_COMPILE' and cuda_available:
# print ''
# print 'ON GPU:'
R = MRG_RandomStreams(234, use_cuda=True)
n = R.normal(size=size, avg=avg, std=std, dtype='float32',
nstreams=rng_mrg.guess_n_streams(size, warn=False))
            # well, it's really that this test with the GPU doesn't make sense otherwise
assert n.dtype == 'float32'
f = theano.function(var_input, theano.Out(
theano.sandbox.cuda.basic_ops.gpu_from_host(n),
borrow=True), mode=mode_with_gpu)
# theano.printing.debugprint(f)
sys.stdout.flush()
gpu_out = numpy.asarray(f(*input))
# print 'random?[:10]\n', gpu_out[0, 0:10]
# print '----'
sys.stdout.flush()
basictest(f, steps_, const_size, target_avg=avg, target_std=std,
prefix='gpu mrg ', allow_01=True, inputs=input,
mean_rtol=rtol, std_tol=std_tol)
            # Need to allow some rounding error, as there are float
            # computations that differ between the GPU and the CPU.
assert numpy.allclose(out, gpu_out, rtol=5e-6, atol=5e-6)
# print ''
# print 'ON CPU w NUMPY:'
RR = theano.tensor.shared_randomstreams.RandomStreams(234)
nn = RR.normal(size=size, avg=avg, std=std)
ff = theano.function(var_input, nn)
basictest(ff, steps_, const_size, target_avg=avg, target_std=std,
prefix='numpy ', allow_01=True, inputs=input, mean_rtol=rtol)
def basic_multinomialtest(f, steps, sample_size, target_pvals,
prefix="", mean_rtol=0.04):
dt = 0.0
avg_pvals = numpy.zeros(target_pvals.shape, dtype=config.floatX)
for i in xrange(steps):
t0 = time.time()
ival = f()
assert ival.shape == sample_size
dt += time.time() - t0
#ival = numpy.asarray(ival)
avg_pvals += ival
avg_pvals /= steps
print('random?[:10]\n', numpy.asarray(f()[:10]))
print(prefix, 'mean', avg_pvals)
# < mean_rtol, 'bad mean? %s %s' % (str(avg_pvals), str(target_pvals))
print(numpy.mean(abs(avg_pvals - target_pvals)))
print(prefix, 'time', dt)
print(prefix, 'elements', steps * numpy.prod(target_pvals.shape))
print(prefix, 'samples/sec', steps * numpy.prod(target_pvals.shape) / dt)
def test_multinomial():
steps = 100
mode_ = mode
if mode == 'FAST_COMPILE':
mode_ = 'FAST_RUN'
if (mode in ['DEBUG_MODE', 'DebugMode', 'FAST_COMPILE'] or
mode == 'Mode' and config.linker in ['py']):
sample_size = (49, 5)
else:
sample_size = (450, 6)
mode_ = theano.compile.mode.get_mode(mode_)
# print ''
# print 'ON CPU:'
pvals = numpy.asarray(numpy.random.uniform(size=sample_size))
pvals = numpy.apply_along_axis(lambda row: row / numpy.sum(row), 1, pvals)
R = MRG_RandomStreams(234, use_cuda=False)
# Note: we specify `nstreams` to avoid a warning.
m = R.multinomial(pvals=pvals, dtype=config.floatX, nstreams=30 * 256)
f = theano.function([], m, mode=mode_)
# theano.printing.debugprint(f)
out = f()
basic_multinomialtest(f, steps, sample_size, pvals, prefix='mrg ')
sys.stdout.flush()
if mode != 'FAST_COMPILE' and cuda_available:
# print ''
# print 'ON GPU:'
R = MRG_RandomStreams(234, use_cuda=True)
pvals = numpy.asarray(pvals, dtype='float32')
# We give the number of streams to avoid a warning.
n = R.multinomial(pvals=pvals, dtype='float32', nstreams=30 * 256)
        # well, it's really that this test with the GPU doesn't make sense otherwise
assert n.dtype == 'float32'
f = theano.function(
[],
theano.sandbox.cuda.basic_ops.gpu_from_host(n),
mode=mode_.including('gpu'))
# theano.printing.debugprint(f)
gpu_out = f()
sys.stdout.flush()
basic_multinomialtest(f, steps, sample_size, pvals, prefix='gpu mrg ')
numpy.testing.assert_array_almost_equal(out, gpu_out, decimal=6)
class T_MRG(unittest.TestCase):
def test_bad_size(self):
R = MRG_RandomStreams(234, use_cuda=False)
for size in [
(0, 100),
(-1, 100),
(1, 0),
]:
self.assertRaises(ValueError, R.uniform, size)
self.assertRaises(ValueError, R.binomial, size)
self.assertRaises(ValueError, R.multinomial, size, 1, [])
self.assertRaises(ValueError, R.normal, size)
def test_multiple_rng_aliasing():
"""
Test that when we have multiple random number generators, we do not alias
the state_updates member. `state_updates` can be useful when attempting to
copy the (random) state between two similar theano graphs. The test is
    meant to detect a previous bug where state_updates was initialized as a
    class attribute instead of in the __init__ function.
"""
rng1 = MRG_RandomStreams(1234)
rng2 = MRG_RandomStreams(2392)
assert rng1.state_updates is not rng2.state_updates
def test_random_state_transfer():
"""
Test that random state can be transferred from one theano graph to another.
"""
class Graph:
def __init__(self, seed=123):
self.rng = MRG_RandomStreams(seed)
self.y = self.rng.uniform(size=(1,))
g1 = Graph(seed=123)
f1 = theano.function([], g1.y)
g2 = Graph(seed=987)
f2 = theano.function([], g2.y)
g2.rng.rstate = g1.rng.rstate
for (su1, su2) in zip(g1.rng.state_updates, g2.rng.state_updates):
su2[0].set_value(su1[0].get_value())
numpy.testing.assert_array_almost_equal(f1(), f2(), decimal=6)
def test_gradient_scan():
# Test for a crash when using MRG inside scan and taking the gradient
# See https://groups.google.com/d/msg/theano-dev/UbcYyU5m-M8/UO9UgXqnQP0J
theano_rng = MRG_RandomStreams(10)
w = theano.shared(numpy.ones(1, dtype='float32'))
def one_step(x):
return x + theano_rng.uniform((1,), dtype='float32') * w
x = tensor.vector(dtype='float32')
values, updates = theano.scan(one_step, outputs_info=x, n_steps=10)
gw = theano.grad(tensor.sum(values[-1]), w)
f = theano.function([x], gw)
f(numpy.arange(1, dtype='float32'))
def test_multMatVect():
A1 = tensor.lmatrix('A1')
s1 = tensor.ivector('s1')
m1 = tensor.iscalar('m1')
A2 = tensor.lmatrix('A2')
s2 = tensor.ivector('s2')
m2 = tensor.iscalar('m2')
g0 = rng_mrg.DotModulo()(A1, s1, m1, A2, s2, m2)
f0 = theano.function([A1, s1, m1, A2, s2, m2], g0)
i32max = numpy.iinfo(numpy.int32).max
A1 = numpy.random.randint(0, i32max, (3, 3)).astype('int64')
s1 = numpy.random.randint(0, i32max, 3).astype('int32')
m1 = numpy.asarray(numpy.random.randint(i32max), dtype="int32")
A2 = numpy.random.randint(0, i32max, (3, 3)).astype('int64')
s2 = numpy.random.randint(0, i32max, 3).astype('int32')
m2 = numpy.asarray(numpy.random.randint(i32max), dtype="int32")
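    # Bypass the usual __call__ path: fill the compiled function's input
    # containers by hand, then invoke the underlying thunk via f0.fn() below.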
f0.input_storage[0].storage[0] = A1
f0.input_storage[1].storage[0] = s1
f0.input_storage[2].storage[0] = m1
f0.input_storage[3].storage[0] = A2
f0.input_storage[4].storage[0] = s2
f0.input_storage[5].storage[0] = m2
r_a1 = rng_mrg.matVecModM(A1, s1, m1)
r_a2 = rng_mrg.matVecModM(A2, s2, m2)
f0.fn()
r_b = f0.output_storage[0].value
assert numpy.allclose(r_a1, r_b[:3])
assert numpy.allclose(r_a2, r_b[3:])
def test_seed_fn():
test_use_cuda = [False]
if cuda_available:
test_use_cuda.append(True)
idx = tensor.ivector()
for use_cuda in test_use_cuda:
if config.mode == 'FAST_COMPILE' and use_cuda:
mode = 'FAST_RUN'
else:
mode = config.mode
for new_seed, same in [(234, True), (None, True), (23, False)]:
random = MRG_RandomStreams(234, use_cuda=use_cuda)
fn1 = theano.function([], random.uniform((2, 2), dtype='float32'),
mode=mode)
fn2 = theano.function([], random.uniform((3, 3), nstreams=2,
dtype='float32'),
mode=mode)
fn3 = theano.function([idx],
random.uniform(idx, nstreams=3, ndim=1,
dtype='float32'),
mode=mode)
fn1_val0 = fn1()
fn1_val1 = fn1()
assert not numpy.allclose(fn1_val0, fn1_val1)
fn2_val0 = fn2()
fn2_val1 = fn2()
assert not numpy.allclose(fn2_val0, fn2_val1)
fn3_val0 = fn3([4])
fn3_val1 = fn3([4])
assert not numpy.allclose(fn3_val0, fn3_val1)
assert fn1_val0.size == 4
assert fn2_val0.size == 9
random.seed(new_seed)
fn1_val2 = fn1()
fn1_val3 = fn1()
fn2_val2 = fn2()
fn2_val3 = fn2()
fn3_val2 = fn3([4])
fn3_val3 = fn3([4])
assert numpy.allclose(fn1_val0, fn1_val2) == same
assert numpy.allclose(fn1_val1, fn1_val3) == same
assert numpy.allclose(fn2_val0, fn2_val2) == same
assert numpy.allclose(fn2_val1, fn2_val3) == same
assert numpy.allclose(fn3_val0, fn3_val2) == same
assert numpy.allclose(fn3_val1, fn3_val3) == same
if __name__ == "__main__":
rng = MRG_RandomStreams(numpy.random.randint(2147462579))
import time
print(theano.__file__)
pvals = theano.tensor.fmatrix()
for i in range(10):
t0 = time.time()
multinomial = rng.multinomial(pvals=pvals)
print(time.time() - t0)
|
{
"content_hash": "2ba8d083669993ddbed24d843ad009d2",
"timestamp": "",
"source": "github",
"line_count": 1011,
"max_line_length": 79,
"avg_line_length": 35.56478733926805,
"alnum_prop": 0.5646345533429747,
"repo_name": "cmdunkers/DeeperMind",
"id": "ee064447cf6a382182dff9c1d91d459641ec04fc",
"size": "35956",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PythonEnv/lib/python2.7/site-packages/theano/sandbox/tests/test_rng_mrg.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "653032"
},
{
"name": "C++",
"bytes": "3354338"
},
{
"name": "Cuda",
"bytes": "538188"
},
{
"name": "FORTRAN",
"bytes": "10375"
},
{
"name": "HTML",
"bytes": "124328"
},
{
"name": "Makefile",
"bytes": "214"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "22186197"
},
{
"name": "Shell",
"bytes": "4377"
}
],
"symlink_target": ""
}
|
from trac.ticket.model import Ticket
from trac.core import Component, implements, TracError, ExtensionPoint
from trac.perm import IPermissionPolicy, IPermissionGroupProvider, PermissionSystem
from trac.util import as_bool
class SecretCheckboxTicketPolicy(Component):
implements(IPermissionPolicy)
def check_permission(self, action, user, res, perm):
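        # Trac permission policies may return True (allow), False (deny), or
        # None to abstain and let the next configured policy decide.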
if 'TICKET_VIEW' != action:
return None
while res:
if 'ticket' == res.realm:
break
res = res.parent
if res and res.id and 'ticket' == res.realm:
return self.check_ticket_access(perm, res)
def check_ticket_access(self, perm, res):
try:
if 'TRAC_ADMIN' in perm:
return None
ticket = Ticket(self.env, res.id)
            if not as_bool(ticket['secret']):
return None
username = perm.username.lower()
if ticket['reporter'].lower() == username:
return None
if ticket['owner'].lower() == username:
return None
cc_list = [cc.strip().lower() for cc in ticket['cc'].split(',')]
if username in cc_list:
return None
return False
except TracError as e:
self.log.error(e.message)
|
{
"content_hash": "18d879bddb04da847c7a32d0a2b77a32",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 83,
"avg_line_length": 31.069767441860463,
"alnum_prop": 0.5718562874251497,
"repo_name": "104corp/trac-secret-checkbox-ticket",
"id": "1679068a5c17dadbee28bfed9e2f91a72a54ac34",
"size": "1578",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "secretcheckboxticket/policy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "2818"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_virtualmachineextension_info
version_added: "2.9"
short_description: Get Azure Virtual Machine Extension facts
description:
- Get facts of Azure Virtual Machine Extension.
options:
resource_group:
description:
- The name of the resource group.
required: True
virtual_machine_name:
description:
- The name of the virtual machine containing the extension.
required: True
name:
description:
- The name of the virtual machine extension.
tags:
description:
- Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
extends_documentation_fragment:
- azure
author:
- Zim Kalinowski (@zikalino)
'''
EXAMPLES = '''
- name: Get information on specific Virtual Machine Extension
azure_rm_virtualmachineextension_info:
resource_group: myResourceGroup
virtual_machine_name: myvm
name: myextension
- name: List installed Virtual Machine Extensions
azure_rm_virtualmachineextension_info:
resource_group: myResourceGroup
virtual_machine_name: myvm
'''
RETURN = '''
extensions:
description:
- A list of dictionaries containing facts for Virtual Machine Extension.
returned: always
type: complex
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Compute/myvm/testVM/extens
ions/myextension"
resource_group:
description:
- Resource group name.
returned: always
type: str
sample: myResourceGroup
virtual_machine_name:
description:
- Virtual machine name.
returned: always
type: str
sample: myvm
name:
description:
- Virtual machine name.
returned: always
type: str
sample: myextension
location:
description:
- The resource location.
returned: always
type: str
sample: eastus
publisher:
description:
- Extension publisher.
returned: always
type: str
sample: Microsoft.Azure.Extensions
type:
description:
- Extension type.
returned: always
type: str
sample: CustomScript
settings:
description:
- Extension specific settings dictionary.
returned: always
type: dict
sample: { 'commandToExecute':'hostname' }
auto_upgrade_minor_version:
description:
- Autoupgrade minor version flag.
returned: always
type: bool
sample: true
tags:
description:
- Resource tags.
returned: always
type: dict
sample: { "mytag":"abc" }
provisioning_state:
description:
- Provisioning state of the extension.
returned: always
type: str
sample: Succeeded
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMVirtualMachineExtensionInfo(AzureRMModuleBase):
def __init__(self):
# define user inputs into argument
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
virtual_machine_name=dict(
type='str',
required=True
),
name=dict(
type='str'
),
tags=dict(
type='list'
)
)
# store the results of the module operation
self.results = dict(
changed=False
)
self.resource_group = None
self.virtual_machine_name = None
self.name = None
self.tags = None
super(AzureRMVirtualMachineExtensionInfo, self).__init__(self.module_arg_spec, supports_tags=False)
def exec_module(self, **kwargs):
is_old_facts = self.module._name == 'azure_rm_virtualmachineextension_facts'
if is_old_facts:
self.module.deprecate("The 'azure_rm_virtualmachineextension_facts' module has been renamed to 'azure_rm_virtualmachineextension_info'",
version='2.13')
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
if self.name is not None:
self.results['extensions'] = self.get_extensions()
else:
self.results['extensions'] = self.list_extensions()
return self.results
def get_extensions(self):
response = None
results = []
try:
response = self.compute_client.virtual_machine_extensions.get(resource_group_name=self.resource_group,
vm_name=self.virtual_machine_name,
vm_extension_name=self.name)
self.log("Response : {0}".format(response))
except CloudError as e:
self.log('Could not get facts for Virtual Machine Extension.')
if response and self.has_tags(response.tags, self.tags):
results.append(self.format_response(response))
return results
def list_extensions(self):
response = None
results = []
try:
response = self.compute_client.virtual_machine_extensions.list(resource_group_name=self.resource_group,
vm_name=self.virtual_machine_name)
self.log("Response : {0}".format(response))
except CloudError as e:
self.log('Could not get facts for Virtual Machine Extension.')
if response is not None and response.value is not None:
for item in response.value:
if self.has_tags(item.tags, self.tags):
results.append(self.format_response(item))
return results
def format_response(self, item):
d = item.as_dict()
d = {
'id': d.get('id', None),
'resource_group': self.resource_group,
'virtual_machine_name': self.virtual_machine_name,
'location': d.get('location'),
'name': d.get('name'),
'publisher': d.get('publisher'),
'type': d.get('virtual_machine_extension_type'),
'settings': d.get('settings'),
'auto_upgrade_minor_version': d.get('auto_upgrade_minor_version'),
'tags': d.get('tags', None),
'provisioning_state': d.get('provisioning_state')
}
return d
def main():
AzureRMVirtualMachineExtensionInfo()
if __name__ == '__main__':
main()
|
{
"content_hash": "7a4de187b82e72427316baf8b703026a",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 150,
"avg_line_length": 31.483471074380166,
"alnum_prop": 0.5529597059981625,
"repo_name": "thaim/ansible",
"id": "de9d7975c20466abffa39c0f7fe4bfcad75abb6d",
"size": "7784",
"binary": false,
"copies": "19",
"ref": "refs/heads/fix-broken-link",
"path": "lib/ansible/modules/cloud/azure/azure_rm_virtualmachineextension_info.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
}
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'SurveyQuestionResponse.positive_response'
db.add_column(u'survey_surveyquestionresponse', 'positive_response',
self.gf('django.db.models.fields.NullBooleanField')(default=None, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'SurveyQuestionResponse.positive_response'
db.delete_column(u'survey_surveyquestionresponse', 'positive_response')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'clinics.clinic': {
'Meta': {'object_name': 'Clinic'},
'code': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lga': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'lga_rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'pbf_rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'town': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'ward': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'clinics.clinicstaff': {
'Meta': {'object_name': 'ClinicStaff'},
'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']"}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['rapidsms.Contact']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_manager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'staff_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'year_started': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'})
},
u'clinics.patient': {
'Meta': {'unique_together': "[('clinic', 'serial')]", 'object_name': 'Patient'},
'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '11', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'serial': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'clinics.service': {
'Meta': {'object_name': 'Service'},
'code': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
u'clinics.visit': {
'Meta': {'object_name': 'Visit'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '11', 'blank': 'True'}),
'patient': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Patient']"}),
'satisfied': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'sender': ('django.db.models.fields.CharField', [], {'max_length': '11', 'blank': 'True'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Service']", 'null': 'True', 'blank': 'True'}),
'staff': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.ClinicStaff']", 'null': 'True', 'blank': 'True'}),
'survey_completed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'survey_sent': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'survey_started': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'visit_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'welcome_sent': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'rapidsms.contact': {
'Meta': {'object_name': 'Contact'},
'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'survey.displaylabel': {
'Meta': {'object_name': 'DisplayLabel'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'survey.survey': {
'Meta': {'object_name': 'Survey'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'flow_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'max_length': '32'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
u'survey.surveyquestion': {
'Meta': {'unique_together': "[('survey', 'label')]", 'object_name': 'SurveyQuestion'},
'categories': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'display_label': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.DisplayLabel']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'last_negative': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'question': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'question_id': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'question_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'survey': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Survey']"})
},
u'survey.surveyquestionresponse': {
'Meta': {'unique_together': "[('visit', 'question')]", 'object_name': 'SurveyQuestionResponse'},
'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'display_on_dashboard': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'positive_response': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.SurveyQuestion']"}),
'response': ('django.db.models.fields.TextField', [], {}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Service']", 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'visit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Visit']", 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['survey']
|
{
"content_hash": "90d329f4d62772388810abddd6d12306",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 195,
"avg_line_length": 78.03030303030303,
"alnum_prop": 0.5511456310679612,
"repo_name": "myvoice-nigeria/myvoice",
"id": "dada04aa4ce6d6f6e64d8a9a4b9dce4006a272e7",
"size": "12899",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "myvoice/survey/migrations/0013_auto__add_field_surveyquestionresponse_positive_response.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "82690"
},
{
"name": "Elixir",
"bytes": "391"
},
{
"name": "HTML",
"bytes": "79449"
},
{
"name": "JavaScript",
"bytes": "683534"
},
{
"name": "Python",
"bytes": "1279764"
},
{
"name": "Scheme",
"bytes": "3876"
},
{
"name": "Shell",
"bytes": "1263"
}
],
"symlink_target": ""
}
|
"""
command_line_runner.py
Created by Scott on 2014-08-14.
Copyright (c) 2014 Scott Rice. All rights reserved.
"""
import argparse
from pysteam.steam import get_steam, Steam
import tasks
from ice import decorators
from ice import debug
from ice import settings
from ice.logs import logger
from ice.filesystem import RealFilesystem
from ice.tasks import TaskEngine
def handle_exception(e, fatal):
# Just log it
if fatal:
logger.exception("An exception occurred while running Ice")
else:
logger.error(e.message)
class CommandLineRunner(object):
def __init__(self, steam=None, filesystem=None):
self.steam = steam
self.filesystem = RealFilesystem() if filesystem is None else filesystem
def get_command_line_args(self, argv):
parser = argparse.ArgumentParser()
parser.add_argument('pdebug', type=bool, nargs='?', help="Pastes debug logs to pastebin to include with bug reports.")
parser.add_argument('--skip-steam-check', action='store_true', help="Skips checking whether Steam is running")
parser.add_argument('--launch-steam', action='store_true', help="Launches Steam after the shortcuts have been synced and its safe to do so")
# Config options
parser.add_argument('-c', '--config', type=str, default=None)
parser.add_argument('-C', '--consoles', type=str, default=None)
parser.add_argument('-e', '--emulators', type=str, default=None)
# Debugging options
parser.add_argument('-d', '--dry-run', action='store_true')
return parser.parse_args(argv)
def should_use_user_override(self, override):
if override is None:
return False
if override == "":
return False
if not self.filesystem.path_exists(override):
logger.warning("config.txt specifies a Steam userdata directory that doesn't exist. Ignoring.")
return False
        return True  # all checks passed: honor the user override
def get_steam(self, config):
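        # Resolution order: a valid user override from the config wins, then a
        # Steam instance injected at construction time (if any), then
        # autodetection via pysteam's get_steam().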
override = config.userdata_directory
if self.should_use_user_override(override):
return Steam(override)
if self.steam is not None:
return self.steam
return get_steam()
@decorators.catch_exceptions(handle_exception)
def run(self, argv):
opts = self.get_command_line_args(argv[1:])
if opts.pdebug is True:
debug.paste_debug_logs()
return
task_coordinator = tasks.TaskCoordinator(self.filesystem)
app_settings = settings.load_app_settings(self.filesystem, file_overrides = {
'config.txt': opts.config,
'consoles.txt': opts.consoles,
'emulators.txt': opts.emulators,
})
engine = TaskEngine(
self.get_steam(app_settings.config),
)
tasks_to_run = task_coordinator.tasks_for_options(
launch_steam = opts.launch_steam,
skip_steam_check = opts.skip_steam_check,
)
engine.run(
tasks = tasks_to_run,
app_settings = app_settings,
dry_run=opts.dry_run
)
|
{
"content_hash": "c78042c343db84d363313452ee66d8a4",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 144,
"avg_line_length": 30,
"alnum_prop": 0.6895833333333333,
"repo_name": "scottrice/Ice",
"id": "c431470d4df534dfb99adef875135f74440cbad6",
"size": "2880",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ice/cli/runner.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "140642"
}
],
"symlink_target": ""
}
|
'''
Created on 08.02.2016.
@author: Lazar
'''
from textx.exceptions import TextXSemanticError
def selector_object_processor(selector_object):
if selector_object.property not in selector_object.object.properties:
line, col = selector_object._tx_metamodel.parser.pos_to_linecol(
selector_object._tx_position)
raise TextXSemanticError("ERROR: (at %d, %d) Object %s has no property named %s." %
(line, col, selector_object.object.name, selector_object.property.name))
else:
return True
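# Processors like the one above are hooked onto the textX metamodel after it
# is built, e.g. metamodel.register_obj_processors(
#     {'SelectorObject': selector_object_processor})  # rule name illustrative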
class SelectorObject(object):
    '''
    Pairs a referenced model object with one of its properties, as selected
    inside a query expression; supports the visitor protocol via accept().
    '''
def __init__(self, object, property, parent, query=None):
self.object = object
self.property = property
self.query = query
self.parent = parent
self.name = object.obj_name
def accept(self, visitor):
return visitor.visit_selector_object(self.object, self.property)
|
{
"content_hash": "3af41523d670b8a943e81ede1904bdeb",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 105,
"avg_line_length": 30.70967741935484,
"alnum_prop": 0.6386554621848739,
"repo_name": "theshammy/GenAn",
"id": "14a8db1d6581a44cb18ff8a6d176ccd91bc29b5d",
"size": "952",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/concepts/selector_object.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2122"
},
{
"name": "HTML",
"bytes": "19036"
},
{
"name": "JavaScript",
"bytes": "15499"
},
{
"name": "Python",
"bytes": "62881"
}
],
"symlink_target": ""
}
|
"""Registers functions to be called if an exception or signal occurs."""
import functools
import logging
import signal
import traceback
from types import TracebackType
from typing import Any
from typing import Callable
from typing import Dict
from typing import List
from typing import Optional
from typing import Type
from typing import Union
from certbot import errors
from certbot.compat import os
logger = logging.getLogger(__name__)
# _SIGNALS stores the signals that will be handled by the ErrorHandler. These
# signals were chosen as their default handler terminates the process and could
# potentially occur from inside Python. Signals such as SIGILL were not
# included as they could be a sign of something devious and we should terminate
# immediately.
if os.name != "nt":
_SIGNALS = [signal.SIGTERM]
for signal_code in [signal.SIGHUP, signal.SIGQUIT,
signal.SIGXCPU, signal.SIGXFSZ]:
        # Add only those signals whose default action is not to ignore them.
        # This is platform-dependent, so we check it dynamically.
if signal.getsignal(signal_code) != signal.SIG_IGN:
_SIGNALS.append(signal_code)
else:
    # POSIX signals are not implemented natively in Windows, but are emulated
    # by the C runtime. As consumed by CPython, most handlers for these signals
    # are useless, in particular SIGTERM: for instance, os.kill(pid, signal.SIGTERM)
    # calls TerminateProcess, which stops the process immediately without calling
    # the attached handler. Besides, the non-POSIX signals (CTRL_C_EVENT and
    # CTRL_BREAK_EVENT) are implemented in a console context to deliver the CTRL+C
    # event to a process launched from the console. Only CTRL_C_EVENT behaves
    # reliably in fact, and it maps to the SIGINT handler. However, in that case a
    # KeyboardInterrupt is raised, which is handled by ErrorHandler through the
    # context manager protocol. Finally, no signal on Windows is eligible to be
    # handled using ErrorHandler.
#
# Refs: https://stackoverflow.com/a/35792192, https://maruel.ca/post/python_windows_signal,
# https://docs.python.org/2/library/os.html#os.kill,
# https://www.reddit.com/r/Python/comments/1dsblt/windows_command_line_automation_ctrlc_question
_SIGNALS = []
class ErrorHandler:
"""Context manager for running code that must be cleaned up on failure.
The context manager allows you to register functions that will be called
when an exception (excluding SystemExit) or signal is encountered.
Usage::
handler = ErrorHandler(cleanup1_func, *cleanup1_args, **cleanup1_kwargs)
handler.register(cleanup2_func, *cleanup2_args, **cleanup2_kwargs)
with handler:
do_something()
Or for one cleanup function::
with ErrorHandler(func, args, kwargs):
do_something()
If an exception is raised out of do_something, the cleanup functions will
be called in last in first out order. Then the exception is raised.
Similarly, if a signal is encountered, the cleanup functions are called
followed by the previously received signal handler.
Each registered cleanup function is called exactly once. If a registered
function raises an exception, it is logged and the next function is called.
Signals received while the registered functions are executing are
deferred until they finish.
"""
def __init__(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None:
self.call_on_regular_exit = False
self.body_executed = False
self.funcs: List[Callable[[], Any]] = []
self.prev_handlers: Dict[int, Union[int, None, Callable]] = {}
self.received_signals: List[int] = []
if func is not None:
self.register(func, *args, **kwargs)
def __enter__(self) -> None:
self.body_executed = False
self._set_signal_handlers()
def __exit__(self, exec_type: Optional[Type[BaseException]],
exec_value: Optional[BaseException],
trace: Optional[TracebackType]) -> bool:
self.body_executed = True
retval = False
# SystemExit is ignored to properly handle forks that don't exec
if exec_type is SystemExit:
return retval
if exec_type is None:
if not self.call_on_regular_exit:
return retval
elif exec_type is errors.SignalExit:
logger.debug("Encountered signals: %s", self.received_signals)
retval = True
else:
logger.debug("Encountered exception:\n%s", "".join(
traceback.format_exception(exec_type, exec_value, trace)))
self._call_registered()
self._reset_signal_handlers()
self._call_signals()
return retval
def register(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None:
"""Sets func to be run with the given arguments during cleanup.
:param function func: function to be called in case of an error
"""
self.funcs.append(functools.partial(func, *args, **kwargs))
def _call_registered(self) -> None:
"""Calls all registered functions"""
logger.debug("Calling registered functions")
while self.funcs:
try:
self.funcs[-1]()
except Exception as exc: # pylint: disable=broad-except
output = traceback.format_exception_only(type(exc), exc)
logger.error("Encountered exception during recovery: %s",
''.join(output).rstrip())
self.funcs.pop()
def _set_signal_handlers(self) -> None:
"""Sets signal handlers for signals in _SIGNALS."""
for signum in _SIGNALS:
prev_handler = signal.getsignal(signum)
# If prev_handler is None, the handler was set outside of Python
if prev_handler is not None:
self.prev_handlers[signum] = prev_handler
signal.signal(signum, self._signal_handler)
def _reset_signal_handlers(self) -> None:
"""Resets signal handlers for signals in _SIGNALS."""
for signum, handler in self.prev_handlers.items():
signal.signal(signum, handler)
self.prev_handlers.clear()
def _signal_handler(self, signum: int, unused_frame: Any) -> None:
"""Replacement function for handling received signals.
Store the received signal. If we are executing the code block in
the body of the context manager, stop by raising signal exit.
:param int signum: number of current signal
"""
self.received_signals.append(signum)
if not self.body_executed:
raise errors.SignalExit
def _call_signals(self) -> None:
"""Finally call the deferred signals."""
for signum in self.received_signals:
logger.debug("Calling signal %s", signum)
os.kill(os.getpid(), signum)
class ExitHandler(ErrorHandler):
"""Context manager for running code that must be cleaned up.
Subclass of ErrorHandler, with the same usage and parameters.
In addition to cleaning up on all signals, also cleans up on
regular exit.
"""
def __init__(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None:
super().__init__(func, *args, **kwargs)
self.call_on_regular_exit = True
|
{
"content_hash": "9b96fca3403636d160d91f40cd62d757",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 100,
"avg_line_length": 41.4,
"alnum_prop": 0.6600912506709609,
"repo_name": "lmcro/letsencrypt",
"id": "24ab46d9d19b583937dc5bdd7bdad6d44b9bdcf9",
"size": "7452",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "certbot/certbot/_internal/error_handler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "14147"
},
{
"name": "Augeas",
"bytes": "4731"
},
{
"name": "Batchfile",
"bytes": "35037"
},
{
"name": "DIGITAL Command Language",
"bytes": "133"
},
{
"name": "Groff",
"bytes": "222"
},
{
"name": "Makefile",
"bytes": "37309"
},
{
"name": "Nginx",
"bytes": "4274"
},
{
"name": "Python",
"bytes": "1225979"
},
{
"name": "Shell",
"bytes": "26934"
}
],
"symlink_target": ""
}
|
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.views.generic import CreateView, ListView, UpdateView
from pretix.base.models import Organizer, OrganizerPermission
from pretix.control.forms.organizer import OrganizerForm, OrganizerUpdateForm
from pretix.control.permissions import OrganizerPermissionRequiredMixin
class OrganizerList(ListView):
model = Organizer
context_object_name = 'organizers'
template_name = 'pretixcontrol/organizers/index.html'
paginate_by = 30
def get_queryset(self):
if self.request.user.is_superuser:
return Organizer.objects.current.all()
else:
return Organizer.objects.current.filter(
permitted__id__exact=self.request.user.pk
)
class OrganizerUpdate(OrganizerPermissionRequiredMixin, UpdateView):
model = Organizer
form_class = OrganizerUpdateForm
template_name = 'pretixcontrol/organizers/detail.html'
permission = None
context_object_name = 'organizer'
def get_object(self, queryset=None) -> Organizer:
return self.request.organizer
def form_valid(self, form):
messages.success(self.request, _('Your changes have been saved.'))
return super().form_valid(form)
def get_success_url(self) -> str:
return reverse('control:organizer.edit', kwargs={
'organizer': self.request.organizer.slug,
})
class OrganizerCreate(CreateView):
model = Organizer
form_class = OrganizerForm
template_name = 'pretixcontrol/organizers/create.html'
context_object_name = 'organizer'
def dispatch(self, request, *args, **kwargs):
if not request.user.is_superuser:
raise PermissionDenied() # TODO
return super().dispatch(request, *args, **kwargs)
def form_valid(self, form):
messages.success(self.request, _('The new organizer has been created.'))
ret = super().form_valid(form)
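        # Grant the creating user rights on the new organizer so they can
        # immediately create events under it.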
OrganizerPermission.objects.create(
organizer=form.instance, user=self.request.user,
can_create_events=True
)
return ret
def get_success_url(self) -> str:
return reverse('control:organizers')
|
{
"content_hash": "fd2b0a659175c438430dbb420bb1caa4",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 80,
"avg_line_length": 34.470588235294116,
"alnum_prop": 0.6941126279863481,
"repo_name": "akuks/pretix",
"id": "4bb36da0034c6b34fe17c540c1103a82693cb9ef",
"size": "2344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/pretix/control/views/organizer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "45033"
},
{
"name": "HTML",
"bytes": "171262"
},
{
"name": "JavaScript",
"bytes": "24712"
},
{
"name": "Makefile",
"bytes": "423"
},
{
"name": "Python",
"bytes": "672101"
},
{
"name": "Shell",
"bytes": "808"
}
],
"symlink_target": ""
}
|
class Node:
def __init__(self, value):
self._value = value
self._next = None
def value(self):
return self._value
def next(self):
return self._next
class LinkedIterator:
def __init__(self, linked_list):
self.current = linked_list._head
def __iter__(self):
return self
def __next__(self):
if self.current is None:
raise StopIteration
value = self.current.value()
self.current = self.current.next()
return value
def next(self):
return self.__next__()
class LinkedList:
def __init__(self, values=None):
values = values if values is not None else []
self._head = None
self._len = 0
for value in values:
self.push(value)
def __iter__(self):
return LinkedIterator(self)
def __len__(self):
return self._len
def head(self):
if self._head is None:
raise EmptyListException('The list is empty.')
return self._head
def push(self, value):
new_node = Node(value)
new_node._next = self._head
self._head = new_node
self._len += 1
def pop(self):
if self._head is None:
raise EmptyListException('The list is empty.')
self._len -= 1
ret = self._head.value()
self._head = self._head.next()
return ret
def reversed(self):
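        # Iterating over self yields values head-to-tail; pushing each one
        # onto a fresh list prepends it, so the copy comes out reversed.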
return LinkedList(self)
class EmptyListException(Exception):
"""Exception raised when the linked list is empty.
message: explanation of the error.
"""
def __init__(self, message):
self.message = message
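# A minimal usage sketch (illustrative only, not part of the exercise tests):
# push/pop behave LIFO, and reversed() returns a new list in opposite order.
def _usage_sketch():
    lst = LinkedList([1, 2, 3])  # 3 was pushed last, so it is the head
    assert list(lst) == [3, 2, 1]
    assert list(lst.reversed()) == [1, 2, 3]
    assert lst.pop() == 3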
|
{
"content_hash": "4393ff7c5d9a9261909dfdc9ced9caa6",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 58,
"avg_line_length": 22.157894736842106,
"alnum_prop": 0.5546318289786223,
"repo_name": "exercism/python",
"id": "f87b155e79ec80752120654634a37295b4a46e9b",
"size": "1684",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "exercises/practice/simple-linked-list/.meta/example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jinja",
"bytes": "103144"
},
{
"name": "Python",
"bytes": "934764"
},
{
"name": "Shell",
"bytes": "2960"
}
],
"symlink_target": ""
}
|
"""
Easy Install
------------
A tool for doing automatic download/extract/build of distutils-based Python
packages. For detailed documentation, see the accompanying EasyInstall.txt
file, or visit the `EasyInstall home page`__.
__ https://pythonhosted.org/setuptools/easy_install.html
"""
from glob import glob
from distutils.util import get_platform
from distutils.util import convert_path, subst_vars
from distutils.errors import (
DistutilsArgError, DistutilsOptionError,
DistutilsError, DistutilsPlatformError,
)
from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
from distutils import log, dir_util
from distutils.command.build_scripts import first_line_re
from distutils.spawn import find_executable
import sys
import os
import zipimport
import shutil
import tempfile
import zipfile
import re
import stat
import random
import textwrap
import warnings
import site
import struct
import contextlib
import subprocess
import shlex
import io
from setuptools.extern import six
from setuptools.extern.six.moves import configparser, map
from setuptools import Command
from setuptools.sandbox import run_setup
from setuptools.py31compat import get_path, get_config_vars
from setuptools.command import setopt
from setuptools.archive_util import unpack_archive
from setuptools.package_index import PackageIndex
from setuptools.package_index import URL_SCHEME
from setuptools.command import bdist_egg, egg_info
from pkg_resources import (
yield_lines, normalize_path, resource_string, ensure_directory,
get_distribution, find_distributions, Environment, Requirement,
Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
VersionConflict, DEVELOP_DIST,
)
import pkg_resources
# Turn on PEP440Warnings
warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
__all__ = [
'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
'main', 'get_exe_prefixes',
]
def is_64bit():
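    # calcsize("P") is the size in bytes of a C pointer for this interpreter
    # build, so it is exactly 8 on 64-bit Pythons.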
return struct.calcsize("P") == 8
def samefile(p1, p2):
"""
Determine if two paths reference the same file.
Augments os.path.samefile to work on Windows and
suppresses errors if the path doesn't exist.
"""
both_exist = os.path.exists(p1) and os.path.exists(p2)
use_samefile = hasattr(os.path, 'samefile') and both_exist
if use_samefile:
return os.path.samefile(p1, p2)
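    # Fall back to a purely lexical comparison when os.path.samefile is
    # unavailable (e.g. on Windows before Python 3.2) or a path is missing.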
norm_p1 = os.path.normpath(os.path.normcase(p1))
norm_p2 = os.path.normpath(os.path.normcase(p2))
return norm_p1 == norm_p2
if six.PY2:
def _to_ascii(s):
return s
def isascii(s):
try:
six.text_type(s, 'ascii')
return True
except UnicodeError:
return False
else:
def _to_ascii(s):
return s.encode('ascii')
def isascii(s):
try:
s.encode('ascii')
return True
except UnicodeError:
return False
_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ')
class easy_install(Command):
"""Manage a download/build/install process"""
description = "Find/get/install Python packages"
command_consumes_arguments = True
user_options = [
('prefix=', None, "installation prefix"),
("zip-ok", "z", "install package as a zipfile"),
("multi-version", "m", "make apps have to require() a version"),
("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
("install-dir=", "d", "install package to DIR"),
("script-dir=", "s", "install scripts to DIR"),
("exclude-scripts", "x", "Don't install scripts"),
("always-copy", "a", "Copy all needed packages to install dir"),
("index-url=", "i", "base URL of Python Package Index"),
("find-links=", "f", "additional URL(s) to search for packages"),
("build-directory=", "b",
"download/extract/build in DIR; keep the results"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('record=', None,
"filename in which to record list of installed files"),
('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
('site-dirs=', 'S', "list of directories where .pth files work"),
('editable', 'e', "Install specified packages in editable form"),
('no-deps', 'N', "don't install dependencies"),
('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
('local-snapshots-ok', 'l',
"allow building eggs from local checkouts"),
('version', None, "print version information and exit"),
('no-find-links', None,
"Don't load find-links defined in packages being installed")
]
boolean_options = [
'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
'editable',
'no-deps', 'local-snapshots-ok', 'version'
]
if site.ENABLE_USER_SITE:
help_msg = "install in user site-package '%s'" % site.USER_SITE
user_options.append(('user', None, help_msg))
boolean_options.append('user')
negative_opt = {'always-unzip': 'zip-ok'}
create_index = PackageIndex
def initialize_options(self):
# the --user option seems to be an opt-in one,
# so the default should be False.
self.user = 0
self.zip_ok = self.local_snapshots_ok = None
self.install_dir = self.script_dir = self.exclude_scripts = None
self.index_url = None
self.find_links = None
self.build_directory = None
self.args = None
self.optimize = self.record = None
self.upgrade = self.always_copy = self.multi_version = None
self.editable = self.no_deps = self.allow_hosts = None
self.root = self.prefix = self.no_report = None
self.version = None
self.install_purelib = None # for pure module distributions
self.install_platlib = None # non-pure (dists w/ extensions)
self.install_headers = None # for C/C++ headers
self.install_lib = None # set to either purelib or platlib
self.install_scripts = None
self.install_data = None
self.install_base = None
self.install_platbase = None
if site.ENABLE_USER_SITE:
self.install_userbase = site.USER_BASE
self.install_usersite = site.USER_SITE
else:
self.install_userbase = None
self.install_usersite = None
self.no_find_links = None
# Options not specifiable via command line
self.package_index = None
self.pth_file = self.always_copy_from = None
self.site_dirs = None
self.installed_projects = {}
self.sitepy_installed = False
# Always read easy_install options, even if we are subclassed, or have
# an independent instance created. This ensures that defaults will
# always come from the standard configuration file(s)' "easy_install"
# section, even if this is a "develop" or "install" command, or some
# other embedding.
self._dry_run = None
self.verbose = self.distribution.verbose
self.distribution._set_command_options(
self, self.distribution.get_option_dict('easy_install')
)
def delete_blockers(self, blockers):
extant_blockers = (
filename for filename in blockers
if os.path.exists(filename) or os.path.islink(filename)
)
list(map(self._delete_path, extant_blockers))
def _delete_path(self, path):
log.info("Deleting %s", path)
if self.dry_run:
return
is_tree = os.path.isdir(path) and not os.path.islink(path)
remover = rmtree if is_tree else os.unlink
remover(path)
@staticmethod
def _render_version():
"""
Render the Setuptools version and installation details, then exit.
"""
ver = sys.version[:3]
dist = get_distribution('setuptools')
tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
print(tmpl.format(**locals()))
raise SystemExit()
def finalize_options(self):
if self.version:
    self._render_version()
py_version = sys.version.split()[0]
prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
self.config_vars = {
'dist_name': self.distribution.get_name(),
'dist_version': self.distribution.get_version(),
'dist_fullname': self.distribution.get_fullname(),
'py_version': py_version,
'py_version_short': py_version[0:3],
'py_version_nodot': py_version[0] + py_version[2],
'sys_prefix': prefix,
'prefix': prefix,
'sys_exec_prefix': exec_prefix,
'exec_prefix': exec_prefix,
# Only python 3.2+ has abiflags
'abiflags': getattr(sys, 'abiflags', ''),
}
if site.ENABLE_USER_SITE:
self.config_vars['userbase'] = self.install_userbase
self.config_vars['usersite'] = self.install_usersite
self._fix_install_dir_for_user_site()
self.expand_basedirs()
self.expand_dirs()
self._expand(
'install_dir', 'script_dir', 'build_directory',
'site_dirs',
)
# If a non-default installation directory was specified, default the
# script directory to match it.
if self.script_dir is None:
self.script_dir = self.install_dir
if self.no_find_links is None:
self.no_find_links = False
# Let install_dir get set by install_lib command, which in turn
# gets its info from the install command, and takes into account
# --prefix and --home and all that other crud.
self.set_undefined_options(
'install_lib', ('install_dir', 'install_dir')
)
# Likewise, set default script_dir from 'install_scripts.install_dir'
self.set_undefined_options(
'install_scripts', ('install_dir', 'script_dir')
)
if self.user and self.install_purelib:
self.install_dir = self.install_purelib
self.script_dir = self.install_scripts
# default --record from the install command
self.set_undefined_options('install', ('record', 'record'))
# Should this be moved to the if statement below? It's not used
# elsewhere
normpath = list(map(normalize_path, sys.path))  # list(): membership is tested repeatedly below
self.all_site_dirs = get_site_dirs()
if self.site_dirs is not None:
site_dirs = [
os.path.expanduser(s.strip()) for s in
self.site_dirs.split(',')
]
for d in site_dirs:
if not os.path.isdir(d):
log.warn("%s (in --site-dirs) does not exist", d)
elif normalize_path(d) not in normpath:
raise DistutilsOptionError(
d + " (in --site-dirs) is not on sys.path"
)
else:
self.all_site_dirs.append(normalize_path(d))
if not self.editable:
self.check_site_dir()
self.index_url = self.index_url or "https://pypi.python.org/simple"
self.shadow_path = self.all_site_dirs[:]
for path_item in self.install_dir, normalize_path(self.script_dir):
if path_item not in self.shadow_path:
self.shadow_path.insert(0, path_item)
if self.allow_hosts is not None:
hosts = [s.strip() for s in self.allow_hosts.split(',')]
else:
hosts = ['*']
if self.package_index is None:
self.package_index = self.create_index(
self.index_url, search_path=self.shadow_path, hosts=hosts,
)
self.local_index = Environment(self.shadow_path + sys.path)
if self.find_links is not None:
if isinstance(self.find_links, six.string_types):
self.find_links = self.find_links.split()
else:
self.find_links = []
if self.local_snapshots_ok:
self.package_index.scan_egg_links(self.shadow_path + sys.path)
if not self.no_find_links:
self.package_index.add_find_links(self.find_links)
self.set_undefined_options('install_lib', ('optimize', 'optimize'))
if not isinstance(self.optimize, int):
try:
self.optimize = int(self.optimize)
if not (0 <= self.optimize <= 2):
raise ValueError
except ValueError:
raise DistutilsOptionError("--optimize must be 0, 1, or 2")
if self.editable and not self.build_directory:
raise DistutilsArgError(
"Must specify a build directory (-b) when using --editable"
)
if not self.args:
raise DistutilsArgError(
"No urls, filenames, or requirements specified (see --help)")
self.outputs = []
def _fix_install_dir_for_user_site(self):
"""
Fix the install_dir if "--user" was used.
"""
if not self.user or not site.ENABLE_USER_SITE:
return
self.create_home_path()
if self.install_userbase is None:
msg = "User base directory is not specified"
raise DistutilsPlatformError(msg)
self.install_base = self.install_platbase = self.install_userbase
scheme_name = os.name.replace('posix', 'unix') + '_user'
self.select_scheme(scheme_name)
def _expand_attrs(self, attrs):
for attr in attrs:
val = getattr(self, attr)
if val is not None:
if os.name == 'posix' or os.name == 'nt':
val = os.path.expanduser(val)
val = subst_vars(val, self.config_vars)
setattr(self, attr, val)
def expand_basedirs(self):
"""Calls `os.path.expanduser` on install_base, install_platbase and
root."""
self._expand_attrs(['install_base', 'install_platbase', 'root'])
def expand_dirs(self):
"""Calls `os.path.expanduser` on install dirs."""
dirs = [
'install_purelib',
'install_platlib',
'install_lib',
'install_headers',
'install_scripts',
'install_data',
]
self._expand_attrs(dirs)
def run(self):
if self.verbose != self.distribution.verbose:
log.set_verbosity(self.verbose)
try:
for spec in self.args:
self.easy_install(spec, not self.no_deps)
if self.record:
outputs = self.outputs
if self.root: # strip any package prefix
root_len = len(self.root)
for counter in range(len(outputs)):
outputs[counter] = outputs[counter][root_len:]
from distutils import file_util
self.execute(
file_util.write_file, (self.record, outputs),
"writing list of installed files to '%s'" %
self.record
)
self.warn_deprecated_options()
finally:
log.set_verbosity(self.distribution.verbose)
def pseudo_tempname(self):
"""Return a pseudo-tempname base in the install directory.
This code is intentionally naive; if a malicious party can write to
the target directory you're already in deep doodoo.
"""
try:
pid = os.getpid()
except Exception:
pid = random.randint(0, sys.maxsize)
return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
def warn_deprecated_options(self):
pass
def check_site_dir(self):
"""Verify that self.install_dir is .pth-capable dir, if needed"""
instdir = normalize_path(self.install_dir)
pth_file = os.path.join(instdir, 'easy-install.pth')
# Is it a configured, PYTHONPATH, implicit, or explicit site dir?
is_site_dir = instdir in self.all_site_dirs
if not is_site_dir and not self.multi_version:
# No? Then directly test whether it does .pth file processing
is_site_dir = self.check_pth_processing()
else:
# make sure we can write to target dir
testfile = self.pseudo_tempname() + '.write-test'
test_exists = os.path.exists(testfile)
try:
if test_exists:
os.unlink(testfile)
open(testfile, 'w').close()
os.unlink(testfile)
except (OSError, IOError):
self.cant_write_to_target()
if not is_site_dir and not self.multi_version:
# Can't install non-multi to non-site dir
raise DistutilsError(self.no_default_version_msg())
if is_site_dir:
if self.pth_file is None:
self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
else:
self.pth_file = None
PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep)
if instdir not in map(normalize_path, filter(None, PYTHONPATH)):
# only PYTHONPATH dirs need a site.py, so pretend it's there
self.sitepy_installed = True
elif self.multi_version and not os.path.exists(pth_file):
self.sitepy_installed = True # don't need site.py in this case
self.pth_file = None # and don't create a .pth file
self.install_dir = instdir
__cant_write_msg = textwrap.dedent("""
can't create or remove files in install directory
The following error occurred while trying to add or remove files in the
installation directory:
%s
The installation directory you specified (via --install-dir, --prefix, or
the distutils default setting) was:
%s
""").lstrip()
__not_exists_id = textwrap.dedent("""
This directory does not currently exist. Please create it and try again, or
choose a different installation directory (using the -d or --install-dir
option).
""").lstrip()
__access_msg = textwrap.dedent("""
Perhaps your account does not have write access to this directory? If the
installation directory is a system-owned directory, you may need to sign in
as the administrator or "root" account. If you do not have administrative
access to this machine, you may wish to choose a different installation
directory, preferably one that is listed in your PYTHONPATH environment
variable.
For information on other options, you may wish to consult the
documentation at:
https://pythonhosted.org/setuptools/easy_install.html
Please make the appropriate changes for your system and try again.
""").lstrip()
def cant_write_to_target(self):
msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
if not os.path.exists(self.install_dir):
msg += '\n' + self.__not_exists_id
else:
msg += '\n' + self.__access_msg
raise DistutilsError(msg)
def check_pth_processing(self):
"""Empirically verify whether .pth files are supported in inst. dir"""
instdir = self.install_dir
log.info("Checking .pth file support in %s", instdir)
pth_file = self.pseudo_tempname() + ".pth"
ok_file = pth_file + '.ok'
ok_exists = os.path.exists(ok_file)
tmpl = _one_liner("""
import os
f = open({ok_file!r}, 'w')
f.write('OK')
f.close()
""") + '\n'
try:
if ok_exists:
os.unlink(ok_file)
dirname = os.path.dirname(ok_file)
if not os.path.exists(dirname):
os.makedirs(dirname)
f = open(pth_file, 'w')
except (OSError, IOError):
self.cant_write_to_target()
else:
try:
f.write(tmpl.format(**locals()))
f.close()
f = None
executable = sys.executable
if os.name == 'nt':
dirname, basename = os.path.split(executable)
alt = os.path.join(dirname, 'pythonw.exe')
use_alt = (
basename.lower() == 'python.exe' and
os.path.exists(alt)
)
if use_alt:
# use pythonw.exe to avoid opening a console window
executable = alt
from distutils.spawn import spawn
spawn([executable, '-E', '-c', 'pass'], 0)
if os.path.exists(ok_file):
log.info(
"TEST PASSED: %s appears to support .pth files",
instdir
)
return True
finally:
if f:
f.close()
if os.path.exists(ok_file):
os.unlink(ok_file)
if os.path.exists(pth_file):
os.unlink(pth_file)
if not self.multi_version:
log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
return False
def install_egg_scripts(self, dist):
"""Write all the scripts for `dist`, unless scripts are excluded"""
if not self.exclude_scripts and dist.metadata_isdir('scripts'):
for script_name in dist.metadata_listdir('scripts'):
if dist.metadata_isdir('scripts/' + script_name):
# The "script" is a directory, likely a Python 3
# __pycache__ directory, so skip it.
continue
self.install_script(
dist, script_name,
dist.get_metadata('scripts/' + script_name)
)
self.install_wrapper_scripts(dist)
def add_output(self, path):
if os.path.isdir(path):
for base, dirs, files in os.walk(path):
for filename in files:
self.outputs.append(os.path.join(base, filename))
else:
self.outputs.append(path)
def not_editable(self, spec):
if self.editable:
raise DistutilsArgError(
"Invalid argument %r: you can't use filenames or URLs "
"with --editable (except via the --find-links option)."
% (spec,)
)
def check_editable(self, spec):
if not self.editable:
return
if os.path.exists(os.path.join(self.build_directory, spec.key)):
raise DistutilsArgError(
"%r already exists in %s; can't do a checkout there" %
(spec.key, self.build_directory)
)
def easy_install(self, spec, deps=False):
tmpdir = tempfile.mkdtemp(prefix="easy_install-")
if not self.editable:
self.install_site_py()
try:
if not isinstance(spec, Requirement):
if URL_SCHEME(spec):
# It's a url, download it to tmpdir and process
self.not_editable(spec)
dl = self.package_index.download(spec, tmpdir)
return self.install_item(None, dl, tmpdir, deps, True)
elif os.path.exists(spec):
# Existing file or directory, just process it directly
self.not_editable(spec)
return self.install_item(None, spec, tmpdir, deps, True)
else:
spec = parse_requirement_arg(spec)
self.check_editable(spec)
dist = self.package_index.fetch_distribution(
spec, tmpdir, self.upgrade, self.editable,
not self.always_copy, self.local_index
)
if dist is None:
msg = "Could not find suitable distribution for %r" % spec
if self.always_copy:
msg += " (--always-copy skips system and development eggs)"
raise DistutilsError(msg)
elif dist.precedence == DEVELOP_DIST:
# .egg-info dists don't need installing, just process deps
self.process_distribution(spec, dist, deps, "Using")
return dist
else:
return self.install_item(spec, dist.location, tmpdir, deps)
finally:
if os.path.exists(tmpdir):
rmtree(tmpdir)
def install_item(self, spec, download, tmpdir, deps, install_needed=False):
# Installation is also needed if the file is in tmpdir or is not an egg
install_needed = install_needed or self.always_copy
install_needed = install_needed or os.path.dirname(download) == tmpdir
install_needed = install_needed or not download.endswith('.egg')
install_needed = install_needed or (
self.always_copy_from is not None and
os.path.dirname(normalize_path(download)) ==
normalize_path(self.always_copy_from)
)
if spec and not install_needed:
# at this point, we know it's a local .egg, we just don't know if
# it's already installed.
for dist in self.local_index[spec.project_name]:
if dist.location == download:
break
else:
install_needed = True # it's not in the local index
log.info("Processing %s", os.path.basename(download))
if install_needed:
dists = self.install_eggs(spec, download, tmpdir)
for dist in dists:
self.process_distribution(spec, dist, deps)
else:
dists = [self.egg_distribution(download)]
self.process_distribution(spec, dists[0], deps, "Using")
if spec is not None:
for dist in dists:
if dist in spec:
return dist
def select_scheme(self, name):
"""Sets the install directories by applying the install schemes."""
# it's the caller's problem if they supply a bad name!
scheme = INSTALL_SCHEMES[name]
for key in SCHEME_KEYS:
attrname = 'install_' + key
if getattr(self, attrname) is None:
setattr(self, attrname, scheme[key])
def process_distribution(self, requirement, dist, deps=True, *info):
self.update_pth(dist)
self.package_index.add(dist)
if dist in self.local_index[dist.key]:
self.local_index.remove(dist)
self.local_index.add(dist)
self.install_egg_scripts(dist)
self.installed_projects[dist.key] = dist
log.info(self.installation_report(requirement, dist, *info))
if (dist.has_metadata('dependency_links.txt') and
not self.no_find_links):
self.package_index.add_find_links(
dist.get_metadata_lines('dependency_links.txt')
)
if not deps and not self.always_copy:
return
elif requirement is not None and dist.key != requirement.key:
log.warn("Skipping dependencies for %s", dist)
return # XXX this is not the distribution we were looking for
elif requirement is None or dist not in requirement:
# if we wound up with a different version, resolve what we've got
distreq = dist.as_requirement()
requirement = Requirement(str(distreq))
log.info("Processing dependencies for %s", requirement)
try:
distros = WorkingSet([]).resolve(
[requirement], self.local_index, self.easy_install
)
except DistributionNotFound as e:
raise DistutilsError(str(e))
except VersionConflict as e:
raise DistutilsError(e.report())
if self.always_copy or self.always_copy_from:
# Force all the relevant distros to be copied or activated
for dist in distros:
if dist.key not in self.installed_projects:
self.easy_install(dist.as_requirement())
log.info("Finished processing dependencies for %s", requirement)
def should_unzip(self, dist):
if self.zip_ok is not None:
return not self.zip_ok
if dist.has_metadata('not-zip-safe'):
return True
if not dist.has_metadata('zip-safe'):
return True
return False
def maybe_move(self, spec, dist_filename, setup_base):
dst = os.path.join(self.build_directory, spec.key)
if os.path.exists(dst):
msg = (
"%r already exists in %s; build directory %s will not be kept"
)
log.warn(msg, spec.key, self.build_directory, setup_base)
return setup_base
if os.path.isdir(dist_filename):
setup_base = dist_filename
else:
if os.path.dirname(dist_filename) == setup_base:
os.unlink(dist_filename) # get it out of the tmp dir
contents = os.listdir(setup_base)
if len(contents) == 1:
dist_filename = os.path.join(setup_base, contents[0])
if os.path.isdir(dist_filename):
# if the only thing there is a directory, move it instead
setup_base = dist_filename
ensure_directory(dst)
shutil.move(setup_base, dst)
return dst
def install_wrapper_scripts(self, dist):
if self.exclude_scripts:
return
for args in ScriptWriter.best().get_args(dist):
self.write_script(*args)
def install_script(self, dist, script_name, script_text, dev_path=None):
"""Generate a legacy script wrapper and install it"""
spec = str(dist.as_requirement())
is_script = is_python_script(script_text, script_name)
if is_script:
body = self._load_template(dev_path) % locals()
script_text = ScriptWriter.get_header(script_text) + body
self.write_script(script_name, _to_ascii(script_text), 'b')
@staticmethod
def _load_template(dev_path):
"""
There are a couple of template scripts in the package. This
function loads one of them and prepares it for use.
"""
# See https://github.com/pypa/setuptools/issues/134 for info
# on script file naming and downstream issues with SVR4
name = 'script.tmpl'
if dev_path:
name = name.replace('.tmpl', ' (dev).tmpl')
raw_bytes = resource_string('setuptools', name)
return raw_bytes.decode('utf-8')
def write_script(self, script_name, contents, mode="t", blockers=()):
"""Write an executable file to the scripts directory"""
self.delete_blockers( # clean up old .py/.pyw w/o a script
[os.path.join(self.script_dir, x) for x in blockers]
)
log.info("Installing %s script to %s", script_name, self.script_dir)
target = os.path.join(self.script_dir, script_name)
self.add_output(target)
mask = current_umask()
if not self.dry_run:
ensure_directory(target)
if os.path.exists(target):
os.unlink(target)
with open(target, "w" + mode) as f:
f.write(contents)
chmod(target, 0o777 - mask)
def install_eggs(self, spec, dist_filename, tmpdir):
# .egg dirs or files are already built, so just return them
if dist_filename.lower().endswith('.egg'):
return [self.install_egg(dist_filename, tmpdir)]
elif dist_filename.lower().endswith('.exe'):
return [self.install_exe(dist_filename, tmpdir)]
# Anything else, try to extract and build
setup_base = tmpdir
if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
unpack_archive(dist_filename, tmpdir, self.unpack_progress)
elif os.path.isdir(dist_filename):
setup_base = os.path.abspath(dist_filename)
if (setup_base.startswith(tmpdir) # something we downloaded
and self.build_directory and spec is not None):
setup_base = self.maybe_move(spec, dist_filename, setup_base)
# Find the setup.py file
setup_script = os.path.join(setup_base, 'setup.py')
if not os.path.exists(setup_script):
setups = glob(os.path.join(setup_base, '*', 'setup.py'))
if not setups:
raise DistutilsError(
"Couldn't find a setup script in %s" %
os.path.abspath(dist_filename)
)
if len(setups) > 1:
raise DistutilsError(
"Multiple setup scripts in %s" %
os.path.abspath(dist_filename)
)
setup_script = setups[0]
# Now run it, and return the result
if self.editable:
log.info(self.report_editable(spec, setup_script))
return []
else:
return self.build_and_install(setup_script, setup_base)
def egg_distribution(self, egg_path):
if os.path.isdir(egg_path):
metadata = PathMetadata(egg_path, os.path.join(egg_path,
'EGG-INFO'))
else:
metadata = EggMetadata(zipimport.zipimporter(egg_path))
return Distribution.from_filename(egg_path, metadata=metadata)
def install_egg(self, egg_path, tmpdir):
destination = os.path.join(
self.install_dir,
os.path.basename(egg_path),
)
destination = os.path.abspath(destination)
if not self.dry_run:
ensure_directory(destination)
dist = self.egg_distribution(egg_path)
if not samefile(egg_path, destination):
if os.path.isdir(destination) and not os.path.islink(destination):
dir_util.remove_tree(destination, dry_run=self.dry_run)
elif os.path.exists(destination):
self.execute(
os.unlink,
(destination,),
"Removing " + destination,
)
try:
new_dist_is_zipped = False
if os.path.isdir(egg_path):
if egg_path.startswith(tmpdir):
f, m = shutil.move, "Moving"
else:
f, m = shutil.copytree, "Copying"
elif self.should_unzip(dist):
self.mkpath(destination)
f, m = self.unpack_and_compile, "Extracting"
else:
new_dist_is_zipped = True
if egg_path.startswith(tmpdir):
f, m = shutil.move, "Moving"
else:
f, m = shutil.copy2, "Copying"
self.execute(
f,
(egg_path, destination),
(m + " %s to %s") % (
os.path.basename(egg_path),
os.path.dirname(destination)
),
)
update_dist_caches(
destination,
fix_zipimporter_caches=new_dist_is_zipped,
)
except Exception:
update_dist_caches(destination, fix_zipimporter_caches=False)
raise
self.add_output(destination)
return self.egg_distribution(destination)
def install_exe(self, dist_filename, tmpdir):
# See if it's valid, get data
cfg = extract_wininst_cfg(dist_filename)
if cfg is None:
raise DistutilsError(
"%s is not a valid distutils Windows .exe" % dist_filename
)
# Create a dummy distribution object until we build the real distro
dist = Distribution(
None,
project_name=cfg.get('metadata', 'name'),
version=cfg.get('metadata', 'version'), platform=get_platform(),
)
# Convert the .exe to an unpacked egg
egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg')
dist.location = egg_path
egg_tmp = egg_path + '.tmp'
_egg_info = os.path.join(egg_tmp, 'EGG-INFO')
pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX
self.exe_to_egg(dist_filename, egg_tmp)
# Write EGG-INFO/PKG-INFO
if not os.path.exists(pkg_inf):
f = open(pkg_inf, 'w')
f.write('Metadata-Version: 1.0\n')
for k, v in cfg.items('metadata'):
if k != 'target_version':
f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
f.close()
script_dir = os.path.join(_egg_info, 'scripts')
# delete entry-point scripts to avoid duping
self.delete_blockers([
os.path.join(script_dir, args[0])
for args in ScriptWriter.get_args(dist)
])
# Build .egg file from tmpdir
bdist_egg.make_zipfile(
egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run,
)
# install the .egg
return self.install_egg(egg_path, tmpdir)
def exe_to_egg(self, dist_filename, egg_tmp):
"""Extract a bdist_wininst to the directories an egg would use"""
# Check for .pth file and set up prefix translations
prefixes = get_exe_prefixes(dist_filename)
to_compile = []
native_libs = []
top_level = {}
def process(src, dst):
    s = src.lower()
    for old, new in prefixes:
        if s.startswith(old):
            src = new + src[len(old):]
            parts = src.split('/')
            dst = os.path.join(egg_tmp, *parts)
            dl = dst.lower()
            if dl.endswith('.pyd') or dl.endswith('.dll'):
                parts[-1] = bdist_egg.strip_module(parts[-1])
                top_level[os.path.splitext(parts[0])[0]] = 1
                native_libs.append(src)
            elif dl.endswith('.py') and old != 'SCRIPTS/':
                top_level[os.path.splitext(parts[0])[0]] = 1
                to_compile.append(dst)
            return dst
    if not src.endswith('.pth'):
        log.warn("WARNING: can't process %s", src)
    return None
# extract, tracking .pyd/.dll->native_libs and .py -> to_compile
unpack_archive(dist_filename, egg_tmp, process)
stubs = []
for res in native_libs:
if res.lower().endswith('.pyd'): # create stubs for .pyd's
parts = res.split('/')
resource = parts[-1]
parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
pyfile = os.path.join(egg_tmp, *parts)
to_compile.append(pyfile)
stubs.append(pyfile)
bdist_egg.write_stub(resource, pyfile)
self.byte_compile(to_compile) # compile .py's
bdist_egg.write_safety_flag(
os.path.join(egg_tmp, 'EGG-INFO'),
bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
for name in 'top_level', 'native_libs':
if locals()[name]:
txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
if not os.path.exists(txt):
f = open(txt, 'w')
f.write('\n'.join(locals()[name]) + '\n')
f.close()
__mv_warning = textwrap.dedent("""
Because this distribution was installed --multi-version, before you can
import modules from this package in an application, you will need to
'import pkg_resources' and then use a 'require()' call similar to one of
these examples, in order to select the desired version:
pkg_resources.require("%(name)s") # latest installed version
pkg_resources.require("%(name)s==%(version)s") # this exact version
pkg_resources.require("%(name)s>=%(version)s") # this version or higher
""").lstrip()
__id_warning = textwrap.dedent("""
Note also that the installation directory must be on sys.path at runtime for
this to work. (e.g. by being the application's script directory, by being on
PYTHONPATH, or by being added to sys.path by your code.)
""")
def installation_report(self, req, dist, what="Installed"):
"""Helpful installation message for display to package users"""
msg = "\n%(what)s %(eggloc)s%(extras)s"
if self.multi_version and not self.no_report:
msg += '\n' + self.__mv_warning
if self.install_dir not in map(normalize_path, sys.path):
msg += '\n' + self.__id_warning
eggloc = dist.location
name = dist.project_name
version = dist.version
extras = '' # TODO: self.report_extras(req, dist)
return msg % locals()
__editable_msg = textwrap.dedent("""
Extracted editable version of %(spec)s to %(dirname)s
If it uses setuptools in its setup script, you can activate it in
"development" mode by going to that directory and running::
%(python)s setup.py develop
See the setuptools documentation for the "develop" command for more info.
""").lstrip()
def report_editable(self, spec, setup_script):
dirname = os.path.dirname(setup_script)
python = sys.executable
return '\n' + self.__editable_msg % locals()
def run_setup(self, setup_script, setup_base, args):
sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
sys.modules.setdefault('distutils.command.egg_info', egg_info)
args = list(args)
if self.verbose > 2:
v = 'v' * (self.verbose - 1)
args.insert(0, '-' + v)
elif self.verbose < 2:
args.insert(0, '-q')
if self.dry_run:
args.insert(0, '-n')
log.info(
"Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
)
try:
run_setup(setup_script, args)
except SystemExit as v:
raise DistutilsError("Setup script exited with %s" % (v.args[0],))
def build_and_install(self, setup_script, setup_base):
args = ['bdist_egg', '--dist-dir']
dist_dir = tempfile.mkdtemp(
prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
)
try:
self._set_fetcher_options(os.path.dirname(setup_script))
args.append(dist_dir)
self.run_setup(setup_script, setup_base, args)
all_eggs = Environment([dist_dir])
eggs = []
for key in all_eggs:
for dist in all_eggs[key]:
eggs.append(self.install_egg(dist.location, setup_base))
if not eggs and not self.dry_run:
log.warn("No eggs found in %s (setup script problem?)",
dist_dir)
return eggs
finally:
rmtree(dist_dir)
log.set_verbosity(self.verbose) # restore our log verbosity
def _set_fetcher_options(self, base):
"""
When easy_install is about to run bdist_egg on a source dist, that
source dist might have 'setup_requires' directives, requiring
additional fetching. Ensure the fetcher options given to easy_install
are available to that command as well.
"""
# find the fetch options from easy_install and write them out
# to the setup.cfg file.
ei_opts = self.distribution.get_option_dict('easy_install').copy()
fetch_directives = (
    'find_links', 'site_dirs', 'index_url', 'optimize',
    'allow_hosts',
)
fetch_options = {}
for key, val in ei_opts.items():
if key not in fetch_directives:
continue
fetch_options[key.replace('_', '-')] = val[1]
# create a settings dictionary suitable for `edit_config`
settings = dict(easy_install=fetch_options)
cfg_filename = os.path.join(base, 'setup.cfg')
setopt.edit_config(cfg_filename, settings)
def update_pth(self, dist):
if self.pth_file is None:
return
for d in self.pth_file[dist.key]: # drop old entries
if self.multi_version or d.location != dist.location:
log.info("Removing %s from easy-install.pth file", d)
self.pth_file.remove(d)
if d.location in self.shadow_path:
self.shadow_path.remove(d.location)
if not self.multi_version:
if dist.location in self.pth_file.paths:
log.info(
"%s is already the active version in easy-install.pth",
dist,
)
else:
log.info("Adding %s to easy-install.pth file", dist)
self.pth_file.add(dist) # add new entry
if dist.location not in self.shadow_path:
self.shadow_path.append(dist.location)
if not self.dry_run:
self.pth_file.save()
if dist.key == 'setuptools':
# Ensure that setuptools itself never becomes unavailable!
# XXX should this check for latest version?
filename = os.path.join(self.install_dir, 'setuptools.pth')
if os.path.islink(filename):
os.unlink(filename)
f = open(filename, 'wt')
f.write(self.pth_file.make_relative(dist.location) + '\n')
f.close()
def unpack_progress(self, src, dst):
# Progress filter for unpacking
log.debug("Unpacking %s to %s", src, dst)
return dst # only unpack-and-compile skips files for dry run
def unpack_and_compile(self, egg_path, destination):
to_compile = []
to_chmod = []
def pf(src, dst):
if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
to_compile.append(dst)
elif dst.endswith('.dll') or dst.endswith('.so'):
to_chmod.append(dst)
self.unpack_progress(src, dst)
return dst if not self.dry_run else None
unpack_archive(egg_path, destination, pf)
self.byte_compile(to_compile)
if not self.dry_run:
for f in to_chmod:
mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
chmod(f, mode)
def byte_compile(self, to_compile):
if sys.dont_write_bytecode:
self.warn('byte-compiling is disabled, skipping.')
return
from distutils.util import byte_compile
try:
# try to make the byte compile messages quieter
log.set_verbosity(self.verbose - 1)
byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
if self.optimize:
byte_compile(
to_compile, optimize=self.optimize, force=1,
dry_run=self.dry_run,
)
finally:
log.set_verbosity(self.verbose) # restore original verbosity
__no_default_msg = textwrap.dedent("""
bad install directory or PYTHONPATH
You are attempting to install a package to a directory that is not
on PYTHONPATH and which Python does not read ".pth" files from. The
installation directory you specified (via --install-dir, --prefix, or
the distutils default setting) was:
%s
and your PYTHONPATH environment variable currently contains:
%r
Here are some of your options for correcting the problem:
* You can choose a different installation directory, i.e., one that is
on PYTHONPATH or supports .pth files
* You can add the installation directory to the PYTHONPATH environment
variable. (It must then also be on PYTHONPATH whenever you run
Python and want to use the package(s) you are installing.)
* You can set up the installation directory to support ".pth" files by
using one of the approaches described here:
https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations
Please make the appropriate changes for your system and try again.""").lstrip()
def no_default_version_msg(self):
template = self.__no_default_msg
return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
def install_site_py(self):
"""Make sure there's a site.py in the target dir, if needed"""
if self.sitepy_installed:
return # already did it, or don't need to
sitepy = os.path.join(self.install_dir, "site.py")
source = resource_string("setuptools", "site-patch.py")
source = source.decode('utf-8')
current = ""
if os.path.exists(sitepy):
log.debug("Checking existing site.py in %s", self.install_dir)
with io.open(sitepy) as strm:
current = strm.read()
if not current.startswith('def __boot():'):
raise DistutilsError(
"%s is not a setuptools-generated site.py; please"
" remove it." % sitepy
)
if current != source:
log.info("Creating %s", sitepy)
if not self.dry_run:
ensure_directory(sitepy)
with io.open(sitepy, 'w', encoding='utf-8') as strm:
strm.write(source)
self.byte_compile([sitepy])
self.sitepy_installed = True
def create_home_path(self):
"""Create directories under ~."""
if not self.user:
return
home = convert_path(os.path.expanduser("~"))
for name, path in six.iteritems(self.config_vars):
if path.startswith(home) and not os.path.isdir(path):
self.debug_print("os.makedirs('%s', 0o700)" % path)
os.makedirs(path, 0o700)
INSTALL_SCHEMES = dict(
posix=dict(
install_dir='$base/lib/python$py_version_short/site-packages',
script_dir='$base/bin',
),
)
DEFAULT_SCHEME = dict(
install_dir='$base/Lib/site-packages',
script_dir='$base/Scripts',
)
def _expand(self, *attrs):
config_vars = self.get_finalized_command('install').config_vars
if self.prefix:
# Set default install_dir/scripts from --prefix
config_vars = config_vars.copy()
config_vars['base'] = self.prefix
scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
for attr, val in scheme.items():
if getattr(self, attr, None) is None:
setattr(self, attr, val)
from distutils.util import subst_vars
for attr in attrs:
val = getattr(self, attr)
if val is not None:
val = subst_vars(val, config_vars)
if os.name == 'posix':
val = os.path.expanduser(val)
setattr(self, attr, val)
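# Illustrative sketch (not part of the command above): how the
# INSTALL_SCHEMES / DEFAULT_SCHEME templates expand once a prefix is known.
# Assumes the command class above is named ``easy_install``; '/opt/py' is an
# arbitrary example prefix.
def _example_scheme_expansion(prefix='/opt/py'):
    scheme = easy_install.INSTALL_SCHEMES.get(os.name, easy_install.DEFAULT_SCHEME)
    config_vars = {'base': prefix, 'py_version_short': sys.version[:3]}
    # e.g. install_dir -> '/opt/py/lib/python2.7/site-packages' on POSIX
    return dict(
        (attr, subst_vars(val, config_vars)) for attr, val in scheme.items()
    )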
def get_site_dirs():
# return a list of 'site' dirs
sitedirs = [_f for _f in os.environ.get('PYTHONPATH',
'').split(os.pathsep) if _f]
prefixes = [sys.prefix]
if sys.exec_prefix != sys.prefix:
prefixes.append(sys.exec_prefix)
for prefix in prefixes:
if prefix:
if sys.platform in ('os2emx', 'riscos'):
sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
elif os.sep == '/':
sitedirs.extend([
os.path.join(
prefix,
"lib",
"python" + sys.version[:3],
"site-packages",
),
os.path.join(prefix, "lib", "site-python"),
])
else:
sitedirs.extend([
prefix,
os.path.join(prefix, "lib", "site-packages"),
])
if sys.platform == 'darwin':
# for framework builds *only* we add the standard Apple
# locations. Currently only per-user, but /Library and
# /Network/Library could be added too
if 'Python.framework' in prefix:
home = os.environ.get('HOME')
if home:
home_sp = os.path.join(
home,
'Library',
'Python',
sys.version[:3],
'site-packages',
)
sitedirs.append(home_sp)
lib_paths = get_path('purelib'), get_path('platlib')
for site_lib in lib_paths:
if site_lib not in sitedirs:
sitedirs.append(site_lib)
if site.ENABLE_USER_SITE:
sitedirs.append(site.USER_SITE)
try:
sitedirs.extend(site.getsitepackages())
except AttributeError:
pass
sitedirs = list(map(normalize_path, sitedirs))
return sitedirs
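# Illustrative sketch: get_site_dirs() already returns normalized paths, so
# a membership test against it mirrors the is_site_dir check performed in
# check_site_dir() above.
def _example_is_site_dir(directory):
    return normalize_path(directory) in get_site_dirs()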
def expand_paths(inputs):
"""Yield sys.path directories that might contain "old-style" packages"""
seen = {}
for dirname in inputs:
dirname = normalize_path(dirname)
if dirname in seen:
continue
seen[dirname] = 1
if not os.path.isdir(dirname):
continue
files = os.listdir(dirname)
yield dirname, files
for name in files:
if not name.endswith('.pth'):
# We only care about the .pth files
continue
if name in ('easy-install.pth', 'setuptools.pth'):
# Ignore .pth files that we control
continue
# Read the .pth file
f = open(os.path.join(dirname, name))
lines = list(yield_lines(f))
f.close()
# Yield existing non-dupe, non-import directory lines from it
for line in lines:
if not line.startswith("import"):
line = normalize_path(line.rstrip())
if line not in seen:
seen[line] = 1
if not os.path.isdir(line):
continue
yield line, os.listdir(line)
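# Illustrative sketch: expand_paths() yields a (dirname, filenames) pair for
# each input directory plus any extra directories named by third-party .pth
# files found there; collecting the first elements shows the effective
# search path.
def _example_old_style_dirs():
    return [dirname for dirname, filenames in expand_paths(sys.path)]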
def extract_wininst_cfg(dist_filename):
"""Extract configuration data from a bdist_wininst .exe
Returns a configparser.RawConfigParser, or None
"""
f = open(dist_filename, 'rb')
try:
endrec = zipfile._EndRecData(f)
if endrec is None:
return None
prepended = (endrec[9] - endrec[5]) - endrec[6]
if prepended < 12: # no wininst data here
return None
f.seek(prepended - 12)
tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
if tag not in (0x1234567A, 0x1234567B):
return None # not a valid tag
f.seek(prepended - (12 + cfglen))
init = {'version': '', 'target_version': ''}
cfg = configparser.RawConfigParser(init)
try:
part = f.read(cfglen)
# Read up to the first null byte.
config = part.split(b'\0', 1)[0]
# Now the config is in bytes, but for RawConfigParser, it should
# be text, so decode it.
config = config.decode(sys.getfilesystemencoding())
cfg.readfp(six.StringIO(config))
except configparser.Error:
return None
if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
return None
return cfg
finally:
f.close()
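# Illustrative sketch: pulling name/version metadata out of a bdist_wininst
# installer, as install_exe() does above; ``path`` must point at a real .exe
# built by bdist_wininst.
def _example_wininst_metadata(path):
    cfg = extract_wininst_cfg(path)
    if cfg is None:
        return None  # not a bdist_wininst .exe
    return cfg.get('metadata', 'name'), cfg.get('metadata', 'version')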
def get_exe_prefixes(exe_filename):
"""Get exe->egg path translations for a given .exe file"""
prefixes = [
('PURELIB/', ''),
('PLATLIB/pywin32_system32', ''),
('PLATLIB/', ''),
('SCRIPTS/', 'EGG-INFO/scripts/'),
('DATA/lib/site-packages', ''),
]
z = zipfile.ZipFile(exe_filename)
try:
for info in z.infolist():
name = info.filename
parts = name.split('/')
if len(parts) == 3 and parts[2] == 'PKG-INFO':
if parts[1].endswith('.egg-info'):
prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
break
if len(parts) != 2 or not name.endswith('.pth'):
continue
if name.endswith('-nspkg.pth'):
continue
if parts[0].upper() in ('PURELIB', 'PLATLIB'):
contents = z.read(name)
if six.PY3:
contents = contents.decode()
for pth in yield_lines(contents):
pth = pth.strip().replace('\\', '/')
if not pth.startswith('import'):
prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
finally:
z.close()
prefixes = [(x.lower(), y) for x, y in prefixes]
prefixes.sort()
prefixes.reverse()
return prefixes
def parse_requirement_arg(spec):
try:
return Requirement.parse(spec)
except ValueError:
raise DistutilsError(
"Not a URL, existing file, or requirement spec: %r" % (spec,)
)
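# Illustrative sketch: how easy_install() above turns a plain command-line
# argument into a Requirement; 'setuptools>=0.7' is an arbitrary spec.
def _example_requirement():
    req = parse_requirement_arg('setuptools>=0.7')
    return req.project_name, req.key  # -> ('setuptools', 'setuptools')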
class PthDistributions(Environment):
"""A .pth file with Distribution paths in it"""
dirty = False
def __init__(self, filename, sitedirs=()):
self.filename = filename
self.sitedirs = list(map(normalize_path, sitedirs))
self.basedir = normalize_path(os.path.dirname(self.filename))
self._load()
Environment.__init__(self, [], None, None)
for path in yield_lines(self.paths):
list(map(self.add, find_distributions(path, True)))
def _load(self):
self.paths = []
saw_import = False
seen = dict.fromkeys(self.sitedirs)
if os.path.isfile(self.filename):
f = open(self.filename, 'rt')
for line in f:
if line.startswith('import'):
saw_import = True
continue
path = line.rstrip()
self.paths.append(path)
if not path.strip() or path.strip().startswith('#'):
continue
# skip non-existent paths, in case somebody deleted a package
# manually, and duplicate paths as well
path = self.paths[-1] = normalize_path(
os.path.join(self.basedir, path)
)
if not os.path.exists(path) or path in seen:
self.paths.pop() # skip it
self.dirty = True # we cleaned up, so we're dirty now :)
continue
seen[path] = 1
f.close()
if self.paths and not saw_import:
self.dirty = True # ensure anything we touch has import wrappers
while self.paths and not self.paths[-1].strip():
self.paths.pop()
def save(self):
"""Write changed .pth file back to disk"""
if not self.dirty:
return
rel_paths = list(map(self.make_relative, self.paths))
if rel_paths:
log.debug("Saving %s", self.filename)
lines = self._wrap_lines(rel_paths)
data = '\n'.join(lines) + '\n'
if os.path.islink(self.filename):
os.unlink(self.filename)
with open(self.filename, 'wt') as f:
f.write(data)
elif os.path.exists(self.filename):
log.debug("Deleting empty %s", self.filename)
os.unlink(self.filename)
self.dirty = False
@staticmethod
def _wrap_lines(lines):
return lines
def add(self, dist):
"""Add `dist` to the distribution map"""
new_path = (
dist.location not in self.paths and (
dist.location not in self.sitedirs or
# account for '.' being in PYTHONPATH
dist.location == os.getcwd()
)
)
if new_path:
self.paths.append(dist.location)
self.dirty = True
Environment.add(self, dist)
def remove(self, dist):
"""Remove `dist` from the distribution map"""
while dist.location in self.paths:
self.paths.remove(dist.location)
self.dirty = True
Environment.remove(self, dist)
def make_relative(self, path):
npath, last = os.path.split(normalize_path(path))
baselen = len(self.basedir)
parts = [last]
sep = '/' if os.altsep == '/' else os.sep
while len(npath) >= baselen:
if npath == self.basedir:
parts.append(os.curdir)
parts.reverse()
return sep.join(parts)
npath, last = os.path.split(npath)
parts.append(last)
else:
return path
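# Illustrative sketch: make_relative() rewrites locations that live under
# the .pth file's own directory so the file survives relocating the tree;
# the paths below are arbitrary POSIX examples.
def _example_make_relative():
    pth = PthDistributions('/tmp/site/easy-install.pth')
    # -> './Foo-1.0-py2.7.egg'; paths outside basedir come back unchanged
    return pth.make_relative('/tmp/site/Foo-1.0-py2.7.egg')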
class RewritePthDistributions(PthDistributions):
@classmethod
def _wrap_lines(cls, lines):
yield cls.prelude
for line in lines:
yield line
yield cls.postlude
prelude = _one_liner("""
import sys
sys.__plen = len(sys.path)
""")
postlude = _one_liner("""
import sys
new = sys.path[sys.__plen:]
del sys.path[sys.__plen:]
p = getattr(sys, '__egginsert', 0)
sys.path[p:p] = new
sys.__egginsert = p + len(new)
""")
if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite':
PthDistributions = RewritePthDistributions
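# Illustrative sketch: with SETUPTOOLS_SYS_PATH_TECHNIQUE=rewrite, saved .pth
# entries are bracketed by the prelude/postlude above so that, at startup,
# the new entries are moved to the sys.__egginsert position rather than left
# appended at the end of sys.path.
def _example_wrapped_pth_lines():
    return list(RewritePthDistributions._wrap_lines(['./Foo-1.0.egg']))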
def _first_line_re():
"""
Return a regular expression based on first_line_re suitable for matching
strings.
"""
if isinstance(first_line_re.pattern, str):
return first_line_re
# first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
return re.compile(first_line_re.pattern.decode())
def auto_chmod(func, arg, exc):
    if func is os.remove and os.name == 'nt':
        chmod(arg, stat.S_IWRITE)
        return func(arg)
    et, ev, tb = sys.exc_info()
    # ``ev`` is an exception instance (not a tuple) on Python 3, so the old
    # ``ev[0]`` indexing would itself raise; re-raise the original exception
    # with the failing call noted in the message instead.
    six.reraise(et, et("%s: %s %s" % (ev, func, arg)), tb)
def update_dist_caches(dist_path, fix_zipimporter_caches):
"""
Fix any globally cached `dist_path` related data
`dist_path` should be a path of a newly installed egg distribution (zipped
or unzipped).
sys.path_importer_cache contains finder objects that have been cached when
importing data from the original distribution. Any such finders need to be
cleared since the replacement distribution might be packaged differently,
e.g. a zipped egg distribution might get replaced with an unzipped egg
folder or vice versa. Having the old finders cached may then cause Python
to attempt loading modules from the replacement distribution using an
incorrect loader.
zipimport.zipimporter objects are Python loaders charged with importing
data packaged inside zip archives. If stale loaders referencing the
original distribution are left behind, they can fail to load modules from
the replacement distribution. E.g. if an old zipimport.zipimporter instance
is used to load data from a new zipped egg archive, it may cause the
operation to attempt to locate the requested data in the wrong location -
one indicated by the original distribution's zip archive directory
information. Such an operation may then fail outright, e.g. report having
read a 'bad local file header', or even worse, it may fail silently &
return invalid data.
zipimport._zip_directory_cache contains cached zip archive directory
information for all existing zipimport.zipimporter instances and all such
instances connected to the same archive share the same cached directory
information.
If asked, and the underlying Python implementation allows it, we can fix
all existing zipimport.zipimporter instances instead of having to track
them down and remove them one by one, by updating their shared cached zip
archive directory information. This, of course, assumes that the
replacement distribution is packaged as a zipped egg.
If not asked to fix existing zipimport.zipimporter instances, we still do
our best to clear any remaining zipimport.zipimporter related cached data
that might somehow later get used when attempting to load data from the new
distribution and thus cause such load operations to fail. Note that when
tracking down such remaining stale data, we can not catch every conceivable
usage from here, and we clear only those that we know of and have found to
cause problems if left alive. Any remaining caches should be updated by
whomever is in charge of maintaining them, i.e. they should be ready to
handle us replacing their zip archives with new distributions at runtime.
"""
# There are several other known sources of stale zipimport.zipimporter
# instances that we do not clear here, but might if ever given a reason to
# do so:
# * Global setuptools pkg_resources.working_set (a.k.a. 'master working
# set') may contain distributions which may in turn contain their
# zipimport.zipimporter loaders.
# * Several zipimport.zipimporter loaders held by local variables further
# up the function call stack when running the setuptools installation.
# * Already loaded modules may have their __loader__ attribute set to the
# exact loader instance used when importing them. Python 3.4 docs state
# that this information is intended mostly for introspection and so is
# not expected to cause us problems.
normalized_path = normalize_path(dist_path)
_uncache(normalized_path, sys.path_importer_cache)
if fix_zipimporter_caches:
_replace_zip_directory_cache_data(normalized_path)
else:
# Here, even though we do not want to fix existing and now stale
# zipimporter cache information, we still want to remove it. Related to
# Python's zip archive directory information cache, we clear each of
# its stale entries in two phases:
# 1. Clear the entry so attempting to access zip archive information
# via any existing stale zipimport.zipimporter instances fails.
# 2. Remove the entry from the cache so any newly constructed
# zipimport.zipimporter instances do not end up using old stale
# zip archive directory information.
# This whole stale data removal step does not seem strictly necessary,
# but has been left in because it was done before we started replacing
# the zip archive directory information cache content if possible, and
# there are no relevant unit tests that we can depend on to tell us if
# this is really needed.
_remove_and_clear_zip_directory_cache_data(normalized_path)
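# Illustrative sketch: after install_egg() overwrites an egg in place, the
# caches are refreshed like this. The path is hypothetical; for paths with
# no cached entries the call is simply a no-op.
def _example_refresh_caches():
    update_dist_caches('/tmp/Foo-1.0-py2.7.egg', fix_zipimporter_caches=True)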
def _collect_zipimporter_cache_entries(normalized_path, cache):
"""
Return zipimporter cache entry keys related to a given normalized path.
Alternative path spellings (e.g. those using different character case or
those using alternative path separators) related to the same path are
included. Any sub-path entries are included as well, i.e. those
corresponding to zip archives embedded in other zip archives.
"""
result = []
prefix_len = len(normalized_path)
for p in cache:
np = normalize_path(p)
if (np.startswith(normalized_path) and
np[prefix_len:prefix_len + 1] in (os.sep, '')):
result.append(p)
return result
def _update_zipimporter_cache(normalized_path, cache, updater=None):
"""
Update zipimporter cache data for a given normalized path.
Any sub-path entries are processed as well, i.e. those corresponding to zip
archives embedded in other zip archives.
Given updater is a callable taking a cache entry key and the original entry
(after already removing the entry from the cache), and expected to update
the entry and possibly return a new one to be inserted in its place.
Returning None indicates that the entry should not be replaced with a new
one. If no updater is given, the cache entries are simply removed without
any additional processing, the same as if the updater simply returned None.
"""
for p in _collect_zipimporter_cache_entries(normalized_path, cache):
# N.B. pypy's custom zipimport._zip_directory_cache implementation does
# not support the complete dict interface:
# * Does not support item assignment, so on PyPy this function can only
# be used to remove existing cache entries, not to replace them.
# * Does not support the dict.pop() method, forcing us to use the
# get/del patterns instead. For more detailed information see the
# following links:
# https://github.com/pypa/setuptools/issues/202#issuecomment-202913420
# https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99
old_entry = cache[p]
del cache[p]
new_entry = updater and updater(p, old_entry)
if new_entry is not None:
cache[p] = new_entry
def _uncache(normalized_path, cache):
_update_zipimporter_cache(normalized_path, cache)
def _remove_and_clear_zip_directory_cache_data(normalized_path):
def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
old_entry.clear()
_update_zipimporter_cache(
normalized_path, zipimport._zip_directory_cache,
updater=clear_and_remove_cached_zip_archive_directory_data)
# PyPy Python implementation does not allow directly writing to the
# zipimport._zip_directory_cache and so prevents us from attempting to correct
# its content. The best we can do there is clear the problematic cache content
# and have PyPy repopulate it as needed. The downside is that if there are any
# stale zipimport.zipimporter instances laying around, attempting to use them
# will fail due to not having its zip archive directory information available
# instead of being automatically corrected to use the new correct zip archive
# directory information.
if '__pypy__' in sys.builtin_module_names:
_replace_zip_directory_cache_data = \
_remove_and_clear_zip_directory_cache_data
else:
def _replace_zip_directory_cache_data(normalized_path):
def replace_cached_zip_archive_directory_data(path, old_entry):
# N.B. In theory, we could load the zip directory information just
# once for all updated path spellings, and then copy it locally and
# update its contained path strings to contain the correct
# spelling, but that seems like a way too invasive move (this cache
# structure is not officially documented anywhere and could in
# theory change with new Python releases) for no significant
# benefit.
old_entry.clear()
zipimport.zipimporter(path)
old_entry.update(zipimport._zip_directory_cache[path])
return old_entry
_update_zipimporter_cache(
normalized_path, zipimport._zip_directory_cache,
updater=replace_cached_zip_archive_directory_data)
def is_python(text, filename='<string>'):
"Is this string a valid Python script?"
try:
compile(text, filename, 'exec')
except (SyntaxError, TypeError):
return False
else:
return True
def is_sh(executable):
"""Determine if the specified executable is a .sh (contains a #! line)"""
try:
with io.open(executable, encoding='latin-1') as fp:
magic = fp.read(2)
except (OSError, IOError):
return executable
return magic == '#!'
def nt_quote_arg(arg):
"""Quote a command line argument according to Windows parsing rules"""
return subprocess.list2cmdline([arg])
def is_python_script(script_text, filename):
"""Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
"""
if filename.endswith('.py') or filename.endswith('.pyw'):
return True # extension says it's Python
if is_python(script_text, filename):
return True # it's syntactically valid Python
if script_text.startswith('#!'):
# It begins with a '#!' line, so check if 'python' is in it somewhere
return 'python' in script_text.splitlines()[0].lower()
return False # Not any Python I can recognize
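# Illustrative sketch of the three checks above, in order: extension, then
# syntax, then shebang. The strings and filenames are arbitrary examples.
def _example_script_detection():
    assert is_python_script('print("hi")', 'hello.py')  # .py extension
    assert is_python_script('x = 1', 'hello')  # syntactically valid Python
    assert is_python_script('#!/usr/bin/env python\n)', 'h')  # python shebang
    assert not is_python_script('echo hi', 'hello.sh')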
try:
from os import chmod as _chmod
except ImportError:
# Jython compatibility
def _chmod(*args):
pass
def chmod(path, mode):
log.debug("changing mode of %s to %o", path, mode)
try:
_chmod(path, mode)
except os.error as e:
log.debug("chmod failed: %s", e)
class CommandSpec(list):
"""
A command spec for a #! header, specified as a list of arguments akin to
those passed to Popen.
"""
options = []
split_args = dict()
@classmethod
def best(cls):
"""
Choose the best CommandSpec class based on environmental conditions.
"""
return cls
@classmethod
def _sys_executable(cls):
_default = os.path.normpath(sys.executable)
return os.environ.get('__PYVENV_LAUNCHER__', _default)
@classmethod
def from_param(cls, param):
"""
Construct a CommandSpec from a parameter to build_scripts, which may
be None.
"""
if isinstance(param, cls):
return param
if isinstance(param, list):
return cls(param)
if param is None:
return cls.from_environment()
# otherwise, assume it's a string.
return cls.from_string(param)
@classmethod
def from_environment(cls):
return cls([cls._sys_executable()])
@classmethod
def from_string(cls, string):
"""
Construct a command spec from a simple string representing a command
line parseable by shlex.split.
"""
items = shlex.split(string, **cls.split_args)
return cls(items)
def install_options(self, script_text):
self.options = shlex.split(self._extract_options(script_text))
cmdline = subprocess.list2cmdline(self)
if not isascii(cmdline):
self.options[:0] = ['-x']
@staticmethod
def _extract_options(orig_script):
"""
Extract any options from the first line of the script.
"""
first = (orig_script + '\n').splitlines()[0]
match = _first_line_re().match(first)
options = match.group(1) or '' if match else ''
return options.strip()
def as_header(self):
return self._render(self + list(self.options))
@staticmethod
def _strip_quotes(item):
_QUOTES = '"\''
for q in _QUOTES:
if item.startswith(q) and item.endswith(q):
return item[1:-1]
return item
@staticmethod
def _render(items):
cmdline = subprocess.list2cmdline(
CommandSpec._strip_quotes(item.strip()) for item in items)
return '#!' + cmdline + '\n'
# For pbr compat; will be removed in a future version.
sys_executable = CommandSpec._sys_executable()
class WindowsCommandSpec(CommandSpec):
split_args = dict(posix=False)
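# Illustrative sketch: from_string() plus as_header() produce the '#!' line
# that write_script() ultimately embeds; '/usr/bin/python -E' is an example.
def _example_header():
    cmd = CommandSpec.from_string('/usr/bin/python -E')
    return cmd.as_header()  # -> '#!/usr/bin/python -E\n'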
class ScriptWriter(object):
"""
Encapsulates behavior around writing entry point scripts for console and
gui apps.
"""
template = textwrap.dedent("""
# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
__requires__ = %(spec)r
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.exit(
load_entry_point(%(spec)r, %(group)r, %(name)r)()
)
""").lstrip()
command_spec_class = CommandSpec
@classmethod
def get_script_args(cls, dist, executable=None, wininst=False):
# for backward compatibility
warnings.warn("Use get_args", DeprecationWarning)
writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
header = cls.get_script_header("", executable, wininst)
return writer.get_args(dist, header)
@classmethod
def get_script_header(cls, script_text, executable=None, wininst=False):
# for backward compatibility
warnings.warn("Use get_header", DeprecationWarning)
if wininst:
executable = "python.exe"
cmd = cls.command_spec_class.best().from_param(executable)
cmd.install_options(script_text)
return cmd.as_header()
@classmethod
def get_args(cls, dist, header=None):
"""
Yield write_script() argument tuples for a distribution's
console_scripts and gui_scripts entry points.
"""
if header is None:
header = cls.get_header()
spec = str(dist.as_requirement())
for type_ in 'console', 'gui':
group = type_ + '_scripts'
for name, ep in dist.get_entry_map(group).items():
cls._ensure_safe_name(name)
script_text = cls.template % locals()
args = cls._get_script_args(type_, name, header, script_text)
for res in args:
yield res
@staticmethod
def _ensure_safe_name(name):
"""
Prevent paths in *_scripts entry point names.
"""
has_path_sep = re.search(r'[\\/]', name)
if has_path_sep:
raise ValueError("Path separators not allowed in script names")
@classmethod
def get_writer(cls, force_windows):
# for backward compatibility
warnings.warn("Use best", DeprecationWarning)
return WindowsScriptWriter.best() if force_windows else cls.best()
@classmethod
def best(cls):
"""
Select the best ScriptWriter for this environment.
"""
if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
return WindowsScriptWriter.best()
else:
return cls
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
# Simply write the stub with no extension.
yield (name, header + script_text)
@classmethod
def get_header(cls, script_text="", executable=None):
"""Create a #! line, getting options (if any) from script_text"""
cmd = cls.command_spec_class.best().from_param(executable)
cmd.install_options(script_text)
return cmd.as_header()
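# Illustrative sketch: get_header() keeps any options found on an existing
# script's '#!' line (here '-x') and re-targets it at the current
# interpreter.
def _example_preserved_options():
    return ScriptWriter.get_header('#!/usr/bin/python -x\nprint(1)\n')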
class WindowsScriptWriter(ScriptWriter):
command_spec_class = WindowsCommandSpec
@classmethod
def get_writer(cls):
# for backward compatibility
warnings.warn("Use best", DeprecationWarning)
return cls.best()
@classmethod
def best(cls):
"""
Select the best ScriptWriter suitable for Windows
"""
writer_lookup = dict(
executable=WindowsExecutableLauncherWriter,
natural=cls,
)
# for compatibility, use the executable launcher by default
launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
return writer_lookup[launcher]
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
"For Windows, add a .py extension"
ext = dict(console='.pya', gui='.pyw')[type_]
if ext not in os.environ['PATHEXT'].lower().split(';'):
msg = (
"{ext} not listed in PATHEXT; scripts will not be "
"recognized as executables."
).format(**locals())
warnings.warn(msg, UserWarning)
old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
old.remove(ext)
header = cls._adjust_header(type_, header)
blockers = [name + x for x in old]
yield name + ext, header + script_text, 't', blockers
@classmethod
def _adjust_header(cls, type_, orig_header):
"""
Make sure 'pythonw' is used for gui and 'python' is used for
console (regardless of what sys.executable is).
"""
pattern = 'pythonw.exe'
repl = 'python.exe'
if type_ == 'gui':
pattern, repl = repl, pattern
pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
new_header = pattern_ob.sub(string=orig_header, repl=repl)
return new_header if cls._use_header(new_header) else orig_header
@staticmethod
def _use_header(new_header):
"""
Should _adjust_header use the replaced header?
        On non-Windows systems, always use. On
Windows systems, only use the replaced header if it resolves
to an executable on the system.
"""
clean_header = new_header[2:-1].strip('"')
return sys.platform != 'win32' or find_executable(clean_header)
class WindowsExecutableLauncherWriter(WindowsScriptWriter):
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
"""
For Windows, add a .py extension and an .exe launcher
"""
if type_ == 'gui':
launcher_type = 'gui'
ext = '-script.pyw'
old = ['.pyw']
else:
launcher_type = 'cli'
ext = '-script.py'
old = ['.py', '.pyc', '.pyo']
hdr = cls._adjust_header(type_, header)
blockers = [name + x for x in old]
yield (name + ext, hdr + script_text, 't', blockers)
yield (
name + '.exe', get_win_launcher(launcher_type),
'b' # write in binary mode
)
if not is_64bit():
# install a manifest for the launcher to prevent Windows
# from detecting it as an installer (which it will for
# launchers like easy_install.exe). Consider only
# adding a manifest for launchers detected as installers.
# See Distribute #143 for details.
m_name = name + '.exe.manifest'
yield (m_name, load_launcher_manifest(name), 't')
# for backward-compatibility
get_script_args = ScriptWriter.get_script_args
get_script_header = ScriptWriter.get_script_header
def get_win_launcher(type):
"""
Load the Windows launcher (executable) suitable for launching a script.
`type` should be either 'cli' or 'gui'
Returns the executable as a byte string.
"""
launcher_fn = '%s.exe' % type
if is_64bit():
launcher_fn = launcher_fn.replace(".", "-64.")
else:
launcher_fn = launcher_fn.replace(".", "-32.")
return resource_string('setuptools', launcher_fn)
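# Illustrative sketch (not part of the original module): a caller could pair
# the launcher bytes with a generated '-script.py' stub; 'myscript' is a
# hypothetical name.
#
#     with open('myscript.exe', 'wb') as f:
#         f.write(get_win_launcher('cli'))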
def load_launcher_manifest(name):
manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
if six.PY2:
return manifest % vars()
else:
return manifest.decode('utf-8') % vars()
def rmtree(path, ignore_errors=False, onerror=auto_chmod):
"""Recursively delete a directory tree.
This code is taken from the Python 2.4 version of 'shutil', because
the 2.3 version doesn't really work right.
"""
if ignore_errors:
def onerror(*args):
pass
elif onerror is None:
def onerror(*args):
raise
names = []
try:
names = os.listdir(path)
except os.error:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
except os.error:
mode = 0
if stat.S_ISDIR(mode):
rmtree(fullname, ignore_errors, onerror)
else:
try:
os.remove(fullname)
except os.error:
onerror(os.remove, fullname, sys.exc_info())
try:
os.rmdir(path)
except os.error:
onerror(os.rmdir, path, sys.exc_info())
def current_umask():
tmp = os.umask(0o022)
os.umask(tmp)
return tmp
def bootstrap():
# This function is called when setuptools*.egg is run using /bin/sh
import setuptools
argv0 = os.path.dirname(setuptools.__path__[0])
sys.argv[0] = argv0
sys.argv.append(argv0)
main()
def main(argv=None, **kw):
from setuptools import setup
from setuptools.dist import Distribution
class DistributionWithoutHelpCommands(Distribution):
common_usage = ""
def _show_help(self, *args, **kw):
with _patch_usage():
Distribution._show_help(self, *args, **kw)
if argv is None:
argv = sys.argv[1:]
with _patch_usage():
setup(
script_args=['-q', 'easy_install', '-v'] + argv,
script_name=sys.argv[0] or 'easy_install',
distclass=DistributionWithoutHelpCommands,
**kw
)
@contextlib.contextmanager
def _patch_usage():
import distutils.core
USAGE = textwrap.dedent("""
usage: %(script)s [options] requirement_or_url ...
or: %(script)s --help
""").lstrip()
def gen_usage(script_name):
return USAGE % dict(
script=os.path.basename(script_name),
)
saved = distutils.core.gen_usage
distutils.core.gen_usage = gen_usage
try:
yield
finally:
distutils.core.gen_usage = saved
|
{
"content_hash": "7dd35f20c97b825d23ec21eaf1b8c751",
"timestamp": "",
"source": "github",
"line_count": 2323,
"max_line_length": 139,
"avg_line_length": 37.32199741713302,
"alnum_prop": 0.5776652556546211,
"repo_name": "harshita-gupta/Harvard-FRSEM-Catalog-2016-17",
"id": "0e0dc2c4f469839165b0ac820a483d8c8c891175",
"size": "86722",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "flask/lib/python2.7/site-packages/setuptools/command/easy_install.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6612"
},
{
"name": "CSS",
"bytes": "55614"
},
{
"name": "HTML",
"bytes": "18090"
},
{
"name": "JavaScript",
"bytes": "176982"
},
{
"name": "Python",
"bytes": "13727320"
},
{
"name": "Shell",
"bytes": "3264"
}
],
"symlink_target": ""
}
|
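# NOTE: Python 2 script; input() evaluates the typed expression, so numeric
# entries are added as numbers.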
x = input("Please insert the first number ")
y = input("Please insert the second number ")
print "The score is =", x + y
quit()
|
{
"content_hash": "5a7def16bd5ed29e4a2822d62bf098a9",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 45,
"avg_line_length": 25.6,
"alnum_prop": 0.671875,
"repo_name": "codingsmartschool/PyMaIn",
"id": "0ac6deea136b17b4c2f0348be8c08f3025ab9673",
"size": "147",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymain/plus.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1194"
}
],
"symlink_target": ""
}
|
import os
from abc import abstractmethod
from pants.backend.jvm.tasks.classpath_entry import ClasspathEntry
from pants.base.build_environment import get_buildroot
from pants.engine.fs import Digest, PathGlobs, PathGlobsAndRoot
from pants.task.task import Task
from pants.util.dirutil import fast_relpath
class ResourcesTask(Task):
"""A base class for tasks that process or create resource files.
This base assumes that resources targets or targets that generate resources are independent from
each other and can be processed in isolation in any order.
:API: public
"""
@classmethod
def product_types(cls):
return ["runtime_classpath"]
@classmethod
def register_options(cls, register):
super().register_options(register)
register(
"--confs",
advanced=True,
type=list,
default=["default"],
help="Prepare resources for these Ivy confs.",
)
@classmethod
def prepare(cls, options, round_manager):
round_manager.require_data("compile_classpath")
@property
def cache_target_dirs(self):
return True
def execute(self):
# Tracked and returned for use in tests.
# TODO: Rewrite those tests. execute() is not supposed to return anything.
processed_targets = []
compile_classpath = self.context.products.get_data("compile_classpath")
runtime_classpath = self.context.products.get_data(
"runtime_classpath", compile_classpath.copy
)
all_relevant_resources_targets = self.find_all_relevant_resources_targets()
if not all_relevant_resources_targets:
return processed_targets
with self.invalidated(
targets=all_relevant_resources_targets,
fingerprint_strategy=self.create_invalidation_strategy(),
invalidate_dependents=False,
topological_order=False,
) as invalidation:
for vt in invalidation.invalid_vts:
# Generate resources to the chroot.
self.prepare_resources(vt.target, vt.results_dir)
processed_targets.append(vt.target)
for vt, digest in self._capture_resources(invalidation.all_vts):
# Register the target's chroot in the products.
for conf in self.get_options().confs:
runtime_classpath.add_for_target(
vt.target, [(conf, ClasspathEntry(vt.results_dir, digest))]
)
return processed_targets
def _capture_resources(self, vts):
"""Given a list of VersionedTargets, capture DirectoryDigests for all of them.
:returns: A list of tuples of VersionedTargets and digests for their content.
"""
# Capture Snapshots for each directory, using an optional adjacent digest. Create the digest
# afterward if it does not exist.
buildroot = get_buildroot()
snapshots = self.context._scheduler.capture_snapshots(
tuple(
PathGlobsAndRoot(
PathGlobs([os.path.join(fast_relpath(vt.results_dir, buildroot), "**")]),
buildroot,
Digest.load(vt.current_results_dir),
)
for vt in vts
)
)
result = []
for vt, snapshot in zip(vts, snapshots):
snapshot.digest.dump(vt.current_results_dir)
result.append((vt, snapshot.digest))
return result
@abstractmethod
def find_all_relevant_resources_targets(self):
"""Returns an iterable over all the relevant resources targets in the context."""
def create_invalidation_strategy(self):
"""Creates a custom fingerprint strategy for determining invalid resources targets.
:returns: A custom fingerprint strategy to use for determining invalid targets, or `None` to
use the standard target payload.
:rtype: :class:`pants.base.fingerprint_strategy.FingerprintStrategy`
"""
return None
@abstractmethod
def prepare_resources(self, target, chroot):
"""Prepares the resources associated with `target` in the given `chroot`.
:param target: The target to prepare resource files for.
:type target: :class:`pants.build_graph.target.Target`
:param string chroot: An existing, clean chroot dir to generate `target`'s resources to.
"""
|
{
"content_hash": "b1872f963cd57a7c55bc7aca17a630da",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 100,
"avg_line_length": 37.6,
"alnum_prop": 0.6340868794326241,
"repo_name": "tdyas/pants",
"id": "9e6f06e337d86e22cfcb20bce284fefe8a6137fb",
"size": "4644",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/pants/backend/jvm/tasks/resources_task.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "655"
},
{
"name": "C++",
"bytes": "2010"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Dockerfile",
"bytes": "5596"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "2765"
},
{
"name": "HTML",
"bytes": "44381"
},
{
"name": "Java",
"bytes": "518180"
},
{
"name": "JavaScript",
"bytes": "22906"
},
{
"name": "Python",
"bytes": "7955590"
},
{
"name": "Rust",
"bytes": "1031208"
},
{
"name": "Scala",
"bytes": "106520"
},
{
"name": "Shell",
"bytes": "109904"
},
{
"name": "Starlark",
"bytes": "502255"
},
{
"name": "Thrift",
"bytes": "2953"
}
],
"symlink_target": ""
}
|
"""
No test access variant exists, so we verify that access is denied.
"""
from .common import CommonTestCase
class DenyTest(CommonTestCase):
def test_that_client_has_session(self):
self.assertTrue(self.client.session)
|
{
"content_hash": "c1897ba8c7f2a539ea3921f3c3fc6e25",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 75,
"avg_line_length": 26.88888888888889,
"alnum_prop": 0.756198347107438,
"repo_name": "tigrus/dadata-python",
"id": "b32153a07f5bb584a0e46944d0cd1ac3a1719a7c",
"size": "322",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_connect.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15941"
}
],
"symlink_target": ""
}
|
import logging
import os
import sys
import tempfile
import unittest
# TODO: No idea why pytype cannot find names from this module.
# pytype: disable=name-error
import iree.compiler.tools.tf
if not iree.compiler.tools.tf.is_available():
print(f"Skipping test {__file__} because the IREE TensorFlow compiler "
f"is not installed")
sys.exit(0)
import tensorflow as tf
class SimpleArithmeticModule(tf.Module):
@tf.function(input_signature=[
tf.TensorSpec([4], tf.float32),
tf.TensorSpec([4], tf.float32)
])
def simple_mul(self, a, b):
return a * b
@tf.function(input_signature=[
tf.TensorSpec([128, 3072], tf.float32),
tf.TensorSpec([3072, 256], tf.float32),
])
def simple_matmul(self, a, b):
return tf.matmul(a, b)
# TODO(laurenzo): More test cases needed (may need additional files).
# Specifically, figure out how to test v1 models.
class TfCompilerTest(tf.test.TestCase):
def testImportSavedModel(self):
import_mlir = iree.compiler.tools.tf.compile_saved_model(
self.smdir, import_only=True, output_generic_mlir=True).decode("utf-8")
self.assertIn("sym_name = \"simple_matmul\"", import_mlir)
def testCompileSavedModel(self):
binary = iree.compiler.tools.tf.compile_saved_model(
self.smdir,
target_backends=iree.compiler.tools.tf.DEFAULT_TESTING_BACKENDS)
logging.info("Compiled len: %d", len(binary))
self.assertIn(b"simple_matmul", binary)
self.assertIn(b"simple_mul", binary)
def testCompileModule(self):
binary = iree.compiler.tools.tf.compile_module(
self.m, target_backends=iree.compiler.tools.tf.DEFAULT_TESTING_BACKENDS)
logging.info("Compiled len: %d", len(binary))
self.assertIn(b"simple_matmul", binary)
self.assertIn(b"simple_mul", binary)
@classmethod
def setUpClass(cls):
cls.m = SimpleArithmeticModule()
cls.tempdir = tempfile.TemporaryDirectory()
cls.smdir = os.path.join(cls.tempdir.name, "arith.sm")
tf.saved_model.save(
cls.m,
cls.smdir,
options=tf.saved_model.SaveOptions(save_debug_info=True))
@classmethod
def tearDownClass(cls):
cls.tempdir.cleanup()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
tf.test.main()
|
{
"content_hash": "cd746dac47b0b19a2ec69129b0b8cfef",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 80,
"avg_line_length": 29.38961038961039,
"alnum_prop": 0.692001767565179,
"repo_name": "iree-org/iree",
"id": "fc1ee71660391b3f1369b4cb1e227a8f4eb153c6",
"size": "2481",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "compiler/src/iree/compiler/API/python/test/tools/compiler_tf_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "23010"
},
{
"name": "Batchfile",
"bytes": "353"
},
{
"name": "C",
"bytes": "3830546"
},
{
"name": "C++",
"bytes": "8161374"
},
{
"name": "CMake",
"bytes": "899403"
},
{
"name": "Dockerfile",
"bytes": "28245"
},
{
"name": "GLSL",
"bytes": "2629"
},
{
"name": "HTML",
"bytes": "31018"
},
{
"name": "Java",
"bytes": "31697"
},
{
"name": "JavaScript",
"bytes": "18714"
},
{
"name": "MLIR",
"bytes": "5606822"
},
{
"name": "NASL",
"bytes": "3852"
},
{
"name": "PowerShell",
"bytes": "7893"
},
{
"name": "Python",
"bytes": "1143963"
},
{
"name": "Shell",
"bytes": "248374"
},
{
"name": "Starlark",
"bytes": "600260"
}
],
"symlink_target": ""
}
|
from ctypes import *
import ctypes.util
import usb.util
from usb._debug import methodtrace
import logging
import errno
import sys
import usb._interop as _interop
import usb.util as util
import usb.libloader
from usb.core import USBError
__author__ = 'Wander Lairson Costa'
__all__ = [
    'get_backend',
    'OPENUSB_SUCCESS',
    'OPENUSB_PLATFORM_FAILURE',
    'OPENUSB_NO_RESOURCES',
    'OPENUSB_NO_BANDWIDTH',
    'OPENUSB_NOT_SUPPORTED',
    'OPENUSB_HC_HARDWARE_ERROR',
    'OPENUSB_INVALID_PERM',
    'OPENUSB_BUSY',
    'OPENUSB_BADARG',
    'OPENUSB_NOACCESS',
    'OPENUSB_PARSE_ERROR',
    'OPENUSB_UNKNOWN_DEVICE',
    'OPENUSB_INVALID_HANDLE',
    'OPENUSB_SYS_FUNC_FAILURE',
    'OPENUSB_NULL_LIST',
    'OPENUSB_CB_CONTINUE',
    'OPENUSB_CB_TERMINATE',
    'OPENUSB_IO_STALL',
    'OPENUSB_IO_CRC_ERROR',
    'OPENUSB_IO_DEVICE_HUNG',
    'OPENUSB_IO_REQ_TOO_BIG',
    'OPENUSB_IO_BIT_STUFFING',
    'OPENUSB_IO_UNEXPECTED_PID',
    'OPENUSB_IO_DATA_OVERRUN',
    'OPENUSB_IO_DATA_UNDERRUN',
    'OPENUSB_IO_BUFFER_OVERRUN',
    'OPENUSB_IO_BUFFER_UNDERRUN',
    'OPENUSB_IO_PID_CHECK_FAILURE',
    'OPENUSB_IO_DATA_TOGGLE_MISMATCH',
    'OPENUSB_IO_TIMEOUT',
    'OPENUSB_IO_CANCELED',
]
_logger = logging.getLogger('usb.backend.openusb')
OPENUSB_SUCCESS = 0
OPENUSB_PLATFORM_FAILURE = -1
OPENUSB_NO_RESOURCES = -2
OPENUSB_NO_BANDWIDTH = -3
OPENUSB_NOT_SUPPORTED = -4
OPENUSB_HC_HARDWARE_ERROR = -5
OPENUSB_INVALID_PERM = -6
OPENUSB_BUSY = -7
OPENUSB_BADARG = -8
OPENUSB_NOACCESS = -9
OPENUSB_PARSE_ERROR = -10
OPENUSB_UNKNOWN_DEVICE = -11
OPENUSB_INVALID_HANDLE = -12
OPENUSB_SYS_FUNC_FAILURE = -13
OPENUSB_NULL_LIST = -14
OPENUSB_CB_CONTINUE = -20
OPENUSB_CB_TERMINATE = -21
OPENUSB_IO_STALL = -50
OPENUSB_IO_CRC_ERROR = -51
OPENUSB_IO_DEVICE_HUNG = -52
OPENUSB_IO_REQ_TOO_BIG = -53
OPENUSB_IO_BIT_STUFFING = -54
OPENUSB_IO_UNEXPECTED_PID = -55
OPENUSB_IO_DATA_OVERRUN = -56
OPENUSB_IO_DATA_UNDERRUN = -57
OPENUSB_IO_BUFFER_OVERRUN = -58
OPENUSB_IO_BUFFER_UNDERRUN = -59
OPENUSB_IO_PID_CHECK_FAILURE = -60
OPENUSB_IO_DATA_TOGGLE_MISMATCH = -61
OPENUSB_IO_TIMEOUT = -62
OPENUSB_IO_CANCELED = -63
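# Map each OpenUSB status code to the closest POSIX errno value (None where no
# reasonable equivalent exists); used by _check() when raising USBError.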
_openusb_errno = {
OPENUSB_SUCCESS:None,
OPENUSB_PLATFORM_FAILURE:None,
OPENUSB_NO_RESOURCES:errno.__dict__.get('ENOMEM', None),
OPENUSB_NO_BANDWIDTH:None,
OPENUSB_NOT_SUPPORTED:errno.__dict__.get('ENOSYS', None),
OPENUSB_HC_HARDWARE_ERROR:errno.__dict__.get('EIO', None),
OPENUSB_INVALID_PERM:errno.__dict__.get('EBADF', None),
OPENUSB_BUSY:errno.__dict__.get('EBUSY', None),
OPENUSB_BADARG:errno.__dict__.get('EINVAL', None),
OPENUSB_NOACCESS:errno.__dict__.get('EACCES', None),
OPENUSB_PARSE_ERROR:None,
OPENUSB_UNKNOWN_DEVICE:errno.__dict__.get('ENODEV', None),
OPENUSB_INVALID_HANDLE:errno.__dict__.get('EINVAL', None),
OPENUSB_SYS_FUNC_FAILURE:None,
OPENUSB_NULL_LIST:None,
OPENUSB_CB_CONTINUE:None,
OPENUSB_CB_TERMINATE:None,
OPENUSB_IO_STALL:errno.__dict__.get('EIO', None),
OPENUSB_IO_CRC_ERROR:errno.__dict__.get('EIO', None),
OPENUSB_IO_DEVICE_HUNG:errno.__dict__.get('EIO', None),
OPENUSB_IO_REQ_TOO_BIG:errno.__dict__.get('E2BIG', None),
OPENUSB_IO_BIT_STUFFING:None,
OPENUSB_IO_UNEXPECTED_PID:errno.__dict__.get('ESRCH', None),
OPENUSB_IO_DATA_OVERRUN:errno.__dict__.get('EOVERFLOW', None),
OPENUSB_IO_DATA_UNDERRUN:None,
OPENUSB_IO_BUFFER_OVERRUN:errno.__dict__.get('EOVERFLOW', None),
OPENUSB_IO_BUFFER_UNDERRUN:None,
OPENUSB_IO_PID_CHECK_FAILURE:None,
OPENUSB_IO_DATA_TOGGLE_MISMATCH:None,
OPENUSB_IO_TIMEOUT:errno.__dict__.get('ETIMEDOUT', None),
OPENUSB_IO_CANCELED:errno.__dict__.get('EINTR', None)
}
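# ctypes mirrors of the descriptor and request structures defined by the
# OpenUSB C API.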
class _usb_endpoint_desc(Structure):
_fields_ = [('bLength', c_uint8),
('bDescriptorType', c_uint8),
('bEndpointAddress', c_uint8),
('bmAttributes', c_uint8),
('wMaxPacketSize', c_uint16),
('bInterval', c_uint8),
('bRefresh', c_uint8),
('bSynchAddress', c_uint8)]
class _usb_interface_desc(Structure):
_fields_ = [('bLength', c_uint8),
('bDescriptorType', c_uint8),
('bInterfaceNumber', c_uint8),
('bAlternateSetting', c_uint8),
('bNumEndpoints', c_uint8),
('bInterfaceClass', c_uint8),
('bInterfaceSubClass', c_uint8),
('bInterfaceProtocol', c_uint8),
('iInterface', c_uint8)]
class _usb_config_desc(Structure):
_fields_ = [('bLength', c_uint8),
('bDescriptorType', c_uint8),
('wTotalLength', c_uint16),
('bNumInterfaces', c_uint8),
('bConfigurationValue', c_uint8),
('iConfiguration', c_uint8),
('bmAttributes', c_uint8),
('bMaxPower', c_uint8)]
class _usb_device_desc(Structure):
_fields_ = [('bLength', c_uint8),
('bDescriptorType', c_uint8),
('bcdUSB', c_uint16),
('bDeviceClass', c_uint8),
('bDeviceSubClass', c_uint8),
('bDeviceProtocol', c_uint8),
('bMaxPacketSize0', c_uint8),
('idVendor', c_uint16),
('idProduct', c_uint16),
('bcdDevice', c_uint16),
('iManufacturer', c_uint8),
('iProduct', c_uint8),
('iSerialNumber', c_uint8),
('bNumConfigurations', c_uint8)]
class _openusb_request_result(Structure):
_fields_ = [('status', c_int32),
('transferred_bytes', c_uint32)]
class _openusb_ctrl_request(Structure):
def __init__(self):
super(_openusb_ctrl_request, self).__init__()
self.setup.bmRequestType = 0
self.setup.bRequest = 0
self.setup.wValue = 0
self.setup.wIndex = 0
self.payload = None
self.length = 0
self.timeout = 0
self.flags = 0
self.result.status = 0
self.result.transferred_bytes = 0
self.next = None
class _openusb_ctrl_setup(Structure):
_fields_ = [('bmRequestType', c_uint8),
('bRequest', c_uint8),
('wValue', c_uint16),
('wIndex', c_uint16)]
_fields_ = [('setup', _openusb_ctrl_setup),
('payload', POINTER(c_uint8)),
('length', c_uint32),
('timeout', c_uint32),
('flags', c_uint32),
('result', _openusb_request_result),
('next', c_void_p)]
class _openusb_intr_request(Structure):
_fields_ = [('interval', c_uint16),
('payload', POINTER(c_uint8)),
('length', c_uint32),
('timeout', c_uint32),
('flags', c_uint32),
('result', _openusb_request_result),
('next', c_void_p)]
class _openusb_bulk_request(Structure):
_fields_ = [('payload', POINTER(c_uint8)),
('length', c_uint32),
('timeout', c_uint32),
('flags', c_uint32),
('result', _openusb_request_result),
('next', c_void_p)]
class _openusb_isoc_pkts(Structure):
class _openusb_isoc_packet(Structure):
_fields_ = [('payload', POINTER(c_uint8)),
('length', c_uint32)]
_fields_ = [('num_packets', c_uint32),
('packets', POINTER(_openusb_isoc_packet))]
class _openusb_isoc_request(Structure):
_fields_ = [('start_frame', c_uint32),
('flags', c_uint32),
('pkts', _openusb_isoc_pkts),
('isoc_results', POINTER(_openusb_request_result)),
('isoc_status', c_int32),
('next', c_void_p)]
_openusb_devid = c_uint64
_openusb_busid = c_uint64
_openusb_handle = c_uint64
_openusb_dev_handle = c_uint64
_lib = None
_ctx = None
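# Lazily initialized module singletons: the loaded OpenUSB library and its
# global context (see get_backend()).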
def _load_library(find_library=None):
# FIXME: cygwin name is "openusb"?
# (that's what the original _load_library() function
# would have searched for)
return usb.libloader.load_locate_library(
('openusb',), 'openusb', "OpenUSB library", find_library=find_library
)
def _setup_prototypes(lib):
# int32_t openusb_init(uint32_t flags , openusb_handle_t *handle);
lib.openusb_init.argtypes = [c_uint32, POINTER(_openusb_handle)]
lib.openusb_init.restype = c_int32
# void openusb_fini(openusb_handle_t handle );
lib.openusb_fini.argtypes = [_openusb_handle]
# uint32_t openusb_get_busid_list(openusb_handle_t handle,
# openusb_busid_t **busids,
# uint32_t *num_busids);
lib.openusb_get_busid_list.argtypes = [
_openusb_handle,
POINTER(POINTER(_openusb_busid)),
POINTER(c_uint32)
]
# void openusb_free_busid_list(openusb_busid_t * busids);
lib.openusb_free_busid_list.argtypes = [POINTER(_openusb_busid)]
# uint32_t openusb_get_devids_by_bus(openusb_handle_t handle,
# openusb_busid_t busid,
# openusb_devid_t **devids,
# uint32_t *num_devids);
lib.openusb_get_devids_by_bus.argtypes = [
_openusb_handle,
_openusb_busid,
POINTER(POINTER(_openusb_devid)),
POINTER(c_uint32)
]
lib.openusb_get_devids_by_bus.restype = c_int32
# void openusb_free_devid_list(openusb_devid_t * devids);
lib.openusb_free_devid_list.argtypes = [POINTER(_openusb_devid)]
# int32_t openusb_open_device(openusb_handle_t handle,
# openusb_devid_t devid ,
# uint32_t flags,
# openusb_dev_handle_t *dev);
lib.openusb_open_device.argtypes = [
_openusb_handle,
_openusb_devid,
c_uint32,
POINTER(_openusb_dev_handle)
]
lib.openusb_open_device.restype = c_int32
# int32_t openusb_close_device(openusb_dev_handle_t dev);
lib.openusb_close_device.argtypes = [_openusb_dev_handle]
lib.openusb_close_device.restype = c_int32
# int32_t openusb_set_configuration(openusb_dev_handle_t dev,
# uint8_t cfg);
lib.openusb_set_configuration.argtypes = [_openusb_dev_handle, c_uint8]
lib.openusb_set_configuration.restype = c_int32
# int32_t openusb_get_configuration(openusb_dev_handle_t dev,
# uint8_t *cfg);
lib.openusb_get_configuration.argtypes = [_openusb_dev_handle, POINTER(c_uint8)]
lib.openusb_get_configuration.restype = c_int32
# int32_t openusb_claim_interface(openusb_dev_handle_t dev,
# uint8_t ifc,
# openusb_init_flag_t flags);
lib.openusb_claim_interface.argtypes = [
_openusb_dev_handle,
c_uint8,
c_int
]
lib.openusb_claim_interface.restype = c_int32
# int32_t openusb_release_interface(openusb_dev_handle_t dev,
# uint8_t ifc);
lib.openusb_release_interface.argtypes = [
_openusb_dev_handle,
c_uint8
]
lib.openusb_release_interface.restype = c_int32
# int32_topenusb_set_altsetting(openusb_dev_handle_t dev,
# uint8_t ifc,
# uint8_t alt);
lib.openusb_set_altsetting.argtypes = [
_openusb_dev_handle,
c_uint8,
c_uint8
]
lib.openusb_set_altsetting.restype = c_int32
# int32_t openusb_reset(openusb_dev_handle_t dev);
lib.openusb_reset.argtypes = [_openusb_dev_handle]
lib.openusb_reset.restype = c_int32
# int32_t openusb_parse_device_desc(openusb_handle_t handle,
# openusb_devid_t devid,
# uint8_t *buffer,
# uint16_t buflen,
# usb_device_desc_t *devdesc);
lib.openusb_parse_device_desc.argtypes = [
_openusb_handle,
_openusb_devid,
POINTER(c_uint8),
c_uint16,
POINTER(_usb_device_desc)
]
lib.openusb_parse_device_desc.restype = c_int32
# int32_t openusb_parse_config_desc(openusb_handle_t handle,
# openusb_devid_t devid,
# uint8_t *buffer,
# uint16_t buflen,
# uint8_t cfgidx,
# usb_config_desc_t *cfgdesc);
lib.openusb_parse_config_desc.argtypes = [
_openusb_handle,
_openusb_devid,
POINTER(c_uint8),
c_uint16,
c_uint8,
POINTER(_usb_config_desc)
]
lib.openusb_parse_config_desc.restype = c_int32
# int32_t openusb_parse_interface_desc(openusb_handle_t handle,
# openusb_devid_t devid,
# uint8_t *buffer,
# uint16_t buflen,
# uint8_t cfgidx,
# uint8_t ifcidx,
# uint8_t alt,
# usb_interface_desc_t *ifcdesc);
lib.openusb_parse_interface_desc.argtypes = [
_openusb_handle,
_openusb_devid,
POINTER(c_uint8),
c_uint16,
c_uint8,
c_uint8,
c_uint8,
POINTER(_usb_interface_desc)
]
lib.openusb_parse_interface_desc.restype = c_int32
# int32_t openusb_parse_endpoint_desc(openusb_handle_t handle,
# openusb_devid_t devid,
# uint8_t *buffer,
# uint16_t buflen,
# uint8_t cfgidx,
# uint8_t ifcidx,
# uint8_t alt,
# uint8_t eptidx,
# usb_endpoint_desc_t *eptdesc);
lib.openusb_parse_endpoint_desc.argtypes = [
_openusb_handle,
_openusb_devid,
POINTER(c_uint8),
c_uint16,
c_uint8,
c_uint8,
c_uint8,
c_uint8,
POINTER(_usb_endpoint_desc)
]
    lib.openusb_parse_endpoint_desc.restype = c_int32
# const char *openusb_strerror(int32_t error );
lib.openusb_strerror.argtypes = [c_int32]
lib.openusb_strerror.restype = c_char_p
# int32_t openusb_ctrl_xfer(openusb_dev_handle_t dev,
# uint8_t ifc,
# uint8_t ept,
# openusb_ctrl_request_t *ctrl);
lib.openusb_ctrl_xfer.argtypes = [
_openusb_dev_handle,
c_uint8,
c_uint8,
POINTER(_openusb_ctrl_request)
]
lib.openusb_ctrl_xfer.restype = c_int32
# int32_t openusb_intr_xfer(openusb_dev_handle_t dev,
# uint8_t ifc,
# uint8_t ept,
# openusb_intr_request_t *intr);
lib.openusb_intr_xfer.argtypes = [
_openusb_dev_handle,
c_uint8,
c_uint8,
POINTER(_openusb_intr_request)
]
    lib.openusb_intr_xfer.restype = c_int32
# int32_t openusb_bulk_xfer(openusb_dev_handle_t dev,
# uint8_t ifc,
# uint8_t ept,
# openusb_bulk_request_t *bulk);
lib.openusb_bulk_xfer.argtypes = [
_openusb_dev_handle,
c_uint8,
c_uint8,
POINTER(_openusb_bulk_request)
]
lib.openusb_bulk_xfer.restype = c_int32
# int32_t openusb_isoc_xfer(openusb_dev_handle_t dev,
# uint8_t ifc,
# uint8_t ept,
# openusb_isoc_request_t *isoc);
lib.openusb_isoc_xfer.argtypes = [
_openusb_dev_handle,
c_uint8,
c_uint8,
POINTER(_openusb_isoc_request)
]
lib.openusb_isoc_xfer.restype = c_int32
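# Translate a nonzero OpenUSB return code into a USBError, mapping the code to
# an errno value via _openusb_errno.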
def _check(ret):
if hasattr(ret, 'value'):
ret = ret.value
if ret != 0:
raise USBError(_lib.openusb_strerror(ret), ret, _openusb_errno[ret])
return ret
class _Context(object):
def __init__(self):
self.handle = _openusb_handle()
_check(_lib.openusb_init(0, byref(self.handle)))
def __del__(self):
_lib.openusb_fini(self.handle)
class _BusIterator(object):
def __init__(self):
self.buslist = POINTER(_openusb_busid)()
num_busids = c_uint32()
_check(_lib.openusb_get_busid_list(_ctx.handle,
byref(self.buslist),
byref(num_busids)))
self.num_busids = num_busids.value
def __iter__(self):
for i in range(self.num_busids):
yield self.buslist[i]
def __del__(self):
_lib.openusb_free_busid_list(self.buslist)
class _DevIterator(object):
def __init__(self, busid):
self.devlist = POINTER(_openusb_devid)()
num_devids = c_uint32()
_check(_lib.openusb_get_devids_by_bus(_ctx.handle,
busid,
byref(self.devlist),
byref(num_devids)))
self.num_devids = num_devids.value
def __iter__(self):
for i in range(self.num_devids):
yield self.devlist[i]
def __del__(self):
_lib.openusb_free_devid_list(self.devlist)
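# Backend implementation adapting the OpenUSB C API to PyUSB's IBackend
# interface.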
class _OpenUSB(usb.backend.IBackend):
@methodtrace(_logger)
def enumerate_devices(self):
for bus in _BusIterator():
for devid in _DevIterator(bus):
yield devid
@methodtrace(_logger)
def get_device_descriptor(self, dev):
desc = _usb_device_desc()
_check(_lib.openusb_parse_device_desc(_ctx.handle,
dev,
None,
0,
byref(desc)))
desc.bus = None
desc.address = None
desc.port_number = None
return desc
@methodtrace(_logger)
def get_configuration_descriptor(self, dev, config):
desc = _usb_config_desc()
_check(_lib.openusb_parse_config_desc(_ctx.handle,
dev,
None,
0,
config,
byref(desc)))
desc.extra_descriptors = None
return desc
@methodtrace(_logger)
def get_interface_descriptor(self, dev, intf, alt, config):
desc = _usb_interface_desc()
_check(_lib.openusb_parse_interface_desc(_ctx.handle,
dev,
None,
0,
config,
intf,
alt,
byref(desc)))
desc.extra_descriptors = None
return desc
@methodtrace(_logger)
def get_endpoint_descriptor(self, dev, ep, intf, alt, config):
desc = _usb_endpoint_desc()
_check(_lib.openusb_parse_endpoint_desc(_ctx.handle,
dev,
None,
0,
config,
intf,
alt,
ep,
byref(desc)))
desc.extra_descriptors = None
return desc
@methodtrace(_logger)
def open_device(self, dev):
handle = _openusb_dev_handle()
_check(_lib.openusb_open_device(_ctx.handle, dev, 0, byref(handle)))
return handle
@methodtrace(_logger)
def close_device(self, dev_handle):
_lib.openusb_close_device(dev_handle)
@methodtrace(_logger)
def set_configuration(self, dev_handle, config_value):
_check(_lib.openusb_set_configuration(dev_handle, config_value))
@methodtrace(_logger)
def get_configuration(self, dev_handle):
config = c_uint8()
_check(_lib.openusb_get_configuration(dev_handle, byref(config)))
return config.value
@methodtrace(_logger)
def set_interface_altsetting(self, dev_handle, intf, altsetting):
_check(_lib.openusb_set_altsetting(dev_handle, intf, altsetting))
@methodtrace(_logger)
def claim_interface(self, dev_handle, intf):
_check(_lib.openusb_claim_interface(dev_handle, intf, 0))
@methodtrace(_logger)
def release_interface(self, dev_handle, intf):
_lib.openusb_release_interface(dev_handle, intf)
@methodtrace(_logger)
def bulk_write(self, dev_handle, ep, intf, data, timeout):
request = _openusb_bulk_request()
memset(byref(request), 0, sizeof(request))
payload, request.length = data.buffer_info()
request.payload = cast(payload, POINTER(c_uint8))
request.timeout = timeout
_check(_lib.openusb_bulk_xfer(dev_handle, intf, ep, byref(request)))
return request.result.transferred_bytes
@methodtrace(_logger)
def bulk_read(self, dev_handle, ep, intf, buff, timeout):
request = _openusb_bulk_request()
memset(byref(request), 0, sizeof(request))
payload, request.length = buff.buffer_info()
request.payload = cast(payload, POINTER(c_uint8))
request.timeout = timeout
_check(_lib.openusb_bulk_xfer(dev_handle, intf, ep, byref(request)))
return request.result.transferred_bytes
@methodtrace(_logger)
def intr_write(self, dev_handle, ep, intf, data, timeout):
request = _openusb_intr_request()
memset(byref(request), 0, sizeof(request))
payload, request.length = data.buffer_info()
request.payload = cast(payload, POINTER(c_uint8))
request.timeout = timeout
_check(_lib.openusb_intr_xfer(dev_handle, intf, ep, byref(request)))
return request.result.transferred_bytes
@methodtrace(_logger)
def intr_read(self, dev_handle, ep, intf, buff, timeout):
request = _openusb_intr_request()
memset(byref(request), 0, sizeof(request))
payload, request.length = buff.buffer_info()
request.payload = cast(payload, POINTER(c_uint8))
request.timeout = timeout
_check(_lib.openusb_intr_xfer(dev_handle, intf, ep, byref(request)))
return request.result.transferred_bytes
# TODO: implement isochronous
# @methodtrace(_logger)
# def iso_write(self, dev_handle, ep, intf, data, timeout):
# pass
# @methodtrace(_logger)
# def iso_read(self, dev_handle, ep, intf, size, timeout):
# pass
@methodtrace(_logger)
def ctrl_transfer(self,
dev_handle,
bmRequestType,
bRequest,
wValue,
wIndex,
data,
timeout):
request = _openusb_ctrl_request()
request.setup.bmRequestType = bmRequestType
request.setup.bRequest = bRequest
        request.setup.wValue = wValue
        request.setup.wIndex = wIndex
request.timeout = timeout
direction = usb.util.ctrl_direction(bmRequestType)
payload, request.length = data.buffer_info()
request.length *= data.itemsize
request.payload = cast(payload, POINTER(c_uint8))
_check(_lib.openusb_ctrl_xfer(dev_handle, 0, 0, byref(request)))
return request.result.transferred_bytes
@methodtrace(_logger)
def reset_device(self, dev_handle):
_check(_lib.openusb_reset(dev_handle))
@methodtrace(_logger)
def clear_halt(self, dev_handle, ep):
bmRequestType = util.build_request_type(
util.CTRL_OUT,
util.CTRL_TYPE_STANDARD,
util.CTRL_RECIPIENT_ENDPOINT)
self.ctrl_transfer(
dev_handle,
bmRequestType,
0x03,
0,
ep,
_interop.as_array(),
1000)
def get_backend(find_library=None):
try:
global _lib, _ctx
if _lib is None:
_lib = _load_library(find_library)
_setup_prototypes(_lib)
_ctx = _Context()
return _OpenUSB()
    except usb.libloader.LibraryException:
# exception already logged (if any)
_logger.error('Error loading OpenUSB backend', exc_info=False)
return None
except Exception:
_logger.error('Error loading OpenUSB backend', exc_info=True)
return None
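# Illustrative usage sketch (assumption, not part of the original file):
#
#     import usb.core
#     from usb.backend import openusb
#     backend = openusb.get_backend()
#     if backend is not None:
#         dev = usb.core.find(backend=backend)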
|
{
"content_hash": "0913049bfb1946c819dd462b75320e51",
"timestamp": "",
"source": "github",
"line_count": 718,
"max_line_length": 84,
"avg_line_length": 36.68941504178273,
"alnum_prop": 0.515051436814334,
"repo_name": "1upon0/rfid-auth-system",
"id": "c9b31c1504ee0fce59832ca433a958fcec924ba8",
"size": "27809",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "GUI/printer/pyusb-1.0.0b2/build/lib.linux-x86_64-2.7/usb/backend/openusb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "110445"
},
{
"name": "C",
"bytes": "848319"
},
{
"name": "C++",
"bytes": "257556"
},
{
"name": "CSS",
"bytes": "142637"
},
{
"name": "Gnuplot",
"bytes": "1125"
},
{
"name": "HTML",
"bytes": "921766"
},
{
"name": "JavaScript",
"bytes": "400339"
},
{
"name": "Makefile",
"bytes": "119274"
},
{
"name": "Python",
"bytes": "2308656"
},
{
"name": "QMake",
"bytes": "987"
},
{
"name": "Shell",
"bytes": "6209"
}
],
"symlink_target": ""
}
|
"""
.. py:currentmodule:: pymcxray.FileFormat.Results.test_XraySpectraRegionEmitted
.. moduleauthor:: Hendrix Demers <hendrix.demers@mail.mcgill.ca>
Tests for the module `XraySpectraRegionEmitted`.
"""
# Script information for the file.
__author__ = "Hendrix Demers (hendrix.demers@mail.mcgill.ca)"
__version__ = "0.1"
__date__ = "Feb 12, 2015"
__copyright__ = "Copyright (c) 2015 Hendrix Demers"
__license__ = "GPL 3"
# Standard library modules.
import unittest
import os.path
# Third party modules.
from nose import SkipTest
# Local modules.
# Project modules
from pymcxray.FileFormat.Results.XraySpectraRegionEmitted import XraySpectraRegionEmitted
# Globals and constants variables.
class TestXraySpectraRegionEmitted(unittest.TestCase):
"""
TestCase class for the module `XraySpectraRegionEmitted`.
"""
def setUp(self):
"""
Setup method.
"""
unittest.TestCase.setUp(self)
self.testDataPath = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../test_data/results"))
def tearDown(self):
"""
Teardown method.
"""
unittest.TestCase.tearDown(self)
def testSkeleton(self):
"""
First test to check if the testcase is working with the testing framework.
"""
#self.fail("Test if the testcase is working.")
def test_readRegion_0_30kV(self):
"""
        Tests for method `read` for region 0 at 30 kV.
"""
filename = "SimulationNanoparticleAg_Au_SpectraPerElectron_1_srkeV_Region_0.csv"
filepath = os.path.join(self.testDataPath, filename)
if not os.path.isfile(filepath):
raise SkipTest
spectra = XraySpectraRegionEmitted()
spectra.path = self.testDataPath
spectra.basename = "SimulationNanoparticleAg_Au"
spectra.read()
self.assertEqual(6000, len(spectra.energies_keV))
self.assertEqual(6000, len(spectra.total_1_ekeVsr))
self.assertEqual(6000, len(spectra.characteristic_1_ekeVsr))
self.assertEqual(6000, len(spectra.bremsstrahlung_1_ekeVsr))
self.assertAlmostEqual(0.0025, spectra.energies_keV[0], 6)
self.assertAlmostEqual(29.9975, spectra.energies_keV[-1], 6)
self.assertAlmostEqual(0.0, spectra.total_1_ekeVsr[0], 12)
self.assertAlmostEqual(0.0, spectra.total_1_ekeVsr[-1], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[0], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[-1], 12)
self.assertAlmostEqual(0.0, spectra.bremsstrahlung_1_ekeVsr[0], 12)
self.assertAlmostEqual(0.0, spectra.bremsstrahlung_1_ekeVsr[-1], 12)
# 58 0.2825, 0.0, 0.0, 0.0
self.assertAlmostEqual(0.2825, spectra.energies_keV[56], 6)
self.assertAlmostEqual(0.0, spectra.total_1_ekeVsr[56], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[56], 12)
self.assertAlmostEqual(0.0, spectra.bremsstrahlung_1_ekeVsr[56], 12)
# 426 2.1225, 1.02186e-006, 8.96679e-007, 1.25179e-007
self.assertAlmostEqual(2.1225, spectra.energies_keV[424], 6)
self.assertAlmostEqual(0.0, spectra.total_1_ekeVsr[424], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[424], 12)
self.assertAlmostEqual(0.0, spectra.bremsstrahlung_1_ekeVsr[424], 12)
# 598 2.9825, 0.0319011, 0.031818, 8.31376e-005
self.assertAlmostEqual(2.9825, spectra.energies_keV[596], 6)
self.assertAlmostEqual(0.0, spectra.total_1_ekeVsr[596], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[596], 12)
self.assertAlmostEqual(0.0, spectra.bremsstrahlung_1_ekeVsr[596], 12)
#self.fail("Test if the testcase is working.")
def test_readRegion_1_30kV(self):
"""
        Tests for method `read` with regionID=1 at 30 kV.
"""
filename = "SimulationNanoparticleAg_Au_SpectraPerElectron_1_srkeV_Region_1.csv"
filepath = os.path.join(self.testDataPath, filename)
if not os.path.isfile(filepath):
raise SkipTest
spectra = XraySpectraRegionEmitted()
spectra.path = self.testDataPath
spectra.basename = "SimulationNanoparticleAg_Au"
spectra.read(regionID=1)
self.assertEqual(6000, len(spectra.energies_keV))
self.assertEqual(6000, len(spectra.total_1_ekeVsr))
self.assertEqual(6000, len(spectra.characteristic_1_ekeVsr))
self.assertEqual(6000, len(spectra.bremsstrahlung_1_ekeVsr))
self.assertAlmostEqual(0.0025, spectra.energies_keV[0], 6)
self.assertAlmostEqual(29.9975, spectra.energies_keV[-1], 6)
self.assertAlmostEqual(1.29326e-008, spectra.total_1_ekeVsr[0], 12)
self.assertAlmostEqual(0.0, spectra.total_1_ekeVsr[-1], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[0], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[-1], 12)
self.assertAlmostEqual(1.29326e-008, spectra.bremsstrahlung_1_ekeVsr[0], 12)
self.assertAlmostEqual(0.0, spectra.bremsstrahlung_1_ekeVsr[-1], 12)
# 58 0.2825, 1.12879e-006, 0, 1.12879e-006
self.assertAlmostEqual(0.2825, spectra.energies_keV[56], 6)
self.assertAlmostEqual(1.12879e-006, spectra.total_1_ekeVsr[56], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[56], 12)
self.assertAlmostEqual(1.12879e-006, spectra.bremsstrahlung_1_ekeVsr[56], 12)
# 426 2.1225, 1.02186e-006, 8.96679e-007, 1.25179e-007
self.assertAlmostEqual(2.1225, spectra.energies_keV[424], 6)
self.assertAlmostEqual(1.02186e-006, spectra.total_1_ekeVsr[424], 12)
self.assertAlmostEqual(8.96679e-007, spectra.characteristic_1_ekeVsr[424], 12)
self.assertAlmostEqual(1.25179e-007, spectra.bremsstrahlung_1_ekeVsr[424], 12)
# 598 2.9825, 8.64716e-008, 0, 8.64716e-008
self.assertAlmostEqual(2.9825, spectra.energies_keV[596], 6)
self.assertAlmostEqual(8.64716e-008, spectra.total_1_ekeVsr[596], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[596], 12)
self.assertAlmostEqual(8.64716e-008, spectra.bremsstrahlung_1_ekeVsr[596], 12)
#self.fail("Test if the testcase is working.")
def test_readRegion_2_30kV(self):
"""
        Tests for method `read` with regionID=2 at 30 kV.
"""
filename = "SimulationNanoparticleAg_Au_SpectraPerElectron_1_srkeV_Region_2.csv"
filepath = os.path.join(self.testDataPath, filename)
if not os.path.isfile(filepath):
raise SkipTest
spectra = XraySpectraRegionEmitted()
spectra.path = self.testDataPath
spectra.basename = "SimulationNanoparticleAg_Au"
spectra.read(regionID=2)
self.assertEqual(6000, len(spectra.energies_keV))
self.assertEqual(6000, len(spectra.total_1_ekeVsr))
self.assertEqual(6000, len(spectra.characteristic_1_ekeVsr))
self.assertEqual(6000, len(spectra.bremsstrahlung_1_ekeVsr))
self.assertAlmostEqual(0.0025, spectra.energies_keV[0], 6)
self.assertAlmostEqual(29.9975, spectra.energies_keV[-1], 6)
self.assertAlmostEqual(1.47934e-006, spectra.total_1_ekeVsr[0], 12)
self.assertAlmostEqual(0.0, spectra.total_1_ekeVsr[-1], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[0], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[-1], 12)
self.assertAlmostEqual(1.47934e-006, spectra.bremsstrahlung_1_ekeVsr[0], 12)
self.assertAlmostEqual(0.0, spectra.bremsstrahlung_1_ekeVsr[-1], 12)
# 0.2825, 0.000126745, 0, 0.000126745
self.assertAlmostEqual(0.2825, spectra.energies_keV[56], 6)
self.assertAlmostEqual(0.000126745, spectra.total_1_ekeVsr[56], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[56], 12)
self.assertAlmostEqual(0.000126745, spectra.bremsstrahlung_1_ekeVsr[56], 12)
# 426 2.1225, 8.165e-005, 0, 8.165e-005
self.assertAlmostEqual(2.1225, spectra.energies_keV[424], 6)
self.assertAlmostEqual(8.165e-005, spectra.total_1_ekeVsr[424], 12)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[424], 12)
self.assertAlmostEqual(8.165e-005, spectra.bremsstrahlung_1_ekeVsr[424], 12)
# 598 2.9825, 0.0319011, 0.031818, 8.31376e-005
self.assertAlmostEqual(2.9825, spectra.energies_keV[596], 6)
self.assertAlmostEqual(0.0319011, spectra.total_1_ekeVsr[596], 12)
self.assertAlmostEqual(0.031818, spectra.characteristic_1_ekeVsr[596], 12)
self.assertAlmostEqual(8.31376e-005, spectra.bremsstrahlung_1_ekeVsr[596], 12)
#self.fail("Test if the testcase is working.")
def test_readRegion_1_200kV(self):
"""
        Tests for method `read` with regionID=1 at 200 kV.
"""
filename = "SimulationNanoparticleAg_C_SpectraPerElectron_1_srkeV_Region_1.csv"
filepath = os.path.join(self.testDataPath, filename)
if not os.path.isfile(filepath):
raise SkipTest
spectra = XraySpectraRegionEmitted()
spectra.path = self.testDataPath
spectra.basename = "SimulationNanoparticleAg_C"
spectra.read(regionID=1)
self.assertEqual(40000, len(spectra.energies_keV))
self.assertEqual(40000, len(spectra.total_1_ekeVsr))
self.assertEqual(40000, len(spectra.characteristic_1_ekeVsr))
self.assertEqual(40000, len(spectra.bremsstrahlung_1_ekeVsr))
self.assertAlmostEqual(0.0025, spectra.energies_keV[0], 6)
self.assertAlmostEqual(199.998, spectra.energies_keV[-1], 6)
self.assertAlmostEqual(1.18553e-011, spectra.total_1_ekeVsr[0], 17)
self.assertAlmostEqual(0.0, spectra.total_1_ekeVsr[-1], 17)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[0], 17)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[-1], 17)
self.assertAlmostEqual(1.18553e-011, spectra.bremsstrahlung_1_ekeVsr[0], 17)
self.assertAlmostEqual(0.0, spectra.bremsstrahlung_1_ekeVsr[-1], 17)
# 58 0.2825, 2.11852e-005, 2.11838e-005, 1.33965e-009
self.assertAlmostEqual(0.2825, spectra.energies_keV[56], 6)
self.assertAlmostEqual(2.11852e-005, spectra.total_1_ekeVsr[56], 17)
self.assertAlmostEqual(2.11838e-005, spectra.characteristic_1_ekeVsr[56], 17)
self.assertAlmostEqual(1.33965e-009, spectra.bremsstrahlung_1_ekeVsr[56], 17)
# 426 2.1225, 7.34935e-009, 0, 7.34935e-009
self.assertAlmostEqual(2.1225, spectra.energies_keV[424], 6)
self.assertAlmostEqual(7.34935e-009, spectra.total_1_ekeVsr[424], 17)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[424], 17)
self.assertAlmostEqual(7.34935e-009, spectra.bremsstrahlung_1_ekeVsr[424], 17)
# 598 2.9825, 7.2569e-009, 0, 7.2569e-009
self.assertAlmostEqual(2.9825, spectra.energies_keV[596], 6)
self.assertAlmostEqual(7.2569e-009, spectra.total_1_ekeVsr[596], 17)
self.assertAlmostEqual(0.0, spectra.characteristic_1_ekeVsr[596], 17)
self.assertAlmostEqual(7.2569e-009, spectra.bremsstrahlung_1_ekeVsr[596], 17)
#self.fail("Test if the testcase is working.")
def test__indice(self):
"""
Tests for method `_indice`.
"""
filename = "SimulationNanoparticleAg_Au_SpectraPerElectron_1_srkeV_Region_1.csv"
filepath = os.path.join(self.testDataPath, filename)
if not os.path.isfile(filepath):
raise SkipTest
spectra = XraySpectraRegionEmitted()
spectra.path = self.testDataPath
spectra.basename = "SimulationNanoparticleAg_Au"
spectra.read(regionID=1)
self.assertEqual(0, spectra._indice(0.0))
self.assertEqual(0, spectra._indice(0.0024))
self.assertEqual(0, spectra._indice(0.0025))
self.assertEqual(0, spectra._indice(0.0026))
self.assertEqual(0, spectra._indice(0.0049))
self.assertEqual(0, spectra._indice(0.0050))
self.assertEqual(1, spectra._indice(0.0051))
self.assertEqual(56, spectra._indice(0.282))
self.assertEqual(424, spectra._indice(2.123))
self.assertEqual(596, spectra._indice(2.984))
self.assertEqual(5999, spectra._indice(29.999))
self.assertEqual(5999, spectra._indice(30.0))
self.assertRaises(IndexError, spectra._indice, 31.0)
self.assertAlmostEqual(1.12879e-006, spectra.totalValue_1_ekeVsr(0.282), 12)
self.assertAlmostEqual(1.02186e-006, spectra.totalValue_1_ekeVsr(2.123), 12)
self.assertAlmostEqual(8.64716e-008, spectra.totalValue_1_ekeVsr(2.984), 12)
self.assertAlmostEqual(0.0, spectra.characteristicValue_1_ekeVsr(0.282), 12)
self.assertAlmostEqual(8.96679e-007, spectra.characteristicValue_1_ekeVsr(2.123), 12)
self.assertAlmostEqual(0.0, spectra.characteristicValue_1_ekeVsr(2.984), 12)
self.assertAlmostEqual(1.12879e-006, spectra.bremsstrahlungValue_1_ekeVsr(0.282), 12)
self.assertAlmostEqual(1.25179e-007, spectra.bremsstrahlungValue_1_ekeVsr(2.123), 12)
self.assertAlmostEqual(8.64716e-008, spectra.bremsstrahlungValue_1_ekeVsr(2.984), 12)
# 66.82
self.assertRaises(IndexError, spectra._indice, 66.82)
spectra = XraySpectraRegionEmitted()
spectra.path = self.testDataPath
spectra.basename = "SimulationNanoparticleAg_C"
spectra.read(regionID=1)
self.assertEqual(13363, spectra._indice(66.82))
self.assertAlmostEqual(0.0, spectra.characteristicValue_1_ekeVsr(66.82), 12)
#self.fail("Test if the testcase is working.")
if __name__ == '__main__': #pragma: no cover
import nose
nose.runmodule()
|
{
"content_hash": "7e8063d4110a547cdbf98b1fcba6827b",
"timestamp": "",
"source": "github",
"line_count": 319,
"max_line_length": 114,
"avg_line_length": 43.38244514106583,
"alnum_prop": 0.6756268516511309,
"repo_name": "drix00/pymcxray",
"id": "1bda62fad712b1c510ca44d2b17bdb4c979f3ce9",
"size": "13861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymcxray/FileFormat/Results/test_XraySpectraRegionEmitted.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2299"
},
{
"name": "Python",
"bytes": "933760"
}
],
"symlink_target": ""
}
|
from . import fn
compile_fns_to_bash = fn.compile_fns_to_bash
compile_fn_spec_to_bash = fn.compile_fn_spec_to_bash
|
{
"content_hash": "21c466c9822ffbd51630557b5d2a8e39",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 52,
"avg_line_length": 23.4,
"alnum_prop": 0.7350427350427351,
"repo_name": "themattrix/bashup",
"id": "b9501fb6ab842c8828aadbb7ec69dcd552ed6117",
"size": "117",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bashup/compile/elements/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "49547"
},
{
"name": "Shell",
"bytes": "2857"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from datetime import datetime, timedelta
from itertools import count
from time import time
from celery.schedules import schedule, crontab
from celery.utils.timeutils import timedelta_seconds
from djcelery import schedulers
from djcelery import celery
from djcelery.models import PeriodicTask, IntervalSchedule, CrontabSchedule
from djcelery.models import PeriodicTasks
from djcelery.tests.utils import unittest
def create_model_interval(schedule, **kwargs):
return create_model(interval=IntervalSchedule.from_schedule(schedule),
**kwargs)
def create_model_crontab(schedule, **kwargs):
return create_model(crontab=CrontabSchedule.from_schedule(schedule),
**kwargs)
_next_id = count(0).next
def create_model(Model=PeriodicTask, **kwargs):
entry = dict(name='thefoo{0}'.format(_next_id()),
task='djcelery.unittest.add{0}'.format(_next_id()),
args='[2, 2]',
kwargs='{"callback": "foo"}',
queue='xaz',
routing_key='cpu',
exchange='foo')
return Model(**dict(entry, **kwargs))
class EntryTrackSave(schedulers.ModelEntry):
def __init__(self, *args, **kwargs):
self.saved = 0
super(EntryTrackSave, self).__init__(*args, **kwargs)
def save(self):
self.saved += 1
super(EntryTrackSave, self).save()
class EntrySaveRaises(schedulers.ModelEntry):
def save(self):
raise RuntimeError('this is expected')
class TrackingScheduler(schedulers.DatabaseScheduler):
Entry = EntryTrackSave
def __init__(self, *args, **kwargs):
self.flushed = 0
schedulers.DatabaseScheduler.__init__(self, *args, **kwargs)
def sync(self):
self.flushed += 1
schedulers.DatabaseScheduler.sync(self)
class test_ModelEntry(unittest.TestCase):
Entry = EntryTrackSave
def tearDown(self):
PeriodicTask.objects.all().delete()
def test_entry(self):
m = create_model_interval(schedule(timedelta(seconds=10)))
e = self.Entry(m)
self.assertListEqual(e.args, [2, 2])
self.assertDictEqual(e.kwargs, {'callback': 'foo'})
self.assertTrue(e.schedule)
self.assertEqual(e.total_run_count, 0)
self.assertIsInstance(e.last_run_at, datetime)
self.assertDictContainsSubset({'queue': 'xaz',
'exchange': 'foo',
'routing_key': 'cpu'}, e.options)
right_now = celery.now()
m2 = create_model_interval(schedule(timedelta(seconds=10)),
last_run_at=right_now)
self.assertTrue(m2.last_run_at)
e2 = self.Entry(m2)
self.assertIs(e2.last_run_at, right_now)
e3 = e2.next()
self.assertGreater(e3.last_run_at, e2.last_run_at)
self.assertEqual(e3.total_run_count, 1)
class test_DatabaseScheduler(unittest.TestCase):
Scheduler = TrackingScheduler
def setUp(self):
PeriodicTask.objects.all().delete()
self.prev_schedule = celery.conf.CELERYBEAT_SCHEDULE
celery.conf.CELERYBEAT_SCHEDULE = {}
m1 = create_model_interval(schedule(timedelta(seconds=10)))
m2 = create_model_interval(schedule(timedelta(minutes=20)))
m3 = create_model_crontab(crontab(minute='2,4,5'))
for obj in m1, m2, m3:
obj.save()
self.s = self.Scheduler()
self.m1 = PeriodicTask.objects.get(name=m1.name)
self.m2 = PeriodicTask.objects.get(name=m2.name)
self.m3 = PeriodicTask.objects.get(name=m3.name)
def tearDown(self):
celery.conf.CELERYBEAT_SCHEDULE = self.prev_schedule
PeriodicTask.objects.all().delete()
def test_constructor(self):
self.assertIsInstance(self.s._dirty, set)
self.assertIsNone(self.s._last_sync)
self.assertTrue(self.s.sync_every)
def test_all_as_schedule(self):
sched = self.s.schedule
self.assertTrue(sched)
self.assertEqual(len(sched), 4)
self.assertIn('celery.backend_cleanup', sched)
for n, e in sched.items():
self.assertIsInstance(e, self.s.Entry)
def test_schedule_changed(self):
self.m2.args = '[16, 16]'
self.m2.save()
e2 = self.s.schedule[self.m2.name]
self.assertListEqual(e2.args, [16, 16])
self.m1.args = '[32, 32]'
self.m1.save()
e1 = self.s.schedule[self.m1.name]
self.assertListEqual(e1.args, [32, 32])
e1 = self.s.schedule[self.m1.name]
self.assertListEqual(e1.args, [32, 32])
self.m3.delete()
self.assertRaises(KeyError, self.s.schedule.__getitem__, self.m3.name)
def test_should_sync(self):
self.assertTrue(self.s.should_sync())
self.s._last_sync = time()
self.assertFalse(self.s.should_sync())
self.s._last_sync -= self.s.sync_every
self.assertTrue(self.s.should_sync())
def test_reserve(self):
e1 = self.s.schedule[self.m1.name]
self.s.schedule[self.m1.name] = self.s.reserve(e1)
self.assertEqual(self.s.flushed, 1)
e2 = self.s.schedule[self.m2.name]
self.s.schedule[self.m2.name] = self.s.reserve(e2)
self.assertEqual(self.s.flushed, 1)
self.assertIn(self.m2.name, self.s._dirty)
def test_sync_saves_last_run_at(self):
e1 = self.s.schedule[self.m2.name]
last_run = e1.last_run_at
last_run2 = last_run - timedelta(days=1)
e1.model.last_run_at = last_run2
self.s._dirty.add(self.m2.name)
self.s.sync()
e2 = self.s.schedule[self.m2.name]
self.assertEqual(e2.last_run_at, last_run2)
def test_sync_syncs_before_save(self):
# Get the entry for m2
e1 = self.s.schedule[self.m2.name]
# Increment the entry (but make sure it doesn't sync)
self.s._last_sync = time()
e2 = self.s.schedule[e1.name] = self.s.reserve(e1)
self.assertEqual(self.s.flushed, 1)
# Fetch the raw object from db, change the args
# and save the changes.
m2 = PeriodicTask.objects.get(pk=self.m2.pk)
m2.args = '[16, 16]'
m2.save()
# get_schedule should now see the schedule has changed.
# and also sync the dirty objects.
e3 = self.s.schedule[self.m2.name]
self.assertEqual(self.s.flushed, 2)
self.assertEqual(e3.last_run_at, e2.last_run_at)
self.assertListEqual(e3.args, [16, 16])
def test_sync_not_dirty(self):
self.s._dirty.clear()
self.s.sync()
def test_sync_object_gone(self):
self.s._dirty.add('does-not-exist')
self.s.sync()
def test_sync_rollback_on_save_error(self):
self.s.schedule[self.m1.name] = EntrySaveRaises(self.m1)
self.s._dirty.add(self.m1.name)
self.assertRaises(RuntimeError, self.s.sync)
class test_models(unittest.TestCase):
def test_IntervalSchedule_unicode(self):
self.assertEqual(unicode(IntervalSchedule(every=1, period='seconds')),
'every second')
self.assertEqual(unicode(IntervalSchedule(every=10, period='seconds')),
'every 10 seconds')
def test_CrontabSchedule_unicode(self):
self.assertEqual(unicode(CrontabSchedule(minute=3,
hour=3,
day_of_week=None)),
'3 3 * * * (m/h/d/dM/MY)')
self.assertEqual(unicode(CrontabSchedule(minute=3,
hour=3,
day_of_week='tue',
day_of_month='*/2',
month_of_year='4,6')),
'3 3 tue */2 4,6 (m/h/d/dM/MY)')
def test_PeriodicTask_unicode_interval(self):
p = create_model_interval(schedule(timedelta(seconds=10)))
self.assertEqual(unicode(p),
'{0}: every 10.0 seconds'.format(p.name))
def test_PeriodicTask_unicode_crontab(self):
p = create_model_crontab(crontab(hour='4, 5', day_of_week='4, 5'))
self.assertEqual(unicode(p),
'{0}: * 4,5 4,5 * * (m/h/d/dM/MY)'.format(p.name))
def test_PeriodicTask_schedule_property(self):
p1 = create_model_interval(schedule(timedelta(seconds=10)))
s1 = p1.schedule
self.assertEqual(timedelta_seconds(s1.run_every), 10)
p2 = create_model_crontab(crontab(hour='4, 5',
minute='10,20,30',
day_of_month='1-7',
month_of_year='*/3'))
s2 = p2.schedule
self.assertSetEqual(s2.hour, set([4, 5]))
self.assertSetEqual(s2.minute, set([10, 20, 30]))
self.assertSetEqual(s2.day_of_week, set([0, 1, 2, 3, 4, 5, 6]))
self.assertSetEqual(s2.day_of_month, set([1, 2, 3, 4, 5, 6, 7]))
self.assertSetEqual(s2.month_of_year, set([1, 4, 7, 10]))
def test_PeriodicTask_unicode_no_schedule(self):
p = create_model()
self.assertEqual(unicode(p), '{0}: {{no schedule}}'.format(p.name))
def test_CrontabSchedule_schedule(self):
s = CrontabSchedule(minute='3, 7', hour='3, 4', day_of_week='*',
day_of_month='1, 16', month_of_year='1, 7')
self.assertEqual(s.schedule.minute, set([3, 7]))
self.assertEqual(s.schedule.hour, set([3, 4]))
self.assertEqual(s.schedule.day_of_week, set([0, 1, 2, 3, 4, 5, 6]))
self.assertEqual(s.schedule.day_of_month, set([1, 16]))
self.assertEqual(s.schedule.month_of_year, set([1, 7]))
class test_model_PeriodicTasks(unittest.TestCase):
def setUp(self):
PeriodicTasks.objects.all().delete()
def test_track_changes(self):
self.assertIsNone(PeriodicTasks.last_change())
m1 = create_model_interval(schedule(timedelta(seconds=10)))
m1.save()
x = PeriodicTasks.last_change()
self.assertTrue(x)
m1.args = '(23, 24)'
m1.save()
y = PeriodicTasks.last_change()
self.assertTrue(y)
self.assertGreater(y, x)
|
{
"content_hash": "1e8082e7e2ffe9e36a6173835007d8ab",
"timestamp": "",
"source": "github",
"line_count": 289,
"max_line_length": 79,
"avg_line_length": 36.07612456747405,
"alnum_prop": 0.5834452330711682,
"repo_name": "alexhayes/django-celery",
"id": "85a1042b0c54ef1c3cdbd041dbd0521023f71d1d",
"size": "10426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "djcelery/tests/test_schedulers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "207537"
},
{
"name": "Shell",
"bytes": "2065"
}
],
"symlink_target": ""
}
|
__author__ = 'chengxue'
from taskflow import task
from utils.db_handlers import tenants as db_handler
from utils.helper import *
from keystoneclient import exceptions as keystone_exceptions
LOG = logging.getLogger(__name__)
class UpdateProjectsQuotasTask(task.Task):
"""
Task to update quotas for all migrated projects
"""
def __init__(self, *args, **kwargs):
super(UpdateProjectsQuotasTask, self).__init__(*args, **kwargs)
self.s_cloud_name = cfg.CONF.SOURCE.os_cloud_name
self.t_cloud_name = cfg.CONF.TARGET.os_cloud_name
self.ks_source = get_keystone_source()
self.nv_source = get_nova_source()
self.nv_target = get_nova_target()
def update_quota(self, tenant_name=None, quota=None, t_data=None):
if tenant_name is None:
LOG.error("Tenant name cannot be null, skip Updating.")
return
if quota is None:
LOG.info("Nothing to be updated for tenant {0}."
.format(tenant_name))
return
ks = get_keystone_target()
try:
tenant = ks.tenants.find(name=tenant_name)
except keystone_exceptions.NotFound:
LOG.error("Tenant {0} cannot be found in cloud {1}"
.format(tenant_name, self.s_cloud_name))
return
if tenant is not None:
self.nv_target.quotas.update(tenant.id,
metadata_items=quota.metadata_items,
                                         injected_file_content_bytes=quota.injected_file_content_bytes,
injected_file_path_bytes=None,
ram=quota.ram,
floating_ips=quota.floating_ips,
instances=quota.instances,
injected_files=quota.injected_files,
cores=quota.cores,
key_pairs=None,
security_groups=None,
security_group_rules=None)
t_data.update({'quota_updated': '1'})
db_handler.update_migration_record(**t_data)
LOG.info("The quota for tenant {0} has been updated successfully."
.format(tenant_name))
def execute(self):
LOG.info("Start Project quota updating ...")
tenants = self.ks_source.tenants.list()
for tenant in tenants:
tenant_name = tenant.name
# get the tenant data that has been migrated from src to dst
values = [tenant_name, self.s_cloud_name, self.t_cloud_name]
tenant_data = db_handler.get_migrated_tenant(values)
            # only update quotas for projects that have been completely migrated
if tenant_data is not None:
if tenant_data['state'] == "proxy_created":
if tenant_data['quota_updated'] == '1':
LOG.info("The quota of project {0} has been updated."
.format(tenant_data['project_name']))
else:
new_name_dst = tenant_data['new_project_name']
# get source project quota
src_quota = self.nv_source.quotas.get(tenant.id)
# update destination project quota
self.update_quota(new_name_dst,
src_quota,
tenant_data)
else:
LOG.info("The corresponding project {0} has not been "
"migrated.".format(tenant_data['project_name']))
else:
LOG.info("Tenant {} in could {} has not been migrated."
.format(tenant.name, self.s_cloud_name))
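# Illustrative wiring sketch (not part of the original module): this task is
# meant to run inside a TaskFlow flow. The flow/engine calls below follow the
# taskflow library; loading of cfg.CONF is assumed to happen elsewhere.
#
# from taskflow import engines
# from taskflow.patterns import linear_flow
#
# flow = linear_flow.Flow('update-projects-quotas')
# flow.add(UpdateProjectsQuotasTask())
# engines.run(flow)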
|
{
"content_hash": "5cf870a04d47decaf52a483cdd1e99e4",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 78,
"avg_line_length": 40.888888888888886,
"alnum_prop": 0.5007411067193676,
"repo_name": "Phoenix1708/OpenAcademy_OpenStack_Flyway",
"id": "43fd134b1219779069a4d6fbd7edb40b6b328e3b",
"size": "4048",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flyway/flow/update_projects_quotas_task.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "388880"
},
{
"name": "JavaScript",
"bytes": "348422"
},
{
"name": "Python",
"bytes": "281721"
},
{
"name": "Shell",
"bytes": "383"
}
],
"symlink_target": ""
}
|
"""Support for RESTful API."""
import logging
import httpx
DEFAULT_TIMEOUT = 10
_LOGGER = logging.getLogger(__name__)
class RestData:
"""Class for handling the data retrieval."""
def __init__(
self,
method,
resource,
auth,
headers,
data,
verify_ssl,
timeout=DEFAULT_TIMEOUT,
):
"""Initialize the data object."""
self._method = method
self._resource = resource
self._auth = auth
self._headers = headers
self._request_data = data
self._timeout = timeout
self._verify_ssl = verify_ssl
self._async_client = None
self.data = None
self.headers = None
async def async_remove(self):
"""Destroy the http session on destroy."""
if self._async_client:
await self._async_client.aclose()
def set_url(self, url):
"""Set url."""
self._resource = url
async def async_update(self):
"""Get the latest data from REST service with provided method."""
if not self._async_client:
self._async_client = httpx.AsyncClient(verify=self._verify_ssl)
_LOGGER.debug("Updating from %s", self._resource)
try:
response = await self._async_client.request(
self._method,
self._resource,
headers=self._headers,
auth=self._auth,
data=self._request_data,
timeout=self._timeout,
)
self.data = response.text
self.headers = response.headers
except httpx.RequestError as ex:
_LOGGER.error("Error fetching data: %s failed with %s", self._resource, ex)
self.data = None
self.headers = None
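# Illustrative usage sketch (not part of the original module): fetching a
# placeholder endpoint once from an asyncio event loop; the URL is made up.
#
# import asyncio
#
# async def demo():
#     rest = RestData("GET", "https://example.com/api", None, None, None, True)
#     await rest.async_update()
#     print(rest.data)
#     await rest.async_remove()
#
# asyncio.run(demo())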
|
{
"content_hash": "723559a3a5368a353e5a48679a09db16",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 87,
"avg_line_length": 27.815384615384616,
"alnum_prop": 0.5459070796460177,
"repo_name": "sdague/home-assistant",
"id": "9d9e802c2a043c3b36c7c761c39be301025293de",
"size": "1808",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/rest/data.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "27869189"
},
{
"name": "Shell",
"bytes": "4528"
}
],
"symlink_target": ""
}
|
'''
Copyright (c) 2009, Patrick Maupin, Austin, Texas
A wrapper around subprocess that performs two functions:
1) Adds non-blocking I/O
2) Adds process killability and timeouts
Currently only works under Linux.
'''
import sys
import subprocess
import select
import os
import time
import textwrap
from signal import SIGTERM, SIGKILL
import traceback
class BaseExec(object):
''' BaseExec is designed to be subclassed.
It wraps subprocess.Popen, and adds the
ability to kill a process and to manage
timeouts. By default, it uses pipes for
the new process, but doesn't do anything
with them.
'''
is_python_proc = False
defaults = dict(
bufsize=0,
executable=None,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
preexec_fn=None, # Callable object in child process
close_fds=False,
shell=False,
cwd=None,
env=None,
universal_newlines=False,
startupinfo=None,
creationflags=0,
timeout=500.0, # Time in seconds before termination
killdelay=20.0, # Time in seconds after termination before kill
python_proc=None,
)
def before_init(self, keywords):
# Replace this in subclass to do more setup
pass
def after_init(self):
# Replace this in subclass to execute code after
# process creation
pass
def wrap_python_exec(self, preexec_fn):
# Don't let anything in our buffer wrap back into new process
# Otherwise, it might (will!) come out twice...
sys.stdout.flush()
sys.stderr.flush()
self.is_python_proc = True
def wrapper():
sys.argv = self.args
try:
preexec_fn()
except Exception:
sys.stdout.flush()
print >> sys.stderr, traceback.format_exc()
sys.stderr.write(chr(1))
except SystemExit, s:
sys.stdout.flush()
code = s.code
try:
code = int(code)
except:
pass
if code:
print >> sys.stderr, code
sys.stderr.write(chr(1))
else:
sys.stdout.flush()
sys.stderr.flush()
return wrapper
def __init__(self, *args, **kw):
# Allow flexible args handling.
if len(args) < 2:
try:
args[0] + ''
except TypeError:
args = args[0]
else:
args = args[0].split()
self.args = args
# Handle defaults
keywords = self.defaults.copy()
keywords.update(kw)
# Get our timeout information, and call
# subclass to get other parameters
self.timeout = keywords.pop('timeout') + time.time()
self.killdelay = keywords.pop('killdelay')
self.before_init(keywords)
# Handle any special Python proc
python_proc = keywords.pop('python_proc')
if python_proc is not None:
assert keywords.pop('preexec_fn') is None
keywords['preexec_fn'] = self.wrap_python_exec(python_proc)
args = ['true']
# Start the process and let subclass execute
proc = subprocess.Popen(args, **keywords)
self.proc = proc
self.after_init()
def kill(self, force=False):
action = force and SIGKILL or SIGTERM
os.kill(self.proc.pid, action)
return action
def checktimeout(self):
# Poll to decide if subprocess needs to be killed
now = time.time()
if now < self.timeout:
return 0
killdelay, self.killdelay = self.killdelay, 0
self.timeout = now + killdelay
return self.kill(not killdelay)
class PipeReader(object):
''' PipeReader is an iterator class designed to read from
the next ready pipe.
It can handle as many pipes at a time as desired,
and each call to next() will yield one of the following:
pipe, data -- After reading data from pipe
pipe, None -- When pipe is closing
None, None -- On timeout if no data
It raises StopIteration if no pipes are still open.
A logical extension would be to handle output pipes as well,
such as the subprocess's stdin, but the initial version is
input pipes only (the subprocess's stdout and stderr).
'''
TIMEOUT = 1.0 # Poll interval in seconds
BUFSIZE = 100000
def __init__(self, *pipes, **kw):
self.timeout = kw.pop('timeout', self.TIMEOUT)
self.bufsize = kw.pop('bufsize', self.BUFSIZE)
self.by_pipenum = {} # Dictionary of read functions
self.ready = [] # List of ready pipes
        assert not kw, kw  # Check for misspellings :)
for pipe in pipes:
self.addpipe(pipe)
def addpipe(self, pipe):
pipenum = pipe.fileno()
bufsize = self.bufsize
by_pipenum = self.by_pipenum
def getdata():
chunk = os.read(pipenum, bufsize)
if chunk:
return pipe, chunk
else:
# Here, we're done. Remove ourselves from
# the dictionary and return None as a notification
del by_pipenum[pipenum]
return pipe, None
assert by_pipenum.setdefault(pipenum, getdata) is getdata
def __iter__(self):
return self
def next(self):
ready = self.ready
if not ready:
allpipes = list(self.by_pipenum)
if not allpipes:
raise StopIteration
ready[:] = select.select(allpipes,[],[],self.timeout)[0]
if not ready:
return None, None # Allow code to execute after timeout
return self.by_pipenum[ready.pop()]()
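# Illustrative sketch (not part of the original module; Python 2 to match):
# draining both pipes of a child process with PipeReader. Iteration ends when
# every pipe has closed, and (None, None) marks a poll timeout with no data.
#
# proc = subprocess.Popen(['ls', '-l'], stdout=subprocess.PIPE,
#                         stderr=subprocess.PIPE)
# for pipe, data in PipeReader(proc.stdout, proc.stderr):
#     if data:
#         sys.stdout.write(data)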
class LineSplitter(object):
''' LineSplitter takes arbitrary string
data and splits it into text lines.
It manages the case where a single
line of data returned from a pipe is
split across multiple reads.
'''
def __init__(self, prefix):
self.prefix = prefix
self.leftovers = ''
self.lines = []
def __call__(self, chunk):
if not chunk:
if self.leftovers:
chunk = '\n'
else:
return self
chunk = chunk.replace('\r\n', '\n').replace('\r', '\n')
chunk = self.leftovers + chunk
newlines = chunk.split('\n')
self.leftovers = newlines.pop()
oldlines = self.lines
oldlines.reverse()
oldlines.extend(newlines)
oldlines.reverse()
return self
def __iter__(self):
return self
def next(self):
try:
return self.prefix, self.lines.pop()
except IndexError:
raise StopIteration
class TextOutExec(BaseExec):
''' TextOutExec is used for when an executed subprocess's
stdout and stderr are line-oriented text output.
This class is its own iterator. Each line from
the subprocess is yielded from here, with a prefix:
' ' -- line written by subprocess to stdout
'* ' -- line written by subprocess to stderr
'** ' -- line represents subprocess exit code
NB: Current implementation is probably not that secure,
in that it assumes that once the pipes are closed,
the process should be terminating itself shortly.
If this proves to be a problem in real life, we
can add timeout checking to the "wait for things
to finish up" logic.
'''
defaults = dict(
pollinterval=1.0,
readbufsize=100000,
)
defaults.update(BaseExec.defaults)
def before_init(self, keywords):
self.pollinterval = keywords.pop('pollinterval')
self.bufsize = keywords.pop('readbufsize')
def after_init(self):
proc = self.proc
self.pipes = PipeReader(proc.stdout, proc.stderr,
timeout=self.pollinterval, bufsize=self.bufsize)
self.pipedir = {proc.stdout : LineSplitter(' '),
proc.stderr : LineSplitter('*')}
self.lines = []
self.finished = False
def __iter__(self):
return self
def next(self):
lines = self.lines
while not lines:
self.checktimeout()
for pipe, data in self.pipes:
if pipe is not None:
lines.extend(self.pipedir[pipe](data))
lines.reverse()
break
else:
if self.finished:
raise StopIteration
else:
self.finished = True
lines.append(('**', str(self.proc.wait())))
return '%s %s' % lines.pop()
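# Note: the default for `when` below is evaluated once, at import time, on
# purpose; calling elapsedtime() with no argument therefore reports the time
# since module load, which the "Cumulative execution time" message relies on.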
def elapsedtime(when=time.time()):
mins, secs = divmod(round(time.time() - when, 1), 60)
hrs, mins = divmod(mins, 60)
hrs = hrs and ('%02d:' % int(round(hrs))) or ''
mins = mins and ('%02d:' % int(round(mins))) or ''
secs = '%04.1f' % secs
units = hrs and 'hours' or mins and 'minutes' or 'seconds'
return '%s%s%s %s' % (hrs, mins, secs, units)
def default_logger(resultlist, data=None, data2=None):
if data is not None:
resultlist.append(data)
if data2 is None:
data2 = data
print data2
def textexec(*arg, **kw):
''' Exec a subprocess, print lines, and also return
them to caller
'''
logger = kw.pop('logger', default_logger)
formatcmd = textwrap.TextWrapper(initial_indent=' ',
subsequent_indent=' ',
break_long_words=False).fill
subproc = TextOutExec(*arg, **kw)
args = subproc.args
procname = args[0]
starttime = time.time()
result = []
logger(result,
'Process "%s" started on %s\n\n%s\n\n' % (
procname, time.asctime(), formatcmd(' '.join(args))))
errcode = 0
badexit = '* ' + chr(1)
for line in subproc:
if line == badexit and subproc.is_python_proc:
errcode = 1
continue
if not line.startswith('**'):
logger(result, line)
continue
errcode = errcode or int(line.split()[-1])
status = errcode and 'FAIL' or 'PASS'
logger(result,
'\nProgram %s exit code: %s (%d) elapsed time: %s\n' %
(procname, status, errcode, elapsedtime(starttime)))
logger(result, None,
'Cumulative execution time is %s\n' % elapsedtime())
return errcode, result
if __name__ == '__main__':
def goodfunc():
print "Good func", sys.argv
def badfunc():
assert 0, "Boo! %s" % sys.argv
#raise SystemExit('I am bad')
if len(sys.argv) > 1:
print "Starting subprocess"
sys.stdout.flush()
for i in range(10):
time.sleep(0.2)
print "This is line", i
sys.stdout.flush()
print >> sys.stderr, "This is an error message"
print "Ending subprocess"
if sys.argv[1] == 'die':
raise SystemExit('Deliberately croaking')
else:
print 'Calling good python_proc 1'
textexec('goodfunc', '1', python_proc=goodfunc)
print 'Calling bad python_proc 1'
textexec('badfunc', '1', python_proc=badfunc)
print 'Calling good python_proc 2'
textexec('goodfunc', '2', python_proc=goodfunc)
print 'Calling bad python_proc 2'
textexec('badfunc', '2', python_proc=badfunc)
print "Calling myself"
textexec(__file__, 'subprocess')
print "Calling myself with kill time"
textexec(__file__, 'subprocess', timeout=0.8)
print "Calling myself with forced error exit"
textexec(__file__, 'die')
print 'All Done'
|
{
"content_hash": "bd0d5784c5bb28345029d6ebbda21621",
"timestamp": "",
"source": "github",
"line_count": 380,
"max_line_length": 81,
"avg_line_length": 32.286842105263155,
"alnum_prop": 0.5513896813106203,
"repo_name": "openpolis/rst2pdf-patched-docutils-0.8",
"id": "d34e980891dc5f39af5962566cbc27ab413be63d",
"size": "12410",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rst2pdf/tests/execmgr.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1113952"
},
{
"name": "Shell",
"bytes": "37941"
}
],
"symlink_target": ""
}
|
import logging
from flask import current_app
from boiler.feature.orm import db
class AbstractService:
"""
Abstract service
Base class for services that encapsulates common model operations.
Extend your concrete services from this class and define __model__
"""
__model__ = None
__create_validator__ = None
__persist_validator__ = None
def log(self, message, level=None):
""" Write a message to log """
if level is None:
level = logging.INFO
current_app.logger.log(msg=message, level=level)
def is_instance(self, model):
"""
Is instance?
Checks if provided object is instance of this service's model.
:param model: object
:return: bool
"""
result = isinstance(model, self.__model__)
if result is True:
return True
err = 'Object {} is not of type {}'
raise ValueError(err.format(model, self.__model__))
def commit(self):
"""
Commit
        Commits the ORM transaction. Used mostly for bulk operations when
        items are saved with commit=False, to commit multiple items at once.
:return: None
"""
db.session.commit()
def new(self, **kwargs):
"""
New
Returns a new unsaved instance of model, populated from the
provided arguments.
:param kwargs: varargs, data to populate with
:return: object, fresh unsaved model
"""
return self.__model__(**kwargs)
def create(self, **kwargs):
"""
Create
Instantiates and persists new model populated from provided
arguments
:param kwargs: varargs, data to populate with
:return: object, persisted new instance of model
"""
model = self.new(**kwargs)
return self.save(model)
def save(self, model, commit=True):
"""
Save
Puts model into unit of work for persistence. Can optionally
commit transaction. Returns persisted model as a result.
:param model: object, model to persist
:param commit: bool, commit transaction?
:return: object, saved model
"""
self.is_instance(model)
db.session.add(model)
if commit:
db.session.commit()
return model
def delete(self, model, commit=True):
"""
Delete
        Puts model for deletion into unit of work and optionally commits
transaction
:param model: object, model to delete
:param commit: bool, commit?
:return: object, deleted model
"""
self.is_instance(model)
db.session.delete(model)
if commit:
db.session.commit()
return model
def get(self, id):
"""
Get
Returns single entity found by id, or None if not found
:param id: int, entity id
:return: object or None
"""
return self.__model__.query.get(id)
def get_or_404(self, id):
"""
Get or 404
Returns single entity found by its unique id, or raises
        an HTTP 404 exception if nothing is found.
:param id: int, entity id
:return: object
"""
return self.__model__.query.get_or_404(id)
    def get_multiple(self, ids):
        """ Return all entities whose ids appear in the provided list. """
        m = self.__model__
        query = m.query.filter(m.id.in_(ids))
        return query.all()
    def find(self, **kwargs):
        """ Return all entities matching the provided filters. """
        return self.__model__.query.filter_by(**kwargs).all()
    def first(self, **kwargs):
        """ Return the first entity matching the filters, or None. """
        return self.__model__.query.filter_by(**kwargs).first()
    def collection(self, page=None, per_page=None, serialized=None, **kwargs):
        """ Paginated collection stub; not implemented in this base class. """
        pass
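# Illustrative sketch (not part of the original module): a concrete service
# for a hypothetical `User` model; `User` and its `email` field are made-up
# placeholders.
#
# class UserService(AbstractService):
#     __model__ = User
#
# user_service = UserService()
# user = user_service.create(email='user@example.com')
# same_user = user_service.first(email='user@example.com')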
|
{
"content_hash": "69d36a36d3cf154fc7726effe18dab89",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 78,
"avg_line_length": 28.44927536231884,
"alnum_prop": 0.5453387671930718,
"repo_name": "projectshift/shift-boiler",
"id": "8d69bc250b08a13dd10633b1b7464415cc20ce6a",
"size": "3926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boiler/abstract/abstract_service.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6144"
},
{
"name": "JavaScript",
"bytes": "10563"
},
{
"name": "Jinja",
"bytes": "13981"
},
{
"name": "Mako",
"bytes": "1382"
},
{
"name": "Python",
"bytes": "92204"
},
{
"name": "Shell",
"bytes": "513"
}
],
"symlink_target": ""
}
|
from django.db import models
class Airport(models.Model):
    iata = models.CharField(max_length=3, null=True, blank=True)
    icao = models.CharField(max_length=4, null=True, blank=True)
name = models.CharField(max_length=255)
city = models.CharField(max_length=255)
country = models.CharField(max_length=255)
country_code = models.CharField(max_length=10, null=True, blank=True)
time_zone = models.CharField(max_length=255, null=True, blank=True)
longitude = models.CharField(max_length=255, null=True, blank=True)
latitude = models.CharField(max_length=255, null=True, blank=True)
def __unicode__(self):
        return '%s %s' % (self.iata, self.name)
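# Illustrative query sketch (not part of the original module); field names
# follow the model above:
#
# Airport.objects.filter(country_code='US').order_by('city')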
|
{
"content_hash": "293a7564a7ffae257d83a3c0592cce81",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 73,
"avg_line_length": 43.8125,
"alnum_prop": 0.6904422253922967,
"repo_name": "illing2005/django-airports-apis",
"id": "75c679cfaa9c7fa4e08a1dfdc2672955f359c73f",
"size": "725",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airports/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16156"
}
],
"symlink_target": ""
}
|
import json
import warnings
import requests
class Connection:
"""
Handle connections to the KF5 API.
"""
def __init__(self, baseUrl, username, password):
self.valid = False
if baseUrl[-1:] != '/':
warnings.warn('Adding / to baseUrl', RuntimeWarning)
baseUrl = baseUrl + '/'
auth = requests.post(baseUrl + 'rest/account/userLogin',
data={'userName': username, 'password': password})
if auth.status_code == 200:
self.valid = True
self.baseUrl = baseUrl
self.username = username
self.password = password
self.cookies = auth.cookies
self.json = json.loads(auth.text)
self.sectionTitleById = {}
self.sectionIdByTitle = {}
for e in self.json:
self.sectionTitleById[e['sectionId']] = e['sectionTitle']
self.sectionIdByTitle[e['sectionTitle']] = e['sectionId']
self.postsBySectionId = {}
self.viewsBySectionId = {}
self.postsByViewId = {}
self.buildonsBySectionId = {}
self.buildonsByViewId = {}
            self.buildonsByPostId = {}
# self.postsByPostId = {}
def is_valid(self):
""" Is the connection valid? """
return self.valid
def get_section_ids(self):
""" Return the user's section IDs. """
return self.sectionTitleById.keys()
def get_section_titles(self):
"""Return the user's section titles."""
return self.sectionIdByTitle
def get_section_id_by_title(self, sectionTitle):
""" Return the ID of a section given its title. """
return self.sectionIdByTitle.get(sectionTitle)
def get_posts_by_sectionid(self, sectionId):
""" Return the posts in a section given the section's ID. Some memoization is used. """
if sectionId not in self.postsBySectionId:
posts = requests.get(
self.baseUrl + 'rest/content/getSectionPosts/%s' % sectionId,
cookies=self.cookies)
self.postsBySectionId[sectionId] = json.loads(posts.text)
return self.postsBySectionId[sectionId]
def get_posts_by_sectiontitle(self, sectionTitle):
""" Return the posts in a section given the section's title. """
return self.get_posts_by_sectionid(self.get_section_id_by_title(sectionTitle))
def get_views_by_sectionid(self, sectionId):
""" Return the posts in a section given the section's ID. Some memoization is used. """
if sectionId not in self.viewsBySectionId:
posts = requests.get(
self.baseUrl + 'rest/content/getSectionViews/%s' % sectionId,
cookies=self.cookies)
self.viewsBySectionId[sectionId] = json.loads(posts.text)
return self.viewsBySectionId[sectionId]
def get_views_by_sectiontitle(self, sectionTitle):
""" Return the posts in a section given the section's title. """
return self.get_views_by_sectionid(self.get_section_id_by_title(sectionTitle))
def get_view_by_viewid(self, viewId):
if viewId not in self.postsByViewId:
posts = requests.get(
self.baseUrl + 'rest/content/getView/%s' % viewId,
cookies=self.cookies)
self.postsByViewId[viewId] = json.loads(posts.text)
return self.postsByViewId[viewId]
def get_buildons_by_sectiontitle(self, sectionTitle):
""" Return the buildons in a section given the section's title. """
return self.get_buildons_by_sectionid(self.get_section_id_by_title(sectionTitle))
def get_buildons_by_sectionid(self, sectionId):
"""
Return list of build-on links for an entire section, given the ID of the section.
For example:
[
{
"type":"buildson",
"from":"f07ca24b-850a-420f-bfe9-bdb865b030b5",
"to":"ac10b2ab-b25c-4ede-8c72-3238b7322672"
},
{"type":"buildson",
"from":"20f72522-4d62-4d89-907c-c32b447a54dc",
"to":"add053d2-296a-40c9-b2e5-b53ed4b31e3d"
}
]
"""
if sectionId not in self.buildonsBySectionId:
posts = requests.get(
self.baseUrl + 'rest/mobile/getBuildsOnInCommunity/%s' % sectionId,
cookies=self.cookies)
self.buildonsBySectionId[sectionId] = json.loads(posts.text)
return self.buildonsBySectionId[sectionId]
def get_buildons_by_viewid(self, viewId):
""" Return list of build-on links for an entire section, given the title of the section. """
if viewId not in self.buildonsByViewId:
posts = requests.get(
self.baseUrl + 'rest/mobile/getBuildsOnInView/%s' % viewId,
cookies=self.cookies)
self.buildonsByViewId[viewId] = json.loads(posts.text)
return self.buildonsByViewId[viewId]
def get_buildons_by_postid(self, postId):
"""
        Return list of build-on links for a single post, given the ID of the post.
        """
        if postId not in self.buildonsByPostId:
posts = requests.get(
self.baseUrl + 'rest/mobile/getBuildsOnInPost/%s' % postId,
cookies=self.cookies)
self.buildonsByPostId[postId] = json.loads(posts.text)
return self.buildonsByPostId[postId]
def get_post_history(self, postId):
history = requests.get(
self.baseUrl + 'rest/mobile/getPostHistory/%s' % postId,
cookies=self.cookies)
return json.loads(history.text)
def get_all_authors(self, sectionId):
authors = requests.get(
self.baseUrl + 'rest/mobile/getAllAuthors/%s' % sectionId,
cookies=self.cookies)
return json.loads(authors.text)
#def get_post_by_postid(self,postId):
# if postId not in self.postsByPostId:
# post = requests.get(
# self.baseUrl + 'rest/content/getPost/%s' % postId,
# cookies=self.cookies)
# self.postsByPostId[postId] = json.loads(post.text)
# return self.postsByPostId[postId]
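# Illustrative usage sketch (not part of the original module); the base URL
# and credentials are placeholders:
#
# conn = Connection('http://kf5.example.com/', 'user', 'secret')
# if conn.is_valid():
#     for section_id in conn.get_section_ids():
#         print(conn.get_posts_by_sectionid(section_id))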
|
{
"content_hash": "831707690ce0b2f9f38dcd0b7b0f9057",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 100,
"avg_line_length": 41.03225806451613,
"alnum_prop": 0.5932389937106918,
"repo_name": "problemshift/kf5py",
"id": "374bd2c8981feccfcd76df688f4a27de621b732a",
"size": "6360",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kf5py.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9521"
}
],
"symlink_target": ""
}
|
import sys, rospy, math
from pimouse_ros.msg import MotorFreqs
from geometry_msgs.msg import Twist
class Motor():
def __init__(self):
if not self.set_power(True): sys.exit(1)
rospy.on_shutdown(self.set_power)
self.sub_raw = rospy.Subscriber('motor_raw', MotorFreqs, self.callback_raw_freq)
self.sub_cmd_vel = rospy.Subscriber('cmd_vel', Twist, self.callback_cmd_vel)
self.last_time = rospy.Time.now()
self.using_cmd_vel = False
def set_power(self,onoff=False):
en = "/dev/rtmotoren0"
try:
with open(en,'w') as f:
f.write("1\n" if onoff else "0\n")
self.is_on = onoff
return True
        except IOError:
rospy.logerr("cannot write to " + en)
return False
def set_raw_freq(self,left_hz,right_hz):
if not self.is_on:
rospy.logerr("not enpowered")
return
try:
with open("/dev/rtmotor_raw_l0",'w') as lf,\
open("/dev/rtmotor_raw_r0",'w') as rf:
lf.write(str(int(round(left_hz))) + "\n")
rf.write(str(int(round(right_hz))) + "\n")
        except IOError:
rospy.logerr("cannot write to rtmotor_raw_*")
def callback_raw_freq(self,message):
self.set_raw_freq(message.left_hz,message.right_hz)
def callback_cmd_vel(self,message):
forward_hz = 80000.0*message.linear.x/(9*math.pi)
rot_hz = 400.0*message.angular.z/math.pi
self.set_raw_freq(forward_hz-rot_hz, forward_hz+rot_hz)
self.using_cmd_vel = True
self.last_time = rospy.Time.now()
if __name__ == '__main__':
rospy.init_node('motors')
m = Motor()
rate = rospy.Rate(10)
while not rospy.is_shutdown():
if m.using_cmd_vel and rospy.Time.now().to_sec() - m.last_time.to_sec() >= 1.0:
m.set_raw_freq(0,0)
m.using_cmd_vel = False
rate.sleep()
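# Illustrative test sketch (not part of the original node): publishing a
# single velocity command to this node; assumes a running roscore.
#
# import rospy
# from geometry_msgs.msg import Twist
# rospy.init_node('motor_test')
# pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)
# rospy.sleep(1.0)    # give the subscriber connection time to establish
# msg = Twist()
# msg.linear.x = 0.1  # forward at 0.1 m/s
# pub.publish(msg)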
|
{
"content_hash": "6d2438b861209e74796a8fa97511244c",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 82,
"avg_line_length": 27.7,
"alnum_prop": 0.6582430806257521,
"repo_name": "HiroyukiAbe/pimouse_ros",
"id": "0ce468b46cf14bfd8c590e0ddda16d5c2aae76df",
"size": "1700",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/motors1.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CMake",
"bytes": "6794"
},
{
"name": "Python",
"bytes": "24095"
},
{
"name": "Shell",
"bytes": "889"
}
],
"symlink_target": ""
}
|
import unittest
import iot_button
class FunctionalTest(unittest.TestCase):
def test_1(self):
event = {"batteryVoltage": "testing", "serialNumber": "testing", "not_a_real_clickType": "LONG"}
context = {"aws_request_id": "foo",
"log_stream_name": "foo",
"invoked_function_arn": "foo",
"client_context": "",
"log_group_name": "foo",
"function_name": "foo",
"function_version": "$LATEST",
"identity": TestingCognitoIdentity(),
"memory_limit_in_mb": "128",
}
self.assertEqual(iot_button.lambda_handler(event, context), 'success')
class TestingCognitoIdentity:
foo = "foo"
bar = "bar"
def some_method(self):
return True
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "64395b4fdd5af341db3df582559afc25",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 104,
"avg_line_length": 27.78125,
"alnum_prop": 0.5140607424071991,
"repo_name": "nanderson/iot_button_lambda_handler",
"id": "7c56bb4611bc95c22744313f32e98d1c22c030f0",
"size": "889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1759"
},
{
"name": "Python",
"bytes": "6958"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
import ssl
import typing as t
import pytest
from neo4j import (
AsyncBoltDriver,
AsyncGraphDatabase,
AsyncNeo4jDriver,
ExperimentalWarning,
TRUST_ALL_CERTIFICATES,
TRUST_SYSTEM_CA_SIGNED_CERTIFICATES,
TrustAll,
TrustCustomCAs,
TrustSystemCAs,
)
from neo4j.api import (
AsyncBookmarkManager,
BookmarkManager,
READ_ACCESS,
WRITE_ACCESS,
)
from neo4j.exceptions import ConfigurationError
from ..._async_compat import (
AsyncTestDecorators,
mark_async_test,
)
@pytest.mark.parametrize("protocol", ("bolt://", "bolt+s://", "bolt+ssc://"))
@pytest.mark.parametrize("host", ("localhost", "127.0.0.1",
"[::1]", "[0:0:0:0:0:0:0:1]"))
@pytest.mark.parametrize("port", (":1234", "", ":7687"))
@pytest.mark.parametrize("params", ("", "?routing_context=test"))
@pytest.mark.parametrize("auth_token", (("test", "test"), None))
@mark_async_test
async def test_direct_driver_constructor(protocol, host, port, params, auth_token):
uri = protocol + host + port + params
if params:
with pytest.warns(DeprecationWarning, match="routing context"):
driver = AsyncGraphDatabase.driver(uri, auth=auth_token)
else:
driver = AsyncGraphDatabase.driver(uri, auth=auth_token)
assert isinstance(driver, AsyncBoltDriver)
await driver.close()
@pytest.mark.parametrize("protocol",
("neo4j://", "neo4j+s://", "neo4j+ssc://"))
@pytest.mark.parametrize("host", ("localhost", "127.0.0.1",
"[::1]", "[0:0:0:0:0:0:0:1]"))
@pytest.mark.parametrize("port", (":1234", "", ":7687"))
@pytest.mark.parametrize("params", ("", "?routing_context=test"))
@pytest.mark.parametrize("auth_token", (("test", "test"), None))
@mark_async_test
async def test_routing_driver_constructor(protocol, host, port, params, auth_token):
uri = protocol + host + port + params
driver = AsyncGraphDatabase.driver(uri, auth=auth_token)
assert isinstance(driver, AsyncNeo4jDriver)
await driver.close()
@pytest.mark.parametrize("test_uri", (
"bolt+ssc://127.0.0.1:9001",
"bolt+s://127.0.0.1:9001",
"bolt://127.0.0.1:9001",
"neo4j+ssc://127.0.0.1:9001",
"neo4j+s://127.0.0.1:9001",
"neo4j://127.0.0.1:9001",
))
@pytest.mark.parametrize(
("test_config", "expected_failure", "expected_failure_message"),
(
({"encrypted": False}, ConfigurationError, "The config settings"),
({"encrypted": True}, ConfigurationError, "The config settings"),
(
{"encrypted": True, "trust": TRUST_ALL_CERTIFICATES},
ConfigurationError, "The config settings"
),
(
{"trust": TRUST_ALL_CERTIFICATES},
ConfigurationError, "The config settings"
),
(
{"trust": TRUST_SYSTEM_CA_SIGNED_CERTIFICATES},
ConfigurationError, "The config settings"
),
(
{"encrypted": True, "trusted_certificates": TrustAll()},
ConfigurationError, "The config settings"
),
(
{"trusted_certificates": TrustAll()},
ConfigurationError, "The config settings"
),
(
{"trusted_certificates": TrustSystemCAs()},
ConfigurationError, "The config settings"
),
(
{"trusted_certificates": TrustCustomCAs("foo", "bar")},
ConfigurationError, "The config settings"
),
(
{"ssl_context": None},
ConfigurationError, "The config settings"
),
(
{"ssl_context": ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)},
ConfigurationError, "The config settings"
),
)
)
@mark_async_test
async def test_driver_config_error(
test_uri, test_config, expected_failure, expected_failure_message
):
def driver_builder():
if "trust" in test_config:
with pytest.warns(DeprecationWarning, match="trust"):
return AsyncGraphDatabase.driver(test_uri, **test_config)
else:
return AsyncGraphDatabase.driver(test_uri, **test_config)
if "+" in test_uri:
        # `+s` and `+ssc` are shorthand syntax for not having to configure the
# encryption behavior of the driver. Specifying both is invalid.
with pytest.raises(expected_failure, match=expected_failure_message):
driver_builder()
else:
driver = driver_builder()
await driver.close()
@pytest.mark.parametrize("test_uri", (
"http://localhost:9001",
"ftp://localhost:9001",
"x://localhost:9001",
))
def test_invalid_protocol(test_uri):
with pytest.raises(ConfigurationError, match="scheme"):
AsyncGraphDatabase.driver(test_uri)
@pytest.mark.parametrize(
("test_config", "expected_failure", "expected_failure_message"),
(
({"trust": 1}, ConfigurationError, "The config setting `trust`"),
({"trust": True}, ConfigurationError, "The config setting `trust`"),
({"trust": None}, ConfigurationError, "The config setting `trust`"),
)
)
def test_driver_trust_config_error(
test_config, expected_failure, expected_failure_message
):
with pytest.raises(expected_failure, match=expected_failure_message):
AsyncGraphDatabase.driver("bolt://127.0.0.1:9001", **test_config)
@pytest.mark.parametrize("uri", (
"bolt://127.0.0.1:9000",
"neo4j://127.0.0.1:9000",
))
@mark_async_test
async def test_driver_opens_write_session_by_default(uri, fake_pool, mocker):
driver = AsyncGraphDatabase.driver(uri)
    # we set a specific db, because otherwise the driver would try to fetch a RT
# to get hold of the actual home database (which won't work in this
# unittest)
driver._pool = fake_pool
async with driver.session(database="foobar") as session:
mocker.patch("neo4j._async.work.session.AsyncTransaction",
autospec=True)
tx = await session.begin_transaction()
fake_pool.acquire.assert_awaited_once_with(
access_mode=WRITE_ACCESS,
timeout=mocker.ANY,
database=mocker.ANY,
bookmarks=mocker.ANY,
liveness_check_timeout=mocker.ANY
)
tx._begin.assert_awaited_once_with(
mocker.ANY,
mocker.ANY,
mocker.ANY,
WRITE_ACCESS,
mocker.ANY,
mocker.ANY
)
await driver.close()
@pytest.mark.parametrize("uri", (
"bolt://127.0.0.1:9000",
"neo4j://127.0.0.1:9000",
))
@mark_async_test
async def test_verify_connectivity(uri, mocker):
driver = AsyncGraphDatabase.driver(uri)
pool_mock = mocker.patch.object(driver, "_pool", autospec=True)
try:
ret = await driver.verify_connectivity()
finally:
await driver.close()
assert ret is None
pool_mock.acquire.assert_awaited_once()
assert pool_mock.acquire.call_args.kwargs["liveness_check_timeout"] == 0
pool_mock.release.assert_awaited_once()
@pytest.mark.parametrize("uri", (
"bolt://127.0.0.1:9000",
"neo4j://127.0.0.1:9000",
))
@pytest.mark.parametrize("kwargs", (
{"default_access_mode": WRITE_ACCESS},
{"default_access_mode": READ_ACCESS},
{"fetch_size": 69},
))
@mark_async_test
async def test_verify_connectivity_parameters_are_deprecated(
uri, kwargs, mocker
):
driver = AsyncGraphDatabase.driver(uri)
mocker.patch.object(driver, "_pool", autospec=True)
try:
with pytest.warns(ExperimentalWarning, match="configuration"):
await driver.verify_connectivity(**kwargs)
finally:
await driver.close()
@pytest.mark.parametrize("uri", (
"bolt://127.0.0.1:9000",
"neo4j://127.0.0.1:9000",
))
@pytest.mark.parametrize("kwargs", (
{"default_access_mode": WRITE_ACCESS},
{"default_access_mode": READ_ACCESS},
{"fetch_size": 69},
))
@mark_async_test
async def test_get_server_info_parameters_are_experimental(
uri, kwargs, mocker
):
driver = AsyncGraphDatabase.driver(uri)
mocker.patch.object(driver, "_pool", autospec=True)
try:
with pytest.warns(ExperimentalWarning, match="configuration"):
await driver.get_server_info(**kwargs)
finally:
await driver.close()
@mark_async_test
async def test_with_builtin_bookmark_manager(mocker) -> None:
with pytest.warns(ExperimentalWarning, match="bookmark manager"):
bmm = AsyncGraphDatabase.bookmark_manager()
# could be one line, but want to make sure the type checker assigns
# bmm whatever type AsyncGraphDatabase.bookmark_manager() returns
session_cls_mock = mocker.patch("neo4j._async.driver.AsyncSession",
autospec=True)
driver = AsyncGraphDatabase.driver("bolt://localhost")
async with driver as driver:
with pytest.warns(ExperimentalWarning, match="bookmark_manager"):
_ = driver.session(bookmark_manager=bmm)
session_cls_mock.assert_called_once()
assert session_cls_mock.call_args[0][1].bookmark_manager is bmm
@AsyncTestDecorators.mark_async_only_test
async def test_with_custom_inherited_async_bookmark_manager(mocker) -> None:
class BMM(AsyncBookmarkManager):
async def update_bookmarks(
self, database: str, previous_bookmarks: t.Iterable[str],
new_bookmarks: t.Iterable[str]
) -> None:
...
async def get_bookmarks(self, database: str) -> t.Collection[str]:
return []
async def get_all_bookmarks(self) -> t.Collection[str]:
return []
async def forget(self, databases: t.Iterable[str]) -> None:
...
bmm = BMM()
# could be one line, but want to make sure the type checker assigns
    # bmm whatever type BMM() returns
session_cls_mock = mocker.patch("neo4j._async.driver.AsyncSession",
autospec=True)
driver = AsyncGraphDatabase.driver("bolt://localhost")
async with driver as driver:
with pytest.warns(ExperimentalWarning, match="bookmark_manager"):
_ = driver.session(bookmark_manager=bmm)
session_cls_mock.assert_called_once()
assert session_cls_mock.call_args[0][1].bookmark_manager is bmm
@mark_async_test
async def test_with_custom_inherited_sync_bookmark_manager(mocker) -> None:
class BMM(BookmarkManager):
def update_bookmarks(
self, database: str, previous_bookmarks: t.Iterable[str],
new_bookmarks: t.Iterable[str]
) -> None:
...
def get_bookmarks(self, database: str) -> t.Collection[str]:
return []
def get_all_bookmarks(self) -> t.Collection[str]:
return []
def forget(self, databases: t.Iterable[str]) -> None:
...
bmm = BMM()
# could be one line, but want to make sure the type checker assigns
    # bmm whatever type BMM() returns
session_cls_mock = mocker.patch("neo4j._async.driver.AsyncSession",
autospec=True)
driver = AsyncGraphDatabase.driver("bolt://localhost")
async with driver as driver:
with pytest.warns(ExperimentalWarning, match="bookmark_manager"):
_ = driver.session(bookmark_manager=bmm)
session_cls_mock.assert_called_once()
assert session_cls_mock.call_args[0][1].bookmark_manager is bmm
@AsyncTestDecorators.mark_async_only_test
async def test_with_custom_ducktype_async_bookmark_manager(mocker) -> None:
class BMM:
async def update_bookmarks(
self, database: str, previous_bookmarks: t.Iterable[str],
new_bookmarks: t.Iterable[str]
) -> None:
...
async def get_bookmarks(self, database: str) -> t.Collection[str]:
return []
async def get_all_bookmarks(self) -> t.Collection[str]:
return []
async def forget(self, databases: t.Iterable[str]) -> None:
...
bmm = BMM()
# could be one line, but want to make sure the type checker assigns
    # bmm whatever type BMM() returns
session_cls_mock = mocker.patch("neo4j._async.driver.AsyncSession",
autospec=True)
driver = AsyncGraphDatabase.driver("bolt://localhost")
async with driver as driver:
with pytest.warns(ExperimentalWarning, match="bookmark_manager"):
_ = driver.session(bookmark_manager=bmm)
session_cls_mock.assert_called_once()
assert session_cls_mock.call_args[0][1].bookmark_manager is bmm
@mark_async_test
async def test_with_custom_ducktype_sync_bookmark_manager(mocker) -> None:
class BMM:
def update_bookmarks(
self, database: str, previous_bookmarks: t.Iterable[str],
new_bookmarks: t.Iterable[str]
) -> None:
...
def get_bookmarks(self, database: str) -> t.Collection[str]:
return []
def get_all_bookmarks(self) -> t.Collection[str]:
return []
def forget(self, databases: t.Iterable[str]) -> None:
...
bmm = BMM()
# could be one line, but want to make sure the type checker assigns
    # bmm whatever type BMM() returns
session_cls_mock = mocker.patch("neo4j._async.driver.AsyncSession",
autospec=True)
driver = AsyncGraphDatabase.driver("bolt://localhost")
async with driver as driver:
with pytest.warns(ExperimentalWarning, match="bookmark_manager"):
_ = driver.session(bookmark_manager=bmm)
session_cls_mock.assert_called_once()
assert session_cls_mock.call_args[0][1].bookmark_manager is bmm
|
{
"content_hash": "ab29262fd53db06b24680e41f7fc76a4",
"timestamp": "",
"source": "github",
"line_count": 400,
"max_line_length": 84,
"avg_line_length": 34.635,
"alnum_prop": 0.6271834849141043,
"repo_name": "neo4j/neo4j-python-driver",
"id": "693adf94ab6c726b23b9c3ceb64dc3c4f661ad98",
"size": "14497",
"binary": false,
"copies": "1",
"ref": "refs/heads/5.0",
"path": "tests/unit/async_/test_driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2068"
},
{
"name": "Python",
"bytes": "1654566"
},
{
"name": "Shell",
"bytes": "4165"
}
],
"symlink_target": ""
}
|
def main():
parking_lots = get_array("../data/parkinglots.dat")
buildings = get_array("../data/buildings.dat")
print(buildings)
print(parking_lots)
def get_array(filename):
    print(filename)
    data_array = []
    # use a context manager so the file is closed even on errors, and avoid
    # shadowing the built-in max()
    with open(filename, "r") as in_file:
        # the first line is read only to count how many columns each row has
        num_fields = len(in_file.readline().split())
        for line in in_file:
            data_array.append(line.split())
            # every column after the first is parsed as an integer
            for i in range(1, num_fields):
                data_array[-1][i] = int(data_array[-1][i])
    return data_array
main()
|
{
"content_hash": "2facc66e56a15bb28907f9e23263bdb5",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 55,
"avg_line_length": 27.68421052631579,
"alnum_prop": 0.5722433460076045,
"repo_name": "sremedios/ParkMT",
"id": "8bc60bb23d8090180c7087c96e417e4f31a1bb2d",
"size": "526",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "analysis/getdata.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "19827"
},
{
"name": "CSS",
"bytes": "221"
},
{
"name": "HTML",
"bytes": "2975713"
},
{
"name": "JavaScript",
"bytes": "5691"
},
{
"name": "Jupyter Notebook",
"bytes": "163374"
},
{
"name": "Python",
"bytes": "29565"
}
],
"symlink_target": ""
}
|
"""
Simple Line Chart
-----------------
This chart shows the most basic line chart, made from a dataframe with two
columns.
"""
# category: simple charts
import altair as alt
import numpy as np
import pandas as pd
x = np.arange(100)
source = pd.DataFrame({
'x': x,
'f(x)': np.sin(x / 5)
})
alt.Chart(source).mark_line().encode(
x='x',
y='f(x)'
)
|
{
"content_hash": "130b067a263862c61632df592ca567be",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 74,
"avg_line_length": 16.40909090909091,
"alnum_prop": 0.6177285318559557,
"repo_name": "jakevdp/altair",
"id": "7461362257f18c47c5d23aef81a44b7a1ba67677",
"size": "361",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "altair/examples/simple_line_chart.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "343"
},
{
"name": "Python",
"bytes": "5353045"
},
{
"name": "TeX",
"bytes": "2684"
}
],
"symlink_target": ""
}
|
from __future__ import print_function, division
from sympy.logic.boolalg import And
from sympy.core import oo
from sympy.core.basic import Basic
from sympy.core.compatibility import as_int, with_metaclass, range, PY3
from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union,
FiniteSet)
from sympy.core.singleton import Singleton, S, sympify
from sympy.core.sympify import _sympify, converter
from sympy.core.function import Lambda
from sympy.utilities.misc import filldedent, func_name
class Naturals(with_metaclass(Singleton, Set)):
"""
Represents the natural numbers (or counting numbers) which are all
positive integers starting from 1. This set is also available as
the Singleton, S.Naturals.
Examples
========
>>> from sympy import S, Interval, pprint
>>> 5 in S.Naturals
True
>>> iterable = iter(S.Naturals)
>>> next(iterable)
1
>>> next(iterable)
2
>>> next(iterable)
3
>>> pprint(S.Naturals.intersect(Interval(0, 10)))
{1, 2, ..., 10}
See Also
========
Naturals0 : non-negative integers (i.e. includes 0, too)
Integers : also includes negative integers
"""
is_iterable = True
_inf = S.One
_sup = S.Infinity
def _intersect(self, other):
if other.is_Interval:
return Intersection(
S.Integers, other, Interval(self._inf, S.Infinity))
return None
def _contains(self, other):
if other.is_positive and other.is_integer:
return S.true
elif other.is_integer is False or other.is_positive is False:
return S.false
def __iter__(self):
i = self._inf
while True:
yield i
i = i + 1
@property
def _boundary(self):
return self
class Naturals0(Naturals):
"""Represents the whole numbers which are all the non-negative integers,
inclusive of zero.
See Also
========
Naturals : positive integers; does not include 0
Integers : also includes the negative integers
"""
_inf = S.Zero
def _contains(self, other):
if other.is_integer and other.is_nonnegative:
return S.true
elif other.is_integer is False or other.is_nonnegative is False:
return S.false
class Integers(with_metaclass(Singleton, Set)):
"""
Represents all integers: positive, negative and zero. This set is also
available as the Singleton, S.Integers.
Examples
========
>>> from sympy import S, Interval, pprint
    >>> 5 in S.Integers
True
>>> iterable = iter(S.Integers)
>>> next(iterable)
0
>>> next(iterable)
1
>>> next(iterable)
-1
>>> next(iterable)
2
>>> pprint(S.Integers.intersect(Interval(-4, 4)))
{-4, -3, ..., 4}
See Also
========
    Naturals0 : non-negative integers
    Naturals : positive integers; does not include 0
"""
is_iterable = True
def _intersect(self, other):
from sympy.functions.elementary.integers import floor, ceiling
if other is Interval(S.NegativeInfinity, S.Infinity) or other is S.Reals:
return self
elif other.is_Interval:
s = Range(ceiling(other.left), floor(other.right) + 1)
return s.intersect(other) # take out endpoints if open interval
return None
def _contains(self, other):
if other.is_integer:
return S.true
elif other.is_integer is False:
return S.false
def __iter__(self):
yield S.Zero
i = S.One
while True:
yield i
yield -i
i = i + 1
@property
def _inf(self):
return -S.Infinity
@property
def _sup(self):
return S.Infinity
@property
def _boundary(self):
return self
def _eval_imageset(self, f):
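        # canonicalize images of the integers under linear maps a*n + b:
        # a negative slope is flipped and, when the slope is one, the
        # integer shift b is dropped; neither changes the resulting set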
from sympy import Wild
expr = f.expr
if len(f.variables) > 1:
return
n = f.variables[0]
a = Wild('a')
b = Wild('b')
match = expr.match(a*n + b)
if match[a].is_negative:
expr = -expr
match = expr.match(a*n + b)
if match[a] is S.One and match[b].is_integer:
expr = expr - match[b]
return ImageSet(Lambda(n, expr), S.Integers)
class Reals(with_metaclass(Singleton, Interval)):
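    """
    Represents all real numbers as the interval (-oo, oo). This set is
    also available as the Singleton, S.Reals.
    """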
def __new__(cls):
return Interval.__new__(cls, -S.Infinity, S.Infinity)
def __eq__(self, other):
return other == Interval(-S.Infinity, S.Infinity)
def __hash__(self):
return hash(Interval(-S.Infinity, S.Infinity))
class ImageSet(Set):
"""
Image of a set under a mathematical function
Examples
========
>>> from sympy import Symbol, S, ImageSet, FiniteSet, Lambda
>>> x = Symbol('x')
>>> N = S.Naturals
>>> squares = ImageSet(Lambda(x, x**2), N) # {x**2 for x in N}
>>> 4 in squares
True
>>> 5 in squares
False
>>> FiniteSet(0, 1, 2, 3, 4, 5, 6, 7, 9, 10).intersect(squares)
{1, 4, 9}
>>> square_iterable = iter(squares)
>>> for i in range(4):
... next(square_iterable)
1
4
9
16
"""
def __new__(cls, lamda, base_set):
return Basic.__new__(cls, lamda, base_set)
lamda = property(lambda self: self.args[0])
base_set = property(lambda self: self.args[1])
def __iter__(self):
already_seen = set()
for i in self.base_set:
val = self.lamda(i)
if val in already_seen:
continue
else:
already_seen.add(val)
yield val
def _is_multivariate(self):
return len(self.lamda.variables) > 1
def _contains(self, other):
from sympy.solvers.solveset import solveset, linsolve
L = self.lamda
if self._is_multivariate():
solns = list(linsolve([expr - val for val, expr in
zip(other, L.expr)], L.variables).args[0])
else:
solnsSet = solveset(L.expr-other, L.variables[0])
if solnsSet.is_FiniteSet:
solns = list(solveset(L.expr - other, L.variables[0]))
else:
raise NotImplementedError(filldedent('''
Determining whether an ImageSet contains %s has not
been implemented.''' % func_name(other)))
for soln in solns:
try:
if soln in self.base_set:
return S.true
except TypeError:
return self.base_set.contains(soln.evalf())
return S.false
@property
def is_iterable(self):
return self.base_set.is_iterable
def _intersect(self, other):
from sympy import Dummy
from sympy.solvers.diophantine import diophantine
from sympy.sets.sets import imageset
if self.base_set is S.Integers:
if isinstance(other, ImageSet) and other.base_set is S.Integers:
f, g = self.lamda.expr, other.lamda.expr
n, m = self.lamda.variables[0], other.lamda.variables[0]
# Diophantine sorts the solutions according to the alphabetic
# order of the variable names, since the result should not depend
# on the variable name, they are replaced by the dummy variables
# below
a, b = Dummy('a'), Dummy('b')
f, g = f.subs(n, a), g.subs(m, b)
solns_set = diophantine(f - g)
if solns_set == set():
return EmptySet()
solns = list(diophantine(f - g))
if len(solns) == 1:
t = list(solns[0][0].free_symbols)[0]
else:
return None
                # Dummy 'a' sorts before 'b', so solns[0][0] solves for f's variable
return imageset(Lambda(t, f.subs(a, solns[0][0])), S.Integers)
if other == S.Reals:
from sympy.solvers.solveset import solveset_real
from sympy.core.function import expand_complex
if len(self.lamda.variables) > 1:
return None
f = self.lamda.expr
n = self.lamda.variables[0]
n_ = Dummy(n.name, real=True)
f_ = f.subs(n, n_)
re, im = f_.as_real_imag()
im = expand_complex(im)
return imageset(Lambda(n_, re),
self.base_set.intersect(
solveset_real(im, n_)))
class Range(Set):
"""
Represents a range of integers.
Examples
========
>>> from sympy import Range
>>> list(Range(5)) # 0 to 5
[0, 1, 2, 3, 4]
>>> list(Range(10, 15)) # 10 to 15
[10, 11, 12, 13, 14]
>>> list(Range(10, 20, 2)) # 10 to 20 in steps of 2
[10, 12, 14, 16, 18]
>>> list(Range(20, 10, -2)) # 20 to 10 backward in steps of 2
[12, 14, 16, 18, 20]
"""
is_iterable = True
def __new__(cls, *args):
from sympy.functions.elementary.integers import ceiling
if len(args) == 1:
if isinstance(args[0], range if PY3 else xrange):
args = args[0].__reduce__()[1] # use pickle method
# expand range
slc = slice(*args)
start, stop, step = slc.start or 0, slc.stop, slc.step or 1
try:
start, stop, step = [w if w in [S.NegativeInfinity, S.Infinity] else sympify(as_int(w))
for w in (start, stop, step)]
except ValueError:
raise ValueError("Inputs to Range must be Integer Valued\n" +
"Use ImageSets of Ranges for other cases")
if not step.is_finite:
raise ValueError("Infinite step is not allowed")
if start == stop:
return S.EmptySet
n = ceiling((stop - start)/step)
if n <= 0:
return S.EmptySet
# normalize args: regardless of how they are entered they will show
# canonically as Range(inf, sup, step) with step > 0
if n.is_finite:
start, stop = sorted((start, start + (n - 1)*step))
else:
start, stop = sorted((start, stop - step))
step = abs(step)
if (start, stop) == (S.NegativeInfinity, S.Infinity):
raise ValueError("Both the start and end value of "
"Range cannot be unbounded")
else:
return Basic.__new__(cls, start, stop + step, step)
start = property(lambda self: self.args[0])
stop = property(lambda self: self.args[1])
step = property(lambda self: self.args[2])
def _intersect(self, other):
from sympy.functions.elementary.integers import floor, ceiling
from sympy.functions.elementary.miscellaneous import Min, Max
if other.is_Interval:
osup = other.sup
oinf = other.inf
# if other is [0, 10) we can only go up to 9
if osup.is_integer and other.right_open:
osup -= 1
if oinf.is_integer and other.left_open:
oinf += 1
# Take the most restrictive of the bounds set by the two sets
# round inwards
inf = ceiling(Max(self.inf, oinf))
sup = floor(Min(self.sup, osup))
# if we are off the sequence, get back on
if inf.is_finite and self.inf.is_finite:
off = (inf - self.inf) % self.step
else:
off = S.Zero
if off:
inf += self.step - off
return Range(inf, sup + 1, self.step)
if other == S.Naturals:
return self._intersect(Interval(1, S.Infinity))
if other == S.Integers:
return self
return None
def _contains(self, other):
if (((self.start - other)/self.step).is_integer or
((self.stop - other)/self.step).is_integer):
return _sympify(other >= self.inf and other <= self.sup)
elif (((self.start - other)/self.step).is_integer is False and
((self.stop - other)/self.step).is_integer is False):
return S.false
def __iter__(self):
if self.start is S.NegativeInfinity:
i = self.stop - self.step
step = -self.step
else:
i = self.start
step = self.step
while(i < self.stop and i >= self.start):
yield i
i += step
def __len__(self):
return (self.stop - self.start)//self.step
def __nonzero__(self):
return True
__bool__ = __nonzero__
def _ith_element(self, i):
return self.start + i*self.step
@property
def _last_element(self):
if self.stop is S.Infinity:
return S.Infinity
elif self.start is S.NegativeInfinity:
return self.stop - self.step
else:
return self._ith_element(len(self) - 1)
@property
def _inf(self):
return self.start
@property
def _sup(self):
return self.stop - self.step
@property
def _boundary(self):
return self
if PY3:
converter[range] = Range
else:
converter[xrange] = Range
def normalize_theta_set(theta):
"""
    Normalize a Real Set `theta` to the Interval [0, 2*pi). It returns
    the normalized value of theta in the Set. For an Interval, at most
    one cycle [0, 2*pi] is returned, i.e. for theta equal to [0, 10*pi]
    the returned normalized value is [0, 2*pi). As of now, intervals
    whose end points are not multiples of `pi` are not supported.
Raises
======
NotImplementedError
The algorithms for Normalizing theta Set are not yet
implemented.
ValueError
The input is not valid, i.e. the input is not a real set.
RuntimeError
It is a bug, please report to the github issue tracker.
Examples
========
>>> from sympy.sets.fancysets import normalize_theta_set
>>> from sympy import Interval, FiniteSet, pi
>>> normalize_theta_set(Interval(9*pi/2, 5*pi))
[pi/2, pi]
>>> normalize_theta_set(Interval(-3*pi/2, pi/2))
[0, 2*pi)
>>> normalize_theta_set(Interval(-pi/2, pi/2))
[0, pi/2] U [3*pi/2, 2*pi)
>>> normalize_theta_set(Interval(-4*pi, 3*pi))
[0, 2*pi)
>>> normalize_theta_set(Interval(-3*pi/2, -pi/2))
[pi/2, 3*pi/2]
>>> normalize_theta_set(FiniteSet(0, pi, 3*pi))
{0, pi}
"""
from sympy.functions.elementary.trigonometric import _pi_coeff as coeff
if theta.is_Interval:
interval_len = theta.measure
# one complete circle
if interval_len >= 2*S.Pi:
if interval_len == 2*S.Pi and theta.left_open and theta.right_open:
k = coeff(theta.start)
return Union(Interval(0, k*S.Pi, False, True),
Interval(k*S.Pi, 2*S.Pi, True, True))
return Interval(0, 2*S.Pi, False, True)
k_start, k_end = coeff(theta.start), coeff(theta.end)
if k_start is None or k_end is None:
raise NotImplementedError("Normalizing theta without pi as coefficient is "
"not yet implemented")
new_start = k_start*S.Pi
new_end = k_end*S.Pi
if new_start > new_end:
return Union(Interval(S.Zero, new_end, False, theta.right_open),
Interval(new_start, 2*S.Pi, theta.left_open, True))
else:
return Interval(new_start, new_end, theta.left_open, theta.right_open)
elif theta.is_FiniteSet:
new_theta = []
for element in theta:
k = coeff(element)
if k is None:
raise NotImplementedError('Normalizing theta without pi as '
'coefficient, is not Implemented.')
else:
new_theta.append(k*S.Pi)
return FiniteSet(*new_theta)
elif theta.is_Union:
return Union(*[normalize_theta_set(interval) for interval in theta.args])
elif theta.is_subset(S.Reals):
raise NotImplementedError("Normalizing theta when, it is of type %s is not "
"implemented" % type(theta))
else:
raise ValueError(" %s is not a real set" % (theta))
class ComplexRegion(Set):
"""
    Represents a set of complex numbers, i.e. a region of the complex
    plane, in either of the two standard forms, polar or rectangular
    coordinates.
    * Polar Form
    Input is in the form of the ProductSet or Union of ProductSets
    of the intervals of r and theta, together with the flag polar=True.
    Z = {z in C | z = r*[cos(theta) + I*sin(theta)], r in [r], theta in [theta]}
    * Rectangular Form
    Input is in the form of the ProductSet or Union of ProductSets
    of the intervals of x and y of the complex numbers in a plane.
    Rectangular form is the default input type.
Z = {z in C | z = x + I*y, x in [Re(z)], y in [Im(z)]}
Examples
========
>>> from sympy.sets.fancysets import ComplexRegion
>>> from sympy.sets import Interval
>>> from sympy import S, I, Union
>>> a = Interval(2, 3)
>>> b = Interval(4, 6)
>>> c = Interval(1, 8)
>>> c1 = ComplexRegion(a*b) # Rectangular Form
>>> c1
ComplexRegion([2, 3] x [4, 6], False)
* c1 represents the rectangular region in complex plane
surrounded by the coordinates (2, 4), (3, 4), (3, 6) and
(2, 6), of the four vertices.
>>> c2 = ComplexRegion(Union(a*b, b*c))
>>> c2
ComplexRegion([2, 3] x [4, 6] U [4, 6] x [1, 8], False)
* c2 represents the Union of two rectangular regions in complex
plane. One of them surrounded by the coordinates of c1 and
other surrounded by the coordinates (4, 1), (6, 1), (6, 8) and
(4, 8).
>>> 2.5 + 4.5*I in c1
True
>>> 2.5 + 6.5*I in c1
False
>>> r = Interval(0, 1)
>>> theta = Interval(0, 2*S.Pi)
>>> c2 = ComplexRegion(r*theta, polar=True) # Polar Form
>>> c2 # unit Disk
ComplexRegion([0, 1] x [0, 2*pi), True)
* c2 represents the region in complex plane inside the
Unit Disk centered at the origin.
>>> 0.5 + 0.5*I in c2
True
>>> 1 + 2*I in c2
False
>>> unit_disk = ComplexRegion(Interval(0, 1)*Interval(0, 2*S.Pi), polar=True)
>>> upper_half_unit_disk = ComplexRegion(Interval(0, 1)*Interval(0, S.Pi), polar=True)
>>> intersection = unit_disk.intersect(upper_half_unit_disk)
>>> intersection
ComplexRegion([0, 1] x [0, pi], True)
>>> intersection == upper_half_unit_disk
True
See Also
========
Reals
"""
is_ComplexRegion = True
def __new__(cls, sets, polar=False):
from sympy import symbols, Dummy, sympify, sin, cos
x, y, r, theta = symbols('x, y, r, theta', cls=Dummy)
I = S.ImaginaryUnit
polar = sympify(polar)
# Rectangular Form
if polar == False:
if all(_a.is_FiniteSet for _a in sets.args) and (len(sets.args) == 2):
# ** ProductSet of FiniteSets in the Complex Plane. **
# For Cases like ComplexRegion({2, 4}*{3}), It
# would return {2 + 3*I, 4 + 3*I}
complex_num = []
for x in sets.args[0]:
for y in sets.args[1]:
complex_num.append(x + I*y)
obj = FiniteSet(*complex_num)
else:
obj = ImageSet.__new__(cls, Lambda((x, y), x + I*y), sets)
obj._variables = (x, y)
obj._expr = x + I*y
# Polar Form
elif polar == True:
new_sets = []
# sets is Union of ProductSets
if not sets.is_ProductSet:
for k in sets.args:
new_sets.append(k)
# sets is ProductSets
else:
new_sets.append(sets)
# Normalize input theta
for k, v in enumerate(new_sets):
from sympy.sets import ProductSet
new_sets[k] = ProductSet(v.args[0],
normalize_theta_set(v.args[1]))
sets = Union(*new_sets)
obj = ImageSet.__new__(cls, Lambda((r, theta),
r*(cos(theta) + I*sin(theta))),
sets)
obj._variables = (r, theta)
obj._expr = r*(cos(theta) + I*sin(theta))
else:
raise ValueError("polar should be either True or False")
obj._sets = sets
obj._polar = polar
return obj
@property
def sets(self):
"""
Return the raw input sets of self.
Examples
========
>>> from sympy import Interval, ComplexRegion, Union
>>> a = Interval(2, 3)
>>> b = Interval(4, 5)
>>> c = Interval(1, 7)
>>> C1 = ComplexRegion(a*b)
>>> C1.sets
[2, 3] x [4, 5]
>>> C2 = ComplexRegion(Union(a*b, b*c))
>>> C2.sets
[2, 3] x [4, 5] U [4, 5] x [1, 7]
"""
return self._sets
@property
def args(self):
return (self._sets, self._polar)
@property
def variables(self):
return self._variables
@property
def expr(self):
return self._expr
@property
def psets(self):
"""
Return a tuple of the input sets (ProductSets) of self.
Examples
========
>>> from sympy import Interval, ComplexRegion, Union
>>> a = Interval(2, 3)
>>> b = Interval(4, 5)
>>> c = Interval(1, 7)
>>> C1 = ComplexRegion(a*b)
>>> C1.psets
([2, 3] x [4, 5],)
>>> C2 = ComplexRegion(Union(a*b, b*c))
>>> C2.psets
([2, 3] x [4, 5], [4, 5] x [1, 7])
"""
if self.sets.is_ProductSet:
psets = ()
psets = psets + (self.sets, )
else:
psets = self.sets.args
return psets
@property
def a_interval(self):
"""
Return the union of intervals of `x` when self is in
rectangular form, or the union of intervals of `r` when
self is in polar form.
Examples
========
>>> from sympy import Interval, ComplexRegion, Union
>>> a = Interval(2, 3)
>>> b = Interval(4, 5)
>>> c = Interval(1, 7)
>>> C1 = ComplexRegion(a*b)
>>> C1.a_interval
[2, 3]
>>> C2 = ComplexRegion(Union(a*b, b*c))
>>> C2.a_interval
[2, 3] U [4, 5]
"""
a_interval = []
for element in self.psets:
a_interval.append(element.args[0])
a_interval = Union(*a_interval)
return a_interval
@property
def b_interval(self):
"""
Return the union of intervals of `y` when self is in
rectangular form, or the union of intervals of `theta`
when self is in polar form.
Examples
========
>>> from sympy import Interval, ComplexRegion, Union
>>> a = Interval(2, 3)
>>> b = Interval(4, 5)
>>> c = Interval(1, 7)
>>> C1 = ComplexRegion(a*b)
>>> C1.b_interval
[4, 5]
>>> C2 = ComplexRegion(Union(a*b, b*c))
>>> C2.b_interval
[1, 7]
"""
b_interval = []
for element in self.psets:
b_interval.append(element.args[1])
b_interval = Union(*b_interval)
return b_interval
@property
def polar(self):
"""
Returns True if self is in polar form.
Examples
========
>>> from sympy import Interval, ComplexRegion, Union, S
>>> a = Interval(2, 3)
>>> b = Interval(4, 5)
>>> theta = Interval(0, 2*S.Pi)
>>> C1 = ComplexRegion(a*b)
>>> C1.polar
False
>>> C2 = ComplexRegion(a*theta, polar=True)
>>> C2.polar
True
"""
return self._polar
@property
def _measure(self):
"""
The measure of self.sets.
Examples
========
>>> from sympy import Interval, ComplexRegion, S
>>> a, b = Interval(2, 5), Interval(4, 8)
>>> c = Interval(0, 2*S.Pi)
>>> c1 = ComplexRegion(a*b)
>>> c1.measure
12
>>> c2 = ComplexRegion(a*c, polar=True)
>>> c2.measure
6*pi
"""
return self.sets._measure
def _contains(self, other):
from sympy.functions import arg, Abs
# self in rectangular form
if not self.polar:
re, im = other.as_real_imag()
for element in self.psets:
if And(element.args[0]._contains(re),
element.args[1]._contains(im)):
return True
return False
# self in polar form
elif self.polar:
if sympify(other).is_zero:
r, theta = S.Zero, S.Zero
else:
r, theta = Abs(other), arg(other)
for element in self.psets:
if And(element.args[0]._contains(r),
element.args[1]._contains(theta)):
return True
return False
def _intersect(self, other):
if other.is_ComplexRegion:
# self in rectangular form
if (not self.polar) and (not other.polar):
return ComplexRegion(Intersection(self.sets, other.sets))
# self in polar form
elif self.polar and other.polar:
r1, theta1 = self.a_interval, self.b_interval
r2, theta2 = other.a_interval, other.b_interval
new_r_interval = Intersection(r1, r2)
new_theta_interval = Intersection(theta1, theta2)
# 0 and 2*Pi means the same
if ((2*S.Pi in theta1 and S.Zero in theta2) or
(2*S.Pi in theta2 and S.Zero in theta1)):
new_theta_interval = Union(new_theta_interval,
FiniteSet(0))
return ComplexRegion(new_r_interval*new_theta_interval,
polar=True)
if other is S.Reals:
return other
if other.is_subset(S.Reals):
new_interval = []
# self in rectangular form
if not self.polar:
for element in self.psets:
if S.Zero in element.args[0]:
new_interval.append(element.args[0])
new_interval = Union(*new_interval)
return Intersection(new_interval, other)
# self in polar form
elif self.polar:
for element in self.psets:
if (0 in element.args[1]) or (S.Pi in element.args[1]):
new_interval.append(element.args[0])
new_interval = Union(*new_interval)
return Intersection(new_interval, other)
def _union(self, other):
if other.is_ComplexRegion:
# self in rectangular form
if (not self.polar) and (not other.polar):
return ComplexRegion(Union(self.sets, other.sets))
# self in polar form
elif self.polar and other.polar:
return ComplexRegion(Union(self.sets, other.sets), polar=True)
if self == S.Complexes:
return self
return None
class Complexes(with_metaclass(Singleton, ComplexRegion)):
def __new__(cls):
return ComplexRegion.__new__(cls, S.Reals*S.Reals)
def __eq__(self, other):
return other == ComplexRegion(S.Reals*S.Reals)
def __hash__(self):
return hash(ComplexRegion(S.Reals*S.Reals))
def __str__(self):
return "S.Complexes"
def __repr__(self):
return "S.Complexes"
|
{
"content_hash": "2e5e2315d347c791d0646eee7f4926fb",
"timestamp": "",
"source": "github",
"line_count": 945,
"max_line_length": 99,
"avg_line_length": 29.72275132275132,
"alnum_prop": 0.5315793221304471,
"repo_name": "Curious72/sympy",
"id": "4f2c46c112a1139a59027f767060fd6ec3afa3ed",
"size": "28088",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sympy/sets/fancysets.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "14138756"
},
{
"name": "Ruby",
"bytes": "304"
},
{
"name": "Scheme",
"bytes": "125"
},
{
"name": "Shell",
"bytes": "4734"
},
{
"name": "Tcl",
"bytes": "1048"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
}
|
__author__ = "Nitin Kumar, Rick Sherman"
__credits__ = "Jeremy Schulman"
import unittest
import os
from nose.plugins.attrib import attr
from jnpr.junos.device import Device
from jnpr.junos.rpcmeta import _RpcMetaExec
from jnpr.junos.facts.swver import version_info
from ncclient.manager import Manager, make_device_handler
from ncclient.transport import SSHSession
from mock import patch, MagicMock, call
from lxml import etree
@attr('unit')
class Test_RpcMetaExec(unittest.TestCase):
@patch('ncclient.manager.connect')
def setUp(self, mock_connect):
mock_connect.side_effect = self._mock_manager
self.dev = Device(host='1.1.1.1', user='rick', password='password123',
gather_facts=False)
self.dev.open()
self.rpc = _RpcMetaExec(self.dev)
def test_rpcmeta_constructor(self):
self.assertTrue(isinstance(self.rpc._junos, Device))
@patch('jnpr.junos.device.Device.execute')
def test_rpcmeta_load_config(self, mock_execute_fn):
root = etree.XML('<root><a>test</a></root>')
self.rpc.load_config(root)
self.assertEqual(mock_execute_fn.call_args[0][0].tag,
'load-configuration')
@patch('jnpr.junos.device.Device.execute')
def test_rpcmeta_load_config_with_configuration_tag(self, mock_execute_fn):
root = etree.XML(
'<configuration><root><a>test</a></root></configuration>')
self.rpc.load_config(root)
self.assertEqual(mock_execute_fn.call_args[0][0].tag,
'load-configuration')
@patch('jnpr.junos.device.Device.execute')
def test_rpcmeta_load_config_option_action(self, mock_execute_fn):
set_commands = """
set system host-name test_rpc
set system domain-name test.juniper.net
"""
self.rpc.load_config(set_commands, action='set')
self.assertEqual(mock_execute_fn.call_args[0][0].get('action'),
'set')
@patch('jnpr.junos.device.Device.execute')
def test_rpcmeta_option_format(self, mock_execute_fn):
set_commands = """
set system host-name test_rpc
set system domain-name test.juniper.net
"""
self.rpc.load_config(set_commands, format='text')
self.assertEqual(mock_execute_fn.call_args[0][0].get('format'),
'text')
@patch('jnpr.junos.device.Device.execute')
def test_rpcmeta_option_format_json(self, mock_execute_fn):
json_commands = """
{
"configuration" : {
"system" : {
"services" : {
"telnet" : [null]
}
}
}
}
"""
self.rpc.load_config(json_commands, format='json')
self.assertEqual(mock_execute_fn.call_args[0][0].get('format'),
'json')
@patch('jnpr.junos.device.Device.execute')
def test_rpcmeta_exec_rpc_vargs(self, mock_execute_fn):
self.rpc.system_users_information(dict(format='text'))
self.assertEqual(mock_execute_fn.call_args[0][0].get('format'),
'text')
@patch('jnpr.junos.device.Device.execute')
def test_rpcmeta_exec_rpc_kvargs(self, mock_execute_fn):
self.rpc.system_users_information(set_data=('test',))
self.assertEqual(mock_execute_fn.call_args[0][0][0].text,
'test')
@patch('jnpr.junos.device.Device.execute')
def test_rpcmeta_exec_rpc_normalize(self, mock_execute_fn):
self.rpc.any_ole_rpc(normalize=True)
self.assertEqual(mock_execute_fn.call_args[1], {'normalize': True})
@patch('jnpr.junos.device.Device.execute')
def test_rpcmeta_get_config(self, mock_execute_fn):
root = etree.XML('<root><a>test</a></root>')
self.rpc.get_config(root)
self.assertEqual(mock_execute_fn.call_args[0][0].tag,
'get-configuration')
def test_rpcmeta_exec_rpc_format_json_14_2(self):
self.dev._conn.rpc = MagicMock(side_effect=self._mock_manager)
self.dev._facts['version_info'] = version_info('14.2X46-D15.3')
op = self.rpc.get_system_users_information(dict(format='json'))
self.assertEqual(op['system-users-information'][0]
['uptime-information'][0]['date-time'][0]['data'],
u'4:43AM')
def test_rpcmeta_exec_rpc_format_json_gt_14_2(self):
self.dev._conn.rpc = MagicMock(side_effect=self._mock_manager)
self.dev._facts['version_info'] = version_info('15.1X46-D15.3')
op = self.rpc.get_system_users_information(dict(format='json'))
self.assertEqual(op['system-users-information'][0]
['uptime-information'][0]['date-time'][0]['data'],
u'4:43AM')
@patch('jnpr.junos.device.warnings')
def test_rpcmeta_exec_rpc_format_json_lt_14_2(self, mock_warn):
self.dev._conn.rpc = MagicMock(side_effect=self._mock_manager)
self.dev._facts['version_info'] = version_info('13.1X46-D15.3')
self.rpc.get_system_users_information(dict(format='json'))
mock_warn.assert_has_calls(call.warn(
'Native JSON support is only from 14.2 onwards', RuntimeWarning))
def _mock_manager(self, *args, **kwargs):
if kwargs:
if 'normalize' in kwargs and args:
return self._read_file(args[0].tag + '.xml')
device_params = kwargs['device_params']
device_handler = make_device_handler(device_params)
session = SSHSession(device_handler)
return Manager(session, device_handler)
if args:
return self._read_file(args[0].tag + '.xml')
def _read_file(self, fname):
from ncclient.xml_ import NCElement
fpath = os.path.join(os.path.dirname(__file__),
'rpc-reply', fname)
with open(fpath) as fp:
foo = fp.read()
if fname == 'get-system-users-information.xml':
return NCElement(foo,
self.dev._conn._device_handler.transform_reply())
rpc_reply = NCElement(foo, self.dev._conn.
_device_handler.transform_reply())\
._NCElement__doc[0]
return rpc_reply
|
{
"content_hash": "d26991d6f2b1e8908074c729428c1759",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 79,
"avg_line_length": 40.910828025477706,
"alnum_prop": 0.5881986610618091,
"repo_name": "fostasha/pynet_test",
"id": "8df095fdb8b3cde787749f11d30683f9e478e8dc",
"size": "6423",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "py-junos-eznc/tests/unit/test_rpcmeta.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6465"
},
{
"name": "Makefile",
"bytes": "7048"
},
{
"name": "Pascal",
"bytes": "408"
},
{
"name": "Puppet",
"bytes": "2263"
},
{
"name": "Python",
"bytes": "258744"
},
{
"name": "Ruby",
"bytes": "4840"
},
{
"name": "Shell",
"bytes": "597"
}
],
"symlink_target": ""
}
|
''' Tasks related to our celery functions '''
import time
import random
import datetime
import numpy as np
from io import BytesIO
from celery import Celery, current_task
from celery.result import AsyncResult
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib.dates import DateFormatter
import stravalib
import json
from python_weather import tcxweather
import configparser
Config = configparser.ConfigParser()
Config.read('config.ini')
ds_api = Config['darksky']['client_secret']
REDIS_URL = 'redis://redis:6379/0'
BROKER_URL = 'amqp://admin:mypass@rabbit//'
CELERY = Celery('tasks',
backend=REDIS_URL,
broker=BROKER_URL)
CELERY.conf.accept_content = ['json', 'msgpack']
CELERY.conf.result_serializer = 'msgpack'
def get_job(job_id):
'''
To be called from our web app.
The job ID is passed and the celery job is returned.
'''
return AsyncResult(job_id, app=CELERY)
@CELERY.task()
def simple_json_c():
x = [1,2,3,4,5]
y = [3,4,3,2,2]
jmeme = json.dumps(({'x': x, 'y': y}))
return jmeme
@CELERY.task()
def celery_json_strava(userkey):
client = stravalib.client.Client()
client.access_token = userkey
athlete = client.get_athlete()
# TODO : Make this better
for activity in client.get_activities(before="3000-01-01T00:00:00Z", limit=1):
latest_ride = activity
types = ['distance', 'time', 'latlng', 'altitude', 'heartrate', 'temp', ]
streams = client.get_activity_streams(latest_ride.id, types=types, resolution='medium')
y = streams['altitude'].data
x = streams['distance'].data
hr = streams['time'].data
x = np.array(x)
x = x / 1000
x = np.around(x, decimals=3)
x = x.tolist()
#my_list = list()
#for ii, data in enumerate(y):
# my_list.append((x[ii], data))
jmeme = json.dumps([{'key': ['Distance [km]', 'Altitude [m]'], 'x': x, 'y':y,'hr':hr}])
return jmeme
@CELERY.task()
def celery_json_weather(userkey, course_id, date):
current_task.update_state(state='PROGRESS', meta={'current': 0.1})
client = stravalib.client.Client()
client.access_token = userkey
athlete = client.get_athlete()
route = client.get_route_streams(course_id)
current_task.update_state(state='PROGRESS', meta={'current': 0.2})
weather = tcxweather.RideWeather(strava_course=route)
weather.speed(kph=25)
weather.set_ride_start_time(unix=date)
weather.decimate(Points=10)
current_task.update_state(state='PROGRESS', meta={'current': 0.3})
weather.get_weather_data(ds_api, fileDirectory='weatherWEB_TEST', fileName='weatherWebTest', units='si')
current_task.update_state(state='PROGRESS', meta={'current': 0.8})
weather.get_forecast()
current_task.update_state(state='PROGRESS', meta={'current': 0.9})
dist = weather.dist
#y = route['altitude'].data
#x = route['distance'].data
#hr = route['altitude'].data
app_temp = weather.weather['apparent_temperature']
rel_wind = weather.weather['rel_wind_bear']
dist = np.array(dist)
dist = dist/1000
dist = np.around(dist, decimals=3)
dist = dist.tolist()
wind_speed = (np.array(weather.weather['wind_speed'])*3.6).tolist()
wind_head = (np.array(weather.weather['wind_head'])*3.6).tolist()
wind_precip = weather.weather['precip_intensity']
wind_cross = (np.array(weather.weather['wind_cross'])*3.6).tolist()
#my_list = list()
#for ii, data in enumerate(y):
# my_list.append((x[ii], data))
# TODO prepare data ready to dump straight into graph
jmeme = json.dumps([{'key': ['Distance [km]', 'Apparent Temperature [°C]', 'Rel Wind Bearing [°]',
'Wind Speed [km/h]', 'Head Wind Component [km/h]', 'Cross Wind Component [km/h]',
'Precipitation [mm]'],
'dist': dist, 'app_temp':app_temp, 'rel_wind':rel_wind, 'wind_speed':wind_speed,
'wind_head':wind_head, 'wind_cross':wind_cross, 'wind_precip':wind_precip}])
return jmeme
@CELERY.task()
def simple(userkey):
current_task.update_state(state='PROGRESS', meta={'current':0.1})
current_task.update_state(state='PROGRESS', meta={'current':0.3})
fig=Figure()
ax=fig.add_subplot(111)
client = stravalib.client.Client()
client.access_token = userkey
athlete = client.get_athlete()
# TODO : Make this better
for activity in client.get_activities(before="3000-01-01T00:00:00Z", limit=1):
latest_ride = activity
types = ['distance', 'time', 'latlng', 'altitude', 'heartrate', 'temp', ]
streams = client.get_activity_streams(latest_ride.id, types=types, resolution='medium')
y = streams['altitude'].data
x = streams['distance'].data
ax.plot(x, y, '-')
ax.set_title('{name} lives in {city}, {key}'.format(name=athlete.firstname, city=athlete.city, key=userkey))
#ax.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d'))
#fig.autofmt_xdate()
canvas=FigureCanvas(fig)
current_task.update_state(state='PROGRESS', meta={'current':0.8})
png_output = BytesIO()
canvas.print_png(png_output)
out = png_output.getvalue()
return out
@CELERY.task()
def get_data_from_strava():
'''
Generate a random image.
A sleep makes this task take artifically longer.
'''
current_task.update_state(state='PROGRESS', meta={'current':0.1})
time.sleep(2)
current_task.update_state(state='PROGRESS', meta={'current':0.3})
fig = Figure()
ax_handle = fig.add_subplot(111)
x_axis = []
y_axis = []
now = datetime.datetime.now()
delta = datetime.timedelta(days=1)
current_task.update_state(state='PROGRESS', meta={'current':0.5})
for _ in range(10):
x_axis.append(now)
now += delta
y_axis.append(random.randint(0, 1000))
ax_handle.plot_date(x_axis, y_axis, '-')
ax_handle.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d'))
fig.autofmt_xdate()
canvas = FigureCanvas(fig)
current_task.update_state(state='PROGRESS', meta={'current':0.8})
png_output = BytesIO()
canvas.print_png(png_output)
out = png_output.getvalue()
return out
|
{
"content_hash": "07dd8f60d8a7612175f7f25ccf3fb7a5",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 114,
"avg_line_length": 35.56818181818182,
"alnum_prop": 0.6442492012779553,
"repo_name": "will-jj/strava_flask",
"id": "bc76b2d7ce0aac53d45d167de42a496e23ca0510",
"size": "6262",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tasks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1391"
},
{
"name": "HTML",
"bytes": "15648"
},
{
"name": "JavaScript",
"bytes": "14346"
},
{
"name": "Python",
"bytes": "16114"
}
],
"symlink_target": ""
}
|
"""
For more details, see the class documentation.
"""
class LegislatorFilter:
"""
This class encapsulates functionality and data entities for filtering legislator data
based on the client's query.
"""
def __init__(self, north_east, south_west):
"""
Initializes the C{LegislatorFilter} object with the C{north_east} and
C{south_west} bounds. No implementation is required yet.
"""
pass
|
{
"content_hash": "b28426797a98f611e2d597ae4651028c",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 89,
"avg_line_length": 23.352941176470587,
"alnum_prop": 0.6347607052896725,
"repo_name": "tushar-agarwal/map_annotate",
"id": "26441078a457a936ac1aac90f1a535a9a1c78dbf",
"size": "397",
"binary": false,
"copies": "3",
"ref": "refs/heads/wiki-nearby",
"path": "map_annotate_app/filters/LegislatorFilter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6418"
},
{
"name": "HTML",
"bytes": "10538"
},
{
"name": "JavaScript",
"bytes": "46133"
},
{
"name": "Python",
"bytes": "30907"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('job', '0022_jobtype_configuration'),
]
operations = [
migrations.AddField(
model_name='jobtype',
name='shared_mem_required',
field=models.FloatField(default=0.0),
preserve_default=True,
),
]
|
{
"content_hash": "31294fceb6d90533dacd997e1a2dc135",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 49,
"avg_line_length": 22.263157894736842,
"alnum_prop": 0.5933806146572104,
"repo_name": "ngageoint/scale",
"id": "f36487987176cbf1024f31ea9edf2694d460f063",
"size": "447",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scale/job/migrations/0023_jobtype_shared_mem_required.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7219"
},
{
"name": "CSS",
"bytes": "12193"
},
{
"name": "Dockerfile",
"bytes": "14853"
},
{
"name": "HCL",
"bytes": "301"
},
{
"name": "HTML",
"bytes": "48818"
},
{
"name": "JavaScript",
"bytes": "503"
},
{
"name": "Makefile",
"bytes": "5852"
},
{
"name": "Python",
"bytes": "5295677"
},
{
"name": "Shell",
"bytes": "26650"
}
],
"symlink_target": ""
}
|
"""Main API for Authbox.
Your business logic should subclass BaseDispatcher and set up your peripherals
in its __init__ method. Most simple uses will use callbacks for everything.
See two_button.py as an example workflow.
Peripherals are kept in other files in this same package, and should be listed
in CLASS_REGISTRY so they can be loaded lazily.
"""
from __future__ import print_function
import sys
import threading
import traceback
import types
from authbox.compat import queue
from RPi import GPIO
# The line above simplifies imports for other modules that are already importing from api.
# TODO give each object a logger and use that instead of prints
CLASS_REGISTRY = [
"authbox.badgereader_hid_keystroking.HIDKeystrokingReader",
"authbox.badgereader_wiegand_gpio.WiegandGPIOReader",
"authbox.gpio_button.Button",
"authbox.gpio_relay.Relay",
"authbox.gpio_buzzer.Buzzer",
"authbox.timer.Timer",
]
# Add this to event_queue to request a graceful shutdown.
SHUTDOWN_SENTINEL = object()
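# A minimal illustration (assumed usage, not taken from the original source):
# any peripheral thread can request a graceful stop by enqueueing the
# sentinel, which run_loop() below compares by identity:
#   self.event_queue.put(SHUTDOWN_SENTINEL)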
class BaseDispatcher(object):
def __init__(self, config):
self.config = config
self.event_queue = queue.Queue() # unbounded
self.threads = []
def load_config_object(self, name, **kwargs):
# N.b. args are from config, kwargs are passed from python.
# This sometimes causes confusing error messages like
# "takes at least 5 arguments (5 given)".
config_items = split_escaped(self.config.get("pins", name), preserve=True)
objs = []
for item in config_items:
options = list(split_escaped(item.strip(), glue=":"))
cls_name = options[0]
for c in CLASS_REGISTRY:
if c.endswith("." + cls_name):
cls = _import(c)
break
else:
# This is a Python for-else, which executes if the body above didn't
# execute 'break'.
raise Exception("Unknown item", name)
print("Instantiating", cls, self.event_queue, name, options[1:], kwargs)
obj = cls(self.event_queue, name, *options[1:], **kwargs)
objs.append(obj)
self.threads.append(obj)
if len(objs) == 1:
setattr(self, name, obj)
else:
setattr(self, name, MultiProxy(objs))
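# A hypothetical config sketch (the [pins] section name comes from the code
# above; the class names are from CLASS_REGISTRY, but the pin numbers are
# illustrative assumptions):
#   [pins]
#   door = Relay:7
#   badge = WiegandGPIOReader:16:18
# load_config_object('door') would then instantiate Relay(event_queue,
# 'door', '7') and expose it as self.door.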
def run_loop(self):
# Doesn't really support calling run_loop() more than once
for th in self.threads:
th.start()
try:
while True:
# We pass a small timeout because .get(block=True) without it causes
# trouble handling Ctrl-C.
try:
item = self.event_queue.get(timeout=1.0)
except queue.Empty:
continue
if item is SHUTDOWN_SENTINEL:
break
# These only happen here to serialize access regardless of what thread
# handled it.
func, args = item[0], item[1:]
try:
func(*args)
except Exception as e:
traceback.print_exc()
print("Got exception", repr(e), "executing", func, args)
except KeyboardInterrupt:
print("Got Ctrl-C, shutting down.")
# Assuming all threads are daemonized, we will now shut down.
class BaseDerivedThread(threading.Thread):
def __init__(self, event_queue, config_name):
# TODO should they also have numeric ids?
thread_name = "%s %s" % (self.__class__.__name__, config_name)
super(BaseDerivedThread, self).__init__(name=thread_name)
self.daemon = True
self.event_queue = event_queue
self.config_name = config_name
def run(self):
while True:
try:
self.run_inner()
except Exception:
traceback.print_exc()
class BasePinThread(BaseDerivedThread):
def __init__(
self, event_queue, config_name, input_pin, output_pin, initial_output=GPIO.LOW
):
super(BasePinThread, self).__init__(event_queue, config_name)
self.input_pin = input_pin
self.output_pin = output_pin
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False) # for reusing pins
if self.input_pin:
GPIO.setup(self.input_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
if self.output_pin:
GPIO.setup(self.output_pin, GPIO.OUT, initial=initial_output)
class BaseWiegandPinThread(BaseDerivedThread):
def __init__(self, event_queue, config_name, d0_pin, d1_pin):
super(BaseWiegandPinThread, self).__init__(event_queue, config_name)
self.d0_pin = d0_pin
self.d1_pin = d1_pin
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False) # for reusing pins
if self.d0_pin:
GPIO.setup(self.d0_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
if self.d1_pin:
GPIO.setup(self.d1_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
class NoMatchingDevice(Exception):
"""Generic exception for missing devices."""
def _import(name):
module, object_name = name.rsplit(".", 1)
# The return value of __import__ requires walking the dots, so
# this is a fairly standard workaround that's easier. Intermediate
# names appear to always get added to sys.modules.
__import__(module)
return getattr(sys.modules[module], object_name)
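# Illustrative use (the dotted name is taken from CLASS_REGISTRY above):
#   _import('authbox.timer.Timer') imports authbox.timer and returns the
#   Timer class object from sys.modules.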
class MultiMethodProxy(object):
def __init__(self, objs, meth):
self.objs = objs
self.meth = meth
def __call__(self, *args, **kwargs):
for i in self.objs:
getattr(i, self.meth)(*args, **kwargs)
class MultiProxy(object):
def __init__(self, objs):
self.objs = objs
def __getattr__(self, name):
if isinstance(getattr(self.objs[0], name), types.MethodType):
return MultiMethodProxy(self.objs, name)
else:
return getattr(self.objs[0], name)
def split_escaped(s, glue=",", preserve=False):
"""Handle single-char escapes using backslash."""
buf = []
it = iter(s)
for c in it:
if c == glue:
yield "".join(buf)
del buf[:]
elif c == "\\":
if preserve:
buf.append(c)
c = next(it)
buf.append(c)
else:
buf.append(c)
if buf:
yield "".join(buf)
|
{
"content_hash": "b3c14e04b576ea1ef5574ad54f6d8933",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 90,
"avg_line_length": 32.52261306532663,
"alnum_prop": 0.5894622991347342,
"repo_name": "thatch/makerspace-auth",
"id": "f8ec85ecc7addd67ebb5f22abbdb32538ce59999",
"size": "7074",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "software/authbox/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1402"
},
{
"name": "Python",
"bytes": "75946"
},
{
"name": "Shell",
"bytes": "3662"
}
],
"symlink_target": ""
}
|
from django.utils.module_loading import import_string
from .app import AppConfig # noqa: F401
from .appmodel import AppModelConfig # noqa: F401
from .legacy import LegacyConfig # noqa: F401
from ..settings import PUSH_NOTIFICATIONS_SETTINGS as SETTINGS # noqa: I001
manager = None
def get_manager(reload=False):
global manager
if not manager or reload is True:
manager = import_string(SETTINGS["CONFIG"])()
return manager
# implementing get_manager as a function allows tests to reload settings
get_manager()
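# Illustrative test-time sketch (an assumption, not from the original source):
# swapping the configured manager class and forcing re-instantiation:
#   SETTINGS["CONFIG"] = "push_notifications.conf.legacy.LegacyConfig"
#   manager = get_manager(reload=True)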
|
{
"content_hash": "e6902652e9459a4421a986116de46be6",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 76,
"avg_line_length": 23.954545454545453,
"alnum_prop": 0.7647058823529411,
"repo_name": "rsalmaso/django-push-notifications",
"id": "4e798ccdc9b99b8140904d546fce1cce5342c32a",
"size": "527",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "push_notifications/conf/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "135531"
}
],
"symlink_target": ""
}
|
"""Simple REST service for creating users with useradd"""
from __future__ import print_function
import json
import os
import sys
from pwd import getpwnam
from subprocess import Popen, PIPE
from tornado import gen, web
from tornado.log import app_log
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.netutil import bind_unix_socket
from tornado.options import define, parse_command_line, options
class UserHandler(web.RequestHandler):
def get_user(self, name):
"""Get a user struct by name, None if no such user"""
try:
return getpwnam(name)
except KeyError:
return None
def write_error(self, status_code, **kwargs):
"""Simple (not html) errors"""
exc = kwargs['exc_info'][1]
self.write(exc.log_message or str(exc))
def new_user(self, name):
"""Create a new user.
Return the new user's struct
"""
group = self.settings.get('group', None)
shell = self.settings.get('shell', '/bin/bash')
skeldir = self.settings.get('skeldir', None)
cmd = ['useradd', '-m', '-s', shell]
if group:
cmd.extend(['-G', group])
if skeldir and os.path.exists(skeldir):
cmd.extend(['-k', skeldir])
cmd.append(name)
app_log.info("Running %s", cmd)
p = Popen(cmd, stderr=PIPE)
_, err = p.communicate()
if p.returncode:
err = err.decode('utf8', 'replace').strip()
raise web.HTTPError(400, err)
return self.get_user(name)
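# For illustration (derived directly from the command construction above):
# with group='students' and the default shell, new_user('alice') runs roughly
#   useradd -m -s /bin/bash -G students alice
# and then returns the freshly created pwd struct via get_user().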
def post(self, name):
user = self.get_user(name)
d = {}
if user is None:
# User creation is disabled; report an empty result and stop here,
# otherwise the attribute loop below would fail on None.
# user = self.new_user(name)
self.finish(json.dumps(d))
return
for attr in ['name', 'dir', 'shell', 'uid', 'gid']:
d[attr] = getattr(user, 'pw_' + attr)
self.finish(json.dumps(d))
def main():
define('ip', default=None, help='IP to listen on')
define('port', default=None, help='port to listen on')
define('socket', default=None, help='unix socket path to bind (instead of ip:port)')
define('group', default='', help='comma separated group list for new users `students,other`')
define('skeldir', default='', help='skeleton directory that will be used for new homedirs')
define('shell', default='/bin/bash', help='default shell')
parse_command_line()
if not options.socket and not options.port:
options.socket = '/var/run/restuser.sock'
app = web.Application(
[(r'/([^/]+)', UserHandler)],
group=options.group,
skeldir=options.skeldir,
shell=options.shell)
if options.socket:
socket = bind_unix_socket(options.socket, mode=0o600)
server = HTTPServer(app)
server.add_socket(socket)
else:
app.listen(options.port, options.ip)
try:
IOLoop.current().start()
except KeyboardInterrupt:
print("\ninterrupted\n", file=sys.stderr)
return
if __name__ == '__main__':
main()
|
{
"content_hash": "8c37592373a9242b7a213593670f7355",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 97,
"avg_line_length": 31.693877551020407,
"alnum_prop": 0.5943335479716677,
"repo_name": "kylemvz/nbhub",
"id": "911d6a3a84eba403d6cf607a2888567f5907f1d0",
"size": "3106",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "restuser/restuser.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "42684"
},
{
"name": "Shell",
"bytes": "2353"
}
],
"symlink_target": ""
}
|
"""Configuration system for CherryPy.
Configuration in CherryPy is implemented via dictionaries. Keys are strings
which name the mapped value, which may be of any type.
Architecture
------------
CherryPy Requests are part of an Application, which runs in a global context,
and configuration data may apply to any of those three scopes:
Global: configuration entries which apply everywhere are stored in
cherrypy.config.
Application: entries which apply to each mounted application are stored
on the Application object itself, as 'app.config'. This is a two-level
dict where each key is a path, or "relative URL" (for example, "/" or
"/path/to/my/page"), and each value is a config dict. Usually, this
data is provided in the call to tree.mount(root(), config=conf),
although you may also use app.merge(conf).
Request: each Request object possesses a single 'Request.config' dict.
Early in the request process, this dict is populated by merging global
config entries, Application entries (whose path equals or is a parent
of Request.path_info), and any config acquired while looking up the
page handler (see next).
Declaration
-----------
Configuration data may be supplied as a Python dictionary, as a filename,
or as an open file object. When you supply a filename or file, CherryPy
uses Python's builtin ConfigParser; you declare Application config by
writing each path as a section header:
[/path/to/my/page]
request.stream = True
To declare global configuration entries, place them in a [global] section.
You may also declare config entries directly on the classes and methods
(page handlers) that make up your CherryPy application via the '_cp_config'
attribute. For example:
class Demo:
_cp_config = {'tools.gzip.on': True}
def index(self):
return "Hello world"
index.exposed = True
index._cp_config = {'request.show_tracebacks': False}
Note, however, that this behavior is only guaranteed for the default
dispatcher. Other dispatchers may have different restrictions on where
you can attach _cp_config attributes.
Namespaces
----------
Configuration keys are separated into namespaces by the first "." in the key.
Current namespaces:
engine: Controls the 'application engine', including autoreload.
These can only be declared in the global config.
tree: Grafts cherrypy.Application objects onto cherrypy.tree.
These can only be declared in the global config.
hooks: Declares additional request-processing functions.
log: Configures the logging for each application.
These can only be declared in the global or / config.
request: Adds attributes to each Request.
response: Adds attributes to each Response.
server: Controls the default HTTP server via cherrypy.server.
These can only be declared in the global config.
tools: Runs and configures additional request-processing packages.
wsgi: Adds WSGI middleware to an Application's "pipeline".
These can only be declared in the app's root config ("/").
checker: Controls the 'checker', which looks for common errors in
app state (including config) when the engine starts.
Global config only.
The only key that does not exist in a namespace is the "environment" entry.
This special entry 'imports' other config entries from a template stored in
cherrypy._cpconfig.environments[environment]. It only applies to the global
config, and only when you use cherrypy.config.update.
You can define your own namespaces to be called at the Global, Application,
or Request level, by adding a named handler to cherrypy.config.namespaces,
app.namespaces, or app.request_class.namespaces. The name can
be any string, and the handler must be either a callable or a (Python 2.5
style) context manager.
"""
import ConfigParser
try:
set
except NameError:
from sets import Set as set
import sys
import cherrypy
environments = {
"staging": {
'engine.autoreload_on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': False,
},
"production": {
'engine.autoreload_on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': False,
'log.screen': False,
},
"embedded": {
# For use with CherryPy embedded in another deployment stack.
'engine.autoreload_on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': False,
'log.screen': False,
'engine.SIGHUP': None,
'engine.SIGTERM': None,
},
"test_suite": {
'engine.autoreload_on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': True,
'log.screen': False,
},
}
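# A short usage sketch (standard cherrypy.config API; the choice of
# 'production' here is illustrative): selecting a template copies its entries
# into the global config:
#   import cherrypy
#   cherrypy.config.update({'environment': 'production'})
# Keys supplied alongside 'environment' still win, because update() below
# only fills in template keys that are not already present in the dict.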
def as_dict(config):
"""Return a dict from 'config' whether it is a dict, file, or filename."""
if isinstance(config, basestring):
config = _Parser().dict_from_file(config)
elif hasattr(config, 'read'):
config = _Parser().dict_from_file(config)
return config
def merge(base, other):
"""Merge one app config (from a dict, file, or filename) into another.
If the given config is a filename, it will be appended to
the list of files to monitor for "autoreload" changes.
"""
if isinstance(other, basestring):
cherrypy.engine.autoreload.files.add(other)
# Load other into base
for section, value_map in as_dict(other).iteritems():
base.setdefault(section, {}).update(value_map)
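# Illustrative call (the filename is hypothetical): merging a config file
# into a mounted app's config also registers it for autoreload monitoring:
#   merge(app.config, 'site.conf')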
class NamespaceSet(dict):
"""A dict of config namespace names and handlers.
Each config entry should begin with a namespace name; the corresponding
namespace handler will be called once for each config entry in that
namespace, and will be passed two arguments: the config key (with the
namespace removed) and the config value.
Namespace handlers may be any Python callable; they may also be
Python 2.5-style 'context managers', in which case their __enter__
method should return a callable to be used as the handler.
See cherrypy.tools (the Toolbox class) for an example.
"""
def __call__(self, config):
"""Iterate through config and pass it to each namespace handler.
'config' should be a flat dict, where keys use dots to separate
namespaces, and values are arbitrary.
The first name in each config key is used to look up the corresponding
namespace handler. For example, a config entry of {'tools.gzip.on': v}
will call the 'tools' namespace handler with the args: ('gzip.on', v)
"""
# Separate the given config into namespaces
ns_confs = {}
for k in config:
if "." in k:
ns, name = k.split(".", 1)
bucket = ns_confs.setdefault(ns, {})
bucket[name] = config[k]
# I chose __enter__ and __exit__ so someday this could be
# rewritten using Python 2.5's 'with' statement:
# for ns, handler in self.iteritems():
# with handler as callable:
# for k, v in ns_confs.get(ns, {}).iteritems():
# callable(k, v)
for ns, handler in self.iteritems():
exit = getattr(handler, "__exit__", None)
if exit:
callable = handler.__enter__()
no_exc = True
try:
try:
for k, v in ns_confs.get(ns, {}).iteritems():
callable(k, v)
except:
# The exceptional case is handled here
no_exc = False
if exit is None:
raise
if not exit(*sys.exc_info()):
raise
# The exception is swallowed if exit() returns true
finally:
# The normal and non-local-goto cases are handled here
if no_exc and exit:
exit(None, None, None)
else:
for k, v in ns_confs.get(ns, {}).iteritems():
handler(k, v)
def __repr__(self):
return "%s.%s(%s)" % (self.__module__, self.__class__.__name__,
dict.__repr__(self))
def __copy__(self):
newobj = self.__class__()
newobj.update(self)
return newobj
copy = __copy__
class Config(dict):
"""The 'global' configuration data for the entire CherryPy process."""
defaults = {
'tools.log_tracebacks.on': True,
'tools.log_headers.on': True,
'tools.trailing_slash.on': True,
}
namespaces = NamespaceSet(
**{"server": lambda k, v: setattr(cherrypy.server, k, v),
"log": lambda k, v: setattr(cherrypy.log, k, v),
"checker": lambda k, v: setattr(cherrypy.checker, k, v),
})
def __init__(self):
self.reset()
def reset(self):
"""Reset self to default values."""
self.clear()
dict.update(self, self.defaults)
def update(self, config):
"""Update self from a dict, file or filename."""
if isinstance(config, basestring):
# Filename
cherrypy.engine.autoreload.files.add(config)
config = _Parser().dict_from_file(config)
elif hasattr(config, 'read'):
# Open file object
config = _Parser().dict_from_file(config)
else:
config = config.copy()
if isinstance(config.get("global", None), dict):
if len(config) > 1:
cherrypy.checker.global_config_contained_paths = True
config = config["global"]
which_env = config.get('environment')
if which_env:
env = environments[which_env]
for k in env:
if k not in config:
config[k] = env[k]
if 'tools.staticdir.dir' in config:
config['tools.staticdir.section'] = "global"
dict.update(self, config)
self.namespaces(config)
def __setitem__(self, k, v):
dict.__setitem__(self, k, v)
self.namespaces({k: v})
def _engine_namespace_handler(k, v):
"""Backward compatibility handler for the "engine" namespace."""
engine = cherrypy.engine
if k == 'autoreload_on':
if v:
engine.autoreload.subscribe()
else:
engine.autoreload.unsubscribe()
elif k == 'autoreload_frequency':
engine.autoreload.frequency = v
elif k == 'autoreload_match':
engine.autoreload.match = v
elif k == 'reload_files':
engine.autoreload.files = set(v)
elif k == 'deadlock_poll_freq':
engine.timeout_monitor.frequency = v
elif k == 'SIGHUP':
engine.listeners['SIGHUP'] = set([v])
elif k == 'SIGTERM':
engine.listeners['SIGTERM'] = set([v])
elif "." in k:
plugin, attrname = k.split(".", 1)
plugin = getattr(engine, plugin)
if attrname == 'on':
if v and callable(getattr(plugin, 'subscribe', None)):
plugin.subscribe()
return
elif (not v) and callable(getattr(plugin, 'unsubscribe', None)):
plugin.unsubscribe()
return
setattr(plugin, attrname, v)
else:
setattr(engine, k, v)
Config.namespaces["engine"] = _engine_namespace_handler
def _tree_namespace_handler(k, v):
"""Namespace handler for the 'tree' config namespace."""
cherrypy.tree.graft(v, v.script_name)
cherrypy.engine.log("Mounted: %s on %s" % (v, v.script_name or "/"))
Config.namespaces["tree"] = _tree_namespace_handler
class _Parser(ConfigParser.ConfigParser):
"""Sub-class of ConfigParser that keeps the case of options and that raises
an exception if the file cannot be read.
"""
def optionxform(self, optionstr):
return optionstr
def read(self, filenames):
if isinstance(filenames, basestring):
filenames = [filenames]
for filename in filenames:
# try:
# fp = open(filename)
# except IOError:
# continue
fp = open(filename)
try:
self._read(fp, filename)
finally:
fp.close()
def as_dict(self, raw=False, vars=None):
"""Convert an INI file to a dictionary"""
# Load INI file into a dict
from cherrypy.lib import unrepr
result = {}
for section in self.sections():
if section not in result:
result[section] = {}
for option in self.options(section):
value = self.get(section, option, raw, vars)
try:
value = unrepr(value)
except Exception, x:
msg = ("Config error in section: %r, option: %r, "
"value: %r. Config values must be valid Python." %
(section, option, value))
raise ValueError(msg, x.__class__.__name__, x.args)
result[section][option] = value
return result
def dict_from_file(self, file):
if hasattr(file, 'read'):
self.readfp(file)
else:
self.read(file)
return self.as_dict()
del ConfigParser
|
{
"content_hash": "7a116c9b7c56015f07204cbf8f394f6d",
"timestamp": "",
"source": "github",
"line_count": 385,
"max_line_length": 79,
"avg_line_length": 37.08311688311688,
"alnum_prop": 0.5746305246200182,
"repo_name": "cread/ec2id",
"id": "f40e4fbfb5230a5036ba0cd5bf4ddd2ef2aa6db6",
"size": "14277",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cherrypy/_cpconfig.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "807550"
},
{
"name": "Shell",
"bytes": "223"
}
],
"symlink_target": ""
}
|
"""Unit tests for the API version support."""
import path_initializer
path_initializer.InitSysPath()
import unittest
from gcutil_lib import gcutil_unittest
from gcutil_lib import version
class VersionTest(gcutil_unittest.GcutilTestCase):
def testApiVersion(self):
api_version = version.get(self.version)
self.assertTrue(api_version is not None)
self.assertEqual(self.version.split('_')[-1], api_version._name)
def testValidateSupportedVersions(self):
base_version = version._ApiVersions._ExtractBaseVersion(self.version)
self.assertEqual(self.version.split('_')[-1], base_version)
def testApiVersionsAreModuleGlobals(self):
base_version = version._ApiVersions._ExtractBaseVersion(self.version)
self.assertTrue(hasattr(version, base_version))
version_object = getattr(version, base_version)
self.assertEqual(base_version, version_object._name)
class SimpleVersionTest(unittest.TestCase):
def testKnownVersionsExist(self):
self.assertEqual('v1', version.v1._name)
def testValidateAndExtractBaseVersion(self):
valid_versions = (
'v1',
'v2',
'v1beta1',
'v1alpha2')
for valid in valid_versions:
base_version = version._ApiVersions._ExtractBaseVersion(valid)
self.assertEqual(valid.split('_')[-1], base_version)
def testValidateAndExtractBaseVersionInvalid(self):
invalid_versions = (
'v',
'1',
'beta',
'vbeta',
'v1beta',
'v1beta1a'
'1beta17')
for invalid in invalid_versions:
self.assertRaises(
ValueError, version._ApiVersions._ExtractBaseVersion, invalid)
def testVersionComparison(self):
versions = version._ApiVersions(('v1',))
v1 = versions.get('v1')
self.assertEqual(0, v1._index)
def testVersionStringComparison(self):
versions = version._ApiVersions(('v1', 'v2', 'v3'))
v2 = versions.get('v2')
self.assertTrue(v2 < 'v3')
self.assertTrue('v1' < v2)
self.assertEqual('v2', v2)
self.assertEqual(v2, 'v2')
self.assertNotEqual(v2, versions.get('v3'))
self.assertNotEqual(v2, 'v1')
def testVersionGetItem(self):
versions = version._ApiVersions(('v1', 'v2', 'v3'))
v1 = versions['v1']
v2 = versions['v2']
v3 = versions['v3']
self.assertTrue(v1 < v2 < v3)
if __name__ == '__main__':
unittest.main(testLoader=gcutil_unittest.GcutilLoader())
|
{
"content_hash": "a5038592c23d60dadbd44acd6da95a62",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 73,
"avg_line_length": 28.702380952380953,
"alnum_prop": 0.6744089589382,
"repo_name": "ychen820/microblog",
"id": "0c8bf71685c89dc926b83702b4c47ab156571698",
"size": "3028",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "y/google-cloud-sdk/platform/gcutil/lib/google_compute_engine/gcutil_lib/version_test.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "414229"
},
{
"name": "CSS",
"bytes": "257787"
},
{
"name": "Emacs Lisp",
"bytes": "4733"
},
{
"name": "Groff",
"bytes": "1236200"
},
{
"name": "HTML",
"bytes": "2617468"
},
{
"name": "JavaScript",
"bytes": "1106437"
},
{
"name": "Makefile",
"bytes": "15714"
},
{
"name": "Objective-C",
"bytes": "26302"
},
{
"name": "PHP",
"bytes": "2511443"
},
{
"name": "Perl",
"bytes": "1109010"
},
{
"name": "Python",
"bytes": "71588489"
},
{
"name": "R",
"bytes": "548"
},
{
"name": "Shell",
"bytes": "49796"
},
{
"name": "TeX",
"bytes": "3149"
},
{
"name": "VimL",
"bytes": "5645"
}
],
"symlink_target": ""
}
|
'''
The MIT License (MIT)
Copyright (c) 2016 Sean UN Wood
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@author: Sean UN Wood
'''
from gccNMFFunctions import *
from gccNMFPlotting import *
def runGCCNMF(mixtureFilePrefix, windowSize, hopSize, numTDOAs, microphoneSeparationInMetres, numTargets=None, windowFunction=hanning):
maxTDOA = microphoneSeparationInMetres / SPEED_OF_SOUND_IN_METRES_PER_SECOND
tdoasInSeconds = linspace(-maxTDOA, maxTDOA, numTDOAs).astype(float32)
mixtureFileName = getMixtureFileName(mixtureFilePrefix)
stereoSamples, sampleRate = loadMixtureSignal(mixtureFileName)
complexMixtureSpectrogram = computeComplexMixtureSpectrogram(stereoSamples, windowSize, hopSize, windowFunction)
numChannels, numFrequencies, numTime = complexMixtureSpectrogram.shape
frequenciesInHz = linspace(0, sampleRate / 2.0, numFrequencies)
V = concatenate( abs(complexMixtureSpectrogram), axis=-1 )
W, H = performKLNMF(V, dictionarySize=128, numIterations=100, sparsityAlpha=0)
stereoH = array( hsplit(H, numChannels) )
spectralCoherenceV = complexMixtureSpectrogram[0] * complexMixtureSpectrogram[1].conj() / abs(complexMixtureSpectrogram[0]) / abs(complexMixtureSpectrogram[1])
angularSpectrogram = getAngularSpectrogram(spectralCoherenceV, frequenciesInHz, microphoneSeparationInMetres, numTDOAs)
meanAngularSpectrum = mean(angularSpectrogram, axis=-1)
targetTDOAIndexes = estimateTargetTDOAIndexesFromAngularSpectrum(meanAngularSpectrum, microphoneSeparationInMetres, numTDOAs, numTargets)
targetTDOAGCCNMFs = getTargetTDOAGCCNMFs(spectralCoherenceV, microphoneSeparationInMetres, numTDOAs, frequenciesInHz, targetTDOAIndexes, W, stereoH)
targetCoefficientMasks = getTargetCoefficientMasks(targetTDOAGCCNMFs, numTargets)
targetSpectrogramEstimates = getTargetSpectrogramEstimates(targetCoefficientMasks, complexMixtureSpectrogram, W, stereoH)
targetSignalEstimates = getTargetSignalEstimates(targetSpectrogramEstimates, windowSize, hopSize, windowFunction)
saveTargetSignalEstimates(targetSignalEstimates, sampleRate, mixtureFilePrefix)
if __name__ == '__main__':
# Preprocessing params
windowSize = 1024
fftSize = windowSize
hopSize = 128
windowFunction = hanning
# TDOA params
numTDOAs = 128
# NMF params
dictionarySize = 128
numIterations = 100
sparsityAlpha = 0
# Input params
mixtureFileNamePrefix = '../data/dev1_female3_liverec_130ms_1m'
microphoneSeparationInMetres = 1.0
numSources = 3
runGCCNMF( mixtureFileNamePrefix, windowSize, hopSize, numTDOAs,
microphoneSeparationInMetres, numSources, windowFunction )
|
{
"content_hash": "3d8803cbb24b54c94b9471b2d8d1d8bd",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 163,
"avg_line_length": 48.18181818181818,
"alnum_prop": 0.7835579514824798,
"repo_name": "seanwood/gcc-nmf",
"id": "7ea7a9e6d33edf7c57719c8fe8c740b9af8874c4",
"size": "3710",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gccNMF/runGCCNMF.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "129346"
}
],
"symlink_target": ""
}
|