file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
settings-panel-mixin.js | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
define(function (require, exports, module) {
'use strict';
const $ = require('jquery');
const chai = require('chai');
const Cocktail = require('cocktail');
const FormView = require('views/form');
const Metrics = require('lib/metrics');
const Notifier = require('lib/channels/notifier');
const SettingsPanelMixin = require('views/mixins/settings-panel-mixin');
const sinon = require('sinon');
const TestTemplate = require('templates/test_template.mustache');
var assert = chai.assert;
var SettingsPanelView = FormView.extend({
template: TestTemplate
});
Cocktail.mixin(
SettingsPanelView,
SettingsPanelMixin
);
describe('views/mixins/settings-panel-mixin', function () {
var view;
var metrics;
var notifier;
beforeEach(function () {
notifier = new Notifier();
metrics = new Metrics({ notifier });
view = new SettingsPanelView({
metrics: metrics,
notifier,
parentView: {
$: $,
displaySuccess: sinon.spy()
}
});
return view.render()
.then(function () {
$('#container').html(view.el);
});
});
afterEach(function () {
metrics.destroy();
view.remove();
view.destroy();
view = metrics = null;
});
describe('autofocus elements', () => {
it('are converted to [data-autofocus-on-panel-open] to prevent attempts at autofocusing hidden elements', () => {
assert.lengthOf(view.$('[autofocus]'), 0);
assert.lengthOf(view.$('[data-autofocus-on-panel-open]'), 1);
});
});
describe('events', function () {
it('toggles button', function () {
sinon.stub(view, 'navigate').callsFake(function () {});
$('.settings-unit-toggle').click();
assert.isTrue(view.navigate.calledWith('settings/display_name'));
});
it('toggles open and closed', function () {
sinon.stub(view, 'closePanel').callsFake(function () {});
sinon.stub(view, 'clearInput').callsFake(function () {});
sinon.stub(view, 'navigate').callsFake(function () {});
$('button.cancel').click(); | assert.isTrue(view.navigate.calledWith('settings'));
});
});
describe('methods', function () {
it('open and close', function () {
view.openPanel();
assert.isTrue($('.settings-unit').hasClass('open'));
assert.isTrue(view.isPanelOpen());
view.closePanel();
assert.isFalse($('.settings-unit').hasClass('open'));
assert.isFalse(view.isPanelOpen());
});
it('openPanel focuses the first autofocus element if present', function () {
// create and append an input field
var $dummyInput = $('<input type="text" name="dummyholder" data-autofocus-on-panel-open>');
view.$('.settings-unit').append($dummyInput);
// make sure that it is a non-touch device
$('html').addClass('no-touch');
view.openPanel();
// input field should be present, we just appended it
var $autofocusEl = view.$('.open [data-autofocus-on-panel-open]');
assert.lengthOf($autofocusEl, 1);
// autofocusEl should have been focused
assert.equal($autofocusEl[0], document.activeElement, 'autofocus element has focus');
});
it('hidePanel hides the open panel', function () {
sinon.stub(view, 'closePanel').callsFake(function () {});
sinon.stub(view, 'navigate').callsFake(function () { });
view.openPanel();
view.hidePanel();
assert.isTrue(view.closePanel.called);
assert.isTrue(view.navigate.calledWith('settings'));
});
it('displaySuccess', function () {
sinon.stub(view, 'closePanel').callsFake(function () {});
view.displaySuccess('hi');
assert.isTrue(view.parentView.displaySuccess.calledWith('hi'));
assert.isTrue(view.closePanel.called);
});
});
});
}); | assert.isTrue(view.closePanel.called);
assert.isTrue(view.clearInput.called); | random_line_split |
nodeHealthReport.js | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* @class
* Initializes a new instance of the NodeHealthReport class.
* @constructor
* The report of the node health
*
*/
class NodeHealthReport extends models['HealthReport'] {
| () {
super();
}
/**
* Defines the metadata of NodeHealthReport
*
* @returns {object} metadata of NodeHealthReport
*
*/
mapper() {
return {
required: false,
serializedName: 'NodeHealthReport',
type: {
name: 'Composite',
className: 'NodeHealthReport',
modelProperties: {
sourceId: {
required: false,
serializedName: 'SourceId',
type: {
name: 'String'
}
},
property: {
required: false,
serializedName: 'Property',
type: {
name: 'String'
}
},
healthState: {
required: false,
serializedName: 'HealthState',
type: {
name: 'String'
}
},
description: {
required: false,
serializedName: 'Description',
type: {
name: 'String'
}
},
timeToLiveInMilliSeconds: {
required: false,
serializedName: 'TimeToLiveInMilliSeconds',
type: {
name: 'String'
}
},
sequenceNumber: {
required: false,
serializedName: 'SequenceNumber',
type: {
name: 'String'
}
},
removeWhenExpired: {
required: false,
serializedName: 'RemoveWhenExpired',
type: {
name: 'Boolean'
}
}
}
}
};
}
}
module.exports = NodeHealthReport;
| constructor | identifier_name |
nodeHealthReport.js | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* @class
* Initializes a new instance of the NodeHealthReport class.
* @constructor
* The report of the node health
*
*/
class NodeHealthReport extends models['HealthReport'] {
constructor() |
/**
* Defines the metadata of NodeHealthReport
*
* @returns {object} metadata of NodeHealthReport
*
*/
mapper() {
return {
required: false,
serializedName: 'NodeHealthReport',
type: {
name: 'Composite',
className: 'NodeHealthReport',
modelProperties: {
sourceId: {
required: false,
serializedName: 'SourceId',
type: {
name: 'String'
}
},
property: {
required: false,
serializedName: 'Property',
type: {
name: 'String'
}
},
healthState: {
required: false,
serializedName: 'HealthState',
type: {
name: 'String'
}
},
description: {
required: false,
serializedName: 'Description',
type: {
name: 'String'
}
},
timeToLiveInMilliSeconds: {
required: false,
serializedName: 'TimeToLiveInMilliSeconds',
type: {
name: 'String'
}
},
sequenceNumber: {
required: false,
serializedName: 'SequenceNumber',
type: {
name: 'String'
}
},
removeWhenExpired: {
required: false,
serializedName: 'RemoveWhenExpired',
type: {
name: 'Boolean'
}
}
}
}
};
}
}
module.exports = NodeHealthReport;
| {
super();
} | identifier_body |
nodeHealthReport.js | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* @class
* Initializes a new instance of the NodeHealthReport class.
* @constructor
* The report of the node health
*
*/
class NodeHealthReport extends models['HealthReport'] {
constructor() {
super();
}
/**
* Defines the metadata of NodeHealthReport
*
* @returns {object} metadata of NodeHealthReport
*
*/
mapper() {
return {
required: false,
serializedName: 'NodeHealthReport',
type: {
name: 'Composite',
className: 'NodeHealthReport',
modelProperties: {
sourceId: {
required: false,
serializedName: 'SourceId',
type: {
name: 'String'
}
},
property: {
required: false,
serializedName: 'Property',
type: {
name: 'String'
}
},
healthState: {
required: false,
serializedName: 'HealthState',
type: {
name: 'String'
}
},
description: {
required: false,
serializedName: 'Description',
type: {
name: 'String'
}
},
timeToLiveInMilliSeconds: {
required: false,
serializedName: 'TimeToLiveInMilliSeconds',
type: {
name: 'String' | }
},
sequenceNumber: {
required: false,
serializedName: 'SequenceNumber',
type: {
name: 'String'
}
},
removeWhenExpired: {
required: false,
serializedName: 'RemoveWhenExpired',
type: {
name: 'Boolean'
}
}
}
}
};
}
}
module.exports = NodeHealthReport; | random_line_split | |
beta_decrease.py | from .naive import StratNaive
import random
import numpy as np
| StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale
def update_speaker(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context)
def update_hearer(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context) | class BetaDecreaseStrat(StratNaive):
def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2): | random_line_split |
beta_decrease.py |
from .naive import StratNaive
import random
import numpy as np
class BetaDecreaseStrat(StratNaive):
def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2):
StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale
def update_speaker(self, ms, w, mh, voc, mem, bool_succ, context=[]):
|
def update_hearer(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context)
| self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context) | identifier_body |
beta_decrease.py |
from .naive import StratNaive
import random
import numpy as np
class BetaDecreaseStrat(StratNaive):
def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2):
StratNaive.__init__(self,vu_cfg=vu_cfg, **strat_cfg2)
self.time_scale = time_scale
def | (self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_speaker(ms, w, mh, voc, mem, bool_succ, context)
def update_hearer(self, ms, w, mh, voc, mem, bool_succ, context=[]):
self.voc_update.beta = max(0,self.voc_update.beta - 1./self.time_scale)
return self.voc_update.update_hearer(ms, w, mh, voc, mem, bool_succ, context)
| update_speaker | identifier_name |
views.py | from rest_framework.decorators import api_view
from django.shortcuts import get_object_or_404
from rest_framework.response import Response
from rest_framework import status
from .models import Person
from .serializers import PersonSerializer
@api_view(['GET', 'DELETE', 'PUT'])
def get_delete_update_person(request, fstname):
person = get_object_or_404(Person, firstname=fstname)
# get details of a single person
if request.method == 'GET':
serializer = PersonSerializer(person)
return Response(serializer.data)
# delete a single person
elif request.method == 'DELETE':
person.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
# update details of a single person
elif request.method == 'PUT':
serializer = PersonSerializer(person, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_204_NO_CONTENT)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET', 'POST'])
def get_post_people(request):
# get all people
| if request.method == 'GET':
people = Person.objects.all()
serializer = PersonSerializer(people, many=True)
return Response(serializer.data)
# insert a new record for a person
elif request.method == 'POST':
data = {
'firstname': request.data.get('firstname'),
'lastname': request.data.get('lastname'),
'country': request.data.get('country'),
'email': request.data.get('email'),
'phone': request.data.get('phone'),
'occupation_field': request.data.get('occupation_field'),
'occupation': request.data.get('occupation'),
'birthdate': request.data.get('birthdate'),
'description': request.data.get('description')
}
serializer = PersonSerializer(data=data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) | identifier_body | |
views.py | from rest_framework.decorators import api_view
from django.shortcuts import get_object_or_404
from rest_framework.response import Response
from rest_framework import status
from .models import Person
from .serializers import PersonSerializer
@api_view(['GET', 'DELETE', 'PUT'])
def get_delete_update_person(request, fstname):
person = get_object_or_404(Person, firstname=fstname)
# get details of a single person
if request.method == 'GET':
serializer = PersonSerializer(person)
return Response(serializer.data)
# delete a single person
elif request.method == 'DELETE':
person.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
# update details of a single person
elif request.method == 'PUT':
|
@api_view(['GET', 'POST'])
def get_post_people(request):
# get all people
if request.method == 'GET':
people = Person.objects.all()
serializer = PersonSerializer(people, many=True)
return Response(serializer.data)
# insert a new record for a person
elif request.method == 'POST':
data = {
'firstname': request.data.get('firstname'),
'lastname': request.data.get('lastname'),
'country': request.data.get('country'),
'email': request.data.get('email'),
'phone': request.data.get('phone'),
'occupation_field': request.data.get('occupation_field'),
'occupation': request.data.get('occupation'),
'birthdate': request.data.get('birthdate'),
'description': request.data.get('description')
}
serializer = PersonSerializer(data=data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) | serializer = PersonSerializer(person, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_204_NO_CONTENT)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) | conditional_block |
views.py | from rest_framework.decorators import api_view
from django.shortcuts import get_object_or_404
from rest_framework.response import Response
from rest_framework import status
from .models import Person
from .serializers import PersonSerializer
@api_view(['GET', 'DELETE', 'PUT'])
def get_delete_update_person(request, fstname):
person = get_object_or_404(Person, firstname=fstname)
# get details of a single person
if request.method == 'GET':
serializer = PersonSerializer(person)
return Response(serializer.data)
# delete a single person
elif request.method == 'DELETE':
person.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
# update details of a single person
elif request.method == 'PUT':
serializer = PersonSerializer(person, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_204_NO_CONTENT)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET', 'POST'])
def | (request):
# get all people
if request.method == 'GET':
people = Person.objects.all()
serializer = PersonSerializer(people, many=True)
return Response(serializer.data)
# insert a new record for a person
elif request.method == 'POST':
data = {
'firstname': request.data.get('firstname'),
'lastname': request.data.get('lastname'),
'country': request.data.get('country'),
'email': request.data.get('email'),
'phone': request.data.get('phone'),
'occupation_field': request.data.get('occupation_field'),
'occupation': request.data.get('occupation'),
'birthdate': request.data.get('birthdate'),
'description': request.data.get('description')
}
serializer = PersonSerializer(data=data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) | get_post_people | identifier_name |
views.py | from rest_framework.decorators import api_view
from django.shortcuts import get_object_or_404
from rest_framework.response import Response
from rest_framework import status
from .models import Person
from .serializers import PersonSerializer
@api_view(['GET', 'DELETE', 'PUT'])
def get_delete_update_person(request, fstname):
person = get_object_or_404(Person, firstname=fstname)
# get details of a single person
if request.method == 'GET':
serializer = PersonSerializer(person)
return Response(serializer.data) | person.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
# update details of a single person
elif request.method == 'PUT':
serializer = PersonSerializer(person, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_204_NO_CONTENT)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET', 'POST'])
def get_post_people(request):
# get all people
if request.method == 'GET':
people = Person.objects.all()
serializer = PersonSerializer(people, many=True)
return Response(serializer.data)
# insert a new record for a person
elif request.method == 'POST':
data = {
'firstname': request.data.get('firstname'),
'lastname': request.data.get('lastname'),
'country': request.data.get('country'),
'email': request.data.get('email'),
'phone': request.data.get('phone'),
'occupation_field': request.data.get('occupation_field'),
'occupation': request.data.get('occupation'),
'birthdate': request.data.get('birthdate'),
'description': request.data.get('description')
}
serializer = PersonSerializer(data=data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) | # delete a single person
elif request.method == 'DELETE': | random_line_split |
webdav.py | # -*- coding: utf-8 -*-
# pylint: disable=locally-disabled, star-args
""" WebDAV upload method for dput.
Install to "/usr/share/dput/webdav.py".
"""
from __future__ import with_statement
import re
import os
import sys
import cgi
import netrc
import socket
import fnmatch
import getpass
import httplib
import urllib2
import urlparse
import unittest
from contextlib import closing
from email import parser as rfc2822_parser
try:
import dputhelper
except ImportError:
sys.path.insert(0, "/usr/share/dput/helper")
import dputhelper
# Block size for upload streaming
CHUNK_SIZE = 16 * 1024
def trace(msg, **kwargs):
"""Emit log traces in debug mode."""
if trace.debug:
print("D: webdav: " + (msg % kwargs))
trace.debug = False
def log(msg, **kwargs):
"""Emit log message to stderr."""
sys.stdout.flush()
sys.stderr.write("webdav: " + (msg % kwargs) + "\n")
sys.stderr.flush()
def _resolve_credentials(fqdn, login):
"""Look up special forms of credential references."""
result = login
if "$" in result:
result = os.path.expandvars(result)
if result.startswith("netrc:"):
result = result.split(':', 1)[1]
if result:
result = os.path.abspath(os.path.expanduser(result))
accounts = netrc.netrc(result or None)
account = accounts.authenticators(fqdn)
if not account or not(account[0] or account[1]):
raise dputhelper.DputUploadFatalException("Cannot find account for host %s in %s netrc file" % (
fqdn, result or "default"))
# account is (login, account, password)
user, pwd = account[0] or account[1], account[2] or ""
result = "%s:%s" % (user, pwd)
else:
if result.startswith("file:"):
result = os.path.abspath(os.path.expanduser(result.split(':', 1)[1]))
with closing(open(result, "r")) as handle:
result = handle.read().strip()
try:
user, pwd = result.split(':', 1)
except ValueError:
user, pwd = result, ""
trace("Resolved login credentials to %(user)s:%(pwd)s", user=user, pwd='*' * len(pwd))
return result
class PromptingPasswordMgr(urllib2.HTTPPasswordMgr):
""" Custom password manager that prompts for a password once, if none is available otherwise.
Based on code in dput 0.9.6 (http method).
"""
def __init__(self, login):
urllib2.HTTPPasswordMgr.__init__(self)
self.login = login
def find_user_password(self, realm, authuri):
"""Prompt for a password once and remember it, unless already provided in the configuration."""
authuri = self.reduce_uri(authuri)[0]
authinfo = urllib2.HTTPPasswordMgr.find_user_password(self, realm, authuri)
if authinfo == (None, None):
credentials = self.login
if ':' in credentials:
authinfo = credentials.split(':', 1)
else:
password = getpass.getpass(" Password for %s:" % realm)
self.add_password(realm, authuri, credentials, password)
authinfo = credentials, password
return authinfo
def _distro2repo(distro, repo_mappings):
"""Map distribution names to repo names according to config settings."""
# Parse the mapping config
mappings = [(i.split('=', 1) if '=' in i else (i, i)) for i in repo_mappings.split()]
# Try to find a match
result = distro
for pattern, target in mappings:
if fnmatch.fnmatchcase(distro.lower(), pattern.lower()):
result = target
break
trace("Mapped distro '%(distro)s' to '%(repo)s'", distro=distro, repo=result)
return result
def _resolve_incoming(fqdn, login, incoming, changes=None, cli_params=None, repo_mappings=""):
"""Resolve the given `incoming` value to a working URL."""
# Build fully qualified URL
scheme, netloc, path, params, query, anchor = urlparse.urlparse(incoming, scheme="http", allow_fragments=True)
if scheme not in ("http", "https"):
raise dputhelper.DputUploadFatalException("Unsupported URL scheme '%s'" % scheme)
url = urlparse.urlunparse((scheme, netloc or fqdn, path.rstrip('/') + '/', params, query, None))
# Parse anchor to parameters
url_params = dict(cgi.parse_qsl(anchor or '', keep_blank_values=True))
# Read changes from stream or file
pkgdata = {}
if changes:
try:
changes.read # pylint: disable=maybe-no-member
except AttributeError:
with closing(open(changes, "r")) as handle:
changes = handle.read()
else:
changes = changes.read() # pylint: disable=maybe-no-member
if changes.startswith("-----BEGIN PGP SIGNED MESSAGE-----"):
# Let someone else check this, we don't care a bit; gimme the data already
trace("Extracting package metadata from PGP signed message...")
changes = changes.split("-----BEGIN PGP")[1].replace('\r', '').split('\n\n', 1)[1]
pkgdata = dict([(key.lower().replace('-', '_'), val.strip())
for key, val in rfc2822_parser.HeaderParser().parsestr(changes).items()
])
# Extend changes metadata
pkgdata["loginuser"] = login.split(':')[0]
if "version" in pkgdata:
pkgdata["upstream"] = re.split(r"[-~]", pkgdata["version"])[0]
pkgdata.update(dict(
fqdn=fqdn, repo=_distro2repo(pkgdata.get("distribution", "unknown"), repo_mappings),
))
pkgdata.update(cli_params or {}) # CLI options can overwrite anything
trace("Collected metadata:\n %(meta)s", meta="\n ".join(["%s = %s" % (key, val)
for key, val in sorted(pkgdata.items())
if '\n' not in val # only print 'simple' values
]))
# Interpolate `url`
try:
try:
url.format
except AttributeError:
url = url % pkgdata # Python 2.5
else:
url = url.format(**pkgdata) # Python 2.6+
except KeyError, exc:
raise dputhelper.DputUploadFatalException("Unknown key (%s) in incoming templates '%s'" % (exc, incoming))
trace("Resolved incoming to `%(url)s' params=%(params)r", url=url, params=url_params)
return url, url_params
def _url_connection(url, method, skip_host=False, skip_accept_encoding=False):
"""Create HTTP[S] connection for `url`."""
scheme, netloc, path, params, query, _ = urlparse.urlparse(url)
result = conn = (httplib.HTTPSConnection if scheme == "https" else httplib.HTTPConnection)(netloc)
conn.debuglevel = int(trace.debug)
try:
conn.putrequest(method, urlparse.urlunparse((None, None, path, params, query, None)), skip_host, skip_accept_encoding)
conn.putheader("User-Agent", "dput")
conn.putheader("Connection", "close")
conn = None
finally:
if conn:
conn.close() # close in case of errors
return result
def _file_url(filepath, url):
"""Return URL for the given `filepath` in the DAV collection `url`."""
basename = os.path.basename(filepath)
return urlparse.urljoin(url.rstrip('/') + '/', basename)
def _dav_put(filepath, url, login, progress=None):
"""Upload `filepath` to given `url` (referring to a WebDAV collection)."""
fileurl = _file_url(filepath, url)
sys.stdout.write(" Uploading %s: " % os.path.basename(filepath))
sys.stdout.flush()
size = os.path.getsize(filepath)
with closing(open(filepath, 'r')) as handle:
if progress:
handle = dputhelper.FileWithProgress(handle, ptype=progress, progressf=sys.stdout, size=size)
trace("HTTP PUT to URL: %s" % fileurl)
try:
conn = _url_connection(fileurl, "PUT")
try:
conn.putheader("Authorization", 'Basic %s' % login.encode('base64').replace('\n', '').strip())
conn.putheader("Content-Length", str(size))
conn.endheaders()
conn.debuglevel = 0
while True:
data = handle.read(CHUNK_SIZE)
if not data:
break
conn.send(data)
conn.debuglevel = int(trace.debug)
resp = conn.getresponse()
if 200 <= resp.status <= 299:
print " done."
#elif res.status == 401 and not auth_headers:
#print "need authentication."
#auth_headers = AuthHandlerHackAround(url, res.msg, pwman).get_auth_headers()
elif resp.status == 401:
print " unauthorized."
raise urllib2.URLError("Upload failed as unauthorized (%s),"
" maybe wrong username or password?" % resp.reason)
else:
print " failed."
raise urllib2.URLError("Unexpected HTTP status %d %s" % (resp.status, resp.reason))
resp.read() # eat response body
finally:
conn.close()
except httplib.HTTPException, exc:
raise urllib2.URLError(exc)
def _check_url(url, allowed, mindepth=0):
"""Check if HTTP GET `url` returns a status code in `allowed`."""
if mindepth:
scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
path = '/'.join(path.split('/')[:mindepth+1]).rstrip('/') + '/'
url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
trace("Checking URL '%(url)s'", url=url)
try:
# TODO: Check requests need to use login credentials
with closing(urllib2.urlopen(url)) as handle:
handle.read()
code = handle.code
if code not in allowed:
raise urllib2.HTTPError(url, code,
"Unallowed HTTP status %d (%s)" % (code, handle.msg),
handle.headers, None)
except urllib2.HTTPError, exc:
code = exc.code
if code not in allowed:
raise
trace("Code %(code)d OK for URL '%(url)s'", url=url, code=code)
def _get_host_argument(fqdn):
""" We have to jump through several hoops to get to our config section,
which in turn is the only place where the host argument is available.
"""
import __main__ as dput # if only we would get passed our config section...
config = dput.config # pylint: disable=no-member
result = ""
for section in config.sections():
if (config.has_option(section, "fqdn")
and config.get(section, "fqdn") == fqdn
and config.has_option(section, section)):
result = config.get(section, section)
return result
def _get_config_data(fqdn):
"""Get configuration section for the chosen host, and CLI host parameters."""
# Without the patch applied, fall back to ugly hacks
if not upload.extended_info:
try:
caller = sys._getframe(2) # pylint: disable=protected-access
except AttributeError:
pass # somehow not CPython
else:
config = caller.f_globals.get("config")
host = caller.f_locals.get("host")
del caller
if config and host:
upload.extended_info = dict(config=config, host=host)
if upload.extended_info:
host_config = dict(upload.extended_info["config"].items(upload.extended_info["host"]))
host_argument = host_config.get(upload.extended_info["host"], "")
else:
host_config = {}
host_argument = _get_host_argument(fqdn)
log("WARN: Extended host configuration not available!")
# Parse "host:key=val;..." argument from command line into a dict
cli_params = dict(cgi.parse_qsl(host_argument.replace(',', ';'), keep_blank_values=True))
return host_config, cli_params
def upload(fqdn, login, incoming, files_to_upload, # pylint: disable=too-many-arguments
debug, dummy, progress=None):
"""Upload the files via WebDAV."""
assert sys.version_info >= (2, 5), "Your snake is a rotting corpse (Python 2.5+ required)"
trace.debug = bool(debug)
try:
host_config, cli_params = _get_config_data(fqdn)
login = _resolve_credentials(fqdn, login)
# Handle .changes file
changes_file = [i for i in files_to_upload if i.endswith(".changes")]
if not changes_file:
log("WARN: No changes file found in %(n)d files to upload", n=len(files_to_upload))
changes_file = None
else:
if len(changes_file) > 1:
log("WARN: More than one changes file found in %(n)d files to upload,"
" taking the 1st:\n %(changes)s",
n=len(files_to_upload), changes="\n ".join(changes_file))
changes_file = changes_file[0]
# Prepare for uploading
incoming, repo_params = _resolve_incoming(fqdn, login, incoming, changes=changes_file,
cli_params=cli_params, repo_mappings=host_config.get("repo_mappings", ""))
log("INFO: Destination base URL is\n %(url)s", url=urllib2.quote(incoming, safe=":/~;#"))
repo_params.update(cli_params)
mindepth = int(repo_params.get("mindepth", "0"), 10)
overwrite = int(repo_params.get("overwrite", "0"), 10)
# TODO: Add ability to enter missing password via terminal
# auth_handler = PromptingPasswordMgr(login)
# Special handling for integration test code
if "integration-test" in cli_params:
import pprint
print "upload arguments = ",
pprint.pprint(dict((k, v) for k, v in locals().iteritems() if k in (
"fqdn", "login", "incoming", "files_to_upload", "debug", "dummy", "progress")))
print "host config = ",
pprint.pprint(host_config)
print "host arguments = ",
pprint.pprint(cli_params)
else:
# TODO: "bintray" REST API support
# POST /packages/:subject/:repo
# POST /packages/:subject/:repo/:package/versions
# Check if .changes file already exists
|
except (dputhelper.DputUploadFatalException, socket.error, urllib2.URLError, EnvironmentError), exc:
log("FATAL: %(exc)s", exc=exc)
sys.exit(1)
upload.extended_info = {}
#
# Unit Tests
#
def py25_format(template):
"""Helper for testing under Python 2.5."""
return template if sys.version_info >= (2, 6) else template.replace("{", "%(").replace("}", ")s")
class WebdavTest(unittest.TestCase): # pylint: disable=too-many-public-methods
"""Local unittests."""
DISTRO2REPO_DATA = [
("unknown", "incoming"),
("foobar", "incoming"),
("unstable", "snapshots"),
("snapshots", "snapshots"),
("foo-experimental", "snapshots"),
("bar-experimental", "snapshots"),
]
def test_distro2repo(self):
"""Test distribution mapping."""
cfg = "snapshots unstable=snapshots *-experimental=snapshots *=incoming"
for distro, repo in self.DISTRO2REPO_DATA:
result = _distro2repo(distro, cfg)
self.assertEquals(result, repo)
def test_resolve_incoming(self):
"""Test URL resolving."""
result, params = _resolve_incoming("repo.example.com:80", "", "incoming")
self.assertEquals(result, "http://repo.example.com:80/incoming/")
self.assertEquals(params, {})
result, _ = _resolve_incoming("repo.example.com:80", "", "https:///incoming/")
self.assertEquals(result, "https://repo.example.com:80/incoming/")
result, _ = _resolve_incoming("repo.example.com:80", "", "//explicit/incoming/")
self.assertEquals(result, "http://explicit/incoming/")
result, _ = _resolve_incoming("repo.example.com:80", "", py25_format("//{fqdn}/incoming/"))
self.assertEquals(result, "http://repo.example.com:80/incoming/")
_, params = _resolve_incoming("", "", "incoming#a=1&b=c")
self.assertEquals(params, dict(a="1", b="c"))
result, _ = _resolve_incoming("repo.example.com:80", "johndoe", py25_format("incoming/{loginuser}"))
self.assertEquals(result, "http://repo.example.com:80/incoming/johndoe/")
# Unsupported URL scheme
self.assertRaises(dputhelper.DputUploadFatalException, _resolve_incoming, "", "", "file:///incoming/")
# Unknown key
self.assertRaises(dputhelper.DputUploadFatalException, _resolve_incoming,
"", "", py25_format("http://example.com/incoming/{not_defined_ever}/"))
if __name__ == "__main__":
print("artifactory webdav plugin tests")
unittest.main()
| if not overwrite and changes_file:
try:
_check_url(_file_url(changes_file, incoming), [404])
except urllib2.HTTPError, exc:
raise dputhelper.DputUploadFatalException("Overwriting existing changes at '%s' not allowed: %s" % (
_file_url(changes_file, incoming), exc))
# Check for existence of target path with minimal depth
if mindepth:
try:
_check_url(incoming, range(200, 300), mindepth=mindepth)
except urllib2.HTTPError, exc:
raise dputhelper.DputUploadFatalException("Required repository path '%s' doesn't exist: %s" % (
exc.filename, exc))
# Upload the files in the given order
for filepath in files_to_upload:
if "simulate" in cli_params:
log("WOULD upload '%(filename)s'", filename=os.path.basename(filepath))
else:
_dav_put(filepath, incoming, login, progress) | conditional_block |
webdav.py | # -*- coding: utf-8 -*-
# pylint: disable=locally-disabled, star-args
""" WebDAV upload method for dput.
Install to "/usr/share/dput/webdav.py".
"""
from __future__ import with_statement
import re
import os
import sys
import cgi
import netrc
import socket
import fnmatch
import getpass
import httplib
import urllib2
import urlparse
import unittest
from contextlib import closing
from email import parser as rfc2822_parser
try:
import dputhelper
except ImportError:
sys.path.insert(0, "/usr/share/dput/helper")
import dputhelper
# Block size for upload streaming
CHUNK_SIZE = 16 * 1024
def trace(msg, **kwargs):
"""Emit log traces in debug mode."""
if trace.debug:
print("D: webdav: " + (msg % kwargs))
trace.debug = False
def log(msg, **kwargs):
"""Emit log message to stderr."""
sys.stdout.flush()
sys.stderr.write("webdav: " + (msg % kwargs) + "\n")
sys.stderr.flush()
def _resolve_credentials(fqdn, login):
"""Look up special forms of credential references."""
result = login
if "$" in result:
result = os.path.expandvars(result)
if result.startswith("netrc:"):
result = result.split(':', 1)[1]
if result:
result = os.path.abspath(os.path.expanduser(result))
accounts = netrc.netrc(result or None)
account = accounts.authenticators(fqdn)
if not account or not(account[0] or account[1]):
raise dputhelper.DputUploadFatalException("Cannot find account for host %s in %s netrc file" % (
fqdn, result or "default"))
# account is (login, account, password)
user, pwd = account[0] or account[1], account[2] or ""
result = "%s:%s" % (user, pwd)
else:
if result.startswith("file:"):
result = os.path.abspath(os.path.expanduser(result.split(':', 1)[1]))
with closing(open(result, "r")) as handle:
result = handle.read().strip()
try:
user, pwd = result.split(':', 1)
except ValueError:
user, pwd = result, ""
trace("Resolved login credentials to %(user)s:%(pwd)s", user=user, pwd='*' * len(pwd))
return result
class PromptingPasswordMgr(urllib2.HTTPPasswordMgr):
""" Custom password manager that prompts for a password once, if none is available otherwise.
Based on code in dput 0.9.6 (http method).
"""
def __init__(self, login):
urllib2.HTTPPasswordMgr.__init__(self)
self.login = login
def find_user_password(self, realm, authuri):
"""Prompt for a password once and remember it, unless already provided in the configuration."""
authuri = self.reduce_uri(authuri)[0]
authinfo = urllib2.HTTPPasswordMgr.find_user_password(self, realm, authuri)
if authinfo == (None, None):
credentials = self.login
if ':' in credentials:
authinfo = credentials.split(':', 1)
else:
password = getpass.getpass(" Password for %s:" % realm)
self.add_password(realm, authuri, credentials, password)
authinfo = credentials, password
return authinfo
def _distro2repo(distro, repo_mappings):
"""Map distribution names to repo names according to config settings."""
# Parse the mapping config
mappings = [(i.split('=', 1) if '=' in i else (i, i)) for i in repo_mappings.split()]
# Try to find a match
result = distro
for pattern, target in mappings:
if fnmatch.fnmatchcase(distro.lower(), pattern.lower()):
result = target
break
trace("Mapped distro '%(distro)s' to '%(repo)s'", distro=distro, repo=result)
return result
def _resolve_incoming(fqdn, login, incoming, changes=None, cli_params=None, repo_mappings=""):
"""Resolve the given `incoming` value to a working URL."""
# Build fully qualified URL
scheme, netloc, path, params, query, anchor = urlparse.urlparse(incoming, scheme="http", allow_fragments=True)
if scheme not in ("http", "https"):
raise dputhelper.DputUploadFatalException("Unsupported URL scheme '%s'" % scheme)
url = urlparse.urlunparse((scheme, netloc or fqdn, path.rstrip('/') + '/', params, query, None))
# Parse anchor to parameters
url_params = dict(cgi.parse_qsl(anchor or '', keep_blank_values=True))
# Read changes from stream or file
pkgdata = {}
if changes:
try:
changes.read # pylint: disable=maybe-no-member
except AttributeError: | if changes.startswith("-----BEGIN PGP SIGNED MESSAGE-----"):
# Let someone else check this, we don't care a bit; gimme the data already
trace("Extracting package metadata from PGP signed message...")
changes = changes.split("-----BEGIN PGP")[1].replace('\r', '').split('\n\n', 1)[1]
pkgdata = dict([(key.lower().replace('-', '_'), val.strip())
for key, val in rfc2822_parser.HeaderParser().parsestr(changes).items()
])
# Extend changes metadata
pkgdata["loginuser"] = login.split(':')[0]
if "version" in pkgdata:
pkgdata["upstream"] = re.split(r"[-~]", pkgdata["version"])[0]
pkgdata.update(dict(
fqdn=fqdn, repo=_distro2repo(pkgdata.get("distribution", "unknown"), repo_mappings),
))
pkgdata.update(cli_params or {}) # CLI options can overwrite anything
trace("Collected metadata:\n %(meta)s", meta="\n ".join(["%s = %s" % (key, val)
for key, val in sorted(pkgdata.items())
if '\n' not in val # only print 'simple' values
]))
# Interpolate `url`
try:
try:
url.format
except AttributeError:
url = url % pkgdata # Python 2.5
else:
url = url.format(**pkgdata) # Python 2.6+
except KeyError, exc:
raise dputhelper.DputUploadFatalException("Unknown key (%s) in incoming templates '%s'" % (exc, incoming))
trace("Resolved incoming to `%(url)s' params=%(params)r", url=url, params=url_params)
return url, url_params
def _url_connection(url, method, skip_host=False, skip_accept_encoding=False):
"""Create HTTP[S] connection for `url`."""
scheme, netloc, path, params, query, _ = urlparse.urlparse(url)
result = conn = (httplib.HTTPSConnection if scheme == "https" else httplib.HTTPConnection)(netloc)
conn.debuglevel = int(trace.debug)
try:
conn.putrequest(method, urlparse.urlunparse((None, None, path, params, query, None)), skip_host, skip_accept_encoding)
conn.putheader("User-Agent", "dput")
conn.putheader("Connection", "close")
conn = None
finally:
if conn:
conn.close() # close in case of errors
return result
def _file_url(filepath, url):
"""Return URL for the given `filepath` in the DAV collection `url`."""
basename = os.path.basename(filepath)
return urlparse.urljoin(url.rstrip('/') + '/', basename)
def _dav_put(filepath, url, login, progress=None):
"""Upload `filepath` to given `url` (referring to a WebDAV collection)."""
fileurl = _file_url(filepath, url)
sys.stdout.write(" Uploading %s: " % os.path.basename(filepath))
sys.stdout.flush()
size = os.path.getsize(filepath)
with closing(open(filepath, 'r')) as handle:
if progress:
handle = dputhelper.FileWithProgress(handle, ptype=progress, progressf=sys.stdout, size=size)
trace("HTTP PUT to URL: %s" % fileurl)
try:
conn = _url_connection(fileurl, "PUT")
try:
conn.putheader("Authorization", 'Basic %s' % login.encode('base64').replace('\n', '').strip())
conn.putheader("Content-Length", str(size))
conn.endheaders()
conn.debuglevel = 0
while True:
data = handle.read(CHUNK_SIZE)
if not data:
break
conn.send(data)
conn.debuglevel = int(trace.debug)
resp = conn.getresponse()
if 200 <= resp.status <= 299:
print " done."
#elif res.status == 401 and not auth_headers:
#print "need authentication."
#auth_headers = AuthHandlerHackAround(url, res.msg, pwman).get_auth_headers()
elif resp.status == 401:
print " unauthorized."
raise urllib2.URLError("Upload failed as unauthorized (%s),"
" maybe wrong username or password?" % resp.reason)
else:
print " failed."
raise urllib2.URLError("Unexpected HTTP status %d %s" % (resp.status, resp.reason))
resp.read() # eat response body
finally:
conn.close()
except httplib.HTTPException, exc:
raise urllib2.URLError(exc)
def _check_url(url, allowed, mindepth=0):
"""Check if HTTP GET `url` returns a status code in `allowed`."""
if mindepth:
scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
path = '/'.join(path.split('/')[:mindepth+1]).rstrip('/') + '/'
url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
trace("Checking URL '%(url)s'", url=url)
try:
# TODO: Check requests need to use login credentials
with closing(urllib2.urlopen(url)) as handle:
handle.read()
code = handle.code
if code not in allowed:
raise urllib2.HTTPError(url, code,
"Unallowed HTTP status %d (%s)" % (code, handle.msg),
handle.headers, None)
except urllib2.HTTPError, exc:
code = exc.code
if code not in allowed:
raise
trace("Code %(code)d OK for URL '%(url)s'", url=url, code=code)
def _get_host_argument(fqdn):
""" We have to jump through several hoops to get to our config section,
which in turn is the only place where the host argument is available.
"""
import __main__ as dput # if only we would get passed our config section...
config = dput.config # pylint: disable=no-member
result = ""
for section in config.sections():
if (config.has_option(section, "fqdn")
and config.get(section, "fqdn") == fqdn
and config.has_option(section, section)):
result = config.get(section, section)
return result
def _get_config_data(fqdn):
"""Get configuration section for the chosen host, and CLI host parameters."""
# Without the patch applied, fall back to ugly hacks
if not upload.extended_info:
try:
caller = sys._getframe(2) # pylint: disable=protected-access
except AttributeError:
pass # somehow not CPython
else:
config = caller.f_globals.get("config")
host = caller.f_locals.get("host")
del caller
if config and host:
upload.extended_info = dict(config=config, host=host)
if upload.extended_info:
host_config = dict(upload.extended_info["config"].items(upload.extended_info["host"]))
host_argument = host_config.get(upload.extended_info["host"], "")
else:
host_config = {}
host_argument = _get_host_argument(fqdn)
log("WARN: Extended host configuration not available!")
# Parse "host:key=val;..." argument from command line into a dict
cli_params = dict(cgi.parse_qsl(host_argument.replace(',', ';'), keep_blank_values=True))
return host_config, cli_params
def upload(fqdn, login, incoming, files_to_upload, # pylint: disable=too-many-arguments
debug, dummy, progress=None):
"""Upload the files via WebDAV."""
assert sys.version_info >= (2, 5), "Your snake is a rotting corpse (Python 2.5+ required)"
trace.debug = bool(debug)
try:
host_config, cli_params = _get_config_data(fqdn)
login = _resolve_credentials(fqdn, login)
# Handle .changes file
changes_file = [i for i in files_to_upload if i.endswith(".changes")]
if not changes_file:
log("WARN: No changes file found in %(n)d files to upload", n=len(files_to_upload))
changes_file = None
else:
if len(changes_file) > 1:
log("WARN: More than one changes file found in %(n)d files to upload,"
" taking the 1st:\n %(changes)s",
n=len(files_to_upload), changes="\n ".join(changes_file))
changes_file = changes_file[0]
# Prepare for uploading
incoming, repo_params = _resolve_incoming(fqdn, login, incoming, changes=changes_file,
cli_params=cli_params, repo_mappings=host_config.get("repo_mappings", ""))
log("INFO: Destination base URL is\n %(url)s", url=urllib2.quote(incoming, safe=":/~;#"))
repo_params.update(cli_params)
mindepth = int(repo_params.get("mindepth", "0"), 10)
overwrite = int(repo_params.get("overwrite", "0"), 10)
# TODO: Add ability to enter missing password via terminal
# auth_handler = PromptingPasswordMgr(login)
# Special handling for integration test code
if "integration-test" in cli_params:
import pprint
print "upload arguments = ",
pprint.pprint(dict((k, v) for k, v in locals().iteritems() if k in (
"fqdn", "login", "incoming", "files_to_upload", "debug", "dummy", "progress")))
print "host config = ",
pprint.pprint(host_config)
print "host arguments = ",
pprint.pprint(cli_params)
else:
# TODO: "bintray" REST API support
# POST /packages/:subject/:repo
# POST /packages/:subject/:repo/:package/versions
# Check if .changes file already exists
if not overwrite and changes_file:
try:
_check_url(_file_url(changes_file, incoming), [404])
except urllib2.HTTPError, exc:
raise dputhelper.DputUploadFatalException("Overwriting existing changes at '%s' not allowed: %s" % (
_file_url(changes_file, incoming), exc))
# Check for existence of target path with minimal depth
if mindepth:
try:
_check_url(incoming, range(200, 300), mindepth=mindepth)
except urllib2.HTTPError, exc:
raise dputhelper.DputUploadFatalException("Required repository path '%s' doesn't exist: %s" % (
exc.filename, exc))
# Upload the files in the given order
for filepath in files_to_upload:
if "simulate" in cli_params:
log("WOULD upload '%(filename)s'", filename=os.path.basename(filepath))
else:
_dav_put(filepath, incoming, login, progress)
except (dputhelper.DputUploadFatalException, socket.error, urllib2.URLError, EnvironmentError), exc:
log("FATAL: %(exc)s", exc=exc)
sys.exit(1)
upload.extended_info = {}
#
# Unit Tests
#
def py25_format(template):
"""Helper for testing under Python 2.5."""
return template if sys.version_info >= (2, 6) else template.replace("{", "%(").replace("}", ")s")
class WebdavTest(unittest.TestCase): # pylint: disable=too-many-public-methods
"""Local unittests."""
DISTRO2REPO_DATA = [
("unknown", "incoming"),
("foobar", "incoming"),
("unstable", "snapshots"),
("snapshots", "snapshots"),
("foo-experimental", "snapshots"),
("bar-experimental", "snapshots"),
]
def test_distro2repo(self):
"""Test distribution mapping."""
cfg = "snapshots unstable=snapshots *-experimental=snapshots *=incoming"
for distro, repo in self.DISTRO2REPO_DATA:
result = _distro2repo(distro, cfg)
self.assertEquals(result, repo)
def test_resolve_incoming(self):
"""Test URL resolving."""
result, params = _resolve_incoming("repo.example.com:80", "", "incoming")
self.assertEquals(result, "http://repo.example.com:80/incoming/")
self.assertEquals(params, {})
result, _ = _resolve_incoming("repo.example.com:80", "", "https:///incoming/")
self.assertEquals(result, "https://repo.example.com:80/incoming/")
result, _ = _resolve_incoming("repo.example.com:80", "", "//explicit/incoming/")
self.assertEquals(result, "http://explicit/incoming/")
result, _ = _resolve_incoming("repo.example.com:80", "", py25_format("//{fqdn}/incoming/"))
self.assertEquals(result, "http://repo.example.com:80/incoming/")
_, params = _resolve_incoming("", "", "incoming#a=1&b=c")
self.assertEquals(params, dict(a="1", b="c"))
result, _ = _resolve_incoming("repo.example.com:80", "johndoe", py25_format("incoming/{loginuser}"))
self.assertEquals(result, "http://repo.example.com:80/incoming/johndoe/")
# Unsupported URL scheme
self.assertRaises(dputhelper.DputUploadFatalException, _resolve_incoming, "", "", "file:///incoming/")
# Unknown key
self.assertRaises(dputhelper.DputUploadFatalException, _resolve_incoming,
"", "", py25_format("http://example.com/incoming/{not_defined_ever}/"))
if __name__ == "__main__":
print("artifactory webdav plugin tests")
unittest.main() | with closing(open(changes, "r")) as handle:
changes = handle.read()
else:
changes = changes.read() # pylint: disable=maybe-no-member
| random_line_split |
webdav.py | # -*- coding: utf-8 -*-
# pylint: disable=locally-disabled, star-args
""" WebDAV upload method for dput.
Install to "/usr/share/dput/webdav.py".
"""
from __future__ import with_statement
import re
import os
import sys
import cgi
import netrc
import socket
import fnmatch
import getpass
import httplib
import urllib2
import urlparse
import unittest
from contextlib import closing
from email import parser as rfc2822_parser
try:
import dputhelper
except ImportError:
sys.path.insert(0, "/usr/share/dput/helper")
import dputhelper
# Block size for upload streaming
CHUNK_SIZE = 16 * 1024
def trace(msg, **kwargs):
"""Emit log traces in debug mode."""
if trace.debug:
print("D: webdav: " + (msg % kwargs))
trace.debug = False
def log(msg, **kwargs):
"""Emit log message to stderr."""
sys.stdout.flush()
sys.stderr.write("webdav: " + (msg % kwargs) + "\n")
sys.stderr.flush()
def _resolve_credentials(fqdn, login):
|
class PromptingPasswordMgr(urllib2.HTTPPasswordMgr):
""" Custom password manager that prompts for a password once, if none is available otherwise.
Based on code in dput 0.9.6 (http method).
"""
def __init__(self, login):
urllib2.HTTPPasswordMgr.__init__(self)
self.login = login
def find_user_password(self, realm, authuri):
"""Prompt for a password once and remember it, unless already provided in the configuration."""
authuri = self.reduce_uri(authuri)[0]
authinfo = urllib2.HTTPPasswordMgr.find_user_password(self, realm, authuri)
if authinfo == (None, None):
credentials = self.login
if ':' in credentials:
authinfo = credentials.split(':', 1)
else:
password = getpass.getpass(" Password for %s:" % realm)
self.add_password(realm, authuri, credentials, password)
authinfo = credentials, password
return authinfo
def _distro2repo(distro, repo_mappings):
"""Map distribution names to repo names according to config settings."""
# Parse the mapping config
mappings = [(i.split('=', 1) if '=' in i else (i, i)) for i in repo_mappings.split()]
# Try to find a match
result = distro
for pattern, target in mappings:
if fnmatch.fnmatchcase(distro.lower(), pattern.lower()):
result = target
break
trace("Mapped distro '%(distro)s' to '%(repo)s'", distro=distro, repo=result)
return result
def _resolve_incoming(fqdn, login, incoming, changes=None, cli_params=None, repo_mappings=""):
"""Resolve the given `incoming` value to a working URL."""
# Build fully qualified URL
scheme, netloc, path, params, query, anchor = urlparse.urlparse(incoming, scheme="http", allow_fragments=True)
if scheme not in ("http", "https"):
raise dputhelper.DputUploadFatalException("Unsupported URL scheme '%s'" % scheme)
url = urlparse.urlunparse((scheme, netloc or fqdn, path.rstrip('/') + '/', params, query, None))
# Parse anchor to parameters
url_params = dict(cgi.parse_qsl(anchor or '', keep_blank_values=True))
# Read changes from stream or file
pkgdata = {}
if changes:
try:
changes.read # pylint: disable=maybe-no-member
except AttributeError:
with closing(open(changes, "r")) as handle:
changes = handle.read()
else:
changes = changes.read() # pylint: disable=maybe-no-member
if changes.startswith("-----BEGIN PGP SIGNED MESSAGE-----"):
# Let someone else check this, we don't care a bit; gimme the data already
trace("Extracting package metadata from PGP signed message...")
changes = changes.split("-----BEGIN PGP")[1].replace('\r', '').split('\n\n', 1)[1]
pkgdata = dict([(key.lower().replace('-', '_'), val.strip())
for key, val in rfc2822_parser.HeaderParser().parsestr(changes).items()
])
# Extend changes metadata
pkgdata["loginuser"] = login.split(':')[0]
if "version" in pkgdata:
pkgdata["upstream"] = re.split(r"[-~]", pkgdata["version"])[0]
pkgdata.update(dict(
fqdn=fqdn, repo=_distro2repo(pkgdata.get("distribution", "unknown"), repo_mappings),
))
pkgdata.update(cli_params or {}) # CLI options can overwrite anything
trace("Collected metadata:\n %(meta)s", meta="\n ".join(["%s = %s" % (key, val)
for key, val in sorted(pkgdata.items())
if '\n' not in val # only print 'simple' values
]))
# Interpolate `url`
try:
try:
url.format
except AttributeError:
url = url % pkgdata # Python 2.5
else:
url = url.format(**pkgdata) # Python 2.6+
except KeyError, exc:
raise dputhelper.DputUploadFatalException("Unknown key (%s) in incoming templates '%s'" % (exc, incoming))
trace("Resolved incoming to `%(url)s' params=%(params)r", url=url, params=url_params)
return url, url_params
def _url_connection(url, method, skip_host=False, skip_accept_encoding=False):
"""Create HTTP[S] connection for `url`."""
scheme, netloc, path, params, query, _ = urlparse.urlparse(url)
result = conn = (httplib.HTTPSConnection if scheme == "https" else httplib.HTTPConnection)(netloc)
conn.debuglevel = int(trace.debug)
try:
conn.putrequest(method, urlparse.urlunparse((None, None, path, params, query, None)), skip_host, skip_accept_encoding)
conn.putheader("User-Agent", "dput")
conn.putheader("Connection", "close")
conn = None
finally:
if conn:
conn.close() # close in case of errors
return result
def _file_url(filepath, url):
"""Return URL for the given `filepath` in the DAV collection `url`."""
basename = os.path.basename(filepath)
return urlparse.urljoin(url.rstrip('/') + '/', basename)
def _dav_put(filepath, url, login, progress=None):
"""Upload `filepath` to given `url` (referring to a WebDAV collection)."""
fileurl = _file_url(filepath, url)
sys.stdout.write(" Uploading %s: " % os.path.basename(filepath))
sys.stdout.flush()
size = os.path.getsize(filepath)
with closing(open(filepath, 'r')) as handle:
if progress:
handle = dputhelper.FileWithProgress(handle, ptype=progress, progressf=sys.stdout, size=size)
trace("HTTP PUT to URL: %s" % fileurl)
try:
conn = _url_connection(fileurl, "PUT")
try:
conn.putheader("Authorization", 'Basic %s' % login.encode('base64').replace('\n', '').strip())
conn.putheader("Content-Length", str(size))
conn.endheaders()
conn.debuglevel = 0
while True:
data = handle.read(CHUNK_SIZE)
if not data:
break
conn.send(data)
conn.debuglevel = int(trace.debug)
resp = conn.getresponse()
if 200 <= resp.status <= 299:
print " done."
#elif res.status == 401 and not auth_headers:
#print "need authentication."
#auth_headers = AuthHandlerHackAround(url, res.msg, pwman).get_auth_headers()
elif resp.status == 401:
print " unauthorized."
raise urllib2.URLError("Upload failed as unauthorized (%s),"
" maybe wrong username or password?" % resp.reason)
else:
print " failed."
raise urllib2.URLError("Unexpected HTTP status %d %s" % (resp.status, resp.reason))
resp.read() # eat response body
finally:
conn.close()
except httplib.HTTPException, exc:
raise urllib2.URLError(exc)
def _check_url(url, allowed, mindepth=0):
"""Check if HTTP GET `url` returns a status code in `allowed`."""
if mindepth:
scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
path = '/'.join(path.split('/')[:mindepth+1]).rstrip('/') + '/'
url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
trace("Checking URL '%(url)s'", url=url)
try:
# TODO: Check requests need to use login credentials
with closing(urllib2.urlopen(url)) as handle:
handle.read()
code = handle.code
if code not in allowed:
raise urllib2.HTTPError(url, code,
"Unallowed HTTP status %d (%s)" % (code, handle.msg),
handle.headers, None)
except urllib2.HTTPError, exc:
code = exc.code
if code not in allowed:
raise
trace("Code %(code)d OK for URL '%(url)s'", url=url, code=code)
def _get_host_argument(fqdn):
""" We have to jump through several hoops to get to our config section,
which in turn is the only place where the host argument is available.
"""
import __main__ as dput # if only we would get passed our config section...
config = dput.config # pylint: disable=no-member
result = ""
for section in config.sections():
if (config.has_option(section, "fqdn")
and config.get(section, "fqdn") == fqdn
and config.has_option(section, section)):
result = config.get(section, section)
return result
def _get_config_data(fqdn):
"""Get configuration section for the chosen host, and CLI host parameters."""
# Without the patch applied, fall back to ugly hacks
if not upload.extended_info:
try:
caller = sys._getframe(2) # pylint: disable=protected-access
except AttributeError:
pass # somehow not CPython
else:
config = caller.f_globals.get("config")
host = caller.f_locals.get("host")
del caller
if config and host:
upload.extended_info = dict(config=config, host=host)
if upload.extended_info:
host_config = dict(upload.extended_info["config"].items(upload.extended_info["host"]))
host_argument = host_config.get(upload.extended_info["host"], "")
else:
host_config = {}
host_argument = _get_host_argument(fqdn)
log("WARN: Extended host configuration not available!")
# Parse "host:key=val;..." argument from command line into a dict
cli_params = dict(cgi.parse_qsl(host_argument.replace(',', ';'), keep_blank_values=True))
return host_config, cli_params
def upload(fqdn, login, incoming, files_to_upload, # pylint: disable=too-many-arguments
debug, dummy, progress=None):
"""Upload the files via WebDAV."""
assert sys.version_info >= (2, 5), "Your snake is a rotting corpse (Python 2.5+ required)"
trace.debug = bool(debug)
try:
host_config, cli_params = _get_config_data(fqdn)
login = _resolve_credentials(fqdn, login)
# Handle .changes file
changes_file = [i for i in files_to_upload if i.endswith(".changes")]
if not changes_file:
log("WARN: No changes file found in %(n)d files to upload", n=len(files_to_upload))
changes_file = None
else:
if len(changes_file) > 1:
log("WARN: More than one changes file found in %(n)d files to upload,"
" taking the 1st:\n %(changes)s",
n=len(files_to_upload), changes="\n ".join(changes_file))
changes_file = changes_file[0]
# Prepare for uploading
incoming, repo_params = _resolve_incoming(fqdn, login, incoming, changes=changes_file,
cli_params=cli_params, repo_mappings=host_config.get("repo_mappings", ""))
log("INFO: Destination base URL is\n %(url)s", url=urllib2.quote(incoming, safe=":/~;#"))
repo_params.update(cli_params)
mindepth = int(repo_params.get("mindepth", "0"), 10)
overwrite = int(repo_params.get("overwrite", "0"), 10)
# TODO: Add ability to enter missing password via terminal
# auth_handler = PromptingPasswordMgr(login)
# Special handling for integration test code
if "integration-test" in cli_params:
import pprint
print "upload arguments = ",
pprint.pprint(dict((k, v) for k, v in locals().iteritems() if k in (
"fqdn", "login", "incoming", "files_to_upload", "debug", "dummy", "progress")))
print "host config = ",
pprint.pprint(host_config)
print "host arguments = ",
pprint.pprint(cli_params)
else:
# TODO: "bintray" REST API support
# POST /packages/:subject/:repo
# POST /packages/:subject/:repo/:package/versions
# Check if .changes file already exists
if not overwrite and changes_file:
try:
_check_url(_file_url(changes_file, incoming), [404])
except urllib2.HTTPError, exc:
raise dputhelper.DputUploadFatalException("Overwriting existing changes at '%s' not allowed: %s" % (
_file_url(changes_file, incoming), exc))
# Check for existence of target path with minimal depth
if mindepth:
try:
_check_url(incoming, range(200, 300), mindepth=mindepth)
except urllib2.HTTPError, exc:
raise dputhelper.DputUploadFatalException("Required repository path '%s' doesn't exist: %s" % (
exc.filename, exc))
# Upload the files in the given order
for filepath in files_to_upload:
if "simulate" in cli_params:
log("WOULD upload '%(filename)s'", filename=os.path.basename(filepath))
else:
_dav_put(filepath, incoming, login, progress)
except (dputhelper.DputUploadFatalException, socket.error, urllib2.URLError, EnvironmentError), exc:
log("FATAL: %(exc)s", exc=exc)
sys.exit(1)
upload.extended_info = {}
#
# Unit Tests
#
def py25_format(template):
"""Helper for testing under Python 2.5."""
return template if sys.version_info >= (2, 6) else template.replace("{", "%(").replace("}", ")s")
class WebdavTest(unittest.TestCase): # pylint: disable=too-many-public-methods
"""Local unittests."""
DISTRO2REPO_DATA = [
("unknown", "incoming"),
("foobar", "incoming"),
("unstable", "snapshots"),
("snapshots", "snapshots"),
("foo-experimental", "snapshots"),
("bar-experimental", "snapshots"),
]
def test_distro2repo(self):
"""Test distribution mapping."""
cfg = "snapshots unstable=snapshots *-experimental=snapshots *=incoming"
for distro, repo in self.DISTRO2REPO_DATA:
result = _distro2repo(distro, cfg)
self.assertEquals(result, repo)
def test_resolve_incoming(self):
"""Test URL resolving."""
result, params = _resolve_incoming("repo.example.com:80", "", "incoming")
self.assertEquals(result, "http://repo.example.com:80/incoming/")
self.assertEquals(params, {})
result, _ = _resolve_incoming("repo.example.com:80", "", "https:///incoming/")
self.assertEquals(result, "https://repo.example.com:80/incoming/")
result, _ = _resolve_incoming("repo.example.com:80", "", "//explicit/incoming/")
self.assertEquals(result, "http://explicit/incoming/")
result, _ = _resolve_incoming("repo.example.com:80", "", py25_format("//{fqdn}/incoming/"))
self.assertEquals(result, "http://repo.example.com:80/incoming/")
_, params = _resolve_incoming("", "", "incoming#a=1&b=c")
self.assertEquals(params, dict(a="1", b="c"))
result, _ = _resolve_incoming("repo.example.com:80", "johndoe", py25_format("incoming/{loginuser}"))
self.assertEquals(result, "http://repo.example.com:80/incoming/johndoe/")
# Unsupported URL scheme
self.assertRaises(dputhelper.DputUploadFatalException, _resolve_incoming, "", "", "file:///incoming/")
# Unknown key
self.assertRaises(dputhelper.DputUploadFatalException, _resolve_incoming,
"", "", py25_format("http://example.com/incoming/{not_defined_ever}/"))
if __name__ == "__main__":
print("artifactory webdav plugin tests")
unittest.main()
| """Look up special forms of credential references."""
result = login
if "$" in result:
result = os.path.expandvars(result)
if result.startswith("netrc:"):
result = result.split(':', 1)[1]
if result:
result = os.path.abspath(os.path.expanduser(result))
accounts = netrc.netrc(result or None)
account = accounts.authenticators(fqdn)
if not account or not(account[0] or account[1]):
raise dputhelper.DputUploadFatalException("Cannot find account for host %s in %s netrc file" % (
fqdn, result or "default"))
# account is (login, account, password)
user, pwd = account[0] or account[1], account[2] or ""
result = "%s:%s" % (user, pwd)
else:
if result.startswith("file:"):
result = os.path.abspath(os.path.expanduser(result.split(':', 1)[1]))
with closing(open(result, "r")) as handle:
result = handle.read().strip()
try:
user, pwd = result.split(':', 1)
except ValueError:
user, pwd = result, ""
trace("Resolved login credentials to %(user)s:%(pwd)s", user=user, pwd='*' * len(pwd))
return result | identifier_body |
webdav.py | # -*- coding: utf-8 -*-
# pylint: disable=locally-disabled, star-args
""" WebDAV upload method for dput.
Install to "/usr/share/dput/webdav.py".
"""
from __future__ import with_statement
import re
import os
import sys
import cgi
import netrc
import socket
import fnmatch
import getpass
import httplib
import urllib2
import urlparse
import unittest
from contextlib import closing
from email import parser as rfc2822_parser
try:
import dputhelper
except ImportError:
sys.path.insert(0, "/usr/share/dput/helper")
import dputhelper
# Block size for upload streaming
CHUNK_SIZE = 16 * 1024
def | (msg, **kwargs):
"""Emit log traces in debug mode."""
if trace.debug:
print("D: webdav: " + (msg % kwargs))
trace.debug = False
def log(msg, **kwargs):
"""Emit log message to stderr."""
sys.stdout.flush()
sys.stderr.write("webdav: " + (msg % kwargs) + "\n")
sys.stderr.flush()
def _resolve_credentials(fqdn, login):
"""Look up special forms of credential references."""
result = login
if "$" in result:
result = os.path.expandvars(result)
if result.startswith("netrc:"):
result = result.split(':', 1)[1]
if result:
result = os.path.abspath(os.path.expanduser(result))
accounts = netrc.netrc(result or None)
account = accounts.authenticators(fqdn)
if not account or not(account[0] or account[1]):
raise dputhelper.DputUploadFatalException("Cannot find account for host %s in %s netrc file" % (
fqdn, result or "default"))
# account is (login, account, password)
user, pwd = account[0] or account[1], account[2] or ""
result = "%s:%s" % (user, pwd)
else:
if result.startswith("file:"):
result = os.path.abspath(os.path.expanduser(result.split(':', 1)[1]))
with closing(open(result, "r")) as handle:
result = handle.read().strip()
try:
user, pwd = result.split(':', 1)
except ValueError:
user, pwd = result, ""
trace("Resolved login credentials to %(user)s:%(pwd)s", user=user, pwd='*' * len(pwd))
return result
class PromptingPasswordMgr(urllib2.HTTPPasswordMgr):
""" Custom password manager that prompts for a password once, if none is available otherwise.
Based on code in dput 0.9.6 (http method).
"""
def __init__(self, login):
urllib2.HTTPPasswordMgr.__init__(self)
self.login = login
def find_user_password(self, realm, authuri):
"""Prompt for a password once and remember it, unless already provided in the configuration."""
authuri = self.reduce_uri(authuri)[0]
authinfo = urllib2.HTTPPasswordMgr.find_user_password(self, realm, authuri)
if authinfo == (None, None):
credentials = self.login
if ':' in credentials:
authinfo = credentials.split(':', 1)
else:
password = getpass.getpass(" Password for %s:" % realm)
self.add_password(realm, authuri, credentials, password)
authinfo = credentials, password
return authinfo
def _distro2repo(distro, repo_mappings):
"""Map distribution names to repo names according to config settings."""
# Parse the mapping config
mappings = [(i.split('=', 1) if '=' in i else (i, i)) for i in repo_mappings.split()]
# Try to find a match
result = distro
for pattern, target in mappings:
if fnmatch.fnmatchcase(distro.lower(), pattern.lower()):
result = target
break
trace("Mapped distro '%(distro)s' to '%(repo)s'", distro=distro, repo=result)
return result
def _resolve_incoming(fqdn, login, incoming, changes=None, cli_params=None, repo_mappings=""):
"""Resolve the given `incoming` value to a working URL."""
# Build fully qualified URL
scheme, netloc, path, params, query, anchor = urlparse.urlparse(incoming, scheme="http", allow_fragments=True)
if scheme not in ("http", "https"):
raise dputhelper.DputUploadFatalException("Unsupported URL scheme '%s'" % scheme)
url = urlparse.urlunparse((scheme, netloc or fqdn, path.rstrip('/') + '/', params, query, None))
# Parse anchor to parameters
url_params = dict(cgi.parse_qsl(anchor or '', keep_blank_values=True))
# Read changes from stream or file
pkgdata = {}
if changes:
try:
changes.read # pylint: disable=maybe-no-member
except AttributeError:
with closing(open(changes, "r")) as handle:
changes = handle.read()
else:
changes = changes.read() # pylint: disable=maybe-no-member
if changes.startswith("-----BEGIN PGP SIGNED MESSAGE-----"):
# Let someone else check this, we don't care a bit; gimme the data already
trace("Extracting package metadata from PGP signed message...")
changes = changes.split("-----BEGIN PGP")[1].replace('\r', '').split('\n\n', 1)[1]
pkgdata = dict([(key.lower().replace('-', '_'), val.strip())
for key, val in rfc2822_parser.HeaderParser().parsestr(changes).items()
])
# Extend changes metadata
pkgdata["loginuser"] = login.split(':')[0]
if "version" in pkgdata:
pkgdata["upstream"] = re.split(r"[-~]", pkgdata["version"])[0]
pkgdata.update(dict(
fqdn=fqdn, repo=_distro2repo(pkgdata.get("distribution", "unknown"), repo_mappings),
))
pkgdata.update(cli_params or {}) # CLI options can overwrite anything
trace("Collected metadata:\n %(meta)s", meta="\n ".join(["%s = %s" % (key, val)
for key, val in sorted(pkgdata.items())
if '\n' not in val # only print 'simple' values
]))
# Interpolate `url`
try:
try:
url.format
except AttributeError:
url = url % pkgdata # Python 2.5
else:
url = url.format(**pkgdata) # Python 2.6+
except KeyError, exc:
raise dputhelper.DputUploadFatalException("Unknown key (%s) in incoming templates '%s'" % (exc, incoming))
trace("Resolved incoming to `%(url)s' params=%(params)r", url=url, params=url_params)
return url, url_params
def _url_connection(url, method, skip_host=False, skip_accept_encoding=False):
"""Create HTTP[S] connection for `url`."""
scheme, netloc, path, params, query, _ = urlparse.urlparse(url)
result = conn = (httplib.HTTPSConnection if scheme == "https" else httplib.HTTPConnection)(netloc)
conn.debuglevel = int(trace.debug)
try:
conn.putrequest(method, urlparse.urlunparse((None, None, path, params, query, None)), skip_host, skip_accept_encoding)
conn.putheader("User-Agent", "dput")
conn.putheader("Connection", "close")
conn = None
finally:
if conn:
conn.close() # close in case of errors
return result
def _file_url(filepath, url):
"""Return URL for the given `filepath` in the DAV collection `url`."""
basename = os.path.basename(filepath)
return urlparse.urljoin(url.rstrip('/') + '/', basename)
def _dav_put(filepath, url, login, progress=None):
"""Upload `filepath` to given `url` (referring to a WebDAV collection)."""
fileurl = _file_url(filepath, url)
sys.stdout.write(" Uploading %s: " % os.path.basename(filepath))
sys.stdout.flush()
size = os.path.getsize(filepath)
with closing(open(filepath, 'r')) as handle:
if progress:
handle = dputhelper.FileWithProgress(handle, ptype=progress, progressf=sys.stdout, size=size)
trace("HTTP PUT to URL: %s" % fileurl)
try:
conn = _url_connection(fileurl, "PUT")
try:
conn.putheader("Authorization", 'Basic %s' % login.encode('base64').replace('\n', '').strip())
conn.putheader("Content-Length", str(size))
conn.endheaders()
conn.debuglevel = 0
while True:
data = handle.read(CHUNK_SIZE)
if not data:
break
conn.send(data)
conn.debuglevel = int(trace.debug)
resp = conn.getresponse()
if 200 <= resp.status <= 299:
print " done."
#elif res.status == 401 and not auth_headers:
#print "need authentication."
#auth_headers = AuthHandlerHackAround(url, res.msg, pwman).get_auth_headers()
elif resp.status == 401:
print " unauthorized."
raise urllib2.URLError("Upload failed as unauthorized (%s),"
" maybe wrong username or password?" % resp.reason)
else:
print " failed."
raise urllib2.URLError("Unexpected HTTP status %d %s" % (resp.status, resp.reason))
resp.read() # eat response body
finally:
conn.close()
except httplib.HTTPException, exc:
raise urllib2.URLError(exc)
def _check_url(url, allowed, mindepth=0):
"""Check if HTTP GET `url` returns a status code in `allowed`."""
if mindepth:
scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
path = '/'.join(path.split('/')[:mindepth+1]).rstrip('/') + '/'
url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
trace("Checking URL '%(url)s'", url=url)
try:
# TODO: Check requests need to use login credentials
with closing(urllib2.urlopen(url)) as handle:
handle.read()
code = handle.code
if code not in allowed:
raise urllib2.HTTPError(url, code,
"Unallowed HTTP status %d (%s)" % (code, handle.msg),
handle.headers, None)
except urllib2.HTTPError, exc:
code = exc.code
if code not in allowed:
raise
trace("Code %(code)d OK for URL '%(url)s'", url=url, code=code)
def _get_host_argument(fqdn):
""" We have to jump through several hoops to get to our config section,
which in turn is the only place where the host argument is available.
"""
import __main__ as dput # if only we would get passed our config section...
config = dput.config # pylint: disable=no-member
result = ""
for section in config.sections():
if (config.has_option(section, "fqdn")
and config.get(section, "fqdn") == fqdn
and config.has_option(section, section)):
result = config.get(section, section)
return result
def _get_config_data(fqdn):
"""Get configuration section for the chosen host, and CLI host parameters."""
# Without the patch applied, fall back to ugly hacks
if not upload.extended_info:
try:
caller = sys._getframe(2) # pylint: disable=protected-access
except AttributeError:
pass # somehow not CPython
else:
config = caller.f_globals.get("config")
host = caller.f_locals.get("host")
del caller
if config and host:
upload.extended_info = dict(config=config, host=host)
if upload.extended_info:
host_config = dict(upload.extended_info["config"].items(upload.extended_info["host"]))
host_argument = host_config.get(upload.extended_info["host"], "")
else:
host_config = {}
host_argument = _get_host_argument(fqdn)
log("WARN: Extended host configuration not available!")
# Parse "host:key=val;..." argument from command line into a dict
cli_params = dict(cgi.parse_qsl(host_argument.replace(',', ';'), keep_blank_values=True))
return host_config, cli_params
def upload(fqdn, login, incoming, files_to_upload, # pylint: disable=too-many-arguments
debug, dummy, progress=None):
"""Upload the files via WebDAV."""
assert sys.version_info >= (2, 5), "Your snake is a rotting corpse (Python 2.5+ required)"
trace.debug = bool(debug)
try:
host_config, cli_params = _get_config_data(fqdn)
login = _resolve_credentials(fqdn, login)
# Handle .changes file
changes_file = [i for i in files_to_upload if i.endswith(".changes")]
if not changes_file:
log("WARN: No changes file found in %(n)d files to upload", n=len(files_to_upload))
changes_file = None
else:
if len(changes_file) > 1:
log("WARN: More than one changes file found in %(n)d files to upload,"
" taking the 1st:\n %(changes)s",
n=len(files_to_upload), changes="\n ".join(changes_file))
changes_file = changes_file[0]
# Prepare for uploading
incoming, repo_params = _resolve_incoming(fqdn, login, incoming, changes=changes_file,
cli_params=cli_params, repo_mappings=host_config.get("repo_mappings", ""))
log("INFO: Destination base URL is\n %(url)s", url=urllib2.quote(incoming, safe=":/~;#"))
repo_params.update(cli_params)
mindepth = int(repo_params.get("mindepth", "0"), 10)
overwrite = int(repo_params.get("overwrite", "0"), 10)
# TODO: Add ability to enter missing password via terminal
# auth_handler = PromptingPasswordMgr(login)
# Special handling for integration test code
if "integration-test" in cli_params:
import pprint
print "upload arguments = ",
pprint.pprint(dict((k, v) for k, v in locals().iteritems() if k in (
"fqdn", "login", "incoming", "files_to_upload", "debug", "dummy", "progress")))
print "host config = ",
pprint.pprint(host_config)
print "host arguments = ",
pprint.pprint(cli_params)
else:
# TODO: "bintray" REST API support
# POST /packages/:subject/:repo
# POST /packages/:subject/:repo/:package/versions
# Check if .changes file already exists
if not overwrite and changes_file:
try:
_check_url(_file_url(changes_file, incoming), [404])
except urllib2.HTTPError, exc:
raise dputhelper.DputUploadFatalException("Overwriting existing changes at '%s' not allowed: %s" % (
_file_url(changes_file, incoming), exc))
# Check for existence of target path with minimal depth
if mindepth:
try:
_check_url(incoming, range(200, 300), mindepth=mindepth)
except urllib2.HTTPError, exc:
raise dputhelper.DputUploadFatalException("Required repository path '%s' doesn't exist: %s" % (
exc.filename, exc))
# Upload the files in the given order
for filepath in files_to_upload:
if "simulate" in cli_params:
log("WOULD upload '%(filename)s'", filename=os.path.basename(filepath))
else:
_dav_put(filepath, incoming, login, progress)
except (dputhelper.DputUploadFatalException, socket.error, urllib2.URLError, EnvironmentError), exc:
log("FATAL: %(exc)s", exc=exc)
sys.exit(1)
upload.extended_info = {}
#
# Unit Tests
#
def py25_format(template):
"""Helper for testing under Python 2.5."""
return template if sys.version_info >= (2, 6) else template.replace("{", "%(").replace("}", ")s")
class WebdavTest(unittest.TestCase): # pylint: disable=too-many-public-methods
"""Local unittests."""
DISTRO2REPO_DATA = [
("unknown", "incoming"),
("foobar", "incoming"),
("unstable", "snapshots"),
("snapshots", "snapshots"),
("foo-experimental", "snapshots"),
("bar-experimental", "snapshots"),
]
def test_distro2repo(self):
"""Test distribution mapping."""
cfg = "snapshots unstable=snapshots *-experimental=snapshots *=incoming"
for distro, repo in self.DISTRO2REPO_DATA:
result = _distro2repo(distro, cfg)
self.assertEquals(result, repo)
def test_resolve_incoming(self):
"""Test URL resolving."""
result, params = _resolve_incoming("repo.example.com:80", "", "incoming")
self.assertEquals(result, "http://repo.example.com:80/incoming/")
self.assertEquals(params, {})
result, _ = _resolve_incoming("repo.example.com:80", "", "https:///incoming/")
self.assertEquals(result, "https://repo.example.com:80/incoming/")
result, _ = _resolve_incoming("repo.example.com:80", "", "//explicit/incoming/")
self.assertEquals(result, "http://explicit/incoming/")
result, _ = _resolve_incoming("repo.example.com:80", "", py25_format("//{fqdn}/incoming/"))
self.assertEquals(result, "http://repo.example.com:80/incoming/")
_, params = _resolve_incoming("", "", "incoming#a=1&b=c")
self.assertEquals(params, dict(a="1", b="c"))
result, _ = _resolve_incoming("repo.example.com:80", "johndoe", py25_format("incoming/{loginuser}"))
self.assertEquals(result, "http://repo.example.com:80/incoming/johndoe/")
# Unsupported URL scheme
self.assertRaises(dputhelper.DputUploadFatalException, _resolve_incoming, "", "", "file:///incoming/")
# Unknown key
self.assertRaises(dputhelper.DputUploadFatalException, _resolve_incoming,
"", "", py25_format("http://example.com/incoming/{not_defined_ever}/"))
if __name__ == "__main__":
print("artifactory webdav plugin tests")
unittest.main()
| trace | identifier_name |
agent.py | #-*- coding: utf-8 -*-
'''
Created on 24 дек. 20%0
@author: ivan
'''
import random
all_agents = """
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.1) Gecko/20090718 Firefox/3.5.1 | Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2)Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)
Mozilla/4.0 (compatible; MSIE 6.1; Windows XP)
"""
def get_ranmom_agent():
agents = None
for i in xrange(10):
agents = all_agents.replace(str(i), str(random.randint(0, 10)))
return agents.splitlines()[random.randint(1, 10)] | Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.219.6 Safari/532.1 | random_line_split |
agent.py | #-*- coding: utf-8 -*-
'''
Created on 24 дек. 20%0
@author: ivan
'''
import random
all_agents = """
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.1) Gecko/20090718 Firefox/3.5.1
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.219.6 Safari/532.1
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2)Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)
Mozilla/4.0 (compatible; MSIE 6.1; Windows XP)
"""
def get_ranmom_agent():
agents = None
for i in xrange(10):
age | return agents.splitlines()[random.randint(1, 10)]
| nts = all_agents.replace(str(i), str(random.randint(0, 10)))
| conditional_block |
agent.py | #-*- coding: utf-8 -*-
'''
Created on 24 дек. 20%0
@author: ivan
'''
import random
all_agents = """
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.1) Gecko/20090718 Firefox/3.5.1
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.219.6 Safari/532.1
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2)Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)
Mozilla/4.0 (compatible; MSIE 6.1; Windows XP)
"""
def get_ranmom_agent():
age | nts = None
for i in xrange(10):
agents = all_agents.replace(str(i), str(random.randint(0, 10)))
return agents.splitlines()[random.randint(1, 10)]
| identifier_body | |
agent.py | #-*- coding: utf-8 -*-
'''
Created on 24 дек. 20%0
@author: ivan
'''
import random
all_agents = """
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.1) Gecko/20090718 Firefox/3.5.1
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.219.6 Safari/532.1
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2)Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)
Mozilla/4.0 (compatible; MSIE 6.1; Windows XP)
"""
def get |
agents = None
for i in xrange(10):
agents = all_agents.replace(str(i), str(random.randint(0, 10)))
return agents.splitlines()[random.randint(1, 10)]
| _ranmom_agent(): | identifier_name |
__init__.py | # The MIT License (MIT)
#
# Copyright (c) 2013 Numenta, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from cept import Cept | from version import version as __version__ | random_line_split | |
socketio-jwt-tests.ts | import * as fs from 'fs';
import * as http from 'http';
import * as SocketIo from 'socket.io';
import { authorize, JwtSecretFuncCallback } from 'socketio-jwt';
const app = http.createServer((req: any, rsp: any) => {
fs.readFile(__dirname + '/index.html',
(err: Error | null, data: any) => {
if (err) |
rsp.writeHead(200);
rsp.end(data);
});
});
const io = SocketIo(app);
// This example test code is using the Node Http Server
io.on('connection', authorize({
secret: 'Your Secret Here'
}));
io.on('authenticated', (socket: SocketIo.Socket) => {
console.log('Authenticated!');
console.log(JSON.stringify((socket as any).decoded_token));
});
const secrets: any = {
user1: 'secret 1',
user2: 'secret 2'
};
// Assume a claim name of userId
function secretFunc(request: any, payload: any, callback: JwtSecretFuncCallback): void {
callback(null, secrets[payload.userId]);
}
// This example test code provides a callback function to get the secret
io.on('connection', authorize({
secret: secretFunc
}));
| {
rsp.writeHead(500);
return rsp.end('Error loading index.html');
} | conditional_block |
socketio-jwt-tests.ts | import * as fs from 'fs';
import * as http from 'http';
import * as SocketIo from 'socket.io';
import { authorize, JwtSecretFuncCallback } from 'socketio-jwt';
const app = http.createServer((req: any, rsp: any) => {
fs.readFile(__dirname + '/index.html',
(err: Error | null, data: any) => {
if (err) {
rsp.writeHead(500);
return rsp.end('Error loading index.html');
}
rsp.writeHead(200);
rsp.end(data);
});
});
const io = SocketIo(app);
// This example test code is using the Node Http Server
io.on('connection', authorize({
secret: 'Your Secret Here'
}));
io.on('authenticated', (socket: SocketIo.Socket) => {
console.log('Authenticated!');
console.log(JSON.stringify((socket as any).decoded_token));
});
const secrets: any = {
user1: 'secret 1',
user2: 'secret 2'
};
// Assume a claim name of userId
function | (request: any, payload: any, callback: JwtSecretFuncCallback): void {
callback(null, secrets[payload.userId]);
}
// This example test code provides a callback function to get the secret
io.on('connection', authorize({
secret: secretFunc
}));
| secretFunc | identifier_name |
socketio-jwt-tests.ts | import * as fs from 'fs';
import * as http from 'http';
import * as SocketIo from 'socket.io';
import { authorize, JwtSecretFuncCallback } from 'socketio-jwt';
const app = http.createServer((req: any, rsp: any) => {
fs.readFile(__dirname + '/index.html',
(err: Error | null, data: any) => {
if (err) {
rsp.writeHead(500);
return rsp.end('Error loading index.html');
}
rsp.writeHead(200);
rsp.end(data);
});
});
const io = SocketIo(app);
// This example test code is using the Node Http Server
io.on('connection', authorize({
secret: 'Your Secret Here'
}));
io.on('authenticated', (socket: SocketIo.Socket) => {
console.log('Authenticated!');
console.log(JSON.stringify((socket as any).decoded_token));
});
const secrets: any = {
user1: 'secret 1',
user2: 'secret 2'
};
// Assume a claim name of userId
function secretFunc(request: any, payload: any, callback: JwtSecretFuncCallback): void |
// This example test code provides a callback function to get the secret
io.on('connection', authorize({
secret: secretFunc
}));
| {
callback(null, secrets[payload.userId]);
} | identifier_body |
socketio-jwt-tests.ts | import * as fs from 'fs';
import * as http from 'http';
import * as SocketIo from 'socket.io';
import { authorize, JwtSecretFuncCallback } from 'socketio-jwt';
const app = http.createServer((req: any, rsp: any) => {
fs.readFile(__dirname + '/index.html',
(err: Error | null, data: any) => {
if (err) {
rsp.writeHead(500);
return rsp.end('Error loading index.html');
}
rsp.writeHead(200);
rsp.end(data);
});
});
const io = SocketIo(app);
// This example test code is using the Node Http Server
io.on('connection', authorize({
secret: 'Your Secret Here'
}));
| console.log(JSON.stringify((socket as any).decoded_token));
});
const secrets: any = {
user1: 'secret 1',
user2: 'secret 2'
};
// Assume a claim name of userId
function secretFunc(request: any, payload: any, callback: JwtSecretFuncCallback): void {
callback(null, secrets[payload.userId]);
}
// This example test code provides a callback function to get the secret
io.on('connection', authorize({
secret: secretFunc
})); | io.on('authenticated', (socket: SocketIo.Socket) => {
console.log('Authenticated!'); | random_line_split |
region_IE.py | """Auto-generated file, do not edit by hand. IE metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
| toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
emergency=PhoneNumberDesc(national_number_pattern='112|999', possible_number_pattern='\\d{3}', example_number='112'),
short_code=PhoneNumberDesc(national_number_pattern='112|51210|999', possible_number_pattern='\\d{3,5}', example_number='112'),
standard_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
carrier_specific=PhoneNumberDesc(national_number_pattern='51210', possible_number_pattern='\\d{5}'),
short_data=True) | PHONE_METADATA_IE = PhoneMetadata(id='IE', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='[159]\\d{2,4}', possible_number_pattern='\\d{3,5}'), | random_line_split |
rolling-array-test.js | import { isArray } from '@ember/array';
import { module, test } from 'qunit';
import RollingArray from 'nomad-ui/utils/classes/rolling-array';
module('Unit | Util | RollingArray', function() { | assert.deepEqual(
array,
['a', 'b', 'c'],
'additional arguments to the constructor become elements'
);
});
test('push works like Array#push', function(assert) {
const array = RollingArray(10);
const pushReturn = array.push('a');
assert.equal(
pushReturn,
array.length,
'the return value from push is equal to the return value of Array#push'
);
assert.equal(array[0], 'a', 'the arguments passed to push are appended to the array');
array.push('b', 'c', 'd');
assert.deepEqual(
array,
['a', 'b', 'c', 'd'],
'the elements already in the array are left in tact and new elements are appended'
);
});
test('when pushing past maxLength, items are removed from the head of the array', function(assert) {
const array = RollingArray(3);
const pushReturn = array.push(1, 2, 3, 4);
assert.deepEqual(
array,
[2, 3, 4],
'The first argument to push is not in the array, but the following three are'
);
assert.equal(
pushReturn,
array.length,
'The return value of push is still the array length despite more arguments than possible were provided to push'
);
});
test('when splicing past maxLength, items are removed from the head of the array', function(assert) {
const array = RollingArray(3, 'a', 'b', 'c');
array.splice(1, 0, 'z');
assert.deepEqual(
array,
['z', 'b', 'c'],
'The new element is inserted as the second element in the array and the first element is removed due to maxLength restrictions'
);
array.splice(0, 0, 'pickme');
assert.deepEqual(
array,
['z', 'b', 'c'],
'The new element never makes it into the array since it was added at the head of the array and immediately removed'
);
array.splice(0, 1, 'pickme');
assert.deepEqual(
array,
['pickme', 'b', 'c'],
'The new element makes it into the array since the previous element at the head of the array is first removed due to the second argument to splice'
);
});
test('unshift throws instead of prepending elements', function(assert) {
const array = RollingArray(5);
assert.throws(
() => {
array.unshift(1);
},
/Cannot unshift/,
'unshift is not supported, but is not undefined'
);
});
test('RollingArray is an instance of Array', function(assert) {
const array = RollingArray(5);
assert.ok(array.constructor === Array, 'The constructor is Array');
assert.ok(array instanceof Array, 'The instanceof check is true');
assert.ok(isArray(array), 'The ember isArray helper works');
});
}); | test('has a maxLength property that gets set in the constructor', function(assert) {
const array = RollingArray(10, 'a', 'b', 'c');
assert.equal(array.maxLength, 10, 'maxLength is set in the constructor'); | random_line_split |
vrp_tokens.py | #!/usr/bin/env python3
# Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple VRP with special locations which need to be visited at end of the route."""
# [START import]
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
# [END import]
def create_data_model():
"""Stores the data for the problem."""
data = {}
# Special location don't consume token, while regular one consume one
data['tokens'] = [
0, # 0 depot
0, # 1 special node
0, # 2 special node
0, # 3 special node
0, # 4 special node
0, # 5 special node
-1, # 6
-1, # 7
-1, # 8
-1, # 9
-1, # 10
-1, # 11
-1, # 12
-1, # 13
-1, # 14
-1, # 15
-1, # 16
-1, # 17
-1, # 18
]
# just need to be big enough, not a limiting factor
data['vehicle_tokens'] = [20, 20, 20, 20]
data['num_vehicles'] = 4
data['depot'] = 0
return data
def print_solution(manager, routing, solution):
"""Prints solution on console."""
print(f'Objective: {solution.ObjectiveValue()}')
token_dimension = routing.GetDimensionOrDie('Token')
total_distance = 0
total_token = 0
for vehicle_id in range(manager.GetNumberOfVehicles()):
plan_output = f'Route for vehicle {vehicle_id}:\n'
index = routing.Start(vehicle_id)
total_token += solution.Value(token_dimension.CumulVar(index))
route_distance = 0
route_token = 0
while not routing.IsEnd(index):
node_index = manager.IndexToNode(index)
token_var = token_dimension.CumulVar(index)
route_token = solution.Value(token_var)
plan_output += f' {node_index} Token({route_token}) -> '
previous_index = index
index = solution.Value(routing.NextVar(index))
route_distance += routing.GetArcCostForVehicle(
previous_index, index, vehicle_id)
node_index = manager.IndexToNode(index)
token_var = token_dimension.CumulVar(index)
route_token = solution.Value(token_var)
plan_output += f' {node_index} Token({route_token})\n'
plan_output += f'Distance of the route: {route_distance}m\n'
total_distance += route_distance
print(plan_output)
print('Total distance of all routes: {}m'.format(total_distance))
print('Total token of all routes: {}'.format(total_token))
def main():
"""Solve the CVRP problem."""
# Instantiate the data problem.
data = create_data_model()
# Create the routing index manager.
manager = pywrapcp.RoutingIndexManager(len(data['tokens']),
data['num_vehicles'], data['depot'])
# Create Routing Model.
routing = pywrapcp.RoutingModel(manager)
# Create and register a transit callback.
def | (from_index, to_index):
"""Returns the distance between the two nodes."""
del from_index
del to_index
return 10
transit_callback_index = routing.RegisterTransitCallback(distance_callback)
routing.AddDimension(
transit_callback_index,
0, # null slack
3000, # maximum distance per vehicle
True, # start cumul to zero
'distance')
distance_dimension = routing.GetDimensionOrDie('distance')
distance_dimension.SetGlobalSpanCostCoefficient(100)
# Define cost of each arc.
routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
# Add Token constraint.
def token_callback(from_index):
"""Returns the number of token consumed by the node."""
# Convert from routing variable Index to tokens NodeIndex.
from_node = manager.IndexToNode(from_index)
return data['tokens'][from_node]
token_callback_index = routing.RegisterUnaryTransitCallback(token_callback)
routing.AddDimensionWithVehicleCapacity(
token_callback_index,
0, # null capacity slack
data['vehicle_tokens'], # vehicle maximum tokens
False, # start cumul to zero
'Token')
# Add constraint: special node can only be visited if token remaining is zero
token_dimension = routing.GetDimensionOrDie('Token')
for node in range(1, 6):
index = manager.NodeToIndex(node)
routing.solver().Add(token_dimension.CumulVar(index) == 0)
# Instantiate route start and end times to produce feasible times.
# [START depot_start_end_times]
for i in range(manager.GetNumberOfVehicles()):
routing.AddVariableMinimizedByFinalizer(
token_dimension.CumulVar(routing.Start(i)))
routing.AddVariableMinimizedByFinalizer(
token_dimension.CumulVar(routing.End(i)))
# [END depot_start_end_times]
# Setting first solution heuristic.
search_parameters = pywrapcp.DefaultRoutingSearchParameters()
search_parameters.first_solution_strategy = (
routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
search_parameters.local_search_metaheuristic = (
routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH)
search_parameters.time_limit.FromSeconds(1)
# Solve the problem.
solution = routing.SolveWithParameters(search_parameters)
# Print solution on console.
# [START print_solution]
if solution:
print_solution(manager, routing, solution)
else:
print('No solution found !')
# [END print_solution]
if __name__ == '__main__':
main()
| distance_callback | identifier_name |
vrp_tokens.py | #!/usr/bin/env python3
# Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple VRP with special locations which need to be visited at end of the route."""
# [START import]
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
# [END import]
def create_data_model():
    """Stores the data for the problem.

    Returns:
        dict with the per-node token transits ('tokens'), per-vehicle token
        capacities ('vehicle_tokens'), fleet size ('num_vehicles') and the
        depot index ('depot').
    """
    data = {}
    # Special locations don't consume a token, while each regular one
    # consumes one (a transit of -1 on the 'Token' dimension).
    data['tokens'] = [
        0,  # 0 depot
        0,  # 1 special node
        0,  # 2 special node
        0,  # 3 special node
        0,  # 4 special node
        0,  # 5 special node
        -1,  # 6
        -1,  # 7
        -1,  # 8
        -1,  # 9
        -1,  # 10
        -1,  # 11
        -1,  # 12
        -1,  # 13
        -1,  # 14
        -1,  # 15
        -1,  # 16
        -1,  # 17
        -1,  # 18
    ]
    # Just needs to be big enough; not a limiting factor.
    data['vehicle_tokens'] = [20, 20, 20, 20]
    data['num_vehicles'] = 4
    data['depot'] = 0
    return data
def print_solution(manager, routing, solution):
"""Prints solution on console."""
print(f'Objective: {solution.ObjectiveValue()}')
token_dimension = routing.GetDimensionOrDie('Token')
total_distance = 0
total_token = 0
for vehicle_id in range(manager.GetNumberOfVehicles()):
|
print('Total distance of all routes: {}m'.format(total_distance))
print('Total token of all routes: {}'.format(total_token))
def main():
    """Builds and solves the token-constrained VRP, then prints the result."""
    # Instantiate the data problem.
    data = create_data_model()

    # Create the routing index manager.
    manager = pywrapcp.RoutingIndexManager(len(data['tokens']),
                                           data['num_vehicles'], data['depot'])

    # Create Routing Model.
    routing = pywrapcp.RoutingModel(manager)

    # Create and register a transit callback.
    def distance_callback(from_index, to_index):
        """Returns the distance between the two nodes (constant in this demo)."""
        del from_index
        del to_index
        return 10

    transit_callback_index = routing.RegisterTransitCallback(distance_callback)
    routing.AddDimension(
        transit_callback_index,
        0,  # null slack
        3000,  # maximum distance per vehicle
        True,  # start cumul to zero
        'distance')
    distance_dimension = routing.GetDimensionOrDie('distance')
    # Penalize the spread between the longest and shortest routes.
    distance_dimension.SetGlobalSpanCostCoefficient(100)

    # Define cost of each arc.
    routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)

    # Add Token constraint.
    def token_callback(from_index):
        """Returns the number of tokens consumed by the node."""
        # Convert from routing variable Index to tokens NodeIndex.
        from_node = manager.IndexToNode(from_index)
        return data['tokens'][from_node]

    token_callback_index = routing.RegisterUnaryTransitCallback(token_callback)
    routing.AddDimensionWithVehicleCapacity(
        token_callback_index,
        0,  # null capacity slack
        data['vehicle_tokens'],  # vehicle maximum tokens
        False,  # start cumul to zero
        'Token')

    # Add constraint: a special node (1..5) can only be visited once the
    # vehicle's token cumul has reached zero.
    token_dimension = routing.GetDimensionOrDie('Token')
    for node in range(1, 6):
        index = manager.NodeToIndex(node)
        routing.solver().Add(token_dimension.CumulVar(index) == 0)

    # Ask the finalizer to minimize the token cumuls at each route's start
    # and end. (The snippet tag below is kept as-is for doc extraction.)
    # [START depot_start_end_times]
    for i in range(manager.GetNumberOfVehicles()):
        routing.AddVariableMinimizedByFinalizer(
            token_dimension.CumulVar(routing.Start(i)))
        routing.AddVariableMinimizedByFinalizer(
            token_dimension.CumulVar(routing.End(i)))
    # [END depot_start_end_times]

    # Setting first solution heuristic.
    search_parameters = pywrapcp.DefaultRoutingSearchParameters()
    search_parameters.first_solution_strategy = (
        routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
    search_parameters.local_search_metaheuristic = (
        routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH)
    search_parameters.time_limit.FromSeconds(1)

    # Solve the problem.
    solution = routing.SolveWithParameters(search_parameters)

    # Print solution on console.
    # [START print_solution]
    if solution:
        print_solution(manager, routing, solution)
    else:
        print('No solution found !')
    # [END print_solution]
if __name__ == '__main__':
main()
| plan_output = f'Route for vehicle {vehicle_id}:\n'
index = routing.Start(vehicle_id)
total_token += solution.Value(token_dimension.CumulVar(index))
route_distance = 0
route_token = 0
while not routing.IsEnd(index):
node_index = manager.IndexToNode(index)
token_var = token_dimension.CumulVar(index)
route_token = solution.Value(token_var)
plan_output += f' {node_index} Token({route_token}) -> '
previous_index = index
index = solution.Value(routing.NextVar(index))
route_distance += routing.GetArcCostForVehicle(
previous_index, index, vehicle_id)
node_index = manager.IndexToNode(index)
token_var = token_dimension.CumulVar(index)
route_token = solution.Value(token_var)
plan_output += f' {node_index} Token({route_token})\n'
plan_output += f'Distance of the route: {route_distance}m\n'
total_distance += route_distance
print(plan_output) | conditional_block |
vrp_tokens.py | #
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple VRP with special locations which need to be visited at end of the route."""
# [START import]
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
# [END import]
def create_data_model():
    """Builds and returns the data for the token-constrained VRP.

    Returns:
        dict with keys:
            'tokens': per-node token transit — 0 for the depot (node 0) and
                the five special nodes (1-5), -1 for each regular node (6-18).
            'vehicle_tokens': per-vehicle token capacity; just needs to be
                big enough, not a limiting factor.
            'num_vehicles': number of vehicles in the fleet.
            'depot': index of the depot node.
    """
    # Special locations don't consume a token, while each regular one
    # consumes one (a transit of -1).
    special_count = 6   # depot + five special nodes
    regular_count = 13  # nodes 6..18
    return {
        'tokens': [0] * special_count + [-1] * regular_count,
        'vehicle_tokens': [20] * 4,
        'num_vehicles': 4,
        'depot': 0,
    }
def print_solution(manager, routing, solution):
    """Prints solution on console.

    Shows, per vehicle, each visited node with the 'Token' cumul at that
    stop, the route distance, and finally fleet-wide totals.

    Args:
        manager: RoutingIndexManager mapping routing indices to node ids.
        routing: RoutingModel owning the 'Token' dimension.
        solution: Assignment returned by routing.SolveWithParameters().
    """
    print(f'Objective: {solution.ObjectiveValue()}')
    token_dimension = routing.GetDimensionOrDie('Token')
    total_distance = 0
    total_token = 0
    for vehicle_id in range(manager.GetNumberOfVehicles()):
        plan_output = f'Route for vehicle {vehicle_id}:\n'
        index = routing.Start(vehicle_id)
        # Token cumul at the route start (the vehicle's initial token level).
        total_token += solution.Value(token_dimension.CumulVar(index))
        route_distance = 0
        route_token = 0
        while not routing.IsEnd(index):
            node_index = manager.IndexToNode(index)
            token_var = token_dimension.CumulVar(index)
            route_token = solution.Value(token_var)
            plan_output += f' {node_index} Token({route_token}) -> '
            previous_index = index
            index = solution.Value(routing.NextVar(index))
            route_distance += routing.GetArcCostForVehicle(
                previous_index, index, vehicle_id)
        # Append the route's end node, which the loop above stops before.
        node_index = manager.IndexToNode(index)
        token_var = token_dimension.CumulVar(index)
        route_token = solution.Value(token_var)
        plan_output += f' {node_index} Token({route_token})\n'
        plan_output += f'Distance of the route: {route_distance}m\n'
        total_distance += route_distance
        print(plan_output)
    print('Total distance of all routes: {}m'.format(total_distance))
    print('Total token of all routes: {}'.format(total_token))
def main():
    """Builds and solves the token-constrained VRP, then prints the result."""
    # Instantiate the data problem.
    data = create_data_model()

    # Create the routing index manager.
    manager = pywrapcp.RoutingIndexManager(len(data['tokens']),
                                           data['num_vehicles'], data['depot'])

    # Create Routing Model.
    routing = pywrapcp.RoutingModel(manager)

    # Create and register a transit callback.
    def distance_callback(from_index, to_index):
        """Returns the distance between the two nodes (constant in this demo)."""
        del from_index
        del to_index
        return 10

    transit_callback_index = routing.RegisterTransitCallback(distance_callback)
    routing.AddDimension(
        transit_callback_index,
        0,  # null slack
        3000,  # maximum distance per vehicle
        True,  # start cumul to zero
        'distance')
    distance_dimension = routing.GetDimensionOrDie('distance')
    # Penalize the spread between the longest and shortest routes.
    distance_dimension.SetGlobalSpanCostCoefficient(100)

    # Define cost of each arc.
    routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)

    # Add Token constraint.
    def token_callback(from_index):
        """Returns the number of tokens consumed by the node."""
        # Convert from routing variable Index to tokens NodeIndex.
        from_node = manager.IndexToNode(from_index)
        return data['tokens'][from_node]

    token_callback_index = routing.RegisterUnaryTransitCallback(token_callback)
    routing.AddDimensionWithVehicleCapacity(
        token_callback_index,
        0,  # null capacity slack
        data['vehicle_tokens'],  # vehicle maximum tokens
        False,  # start cumul to zero
        'Token')

    # Add constraint: a special node (1..5) can only be visited once the
    # vehicle's token cumul has reached zero.
    token_dimension = routing.GetDimensionOrDie('Token')
    for node in range(1, 6):
        index = manager.NodeToIndex(node)
        routing.solver().Add(token_dimension.CumulVar(index) == 0)

    # Ask the finalizer to minimize the token cumuls at each route's start
    # and end. (The snippet tag below is kept as-is for doc extraction.)
    # [START depot_start_end_times]
    for i in range(manager.GetNumberOfVehicles()):
        routing.AddVariableMinimizedByFinalizer(
            token_dimension.CumulVar(routing.Start(i)))
        routing.AddVariableMinimizedByFinalizer(
            token_dimension.CumulVar(routing.End(i)))
    # [END depot_start_end_times]

    # Setting first solution heuristic.
    search_parameters = pywrapcp.DefaultRoutingSearchParameters()
    search_parameters.first_solution_strategy = (
        routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
    search_parameters.local_search_metaheuristic = (
        routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH)
    search_parameters.time_limit.FromSeconds(1)

    # Solve the problem.
    solution = routing.SolveWithParameters(search_parameters)

    # Print solution on console.
    # [START print_solution]
    if solution:
        print_solution(manager, routing, solution)
    else:
        print('No solution found !')
    # [END print_solution]
if __name__ == '__main__':
main() | #!/usr/bin/env python3
# Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at | random_line_split | |
vrp_tokens.py | #!/usr/bin/env python3
# Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple VRP with special locations which need to be visited at end of the route."""
# [START import]
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
# [END import]
def create_data_model():
    """Stores the data for the problem.

    Returns:
        dict with the per-node token transits ('tokens'), per-vehicle token
        capacities ('vehicle_tokens'), fleet size ('num_vehicles') and the
        depot index ('depot').
    """
    data = {}
    # Special locations don't consume a token, while each regular one
    # consumes one (a transit of -1 on the 'Token' dimension).
    data['tokens'] = [
        0,  # 0 depot
        0,  # 1 special node
        0,  # 2 special node
        0,  # 3 special node
        0,  # 4 special node
        0,  # 5 special node
        -1,  # 6
        -1,  # 7
        -1,  # 8
        -1,  # 9
        -1,  # 10
        -1,  # 11
        -1,  # 12
        -1,  # 13
        -1,  # 14
        -1,  # 15
        -1,  # 16
        -1,  # 17
        -1,  # 18
    ]
    # Just needs to be big enough; not a limiting factor.
    data['vehicle_tokens'] = [20, 20, 20, 20]
    data['num_vehicles'] = 4
    data['depot'] = 0
    return data
def print_solution(manager, routing, solution):
|
def main():
    """Builds and solves the token-constrained VRP, then prints the result."""
    # Instantiate the data problem.
    data = create_data_model()

    # Create the routing index manager.
    manager = pywrapcp.RoutingIndexManager(len(data['tokens']),
                                           data['num_vehicles'], data['depot'])

    # Create Routing Model.
    routing = pywrapcp.RoutingModel(manager)

    # Create and register a transit callback.
    def distance_callback(from_index, to_index):
        """Returns the distance between the two nodes (constant in this demo)."""
        del from_index
        del to_index
        return 10

    transit_callback_index = routing.RegisterTransitCallback(distance_callback)
    routing.AddDimension(
        transit_callback_index,
        0,  # null slack
        3000,  # maximum distance per vehicle
        True,  # start cumul to zero
        'distance')
    distance_dimension = routing.GetDimensionOrDie('distance')
    # Penalize the spread between the longest and shortest routes.
    distance_dimension.SetGlobalSpanCostCoefficient(100)

    # Define cost of each arc.
    routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)

    # Add Token constraint.
    def token_callback(from_index):
        """Returns the number of tokens consumed by the node."""
        # Convert from routing variable Index to tokens NodeIndex.
        from_node = manager.IndexToNode(from_index)
        return data['tokens'][from_node]

    token_callback_index = routing.RegisterUnaryTransitCallback(token_callback)
    routing.AddDimensionWithVehicleCapacity(
        token_callback_index,
        0,  # null capacity slack
        data['vehicle_tokens'],  # vehicle maximum tokens
        False,  # start cumul to zero
        'Token')

    # Add constraint: a special node (1..5) can only be visited once the
    # vehicle's token cumul has reached zero.
    token_dimension = routing.GetDimensionOrDie('Token')
    for node in range(1, 6):
        index = manager.NodeToIndex(node)
        routing.solver().Add(token_dimension.CumulVar(index) == 0)

    # Ask the finalizer to minimize the token cumuls at each route's start
    # and end. (The snippet tag below is kept as-is for doc extraction.)
    # [START depot_start_end_times]
    for i in range(manager.GetNumberOfVehicles()):
        routing.AddVariableMinimizedByFinalizer(
            token_dimension.CumulVar(routing.Start(i)))
        routing.AddVariableMinimizedByFinalizer(
            token_dimension.CumulVar(routing.End(i)))
    # [END depot_start_end_times]

    # Setting first solution heuristic.
    search_parameters = pywrapcp.DefaultRoutingSearchParameters()
    search_parameters.first_solution_strategy = (
        routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
    search_parameters.local_search_metaheuristic = (
        routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH)
    search_parameters.time_limit.FromSeconds(1)

    # Solve the problem.
    solution = routing.SolveWithParameters(search_parameters)

    # Print solution on console.
    # [START print_solution]
    if solution:
        print_solution(manager, routing, solution)
    else:
        print('No solution found !')
    # [END print_solution]
if __name__ == '__main__':
main()
| """Prints solution on console."""
print(f'Objective: {solution.ObjectiveValue()}')
token_dimension = routing.GetDimensionOrDie('Token')
total_distance = 0
total_token = 0
for vehicle_id in range(manager.GetNumberOfVehicles()):
plan_output = f'Route for vehicle {vehicle_id}:\n'
index = routing.Start(vehicle_id)
total_token += solution.Value(token_dimension.CumulVar(index))
route_distance = 0
route_token = 0
while not routing.IsEnd(index):
node_index = manager.IndexToNode(index)
token_var = token_dimension.CumulVar(index)
route_token = solution.Value(token_var)
plan_output += f' {node_index} Token({route_token}) -> '
previous_index = index
index = solution.Value(routing.NextVar(index))
route_distance += routing.GetArcCostForVehicle(
previous_index, index, vehicle_id)
node_index = manager.IndexToNode(index)
token_var = token_dimension.CumulVar(index)
route_token = solution.Value(token_var)
plan_output += f' {node_index} Token({route_token})\n'
plan_output += f'Distance of the route: {route_distance}m\n'
total_distance += route_distance
print(plan_output)
print('Total distance of all routes: {}m'.format(total_distance))
print('Total token of all routes: {}'.format(total_token)) | identifier_body |
settings.py | """
Django settings for BenHoboCo project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
GET_SOLO_TEMPLATE_TAG_NAME = 'get_solo'
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'b&r86v3qyzx=d^8p8k4$c!#imhb+jys*$g@yxz8#vt83@r-va_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True | 'cs410.cs.ualberta.ca:41011',
]
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'crispy_forms',
'solo',
'core',
'south',
'images',
'posts',
'authors',
'friends',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'BenHoboCo.urls'
WSGI_APPLICATION = 'BenHoboCo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME':'helix',
'USER':'myuser',
'PASSWORD':'mypass',
'HOST':'leago.btrinh.com',
'PORT':'3306',
}
}
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATIC_PATH = os.path.join( BASE_DIR, "static" )
STATICFILES_DIRS = (
STATIC_PATH,
)
# Templates
TEMPLATE_PATH = os.path.join( BASE_DIR, "templates")
TEMPLATE_DIRS = (
TEMPLATE_PATH,
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
'django.contrib.auth.context_processors.auth',
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join( BASE_DIR, 'media' )
LOGIN_URL = '/login/' |
# NOTE: Local server has to be in the first position!
ALLOWED_HOSTS = [
'127.0.0.1:8000', | random_line_split |
tree.js | // ========================================================================
// SproutCore -- JavaScript Application Framework
// Copyright ©2006-2011, Strobe Inc. and contributors.
// Portions copyright ©2008 Apple Inc. All rights reserved.
// ========================================================================
sc_require('controllers/object');
sc_require('mixins/selection_support');
sc_require('private/tree_item_observer');
/**
@class
A TreeController manages a tree of model objects that you might want to
display in the UI using a collection view. For the most part, you should
work with a TreeController much like you would an ObjectController, except
that the TreeController will also provide an arrangedObjects property that
can be used as the content of a CollectionView.
TODO: Document More
@extends SC.ObjectController
@extends SC.SelectionSupport
@since SproutCore 1.0
*/
SC.TreeController = SC.ObjectController.extend(SC.SelectionSupport,
/** @scope SC.TreeController.prototype */ {
// ..........................................................
// PROPERTIES
//
/**
Set to YES if you want the top-level items in the tree to be displayed as
group items in the collection view.
@property {Boolean}
*/
treeItemIsGrouped: NO,
/**
If your content support expanding and collapsing of content, then set this
property to the name of the key on your model that should be used to
determine the expansion state of the item. The default is
"treeItemIsExpanded"
@property {String}
*/
treeItemIsExpandedKey: "treeItemIsExpanded",
/**
Set to the name of the property on your content object that holds the | */
treeItemChildrenKey: "treeItemChildren",
/**
Returns an SC.Array object that actually will represent the tree as a
flat array suitable for use by a CollectionView. Other than binding this
property as the content of a CollectionView, you generally should not
use this property directly. Instead, work on the tree content using the
TreeController like you would any other ObjectController.
@property {SC.Array}
*/
arrangedObjects: function() {
var ret, content = this.get('content');
if (content) {
ret = SC.TreeItemObserver.create({ item: content, delegate: this });
} else ret = null; // empty!
this._sctc_arrangedObjects = ret ;
return ret ;
}.property().cacheable(),
// ..........................................................
// PRIVATE
//
/**
@private
Manually invalidate the arrangedObjects cache so that we can teardown
any existing value. We do it via an observer so that this will fire
immediately instead of waiting on some other component to get
arrangedObjects again.
*/
_sctc_invalidateArrangedObjects: function() {
this.propertyWillChange('arrangedObjects');
var ret = this._sctc_arrangedObjects;
if (ret) ret.destroy();
this._sctc_arrangedObjects = null;
this.propertyDidChange('arrangedObjects');
}.observes('content', 'treeItemIsExpandedKey', 'treeItemChildrenKey', 'treeItemIsGrouped'),
_sctc_arrangedObjectsContentDidChange: function() {
this.updateSelectionAfterContentChange();
}.observes('*arrangedObjects.[]'),
/**
@private
Returns the first item in arrangeObjects that is not a group. This uses
a brute force approach right now; we assume you probably don't have a lot
of groups up front.
*/
firstSelectableObject: function() {
var objects = this.get('arrangedObjects'),
indexes, len, idx = 0;
if (!objects) return null; // fast track
indexes = objects.contentGroupIndexes(null, objects);
len = objects.get('length');
while(indexes.contains(idx) && (idx<len)) idx++;
return idx>=len ? null : objects.objectAt(idx);
}.property()
}); | children array for each tree node. The default is "treeItemChildren".
@property {String} | random_line_split |
tree.js | // ========================================================================
// SproutCore -- JavaScript Application Framework
// Copyright ©2006-2011, Strobe Inc. and contributors.
// Portions copyright ©2008 Apple Inc. All rights reserved.
// ========================================================================
sc_require('controllers/object');
sc_require('mixins/selection_support');
sc_require('private/tree_item_observer');
/**
@class
A TreeController manages a tree of model objects that you might want to
display in the UI using a collection view. For the most part, you should
work with a TreeController much like you would an ObjectController, except
that the TreeController will also provide an arrangedObjects property that
can be used as the content of a CollectionView.
TODO: Document More
@extends SC.ObjectController
@extends SC.SelectionSupport
@since SproutCore 1.0
*/
SC.TreeController = SC.ObjectController.extend(SC.SelectionSupport,
/** @scope SC.TreeController.prototype */ {
// ..........................................................
// PROPERTIES
//
/**
Set to YES if you want the top-level items in the tree to be displayed as
group items in the collection view.
@property {Boolean}
*/
treeItemIsGrouped: NO,
/**
If your content support expanding and collapsing of content, then set this
property to the name of the key on your model that should be used to
determine the expansion state of the item. The default is
"treeItemIsExpanded"
@property {String}
*/
treeItemIsExpandedKey: "treeItemIsExpanded",
/**
Set to the name of the property on your content object that holds the
children array for each tree node. The default is "treeItemChildren".
@property {String}
*/
treeItemChildrenKey: "treeItemChildren",
/**
Returns an SC.Array object that actually will represent the tree as a
flat array suitable for use by a CollectionView. Other than binding this
property as the content of a CollectionView, you generally should not
use this property directly. Instead, work on the tree content using the
TreeController like you would any other ObjectController.
@property {SC.Array}
*/
arrangedObjects: function() {
var ret, content = this.get('content');
if (content) {
| lse ret = null; // empty!
this._sctc_arrangedObjects = ret ;
return ret ;
}.property().cacheable(),
// ..........................................................
// PRIVATE
//
/**
@private
Manually invalidate the arrangedObjects cache so that we can teardown
any existing value. We do it via an observer so that this will fire
immediately instead of waiting on some other component to get
arrangedObjects again.
*/
_sctc_invalidateArrangedObjects: function() {
this.propertyWillChange('arrangedObjects');
var ret = this._sctc_arrangedObjects;
if (ret) ret.destroy();
this._sctc_arrangedObjects = null;
this.propertyDidChange('arrangedObjects');
}.observes('content', 'treeItemIsExpandedKey', 'treeItemChildrenKey', 'treeItemIsGrouped'),
_sctc_arrangedObjectsContentDidChange: function() {
this.updateSelectionAfterContentChange();
}.observes('*arrangedObjects.[]'),
/**
@private
Returns the first item in arrangeObjects that is not a group. This uses
a brute force approach right now; we assume you probably don't have a lot
of groups up front.
*/
firstSelectableObject: function() {
var objects = this.get('arrangedObjects'),
indexes, len, idx = 0;
if (!objects) return null; // fast track
indexes = objects.contentGroupIndexes(null, objects);
len = objects.get('length');
while(indexes.contains(idx) && (idx<len)) idx++;
return idx>=len ? null : objects.objectAt(idx);
}.property()
});
| ret = SC.TreeItemObserver.create({ item: content, delegate: this });
} e | conditional_block |
user.local.js | 'use strict';
const passport = require('passport');
//capitalize constructors as convention
const LocalStrategy = require('passport-local').Strategy;
const User = require('./user.model');
const SUCCESSFUL_LOGIN_MSG = 'Success!';
const INCORRECT_USERNAME_MSG = 'Incorrect Username or password';
const INCORRECT_PASSWORD_MSG = 'Incorrect Username or password';
passport.serializeUser((user, done) => {
done(null, user._id);
});
passport.deserializeUser(function(id, done) {
User.findById(id, done);
});
passport.use(new LocalStrategy ({
usernameField: 'email'
},
(email, password, done) => {
User.findOne({ email: email }, (err, user) => {
if (err) throw err;
if (user) {
user.authenticate(password, (err, valid) => {
if (err) throw err;
if (valid) {
done(null, user); | done();
}
});
})
); | } else {
done();
}
});
} else { | random_line_split |
user.local.js | 'use strict';
const passport = require('passport');
//capitalize constructors as convention
const LocalStrategy = require('passport-local').Strategy;
const User = require('./user.model');
const SUCCESSFUL_LOGIN_MSG = 'Success!';
const INCORRECT_USERNAME_MSG = 'Incorrect Username or password';
const INCORRECT_PASSWORD_MSG = 'Incorrect Username or password';
passport.serializeUser((user, done) => {
done(null, user._id);
});
passport.deserializeUser(function(id, done) {
User.findById(id, done);
});
passport.use(new LocalStrategy ({
usernameField: 'email'
},
(email, password, done) => {
User.findOne({ email: email }, (err, user) => {
if (err) throw err;
if (user) {
user.authenticate(password, (err, valid) => {
if (err) throw err;
if (valid) | else {
done();
}
});
} else {
done();
}
});
})
);
| {
done(null, user);
} | conditional_block |
glue.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
use app_units::Au;
use data::{NUM_THREADS, PerDocumentStyleData};
use env_logger;
use euclid::Size2D;
use gecko_bindings::bindings::{RawGeckoElementBorrowed, RawGeckoNodeBorrowed};
use gecko_bindings::bindings::{RawServoStyleSetBorrowed, RawServoStyleSetOwned, ServoNodeDataOwned};
use gecko_bindings::bindings::{RawServoStyleSetBorrowedMut, RawGeckoDocumentBorrowed};
use gecko_bindings::bindings::{RawServoStyleSheetBorrowed, ServoComputedValuesBorrowed};
use gecko_bindings::bindings::{RawServoStyleSheetStrong, ServoComputedValuesStrong};
use gecko_bindings::bindings::{ServoComputedValuesBorrowedOrNull, ServoDeclarationBlock};
use gecko_bindings::bindings::{ServoDeclarationBlockBorrowed, ServoDeclarationBlockStrong};
use gecko_bindings::bindings::{ThreadSafePrincipalHolder, ThreadSafeURIHolder, nsHTMLCSSStyleSheet};
use gecko_bindings::ptr::{GeckoArcPrincipal, GeckoArcURI};
use gecko_bindings::structs::{SheetParsingMode, nsIAtom};
use gecko_bindings::structs::ServoElementSnapshot;
use gecko_bindings::structs::nsRestyleHint;
use gecko_bindings::sugar::ownership::{FFIArcHelpers, HasArcFFI, HasBoxFFI};
use gecko_bindings::sugar::ownership::{HasFFI, HasSimpleFFI, Strong};
use gecko_string_cache::Atom;
use snapshot::GeckoElementSnapshot;
use std::mem::transmute;
use std::ptr;
use std::slice;
use std::str::from_utf8_unchecked;
use std::sync::{Arc, Mutex};
use std::sync::atomic::{AtomicBool, AtomicPtr, Ordering};
use style::arc_ptr_eq;
use style::context::{LocalStyleContextCreationInfo, ReflowGoal, SharedStyleContext};
use style::dom::{TDocument, TElement, TNode};
use style::error_reporting::StdoutErrorReporter;
use style::gecko_selector_impl::{GeckoSelectorImpl, PseudoElement};
use style::parallel;
use style::parser::ParserContextExtraData;
use style::properties::{ComputedValues, PropertyDeclarationBlock, parse_one_declaration};
use style::selector_impl::PseudoElementCascadeType;
use style::sequential;
use style::stylesheets::{Origin, Stylesheet};
use style::timer::Timer;
use traversal::RecalcStyleOnly;
use url::Url;
use wrapper::{DUMMY_BASE_URL, GeckoDocument, GeckoElement, GeckoNode, NonOpaqueStyleData};
/*
* For Gecko->Servo function calls, we need to redeclare the same signature that was declared in
* the C header in Gecko. In order to catch accidental mismatches, we run rust-bindgen against
* those signatures as well, giving us a second declaration of all the Servo_* functions in this
* crate. If there's a mismatch, LLVM will assert and abort, which is a rather awful thing to
* depend on but good enough for our purposes.
*/
#[no_mangle]
pub extern "C" fn Servo_Initialize() -> () {
// Enable standard Rust logging.
//
// See https://doc.rust-lang.org/log/env_logger/index.html for instructions.
env_logger::init().unwrap();
// Allocate our default computed values.
unsafe { ComputedValues::initialize(); }
}
#[no_mangle]
pub extern "C" fn Servo_Shutdown() -> () {
// Destroy our default computed values.
unsafe { ComputedValues::shutdown(); }
}
fn restyle_subtree(node: GeckoNode, raw_data: RawServoStyleSetBorrowedMut) {
debug_assert!(node.is_element() || node.is_text_node());
// Force the creation of our lazily-constructed initial computed values on
// the main thread, since it's not safe to call elsewhere.
//
// FIXME(bholley): this should move into Servo_Initialize as soon as we get
// rid of the HackilyFindSomeDeviceContext stuff that happens during
// initial_values computation, since that stuff needs to be called further
// along in startup than the sensible place to call Servo_Initialize.
ComputedValues::initial_values();
// The stylist consumes stylesheets lazily.
let per_doc_data = PerDocumentStyleData::from_ffi_mut(raw_data);
per_doc_data.flush_stylesheets();
let local_context_data =
LocalStyleContextCreationInfo::new(per_doc_data.new_animations_sender.clone());
let shared_style_context = SharedStyleContext {
viewport_size: Size2D::new(Au(0), Au(0)),
screen_size_changed: false,
generation: 0,
goal: ReflowGoal::ForScriptQuery,
stylist: per_doc_data.stylist.clone(),
running_animations: per_doc_data.running_animations.clone(),
expired_animations: per_doc_data.expired_animations.clone(),
error_reporter: Box::new(StdoutErrorReporter),
local_context_creation_data: Mutex::new(local_context_data),
timer: Timer::new(),
};
// We ensure this is true before calling Servo_RestyleSubtree()
debug_assert!(node.is_dirty() || node.has_dirty_descendants());
if per_doc_data.num_threads == 1 || per_doc_data.work_queue.is_none() {
sequential::traverse_dom::<GeckoNode, RecalcStyleOnly>(node, &shared_style_context);
} else {
parallel::traverse_dom::<GeckoNode, RecalcStyleOnly>(node, &shared_style_context,
per_doc_data.work_queue.as_mut().unwrap());
}
}
#[no_mangle]
pub extern "C" fn Servo_RestyleSubtree(node: RawGeckoNodeBorrowed,
raw_data: RawServoStyleSetBorrowedMut) -> () {
let node = GeckoNode(node);
restyle_subtree(node, raw_data);
}
#[no_mangle]
pub extern "C" fn Servo_RestyleDocument(doc: RawGeckoDocumentBorrowed, raw_data: RawServoStyleSetBorrowedMut) -> () {
let document = GeckoDocument(doc);
let node = match document.root_node() {
Some(x) => x,
None => return,
};
restyle_subtree(node, raw_data);
}
#[no_mangle]
pub extern "C" fn Servo_StyleWorkerThreadCount() -> u32 {
*NUM_THREADS as u32
}
#[no_mangle]
pub extern "C" fn Servo_NodeData_Drop(data: ServoNodeDataOwned) -> () {
let _ = data.into_box::<NonOpaqueStyleData>();
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_FromUTF8Bytes(bytes: *const u8,
length: u32,
mode: SheetParsingMode,
base_bytes: *const u8,
base_length: u32,
base: *mut ThreadSafeURIHolder,
referrer: *mut ThreadSafeURIHolder,
principal: *mut ThreadSafePrincipalHolder)
-> RawServoStyleSheetStrong {
let input = unsafe { from_utf8_unchecked(slice::from_raw_parts(bytes, length as usize)) };
let origin = match mode {
SheetParsingMode::eAuthorSheetFeatures => Origin::Author,
SheetParsingMode::eUserSheetFeatures => Origin::User,
SheetParsingMode::eAgentSheetFeatures => Origin::UserAgent,
};
let base_str = unsafe { from_utf8_unchecked(slice::from_raw_parts(base_bytes, base_length as usize)) };
let url = Url::parse(base_str).unwrap();
let extra_data = ParserContextExtraData {
base: Some(GeckoArcURI::new(base)),
referrer: Some(GeckoArcURI::new(referrer)),
principal: Some(GeckoArcPrincipal::new(principal)),
};
let sheet = Arc::new(Stylesheet::from_str(input, url, origin, Box::new(StdoutErrorReporter),
extra_data));
unsafe {
transmute(sheet)
}
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_AppendStyleSheet(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets.push(sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_PrependStyleSheet(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets.insert(0, sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_InsertStyleSheetBefore(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed,
raw_reference: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
let reference = HasArcFFI::as_arc(&raw_reference);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
let index = data.stylesheets.iter().position(|x| arc_ptr_eq(x, reference)).unwrap();
data.stylesheets.insert(index, sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_RemoveStyleSheet(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_HasRules(raw_sheet: RawServoStyleSheetBorrowed) -> bool {
!Stylesheet::as_arc(&raw_sheet).rules.is_empty()
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_AddRef(sheet: RawServoStyleSheetBorrowed) -> () {
unsafe { Stylesheet::addref(sheet) };
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_Release(sheet: RawServoStyleSheetBorrowed) -> () {
unsafe { Stylesheet::release(sheet) };
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Get(node: RawGeckoNodeBorrowed)
-> ServoComputedValuesStrong {
let node = GeckoNode(node);
let arc_cv = match node.borrow_data().map_or(None, |data| data.style.clone()) {
Some(style) => style,
None => {
// FIXME(bholley): This case subverts the intended semantics of this
// function, and exists only to make stylo builds more robust corner-
// cases where Gecko wants the style for a node that Servo never
// traversed. We should remove this as soon as possible.
error!("stylo: encountered unstyled node, substituting default values.");
Arc::new(ComputedValues::initial_values().clone())
},
};
arc_cv.into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_GetForAnonymousBox(parent_style_or_null: ServoComputedValuesBorrowedOrNull,
pseudo_tag: *mut nsIAtom,
raw_data: RawServoStyleSetBorrowedMut)
-> ServoComputedValuesStrong {
// The stylist consumes stylesheets lazily.
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
data.flush_stylesheets();
let atom = Atom::from(pseudo_tag);
let pseudo = PseudoElement::from_atom_unchecked(atom, /* anon_box = */ true);
let maybe_parent = parent_style_or_null.as_arc_opt();
let new_computed = data.stylist.precomputed_values_for_pseudo(&pseudo, maybe_parent);
new_computed.map_or(Strong::null(), |c| c.into_strong())
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_GetForPseudoElement(parent_style: ServoComputedValuesBorrowed,
match_element: RawGeckoElementBorrowed,
pseudo_tag: *mut nsIAtom,
raw_data: RawServoStyleSetBorrowedMut,
is_probe: bool)
-> ServoComputedValuesStrong {
debug_assert!(!(match_element as *const _).is_null());
let parent_or_null = || {
if is_probe {
Strong::null()
} else {
ComputedValues::as_arc(&parent_style).clone().into_strong()
}
};
let atom = Atom::from(pseudo_tag);
let pseudo = PseudoElement::from_atom_unchecked(atom, /* anon_box = */ false);
// The stylist consumes stylesheets lazily.
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
data.flush_stylesheets();
let element = GeckoElement(match_element);
match GeckoSelectorImpl::pseudo_element_cascade_type(&pseudo) {
PseudoElementCascadeType::Eager => {
let node = element.as_node();
let maybe_computed = node.borrow_data()
.and_then(|data| {
data.per_pseudo.get(&pseudo).map(|c| c.clone())
});
maybe_computed.map_or_else(parent_or_null, FFIArcHelpers::into_strong)
}
PseudoElementCascadeType::Lazy => {
let parent = ComputedValues::as_arc(&parent_style);
data.stylist
.lazily_compute_pseudo_element_style(&element, &pseudo, parent)
.map_or_else(parent_or_null, FFIArcHelpers::into_strong)
}
PseudoElementCascadeType::Precomputed => {
unreachable!("Anonymous pseudo found in \
Servo_GetComputedValuesForPseudoElement");
}
}
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Inherit(parent_style: ServoComputedValuesBorrowedOrNull)
-> ServoComputedValuesStrong {
let style = if parent_style.is_null() {
Arc::new(ComputedValues::initial_values().clone())
} else {
ComputedValues::inherit_from(parent_style.as_arc())
};
style.into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_AddRef(ptr: ServoComputedValuesBorrowed) -> () {
unsafe { ComputedValues::addref(ptr) };
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Release(ptr: ServoComputedValuesBorrowed) -> () {
unsafe { ComputedValues::release(ptr) };
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_Init() -> RawServoStyleSetOwned {
let data = Box::new(PerDocumentStyleData::new());
data.into_ffi()
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_Drop(data: RawServoStyleSetOwned) -> () |
pub struct GeckoDeclarationBlock {
pub declarations: Option<Arc<PropertyDeclarationBlock>>,
// XXX The following two fields are made atomic to work around the
// ownership system so that they can be changed inside a shared
// instance. It wouldn't provide safety as Rust usually promises,
// but it is fine as far as we only access them in a single thread.
// If we need to access them in different threads, we would need
// to redesign how it works with MiscContainer in Gecko side.
pub cache: AtomicPtr<nsHTMLCSSStyleSheet>,
pub immutable: AtomicBool,
}
unsafe impl HasFFI for GeckoDeclarationBlock {
type FFIType = ServoDeclarationBlock;
}
unsafe impl HasArcFFI for GeckoDeclarationBlock {}
#[no_mangle]
pub extern "C" fn Servo_ParseStyleAttribute(bytes: *const u8, length: u32,
cache: *mut nsHTMLCSSStyleSheet)
-> ServoDeclarationBlockStrong {
let value = unsafe { from_utf8_unchecked(slice::from_raw_parts(bytes, length as usize)) };
Arc::new(GeckoDeclarationBlock {
declarations: GeckoElement::parse_style_attribute(value).map(Arc::new),
cache: AtomicPtr::new(cache),
immutable: AtomicBool::new(false),
}).into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_AddRef(declarations: ServoDeclarationBlockBorrowed) {
unsafe { GeckoDeclarationBlock::addref(declarations) };
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_Release(declarations: ServoDeclarationBlockBorrowed) {
unsafe { GeckoDeclarationBlock::release(declarations) };
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_GetCache(declarations: ServoDeclarationBlockBorrowed)
-> *mut nsHTMLCSSStyleSheet {
GeckoDeclarationBlock::as_arc(&declarations).cache.load(Ordering::Relaxed)
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetImmutable(declarations: ServoDeclarationBlockBorrowed) {
GeckoDeclarationBlock::as_arc(&declarations).immutable.store(true, Ordering::Relaxed)
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_ClearCachePointer(declarations: ServoDeclarationBlockBorrowed) {
GeckoDeclarationBlock::as_arc(&declarations).cache.store(ptr::null_mut(), Ordering::Relaxed)
}
#[no_mangle]
pub extern "C" fn Servo_CSSSupports(property: *const u8, property_length: u32,
value: *const u8, value_length: u32) -> bool {
let property = unsafe { from_utf8_unchecked(slice::from_raw_parts(property, property_length as usize)) };
let value = unsafe { from_utf8_unchecked(slice::from_raw_parts(value, value_length as usize)) };
let base_url = &*DUMMY_BASE_URL;
let extra_data = ParserContextExtraData::default();
match parse_one_declaration(&property, &value, &base_url, Box::new(StdoutErrorReporter), extra_data) {
Ok(decls) => !decls.is_empty(),
Err(()) => false,
}
}
#[no_mangle]
pub extern "C" fn Servo_ComputeRestyleHint(element: RawGeckoElementBorrowed,
snapshot: *mut ServoElementSnapshot,
raw_data: RawServoStyleSetBorrowed) -> nsRestyleHint {
let per_doc_data = PerDocumentStyleData::from_ffi(raw_data);
let snapshot = unsafe { GeckoElementSnapshot::from_raw(snapshot) };
let element = GeckoElement(element);
// NB: This involves an FFI call, we can get rid of it easily if needed.
let current_state = element.get_state();
let hint = per_doc_data.stylist
.compute_restyle_hint(&element, &snapshot,
current_state);
// NB: Binary representations match.
unsafe { transmute(hint.bits() as u32) }
}
| {
let _ = data.into_box::<PerDocumentStyleData>();
} | identifier_body |
glue.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
use app_units::Au;
use data::{NUM_THREADS, PerDocumentStyleData};
use env_logger;
use euclid::Size2D;
use gecko_bindings::bindings::{RawGeckoElementBorrowed, RawGeckoNodeBorrowed};
use gecko_bindings::bindings::{RawServoStyleSetBorrowed, RawServoStyleSetOwned, ServoNodeDataOwned};
use gecko_bindings::bindings::{RawServoStyleSetBorrowedMut, RawGeckoDocumentBorrowed};
use gecko_bindings::bindings::{RawServoStyleSheetBorrowed, ServoComputedValuesBorrowed};
use gecko_bindings::bindings::{RawServoStyleSheetStrong, ServoComputedValuesStrong};
use gecko_bindings::bindings::{ServoComputedValuesBorrowedOrNull, ServoDeclarationBlock};
use gecko_bindings::bindings::{ServoDeclarationBlockBorrowed, ServoDeclarationBlockStrong};
use gecko_bindings::bindings::{ThreadSafePrincipalHolder, ThreadSafeURIHolder, nsHTMLCSSStyleSheet};
use gecko_bindings::ptr::{GeckoArcPrincipal, GeckoArcURI};
use gecko_bindings::structs::{SheetParsingMode, nsIAtom};
use gecko_bindings::structs::ServoElementSnapshot;
use gecko_bindings::structs::nsRestyleHint;
use gecko_bindings::sugar::ownership::{FFIArcHelpers, HasArcFFI, HasBoxFFI};
use gecko_bindings::sugar::ownership::{HasFFI, HasSimpleFFI, Strong};
use gecko_string_cache::Atom;
use snapshot::GeckoElementSnapshot;
use std::mem::transmute;
use std::ptr;
use std::slice;
use std::str::from_utf8_unchecked;
use std::sync::{Arc, Mutex};
use std::sync::atomic::{AtomicBool, AtomicPtr, Ordering};
use style::arc_ptr_eq;
use style::context::{LocalStyleContextCreationInfo, ReflowGoal, SharedStyleContext};
use style::dom::{TDocument, TElement, TNode};
use style::error_reporting::StdoutErrorReporter;
use style::gecko_selector_impl::{GeckoSelectorImpl, PseudoElement};
use style::parallel;
use style::parser::ParserContextExtraData;
use style::properties::{ComputedValues, PropertyDeclarationBlock, parse_one_declaration};
use style::selector_impl::PseudoElementCascadeType;
use style::sequential;
use style::stylesheets::{Origin, Stylesheet};
use style::timer::Timer;
use traversal::RecalcStyleOnly;
use url::Url;
use wrapper::{DUMMY_BASE_URL, GeckoDocument, GeckoElement, GeckoNode, NonOpaqueStyleData};
/*
* For Gecko->Servo function calls, we need to redeclare the same signature that was declared in
* the C header in Gecko. In order to catch accidental mismatches, we run rust-bindgen against
* those signatures as well, giving us a second declaration of all the Servo_* functions in this
* crate. If there's a mismatch, LLVM will assert and abort, which is a rather awful thing to
* depend on but good enough for our purposes.
*/
#[no_mangle]
pub extern "C" fn Servo_Initialize() -> () {
// Enable standard Rust logging.
//
// See https://doc.rust-lang.org/log/env_logger/index.html for instructions.
env_logger::init().unwrap();
// Allocate our default computed values.
unsafe { ComputedValues::initialize(); }
}
#[no_mangle]
pub extern "C" fn Servo_Shutdown() -> () {
// Destroy our default computed values.
unsafe { ComputedValues::shutdown(); }
}
fn restyle_subtree(node: GeckoNode, raw_data: RawServoStyleSetBorrowedMut) {
debug_assert!(node.is_element() || node.is_text_node());
// Force the creation of our lazily-constructed initial computed values on
// the main thread, since it's not safe to call elsewhere.
//
// FIXME(bholley): this should move into Servo_Initialize as soon as we get
// rid of the HackilyFindSomeDeviceContext stuff that happens during
// initial_values computation, since that stuff needs to be called further
// along in startup than the sensible place to call Servo_Initialize.
ComputedValues::initial_values();
// The stylist consumes stylesheets lazily.
let per_doc_data = PerDocumentStyleData::from_ffi_mut(raw_data);
per_doc_data.flush_stylesheets();
let local_context_data =
LocalStyleContextCreationInfo::new(per_doc_data.new_animations_sender.clone());
let shared_style_context = SharedStyleContext {
viewport_size: Size2D::new(Au(0), Au(0)),
screen_size_changed: false,
generation: 0,
goal: ReflowGoal::ForScriptQuery,
stylist: per_doc_data.stylist.clone(),
running_animations: per_doc_data.running_animations.clone(),
expired_animations: per_doc_data.expired_animations.clone(),
error_reporter: Box::new(StdoutErrorReporter),
local_context_creation_data: Mutex::new(local_context_data),
timer: Timer::new(),
};
// We ensure this is true before calling Servo_RestyleSubtree()
debug_assert!(node.is_dirty() || node.has_dirty_descendants());
if per_doc_data.num_threads == 1 || per_doc_data.work_queue.is_none() {
sequential::traverse_dom::<GeckoNode, RecalcStyleOnly>(node, &shared_style_context);
} else {
parallel::traverse_dom::<GeckoNode, RecalcStyleOnly>(node, &shared_style_context,
per_doc_data.work_queue.as_mut().unwrap());
}
}
#[no_mangle]
pub extern "C" fn Servo_RestyleSubtree(node: RawGeckoNodeBorrowed,
raw_data: RawServoStyleSetBorrowedMut) -> () {
let node = GeckoNode(node);
restyle_subtree(node, raw_data);
}
#[no_mangle]
pub extern "C" fn Servo_RestyleDocument(doc: RawGeckoDocumentBorrowed, raw_data: RawServoStyleSetBorrowedMut) -> () {
let document = GeckoDocument(doc);
let node = match document.root_node() {
Some(x) => x,
None => return,
};
restyle_subtree(node, raw_data);
}
#[no_mangle]
pub extern "C" fn Servo_StyleWorkerThreadCount() -> u32 {
*NUM_THREADS as u32
}
#[no_mangle]
pub extern "C" fn Servo_NodeData_Drop(data: ServoNodeDataOwned) -> () {
let _ = data.into_box::<NonOpaqueStyleData>();
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_FromUTF8Bytes(bytes: *const u8,
length: u32,
mode: SheetParsingMode,
base_bytes: *const u8,
base_length: u32,
base: *mut ThreadSafeURIHolder,
referrer: *mut ThreadSafeURIHolder,
principal: *mut ThreadSafePrincipalHolder)
-> RawServoStyleSheetStrong {
let input = unsafe { from_utf8_unchecked(slice::from_raw_parts(bytes, length as usize)) };
let origin = match mode {
SheetParsingMode::eAuthorSheetFeatures => Origin::Author,
SheetParsingMode::eUserSheetFeatures => Origin::User,
SheetParsingMode::eAgentSheetFeatures => Origin::UserAgent,
};
let base_str = unsafe { from_utf8_unchecked(slice::from_raw_parts(base_bytes, base_length as usize)) };
let url = Url::parse(base_str).unwrap();
let extra_data = ParserContextExtraData {
base: Some(GeckoArcURI::new(base)),
referrer: Some(GeckoArcURI::new(referrer)),
principal: Some(GeckoArcPrincipal::new(principal)),
};
let sheet = Arc::new(Stylesheet::from_str(input, url, origin, Box::new(StdoutErrorReporter),
extra_data));
unsafe {
transmute(sheet)
}
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_AppendStyleSheet(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets.push(sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_PrependStyleSheet(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets.insert(0, sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_InsertStyleSheetBefore(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed,
raw_reference: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
let reference = HasArcFFI::as_arc(&raw_reference);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
let index = data.stylesheets.iter().position(|x| arc_ptr_eq(x, reference)).unwrap();
data.stylesheets.insert(index, sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_RemoveStyleSheet(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_HasRules(raw_sheet: RawServoStyleSheetBorrowed) -> bool {
!Stylesheet::as_arc(&raw_sheet).rules.is_empty()
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_AddRef(sheet: RawServoStyleSheetBorrowed) -> () {
unsafe { Stylesheet::addref(sheet) };
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_Release(sheet: RawServoStyleSheetBorrowed) -> () {
unsafe { Stylesheet::release(sheet) };
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Get(node: RawGeckoNodeBorrowed)
-> ServoComputedValuesStrong {
let node = GeckoNode(node);
let arc_cv = match node.borrow_data().map_or(None, |data| data.style.clone()) {
Some(style) => style,
None => {
// FIXME(bholley): This case subverts the intended semantics of this
// function, and exists only to make stylo builds more robust corner-
// cases where Gecko wants the style for a node that Servo never
// traversed. We should remove this as soon as possible.
error!("stylo: encountered unstyled node, substituting default values.");
Arc::new(ComputedValues::initial_values().clone())
},
};
arc_cv.into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_GetForAnonymousBox(parent_style_or_null: ServoComputedValuesBorrowedOrNull,
pseudo_tag: *mut nsIAtom,
raw_data: RawServoStyleSetBorrowedMut)
-> ServoComputedValuesStrong {
// The stylist consumes stylesheets lazily.
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
data.flush_stylesheets();
let atom = Atom::from(pseudo_tag);
let pseudo = PseudoElement::from_atom_unchecked(atom, /* anon_box = */ true);
let maybe_parent = parent_style_or_null.as_arc_opt();
let new_computed = data.stylist.precomputed_values_for_pseudo(&pseudo, maybe_parent);
new_computed.map_or(Strong::null(), |c| c.into_strong())
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_GetForPseudoElement(parent_style: ServoComputedValuesBorrowed,
match_element: RawGeckoElementBorrowed,
pseudo_tag: *mut nsIAtom,
raw_data: RawServoStyleSetBorrowedMut,
is_probe: bool)
-> ServoComputedValuesStrong {
debug_assert!(!(match_element as *const _).is_null());
let parent_or_null = || {
if is_probe {
Strong::null()
} else {
ComputedValues::as_arc(&parent_style).clone().into_strong()
}
};
let atom = Atom::from(pseudo_tag);
let pseudo = PseudoElement::from_atom_unchecked(atom, /* anon_box = */ false);
// The stylist consumes stylesheets lazily.
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
data.flush_stylesheets();
let element = GeckoElement(match_element);
match GeckoSelectorImpl::pseudo_element_cascade_type(&pseudo) {
PseudoElementCascadeType::Eager => {
let node = element.as_node();
let maybe_computed = node.borrow_data()
.and_then(|data| {
data.per_pseudo.get(&pseudo).map(|c| c.clone())
});
maybe_computed.map_or_else(parent_or_null, FFIArcHelpers::into_strong)
}
PseudoElementCascadeType::Lazy => {
let parent = ComputedValues::as_arc(&parent_style);
data.stylist
.lazily_compute_pseudo_element_style(&element, &pseudo, parent)
.map_or_else(parent_or_null, FFIArcHelpers::into_strong)
}
PseudoElementCascadeType::Precomputed => {
unreachable!("Anonymous pseudo found in \
Servo_GetComputedValuesForPseudoElement");
}
}
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Inherit(parent_style: ServoComputedValuesBorrowedOrNull)
-> ServoComputedValuesStrong {
let style = if parent_style.is_null() {
Arc::new(ComputedValues::initial_values().clone())
} else {
ComputedValues::inherit_from(parent_style.as_arc())
};
style.into_strong()
}
#[no_mangle]
pub extern "C" fn | (ptr: ServoComputedValuesBorrowed) -> () {
unsafe { ComputedValues::addref(ptr) };
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Release(ptr: ServoComputedValuesBorrowed) -> () {
unsafe { ComputedValues::release(ptr) };
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_Init() -> RawServoStyleSetOwned {
let data = Box::new(PerDocumentStyleData::new());
data.into_ffi()
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_Drop(data: RawServoStyleSetOwned) -> () {
let _ = data.into_box::<PerDocumentStyleData>();
}
pub struct GeckoDeclarationBlock {
pub declarations: Option<Arc<PropertyDeclarationBlock>>,
// XXX The following two fields are made atomic to work around the
// ownership system so that they can be changed inside a shared
// instance. It wouldn't provide safety as Rust usually promises,
// but it is fine as far as we only access them in a single thread.
// If we need to access them in different threads, we would need
// to redesign how it works with MiscContainer in Gecko side.
pub cache: AtomicPtr<nsHTMLCSSStyleSheet>,
pub immutable: AtomicBool,
}
unsafe impl HasFFI for GeckoDeclarationBlock {
type FFIType = ServoDeclarationBlock;
}
unsafe impl HasArcFFI for GeckoDeclarationBlock {}
#[no_mangle]
pub extern "C" fn Servo_ParseStyleAttribute(bytes: *const u8, length: u32,
cache: *mut nsHTMLCSSStyleSheet)
-> ServoDeclarationBlockStrong {
let value = unsafe { from_utf8_unchecked(slice::from_raw_parts(bytes, length as usize)) };
Arc::new(GeckoDeclarationBlock {
declarations: GeckoElement::parse_style_attribute(value).map(Arc::new),
cache: AtomicPtr::new(cache),
immutable: AtomicBool::new(false),
}).into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_AddRef(declarations: ServoDeclarationBlockBorrowed) {
unsafe { GeckoDeclarationBlock::addref(declarations) };
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_Release(declarations: ServoDeclarationBlockBorrowed) {
unsafe { GeckoDeclarationBlock::release(declarations) };
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_GetCache(declarations: ServoDeclarationBlockBorrowed)
-> *mut nsHTMLCSSStyleSheet {
GeckoDeclarationBlock::as_arc(&declarations).cache.load(Ordering::Relaxed)
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetImmutable(declarations: ServoDeclarationBlockBorrowed) {
GeckoDeclarationBlock::as_arc(&declarations).immutable.store(true, Ordering::Relaxed)
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_ClearCachePointer(declarations: ServoDeclarationBlockBorrowed) {
GeckoDeclarationBlock::as_arc(&declarations).cache.store(ptr::null_mut(), Ordering::Relaxed)
}
#[no_mangle]
pub extern "C" fn Servo_CSSSupports(property: *const u8, property_length: u32,
value: *const u8, value_length: u32) -> bool {
let property = unsafe { from_utf8_unchecked(slice::from_raw_parts(property, property_length as usize)) };
let value = unsafe { from_utf8_unchecked(slice::from_raw_parts(value, value_length as usize)) };
let base_url = &*DUMMY_BASE_URL;
let extra_data = ParserContextExtraData::default();
match parse_one_declaration(&property, &value, &base_url, Box::new(StdoutErrorReporter), extra_data) {
Ok(decls) => !decls.is_empty(),
Err(()) => false,
}
}
#[no_mangle]
pub extern "C" fn Servo_ComputeRestyleHint(element: RawGeckoElementBorrowed,
snapshot: *mut ServoElementSnapshot,
raw_data: RawServoStyleSetBorrowed) -> nsRestyleHint {
let per_doc_data = PerDocumentStyleData::from_ffi(raw_data);
let snapshot = unsafe { GeckoElementSnapshot::from_raw(snapshot) };
let element = GeckoElement(element);
// NB: This involves an FFI call, we can get rid of it easily if needed.
let current_state = element.get_state();
let hint = per_doc_data.stylist
.compute_restyle_hint(&element, &snapshot,
current_state);
// NB: Binary representations match.
unsafe { transmute(hint.bits() as u32) }
}
| Servo_ComputedValues_AddRef | identifier_name |
glue.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
use app_units::Au;
use data::{NUM_THREADS, PerDocumentStyleData};
use env_logger;
use euclid::Size2D;
use gecko_bindings::bindings::{RawGeckoElementBorrowed, RawGeckoNodeBorrowed};
use gecko_bindings::bindings::{RawServoStyleSetBorrowed, RawServoStyleSetOwned, ServoNodeDataOwned};
use gecko_bindings::bindings::{RawServoStyleSetBorrowedMut, RawGeckoDocumentBorrowed};
use gecko_bindings::bindings::{RawServoStyleSheetBorrowed, ServoComputedValuesBorrowed};
use gecko_bindings::bindings::{RawServoStyleSheetStrong, ServoComputedValuesStrong};
use gecko_bindings::bindings::{ServoComputedValuesBorrowedOrNull, ServoDeclarationBlock};
use gecko_bindings::bindings::{ServoDeclarationBlockBorrowed, ServoDeclarationBlockStrong};
use gecko_bindings::bindings::{ThreadSafePrincipalHolder, ThreadSafeURIHolder, nsHTMLCSSStyleSheet};
use gecko_bindings::ptr::{GeckoArcPrincipal, GeckoArcURI};
use gecko_bindings::structs::{SheetParsingMode, nsIAtom};
use gecko_bindings::structs::ServoElementSnapshot;
use gecko_bindings::structs::nsRestyleHint;
use gecko_bindings::sugar::ownership::{FFIArcHelpers, HasArcFFI, HasBoxFFI};
use gecko_bindings::sugar::ownership::{HasFFI, HasSimpleFFI, Strong};
use gecko_string_cache::Atom;
use snapshot::GeckoElementSnapshot;
use std::mem::transmute;
use std::ptr;
use std::slice;
use std::str::from_utf8_unchecked;
use std::sync::{Arc, Mutex};
use std::sync::atomic::{AtomicBool, AtomicPtr, Ordering};
use style::arc_ptr_eq;
use style::context::{LocalStyleContextCreationInfo, ReflowGoal, SharedStyleContext};
use style::dom::{TDocument, TElement, TNode};
use style::error_reporting::StdoutErrorReporter;
use style::gecko_selector_impl::{GeckoSelectorImpl, PseudoElement};
use style::parallel;
use style::parser::ParserContextExtraData;
use style::properties::{ComputedValues, PropertyDeclarationBlock, parse_one_declaration};
use style::selector_impl::PseudoElementCascadeType;
use style::sequential;
use style::stylesheets::{Origin, Stylesheet};
use style::timer::Timer;
use traversal::RecalcStyleOnly;
use url::Url;
use wrapper::{DUMMY_BASE_URL, GeckoDocument, GeckoElement, GeckoNode, NonOpaqueStyleData};
/*
* For Gecko->Servo function calls, we need to redeclare the same signature that was declared in
* the C header in Gecko. In order to catch accidental mismatches, we run rust-bindgen against
* those signatures as well, giving us a second declaration of all the Servo_* functions in this
* crate. If there's a mismatch, LLVM will assert and abort, which is a rather awful thing to
* depend on but good enough for our purposes.
*/
#[no_mangle]
pub extern "C" fn Servo_Initialize() -> () {
// Enable standard Rust logging.
//
// See https://doc.rust-lang.org/log/env_logger/index.html for instructions.
env_logger::init().unwrap();
// Allocate our default computed values.
unsafe { ComputedValues::initialize(); }
}
#[no_mangle]
pub extern "C" fn Servo_Shutdown() -> () {
// Destroy our default computed values.
unsafe { ComputedValues::shutdown(); }
}
fn restyle_subtree(node: GeckoNode, raw_data: RawServoStyleSetBorrowedMut) {
debug_assert!(node.is_element() || node.is_text_node());
// Force the creation of our lazily-constructed initial computed values on
// the main thread, since it's not safe to call elsewhere.
//
// FIXME(bholley): this should move into Servo_Initialize as soon as we get
// rid of the HackilyFindSomeDeviceContext stuff that happens during
// initial_values computation, since that stuff needs to be called further
// along in startup than the sensible place to call Servo_Initialize.
ComputedValues::initial_values();
// The stylist consumes stylesheets lazily.
let per_doc_data = PerDocumentStyleData::from_ffi_mut(raw_data);
per_doc_data.flush_stylesheets();
let local_context_data =
LocalStyleContextCreationInfo::new(per_doc_data.new_animations_sender.clone());
let shared_style_context = SharedStyleContext {
viewport_size: Size2D::new(Au(0), Au(0)),
screen_size_changed: false,
generation: 0,
goal: ReflowGoal::ForScriptQuery,
stylist: per_doc_data.stylist.clone(),
running_animations: per_doc_data.running_animations.clone(),
expired_animations: per_doc_data.expired_animations.clone(),
error_reporter: Box::new(StdoutErrorReporter),
local_context_creation_data: Mutex::new(local_context_data),
timer: Timer::new(),
};
// We ensure this is true before calling Servo_RestyleSubtree()
debug_assert!(node.is_dirty() || node.has_dirty_descendants());
if per_doc_data.num_threads == 1 || per_doc_data.work_queue.is_none() {
sequential::traverse_dom::<GeckoNode, RecalcStyleOnly>(node, &shared_style_context);
} else {
parallel::traverse_dom::<GeckoNode, RecalcStyleOnly>(node, &shared_style_context,
per_doc_data.work_queue.as_mut().unwrap());
}
}
#[no_mangle]
pub extern "C" fn Servo_RestyleSubtree(node: RawGeckoNodeBorrowed,
raw_data: RawServoStyleSetBorrowedMut) -> () {
let node = GeckoNode(node);
restyle_subtree(node, raw_data);
}
#[no_mangle]
pub extern "C" fn Servo_RestyleDocument(doc: RawGeckoDocumentBorrowed, raw_data: RawServoStyleSetBorrowedMut) -> () {
let document = GeckoDocument(doc);
let node = match document.root_node() {
Some(x) => x,
None => return,
};
restyle_subtree(node, raw_data);
}
#[no_mangle]
pub extern "C" fn Servo_StyleWorkerThreadCount() -> u32 {
*NUM_THREADS as u32
}
#[no_mangle]
pub extern "C" fn Servo_NodeData_Drop(data: ServoNodeDataOwned) -> () {
let _ = data.into_box::<NonOpaqueStyleData>();
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_FromUTF8Bytes(bytes: *const u8,
length: u32,
mode: SheetParsingMode,
base_bytes: *const u8,
base_length: u32,
base: *mut ThreadSafeURIHolder,
referrer: *mut ThreadSafeURIHolder,
principal: *mut ThreadSafePrincipalHolder)
-> RawServoStyleSheetStrong {
let input = unsafe { from_utf8_unchecked(slice::from_raw_parts(bytes, length as usize)) };
let origin = match mode {
SheetParsingMode::eAuthorSheetFeatures => Origin::Author,
SheetParsingMode::eUserSheetFeatures => Origin::User,
SheetParsingMode::eAgentSheetFeatures => Origin::UserAgent,
};
let base_str = unsafe { from_utf8_unchecked(slice::from_raw_parts(base_bytes, base_length as usize)) };
let url = Url::parse(base_str).unwrap();
let extra_data = ParserContextExtraData {
base: Some(GeckoArcURI::new(base)),
referrer: Some(GeckoArcURI::new(referrer)),
principal: Some(GeckoArcPrincipal::new(principal)),
};
let sheet = Arc::new(Stylesheet::from_str(input, url, origin, Box::new(StdoutErrorReporter),
extra_data));
unsafe {
transmute(sheet)
}
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_AppendStyleSheet(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets.push(sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_PrependStyleSheet(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets.insert(0, sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_InsertStyleSheetBefore(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed,
raw_reference: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
let reference = HasArcFFI::as_arc(&raw_reference);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
let index = data.stylesheets.iter().position(|x| arc_ptr_eq(x, reference)).unwrap();
data.stylesheets.insert(index, sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_RemoveStyleSheet(raw_data: RawServoStyleSetBorrowedMut,
raw_sheet: RawServoStyleSheetBorrowed) {
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_HasRules(raw_sheet: RawServoStyleSheetBorrowed) -> bool {
!Stylesheet::as_arc(&raw_sheet).rules.is_empty()
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_AddRef(sheet: RawServoStyleSheetBorrowed) -> () {
unsafe { Stylesheet::addref(sheet) };
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_Release(sheet: RawServoStyleSheetBorrowed) -> () {
unsafe { Stylesheet::release(sheet) };
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Get(node: RawGeckoNodeBorrowed)
-> ServoComputedValuesStrong {
let node = GeckoNode(node);
let arc_cv = match node.borrow_data().map_or(None, |data| data.style.clone()) {
Some(style) => style,
None => {
// FIXME(bholley): This case subverts the intended semantics of this
// function, and exists only to make stylo builds more robust corner-
// cases where Gecko wants the style for a node that Servo never
// traversed. We should remove this as soon as possible.
error!("stylo: encountered unstyled node, substituting default values.");
Arc::new(ComputedValues::initial_values().clone())
},
};
arc_cv.into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_GetForAnonymousBox(parent_style_or_null: ServoComputedValuesBorrowedOrNull,
pseudo_tag: *mut nsIAtom,
raw_data: RawServoStyleSetBorrowedMut)
-> ServoComputedValuesStrong {
// The stylist consumes stylesheets lazily.
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
data.flush_stylesheets();
let atom = Atom::from(pseudo_tag);
let pseudo = PseudoElement::from_atom_unchecked(atom, /* anon_box = */ true);
let maybe_parent = parent_style_or_null.as_arc_opt();
let new_computed = data.stylist.precomputed_values_for_pseudo(&pseudo, maybe_parent);
new_computed.map_or(Strong::null(), |c| c.into_strong())
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_GetForPseudoElement(parent_style: ServoComputedValuesBorrowed,
match_element: RawGeckoElementBorrowed,
pseudo_tag: *mut nsIAtom,
raw_data: RawServoStyleSetBorrowedMut,
is_probe: bool)
-> ServoComputedValuesStrong {
debug_assert!(!(match_element as *const _).is_null());
let parent_or_null = || {
if is_probe {
Strong::null()
} else {
ComputedValues::as_arc(&parent_style).clone().into_strong()
}
};
let atom = Atom::from(pseudo_tag);
let pseudo = PseudoElement::from_atom_unchecked(atom, /* anon_box = */ false);
// The stylist consumes stylesheets lazily.
let data = PerDocumentStyleData::from_ffi_mut(raw_data);
data.flush_stylesheets();
let element = GeckoElement(match_element);
match GeckoSelectorImpl::pseudo_element_cascade_type(&pseudo) {
PseudoElementCascadeType::Eager => {
let node = element.as_node();
let maybe_computed = node.borrow_data()
.and_then(|data| {
data.per_pseudo.get(&pseudo).map(|c| c.clone())
});
maybe_computed.map_or_else(parent_or_null, FFIArcHelpers::into_strong)
}
PseudoElementCascadeType::Lazy => {
let parent = ComputedValues::as_arc(&parent_style);
data.stylist
.lazily_compute_pseudo_element_style(&element, &pseudo, parent)
.map_or_else(parent_or_null, FFIArcHelpers::into_strong)
}
PseudoElementCascadeType::Precomputed => {
unreachable!("Anonymous pseudo found in \
Servo_GetComputedValuesForPseudoElement");
}
}
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Inherit(parent_style: ServoComputedValuesBorrowedOrNull)
-> ServoComputedValuesStrong {
let style = if parent_style.is_null() {
Arc::new(ComputedValues::initial_values().clone())
} else {
ComputedValues::inherit_from(parent_style.as_arc())
};
style.into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_AddRef(ptr: ServoComputedValuesBorrowed) -> () {
unsafe { ComputedValues::addref(ptr) };
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Release(ptr: ServoComputedValuesBorrowed) -> () {
unsafe { ComputedValues::release(ptr) };
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_Init() -> RawServoStyleSetOwned {
let data = Box::new(PerDocumentStyleData::new());
data.into_ffi()
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_Drop(data: RawServoStyleSetOwned) -> () {
let _ = data.into_box::<PerDocumentStyleData>();
}
pub struct GeckoDeclarationBlock {
pub declarations: Option<Arc<PropertyDeclarationBlock>>,
// XXX The following two fields are made atomic to work around the
// ownership system so that they can be changed inside a shared
// instance. It wouldn't provide safety as Rust usually promises,
// but it is fine as far as we only access them in a single thread.
// If we need to access them in different threads, we would need
// to redesign how it works with MiscContainer in Gecko side.
pub cache: AtomicPtr<nsHTMLCSSStyleSheet>,
pub immutable: AtomicBool,
}
unsafe impl HasFFI for GeckoDeclarationBlock {
type FFIType = ServoDeclarationBlock;
}
unsafe impl HasArcFFI for GeckoDeclarationBlock {}
#[no_mangle]
pub extern "C" fn Servo_ParseStyleAttribute(bytes: *const u8, length: u32,
cache: *mut nsHTMLCSSStyleSheet)
-> ServoDeclarationBlockStrong {
let value = unsafe { from_utf8_unchecked(slice::from_raw_parts(bytes, length as usize)) };
Arc::new(GeckoDeclarationBlock {
declarations: GeckoElement::parse_style_attribute(value).map(Arc::new),
cache: AtomicPtr::new(cache),
immutable: AtomicBool::new(false),
}).into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_AddRef(declarations: ServoDeclarationBlockBorrowed) {
unsafe { GeckoDeclarationBlock::addref(declarations) };
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_Release(declarations: ServoDeclarationBlockBorrowed) {
unsafe { GeckoDeclarationBlock::release(declarations) };
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_GetCache(declarations: ServoDeclarationBlockBorrowed)
-> *mut nsHTMLCSSStyleSheet {
GeckoDeclarationBlock::as_arc(&declarations).cache.load(Ordering::Relaxed)
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetImmutable(declarations: ServoDeclarationBlockBorrowed) {
GeckoDeclarationBlock::as_arc(&declarations).immutable.store(true, Ordering::Relaxed)
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_ClearCachePointer(declarations: ServoDeclarationBlockBorrowed) {
GeckoDeclarationBlock::as_arc(&declarations).cache.store(ptr::null_mut(), Ordering::Relaxed)
}
#[no_mangle]
pub extern "C" fn Servo_CSSSupports(property: *const u8, property_length: u32,
value: *const u8, value_length: u32) -> bool {
let property = unsafe { from_utf8_unchecked(slice::from_raw_parts(property, property_length as usize)) };
let value = unsafe { from_utf8_unchecked(slice::from_raw_parts(value, value_length as usize)) };
let base_url = &*DUMMY_BASE_URL;
let extra_data = ParserContextExtraData::default();
match parse_one_declaration(&property, &value, &base_url, Box::new(StdoutErrorReporter), extra_data) {
Ok(decls) => !decls.is_empty(),
Err(()) => false,
}
}
#[no_mangle] | let per_doc_data = PerDocumentStyleData::from_ffi(raw_data);
let snapshot = unsafe { GeckoElementSnapshot::from_raw(snapshot) };
let element = GeckoElement(element);
// NB: This involves an FFI call, we can get rid of it easily if needed.
let current_state = element.get_state();
let hint = per_doc_data.stylist
.compute_restyle_hint(&element, &snapshot,
current_state);
// NB: Binary representations match.
unsafe { transmute(hint.bits() as u32) }
} | pub extern "C" fn Servo_ComputeRestyleHint(element: RawGeckoElementBorrowed,
snapshot: *mut ServoElementSnapshot,
raw_data: RawServoStyleSetBorrowed) -> nsRestyleHint { | random_line_split |
rq.py | from __future__ import absolute_import
import logging
try:
from redis import Redis
from rq import Queue
except ImportError:
Redis = None
Queue = None
from kaneda.exceptions import ImproperlyConfigured
from .base import BaseQueue
class RQQueue(BaseQueue):
"""
RQ queue
:param queue: queue instance of RQ class.
:param redis_url: Redis connection url where RQ will attend the async reporting requests.
:param queue_name: name of the queue being used by the RQ worker process.
"""
settings_namespace = 'RQ'
def __init__(self, queue=None, redis_url=None, queue_name='kaneda'):
if not Redis:
raise ImproperlyConfigured('You need to install redis to use the RQ queue.')
if not Queue:
raise ImproperlyConfigured('You need to install rq library to use the RQ queue.')
if queue:
if not isinstance(queue, Queue):
raise ImproperlyConfigured('"queue" parameter is not an instance of RQ queue.')
self.queue = queue
elif redis_url:
self.queue = Queue(queue_name, connection=Redis.from_url(redis_url))
else:
|
def report(self, name, metric, value, tags, id_):
try:
return self.queue.enqueue('kaneda.tasks.rq.report', name, metric, value, tags, id_)
except Exception as e:
logger = logging.getLogger(__name__)
logger.exception(e)
| self.queue = Queue(queue_name, connection=Redis()) | conditional_block |
rq.py | from __future__ import absolute_import
import logging
try:
from redis import Redis
from rq import Queue
except ImportError:
Redis = None
Queue = None
from kaneda.exceptions import ImproperlyConfigured
from .base import BaseQueue
class RQQueue(BaseQueue):
"""
RQ queue
:param queue: queue instance of RQ class.
:param redis_url: Redis connection url where RQ will attend the async reporting requests.
:param queue_name: name of the queue being used by the RQ worker process.
"""
settings_namespace = 'RQ'
def __init__(self, queue=None, redis_url=None, queue_name='kaneda'):
if not Redis:
raise ImproperlyConfigured('You need to install redis to use the RQ queue.')
if not Queue:
raise ImproperlyConfigured('You need to install rq library to use the RQ queue.')
if queue:
if not isinstance(queue, Queue):
raise ImproperlyConfigured('"queue" parameter is not an instance of RQ queue.')
self.queue = queue
elif redis_url:
self.queue = Queue(queue_name, connection=Redis.from_url(redis_url))
else:
self.queue = Queue(queue_name, connection=Redis())
def | (self, name, metric, value, tags, id_):
try:
return self.queue.enqueue('kaneda.tasks.rq.report', name, metric, value, tags, id_)
except Exception as e:
logger = logging.getLogger(__name__)
logger.exception(e)
| report | identifier_name |
rq.py | from __future__ import absolute_import
import logging
try:
from redis import Redis
from rq import Queue
except ImportError:
Redis = None
Queue = None
from kaneda.exceptions import ImproperlyConfigured
from .base import BaseQueue
class RQQueue(BaseQueue):
"""
RQ queue
:param queue: queue instance of RQ class.
:param redis_url: Redis connection url where RQ will attend the async reporting requests.
:param queue_name: name of the queue being used by the RQ worker process.
"""
settings_namespace = 'RQ'
def __init__(self, queue=None, redis_url=None, queue_name='kaneda'):
if not Redis:
raise ImproperlyConfigured('You need to install redis to use the RQ queue.')
if not Queue:
raise ImproperlyConfigured('You need to install rq library to use the RQ queue.')
if queue:
if not isinstance(queue, Queue):
raise ImproperlyConfigured('"queue" parameter is not an instance of RQ queue.')
self.queue = queue
elif redis_url:
self.queue = Queue(queue_name, connection=Redis.from_url(redis_url))
else:
self.queue = Queue(queue_name, connection=Redis())
def report(self, name, metric, value, tags, id_): | logger = logging.getLogger(__name__)
logger.exception(e) | try:
return self.queue.enqueue('kaneda.tasks.rq.report', name, metric, value, tags, id_)
except Exception as e: | random_line_split |
rq.py | from __future__ import absolute_import
import logging
try:
from redis import Redis
from rq import Queue
except ImportError:
Redis = None
Queue = None
from kaneda.exceptions import ImproperlyConfigured
from .base import BaseQueue
class RQQueue(BaseQueue):
"""
RQ queue
:param queue: queue instance of RQ class.
:param redis_url: Redis connection url where RQ will attend the async reporting requests.
:param queue_name: name of the queue being used by the RQ worker process.
"""
settings_namespace = 'RQ'
def __init__(self, queue=None, redis_url=None, queue_name='kaneda'):
if not Redis:
raise ImproperlyConfigured('You need to install redis to use the RQ queue.')
if not Queue:
raise ImproperlyConfigured('You need to install rq library to use the RQ queue.')
if queue:
if not isinstance(queue, Queue):
raise ImproperlyConfigured('"queue" parameter is not an instance of RQ queue.')
self.queue = queue
elif redis_url:
self.queue = Queue(queue_name, connection=Redis.from_url(redis_url))
else:
self.queue = Queue(queue_name, connection=Redis())
def report(self, name, metric, value, tags, id_):
| try:
return self.queue.enqueue('kaneda.tasks.rq.report', name, metric, value, tags, id_)
except Exception as e:
logger = logging.getLogger(__name__)
logger.exception(e) | identifier_body | |
dataset.service.ts | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Injectable, OnDestroy } from '@angular/core';
import { Action, Store } from '@ngrx/store';
import { ToastsService } from 'src/app/services/toasts/toasts.service';
import { IProjectState } from '../../store/reducers';
import {
AbstractOverlayControllerDirective,
ConfirmationDialogComponent,
DataPickerService,
OverlayService,
} from 'cd-common';
import { BehaviorSubject, Observable, Subscription } from 'rxjs';
import { downloadBlobAsFile, createJsonFile, fileFromBlob } from 'cd-utils/files';
import { UploadService } from '../upload/upload.service';
import { RendererService } from 'src/app/services/renderer/renderer.service';
import { storagePathForJsonDatasetFile } from 'src/app/utils/storage.utils';
import { PropertiesService } from '../properties/properties.service';
import { incrementedName } from 'cd-utils/string';
import { AnalyticsService } from 'src/app/services/analytics/analytics.service';
import { AnalyticsEvent, getDatasetAnalyticsName, UPLOAD_JSON_FILE } from 'cd-common/analytics';
import { createId } from 'cd-utils/guid';
import { ProjectContentService } from 'src/app/database/changes/project-content.service';
import * as actions from '../../store/actions';
import * as config from '../../components/panels/data-panel/data-panel.config';
import * as consts from 'cd-common/consts';
import * as utils from 'cd-common/utils';
import * as cd from 'cd-interfaces';
type BaseDatasetValues = [
projId: string | undefined,
dataId: string,
fileName: string,
uploadPath: string
];
@Injectable({
providedIn: 'root',
})
export class DatasetService extends AbstractOverlayControllerDirective implements OnDestroy {
private subscriptions = new Subscription();
private confirmSubscription = Subscription.EMPTY;
private _datasets: cd.ProjectDataset[] = [];
public openAddDatasetMenuTrigger$ = new BehaviorSubject(false);
public datasets$: Observable<cd.ProjectDataset[]>;
constructor(
overlayService: OverlayService,
private projectStore: Store<IProjectState>,
private toastService: ToastsService,
private dataPickerService: DataPickerService,
private uploadService: UploadService,
private rendererService: RendererService,
private propertiesService: PropertiesService,
private analyticsService: AnalyticsService,
private projectContentService: ProjectContentService
) {
super(overlayService);
this.datasets$ = this.projectContentService.datasetArray$;
this.subscriptions.add(this.datasets$.subscribe(this.onDatasetsSubscription));
}
ngOnDestroy() {
super.ngOnDestroy();
this.confirmSubscription.unsubscribe();
}
onDatasetsSubscription = (datasets: cd.ProjectDataset[]) => (this._datasets = datasets);
openAddDatasetMenu() {
this.openAddDatasetMenuTrigger$.next(true);
}
addNewDirectInputDataset = (): string | undefined => {
const [projId, dataId, fileName, uploadPath] = this._getBaseStoredDatasetValues();
if (!projId) return;
const dataset = utils.createJsonDataset(dataId, projId, fileName, uploadPath);
const jsonValue = config.DIRECT_INPUT_INITIAL_JSON;
return this._afterStoredDatasetCreated(dataId, fileName, dataset, jsonValue, uploadPath);
};
createGenericEndpointDataset(data: cd.IStringMap<any>, url: string) {
const [projId, dataId, fileName, uploadPath] = this._getBaseStoredDatasetValues();
if (!projId) return;
const dataset = utils.createGenericEndpointDataset(dataId, projId, fileName, uploadPath, url);
this._afterStoredDatasetCreated(dataId, fileName, dataset, data, uploadPath);
}
createSheetsDataset(data: cd.IStringMap<any>, sheetId: string, tabId: string) {
const fileName = `sheets-${tabId}`;
const [projId, dataId, _, uploadPath] = this._getBaseStoredDatasetValues(fileName);
if (!projId) return;
const dataset = utils.createSheetsDataset(dataId, projId, fileName, uploadPath, sheetId, tabId);
this._afterStoredDatasetCreated(dataId, fileName, dataset, data, uploadPath);
}
private _getBaseStoredDatasetValues = (providedFileName = ''): BaseDatasetValues => {
const projId = this.propertiesService.getProjectId();
const dataId = createId();
const jsonFileId = createId();
const fileName = providedFileName || this.getIncrementedDefaultDatasetName();
const uploadPath = storagePathForJsonDatasetFile(jsonFileId, fileName);
return [projId, dataId, fileName, uploadPath];
};
/**
* Sends the created dataset to the appropriate places if small enough
* NOTE: Returns the dataset's ID if successful (to be used by the addNewDirectInputDataset function)
*/
private _afterStoredDatasetCreated = (
dataId: string,
fileName: string,
dataset: cd.IStoredDataset,
data: cd.IStringMap<any>,
uploadPath: string
): string | undefined => {
const jsonFile = createJsonFile(data, fileName);
if (!this.checkDatasetSize(jsonFile)) return;
this.sendDatasetDataToRenderer(dataId, jsonFile); // send to renderer
this.dataPickerService.addDataSource(dataset, data); // add to data picker
this.dispatch(new actions.DatasetCreate([dataset as cd.ProjectDataset])); // Add to store // database
this.uploadService.uploadFile(jsonFile, uploadPath); // Upload to firebase storage
const name = getDatasetAnalyticsName(dataset.datasetType);
this.analyticsService.logEvent(AnalyticsEvent.DatasetAdded, { name });
return dataset.id;
};
createDatasetFromFile(file: File, showToast = true) {
if (!this.checkDatasetSize(file)) return;
// show uploading toast
if (showToast) this.toastService.addToast(config.UPLOADING_TOAST);
// log analytics event
this.analyticsService.logEvent(AnalyticsEvent.DatasetAdded, { name: UPLOAD_JSON_FILE });
// Generate storage path
const id = createId();
const uploadPath = storagePathForJsonDatasetFile(id, file.name);
const { onUploadComplete, onUploadError } = this;
// Upload file and resolve promise from success or error callbacks
this.uploadService.uploadFile(
file,
uploadPath,
() => onUploadComplete(uploadPath, file, showToast),
onUploadError
);
}
getDatasets() {
return this._datasets;
}
updateDataset(datasetId: string, updates: Partial<cd.ProjectDataset>) {
this.dispatch(new actions.DatasetUpdate(datasetId, updates));
}
replaceDatasetData(datasetId: string, data: string) {
const file = createJsonFile(data, config.DEFAULT_DATASET_FILENAME);
this.replaceDatasetDataFromFile(datasetId, file, false);
}
replaceDatasetDataFromFile(datasetId: string, file: File, showToast = true) {
if (!this.checkDatasetSize(file)) return;
// show uploading toast
if (showToast) this.toastService.addToast(config.UPLOADING_TOAST);
// send new data to renderer and update data picker immediately
this.sendDatasetDataToRenderer(datasetId, file);
this.dataPickerService.updateBlobDataSource(datasetId, file);
// Upload file
const id = createId();
const uploadPath = storagePathForJsonDatasetFile(id, file.name);
const { onReplaceComplete, onUploadError } = this;
this.uploadService.uploadFile(
file,
uploadPath,
() => onReplaceComplete(datasetId, uploadPath, showToast),
onUploadError
);
}
downloadDatasetData = async (dataset: cd.ProjectDataset) => {
// TODO: how would we download other dataset types
if (dataset.datasetType !== cd.DatasetType.Json) return;
const { storagePath, name } = dataset as cd.IJsonDataset; | downloadBlobAsFile(jsonBlob, fileName);
};
duplicateDataset = async (dataset: cd.ProjectDataset) => {
const { storagePath, name } = dataset as cd.IJsonDataset;
const jsonBlob = await this.uploadService.downloadFile(storagePath);
if (!jsonBlob) return;
const fileName = this._incrementName(name);
const file = fileFromBlob(jsonBlob, fileName);
this.toastService.addToast(config.DUPLICATING_TOAST);
this.createDatasetFromFile(file, false);
};
deleteDataset(dataset: cd.ProjectDataset) {
const confirmModal = this.showModal<ConfirmationDialogComponent>(ConfirmationDialogComponent);
const { instance } = confirmModal;
const { cancel, confirm } = instance;
instance.title = config.REMOVE_DATASET_TITLE;
instance.message = config.REMOVE_DATASET_MESSAGE;
this.confirmSubscription = new Subscription();
this.confirmSubscription.add(cancel.subscribe(this.onCancelRemoveDataset));
this.confirmSubscription.add(confirm.subscribe(() => this.onConfirmRemoveDataset(dataset)));
}
/** Send updated data to Renderer */
private sendDatasetDataToRenderer(datasetId: string, dataFile: File) {
const rendererData = { [datasetId]: dataFile };
this.rendererService.addDatasetData(rendererData);
}
// eslint-disable-next-line require-await
private onUploadComplete = async (filePath: string, file: File, showToast = true) => {
const projectId = this.propertiesService.getProjectId();
if (!projectId) return;
const id = createId();
const dataset = utils.createJsonDataset(id, projectId, file.name, filePath);
this.dispatch(new actions.DatasetCreate([dataset]));
if (showToast) this.showUploadToast(config.UPLOAD_SUCCESS_MESSAGE);
// send new data to renderer and data picker immediately
this.sendDatasetDataToRenderer(id, file);
this.dataPickerService.addDataSourceWithBlobValue(dataset, file);
};
private onUploadError = (e: any) => {
console.error(e);
this.showUploadToast(config.UPLOAD_FAILED_MESSAGE);
};
private onReplaceComplete = (datasetId: string, storagePath: string, showToast = true) => {
this.dispatch(new actions.DatasetUpdate(datasetId, { storagePath }));
if (showToast) this.showUploadToast(config.REPLACE_SUCCESS_MESSAGE);
};
private onConfirmRemoveDataset = (dataset: cd.ProjectDataset) => {
// log analytics event
this.analyticsService.logEvent(AnalyticsEvent.DatasetRemoved);
this.dispatch(new actions.DatasetDelete(dataset));
};
private onCancelRemoveDataset = () => {
this.closeModal();
this.confirmSubscription.unsubscribe();
};
private dispatch(action: Action) {
this.projectStore.dispatch(action);
}
private showUploadToast(message: string) {
this.toastService.addToast({ id: config.UPLOAD_TOAST_ID, message }, config.UPLOAD_TOAST_ID);
}
private checkDatasetSize(dataBlob: Blob): boolean {
const validSize = dataBlob.size <= consts.DATASET_SIZE_LIMIT;
if (validSize) return true;
// log event
this.analyticsService.logEvent(AnalyticsEvent.DatasetSizeLimitExceeded);
this.toastService.addToast({ message: consts.DATASET_SIZE_LIMIT_ERROR });
return false;
}
private getIncrementedDefaultDatasetName = () => {
return this._incrementName(config.DEFAULT_DATASET_FILENAME);
};
private _incrementName = (name: string) => {
const currentNames = this._datasets.map((d) => d.name);
return incrementedName(name, currentNames);
};
} | const jsonBlob = await this.uploadService.downloadFile(storagePath);
const fileName = utils.addJsonFileExtension(name); | random_line_split |
dataset.service.ts | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Injectable, OnDestroy } from '@angular/core';
import { Action, Store } from '@ngrx/store';
import { ToastsService } from 'src/app/services/toasts/toasts.service';
import { IProjectState } from '../../store/reducers';
import {
AbstractOverlayControllerDirective,
ConfirmationDialogComponent,
DataPickerService,
OverlayService,
} from 'cd-common';
import { BehaviorSubject, Observable, Subscription } from 'rxjs';
import { downloadBlobAsFile, createJsonFile, fileFromBlob } from 'cd-utils/files';
import { UploadService } from '../upload/upload.service';
import { RendererService } from 'src/app/services/renderer/renderer.service';
import { storagePathForJsonDatasetFile } from 'src/app/utils/storage.utils';
import { PropertiesService } from '../properties/properties.service';
import { incrementedName } from 'cd-utils/string';
import { AnalyticsService } from 'src/app/services/analytics/analytics.service';
import { AnalyticsEvent, getDatasetAnalyticsName, UPLOAD_JSON_FILE } from 'cd-common/analytics';
import { createId } from 'cd-utils/guid';
import { ProjectContentService } from 'src/app/database/changes/project-content.service';
import * as actions from '../../store/actions';
import * as config from '../../components/panels/data-panel/data-panel.config';
import * as consts from 'cd-common/consts';
import * as utils from 'cd-common/utils';
import * as cd from 'cd-interfaces';
type BaseDatasetValues = [
projId: string | undefined,
dataId: string,
fileName: string,
uploadPath: string
];
@Injectable({
providedIn: 'root',
})
export class | extends AbstractOverlayControllerDirective implements OnDestroy {
private subscriptions = new Subscription();
private confirmSubscription = Subscription.EMPTY;
private _datasets: cd.ProjectDataset[] = [];
public openAddDatasetMenuTrigger$ = new BehaviorSubject(false);
public datasets$: Observable<cd.ProjectDataset[]>;
constructor(
overlayService: OverlayService,
private projectStore: Store<IProjectState>,
private toastService: ToastsService,
private dataPickerService: DataPickerService,
private uploadService: UploadService,
private rendererService: RendererService,
private propertiesService: PropertiesService,
private analyticsService: AnalyticsService,
private projectContentService: ProjectContentService
) {
super(overlayService);
this.datasets$ = this.projectContentService.datasetArray$;
this.subscriptions.add(this.datasets$.subscribe(this.onDatasetsSubscription));
}
ngOnDestroy() {
super.ngOnDestroy();
this.confirmSubscription.unsubscribe();
}
onDatasetsSubscription = (datasets: cd.ProjectDataset[]) => (this._datasets = datasets);
openAddDatasetMenu() {
this.openAddDatasetMenuTrigger$.next(true);
}
addNewDirectInputDataset = (): string | undefined => {
const [projId, dataId, fileName, uploadPath] = this._getBaseStoredDatasetValues();
if (!projId) return;
const dataset = utils.createJsonDataset(dataId, projId, fileName, uploadPath);
const jsonValue = config.DIRECT_INPUT_INITIAL_JSON;
return this._afterStoredDatasetCreated(dataId, fileName, dataset, jsonValue, uploadPath);
};
createGenericEndpointDataset(data: cd.IStringMap<any>, url: string) {
const [projId, dataId, fileName, uploadPath] = this._getBaseStoredDatasetValues();
if (!projId) return;
const dataset = utils.createGenericEndpointDataset(dataId, projId, fileName, uploadPath, url);
this._afterStoredDatasetCreated(dataId, fileName, dataset, data, uploadPath);
}
createSheetsDataset(data: cd.IStringMap<any>, sheetId: string, tabId: string) {
const fileName = `sheets-${tabId}`;
const [projId, dataId, _, uploadPath] = this._getBaseStoredDatasetValues(fileName);
if (!projId) return;
const dataset = utils.createSheetsDataset(dataId, projId, fileName, uploadPath, sheetId, tabId);
this._afterStoredDatasetCreated(dataId, fileName, dataset, data, uploadPath);
}
private _getBaseStoredDatasetValues = (providedFileName = ''): BaseDatasetValues => {
const projId = this.propertiesService.getProjectId();
const dataId = createId();
const jsonFileId = createId();
const fileName = providedFileName || this.getIncrementedDefaultDatasetName();
const uploadPath = storagePathForJsonDatasetFile(jsonFileId, fileName);
return [projId, dataId, fileName, uploadPath];
};
/**
* Sends the created dataset to the appropriate places if small enough
* NOTE: Returns the dataset's ID if successful (to be used by the addNewDirectInputDataset function)
*/
private _afterStoredDatasetCreated = (
dataId: string,
fileName: string,
dataset: cd.IStoredDataset,
data: cd.IStringMap<any>,
uploadPath: string
): string | undefined => {
const jsonFile = createJsonFile(data, fileName);
if (!this.checkDatasetSize(jsonFile)) return;
this.sendDatasetDataToRenderer(dataId, jsonFile); // send to renderer
this.dataPickerService.addDataSource(dataset, data); // add to data picker
this.dispatch(new actions.DatasetCreate([dataset as cd.ProjectDataset])); // Add to store // database
this.uploadService.uploadFile(jsonFile, uploadPath); // Upload to firebase storage
const name = getDatasetAnalyticsName(dataset.datasetType);
this.analyticsService.logEvent(AnalyticsEvent.DatasetAdded, { name });
return dataset.id;
};
createDatasetFromFile(file: File, showToast = true) {
if (!this.checkDatasetSize(file)) return;
// show uploading toast
if (showToast) this.toastService.addToast(config.UPLOADING_TOAST);
// log analytics event
this.analyticsService.logEvent(AnalyticsEvent.DatasetAdded, { name: UPLOAD_JSON_FILE });
// Generate storage path
const id = createId();
const uploadPath = storagePathForJsonDatasetFile(id, file.name);
const { onUploadComplete, onUploadError } = this;
// Upload file and resolve promise from success or error callbacks
this.uploadService.uploadFile(
file,
uploadPath,
() => onUploadComplete(uploadPath, file, showToast),
onUploadError
);
}
getDatasets() {
return this._datasets;
}
updateDataset(datasetId: string, updates: Partial<cd.ProjectDataset>) {
this.dispatch(new actions.DatasetUpdate(datasetId, updates));
}
replaceDatasetData(datasetId: string, data: string) {
const file = createJsonFile(data, config.DEFAULT_DATASET_FILENAME);
this.replaceDatasetDataFromFile(datasetId, file, false);
}
replaceDatasetDataFromFile(datasetId: string, file: File, showToast = true) {
if (!this.checkDatasetSize(file)) return;
// show uploading toast
if (showToast) this.toastService.addToast(config.UPLOADING_TOAST);
// send new data to renderer and update data picker immediately
this.sendDatasetDataToRenderer(datasetId, file);
this.dataPickerService.updateBlobDataSource(datasetId, file);
// Upload file
const id = createId();
const uploadPath = storagePathForJsonDatasetFile(id, file.name);
const { onReplaceComplete, onUploadError } = this;
this.uploadService.uploadFile(
file,
uploadPath,
() => onReplaceComplete(datasetId, uploadPath, showToast),
onUploadError
);
}
downloadDatasetData = async (dataset: cd.ProjectDataset) => {
// TODO: how would we download other dataset types
if (dataset.datasetType !== cd.DatasetType.Json) return;
const { storagePath, name } = dataset as cd.IJsonDataset;
const jsonBlob = await this.uploadService.downloadFile(storagePath);
const fileName = utils.addJsonFileExtension(name);
downloadBlobAsFile(jsonBlob, fileName);
};
duplicateDataset = async (dataset: cd.ProjectDataset) => {
const { storagePath, name } = dataset as cd.IJsonDataset;
const jsonBlob = await this.uploadService.downloadFile(storagePath);
if (!jsonBlob) return;
const fileName = this._incrementName(name);
const file = fileFromBlob(jsonBlob, fileName);
this.toastService.addToast(config.DUPLICATING_TOAST);
this.createDatasetFromFile(file, false);
};
deleteDataset(dataset: cd.ProjectDataset) {
const confirmModal = this.showModal<ConfirmationDialogComponent>(ConfirmationDialogComponent);
const { instance } = confirmModal;
const { cancel, confirm } = instance;
instance.title = config.REMOVE_DATASET_TITLE;
instance.message = config.REMOVE_DATASET_MESSAGE;
this.confirmSubscription = new Subscription();
this.confirmSubscription.add(cancel.subscribe(this.onCancelRemoveDataset));
this.confirmSubscription.add(confirm.subscribe(() => this.onConfirmRemoveDataset(dataset)));
}
/** Send updated data to Renderer */
private sendDatasetDataToRenderer(datasetId: string, dataFile: File) {
const rendererData = { [datasetId]: dataFile };
this.rendererService.addDatasetData(rendererData);
}
// eslint-disable-next-line require-await
private onUploadComplete = async (filePath: string, file: File, showToast = true) => {
const projectId = this.propertiesService.getProjectId();
if (!projectId) return;
const id = createId();
const dataset = utils.createJsonDataset(id, projectId, file.name, filePath);
this.dispatch(new actions.DatasetCreate([dataset]));
if (showToast) this.showUploadToast(config.UPLOAD_SUCCESS_MESSAGE);
// send new data to renderer and data picker immediately
this.sendDatasetDataToRenderer(id, file);
this.dataPickerService.addDataSourceWithBlobValue(dataset, file);
};
private onUploadError = (e: any) => {
console.error(e);
this.showUploadToast(config.UPLOAD_FAILED_MESSAGE);
};
private onReplaceComplete = (datasetId: string, storagePath: string, showToast = true) => {
this.dispatch(new actions.DatasetUpdate(datasetId, { storagePath }));
if (showToast) this.showUploadToast(config.REPLACE_SUCCESS_MESSAGE);
};
private onConfirmRemoveDataset = (dataset: cd.ProjectDataset) => {
// log analytics event
this.analyticsService.logEvent(AnalyticsEvent.DatasetRemoved);
this.dispatch(new actions.DatasetDelete(dataset));
};
private onCancelRemoveDataset = () => {
this.closeModal();
this.confirmSubscription.unsubscribe();
};
private dispatch(action: Action) {
this.projectStore.dispatch(action);
}
private showUploadToast(message: string) {
this.toastService.addToast({ id: config.UPLOAD_TOAST_ID, message }, config.UPLOAD_TOAST_ID);
}
private checkDatasetSize(dataBlob: Blob): boolean {
const validSize = dataBlob.size <= consts.DATASET_SIZE_LIMIT;
if (validSize) return true;
// log event
this.analyticsService.logEvent(AnalyticsEvent.DatasetSizeLimitExceeded);
this.toastService.addToast({ message: consts.DATASET_SIZE_LIMIT_ERROR });
return false;
}
private getIncrementedDefaultDatasetName = () => {
return this._incrementName(config.DEFAULT_DATASET_FILENAME);
};
private _incrementName = (name: string) => {
const currentNames = this._datasets.map((d) => d.name);
return incrementedName(name, currentNames);
};
}
| DatasetService | identifier_name |
dataset.service.ts | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Injectable, OnDestroy } from '@angular/core';
import { Action, Store } from '@ngrx/store';
import { ToastsService } from 'src/app/services/toasts/toasts.service';
import { IProjectState } from '../../store/reducers';
import {
AbstractOverlayControllerDirective,
ConfirmationDialogComponent,
DataPickerService,
OverlayService,
} from 'cd-common';
import { BehaviorSubject, Observable, Subscription } from 'rxjs';
import { downloadBlobAsFile, createJsonFile, fileFromBlob } from 'cd-utils/files';
import { UploadService } from '../upload/upload.service';
import { RendererService } from 'src/app/services/renderer/renderer.service';
import { storagePathForJsonDatasetFile } from 'src/app/utils/storage.utils';
import { PropertiesService } from '../properties/properties.service';
import { incrementedName } from 'cd-utils/string';
import { AnalyticsService } from 'src/app/services/analytics/analytics.service';
import { AnalyticsEvent, getDatasetAnalyticsName, UPLOAD_JSON_FILE } from 'cd-common/analytics';
import { createId } from 'cd-utils/guid';
import { ProjectContentService } from 'src/app/database/changes/project-content.service';
import * as actions from '../../store/actions';
import * as config from '../../components/panels/data-panel/data-panel.config';
import * as consts from 'cd-common/consts';
import * as utils from 'cd-common/utils';
import * as cd from 'cd-interfaces';
type BaseDatasetValues = [
projId: string | undefined,
dataId: string,
fileName: string,
uploadPath: string
];
@Injectable({
providedIn: 'root',
})
export class DatasetService extends AbstractOverlayControllerDirective implements OnDestroy {
private subscriptions = new Subscription();
private confirmSubscription = Subscription.EMPTY;
private _datasets: cd.ProjectDataset[] = [];
public openAddDatasetMenuTrigger$ = new BehaviorSubject(false);
public datasets$: Observable<cd.ProjectDataset[]>;
constructor(
overlayService: OverlayService,
private projectStore: Store<IProjectState>,
private toastService: ToastsService,
private dataPickerService: DataPickerService,
private uploadService: UploadService,
private rendererService: RendererService,
private propertiesService: PropertiesService,
private analyticsService: AnalyticsService,
private projectContentService: ProjectContentService
) {
super(overlayService);
this.datasets$ = this.projectContentService.datasetArray$;
this.subscriptions.add(this.datasets$.subscribe(this.onDatasetsSubscription));
}
ngOnDestroy() {
super.ngOnDestroy();
this.confirmSubscription.unsubscribe();
}
onDatasetsSubscription = (datasets: cd.ProjectDataset[]) => (this._datasets = datasets);
openAddDatasetMenu() {
this.openAddDatasetMenuTrigger$.next(true);
}
addNewDirectInputDataset = (): string | undefined => {
const [projId, dataId, fileName, uploadPath] = this._getBaseStoredDatasetValues();
if (!projId) return;
const dataset = utils.createJsonDataset(dataId, projId, fileName, uploadPath);
const jsonValue = config.DIRECT_INPUT_INITIAL_JSON;
return this._afterStoredDatasetCreated(dataId, fileName, dataset, jsonValue, uploadPath);
};
createGenericEndpointDataset(data: cd.IStringMap<any>, url: string) {
const [projId, dataId, fileName, uploadPath] = this._getBaseStoredDatasetValues();
if (!projId) return;
const dataset = utils.createGenericEndpointDataset(dataId, projId, fileName, uploadPath, url);
this._afterStoredDatasetCreated(dataId, fileName, dataset, data, uploadPath);
}
createSheetsDataset(data: cd.IStringMap<any>, sheetId: string, tabId: string) |
private _getBaseStoredDatasetValues = (providedFileName = ''): BaseDatasetValues => {
const projId = this.propertiesService.getProjectId();
const dataId = createId();
const jsonFileId = createId();
const fileName = providedFileName || this.getIncrementedDefaultDatasetName();
const uploadPath = storagePathForJsonDatasetFile(jsonFileId, fileName);
return [projId, dataId, fileName, uploadPath];
};
/**
* Sends the created dataset to the appropriate places if small enough
* NOTE: Returns the dataset's ID if successful (to be used by the addNewDirectInputDataset function)
*/
private _afterStoredDatasetCreated = (
dataId: string,
fileName: string,
dataset: cd.IStoredDataset,
data: cd.IStringMap<any>,
uploadPath: string
): string | undefined => {
const jsonFile = createJsonFile(data, fileName);
if (!this.checkDatasetSize(jsonFile)) return;
this.sendDatasetDataToRenderer(dataId, jsonFile); // send to renderer
this.dataPickerService.addDataSource(dataset, data); // add to data picker
this.dispatch(new actions.DatasetCreate([dataset as cd.ProjectDataset])); // Add to store // database
this.uploadService.uploadFile(jsonFile, uploadPath); // Upload to firebase storage
const name = getDatasetAnalyticsName(dataset.datasetType);
this.analyticsService.logEvent(AnalyticsEvent.DatasetAdded, { name });
return dataset.id;
};
createDatasetFromFile(file: File, showToast = true) {
if (!this.checkDatasetSize(file)) return;
// show uploading toast
if (showToast) this.toastService.addToast(config.UPLOADING_TOAST);
// log analytics event
this.analyticsService.logEvent(AnalyticsEvent.DatasetAdded, { name: UPLOAD_JSON_FILE });
// Generate storage path
const id = createId();
const uploadPath = storagePathForJsonDatasetFile(id, file.name);
const { onUploadComplete, onUploadError } = this;
// Upload file and resolve promise from success or error callbacks
this.uploadService.uploadFile(
file,
uploadPath,
() => onUploadComplete(uploadPath, file, showToast),
onUploadError
);
}
getDatasets() {
return this._datasets;
}
updateDataset(datasetId: string, updates: Partial<cd.ProjectDataset>) {
this.dispatch(new actions.DatasetUpdate(datasetId, updates));
}
replaceDatasetData(datasetId: string, data: string) {
const file = createJsonFile(data, config.DEFAULT_DATASET_FILENAME);
this.replaceDatasetDataFromFile(datasetId, file, false);
}
replaceDatasetDataFromFile(datasetId: string, file: File, showToast = true) {
if (!this.checkDatasetSize(file)) return;
// show uploading toast
if (showToast) this.toastService.addToast(config.UPLOADING_TOAST);
// send new data to renderer and update data picker immediately
this.sendDatasetDataToRenderer(datasetId, file);
this.dataPickerService.updateBlobDataSource(datasetId, file);
// Upload file
const id = createId();
const uploadPath = storagePathForJsonDatasetFile(id, file.name);
const { onReplaceComplete, onUploadError } = this;
this.uploadService.uploadFile(
file,
uploadPath,
() => onReplaceComplete(datasetId, uploadPath, showToast),
onUploadError
);
}
downloadDatasetData = async (dataset: cd.ProjectDataset) => {
// TODO: how would we download other dataset types
if (dataset.datasetType !== cd.DatasetType.Json) return;
const { storagePath, name } = dataset as cd.IJsonDataset;
const jsonBlob = await this.uploadService.downloadFile(storagePath);
const fileName = utils.addJsonFileExtension(name);
downloadBlobAsFile(jsonBlob, fileName);
};
duplicateDataset = async (dataset: cd.ProjectDataset) => {
const { storagePath, name } = dataset as cd.IJsonDataset;
const jsonBlob = await this.uploadService.downloadFile(storagePath);
if (!jsonBlob) return;
const fileName = this._incrementName(name);
const file = fileFromBlob(jsonBlob, fileName);
this.toastService.addToast(config.DUPLICATING_TOAST);
this.createDatasetFromFile(file, false);
};
deleteDataset(dataset: cd.ProjectDataset) {
const confirmModal = this.showModal<ConfirmationDialogComponent>(ConfirmationDialogComponent);
const { instance } = confirmModal;
const { cancel, confirm } = instance;
instance.title = config.REMOVE_DATASET_TITLE;
instance.message = config.REMOVE_DATASET_MESSAGE;
this.confirmSubscription = new Subscription();
this.confirmSubscription.add(cancel.subscribe(this.onCancelRemoveDataset));
this.confirmSubscription.add(confirm.subscribe(() => this.onConfirmRemoveDataset(dataset)));
}
/** Send updated data to Renderer */
private sendDatasetDataToRenderer(datasetId: string, dataFile: File) {
const rendererData = { [datasetId]: dataFile };
this.rendererService.addDatasetData(rendererData);
}
// eslint-disable-next-line require-await
private onUploadComplete = async (filePath: string, file: File, showToast = true) => {
const projectId = this.propertiesService.getProjectId();
if (!projectId) return;
const id = createId();
const dataset = utils.createJsonDataset(id, projectId, file.name, filePath);
this.dispatch(new actions.DatasetCreate([dataset]));
if (showToast) this.showUploadToast(config.UPLOAD_SUCCESS_MESSAGE);
// send new data to renderer and data picker immediately
this.sendDatasetDataToRenderer(id, file);
this.dataPickerService.addDataSourceWithBlobValue(dataset, file);
};
private onUploadError = (e: any) => {
console.error(e);
this.showUploadToast(config.UPLOAD_FAILED_MESSAGE);
};
private onReplaceComplete = (datasetId: string, storagePath: string, showToast = true) => {
this.dispatch(new actions.DatasetUpdate(datasetId, { storagePath }));
if (showToast) this.showUploadToast(config.REPLACE_SUCCESS_MESSAGE);
};
private onConfirmRemoveDataset = (dataset: cd.ProjectDataset) => {
// log analytics event
this.analyticsService.logEvent(AnalyticsEvent.DatasetRemoved);
this.dispatch(new actions.DatasetDelete(dataset));
};
private onCancelRemoveDataset = () => {
this.closeModal();
this.confirmSubscription.unsubscribe();
};
private dispatch(action: Action) {
this.projectStore.dispatch(action);
}
private showUploadToast(message: string) {
this.toastService.addToast({ id: config.UPLOAD_TOAST_ID, message }, config.UPLOAD_TOAST_ID);
}
private checkDatasetSize(dataBlob: Blob): boolean {
const validSize = dataBlob.size <= consts.DATASET_SIZE_LIMIT;
if (validSize) return true;
// log event
this.analyticsService.logEvent(AnalyticsEvent.DatasetSizeLimitExceeded);
this.toastService.addToast({ message: consts.DATASET_SIZE_LIMIT_ERROR });
return false;
}
private getIncrementedDefaultDatasetName = () => {
return this._incrementName(config.DEFAULT_DATASET_FILENAME);
};
private _incrementName = (name: string) => {
const currentNames = this._datasets.map((d) => d.name);
return incrementedName(name, currentNames);
};
}
| {
const fileName = `sheets-${tabId}`;
const [projId, dataId, _, uploadPath] = this._getBaseStoredDatasetValues(fileName);
if (!projId) return;
const dataset = utils.createSheetsDataset(dataId, projId, fileName, uploadPath, sheetId, tabId);
this._afterStoredDatasetCreated(dataId, fileName, dataset, data, uploadPath);
} | identifier_body |
async-data.js | import { resolverForGivenFunction, dataObjBuilder, metaFunctionBuilder } from './core.js'
import { dataDefaults } from './defaults.js'
export default function AsyncDataMixinBuilder(dataGlobalDefaults, meta) {
const metaRefresh = metaFunctionBuilder('refresh', meta)
const metaLoading = metaFunctionBuilder('loading', meta)
const metaError = metaFunctionBuilder('error', meta)
const metaDefault = metaFunctionBuilder('default', meta)
const metaMore = metaFunctionBuilder('more', meta)
const metaReset = metaFunctionBuilder('reset', meta)
const metas = { metaLoading, metaError, metaDefault, metaReset }
return {
beforeCreate() {
let properties = this.$options.asyncData || {}
let methods = this.$options.methods = this.$options.methods || {}
for (const [propName, prop] of Object.entries(properties)) {
const opt = dataDefaults(prop, dataGlobalDefaults)
if (!opt.get)
throw `An asyncData was created without a get method: ${opt}`
methods[metaRefresh(propName)] = resolverForGivenFunction.call(this, propName, metas, opt.get, opt.default, opt.transform, opt.error)
// load more stuff |
// for all non lazy properties, call refresh methods
beforeMount() {
const properties = this.$options.asyncData || {}
for (const [propName, prop] of Object.entries(properties)) {
const opt = dataDefaults(prop, dataGlobalDefaults)
if (!opt.lazy) {
this[metaRefresh(propName)]()
}
}
},
data() {
return dataObjBuilder(this.$options.asyncData, metas, false)
}
}} | if (opt.more) {
methods[metaMore(propName)] = resolverForGivenFunction.call(this, propName, metas, opt.more.get, opt.default, opt.transform, opt.error, opt.more.concat)
}
}
}, | random_line_split |
async-data.js | import { resolverForGivenFunction, dataObjBuilder, metaFunctionBuilder } from './core.js'
import { dataDefaults } from './defaults.js'
export default function AsyncDataMixinBuilder(dataGlobalDefaults, meta) {
const metaRefresh = metaFunctionBuilder('refresh', meta)
const metaLoading = metaFunctionBuilder('loading', meta)
const metaError = metaFunctionBuilder('error', meta)
const metaDefault = metaFunctionBuilder('default', meta)
const metaMore = metaFunctionBuilder('more', meta)
const metaReset = metaFunctionBuilder('reset', meta)
const metas = { metaLoading, metaError, metaDefault, metaReset }
return {
beforeCreate() | ,
// for all non lazy properties, call refresh methods
beforeMount() {
const properties = this.$options.asyncData || {}
for (const [propName, prop] of Object.entries(properties)) {
const opt = dataDefaults(prop, dataGlobalDefaults)
if (!opt.lazy) {
this[metaRefresh(propName)]()
}
}
},
data() {
return dataObjBuilder(this.$options.asyncData, metas, false)
}
}}
| {
let properties = this.$options.asyncData || {}
let methods = this.$options.methods = this.$options.methods || {}
for (const [propName, prop] of Object.entries(properties)) {
const opt = dataDefaults(prop, dataGlobalDefaults)
if (!opt.get)
throw `An asyncData was created without a get method: ${opt}`
methods[metaRefresh(propName)] = resolverForGivenFunction.call(this, propName, metas, opt.get, opt.default, opt.transform, opt.error)
// load more stuff
if (opt.more) {
methods[metaMore(propName)] = resolverForGivenFunction.call(this, propName, metas, opt.more.get, opt.default, opt.transform, opt.error, opt.more.concat)
}
}
} | identifier_body |
async-data.js | import { resolverForGivenFunction, dataObjBuilder, metaFunctionBuilder } from './core.js'
import { dataDefaults } from './defaults.js'
export default function AsyncDataMixinBuilder(dataGlobalDefaults, meta) {
const metaRefresh = metaFunctionBuilder('refresh', meta)
const metaLoading = metaFunctionBuilder('loading', meta)
const metaError = metaFunctionBuilder('error', meta)
const metaDefault = metaFunctionBuilder('default', meta)
const metaMore = metaFunctionBuilder('more', meta)
const metaReset = metaFunctionBuilder('reset', meta)
const metas = { metaLoading, metaError, metaDefault, metaReset }
return {
beforeCreate() {
let properties = this.$options.asyncData || {}
let methods = this.$options.methods = this.$options.methods || {}
for (const [propName, prop] of Object.entries(properties)) {
const opt = dataDefaults(prop, dataGlobalDefaults)
if (!opt.get)
throw `An asyncData was created without a get method: ${opt}`
methods[metaRefresh(propName)] = resolverForGivenFunction.call(this, propName, metas, opt.get, opt.default, opt.transform, opt.error)
// load more stuff
if (opt.more) {
methods[metaMore(propName)] = resolverForGivenFunction.call(this, propName, metas, opt.more.get, opt.default, opt.transform, opt.error, opt.more.concat)
}
}
},
// for all non lazy properties, call refresh methods
beforeMount() {
const properties = this.$options.asyncData || {}
for (const [propName, prop] of Object.entries(properties)) {
const opt = dataDefaults(prop, dataGlobalDefaults)
if (!opt.lazy) {
this[metaRefresh(propName)]()
}
}
},
| () {
return dataObjBuilder(this.$options.asyncData, metas, false)
}
}}
| data | identifier_name |
__openerp__.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2012 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Lunch Orders',
'author': 'OpenERP SA',
'version': '0.2',
'depends': ['base', 'report'],
'category' : 'Tools',
'summary': 'Lunch Order, Meal, Food',
'description': """
The base module to manage lunch.
================================
Many companies order sandwiches, pizzas and other, from usual suppliers, for their employees to offer them more facilities.
However lunches management within the company requires proper administration especially when the number of employees or suppliers is important.
The “Lunch Order” module has been developed to make this management easier but also to offer employees more tools and usability. | """,
'data': [
'security/lunch_security.xml',
'lunch_view.xml',
'wizard/lunch_order_view.xml',
'wizard/lunch_validation_view.xml',
'wizard/lunch_cancel_view.xml',
'lunch_report.xml',
'report/report_lunch_order_view.xml',
'security/ir.model.access.csv',
'views/report_lunchorder.xml',
'views/lunch.xml',
],
'images': ['images/new_order.jpeg','images/lunch_account.jpeg','images/order_by_supplier_analysis.jpeg','images/alert.jpeg'],
'demo': ['lunch_demo.xml',],
'installable': True,
'application' : True,
'certificate' : '001292377792581874189',
'images': [],
} |
In addition to a full meal and supplier management, this module offers the possibility to display warning and provides quick order selection based on employee’s preferences.
If you want to save your employees' time and avoid them to always have coins in their pockets, this module is essential. | random_line_split |
lib.rs | //! Provides an interface for reading input from a *Nintendo GameCube Controller Adapter for Wii U*
//! USB device.
//!
//! Third party clones such as the 4-port Mayflash adapter in "Wii U mode" are also supported.
//!
//! This library depends on `libusb`, which is available as a dynamic library on many platforms
//! including Linux, Windows, and Mac OS X.
//!
//! Currently, rumble commands are **unimplemented**.
//!
//! # Usage
//!
//! ```norun
//! extern crate gcnctrlusb;
//!
//! fn main() {
//! // Panics if `libusb` is not found or otherwise fails.
//! let mut scanner = gcnctrlusb::Scanner::new().unwrap();
//! // Panics if a valid device was not found.
//! let mut adapter = scanner.find_adapter().unwrap().unwrap();
//! // Panics if the USB driver fails to open a connection to the device.
//! let mut listener = adapter.listen().unwrap();
//!
//! while let Ok(controllers) = listener.read() {
//! println!("Controller port 1: {:?}", controllers[0]);
//! }
//! }
//! ```
extern crate libusb;
use libusb::{Context, Device, DeviceHandle};
use std::error::Error as StdError;
use std::fmt::Error as FmtError;
use std::fmt::{Display, Formatter};
use std::time::Duration;
const VENDOR_ID: u16 = 0x057e;
const PRODUCT_ID: u16 = 0x0337;
/// Searches for GameCube controller adapter USB devices.
pub struct Scanner {
context: Context,
}
impl Scanner {
/// Initializes USB driver connectivity and returns a `Scanner` instance.
///
/// An error is returned if `libusb` is not loaded or driver initialization otherwise fails.
pub fn new() -> Result<Scanner, Error> {
Ok(Scanner { context: try!(Context::new()) })
}
/// Returns the first adapter found, or `None` if no adapter was found.
pub fn find_adapter<'a>(&'a mut self) -> Result<Option<Adapter<'a>>, Error> {
for mut device in try!(self.context.devices()).iter() {
let desc = try!(device.device_descriptor());
if desc.vendor_id() == VENDOR_ID && desc.product_id() == PRODUCT_ID {
return Ok(Some(Adapter { device: device }));
}
}
Ok(None)
}
}
/// A wrapper around the unopened USB device.
pub struct Adapter<'a> {
device: Device<'a>,
}
impl<'a> Adapter<'a> {
/// Opens the USB device and initializes the hardware for reading controller data.
///
/// If the device is inaccessible or unrecognizable, an error is returned. For example, the
/// device will be inaccessible if a previous `Listener` for this adapter is still alive.
pub fn listen(&mut self) -> Result<Listener<'a>, Error> {
let mut handle = try!(self.device.open());
let config = try!(self.device.config_descriptor(0));
let mut interface_descriptor: Option<_> = None;
let mut endpoint_in = None;
let mut endpoint_out = None;
for interface in config.interfaces() {
interface_descriptor = None;
endpoint_in = None;
endpoint_out = None;
for desc in interface.descriptors() {
for endpoint in desc.endpoint_descriptors() {
match endpoint.direction() {
libusb::Direction::In => endpoint_in = Some(endpoint.address()),
libusb::Direction::Out => endpoint_out = Some(endpoint.address()),
}
}
interface_descriptor = Some(desc);
}
}
if interface_descriptor.is_none() || endpoint_in.is_none() || endpoint_out.is_none() {
return Err(Error::UnrecognizedProtocol);
}
let interface_descriptor = interface_descriptor.unwrap();
let interface_number = interface_descriptor.interface_number();
let has_kernel_driver = match handle.kernel_driver_active(interface_number) {
Ok(true) => {
try!(handle.detach_kernel_driver(interface_number));
true
},
_ => false,
};
try!(handle.set_active_configuration(config.number()));
try!(handle.claim_interface(interface_number));
let setting = interface_descriptor.setting_number();
try!(handle.set_alternate_setting(interface_number, setting));
// Tell the adapter to start sending packets.
let timeout = Duration::from_secs(1);
try!(handle.write_interrupt(endpoint_out.unwrap(), &[0x13], timeout));
Ok(Listener {
handle: handle,
buffer: [0; 37],
has_kernel_driver: has_kernel_driver,
interface: interface_number,
endpoint_in: endpoint_in.unwrap(),
})
}
}
/// An interface that reads packets of controller data on each iteration.
///
/// This interface owns an opened handle to the USB device that is closed once the `Listener`
/// instance is dropped.
pub struct Listener<'a> {
handle: DeviceHandle<'a>,
buffer: [u8; 37],
has_kernel_driver: bool,
interface: u8,
endpoint_in: u8,
}
impl<'a> Listener<'a> {
/// Reads a data packet and returns the states for each of the four possibly connected
/// controllers.
///
/// If reading a single packet takes over 1 second, a timeout error with occur. In testing,
/// these packets are available at over 100 times per second.
///
/// Reasons an error may occur include:
///
/// * The USB device becomes disconnected
/// * The USB driver throws an error, fatal or not
/// * A USB message was successfully read, but it was not the right size
///
/// It is wise to treat all errors returned as fatal, and to reestablish the adapter connection
/// through `Scanner::find_adapter`.
pub fn read(&mut self) -> Result<[Option<Controller>; 4], Error> {
let timeout = Duration::from_secs(1);
match self.handle.read_interrupt(self.endpoint_in, &mut self.buffer, timeout) {
Ok(read) if read == 37 => Ok(Controller::parse_packet(&self.buffer)),
Ok(_) => Err(Error::InvalidPacket),
Err(err) => Err(Error::Usb(err)),
}
}
}
impl<'a> Drop for Listener<'a> {
fn drop(&mut self) {
if self.has_kernel_driver {
let _ = self.handle.attach_kernel_driver(self.interface);
}
}
}
/// The state of a GameCube controller at a given moment in time.
///
/// Note that the hardware will likely never report either extreme of the spectrum for any of the
/// analog inputs. For example, all `u8` fields may report only within the range of `30` to `225`.
/// Also, the hardware will likely never report a perfect `127` for the resting position of any of
/// the joystick axes. Keep in my that this library does not do any analog dead zone correction.
#[derive(Clone, Copy, Debug)]
pub struct Controller {
/// The classification of this controller.
pub kind: ControllerKind,
/// "A" button status.
pub a: bool,
/// "B" button status.
pub b: bool,
/// "X" button status.
pub x: bool,
/// "Y" button status.
pub y: bool,
/// Directional pad up button status.
pub up: bool,
/// Directional pad down button status.
pub down: bool,
/// Directional pad left button status.
pub left: bool,
/// Directional pad right button status.
pub right: bool,
/// Digital "L" button (full depression) status.
pub l: bool,
/// Digital "R" button (full depression) status.
pub r: bool,
/// The level of depression of the analog "L" button, `0` being completely up, `255` being
/// completely pressed in.
pub l_analog: u8,
/// The level of depression of the analog "R" button, `0` being completely up, `255` being
/// completely pressed in.
pub r_analog: u8,
/// "Z" button status.
pub z: bool,
/// Start button status.
pub start: bool,
/// The x-axis position of the primary analog joystick, `0` being completely left, `255` being
/// completely right.
pub stick_x: u8,
/// The y-axis position of the primary analog joystick, `0` being completely down, `255` being
/// completely up.
pub stick_y: u8,
/// The x-axis position of the secondary ("C") analog joystick, `0` being completely left,
/// `255` being completely right.
pub c_stick_x: u8,
/// The y-axis position of the secondary ("C") analog joystick, `0` being completely down,
/// `255` being completely up.
pub c_stick_y: u8,
}
impl Controller {
// # Panics
//
// Panics if `data` is not at least 9 bytes.
fn parse(data: &[u8]) -> Option<Controller> {
let kind = match data[0] >> 4 {
0 => return None,
1 => ControllerKind::Wired,
2 => ControllerKind::Wireless,
_ => ControllerKind::Unknown,
};
Some(Controller {
kind: kind, | right: data[1] & (1 << 5) != 0,
down: data[1] & (1 << 6) != 0,
up: data[1] & (1 << 7) != 0,
start: data[2] & (1 << 0) != 0,
z: data[2] & (1 << 1) != 0,
r: data[2] & (1 << 2) != 0,
l: data[2] & (1 << 3) != 0,
stick_x: data[3],
stick_y: data[4],
c_stick_x: data[5],
c_stick_y: data[6],
l_analog: data[7],
r_analog: data[8],
})
}
// # Panics
//
// Panics if `data` is not at least 37 bytes.
fn parse_packet(data: &[u8]) -> [Option<Controller>; 4] {
[
Controller::parse(&data[1..10]),
Controller::parse(&data[10..19]),
Controller::parse(&data[19..28]),
Controller::parse(&data[28..37])
]
}
}
/// The classification of a GameCube controller.
#[derive(Clone, Copy, Debug)]
pub enum ControllerKind {
/// The controller is wired and likely supports rumble.
Wired,
/// The controller is wireless and likely does not supports rumble.
Wireless,
/// The controller is of an unknown type.
Unknown,
}
/// An error that occurs during usage of this library.
#[derive(Debug)]
pub enum Error {
/// A USB driver error that can occur at any time while utilizing this library.
Usb(libusb::Error),
/// A seemingly valid adapter was found, but its communication protocol could not be resolved.
UnrecognizedProtocol,
/// An invalid message was read from the adapter, likely due to a device or driver failure.
InvalidPacket,
}
impl StdError for Error {
fn description(&self) -> &str {
match *self {
Error::Usb(ref err) => err.description(),
Error::UnrecognizedProtocol => "USB adapter protocol unrecognized",
Error::InvalidPacket => "Invalid data packet received",
}
}
fn cause(&self) -> Option<&StdError> {
match *self {
Error::Usb(ref err) => err.cause(),
_ => None,
}
}
}
impl Display for Error {
fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
match *self {
Error::Usb(ref err) => Display::fmt(err, f),
_ => self.description().fmt(f),
}
}
}
impl From<libusb::Error> for Error {
fn from(err: libusb::Error) -> Error {
Error::Usb(err)
}
} | a: data[1] & (1 << 0) != 0,
b: data[1] & (1 << 1) != 0,
x: data[1] & (1 << 2) != 0,
y: data[1] & (1 << 3) != 0,
left: data[1] & (1 << 4) != 0, | random_line_split |
lib.rs | //! Provides an interface for reading input from a *Nintendo GameCube Controller Adapter for Wii U*
//! USB device.
//!
//! Third party clones such as the 4-port Mayflash adapter in "Wii U mode" are also supported.
//!
//! This library depends on `libusb`, which is available as a dynamic library on many platforms
//! including Linux, Windows, and Mac OS X.
//!
//! Currently, rumble commands are **unimplemented**.
//!
//! # Usage
//!
//! ```norun
//! extern crate gcnctrlusb;
//!
//! fn main() {
//! // Panics if `libusb` is not found or otherwise fails.
//! let mut scanner = gcnctrlusb::Scanner::new().unwrap();
//! // Panics if a valid device was not found.
//! let mut adapter = scanner.find_adapter().unwrap().unwrap();
//! // Panics if the USB driver fails to open a connection to the device.
//! let mut listener = adapter.listen().unwrap();
//!
//! while let Ok(controllers) = listener.read() {
//! println!("Controller port 1: {:?}", controllers[0]);
//! }
//! }
//! ```
extern crate libusb;
use libusb::{Context, Device, DeviceHandle};
use std::error::Error as StdError;
use std::fmt::Error as FmtError;
use std::fmt::{Display, Formatter};
use std::time::Duration;
const VENDOR_ID: u16 = 0x057e;
const PRODUCT_ID: u16 = 0x0337;
/// Searches for GameCube controller adapter USB devices.
pub struct Scanner {
context: Context,
}
impl Scanner {
/// Initializes USB driver connectivity and returns a `Scanner` instance.
///
/// An error is returned if `libusb` is not loaded or driver initialization otherwise fails.
pub fn new() -> Result<Scanner, Error> {
Ok(Scanner { context: try!(Context::new()) })
}
/// Returns the first adapter found, or `None` if no adapter was found.
pub fn find_adapter<'a>(&'a mut self) -> Result<Option<Adapter<'a>>, Error> {
for mut device in try!(self.context.devices()).iter() {
let desc = try!(device.device_descriptor());
if desc.vendor_id() == VENDOR_ID && desc.product_id() == PRODUCT_ID {
return Ok(Some(Adapter { device: device }));
}
}
Ok(None)
}
}
/// A wrapper around the unopened USB device.
pub struct Adapter<'a> {
device: Device<'a>,
}
impl<'a> Adapter<'a> {
/// Opens the USB device and initializes the hardware for reading controller data.
///
/// If the device is inaccessible or unrecognizable, an error is returned. For example, the
/// device will be inaccessible if a previous `Listener` for this adapter is still alive.
pub fn listen(&mut self) -> Result<Listener<'a>, Error> {
let mut handle = try!(self.device.open());
let config = try!(self.device.config_descriptor(0));
let mut interface_descriptor: Option<_> = None;
let mut endpoint_in = None;
let mut endpoint_out = None;
for interface in config.interfaces() {
interface_descriptor = None;
endpoint_in = None;
endpoint_out = None;
for desc in interface.descriptors() {
for endpoint in desc.endpoint_descriptors() {
match endpoint.direction() {
libusb::Direction::In => endpoint_in = Some(endpoint.address()),
libusb::Direction::Out => endpoint_out = Some(endpoint.address()),
}
}
interface_descriptor = Some(desc);
}
}
if interface_descriptor.is_none() || endpoint_in.is_none() || endpoint_out.is_none() {
return Err(Error::UnrecognizedProtocol);
}
let interface_descriptor = interface_descriptor.unwrap();
let interface_number = interface_descriptor.interface_number();
let has_kernel_driver = match handle.kernel_driver_active(interface_number) {
Ok(true) => {
try!(handle.detach_kernel_driver(interface_number));
true
},
_ => false,
};
try!(handle.set_active_configuration(config.number()));
try!(handle.claim_interface(interface_number));
let setting = interface_descriptor.setting_number();
try!(handle.set_alternate_setting(interface_number, setting));
// Tell the adapter to start sending packets.
let timeout = Duration::from_secs(1);
try!(handle.write_interrupt(endpoint_out.unwrap(), &[0x13], timeout));
Ok(Listener {
handle: handle,
buffer: [0; 37],
has_kernel_driver: has_kernel_driver,
interface: interface_number,
endpoint_in: endpoint_in.unwrap(),
})
}
}
/// An interface that reads packets of controller data on each iteration.
///
/// This interface owns an opened handle to the USB device that is closed once the `Listener`
/// instance is dropped.
pub struct | <'a> {
handle: DeviceHandle<'a>,
buffer: [u8; 37],
has_kernel_driver: bool,
interface: u8,
endpoint_in: u8,
}
impl<'a> Listener<'a> {
/// Reads a data packet and returns the states for each of the four possibly connected
/// controllers.
///
/// If reading a single packet takes over 1 second, a timeout error with occur. In testing,
/// these packets are available at over 100 times per second.
///
/// Reasons an error may occur include:
///
/// * The USB device becomes disconnected
/// * The USB driver throws an error, fatal or not
/// * A USB message was successfully read, but it was not the right size
///
/// It is wise to treat all errors returned as fatal, and to reestablish the adapter connection
/// through `Scanner::find_adapter`.
pub fn read(&mut self) -> Result<[Option<Controller>; 4], Error> {
let timeout = Duration::from_secs(1);
match self.handle.read_interrupt(self.endpoint_in, &mut self.buffer, timeout) {
Ok(read) if read == 37 => Ok(Controller::parse_packet(&self.buffer)),
Ok(_) => Err(Error::InvalidPacket),
Err(err) => Err(Error::Usb(err)),
}
}
}
impl<'a> Drop for Listener<'a> {
fn drop(&mut self) {
if self.has_kernel_driver {
let _ = self.handle.attach_kernel_driver(self.interface);
}
}
}
/// The state of a GameCube controller at a given moment in time.
///
/// Note that the hardware will likely never report either extreme of the spectrum for any of the
/// analog inputs. For example, all `u8` fields may report only within the range of `30` to `225`.
/// Also, the hardware will likely never report a perfect `127` for the resting position of any of
/// the joystick axes. Keep in my that this library does not do any analog dead zone correction.
#[derive(Clone, Copy, Debug)]
pub struct Controller {
/// The classification of this controller.
pub kind: ControllerKind,
/// "A" button status.
pub a: bool,
/// "B" button status.
pub b: bool,
/// "X" button status.
pub x: bool,
/// "Y" button status.
pub y: bool,
/// Directional pad up button status.
pub up: bool,
/// Directional pad down button status.
pub down: bool,
/// Directional pad left button status.
pub left: bool,
/// Directional pad right button status.
pub right: bool,
/// Digital "L" button (full depression) status.
pub l: bool,
/// Digital "R" button (full depression) status.
pub r: bool,
/// The level of depression of the analog "L" button, `0` being completely up, `255` being
/// completely pressed in.
pub l_analog: u8,
/// The level of depression of the analog "R" button, `0` being completely up, `255` being
/// completely pressed in.
pub r_analog: u8,
/// "Z" button status.
pub z: bool,
/// Start button status.
pub start: bool,
/// The x-axis position of the primary analog joystick, `0` being completely left, `255` being
/// completely right.
pub stick_x: u8,
/// The y-axis position of the primary analog joystick, `0` being completely down, `255` being
/// completely up.
pub stick_y: u8,
/// The x-axis position of the secondary ("C") analog joystick, `0` being completely left,
/// `255` being completely right.
pub c_stick_x: u8,
/// The y-axis position of the secondary ("C") analog joystick, `0` being completely down,
/// `255` being completely up.
pub c_stick_y: u8,
}
impl Controller {
// # Panics
//
// Panics if `data` is not at least 9 bytes.
fn parse(data: &[u8]) -> Option<Controller> {
let kind = match data[0] >> 4 {
0 => return None,
1 => ControllerKind::Wired,
2 => ControllerKind::Wireless,
_ => ControllerKind::Unknown,
};
Some(Controller {
kind: kind,
a: data[1] & (1 << 0) != 0,
b: data[1] & (1 << 1) != 0,
x: data[1] & (1 << 2) != 0,
y: data[1] & (1 << 3) != 0,
left: data[1] & (1 << 4) != 0,
right: data[1] & (1 << 5) != 0,
down: data[1] & (1 << 6) != 0,
up: data[1] & (1 << 7) != 0,
start: data[2] & (1 << 0) != 0,
z: data[2] & (1 << 1) != 0,
r: data[2] & (1 << 2) != 0,
l: data[2] & (1 << 3) != 0,
stick_x: data[3],
stick_y: data[4],
c_stick_x: data[5],
c_stick_y: data[6],
l_analog: data[7],
r_analog: data[8],
})
}
// # Panics
//
// Panics if `data` is not at least 37 bytes.
fn parse_packet(data: &[u8]) -> [Option<Controller>; 4] {
[
Controller::parse(&data[1..10]),
Controller::parse(&data[10..19]),
Controller::parse(&data[19..28]),
Controller::parse(&data[28..37])
]
}
}
/// The classification of a GameCube controller.
#[derive(Clone, Copy, Debug)]
pub enum ControllerKind {
/// The controller is wired and likely supports rumble.
Wired,
/// The controller is wireless and likely does not supports rumble.
Wireless,
/// The controller is of an unknown type.
Unknown,
}
/// An error that occurs during usage of this library.
#[derive(Debug)]
pub enum Error {
/// A USB driver error that can occur at any time while utilizing this library.
Usb(libusb::Error),
/// A seemingly valid adapter was found, but its communication protocol could not be resolved.
UnrecognizedProtocol,
/// An invalid message was read from the adapter, likely due to a device or driver failure.
InvalidPacket,
}
impl StdError for Error {
fn description(&self) -> &str {
match *self {
Error::Usb(ref err) => err.description(),
Error::UnrecognizedProtocol => "USB adapter protocol unrecognized",
Error::InvalidPacket => "Invalid data packet received",
}
}
fn cause(&self) -> Option<&StdError> {
match *self {
Error::Usb(ref err) => err.cause(),
_ => None,
}
}
}
impl Display for Error {
fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
match *self {
Error::Usb(ref err) => Display::fmt(err, f),
_ => self.description().fmt(f),
}
}
}
impl From<libusb::Error> for Error {
fn from(err: libusb::Error) -> Error {
Error::Usb(err)
}
}
| Listener | identifier_name |
lib.rs | //! Provides an interface for reading input from a *Nintendo GameCube Controller Adapter for Wii U*
//! USB device.
//!
//! Third party clones such as the 4-port Mayflash adapter in "Wii U mode" are also supported.
//!
//! This library depends on `libusb`, which is available as a dynamic library on many platforms
//! including Linux, Windows, and Mac OS X.
//!
//! Currently, rumble commands are **unimplemented**.
//!
//! # Usage
//!
//! ```norun
//! extern crate gcnctrlusb;
//!
//! fn main() {
//! // Panics if `libusb` is not found or otherwise fails.
//! let mut scanner = gcnctrlusb::Scanner::new().unwrap();
//! // Panics if a valid device was not found.
//! let mut adapter = scanner.find_adapter().unwrap().unwrap();
//! // Panics if the USB driver fails to open a connection to the device.
//! let mut listener = adapter.listen().unwrap();
//!
//! while let Ok(controllers) = listener.read() {
//! println!("Controller port 1: {:?}", controllers[0]);
//! }
//! }
//! ```
extern crate libusb;
use libusb::{Context, Device, DeviceHandle};
use std::error::Error as StdError;
use std::fmt::Error as FmtError;
use std::fmt::{Display, Formatter};
use std::time::Duration;
const VENDOR_ID: u16 = 0x057e;
const PRODUCT_ID: u16 = 0x0337;
/// Searches for GameCube controller adapter USB devices.
pub struct Scanner {
context: Context,
}
impl Scanner {
/// Initializes USB driver connectivity and returns a `Scanner` instance.
///
/// An error is returned if `libusb` is not loaded or driver initialization otherwise fails.
pub fn new() -> Result<Scanner, Error> |
/// Returns the first adapter found, or `None` if no adapter was found.
pub fn find_adapter<'a>(&'a mut self) -> Result<Option<Adapter<'a>>, Error> {
for mut device in try!(self.context.devices()).iter() {
let desc = try!(device.device_descriptor());
if desc.vendor_id() == VENDOR_ID && desc.product_id() == PRODUCT_ID {
return Ok(Some(Adapter { device: device }));
}
}
Ok(None)
}
}
/// A wrapper around the unopened USB device.
pub struct Adapter<'a> {
device: Device<'a>,
}
impl<'a> Adapter<'a> {
/// Opens the USB device and initializes the hardware for reading controller data.
///
/// If the device is inaccessible or unrecognizable, an error is returned. For example, the
/// device will be inaccessible if a previous `Listener` for this adapter is still alive.
pub fn listen(&mut self) -> Result<Listener<'a>, Error> {
let mut handle = try!(self.device.open());
let config = try!(self.device.config_descriptor(0));
let mut interface_descriptor: Option<_> = None;
let mut endpoint_in = None;
let mut endpoint_out = None;
for interface in config.interfaces() {
interface_descriptor = None;
endpoint_in = None;
endpoint_out = None;
for desc in interface.descriptors() {
for endpoint in desc.endpoint_descriptors() {
match endpoint.direction() {
libusb::Direction::In => endpoint_in = Some(endpoint.address()),
libusb::Direction::Out => endpoint_out = Some(endpoint.address()),
}
}
interface_descriptor = Some(desc);
}
}
if interface_descriptor.is_none() || endpoint_in.is_none() || endpoint_out.is_none() {
return Err(Error::UnrecognizedProtocol);
}
let interface_descriptor = interface_descriptor.unwrap();
let interface_number = interface_descriptor.interface_number();
let has_kernel_driver = match handle.kernel_driver_active(interface_number) {
Ok(true) => {
try!(handle.detach_kernel_driver(interface_number));
true
},
_ => false,
};
try!(handle.set_active_configuration(config.number()));
try!(handle.claim_interface(interface_number));
let setting = interface_descriptor.setting_number();
try!(handle.set_alternate_setting(interface_number, setting));
// Tell the adapter to start sending packets.
let timeout = Duration::from_secs(1);
try!(handle.write_interrupt(endpoint_out.unwrap(), &[0x13], timeout));
Ok(Listener {
handle: handle,
buffer: [0; 37],
has_kernel_driver: has_kernel_driver,
interface: interface_number,
endpoint_in: endpoint_in.unwrap(),
})
}
}
/// An interface that reads packets of controller data on each iteration.
///
/// This interface owns an opened handle to the USB device that is closed once the `Listener`
/// instance is dropped.
pub struct Listener<'a> {
handle: DeviceHandle<'a>,
buffer: [u8; 37],
has_kernel_driver: bool,
interface: u8,
endpoint_in: u8,
}
impl<'a> Listener<'a> {
/// Reads a data packet and returns the states for each of the four possibly connected
/// controllers.
///
/// If reading a single packet takes over 1 second, a timeout error with occur. In testing,
/// these packets are available at over 100 times per second.
///
/// Reasons an error may occur include:
///
/// * The USB device becomes disconnected
/// * The USB driver throws an error, fatal or not
/// * A USB message was successfully read, but it was not the right size
///
/// It is wise to treat all errors returned as fatal, and to reestablish the adapter connection
/// through `Scanner::find_adapter`.
pub fn read(&mut self) -> Result<[Option<Controller>; 4], Error> {
let timeout = Duration::from_secs(1);
match self.handle.read_interrupt(self.endpoint_in, &mut self.buffer, timeout) {
Ok(read) if read == 37 => Ok(Controller::parse_packet(&self.buffer)),
Ok(_) => Err(Error::InvalidPacket),
Err(err) => Err(Error::Usb(err)),
}
}
}
impl<'a> Drop for Listener<'a> {
fn drop(&mut self) {
if self.has_kernel_driver {
let _ = self.handle.attach_kernel_driver(self.interface);
}
}
}
/// The state of a GameCube controller at a given moment in time.
///
/// Note that the hardware will likely never report either extreme of the spectrum for any of the
/// analog inputs. For example, all `u8` fields may report only within the range of `30` to `225`.
/// Also, the hardware will likely never report a perfect `127` for the resting position of any of
/// the joystick axes. Keep in my that this library does not do any analog dead zone correction.
#[derive(Clone, Copy, Debug)]
pub struct Controller {
/// The classification of this controller.
pub kind: ControllerKind,
/// "A" button status.
pub a: bool,
/// "B" button status.
pub b: bool,
/// "X" button status.
pub x: bool,
/// "Y" button status.
pub y: bool,
/// Directional pad up button status.
pub up: bool,
/// Directional pad down button status.
pub down: bool,
/// Directional pad left button status.
pub left: bool,
/// Directional pad right button status.
pub right: bool,
/// Digital "L" button (full depression) status.
pub l: bool,
/// Digital "R" button (full depression) status.
pub r: bool,
/// The level of depression of the analog "L" button, `0` being completely up, `255` being
/// completely pressed in.
pub l_analog: u8,
/// The level of depression of the analog "R" button, `0` being completely up, `255` being
/// completely pressed in.
pub r_analog: u8,
/// "Z" button status.
pub z: bool,
/// Start button status.
pub start: bool,
/// The x-axis position of the primary analog joystick, `0` being completely left, `255` being
/// completely right.
pub stick_x: u8,
/// The y-axis position of the primary analog joystick, `0` being completely down, `255` being
/// completely up.
pub stick_y: u8,
/// The x-axis position of the secondary ("C") analog joystick, `0` being completely left,
/// `255` being completely right.
pub c_stick_x: u8,
/// The y-axis position of the secondary ("C") analog joystick, `0` being completely down,
/// `255` being completely up.
pub c_stick_y: u8,
}
impl Controller {
// # Panics
//
// Panics if `data` is not at least 9 bytes.
fn parse(data: &[u8]) -> Option<Controller> {
let kind = match data[0] >> 4 {
0 => return None,
1 => ControllerKind::Wired,
2 => ControllerKind::Wireless,
_ => ControllerKind::Unknown,
};
Some(Controller {
kind: kind,
a: data[1] & (1 << 0) != 0,
b: data[1] & (1 << 1) != 0,
x: data[1] & (1 << 2) != 0,
y: data[1] & (1 << 3) != 0,
left: data[1] & (1 << 4) != 0,
right: data[1] & (1 << 5) != 0,
down: data[1] & (1 << 6) != 0,
up: data[1] & (1 << 7) != 0,
start: data[2] & (1 << 0) != 0,
z: data[2] & (1 << 1) != 0,
r: data[2] & (1 << 2) != 0,
l: data[2] & (1 << 3) != 0,
stick_x: data[3],
stick_y: data[4],
c_stick_x: data[5],
c_stick_y: data[6],
l_analog: data[7],
r_analog: data[8],
})
}
// # Panics
//
// Panics if `data` is not at least 37 bytes.
fn parse_packet(data: &[u8]) -> [Option<Controller>; 4] {
[
Controller::parse(&data[1..10]),
Controller::parse(&data[10..19]),
Controller::parse(&data[19..28]),
Controller::parse(&data[28..37])
]
}
}
/// The classification of a GameCube controller.
#[derive(Clone, Copy, Debug)]
pub enum ControllerKind {
/// The controller is wired and likely supports rumble.
Wired,
/// The controller is wireless and likely does not supports rumble.
Wireless,
/// The controller is of an unknown type.
Unknown,
}
/// An error that occurs during usage of this library.
#[derive(Debug)]
pub enum Error {
/// A USB driver error that can occur at any time while utilizing this library.
Usb(libusb::Error),
/// A seemingly valid adapter was found, but its communication protocol could not be resolved.
UnrecognizedProtocol,
/// An invalid message was read from the adapter, likely due to a device or driver failure.
InvalidPacket,
}
impl StdError for Error {
fn description(&self) -> &str {
match *self {
Error::Usb(ref err) => err.description(),
Error::UnrecognizedProtocol => "USB adapter protocol unrecognized",
Error::InvalidPacket => "Invalid data packet received",
}
}
fn cause(&self) -> Option<&StdError> {
match *self {
Error::Usb(ref err) => err.cause(),
_ => None,
}
}
}
impl Display for Error {
fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
match *self {
Error::Usb(ref err) => Display::fmt(err, f),
_ => self.description().fmt(f),
}
}
}
impl From<libusb::Error> for Error {
fn from(err: libusb::Error) -> Error {
Error::Usb(err)
}
}
| {
Ok(Scanner { context: try!(Context::new()) })
} | identifier_body |
extern-generic.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// We specify -Z incremental here because we want to test the partitioning for
// incremental compilation
// compile-flags:-Zprint-mono-items=eager -Zincremental=tmp/partitioning-tests/extern-generic -Zshare-generics=y
#![allow(dead_code)]
#![crate_type="lib"]
// aux-build:cgu_generic_function.rs
extern crate cgu_generic_function;
//~ MONO_ITEM fn extern_generic::user[0] @@ extern_generic[Internal]
fn user() |
mod mod1 {
use cgu_generic_function;
//~ MONO_ITEM fn extern_generic::mod1[0]::user[0] @@ extern_generic-mod1[Internal]
fn user() {
let _ = cgu_generic_function::foo("abc");
}
mod mod1 {
use cgu_generic_function;
//~ MONO_ITEM fn extern_generic::mod1[0]::mod1[0]::user[0] @@ extern_generic-mod1-mod1[Internal]
fn user() {
let _ = cgu_generic_function::foo("abc");
}
}
}
mod mod2 {
use cgu_generic_function;
//~ MONO_ITEM fn extern_generic::mod2[0]::user[0] @@ extern_generic-mod2[Internal]
fn user() {
let _ = cgu_generic_function::foo("abc");
}
}
mod mod3 {
//~ MONO_ITEM fn extern_generic::mod3[0]::non_user[0] @@ extern_generic-mod3[Internal]
fn non_user() {}
}
// Make sure the two generic functions from the extern crate get instantiated
// once for the current crate
//~ MONO_ITEM fn cgu_generic_function::foo[0]<&str> @@ cgu_generic_function-in-extern_generic.volatile[External]
//~ MONO_ITEM fn cgu_generic_function::bar[0]<&str> @@ cgu_generic_function-in-extern_generic.volatile[External]
| {
let _ = cgu_generic_function::foo("abc");
} | identifier_body |
extern-generic.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// We specify -Z incremental here because we want to test the partitioning for
// incremental compilation
// compile-flags:-Zprint-mono-items=eager -Zincremental=tmp/partitioning-tests/extern-generic -Zshare-generics=y
#![allow(dead_code)]
#![crate_type="lib"]
// aux-build:cgu_generic_function.rs
extern crate cgu_generic_function;
//~ MONO_ITEM fn extern_generic::user[0] @@ extern_generic[Internal]
fn user() {
let _ = cgu_generic_function::foo("abc");
}
mod mod1 {
use cgu_generic_function;
//~ MONO_ITEM fn extern_generic::mod1[0]::user[0] @@ extern_generic-mod1[Internal]
fn user() {
let _ = cgu_generic_function::foo("abc");
} |
mod mod1 {
use cgu_generic_function;
//~ MONO_ITEM fn extern_generic::mod1[0]::mod1[0]::user[0] @@ extern_generic-mod1-mod1[Internal]
fn user() {
let _ = cgu_generic_function::foo("abc");
}
}
}
mod mod2 {
use cgu_generic_function;
//~ MONO_ITEM fn extern_generic::mod2[0]::user[0] @@ extern_generic-mod2[Internal]
fn user() {
let _ = cgu_generic_function::foo("abc");
}
}
mod mod3 {
//~ MONO_ITEM fn extern_generic::mod3[0]::non_user[0] @@ extern_generic-mod3[Internal]
fn non_user() {}
}
// Make sure the two generic functions from the extern crate get instantiated
// once for the current crate
//~ MONO_ITEM fn cgu_generic_function::foo[0]<&str> @@ cgu_generic_function-in-extern_generic.volatile[External]
//~ MONO_ITEM fn cgu_generic_function::bar[0]<&str> @@ cgu_generic_function-in-extern_generic.volatile[External] | random_line_split | |
extern-generic.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// We specify -Z incremental here because we want to test the partitioning for
// incremental compilation
// compile-flags:-Zprint-mono-items=eager -Zincremental=tmp/partitioning-tests/extern-generic -Zshare-generics=y
#![allow(dead_code)]
#![crate_type="lib"]
// aux-build:cgu_generic_function.rs
extern crate cgu_generic_function;
//~ MONO_ITEM fn extern_generic::user[0] @@ extern_generic[Internal]
fn user() {
let _ = cgu_generic_function::foo("abc");
}
mod mod1 {
use cgu_generic_function;
//~ MONO_ITEM fn extern_generic::mod1[0]::user[0] @@ extern_generic-mod1[Internal]
fn user() {
let _ = cgu_generic_function::foo("abc");
}
mod mod1 {
use cgu_generic_function;
//~ MONO_ITEM fn extern_generic::mod1[0]::mod1[0]::user[0] @@ extern_generic-mod1-mod1[Internal]
fn user() {
let _ = cgu_generic_function::foo("abc");
}
}
}
mod mod2 {
use cgu_generic_function;
//~ MONO_ITEM fn extern_generic::mod2[0]::user[0] @@ extern_generic-mod2[Internal]
fn | () {
let _ = cgu_generic_function::foo("abc");
}
}
mod mod3 {
//~ MONO_ITEM fn extern_generic::mod3[0]::non_user[0] @@ extern_generic-mod3[Internal]
fn non_user() {}
}
// Make sure the two generic functions from the extern crate get instantiated
// once for the current crate
//~ MONO_ITEM fn cgu_generic_function::foo[0]<&str> @@ cgu_generic_function-in-extern_generic.volatile[External]
//~ MONO_ITEM fn cgu_generic_function::bar[0]<&str> @@ cgu_generic_function-in-extern_generic.volatile[External]
| user | identifier_name |
functions_74.js | var searchData=
[ | ['teststruct',['testStruct',['../classurl_validator.html#a337a9edaa44e76bda5a7ed3a345b0b78',1,'urlValidator']]],
['testurls',['testUrls',['../classparseur_fic.html#ad2c99c1283f03ac105a2927aa9826021',1,'parseurFic']]],
['testvie',['testVie',['../classurl_validator.html#a9993e82ddcaf00c655e3ad9221a10232',1,'urlValidator']]]
]; | ['tester',['tester',['../classcontrol_vue.html#a92d898224293b741e5c6d3a3576a2193',1,'controlVue']]],
['testerappuyer',['testerAppuyer',['../class_vue.html#a7fb0d20950a6596a3eef78e244628682',1,'Vue']]],
['testerdossier',['testerDossier',['../classcontrol_vue.html#a630d60f73a0cdb77d2f7f92050983da7',1,'controlVue']]],
['testerfic',['testerFic',['../classcontrol_vue.html#a8139fd2a944a2fca901809edd2468a1e',1,'controlVue']]], | random_line_split |
htmlstyleelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::Parser as CssParser;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::HTMLStyleElementBinding;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::element::Element;
use dom::htmlelement::HTMLElement;
use dom::node::{ChildrenMutation, Node, document_from_node, window_from_node};
use dom::virtualmethods::VirtualMethods;
use script_layout_interface::message::Msg;
use std::sync::Arc;
use string_cache::Atom;
use style::media_queries::parse_media_query_list;
use style::parser::ParserContextExtraData;
use style::stylesheets::{Stylesheet, Origin};
#[dom_struct]
pub struct HTMLStyleElement {
htmlelement: HTMLElement,
stylesheet: DOMRefCell<Option<Arc<Stylesheet>>>,
}
impl HTMLStyleElement {
fn new_inherited(local_name: Atom,
prefix: Option<DOMString>,
document: &Document) -> HTMLStyleElement {
HTMLStyleElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
stylesheet: DOMRefCell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: Atom,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLStyleElement> |
pub fn parse_own_css(&self) {
let node = self.upcast::<Node>();
let element = self.upcast::<Element>();
assert!(node.is_in_doc());
let win = window_from_node(node);
let url = win.get_url();
let mq_attribute = element.get_attribute(&ns!(), &atom!("media"));
let mq_str = match mq_attribute {
Some(a) => String::from(&**a.value()),
None => String::new(),
};
let data = node.GetTextContent().expect("Element.textContent must be a string");
let mut sheet = Stylesheet::from_str(&data, url, Origin::Author, win.css_error_reporter(),
ParserContextExtraData::default());
let mut css_parser = CssParser::new(&mq_str);
let media = parse_media_query_list(&mut css_parser);
sheet.set_media(Some(media));
let sheet = Arc::new(sheet);
win.layout_chan().send(Msg::AddStylesheet(sheet.clone())).unwrap();
*self.stylesheet.borrow_mut() = Some(sheet);
let doc = document_from_node(self);
doc.r().invalidate_stylesheets();
}
pub fn get_stylesheet(&self) -> Option<Arc<Stylesheet>> {
self.stylesheet.borrow().clone()
}
}
impl VirtualMethods for HTMLStyleElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn children_changed(&self, mutation: &ChildrenMutation) {
if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
if self.upcast::<Node>().is_in_doc() {
self.parse_own_css();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
if tree_in_doc {
self.parse_own_css();
}
}
}
| {
Node::reflect_node(box HTMLStyleElement::new_inherited(local_name, prefix, document),
document,
HTMLStyleElementBinding::Wrap)
} | identifier_body |
htmlstyleelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::Parser as CssParser;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::HTMLStyleElementBinding;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::element::Element;
use dom::htmlelement::HTMLElement;
use dom::node::{ChildrenMutation, Node, document_from_node, window_from_node};
use dom::virtualmethods::VirtualMethods;
use script_layout_interface::message::Msg;
use std::sync::Arc;
use string_cache::Atom;
use style::media_queries::parse_media_query_list;
use style::parser::ParserContextExtraData;
use style::stylesheets::{Stylesheet, Origin};
#[dom_struct]
pub struct HTMLStyleElement {
htmlelement: HTMLElement,
stylesheet: DOMRefCell<Option<Arc<Stylesheet>>>,
}
impl HTMLStyleElement {
fn new_inherited(local_name: Atom,
prefix: Option<DOMString>,
document: &Document) -> HTMLStyleElement {
HTMLStyleElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
stylesheet: DOMRefCell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: Atom,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLStyleElement> {
Node::reflect_node(box HTMLStyleElement::new_inherited(local_name, prefix, document),
document,
HTMLStyleElementBinding::Wrap)
}
pub fn parse_own_css(&self) {
let node = self.upcast::<Node>();
let element = self.upcast::<Element>();
assert!(node.is_in_doc());
let win = window_from_node(node);
let url = win.get_url();
let mq_attribute = element.get_attribute(&ns!(), &atom!("media"));
let mq_str = match mq_attribute {
Some(a) => String::from(&**a.value()),
None => String::new(),
};
let data = node.GetTextContent().expect("Element.textContent must be a string");
let mut sheet = Stylesheet::from_str(&data, url, Origin::Author, win.css_error_reporter(),
ParserContextExtraData::default());
let mut css_parser = CssParser::new(&mq_str);
let media = parse_media_query_list(&mut css_parser);
sheet.set_media(Some(media));
let sheet = Arc::new(sheet);
win.layout_chan().send(Msg::AddStylesheet(sheet.clone())).unwrap();
*self.stylesheet.borrow_mut() = Some(sheet);
let doc = document_from_node(self);
doc.r().invalidate_stylesheets();
}
pub fn get_stylesheet(&self) -> Option<Arc<Stylesheet>> {
self.stylesheet.borrow().clone()
}
}
impl VirtualMethods for HTMLStyleElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn children_changed(&self, mutation: &ChildrenMutation) {
if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
if self.upcast::<Node>().is_in_doc() {
self.parse_own_css();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
if tree_in_doc |
}
}
| {
self.parse_own_css();
} | conditional_block |
htmlstyleelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::Parser as CssParser;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::HTMLStyleElementBinding;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::element::Element;
use dom::htmlelement::HTMLElement;
use dom::node::{ChildrenMutation, Node, document_from_node, window_from_node};
use dom::virtualmethods::VirtualMethods;
use script_layout_interface::message::Msg;
use std::sync::Arc;
use string_cache::Atom;
use style::media_queries::parse_media_query_list;
use style::parser::ParserContextExtraData;
use style::stylesheets::{Stylesheet, Origin};
#[dom_struct]
pub struct HTMLStyleElement {
htmlelement: HTMLElement,
stylesheet: DOMRefCell<Option<Arc<Stylesheet>>>,
}
impl HTMLStyleElement {
fn new_inherited(local_name: Atom,
prefix: Option<DOMString>,
document: &Document) -> HTMLStyleElement {
HTMLStyleElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
stylesheet: DOMRefCell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: Atom,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLStyleElement> {
Node::reflect_node(box HTMLStyleElement::new_inherited(local_name, prefix, document),
document,
HTMLStyleElementBinding::Wrap)
}
pub fn parse_own_css(&self) {
let node = self.upcast::<Node>();
let element = self.upcast::<Element>();
assert!(node.is_in_doc());
let win = window_from_node(node);
let url = win.get_url();
let mq_attribute = element.get_attribute(&ns!(), &atom!("media"));
let mq_str = match mq_attribute {
Some(a) => String::from(&**a.value()),
None => String::new(),
};
let data = node.GetTextContent().expect("Element.textContent must be a string");
let mut sheet = Stylesheet::from_str(&data, url, Origin::Author, win.css_error_reporter(),
ParserContextExtraData::default());
let mut css_parser = CssParser::new(&mq_str);
let media = parse_media_query_list(&mut css_parser);
sheet.set_media(Some(media));
let sheet = Arc::new(sheet);
win.layout_chan().send(Msg::AddStylesheet(sheet.clone())).unwrap();
*self.stylesheet.borrow_mut() = Some(sheet);
let doc = document_from_node(self);
doc.r().invalidate_stylesheets();
}
pub fn get_stylesheet(&self) -> Option<Arc<Stylesheet>> {
self.stylesheet.borrow().clone()
}
}
impl VirtualMethods for HTMLStyleElement {
fn super_type(&self) -> Option<&VirtualMethods> { | if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
if self.upcast::<Node>().is_in_doc() {
self.parse_own_css();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
if tree_in_doc {
self.parse_own_css();
}
}
} | Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn children_changed(&self, mutation: &ChildrenMutation) { | random_line_split |
htmlstyleelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::Parser as CssParser;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::HTMLStyleElementBinding;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::element::Element;
use dom::htmlelement::HTMLElement;
use dom::node::{ChildrenMutation, Node, document_from_node, window_from_node};
use dom::virtualmethods::VirtualMethods;
use script_layout_interface::message::Msg;
use std::sync::Arc;
use string_cache::Atom;
use style::media_queries::parse_media_query_list;
use style::parser::ParserContextExtraData;
use style::stylesheets::{Stylesheet, Origin};
#[dom_struct]
pub struct HTMLStyleElement {
htmlelement: HTMLElement,
stylesheet: DOMRefCell<Option<Arc<Stylesheet>>>,
}
impl HTMLStyleElement {
fn | (local_name: Atom,
prefix: Option<DOMString>,
document: &Document) -> HTMLStyleElement {
HTMLStyleElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
stylesheet: DOMRefCell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: Atom,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLStyleElement> {
Node::reflect_node(box HTMLStyleElement::new_inherited(local_name, prefix, document),
document,
HTMLStyleElementBinding::Wrap)
}
pub fn parse_own_css(&self) {
let node = self.upcast::<Node>();
let element = self.upcast::<Element>();
assert!(node.is_in_doc());
let win = window_from_node(node);
let url = win.get_url();
let mq_attribute = element.get_attribute(&ns!(), &atom!("media"));
let mq_str = match mq_attribute {
Some(a) => String::from(&**a.value()),
None => String::new(),
};
let data = node.GetTextContent().expect("Element.textContent must be a string");
let mut sheet = Stylesheet::from_str(&data, url, Origin::Author, win.css_error_reporter(),
ParserContextExtraData::default());
let mut css_parser = CssParser::new(&mq_str);
let media = parse_media_query_list(&mut css_parser);
sheet.set_media(Some(media));
let sheet = Arc::new(sheet);
win.layout_chan().send(Msg::AddStylesheet(sheet.clone())).unwrap();
*self.stylesheet.borrow_mut() = Some(sheet);
let doc = document_from_node(self);
doc.r().invalidate_stylesheets();
}
pub fn get_stylesheet(&self) -> Option<Arc<Stylesheet>> {
self.stylesheet.borrow().clone()
}
}
impl VirtualMethods for HTMLStyleElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn children_changed(&self, mutation: &ChildrenMutation) {
if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
if self.upcast::<Node>().is_in_doc() {
self.parse_own_css();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
if tree_in_doc {
self.parse_own_css();
}
}
}
| new_inherited | identifier_name |
constref.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
const CONST_REF: &[u8; 3] = b"foo";
trait Foo {
const CONST_REF_DEFAULT: &'static [u8; 3] = b"bar";
const CONST_REF: &'static [u8; 3];
}
impl Foo for i32 {
const CONST_REF: &'static [u8; 3] = b"jjj";
}
impl Foo for i64 {
const CONST_REF_DEFAULT: &'static [u8; 3] = b"ggg";
const CONST_REF: &'static [u8; 3] = b"fff";
}
// Check that (associated and free) const references are not mistaken for a
// non-reference pattern (in which case they would be auto-dereferenced, making
// the types mismatched).
fn const_ref() -> bool {
let f = b"foo";
match f {
CONST_REF => true,
_ => false,
}
}
fn associated_const_ref() -> bool {
match (b"bar", b"jjj", b"ggg", b"fff") {
(i32::CONST_REF_DEFAULT, i32::CONST_REF, i64::CONST_REF_DEFAULT, i64::CONST_REF) => true,
_ => false,
}
}
pub fn main() | {
assert!(const_ref());
assert!(associated_const_ref());
} | identifier_body | |
constref.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
const CONST_REF: &[u8; 3] = b"foo";
trait Foo {
const CONST_REF_DEFAULT: &'static [u8; 3] = b"bar";
const CONST_REF: &'static [u8; 3];
}
impl Foo for i32 {
const CONST_REF: &'static [u8; 3] = b"jjj";
}
impl Foo for i64 {
const CONST_REF_DEFAULT: &'static [u8; 3] = b"ggg";
const CONST_REF: &'static [u8; 3] = b"fff";
}
// Check that (associated and free) const references are not mistaken for a
// non-reference pattern (in which case they would be auto-dereferenced, making
// the types mismatched).
fn const_ref() -> bool {
let f = b"foo";
match f {
CONST_REF => true,
_ => false,
}
}
fn associated_const_ref() -> bool {
match (b"bar", b"jjj", b"ggg", b"fff") {
(i32::CONST_REF_DEFAULT, i32::CONST_REF, i64::CONST_REF_DEFAULT, i64::CONST_REF) => true,
_ => false,
}
}
pub fn | () {
assert!(const_ref());
assert!(associated_const_ref());
}
| main | identifier_name |
constref.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
const CONST_REF: &[u8; 3] = b"foo";
trait Foo {
const CONST_REF_DEFAULT: &'static [u8; 3] = b"bar";
const CONST_REF: &'static [u8; 3];
}
impl Foo for i32 {
const CONST_REF: &'static [u8; 3] = b"jjj";
}
impl Foo for i64 {
const CONST_REF_DEFAULT: &'static [u8; 3] = b"ggg";
const CONST_REF: &'static [u8; 3] = b"fff";
}
// Check that (associated and free) const references are not mistaken for a
// non-reference pattern (in which case they would be auto-dereferenced, making
// the types mismatched).
fn const_ref() -> bool {
let f = b"foo";
match f {
CONST_REF => true,
_ => false,
}
}
fn associated_const_ref() -> bool {
match (b"bar", b"jjj", b"ggg", b"fff") {
(i32::CONST_REF_DEFAULT, i32::CONST_REF, i64::CONST_REF_DEFAULT, i64::CONST_REF) => true,
_ => false,
}
}
| pub fn main() {
assert!(const_ref());
assert!(associated_const_ref());
} | random_line_split | |
case_info_grammar.py | from parsimonious import Grammar
from parsimonious import NodeVisitor
from grammar_dev.grammars.CustomNodeVisitorFactory import CustomVisitorFactory
grammars = [
r"""
# Nonterminals
case_info = (new_line? assigned_filed_initiated the_rest) /
(new_line? line assigned_filed_initiated the_rest) /
(new_line? line line assigned_filed_initiated the_rest)
assigned_filed_initiated = ws* judge_assigned ws ws ws+ date_filed ws ws ws+ date_initiated ws* new_line
judge_assigned = judge_assigned_label ws judge_assigned_name?
judge_assigned_name = (single_content_char !(ws ws))+ single_content_char
date_filed = date_filed_label ws date_filed_date #"Date Filed: 01/03/2011"
date_filed_date = date &ws
date_initiated = date_initiated_label ws date_initiated_date #"Initiation Date: 01/03/2011"
date_initiated_date = date &new_line
the_rest = line*
# Silent helper nonterminals (don't include in list of terminals)
line = single_content_char* new_line?
date = number forward_slash number forward_slash number
# Silent Terminals (should be consumed and not returned. Don't include
# in list of terminals.)
judge_assigned_label = "Judge Assigned:"
date_filed_label = "Date Filed:"
date_initiated_label = "Initiation Date:"
# Loud Terminals (include in list of terminals)
number = ~"[0-9]"+
forward_slash = "/"
single_content_char = ~"[a-z0-9`\ \"=_\.,\-\(\)\'\$\?\*%;:#&\[\]/@§]"i
new_line = "\n"
ws = " "
""",
r"""
# Nonterminals
case_info = new_line? line* new_line*
line = single_content_char* new_line
# Terminals
single_content_char = ~"[a-z0-9`\ \"=_\.,\-\(\)\'\$\?\*%;:#&\[\]/@§]"i
new_line = "\n"
"""
]
nonterminals = ["case_info", "judge_assigned",
"date_filed", "date_initiated", "the_rest"]
terminals = ["single_content_char", "new_line", "judge_assigned_name",
"number", "forward_slash"]
def pa | ection_text):
grammar = Grammar(grammars[0])
custom_visitor = CustomVisitorFactory(terminals, nonterminals, dict()).create_instance()
root = grammar.parse(section_text)
# print("Parse tree:")
# print(root.prettily())
xml = custom_visitor.visit(root)
# print(xml)
return xml
| rse(s | identifier_name |
case_info_grammar.py | from parsimonious import Grammar
from parsimonious import NodeVisitor
from grammar_dev.grammars.CustomNodeVisitorFactory import CustomVisitorFactory
grammars = [
r"""
# Nonterminals
case_info = (new_line? assigned_filed_initiated the_rest) /
(new_line? line assigned_filed_initiated the_rest) /
(new_line? line line assigned_filed_initiated the_rest)
assigned_filed_initiated = ws* judge_assigned ws ws ws+ date_filed ws ws ws+ date_initiated ws* new_line
judge_assigned = judge_assigned_label ws judge_assigned_name?
judge_assigned_name = (single_content_char !(ws ws))+ single_content_char
date_filed = date_filed_label ws date_filed_date #"Date Filed: 01/03/2011"
date_filed_date = date &ws
date_initiated = date_initiated_label ws date_initiated_date #"Initiation Date: 01/03/2011"
date_initiated_date = date &new_line
the_rest = line*
# Silent helper nonterminals (don't include in list of terminals)
line = single_content_char* new_line?
date = number forward_slash number forward_slash number
# Silent Terminals (should be consumed and not returned. Don't include
# in list of terminals.)
judge_assigned_label = "Judge Assigned:"
date_filed_label = "Date Filed:"
date_initiated_label = "Initiation Date:"
# Loud Terminals (include in list of terminals)
number = ~"[0-9]"+
forward_slash = "/"
single_content_char = ~"[a-z0-9`\ \"=_\.,\-\(\)\'\$\?\*%;:#&\[\]/@§]"i
new_line = "\n"
ws = " "
""",
r"""
# Nonterminals
case_info = new_line? line* new_line*
line = single_content_char* new_line
# Terminals
single_content_char = ~"[a-z0-9`\ \"=_\.,\-\(\)\'\$\?\*%;:#&\[\]/@§]"i
new_line = "\n"
"""
]
nonterminals = ["case_info", "judge_assigned",
"date_filed", "date_initiated", "the_rest"]
terminals = ["single_content_char", "new_line", "judge_assigned_name",
"number", "forward_slash"]
def parse(section_text):
gr | ammar = Grammar(grammars[0])
custom_visitor = CustomVisitorFactory(terminals, nonterminals, dict()).create_instance()
root = grammar.parse(section_text)
# print("Parse tree:")
# print(root.prettily())
xml = custom_visitor.visit(root)
# print(xml)
return xml
| identifier_body | |
case_info_grammar.py | from parsimonious import Grammar
from parsimonious import NodeVisitor
from grammar_dev.grammars.CustomNodeVisitorFactory import CustomVisitorFactory
grammars = [
r"""
# Nonterminals
case_info = (new_line? assigned_filed_initiated the_rest) /
(new_line? line assigned_filed_initiated the_rest) /
(new_line? line line assigned_filed_initiated the_rest)
assigned_filed_initiated = ws* judge_assigned ws ws ws+ date_filed ws ws ws+ date_initiated ws* new_line
|
date_initiated = date_initiated_label ws date_initiated_date #"Initiation Date: 01/03/2011"
date_initiated_date = date &new_line
the_rest = line*
# Silent helper nonterminals (don't include in list of terminals)
line = single_content_char* new_line?
date = number forward_slash number forward_slash number
# Silent Terminals (should be consumed and not returned. Don't include
# in list of terminals.)
judge_assigned_label = "Judge Assigned:"
date_filed_label = "Date Filed:"
date_initiated_label = "Initiation Date:"
# Loud Terminals (include in list of terminals)
number = ~"[0-9]"+
forward_slash = "/"
single_content_char = ~"[a-z0-9`\ \"=_\.,\-\(\)\'\$\?\*%;:#&\[\]/@§]"i
new_line = "\n"
ws = " "
""",
r"""
# Nonterminals
case_info = new_line? line* new_line*
line = single_content_char* new_line
# Terminals
single_content_char = ~"[a-z0-9`\ \"=_\.,\-\(\)\'\$\?\*%;:#&\[\]/@§]"i
new_line = "\n"
"""
]
nonterminals = ["case_info", "judge_assigned",
"date_filed", "date_initiated", "the_rest"]
terminals = ["single_content_char", "new_line", "judge_assigned_name",
"number", "forward_slash"]
def parse(section_text):
grammar = Grammar(grammars[0])
custom_visitor = CustomVisitorFactory(terminals, nonterminals, dict()).create_instance()
root = grammar.parse(section_text)
# print("Parse tree:")
# print(root.prettily())
xml = custom_visitor.visit(root)
# print(xml)
return xml | judge_assigned = judge_assigned_label ws judge_assigned_name?
judge_assigned_name = (single_content_char !(ws ws))+ single_content_char
date_filed = date_filed_label ws date_filed_date #"Date Filed: 01/03/2011"
date_filed_date = date &ws | random_line_split |
imgt2fasta.py | #! /usr/bin/env python
# Copyright 2014 Uri Laserson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import optparse
import vdj
parser = optparse.OptionParser()
(options, args) = parser.parse_args()
if len(args) == 2:
inhandle = open(args[0],'r')
outhandle = open(args[1],'w')
elif len(args) == 1:
inhandle = open(args[0],'r')
outhandle = sys.stdout
elif len(args) == 0:
inhandle = sys.stdin
outhandle = sys.stdout
else:
raise Exception, "Wrong number of arguments."
for chain in vdj.parse_imgt(inhandle):
# print >>outhandle, chain.format('fasta') # causes chain.description output instead of chain.id | print >>outhandle, ">%s\n%s" % (chain.id,chain.seq) | random_line_split | |
imgt2fasta.py | #! /usr/bin/env python
# Copyright 2014 Uri Laserson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import optparse
import vdj
parser = optparse.OptionParser()
(options, args) = parser.parse_args()
if len(args) == 2:
inhandle = open(args[0],'r')
outhandle = open(args[1],'w')
elif len(args) == 1:
inhandle = open(args[0],'r')
outhandle = sys.stdout
elif len(args) == 0:
|
else:
raise Exception, "Wrong number of arguments."
for chain in vdj.parse_imgt(inhandle):
# print >>outhandle, chain.format('fasta') # causes chain.description output instead of chain.id
print >>outhandle, ">%s\n%s" % (chain.id,chain.seq)
| inhandle = sys.stdin
outhandle = sys.stdout | conditional_block |
query16.rs | use timely::dataflow::*;
// use timely::dataflow::operators::*;
use timely::dataflow::operators::probe::Handle as ProbeHandle;
// use differential_dataflow::AsCollection;
use differential_dataflow::operators::*;
use differential_dataflow::lattice::Lattice;
use ::Collections;
// -- $ID$
// -- TPC-H/TPC-R Parts/Supplier Relationship Query (Q16)
// -- Functional Query Definition
// -- Approved February 1998
// :x
// :o
// select
// p_brand,
// p_type,
// p_size,
// count(distinct ps_suppkey) as supplier_cnt
// from
// partsupp,
// part
// where
// p_partkey = ps_partkey
// and p_brand <> ':1'
// and p_type not like ':2%'
// and p_size in (:3, :4, :5, :6, :7, :8, :9, :10)
// and ps_suppkey not in (
// select
// s_suppkey
// from
// supplier
// where
// s_comment like '%Customer%Complaints%'
// )
// group by
// p_brand,
// p_type,
// p_size
// order by
// supplier_cnt desc,
// p_brand,
// p_type,
// p_size;
// :n -1
fn starts_with(source: &[u8], query: &[u8]) -> bool {
source.len() >= query.len() && &source[..query.len()] == query
}
fn substring2(source: &[u8], query1: &[u8], query2: &[u8]) -> bool {
if let Some(pos) = (0 .. (source.len() - query1.len())).position(|i| &source[i..][..query1.len()] == query1) {
(pos .. query2.len()).any(|i| &source[i..][..query2.len()] == query2)
}
else { false }
}
pub fn query<G: Scope>(collections: &mut Collections<G>) -> ProbeHandle<G::Timestamp>
where G::Timestamp: Lattice+Ord {
println!("TODO: query 16 could use a count_u if it joins after to re-collect its attributes");
let suppliers =
collections
.suppliers()
.flat_map(|s|
if substring2(s.comment.as_bytes(), b"Customer", b"Complaints") {
Some((s.supp_key))
}
else { None }
);
let parts = collections
.partsupps()
.map(|ps| (ps.supp_key, ps.part_key))
.antijoin_u(&suppliers)
.map(|(_supp_key, part_key)| part_key);
collections
.parts()
.flat_map(|p|
if !starts_with(&p.brand, b"Brand45") && !starts_with(&p.typ.as_bytes(), b"MEDIUM POLISHED") && [49, 14, 23, 45, 19, 3, 36, 9].contains(&p.size) {
Some((p.part_key, (p.brand, p.typ.to_string(), p.size)))
}
else |
)
.semijoin_u(&parts)
.count()
.probe()
} | { None } | conditional_block |
query16.rs | use timely::dataflow::*;
// use timely::dataflow::operators::*;
use timely::dataflow::operators::probe::Handle as ProbeHandle;
// use differential_dataflow::AsCollection;
use differential_dataflow::operators::*; | use differential_dataflow::lattice::Lattice;
use ::Collections;
// -- $ID$
// -- TPC-H/TPC-R Parts/Supplier Relationship Query (Q16)
// -- Functional Query Definition
// -- Approved February 1998
// :x
// :o
// select
// p_brand,
// p_type,
// p_size,
// count(distinct ps_suppkey) as supplier_cnt
// from
// partsupp,
// part
// where
// p_partkey = ps_partkey
// and p_brand <> ':1'
// and p_type not like ':2%'
// and p_size in (:3, :4, :5, :6, :7, :8, :9, :10)
// and ps_suppkey not in (
// select
// s_suppkey
// from
// supplier
// where
// s_comment like '%Customer%Complaints%'
// )
// group by
// p_brand,
// p_type,
// p_size
// order by
// supplier_cnt desc,
// p_brand,
// p_type,
// p_size;
// :n -1
fn starts_with(source: &[u8], query: &[u8]) -> bool {
source.len() >= query.len() && &source[..query.len()] == query
}
fn substring2(source: &[u8], query1: &[u8], query2: &[u8]) -> bool {
if let Some(pos) = (0 .. (source.len() - query1.len())).position(|i| &source[i..][..query1.len()] == query1) {
(pos .. query2.len()).any(|i| &source[i..][..query2.len()] == query2)
}
else { false }
}
pub fn query<G: Scope>(collections: &mut Collections<G>) -> ProbeHandle<G::Timestamp>
where G::Timestamp: Lattice+Ord {
println!("TODO: query 16 could use a count_u if it joins after to re-collect its attributes");
let suppliers =
collections
.suppliers()
.flat_map(|s|
if substring2(s.comment.as_bytes(), b"Customer", b"Complaints") {
Some((s.supp_key))
}
else { None }
);
let parts = collections
.partsupps()
.map(|ps| (ps.supp_key, ps.part_key))
.antijoin_u(&suppliers)
.map(|(_supp_key, part_key)| part_key);
collections
.parts()
.flat_map(|p|
if !starts_with(&p.brand, b"Brand45") && !starts_with(&p.typ.as_bytes(), b"MEDIUM POLISHED") && [49, 14, 23, 45, 19, 3, 36, 9].contains(&p.size) {
Some((p.part_key, (p.brand, p.typ.to_string(), p.size)))
}
else { None }
)
.semijoin_u(&parts)
.count()
.probe()
} | random_line_split | |
query16.rs | use timely::dataflow::*;
// use timely::dataflow::operators::*;
use timely::dataflow::operators::probe::Handle as ProbeHandle;
// use differential_dataflow::AsCollection;
use differential_dataflow::operators::*;
use differential_dataflow::lattice::Lattice;
use ::Collections;
// -- $ID$
// -- TPC-H/TPC-R Parts/Supplier Relationship Query (Q16)
// -- Functional Query Definition
// -- Approved February 1998
// :x
// :o
// select
// p_brand,
// p_type,
// p_size,
// count(distinct ps_suppkey) as supplier_cnt
// from
// partsupp,
// part
// where
// p_partkey = ps_partkey
// and p_brand <> ':1'
// and p_type not like ':2%'
// and p_size in (:3, :4, :5, :6, :7, :8, :9, :10)
// and ps_suppkey not in (
// select
// s_suppkey
// from
// supplier
// where
// s_comment like '%Customer%Complaints%'
// )
// group by
// p_brand,
// p_type,
// p_size
// order by
// supplier_cnt desc,
// p_brand,
// p_type,
// p_size;
// :n -1
fn starts_with(source: &[u8], query: &[u8]) -> bool {
source.len() >= query.len() && &source[..query.len()] == query
}
fn substring2(source: &[u8], query1: &[u8], query2: &[u8]) -> bool {
if let Some(pos) = (0 .. (source.len() - query1.len())).position(|i| &source[i..][..query1.len()] == query1) {
(pos .. query2.len()).any(|i| &source[i..][..query2.len()] == query2)
}
else { false }
}
pub fn query<G: Scope>(collections: &mut Collections<G>) -> ProbeHandle<G::Timestamp>
where G::Timestamp: Lattice+Ord | {
println!("TODO: query 16 could use a count_u if it joins after to re-collect its attributes");
let suppliers =
collections
.suppliers()
.flat_map(|s|
if substring2(s.comment.as_bytes(), b"Customer", b"Complaints") {
Some((s.supp_key))
}
else { None }
);
let parts = collections
.partsupps()
.map(|ps| (ps.supp_key, ps.part_key))
.antijoin_u(&suppliers)
.map(|(_supp_key, part_key)| part_key);
collections
.parts()
.flat_map(|p|
if !starts_with(&p.brand, b"Brand45") && !starts_with(&p.typ.as_bytes(), b"MEDIUM POLISHED") && [49, 14, 23, 45, 19, 3, 36, 9].contains(&p.size) {
Some((p.part_key, (p.brand, p.typ.to_string(), p.size)))
}
else { None }
)
.semijoin_u(&parts)
.count()
.probe()
} | identifier_body | |
query16.rs | use timely::dataflow::*;
// use timely::dataflow::operators::*;
use timely::dataflow::operators::probe::Handle as ProbeHandle;
// use differential_dataflow::AsCollection;
use differential_dataflow::operators::*;
use differential_dataflow::lattice::Lattice;
use ::Collections;
// -- $ID$
// -- TPC-H/TPC-R Parts/Supplier Relationship Query (Q16)
// -- Functional Query Definition
// -- Approved February 1998
// :x
// :o
// select
// p_brand,
// p_type,
// p_size,
// count(distinct ps_suppkey) as supplier_cnt
// from
// partsupp,
// part
// where
// p_partkey = ps_partkey
// and p_brand <> ':1'
// and p_type not like ':2%'
// and p_size in (:3, :4, :5, :6, :7, :8, :9, :10)
// and ps_suppkey not in (
// select
// s_suppkey
// from
// supplier
// where
// s_comment like '%Customer%Complaints%'
// )
// group by
// p_brand,
// p_type,
// p_size
// order by
// supplier_cnt desc,
// p_brand,
// p_type,
// p_size;
// :n -1
fn starts_with(source: &[u8], query: &[u8]) -> bool {
source.len() >= query.len() && &source[..query.len()] == query
}
fn substring2(source: &[u8], query1: &[u8], query2: &[u8]) -> bool {
if let Some(pos) = (0 .. (source.len() - query1.len())).position(|i| &source[i..][..query1.len()] == query1) {
(pos .. query2.len()).any(|i| &source[i..][..query2.len()] == query2)
}
else { false }
}
pub fn | <G: Scope>(collections: &mut Collections<G>) -> ProbeHandle<G::Timestamp>
where G::Timestamp: Lattice+Ord {
println!("TODO: query 16 could use a count_u if it joins after to re-collect its attributes");
let suppliers =
collections
.suppliers()
.flat_map(|s|
if substring2(s.comment.as_bytes(), b"Customer", b"Complaints") {
Some((s.supp_key))
}
else { None }
);
let parts = collections
.partsupps()
.map(|ps| (ps.supp_key, ps.part_key))
.antijoin_u(&suppliers)
.map(|(_supp_key, part_key)| part_key);
collections
.parts()
.flat_map(|p|
if !starts_with(&p.brand, b"Brand45") && !starts_with(&p.typ.as_bytes(), b"MEDIUM POLISHED") && [49, 14, 23, 45, 19, 3, 36, 9].contains(&p.size) {
Some((p.part_key, (p.brand, p.typ.to_string(), p.size)))
}
else { None }
)
.semijoin_u(&parts)
.count()
.probe()
} | query | identifier_name |
capstone.rs | use alloc::string::{String, ToString};
use core::convert::From;
use core::marker::PhantomData;
use libc::{c_int, c_uint, c_void};
use capstone_sys::cs_opt_value::*;
use capstone_sys::*;
use crate::arch::CapstoneBuilder;
use crate::constants::{Arch, Endian, ExtraMode, Mode, OptValue, Syntax};
use crate::error::*;
use crate::ffi::str_from_cstr_ptr;
use crate::instruction::{Insn, InsnDetail, InsnGroupId, InsnId, Instructions, RegId};
/// An instance of the capstone disassembler
///
/// Create with an instance with [`.new()`](Self::new) and disassemble bytes with [`.disasm_all()`](Self::disasm_all).
#[derive(Debug)]
pub struct Capstone {
/// Opaque handle to cs_engine
/// Stored as a pointer to ensure `Capstone` is `!Send`/`!Sync`
csh: *mut c_void,
/// Internal mode bitfield
mode: cs_mode,
/// Internal endian bitfield
endian: cs_mode,
/// Syntax
syntax: cs_opt_value::Type,
/// Internal extra mode bitfield
extra_mode: cs_mode,
/// Whether to get extra details when disassembling
detail_enabled: bool,
/// Whether to skipdata when disassembling
skipdata_enabled: bool,
/// We *must* set `mode`, `extra_mode`, and `endian` at once because `capstone`
/// handles them inside the arch-specific handler. We store the bitwise OR of these flags that
/// can be passed directly to `cs_option()`.
raw_mode: cs_mode,
/// Architecture
arch: Arch,
}
/// Defines a setter on `Capstone` that speculatively changes the arch-specific mode (which
/// includes `mode`, `endian`, and `extra_mode`). The setter takes a `capstone-rs` type and changes
/// the internal `capstone-sys` type.
macro_rules! define_set_mode {
(
$( #[$func_attr:meta] )*
=> $($visibility:ident)*, $fn_name:ident,
$opt_type:ident, $param_name:ident : $param_type:ident ;
$cs_base_type:ident
) => {
$( #[$func_attr] )*
$($visibility)* fn $fn_name(&mut self, $param_name: $param_type) -> CsResult<()> {
let old_val = self.$param_name;
self.$param_name = $cs_base_type::from($param_name);
let old_raw_mode = self.raw_mode;
let new_raw_mode = self.update_raw_mode();
let result = self._set_cs_option(
cs_opt_type::$opt_type,
new_raw_mode.0 as usize,
);
if result.is_err() {
// On error, restore old values
self.raw_mode = old_raw_mode;
self.$param_name = old_val;
}
result
}
}
}
/// Represents that no extra modes are enabled. Can be passed to `Capstone::new_raw()` as the
/// `extra_mode` argument.
pub static NO_EXTRA_MODE: EmptyExtraModeIter = EmptyExtraModeIter(PhantomData);
/// Represents an empty set of `ExtraMode`.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
pub struct EmptyExtraModeIter(PhantomData<()>);
impl Iterator for EmptyExtraModeIter {
type Item = ExtraMode;
fn next(&mut self) -> Option<Self::Item> {
None
}
}
impl Capstone {
/// Create a new instance of the decompiler using the builder pattern interface.
/// This is the recommended interface to `Capstone`.
///
/// ```
/// use capstone::prelude::*;
/// let cs = Capstone::new().x86().mode(arch::x86::ArchMode::Mode32).build();
/// ```
#[allow(clippy::new_ret_no_self)]
pub fn new() -> CapstoneBuilder {
CapstoneBuilder::new()
}
/// Create a new instance of the decompiler using the "raw" interface.
/// The user must ensure that only sensible `Arch`/`Mode` combinations are used.
///
/// ```
/// use capstone::{Arch, Capstone, NO_EXTRA_MODE, Mode};
/// let cs = Capstone::new_raw(Arch::X86, Mode::Mode64, NO_EXTRA_MODE, None);
/// assert!(cs.is_ok());
/// ```
pub fn new_raw<T: Iterator<Item = ExtraMode>>(
arch: Arch,
mode: Mode,
extra_mode: T,
endian: Option<Endian>,
) -> CsResult<Capstone> {
let mut handle: csh = 0;
let csarch: cs_arch = arch.into();
let csmode: cs_mode = mode.into();
// todo(tmfink): test valid modes at run time (or modify upstream capstone)
let endian = match endian {
Some(endian) => cs_mode::from(endian),
None => cs_mode(0),
};
let extra_mode = Self::extra_mode_value(extra_mode);
let combined_mode = csmode | endian | extra_mode;
let err = unsafe { cs_open(csarch, combined_mode, &mut handle) };
if cs_err::CS_ERR_OK == err {
let syntax = CS_OPT_SYNTAX_DEFAULT;
let raw_mode = cs_mode(0);
let detail_enabled = false;
let skipdata_enabled = detail_enabled;
let mut cs = Capstone {
csh: handle as *mut c_void,
syntax,
endian,
mode: csmode,
extra_mode,
detail_enabled,
skipdata_enabled,
raw_mode,
arch,
};
cs.update_raw_mode();
Ok(cs)
} else {
Err(err.into())
}
}
/// Disassemble all instructions in buffer
///
/// ```
/// # use capstone::prelude::*;
/// # let cs = Capstone::new().x86().mode(arch::x86::ArchMode::Mode32).build().unwrap();
/// cs.disasm_all(b"\x90", 0x1000).unwrap();
/// ```
pub fn disasm_all<'a>(&'a self, code: &[u8], addr: u64) -> CsResult<Instructions<'a>> {
self.disasm(code, addr, 0)
}
/// Disassemble `count` instructions in `code`
pub fn disasm_count<'a>(
&'a self,
code: &[u8],
addr: u64,
count: usize,
) -> CsResult<Instructions<'a>> {
if count == 0 {
return Err(Error::CustomError("Invalid dissasemble count; must be > 0"));
}
self.disasm(code, addr, count)
}
/// Disassembles a `&[u8]` full of instructions.
///
/// Pass `count = 0` to disassemble all instructions in the buffer.
fn disasm<'a>(&'a self, code: &[u8], addr: u64, count: usize) -> CsResult<Instructions<'a>> {
// SAFETY NOTE: `cs_disasm()` will write the error state into the
// `struct cs_struct` (true form of the `self.csh`) `errnum` field.
// CLAIM: since:
// - `Capstone` is not `Send`/`Sync`
// - The mutation is done through a `*mut c_void` (not through a const reference)
// it *should* be safe to accept `&self` (instead of `&mut self`) in this method.
let mut ptr: *mut cs_insn = core::ptr::null_mut();
let insn_count = unsafe {
cs_disasm(
self.csh(),
code.as_ptr(),
code.len() as usize,
addr,
count as usize,
&mut ptr,
)
};
if insn_count == 0 {
match self.error_result() {
Ok(_) => Ok(Instructions::new_empty()),
Err(err) => Err(err),
}
} else {
Ok(unsafe { Instructions::from_raw_parts(ptr, insn_count) })
}
}
/// Returns csh handle
#[inline]
fn csh(&self) -> csh {
self.csh as csh
}
/// Returns the raw mode value, which is useful for debugging
#[allow(dead_code)]
pub(crate) fn raw_mode(&self) -> cs_mode {
self.raw_mode
}
/// Update `raw_mode` with the bitwise OR of `mode`, `extra_mode`, and `endian`.
///
/// Returns the new `raw_mode`.
fn update_raw_mode(&mut self) -> cs_mode {
self.raw_mode = self.mode | self.extra_mode | self.endian;
self.raw_mode
}
/// Return the integer value used by capstone to represent the set of extra modes
fn extra_mode_value<T: Iterator<Item = ExtraMode>>(extra_mode: T) -> cs_mode {
// Bitwise OR extra modes
extra_mode.fold(cs_mode(0), |acc, x| acc | cs_mode::from(x))
}
/// Set extra modes in addition to normal `mode`
pub fn | <T: Iterator<Item = ExtraMode>>(&mut self, extra_mode: T) -> CsResult<()> {
let old_val = self.extra_mode;
self.extra_mode = Self::extra_mode_value(extra_mode);
let old_mode = self.raw_mode;
let new_mode = self.update_raw_mode();
let result = self._set_cs_option(cs_opt_type::CS_OPT_MODE, new_mode.0 as usize);
if result.is_err() {
// On error, restore old values
self.raw_mode = old_mode;
self.extra_mode = old_val;
}
result
}
/// Set the assembly syntax (has no effect on some platforms)
pub fn set_syntax(&mut self, syntax: Syntax) -> CsResult<()> {
// Todo(tmfink) check for valid syntax
let syntax_int = cs_opt_value::Type::from(syntax);
let result = self._set_cs_option(cs_opt_type::CS_OPT_SYNTAX, syntax_int as usize);
if result.is_ok() {
self.syntax = syntax_int;
}
result
}
define_set_mode!(
/// Set the endianness (has no effect on some platforms).
=> pub, set_endian, CS_OPT_MODE, endian : Endian; cs_mode);
define_set_mode!(
/// Sets the engine's disassembly mode.
/// Be careful, various combinations of modes aren't supported
/// See the capstone-sys documentation for more information.
=> pub, set_mode, CS_OPT_MODE, mode : Mode; cs_mode);
/// Returns a `CsResult` based on current `errno`.
/// If the `errno` is `CS_ERR_OK`, then `Ok(())` is returned. Otherwise, the error is returned.
fn error_result(&self) -> CsResult<()> {
let errno = unsafe { cs_errno(self.csh()) };
if errno == cs_err::CS_ERR_OK {
Ok(())
} else {
Err(errno.into())
}
}
/// Sets disassembling options at runtime.
///
/// Acts as a safe wrapper around capstone's `cs_option`.
fn _set_cs_option(&mut self, option_type: cs_opt_type, option_value: usize) -> CsResult<()> {
let err = unsafe { cs_option(self.csh(), option_type, option_value) };
if cs_err::CS_ERR_OK == err {
Ok(())
} else {
Err(err.into())
}
}
/// Controls whether to capstone will generate extra details about disassembled instructions.
///
/// Pass `true` to enable detail or `false` to disable detail.
pub fn set_detail(&mut self, enable_detail: bool) -> CsResult<()> {
let option_value: usize = OptValue::from(enable_detail).0 as usize;
let result = self._set_cs_option(cs_opt_type::CS_OPT_DETAIL, option_value);
// Only update internal state on success
if result.is_ok() {
self.detail_enabled = enable_detail;
}
result
}
/// Controls whether capstone will skip over invalid or data instructions.
///
/// Pass `true` to enable skipdata or `false` to disable skipdata.
pub fn set_skipdata(&mut self, enable_skipdata: bool) -> CsResult<()> {
let option_value: usize = OptValue::from(enable_skipdata).0 as usize;
let result = self._set_cs_option(cs_opt_type::CS_OPT_SKIPDATA, option_value);
// Only update internal state on success
if result.is_ok() {
self.skipdata_enabled = enable_skipdata;
}
result
}
/// Converts a register id `reg_id` to a `String` containing the register name.
pub fn reg_name(&self, reg_id: RegId) -> Option<String> {
let reg_name = unsafe {
let _reg_name = cs_reg_name(self.csh(), c_uint::from(reg_id.0));
str_from_cstr_ptr(_reg_name)?.to_string()
};
Some(reg_name)
}
/// Converts an instruction id `insn_id` to a `String` containing the instruction name.
///
/// Note: This function ignores the current syntax and uses the default syntax.
pub fn insn_name(&self, insn_id: InsnId) -> Option<String> {
let insn_name = unsafe {
let _insn_name = cs_insn_name(self.csh(), insn_id.0 as c_uint);
str_from_cstr_ptr(_insn_name)?.to_string()
};
Some(insn_name)
}
/// Converts a group id `group_id` to a `String` containing the group name.
pub fn group_name(&self, group_id: InsnGroupId) -> Option<String> {
let group_name = unsafe {
let _group_name = cs_group_name(self.csh(), c_uint::from(group_id.0));
str_from_cstr_ptr(_group_name)?.to_string()
};
Some(group_name)
}
/// Returns `Detail` structure for a given instruction
///
/// Requires:
///
/// 1. Instruction was created with detail enabled
/// 2. Skipdata is disabled
/// 3. Capstone was not compiled in diet mode
pub fn insn_detail<'s, 'i: 's>(&'s self, insn: &'i Insn) -> CsResult<InsnDetail<'i>> {
if !self.detail_enabled {
Err(Error::DetailOff)
} else if insn.id().0 == 0 {
Err(Error::IrrelevantDataInSkipData)
} else if Self::is_diet() {
Err(Error::IrrelevantDataInDiet)
} else {
Ok(unsafe { insn.detail(self.arch) })
}
}
/// Returns a tuple (major, minor) indicating the version of the capstone C library.
pub fn lib_version() -> (u32, u32) {
let mut major: c_int = 0;
let mut minor: c_int = 0;
let major_ptr: *mut c_int = &mut major;
let minor_ptr: *mut c_int = &mut minor;
// We can ignore the "hexical" version returned by capstone because we already have the
// major and minor versions
let _ = unsafe { cs_version(major_ptr, minor_ptr) };
(major as u32, minor as u32)
}
/// Returns whether the capstone library supports a given architecture.
pub fn supports_arch(arch: Arch) -> bool {
unsafe { cs_support(arch as c_int) }
}
/// Returns whether the capstone library was compiled in diet mode.
pub fn is_diet() -> bool {
unsafe { cs_support(CS_SUPPORT_DIET as c_int) }
}
}
impl Drop for Capstone {
fn drop(&mut self) {
unsafe { cs_close(&mut self.csh()) };
}
}
| set_extra_mode | identifier_name |
capstone.rs | use alloc::string::{String, ToString};
use core::convert::From;
use core::marker::PhantomData;
use libc::{c_int, c_uint, c_void};
use capstone_sys::cs_opt_value::*;
use capstone_sys::*;
use crate::arch::CapstoneBuilder;
use crate::constants::{Arch, Endian, ExtraMode, Mode, OptValue, Syntax};
use crate::error::*;
use crate::ffi::str_from_cstr_ptr;
use crate::instruction::{Insn, InsnDetail, InsnGroupId, InsnId, Instructions, RegId};
/// An instance of the capstone disassembler
///
/// Create with an instance with [`.new()`](Self::new) and disassemble bytes with [`.disasm_all()`](Self::disasm_all).
#[derive(Debug)]
pub struct Capstone {
/// Opaque handle to cs_engine
/// Stored as a pointer to ensure `Capstone` is `!Send`/`!Sync`
csh: *mut c_void,
/// Internal mode bitfield
mode: cs_mode,
/// Internal endian bitfield
endian: cs_mode,
/// Syntax
syntax: cs_opt_value::Type,
/// Internal extra mode bitfield
extra_mode: cs_mode,
/// Whether to get extra details when disassembling
detail_enabled: bool,
/// Whether to skipdata when disassembling
skipdata_enabled: bool,
/// We *must* set `mode`, `extra_mode`, and `endian` at once because `capstone`
/// handles them inside the arch-specific handler. We store the bitwise OR of these flags that
/// can be passed directly to `cs_option()`.
raw_mode: cs_mode,
/// Architecture
arch: Arch,
}
/// Defines a setter on `Capstone` that speculatively changes the arch-specific mode (which
/// includes `mode`, `endian`, and `extra_mode`). The setter takes a `capstone-rs` type and changes
/// the internal `capstone-sys` type.
macro_rules! define_set_mode {
(
$( #[$func_attr:meta] )*
=> $($visibility:ident)*, $fn_name:ident,
$opt_type:ident, $param_name:ident : $param_type:ident ;
$cs_base_type:ident
) => {
$( #[$func_attr] )*
$($visibility)* fn $fn_name(&mut self, $param_name: $param_type) -> CsResult<()> {
let old_val = self.$param_name;
self.$param_name = $cs_base_type::from($param_name);
let old_raw_mode = self.raw_mode;
let new_raw_mode = self.update_raw_mode();
let result = self._set_cs_option(
cs_opt_type::$opt_type,
new_raw_mode.0 as usize,
);
if result.is_err() {
// On error, restore old values
self.raw_mode = old_raw_mode;
self.$param_name = old_val;
}
result
}
}
}
/// Represents that no extra modes are enabled. Can be passed to `Capstone::new_raw()` as the
/// `extra_mode` argument.
pub static NO_EXTRA_MODE: EmptyExtraModeIter = EmptyExtraModeIter(PhantomData);
/// Represents an empty set of `ExtraMode`.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
pub struct EmptyExtraModeIter(PhantomData<()>);
impl Iterator for EmptyExtraModeIter {
type Item = ExtraMode;
fn next(&mut self) -> Option<Self::Item> {
None
}
}
impl Capstone {
/// Create a new instance of the decompiler using the builder pattern interface.
/// This is the recommended interface to `Capstone`.
///
/// ```
/// use capstone::prelude::*;
/// let cs = Capstone::new().x86().mode(arch::x86::ArchMode::Mode32).build();
/// ```
#[allow(clippy::new_ret_no_self)]
pub fn new() -> CapstoneBuilder {
CapstoneBuilder::new()
}
/// Create a new instance of the decompiler using the "raw" interface.
/// The user must ensure that only sensible `Arch`/`Mode` combinations are used.
///
/// ```
/// use capstone::{Arch, Capstone, NO_EXTRA_MODE, Mode};
/// let cs = Capstone::new_raw(Arch::X86, Mode::Mode64, NO_EXTRA_MODE, None);
/// assert!(cs.is_ok());
/// ```
pub fn new_raw<T: Iterator<Item = ExtraMode>>(
arch: Arch,
mode: Mode,
extra_mode: T,
endian: Option<Endian>,
) -> CsResult<Capstone> {
let mut handle: csh = 0;
let csarch: cs_arch = arch.into();
let csmode: cs_mode = mode.into();
// todo(tmfink): test valid modes at run time (or modify upstream capstone)
let endian = match endian {
Some(endian) => cs_mode::from(endian),
None => cs_mode(0),
};
let extra_mode = Self::extra_mode_value(extra_mode);
let combined_mode = csmode | endian | extra_mode;
let err = unsafe { cs_open(csarch, combined_mode, &mut handle) };
if cs_err::CS_ERR_OK == err {
let syntax = CS_OPT_SYNTAX_DEFAULT;
let raw_mode = cs_mode(0);
let detail_enabled = false;
let skipdata_enabled = detail_enabled;
let mut cs = Capstone {
csh: handle as *mut c_void,
syntax,
endian,
mode: csmode,
extra_mode,
detail_enabled,
skipdata_enabled,
raw_mode,
arch,
};
cs.update_raw_mode();
Ok(cs)
} else {
Err(err.into())
}
}
/// Disassemble all instructions in buffer
///
/// ```
/// # use capstone::prelude::*;
/// # let cs = Capstone::new().x86().mode(arch::x86::ArchMode::Mode32).build().unwrap();
/// cs.disasm_all(b"\x90", 0x1000).unwrap();
/// ```
pub fn disasm_all<'a>(&'a self, code: &[u8], addr: u64) -> CsResult<Instructions<'a>> {
self.disasm(code, addr, 0)
}
/// Disassemble `count` instructions in `code`
pub fn disasm_count<'a>(
&'a self,
code: &[u8],
addr: u64,
count: usize,
) -> CsResult<Instructions<'a>> {
if count == 0 {
return Err(Error::CustomError("Invalid dissasemble count; must be > 0"));
}
self.disasm(code, addr, count)
}
/// Disassembles a `&[u8]` full of instructions.
///
/// Pass `count = 0` to disassemble all instructions in the buffer.
fn disasm<'a>(&'a self, code: &[u8], addr: u64, count: usize) -> CsResult<Instructions<'a>> {
// SAFETY NOTE: `cs_disasm()` will write the error state into the
// `struct cs_struct` (true form of the `self.csh`) `errnum` field.
// CLAIM: since:
// - `Capstone` is not `Send`/`Sync`
// - The mutation is done through a `*mut c_void` (not through a const reference)
// it *should* be safe to accept `&self` (instead of `&mut self`) in this method.
let mut ptr: *mut cs_insn = core::ptr::null_mut();
let insn_count = unsafe {
cs_disasm(
self.csh(),
code.as_ptr(),
code.len() as usize,
addr,
count as usize,
&mut ptr,
)
};
if insn_count == 0 {
match self.error_result() {
Ok(_) => Ok(Instructions::new_empty()),
Err(err) => Err(err),
}
} else {
Ok(unsafe { Instructions::from_raw_parts(ptr, insn_count) })
}
}
/// Returns csh handle
#[inline]
fn csh(&self) -> csh {
self.csh as csh
}
/// Returns the raw mode value, which is useful for debugging
#[allow(dead_code)]
pub(crate) fn raw_mode(&self) -> cs_mode {
self.raw_mode
}
/// Update `raw_mode` with the bitwise OR of `mode`, `extra_mode`, and `endian`.
///
/// Returns the new `raw_mode`.
fn update_raw_mode(&mut self) -> cs_mode {
self.raw_mode = self.mode | self.extra_mode | self.endian;
self.raw_mode
}
/// Return the integer value used by capstone to represent the set of extra modes
fn extra_mode_value<T: Iterator<Item = ExtraMode>>(extra_mode: T) -> cs_mode {
// Bitwise OR extra modes
extra_mode.fold(cs_mode(0), |acc, x| acc | cs_mode::from(x))
}
/// Set extra modes in addition to normal `mode`
pub fn set_extra_mode<T: Iterator<Item = ExtraMode>>(&mut self, extra_mode: T) -> CsResult<()> {
let old_val = self.extra_mode;
self.extra_mode = Self::extra_mode_value(extra_mode);
let old_mode = self.raw_mode;
let new_mode = self.update_raw_mode();
let result = self._set_cs_option(cs_opt_type::CS_OPT_MODE, new_mode.0 as usize);
if result.is_err() {
// On error, restore old values
self.raw_mode = old_mode;
self.extra_mode = old_val;
}
result
}
/// Set the assembly syntax (has no effect on some platforms)
pub fn set_syntax(&mut self, syntax: Syntax) -> CsResult<()> {
// Todo(tmfink) check for valid syntax
let syntax_int = cs_opt_value::Type::from(syntax);
let result = self._set_cs_option(cs_opt_type::CS_OPT_SYNTAX, syntax_int as usize);
if result.is_ok() {
self.syntax = syntax_int;
}
result
}
define_set_mode!(
/// Set the endianness (has no effect on some platforms).
=> pub, set_endian, CS_OPT_MODE, endian : Endian; cs_mode);
define_set_mode!(
/// Sets the engine's disassembly mode.
/// Be careful, various combinations of modes aren't supported
/// See the capstone-sys documentation for more information.
=> pub, set_mode, CS_OPT_MODE, mode : Mode; cs_mode);
/// Returns a `CsResult` based on current `errno`.
/// If the `errno` is `CS_ERR_OK`, then `Ok(())` is returned. Otherwise, the error is returned.
fn error_result(&self) -> CsResult<()> {
let errno = unsafe { cs_errno(self.csh()) };
if errno == cs_err::CS_ERR_OK {
Ok(())
} else {
Err(errno.into())
}
}
/// Sets disassembling options at runtime.
///
/// Acts as a safe wrapper around capstone's `cs_option`.
fn _set_cs_option(&mut self, option_type: cs_opt_type, option_value: usize) -> CsResult<()> |
/// Controls whether to capstone will generate extra details about disassembled instructions.
///
/// Pass `true` to enable detail or `false` to disable detail.
pub fn set_detail(&mut self, enable_detail: bool) -> CsResult<()> {
let option_value: usize = OptValue::from(enable_detail).0 as usize;
let result = self._set_cs_option(cs_opt_type::CS_OPT_DETAIL, option_value);
// Only update internal state on success
if result.is_ok() {
self.detail_enabled = enable_detail;
}
result
}
/// Controls whether capstone will skip over invalid or data instructions.
///
/// Pass `true` to enable skipdata or `false` to disable skipdata.
pub fn set_skipdata(&mut self, enable_skipdata: bool) -> CsResult<()> {
let option_value: usize = OptValue::from(enable_skipdata).0 as usize;
let result = self._set_cs_option(cs_opt_type::CS_OPT_SKIPDATA, option_value);
// Only update internal state on success
if result.is_ok() {
self.skipdata_enabled = enable_skipdata;
}
result
}
/// Converts a register id `reg_id` to a `String` containing the register name.
pub fn reg_name(&self, reg_id: RegId) -> Option<String> {
let reg_name = unsafe {
let _reg_name = cs_reg_name(self.csh(), c_uint::from(reg_id.0));
str_from_cstr_ptr(_reg_name)?.to_string()
};
Some(reg_name)
}
/// Converts an instruction id `insn_id` to a `String` containing the instruction name.
///
/// Note: This function ignores the current syntax and uses the default syntax.
pub fn insn_name(&self, insn_id: InsnId) -> Option<String> {
let insn_name = unsafe {
let _insn_name = cs_insn_name(self.csh(), insn_id.0 as c_uint);
str_from_cstr_ptr(_insn_name)?.to_string()
};
Some(insn_name)
}
/// Converts a group id `group_id` to a `String` containing the group name.
pub fn group_name(&self, group_id: InsnGroupId) -> Option<String> {
let group_name = unsafe {
let _group_name = cs_group_name(self.csh(), c_uint::from(group_id.0));
str_from_cstr_ptr(_group_name)?.to_string()
};
Some(group_name)
}
/// Returns `Detail` structure for a given instruction
///
/// Requires:
///
/// 1. Instruction was created with detail enabled
/// 2. Skipdata is disabled
/// 3. Capstone was not compiled in diet mode
pub fn insn_detail<'s, 'i: 's>(&'s self, insn: &'i Insn) -> CsResult<InsnDetail<'i>> {
if !self.detail_enabled {
Err(Error::DetailOff)
} else if insn.id().0 == 0 {
Err(Error::IrrelevantDataInSkipData)
} else if Self::is_diet() {
Err(Error::IrrelevantDataInDiet)
} else {
Ok(unsafe { insn.detail(self.arch) })
}
}
/// Returns a tuple (major, minor) indicating the version of the capstone C library.
pub fn lib_version() -> (u32, u32) {
let mut major: c_int = 0;
let mut minor: c_int = 0;
let major_ptr: *mut c_int = &mut major;
let minor_ptr: *mut c_int = &mut minor;
// We can ignore the "hexical" version returned by capstone because we already have the
// major and minor versions
let _ = unsafe { cs_version(major_ptr, minor_ptr) };
(major as u32, minor as u32)
}
/// Returns whether the capstone library supports a given architecture.
pub fn supports_arch(arch: Arch) -> bool {
unsafe { cs_support(arch as c_int) }
}
/// Returns whether the capstone library was compiled in diet mode.
pub fn is_diet() -> bool {
unsafe { cs_support(CS_SUPPORT_DIET as c_int) }
}
}
impl Drop for Capstone {
fn drop(&mut self) {
unsafe { cs_close(&mut self.csh()) };
}
}
| {
let err = unsafe { cs_option(self.csh(), option_type, option_value) };
if cs_err::CS_ERR_OK == err {
Ok(())
} else {
Err(err.into())
}
} | identifier_body |
capstone.rs | use alloc::string::{String, ToString};
use core::convert::From;
use core::marker::PhantomData;
use libc::{c_int, c_uint, c_void};
use capstone_sys::cs_opt_value::*;
use capstone_sys::*;
use crate::arch::CapstoneBuilder;
use crate::constants::{Arch, Endian, ExtraMode, Mode, OptValue, Syntax};
use crate::error::*;
use crate::ffi::str_from_cstr_ptr;
use crate::instruction::{Insn, InsnDetail, InsnGroupId, InsnId, Instructions, RegId};
/// An instance of the capstone disassembler
///
/// Create with an instance with [`.new()`](Self::new) and disassemble bytes with [`.disasm_all()`](Self::disasm_all).
#[derive(Debug)]
pub struct Capstone {
/// Opaque handle to cs_engine
/// Stored as a pointer to ensure `Capstone` is `!Send`/`!Sync`
csh: *mut c_void,
/// Internal mode bitfield
mode: cs_mode,
/// Internal endian bitfield
endian: cs_mode,
/// Syntax
syntax: cs_opt_value::Type,
/// Internal extra mode bitfield
extra_mode: cs_mode,
/// Whether to get extra details when disassembling
detail_enabled: bool,
/// Whether to skipdata when disassembling
skipdata_enabled: bool,
/// We *must* set `mode`, `extra_mode`, and `endian` at once because `capstone`
/// handles them inside the arch-specific handler. We store the bitwise OR of these flags that
/// can be passed directly to `cs_option()`.
raw_mode: cs_mode,
/// Architecture
arch: Arch,
}
/// Defines a setter on `Capstone` that speculatively changes the arch-specific mode (which
/// includes `mode`, `endian`, and `extra_mode`). The setter takes a `capstone-rs` type and changes
/// the internal `capstone-sys` type.
macro_rules! define_set_mode {
(
$( #[$func_attr:meta] )*
=> $($visibility:ident)*, $fn_name:ident,
$opt_type:ident, $param_name:ident : $param_type:ident ;
$cs_base_type:ident
) => {
$( #[$func_attr] )*
$($visibility)* fn $fn_name(&mut self, $param_name: $param_type) -> CsResult<()> {
let old_val = self.$param_name;
self.$param_name = $cs_base_type::from($param_name);
let old_raw_mode = self.raw_mode;
let new_raw_mode = self.update_raw_mode();
let result = self._set_cs_option(
cs_opt_type::$opt_type,
new_raw_mode.0 as usize,
);
if result.is_err() {
// On error, restore old values
self.raw_mode = old_raw_mode;
self.$param_name = old_val;
}
result
}
}
}
/// Represents that no extra modes are enabled. Can be passed to `Capstone::new_raw()` as the
/// `extra_mode` argument.
pub static NO_EXTRA_MODE: EmptyExtraModeIter = EmptyExtraModeIter(PhantomData);
/// Represents an empty set of `ExtraMode`.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
pub struct EmptyExtraModeIter(PhantomData<()>);
impl Iterator for EmptyExtraModeIter {
type Item = ExtraMode;
fn next(&mut self) -> Option<Self::Item> {
None
}
}
impl Capstone {
/// Create a new instance of the decompiler using the builder pattern interface.
/// This is the recommended interface to `Capstone`.
///
/// ```
/// use capstone::prelude::*;
/// let cs = Capstone::new().x86().mode(arch::x86::ArchMode::Mode32).build();
/// ```
#[allow(clippy::new_ret_no_self)]
pub fn new() -> CapstoneBuilder {
CapstoneBuilder::new()
}
/// Create a new instance of the decompiler using the "raw" interface.
/// The user must ensure that only sensible `Arch`/`Mode` combinations are used.
/// | /// assert!(cs.is_ok());
/// ```
pub fn new_raw<T: Iterator<Item = ExtraMode>>(
arch: Arch,
mode: Mode,
extra_mode: T,
endian: Option<Endian>,
) -> CsResult<Capstone> {
let mut handle: csh = 0;
let csarch: cs_arch = arch.into();
let csmode: cs_mode = mode.into();
// todo(tmfink): test valid modes at run time (or modify upstream capstone)
let endian = match endian {
Some(endian) => cs_mode::from(endian),
None => cs_mode(0),
};
let extra_mode = Self::extra_mode_value(extra_mode);
let combined_mode = csmode | endian | extra_mode;
let err = unsafe { cs_open(csarch, combined_mode, &mut handle) };
if cs_err::CS_ERR_OK == err {
let syntax = CS_OPT_SYNTAX_DEFAULT;
let raw_mode = cs_mode(0);
let detail_enabled = false;
let skipdata_enabled = detail_enabled;
let mut cs = Capstone {
csh: handle as *mut c_void,
syntax,
endian,
mode: csmode,
extra_mode,
detail_enabled,
skipdata_enabled,
raw_mode,
arch,
};
cs.update_raw_mode();
Ok(cs)
} else {
Err(err.into())
}
}
/// Disassemble all instructions in buffer
///
/// ```
/// # use capstone::prelude::*;
/// # let cs = Capstone::new().x86().mode(arch::x86::ArchMode::Mode32).build().unwrap();
/// cs.disasm_all(b"\x90", 0x1000).unwrap();
/// ```
pub fn disasm_all<'a>(&'a self, code: &[u8], addr: u64) -> CsResult<Instructions<'a>> {
self.disasm(code, addr, 0)
}
/// Disassemble `count` instructions in `code`
pub fn disasm_count<'a>(
&'a self,
code: &[u8],
addr: u64,
count: usize,
) -> CsResult<Instructions<'a>> {
if count == 0 {
return Err(Error::CustomError("Invalid dissasemble count; must be > 0"));
}
self.disasm(code, addr, count)
}
/// Disassembles a `&[u8]` full of instructions.
///
/// Pass `count = 0` to disassemble all instructions in the buffer.
fn disasm<'a>(&'a self, code: &[u8], addr: u64, count: usize) -> CsResult<Instructions<'a>> {
// SAFETY NOTE: `cs_disasm()` will write the error state into the
// `struct cs_struct` (true form of the `self.csh`) `errnum` field.
// CLAIM: since:
// - `Capstone` is not `Send`/`Sync`
// - The mutation is done through a `*mut c_void` (not through a const reference)
// it *should* be safe to accept `&self` (instead of `&mut self`) in this method.
let mut ptr: *mut cs_insn = core::ptr::null_mut();
let insn_count = unsafe {
cs_disasm(
self.csh(),
code.as_ptr(),
code.len() as usize,
addr,
count as usize,
&mut ptr,
)
};
if insn_count == 0 {
match self.error_result() {
Ok(_) => Ok(Instructions::new_empty()),
Err(err) => Err(err),
}
} else {
Ok(unsafe { Instructions::from_raw_parts(ptr, insn_count) })
}
}
/// Returns csh handle
#[inline]
fn csh(&self) -> csh {
self.csh as csh
}
/// Returns the raw mode value, which is useful for debugging
#[allow(dead_code)]
pub(crate) fn raw_mode(&self) -> cs_mode {
self.raw_mode
}
/// Update `raw_mode` with the bitwise OR of `mode`, `extra_mode`, and `endian`.
///
/// Returns the new `raw_mode`.
fn update_raw_mode(&mut self) -> cs_mode {
self.raw_mode = self.mode | self.extra_mode | self.endian;
self.raw_mode
}
/// Return the integer value used by capstone to represent the set of extra modes
fn extra_mode_value<T: Iterator<Item = ExtraMode>>(extra_mode: T) -> cs_mode {
// Bitwise OR extra modes
extra_mode.fold(cs_mode(0), |acc, x| acc | cs_mode::from(x))
}
/// Set extra modes in addition to normal `mode`
pub fn set_extra_mode<T: Iterator<Item = ExtraMode>>(&mut self, extra_mode: T) -> CsResult<()> {
let old_val = self.extra_mode;
self.extra_mode = Self::extra_mode_value(extra_mode);
let old_mode = self.raw_mode;
let new_mode = self.update_raw_mode();
let result = self._set_cs_option(cs_opt_type::CS_OPT_MODE, new_mode.0 as usize);
if result.is_err() {
// On error, restore old values
self.raw_mode = old_mode;
self.extra_mode = old_val;
}
result
}
/// Set the assembly syntax (has no effect on some platforms)
pub fn set_syntax(&mut self, syntax: Syntax) -> CsResult<()> {
// Todo(tmfink) check for valid syntax
let syntax_int = cs_opt_value::Type::from(syntax);
let result = self._set_cs_option(cs_opt_type::CS_OPT_SYNTAX, syntax_int as usize);
if result.is_ok() {
self.syntax = syntax_int;
}
result
}
define_set_mode!(
/// Set the endianness (has no effect on some platforms).
=> pub, set_endian, CS_OPT_MODE, endian : Endian; cs_mode);
define_set_mode!(
/// Sets the engine's disassembly mode.
/// Be careful, various combinations of modes aren't supported
/// See the capstone-sys documentation for more information.
=> pub, set_mode, CS_OPT_MODE, mode : Mode; cs_mode);
/// Returns a `CsResult` based on current `errno`.
/// If the `errno` is `CS_ERR_OK`, then `Ok(())` is returned. Otherwise, the error is returned.
fn error_result(&self) -> CsResult<()> {
let errno = unsafe { cs_errno(self.csh()) };
if errno == cs_err::CS_ERR_OK {
Ok(())
} else {
Err(errno.into())
}
}
/// Sets disassembling options at runtime.
///
/// Acts as a safe wrapper around capstone's `cs_option`.
fn _set_cs_option(&mut self, option_type: cs_opt_type, option_value: usize) -> CsResult<()> {
let err = unsafe { cs_option(self.csh(), option_type, option_value) };
if cs_err::CS_ERR_OK == err {
Ok(())
} else {
Err(err.into())
}
}
/// Controls whether to capstone will generate extra details about disassembled instructions.
///
/// Pass `true` to enable detail or `false` to disable detail.
pub fn set_detail(&mut self, enable_detail: bool) -> CsResult<()> {
let option_value: usize = OptValue::from(enable_detail).0 as usize;
let result = self._set_cs_option(cs_opt_type::CS_OPT_DETAIL, option_value);
// Only update internal state on success
if result.is_ok() {
self.detail_enabled = enable_detail;
}
result
}
/// Controls whether capstone will skip over invalid or data instructions.
///
/// Pass `true` to enable skipdata or `false` to disable skipdata.
pub fn set_skipdata(&mut self, enable_skipdata: bool) -> CsResult<()> {
let option_value: usize = OptValue::from(enable_skipdata).0 as usize;
let result = self._set_cs_option(cs_opt_type::CS_OPT_SKIPDATA, option_value);
// Only update internal state on success
if result.is_ok() {
self.skipdata_enabled = enable_skipdata;
}
result
}
/// Converts a register id `reg_id` to a `String` containing the register name.
pub fn reg_name(&self, reg_id: RegId) -> Option<String> {
let reg_name = unsafe {
let _reg_name = cs_reg_name(self.csh(), c_uint::from(reg_id.0));
str_from_cstr_ptr(_reg_name)?.to_string()
};
Some(reg_name)
}
/// Converts an instruction id `insn_id` to a `String` containing the instruction name.
///
/// Note: This function ignores the current syntax and uses the default syntax.
pub fn insn_name(&self, insn_id: InsnId) -> Option<String> {
let insn_name = unsafe {
let _insn_name = cs_insn_name(self.csh(), insn_id.0 as c_uint);
str_from_cstr_ptr(_insn_name)?.to_string()
};
Some(insn_name)
}
/// Converts a group id `group_id` to a `String` containing the group name.
pub fn group_name(&self, group_id: InsnGroupId) -> Option<String> {
let group_name = unsafe {
let _group_name = cs_group_name(self.csh(), c_uint::from(group_id.0));
str_from_cstr_ptr(_group_name)?.to_string()
};
Some(group_name)
}
/// Returns `Detail` structure for a given instruction
///
/// Requires:
///
/// 1. Instruction was created with detail enabled
/// 2. Skipdata is disabled
/// 3. Capstone was not compiled in diet mode
pub fn insn_detail<'s, 'i: 's>(&'s self, insn: &'i Insn) -> CsResult<InsnDetail<'i>> {
if !self.detail_enabled {
Err(Error::DetailOff)
} else if insn.id().0 == 0 {
Err(Error::IrrelevantDataInSkipData)
} else if Self::is_diet() {
Err(Error::IrrelevantDataInDiet)
} else {
Ok(unsafe { insn.detail(self.arch) })
}
}
/// Returns a tuple (major, minor) indicating the version of the capstone C library.
pub fn lib_version() -> (u32, u32) {
let mut major: c_int = 0;
let mut minor: c_int = 0;
let major_ptr: *mut c_int = &mut major;
let minor_ptr: *mut c_int = &mut minor;
// We can ignore the "hexical" version returned by capstone because we already have the
// major and minor versions
let _ = unsafe { cs_version(major_ptr, minor_ptr) };
(major as u32, minor as u32)
}
/// Returns whether the capstone library supports a given architecture.
pub fn supports_arch(arch: Arch) -> bool {
unsafe { cs_support(arch as c_int) }
}
/// Returns whether the capstone library was compiled in diet mode.
pub fn is_diet() -> bool {
unsafe { cs_support(CS_SUPPORT_DIET as c_int) }
}
}
impl Drop for Capstone {
fn drop(&mut self) {
unsafe { cs_close(&mut self.csh()) };
}
} | /// ```
/// use capstone::{Arch, Capstone, NO_EXTRA_MODE, Mode};
/// let cs = Capstone::new_raw(Arch::X86, Mode::Mode64, NO_EXTRA_MODE, None); | random_line_split |
inputs.py | import numpy as np
import tensorflow as tf
import os
def get_inputs(split, config):
split_dir = config['split_dir']
data_dir = config['data_dir']
dataset = config['dataset']
split_file = os.path.join(split_dir, dataset, split + '.lst')
filename_queue = get_filename_queue(split_file, os.path.join(data_dir, dataset))
if dataset == 'mnist':
image = get_inputs_mnist(filename_queue, config)
config['output_size'] = 28
config['c_dim'] = 1
elif dataset == "cifar-10":
|
else:
image = get_inputs_image(filename_queue, config)
image_batch = create_batch([image], config['batch_size'])
return image_batch
def get_inputs_image(filename_queue, config):
output_size = config['output_size']
image_size = config['image_size']
c_dim = config['c_dim']
# Read a record, getting filenames from the filename_queue.
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
image = tf.image.decode_image(value, channels=c_dim)
image = tf.cast(image, tf.float32)/255.
image_shape = tf.shape(image)
image_height, image_width = image_shape[0], image_shape[1]
offset_height = tf.cast((image_height - image_size)/2, tf.int32)
offset_width = tf.cast((image_width - image_size)/2, tf.int32)
image = tf.image.crop_to_bounding_box(image, offset_height, offset_width, image_size, image_size)
image = tf.image.resize_images(image, [output_size, output_size])
image.set_shape([output_size, output_size, c_dim])
return image
def get_inputs_mnist(filename_queue, config):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
# Defaults are not specified since all keys are required.
features={
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64),
'depth': tf.FixedLenFeature([], tf.int64),
'label': tf.FixedLenFeature([], tf.int64),
'image_raw': tf.FixedLenFeature([], tf.string),
})
image = tf.decode_raw(features['image_raw'], tf.uint8)
image.set_shape([784])
image = tf.reshape(image, [28, 28, 1])
image = tf.cast(image, tf.float32) / 255.
# Convert label from a scalar uint8 tensor to an int32 scalar.
label = tf.cast(features['label'], tf.int32)
binary_image = (tf.random_uniform(image.get_shape()) <= image)
binary_image = tf.cast(binary_image, tf.float32)
return binary_image
def get_inputs_cifar10(filename_queue, config):
output_size = config['output_size']
image_size = config['image_size']
c_dim = config['c_dim']
# Dimensions of the images in the CIFAR-10 dataset.
# See http://www.cs.toronto.edu/~kriz/cifar.html for a description of the
# input format.
label_bytes = 1 # 2 for CIFAR-100
image_bytes = 32 * 32 * 3
# Every record consists of a label followed by the image, with a
# fixed number of bytes for each.
record_bytes = label_bytes + image_bytes
# Read a record, getting filenames from the filename_queue.
reader = tf.FixedLengthRecordReader(record_bytes=record_bytes)
key, value = reader.read(filename_queue)
record = tf.decode_raw(value, tf.uint8)
# The first bytes represent the label, which we convert from uint8->int32.
label = tf.cast(record[0], tf.int32)
# The remaining bytes after the label represent the image, which we reshape
# from [depth * height * width] to [depth, height, width].
#tf.strided_slice(record, [label_bytes], [label_bytes + image_bytes])
image = tf.reshape(record[label_bytes:label_bytes+image_bytes], [3, 32, 32])
image = tf.cast(image, tf.float32)/255.
# Convert from [depth, height, width] to [height, width, depth].
image = tf.transpose(image, [1, 2, 0])
return image
def get_filename_queue(split_file, data_dir):
with open(split_file, 'r') as f:
filenames = f.readlines()
filenames = [os.path.join(data_dir, f.strip()) for f in filenames]
for f in filenames:
if not os.path.exists(f):
raise ValueError('Failed to find file: ' + f)
filename_queue = tf.train.string_input_producer(filenames)
return filename_queue
def create_batch(inputs, batch_size=64, min_queue_examples=1000, num_preprocess_threads=12, enqueue_many=False):
# Generate a batch of images and labels by building up a queue of examples.
batch = tf.train.shuffle_batch(
inputs,
batch_size=batch_size,
num_threads=num_preprocess_threads,
capacity=min_queue_examples + 3 * batch_size,
min_after_dequeue=min_queue_examples,
enqueue_many=enqueue_many,
)
return batch
| image = get_inputs_cifar10(filename_queue, config)
config['output_size'] = 32
config['c_dim'] = 3 | conditional_block |
inputs.py | import numpy as np
import tensorflow as tf
import os
def get_inputs(split, config):
|
def get_inputs_image(filename_queue, config):
output_size = config['output_size']
image_size = config['image_size']
c_dim = config['c_dim']
# Read a record, getting filenames from the filename_queue.
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
image = tf.image.decode_image(value, channels=c_dim)
image = tf.cast(image, tf.float32)/255.
image_shape = tf.shape(image)
image_height, image_width = image_shape[0], image_shape[1]
offset_height = tf.cast((image_height - image_size)/2, tf.int32)
offset_width = tf.cast((image_width - image_size)/2, tf.int32)
image = tf.image.crop_to_bounding_box(image, offset_height, offset_width, image_size, image_size)
image = tf.image.resize_images(image, [output_size, output_size])
image.set_shape([output_size, output_size, c_dim])
return image
def get_inputs_mnist(filename_queue, config):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
# Defaults are not specified since all keys are required.
features={
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64),
'depth': tf.FixedLenFeature([], tf.int64),
'label': tf.FixedLenFeature([], tf.int64),
'image_raw': tf.FixedLenFeature([], tf.string),
})
image = tf.decode_raw(features['image_raw'], tf.uint8)
image.set_shape([784])
image = tf.reshape(image, [28, 28, 1])
image = tf.cast(image, tf.float32) / 255.
# Convert label from a scalar uint8 tensor to an int32 scalar.
label = tf.cast(features['label'], tf.int32)
binary_image = (tf.random_uniform(image.get_shape()) <= image)
binary_image = tf.cast(binary_image, tf.float32)
return binary_image
def get_inputs_cifar10(filename_queue, config):
output_size = config['output_size']
image_size = config['image_size']
c_dim = config['c_dim']
# Dimensions of the images in the CIFAR-10 dataset.
# See http://www.cs.toronto.edu/~kriz/cifar.html for a description of the
# input format.
label_bytes = 1 # 2 for CIFAR-100
image_bytes = 32 * 32 * 3
# Every record consists of a label followed by the image, with a
# fixed number of bytes for each.
record_bytes = label_bytes + image_bytes
# Read a record, getting filenames from the filename_queue.
reader = tf.FixedLengthRecordReader(record_bytes=record_bytes)
key, value = reader.read(filename_queue)
record = tf.decode_raw(value, tf.uint8)
# The first bytes represent the label, which we convert from uint8->int32.
label = tf.cast(record[0], tf.int32)
# The remaining bytes after the label represent the image, which we reshape
# from [depth * height * width] to [depth, height, width].
#tf.strided_slice(record, [label_bytes], [label_bytes + image_bytes])
image = tf.reshape(record[label_bytes:label_bytes+image_bytes], [3, 32, 32])
image = tf.cast(image, tf.float32)/255.
# Convert from [depth, height, width] to [height, width, depth].
image = tf.transpose(image, [1, 2, 0])
return image
def get_filename_queue(split_file, data_dir):
with open(split_file, 'r') as f:
filenames = f.readlines()
filenames = [os.path.join(data_dir, f.strip()) for f in filenames]
for f in filenames:
if not os.path.exists(f):
raise ValueError('Failed to find file: ' + f)
filename_queue = tf.train.string_input_producer(filenames)
return filename_queue
def create_batch(inputs, batch_size=64, min_queue_examples=1000, num_preprocess_threads=12, enqueue_many=False):
# Generate a batch of images and labels by building up a queue of examples.
batch = tf.train.shuffle_batch(
inputs,
batch_size=batch_size,
num_threads=num_preprocess_threads,
capacity=min_queue_examples + 3 * batch_size,
min_after_dequeue=min_queue_examples,
enqueue_many=enqueue_many,
)
return batch
| split_dir = config['split_dir']
data_dir = config['data_dir']
dataset = config['dataset']
split_file = os.path.join(split_dir, dataset, split + '.lst')
filename_queue = get_filename_queue(split_file, os.path.join(data_dir, dataset))
if dataset == 'mnist':
image = get_inputs_mnist(filename_queue, config)
config['output_size'] = 28
config['c_dim'] = 1
elif dataset == "cifar-10":
image = get_inputs_cifar10(filename_queue, config)
config['output_size'] = 32
config['c_dim'] = 3
else:
image = get_inputs_image(filename_queue, config)
image_batch = create_batch([image], config['batch_size'])
return image_batch | identifier_body |
inputs.py | import numpy as np
import tensorflow as tf
import os
def get_inputs(split, config):
split_dir = config['split_dir']
data_dir = config['data_dir']
dataset = config['dataset']
split_file = os.path.join(split_dir, dataset, split + '.lst')
filename_queue = get_filename_queue(split_file, os.path.join(data_dir, dataset))
if dataset == 'mnist':
image = get_inputs_mnist(filename_queue, config)
config['output_size'] = 28
config['c_dim'] = 1
elif dataset == "cifar-10":
image = get_inputs_cifar10(filename_queue, config)
config['output_size'] = 32
config['c_dim'] = 3
else:
image = get_inputs_image(filename_queue, config)
image_batch = create_batch([image], config['batch_size'])
return image_batch
def get_inputs_image(filename_queue, config):
output_size = config['output_size']
image_size = config['image_size']
c_dim = config['c_dim']
# Read a record, getting filenames from the filename_queue.
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
image = tf.image.decode_image(value, channels=c_dim)
image = tf.cast(image, tf.float32)/255.
image_shape = tf.shape(image)
image_height, image_width = image_shape[0], image_shape[1]
offset_height = tf.cast((image_height - image_size)/2, tf.int32)
offset_width = tf.cast((image_width - image_size)/2, tf.int32)
image = tf.image.crop_to_bounding_box(image, offset_height, offset_width, image_size, image_size)
image = tf.image.resize_images(image, [output_size, output_size])
image.set_shape([output_size, output_size, c_dim])
return image
def | (filename_queue, config):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
# Defaults are not specified since all keys are required.
features={
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64),
'depth': tf.FixedLenFeature([], tf.int64),
'label': tf.FixedLenFeature([], tf.int64),
'image_raw': tf.FixedLenFeature([], tf.string),
})
image = tf.decode_raw(features['image_raw'], tf.uint8)
image.set_shape([784])
image = tf.reshape(image, [28, 28, 1])
image = tf.cast(image, tf.float32) / 255.
# Convert label from a scalar uint8 tensor to an int32 scalar.
label = tf.cast(features['label'], tf.int32)
binary_image = (tf.random_uniform(image.get_shape()) <= image)
binary_image = tf.cast(binary_image, tf.float32)
return binary_image
def get_inputs_cifar10(filename_queue, config):
output_size = config['output_size']
image_size = config['image_size']
c_dim = config['c_dim']
# Dimensions of the images in the CIFAR-10 dataset.
# See http://www.cs.toronto.edu/~kriz/cifar.html for a description of the
# input format.
label_bytes = 1 # 2 for CIFAR-100
image_bytes = 32 * 32 * 3
# Every record consists of a label followed by the image, with a
# fixed number of bytes for each.
record_bytes = label_bytes + image_bytes
# Read a record, getting filenames from the filename_queue.
reader = tf.FixedLengthRecordReader(record_bytes=record_bytes)
key, value = reader.read(filename_queue)
record = tf.decode_raw(value, tf.uint8)
# The first bytes represent the label, which we convert from uint8->int32.
label = tf.cast(record[0], tf.int32)
# The remaining bytes after the label represent the image, which we reshape
# from [depth * height * width] to [depth, height, width].
#tf.strided_slice(record, [label_bytes], [label_bytes + image_bytes])
image = tf.reshape(record[label_bytes:label_bytes+image_bytes], [3, 32, 32])
image = tf.cast(image, tf.float32)/255.
# Convert from [depth, height, width] to [height, width, depth].
image = tf.transpose(image, [1, 2, 0])
return image
def get_filename_queue(split_file, data_dir):
with open(split_file, 'r') as f:
filenames = f.readlines()
filenames = [os.path.join(data_dir, f.strip()) for f in filenames]
for f in filenames:
if not os.path.exists(f):
raise ValueError('Failed to find file: ' + f)
filename_queue = tf.train.string_input_producer(filenames)
return filename_queue
def create_batch(inputs, batch_size=64, min_queue_examples=1000, num_preprocess_threads=12, enqueue_many=False):
# Generate a batch of images and labels by building up a queue of examples.
batch = tf.train.shuffle_batch(
inputs,
batch_size=batch_size,
num_threads=num_preprocess_threads,
capacity=min_queue_examples + 3 * batch_size,
min_after_dequeue=min_queue_examples,
enqueue_many=enqueue_many,
)
return batch
| get_inputs_mnist | identifier_name |
inputs.py | import numpy as np
import tensorflow as tf
import os
def get_inputs(split, config):
split_dir = config['split_dir']
data_dir = config['data_dir']
dataset = config['dataset']
split_file = os.path.join(split_dir, dataset, split + '.lst')
filename_queue = get_filename_queue(split_file, os.path.join(data_dir, dataset))
if dataset == 'mnist':
image = get_inputs_mnist(filename_queue, config)
config['output_size'] = 28
config['c_dim'] = 1
elif dataset == "cifar-10":
image = get_inputs_cifar10(filename_queue, config)
config['output_size'] = 32
config['c_dim'] = 3
else:
image = get_inputs_image(filename_queue, config)
image_batch = create_batch([image], config['batch_size'])
return image_batch
def get_inputs_image(filename_queue, config):
output_size = config['output_size']
image_size = config['image_size']
c_dim = config['c_dim']
# Read a record, getting filenames from the filename_queue.
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
image = tf.image.decode_image(value, channels=c_dim)
image = tf.cast(image, tf.float32)/255.
image_shape = tf.shape(image)
image_height, image_width = image_shape[0], image_shape[1]
offset_height = tf.cast((image_height - image_size)/2, tf.int32)
offset_width = tf.cast((image_width - image_size)/2, tf.int32)
image = tf.image.crop_to_bounding_box(image, offset_height, offset_width, image_size, image_size)
image = tf.image.resize_images(image, [output_size, output_size])
image.set_shape([output_size, output_size, c_dim])
return image
def get_inputs_mnist(filename_queue, config):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
# Defaults are not specified since all keys are required.
features={
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64),
'depth': tf.FixedLenFeature([], tf.int64), | image = tf.decode_raw(features['image_raw'], tf.uint8)
image.set_shape([784])
image = tf.reshape(image, [28, 28, 1])
image = tf.cast(image, tf.float32) / 255.
# Convert label from a scalar uint8 tensor to an int32 scalar.
label = tf.cast(features['label'], tf.int32)
binary_image = (tf.random_uniform(image.get_shape()) <= image)
binary_image = tf.cast(binary_image, tf.float32)
return binary_image
def get_inputs_cifar10(filename_queue, config):
output_size = config['output_size']
image_size = config['image_size']
c_dim = config['c_dim']
# Dimensions of the images in the CIFAR-10 dataset.
# See http://www.cs.toronto.edu/~kriz/cifar.html for a description of the
# input format.
label_bytes = 1 # 2 for CIFAR-100
image_bytes = 32 * 32 * 3
# Every record consists of a label followed by the image, with a
# fixed number of bytes for each.
record_bytes = label_bytes + image_bytes
# Read a record, getting filenames from the filename_queue.
reader = tf.FixedLengthRecordReader(record_bytes=record_bytes)
key, value = reader.read(filename_queue)
record = tf.decode_raw(value, tf.uint8)
# The first bytes represent the label, which we convert from uint8->int32.
label = tf.cast(record[0], tf.int32)
# The remaining bytes after the label represent the image, which we reshape
# from [depth * height * width] to [depth, height, width].
#tf.strided_slice(record, [label_bytes], [label_bytes + image_bytes])
image = tf.reshape(record[label_bytes:label_bytes+image_bytes], [3, 32, 32])
image = tf.cast(image, tf.float32)/255.
# Convert from [depth, height, width] to [height, width, depth].
image = tf.transpose(image, [1, 2, 0])
return image
def get_filename_queue(split_file, data_dir):
with open(split_file, 'r') as f:
filenames = f.readlines()
filenames = [os.path.join(data_dir, f.strip()) for f in filenames]
for f in filenames:
if not os.path.exists(f):
raise ValueError('Failed to find file: ' + f)
filename_queue = tf.train.string_input_producer(filenames)
return filename_queue
def create_batch(inputs, batch_size=64, min_queue_examples=1000, num_preprocess_threads=12, enqueue_many=False):
# Generate a batch of images and labels by building up a queue of examples.
batch = tf.train.shuffle_batch(
inputs,
batch_size=batch_size,
num_threads=num_preprocess_threads,
capacity=min_queue_examples + 3 * batch_size,
min_after_dequeue=min_queue_examples,
enqueue_many=enqueue_many,
)
return batch | 'label': tf.FixedLenFeature([], tf.int64),
'image_raw': tf.FixedLenFeature([], tf.string),
})
| random_line_split |
View.js | "use strict";
var _prototypeProperties = function (child, staticProps, instanceProps) { if (staticProps) Object.defineProperties(child, staticProps); if (instanceProps) Object.defineProperties(child.prototype, instanceProps); };
var _get = function get(object, property, receiver) { var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { return get(parent, property, receiver); } } else if ("value" in desc && desc.writable) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } };
var _inherits = function (subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; };
var _classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } };
require("babel/polyfill");
var Module = require("../Module.js").Module;
// # Kronicle.View class
// depends: [Kronicle.Module](Module.html)
// The module used for rendering main views. An array of smaller components are usually used to make up a view.
// The constructor takes an args object that has the following two properties:
// - template - a function that returns a string
// - components - an array of Kronicle Components
var View = exports.View = (function (Module) {
function View(args) {
_classCallCheck(this, View);
this.template = args.template || function () {
return "";
}; | if (args.components) {
this.addComponents(args.components);
}
_get(Object.getPrototypeOf(View.prototype), "constructor", this).call(this, { name: args.name + "View" });
}
_inherits(View, Module);
_prototypeProperties(View, null, {
render: {
// ## render method
// The render method passes any data avaialbe to a template and returns the rendered string
// Takes two arguments
// - err - an error that occured in the parent function
// - data - the data to be passed to template
value: function render(err, data) {
if (!err) {
return this.template(data);
}
},
writable: true,
configurable: true
},
addComponents: {
// ## addComponents method
// The method used to add an array of Kronicle Components to the View
// Takes one argument:
// - components - an array of Kronicle Components
value: function addComponents(components) {
for (var _iterator = components[Symbol.iterator](), _step; !(_step = _iterator.next()).done;) {
var component = _step.value;
this.addComponent(component);
}
},
writable: true,
configurable: true
},
addComponent: {
// ## addComponent method
// The method used to add a single Kronicle Component to the View
// Takes one argument:
// - component
value: function addComponent(component) {
this.components[component.name.split("Component")[0]] = component;
},
writable: true,
configurable: true
}
});
return View;
})(Module);
Object.defineProperty(exports, "__esModule", {
value: true
}); | this.components = []; | random_line_split |
View.js | "use strict";
var _prototypeProperties = function (child, staticProps, instanceProps) { if (staticProps) Object.defineProperties(child, staticProps); if (instanceProps) Object.defineProperties(child.prototype, instanceProps); };
var _get = function get(object, property, receiver) { var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { return get(parent, property, receiver); } } else if ("value" in desc && desc.writable) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } };
var _inherits = function (subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; };
var _classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } };
require("babel/polyfill");
var Module = require("../Module.js").Module;
// # Kronicle.View class
// depends: [Kronicle.Module](Module.html)
// The module used for rendering main views. An array of smaller components are usually used to make up a view.
// The constructor takes an args object that has the following two properties:
// - template - a function that returns a string
// - components - an array of Kronicle Components
var View = exports.View = (function (Module) {
function | (args) {
_classCallCheck(this, View);
this.template = args.template || function () {
return "";
};
this.components = [];
if (args.components) {
this.addComponents(args.components);
}
_get(Object.getPrototypeOf(View.prototype), "constructor", this).call(this, { name: args.name + "View" });
}
_inherits(View, Module);
_prototypeProperties(View, null, {
render: {
// ## render method
// The render method passes any data avaialbe to a template and returns the rendered string
// Takes two arguments
// - err - an error that occured in the parent function
// - data - the data to be passed to template
value: function render(err, data) {
if (!err) {
return this.template(data);
}
},
writable: true,
configurable: true
},
addComponents: {
// ## addComponents method
// The method used to add an array of Kronicle Components to the View
// Takes one argument:
// - components - an array of Kronicle Components
value: function addComponents(components) {
for (var _iterator = components[Symbol.iterator](), _step; !(_step = _iterator.next()).done;) {
var component = _step.value;
this.addComponent(component);
}
},
writable: true,
configurable: true
},
addComponent: {
// ## addComponent method
// The method used to add a single Kronicle Component to the View
// Takes one argument:
// - component
value: function addComponent(component) {
this.components[component.name.split("Component")[0]] = component;
},
writable: true,
configurable: true
}
});
return View;
})(Module);
Object.defineProperty(exports, "__esModule", {
value: true
}); | View | identifier_name |
View.js | "use strict";
var _prototypeProperties = function (child, staticProps, instanceProps) { if (staticProps) Object.defineProperties(child, staticProps); if (instanceProps) Object.defineProperties(child.prototype, instanceProps); };
var _get = function get(object, property, receiver) { var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { return get(parent, property, receiver); } } else if ("value" in desc && desc.writable) { return desc.value; } else | };
var _inherits = function (subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; };
var _classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } };
require("babel/polyfill");
var Module = require("../Module.js").Module;
// # Kronicle.View class
// depends: [Kronicle.Module](Module.html)
// The module used for rendering main views. An array of smaller components are usually used to make up a view.
// The constructor takes an args object that has the following two properties:
// - template - a function that returns a string
// - components - an array of Kronicle Components
var View = exports.View = (function (Module) {
function View(args) {
_classCallCheck(this, View);
this.template = args.template || function () {
return "";
};
this.components = [];
if (args.components) {
this.addComponents(args.components);
}
_get(Object.getPrototypeOf(View.prototype), "constructor", this).call(this, { name: args.name + "View" });
}
_inherits(View, Module);
_prototypeProperties(View, null, {
render: {
// ## render method
// The render method passes any data avaialbe to a template and returns the rendered string
// Takes two arguments
// - err - an error that occured in the parent function
// - data - the data to be passed to template
value: function render(err, data) {
if (!err) {
return this.template(data);
}
},
writable: true,
configurable: true
},
addComponents: {
// ## addComponents method
// The method used to add an array of Kronicle Components to the View
// Takes one argument:
// - components - an array of Kronicle Components
value: function addComponents(components) {
for (var _iterator = components[Symbol.iterator](), _step; !(_step = _iterator.next()).done;) {
var component = _step.value;
this.addComponent(component);
}
},
writable: true,
configurable: true
},
addComponent: {
// ## addComponent method
// The method used to add a single Kronicle Component to the View
// Takes one argument:
// - component
value: function addComponent(component) {
this.components[component.name.split("Component")[0]] = component;
},
writable: true,
configurable: true
}
});
return View;
})(Module);
Object.defineProperty(exports, "__esModule", {
value: true
}); | { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } | conditional_block |
View.js | "use strict";
var _prototypeProperties = function (child, staticProps, instanceProps) { if (staticProps) Object.defineProperties(child, staticProps); if (instanceProps) Object.defineProperties(child.prototype, instanceProps); };
var _get = function get(object, property, receiver) { var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { return get(parent, property, receiver); } } else if ("value" in desc && desc.writable) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } };
var _inherits = function (subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; };
var _classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } };
require("babel/polyfill");
var Module = require("../Module.js").Module;
// # Kronicle.View class
// depends: [Kronicle.Module](Module.html)
// The module used for rendering main views. An array of smaller components are usually used to make up a view.
// The constructor takes an args object that has the following two properties:
// - template - a function that returns a string
// - components - an array of Kronicle Components
var View = exports.View = (function (Module) {
function View(args) |
_inherits(View, Module);
_prototypeProperties(View, null, {
render: {
// ## render method
// The render method passes any data avaialbe to a template and returns the rendered string
// Takes two arguments
// - err - an error that occured in the parent function
// - data - the data to be passed to template
value: function render(err, data) {
if (!err) {
return this.template(data);
}
},
writable: true,
configurable: true
},
addComponents: {
// ## addComponents method
// The method used to add an array of Kronicle Components to the View
// Takes one argument:
// - components - an array of Kronicle Components
value: function addComponents(components) {
for (var _iterator = components[Symbol.iterator](), _step; !(_step = _iterator.next()).done;) {
var component = _step.value;
this.addComponent(component);
}
},
writable: true,
configurable: true
},
addComponent: {
// ## addComponent method
// The method used to add a single Kronicle Component to the View
// Takes one argument:
// - component
value: function addComponent(component) {
this.components[component.name.split("Component")[0]] = component;
},
writable: true,
configurable: true
}
});
return View;
})(Module);
Object.defineProperty(exports, "__esModule", {
value: true
}); | {
_classCallCheck(this, View);
this.template = args.template || function () {
return "";
};
this.components = [];
if (args.components) {
this.addComponents(args.components);
}
_get(Object.getPrototypeOf(View.prototype), "constructor", this).call(this, { name: args.name + "View" });
} | identifier_body |
query-builder.js | 'use strict';
var resourceManager = require('./resource-manager');
var debog = require('./debog');
var relLog = debog('relationships');
var queryLog = debog('query');
/**
* @name query-builder
* @module query-builder
* @description
* build and return query object
*
* @param {object} struct - structure
* @param {array} ids - array of ids for calling specific peices fo data
* @param {functon} callback - callback
*/
module.exports = function (struct, ids) {
ids = [].concat(ids || []);
var queryObj = buildQueries(struct);
var query = 'SELECT '+getAttrQuery(queryObj)+' FROM '+queryObj.root.$$resource.table+'\n'+queryObj.joins.join('\n');
if (ids && ids.length) {
query += '\nWHERE '+queryObj.root.$$resource.table+'.'+queryObj.root.$$resource.idField+' IN ('+ids.join(',')+')'
}
if (queryLog.active) { queryLog.sql(query); }
queryObj.query = query;
return queryObj;
};
function buildQueries(struct, obj) {
obj = obj || {
attrs: {},
root: struct,
joins: [],
joinedResources: [struct.$$resource.table]
};
addAttributes(struct, obj);
// add struct join
if (obj.root !== struct) { // is not root resource
var relations = findRelations(struct.$$resource, struct.$$parent.$$resource);
relations.forEach(function (rel) {
// block from adding the same resource because of multiple fields with the same resource
if (obj.joinedResources.indexOf(rel.resource.table) !== -1) { return; }
obj.joins.push(buildRelationJoin(rel));
obj.joinedResources.push(rel.resource.table);
});
}
// add field level joins
// there may be none if the fields contain no additional resources
Object.keys(struct).forEach(function (key) {
var item = struct[key];
if (item.$$struct) { return; }
if (obj.joinedResources.indexOf(item.resource.table) === -1) {
var relations = findRelations(item.resource, struct.$$resource);
relations.forEach(function (rel) {
// block from adding the same resource because of multiple fields with the same resource
if (obj.joinedResources.indexOf(rel.resource.table) !== -1) { return; }
obj.joins.push(buildRelationJoin(rel));
obj.joinedResources.push(rel.resource.table);
});
}
});
// recusively handle sub structures
Object.keys(struct).forEach(function (key) {
if (!struct[key].$$struct) { return; }
buildQueries(struct[key], obj);
});
return obj;
}
function buildRelationJoin(rel) {
return 'LEFT JOIN '+
rel.resource.table+
' ON '+
rel.resource.table+'.'+rel.resourceField+
' = '+
rel.root.table+'.'+rel.rootField;
}
// walk resources to find relationship path
function findRelations(resource, parentResource) {
var arr = [];
if (relLog.active) { debugRelations(resource, parentResource); }
// top down
var found = pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr);
if (found) {
arr = arr.reverse();
return arr;
}
// bottom up
pathFinder(resource, parentResource, {
added: [],
parent: resource,
children: {}
}, arr);
return arr;
}
function debugRelations(resource, parentResource) {
var arr = [];
relLog.stash('lookup', relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowRight, relLog.chalk.magenta(resource.name));
// top down
var found = pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr);
if (found) {
arr = arr.reverse();
logJoins(arr);
relLog.unstash(relLog.chalk.green('path resolved'));
return;
}
relLog.unstash(relLog.chalk.red('could not resolve path, will attemp'), relLog.chalk.yellow('reverse lookup'));
// bottom up
relLog.stash(relLog.chalk.yellow('reverse lookup'), relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowLeft, relLog.chalk.magenta(resource.name));
found = pathFinder(resource, parentResource, {
added: [],
parent: resource,
children: {}
}, arr);
logJoins(arr);
relLog.unstash(found ? relLog.chalk.green('path resolved') : relLog.chalk.red('could not resolve path'));
if (found) { return; }
// run path again and logout bad pathways
// this code is meant for debugging purposes only
relLog.stash(relLog.chalk.bgYellow(relLog.chalk.black('debug Code')), relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowRight, relLog.chalk.magenta(resource.name));
pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr, true);
relLog.unstash(relLog.chalk.bgYellow(relLog.chalk.black('debug Code')));
}
// TODO fix path finding. currently is not working with legs example
// walk resources for path
function pathFinder(a, b, path, arr, debug) {
return (a.relationships || []).some(function (item) {
path.children[item.resource.name] = {
added: path.added,
parent: path,
item: item,
children: {}
};
// NOTE debug code
var subDebug = debug;
if (subDebug && relLog.active) {
if (subDebug === true) | else { // create new groups for each layer
subDebug = subDebug.nest(item.root.name);
}
subDebug.stash(item.resource.name);
}
// END Debug Code
// path completed
// convert to flat array and return true for sucessfull
if (item.resource.name === b.name) {
reducePath(path.children[item.resource.name], arr);
return true;
}
// continue finding on paths that have not been explored yet
if (path.added.indexOf(item.resource.name) === -1) {
path.added.push(item.resource.name);
return pathFinder(item.resource, b, path.children[item.resource.name], arr, subDebug);
}
});
}
// turn nested path into flat array
function reducePath(path, arr) {
while (path.item) {
arr.push(path.item);
path = path.parent;
}
}
// flatten attribute object into comma deliminated string
function getAttrQuery(queryObj) {
return Object.keys(queryObj.attrs).map(function (key) {
return queryObj.attrs[key].table+'.'+queryObj.attrs[key].field+' AS '+queryObj.attrs[key].alias
}).join(',');
}
function addAttributes(struct, obj) {
Object.keys(struct).forEach(function (key) {
if (struct[key].$$struct) { return; }
var item = struct[key];
var id = getUID();
var name = item.resource.table+'_'+item.field;
// NOTE may want to make multipel hashes so we can lookup by mulple peices of info
obj.attrs[name] = {
id: id,
table: item.resource.table,
field: item.field,
alias: id+'_'+name,
config: item.resource[item.field]
};
});
}
// uuids for attribute aliases
var uid = 0;
function getUID() {
return ''+(uid++);
}
function logJoins(arr) {
arr.forEach(function (item) {
relLog.stash('join %s %s %s on %s.%s = %s.%s',
relLog.chalk.magenta(item.resource.name),
relLog.symbols.arrowRight,
relLog.chalk.magenta(item.root.name),
relLog.chalk.magenta(item.resource.table),
relLog.chalk.magenta(item.resourceField),
relLog.chalk.magenta(item.root.table),
relLog.chalk.magenta(item.rootField)
);
});
}
| { // root
relLog.stash('Pathway that was used in attempt to find '+relLog.chalk.magenta(b.name)+' resource')
subDebug = relLog.nest(item.root.name);
} | conditional_block |
query-builder.js | var debog = require('./debog');
var relLog = debog('relationships');
var queryLog = debog('query');
/**
* @name query-builder
* @module query-builder
* @description
* build and return query object
*
* @param {object} struct - structure
* @param {array} ids - array of ids for calling specific peices fo data
* @param {functon} callback - callback
*/
module.exports = function (struct, ids) {
ids = [].concat(ids || []);
var queryObj = buildQueries(struct);
var query = 'SELECT '+getAttrQuery(queryObj)+' FROM '+queryObj.root.$$resource.table+'\n'+queryObj.joins.join('\n');
if (ids && ids.length) {
query += '\nWHERE '+queryObj.root.$$resource.table+'.'+queryObj.root.$$resource.idField+' IN ('+ids.join(',')+')'
}
if (queryLog.active) { queryLog.sql(query); }
queryObj.query = query;
return queryObj;
};
function buildQueries(struct, obj) {
obj = obj || {
attrs: {},
root: struct,
joins: [],
joinedResources: [struct.$$resource.table]
};
addAttributes(struct, obj);
// add struct join
if (obj.root !== struct) { // is not root resource
var relations = findRelations(struct.$$resource, struct.$$parent.$$resource);
relations.forEach(function (rel) {
// block from adding the same resource because of multiple fields with the same resource
if (obj.joinedResources.indexOf(rel.resource.table) !== -1) { return; }
obj.joins.push(buildRelationJoin(rel));
obj.joinedResources.push(rel.resource.table);
});
}
// add field level joins
// there may be none if the fields contain no additional resources
Object.keys(struct).forEach(function (key) {
var item = struct[key];
if (item.$$struct) { return; }
if (obj.joinedResources.indexOf(item.resource.table) === -1) {
var relations = findRelations(item.resource, struct.$$resource);
relations.forEach(function (rel) {
// block from adding the same resource because of multiple fields with the same resource
if (obj.joinedResources.indexOf(rel.resource.table) !== -1) { return; }
obj.joins.push(buildRelationJoin(rel));
obj.joinedResources.push(rel.resource.table);
});
}
});
// recusively handle sub structures
Object.keys(struct).forEach(function (key) {
if (!struct[key].$$struct) { return; }
buildQueries(struct[key], obj);
});
return obj;
}
function buildRelationJoin(rel) {
return 'LEFT JOIN '+
rel.resource.table+
' ON '+
rel.resource.table+'.'+rel.resourceField+
' = '+
rel.root.table+'.'+rel.rootField;
}
// walk resources to find relationship path
function findRelations(resource, parentResource) {
var arr = [];
if (relLog.active) { debugRelations(resource, parentResource); }
// top down
var found = pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr);
if (found) {
arr = arr.reverse();
return arr;
}
// bottom up
pathFinder(resource, parentResource, {
added: [],
parent: resource,
children: {}
}, arr);
return arr;
}
function debugRelations(resource, parentResource) {
var arr = [];
relLog.stash('lookup', relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowRight, relLog.chalk.magenta(resource.name));
// top down
var found = pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr);
if (found) {
arr = arr.reverse();
logJoins(arr);
relLog.unstash(relLog.chalk.green('path resolved'));
return;
}
relLog.unstash(relLog.chalk.red('could not resolve path, will attemp'), relLog.chalk.yellow('reverse lookup'));
// bottom up
relLog.stash(relLog.chalk.yellow('reverse lookup'), relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowLeft, relLog.chalk.magenta(resource.name));
found = pathFinder(resource, parentResource, {
added: [],
parent: resource,
children: {}
}, arr);
logJoins(arr);
relLog.unstash(found ? relLog.chalk.green('path resolved') : relLog.chalk.red('could not resolve path'));
if (found) { return; }
// run path again and logout bad pathways
// this code is meant for debugging purposes only
relLog.stash(relLog.chalk.bgYellow(relLog.chalk.black('debug Code')), relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowRight, relLog.chalk.magenta(resource.name));
pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr, true);
relLog.unstash(relLog.chalk.bgYellow(relLog.chalk.black('debug Code')));
}
// TODO fix path finding. currently is not working with legs example
// walk resources for path
function pathFinder(a, b, path, arr, debug) {
return (a.relationships || []).some(function (item) {
path.children[item.resource.name] = {
added: path.added,
parent: path,
item: item,
children: {}
};
// NOTE debug code
var subDebug = debug;
if (subDebug && relLog.active) {
if (subDebug === true) { // root
relLog.stash('Pathway that was used in attempt to find '+relLog.chalk.magenta(b.name)+' resource')
subDebug = relLog.nest(item.root.name);
} else { // create new groups for each layer
subDebug = subDebug.nest(item.root.name);
}
subDebug.stash(item.resource.name);
}
// END Debug Code
// path completed
// convert to flat array and return true for sucessfull
if (item.resource.name === b.name) {
reducePath(path.children[item.resource.name], arr);
return true;
}
// continue finding on paths that have not been explored yet
if (path.added.indexOf(item.resource.name) === -1) {
path.added.push(item.resource.name);
return pathFinder(item.resource, b, path.children[item.resource.name], arr, subDebug);
}
});
}
// turn nested path into flat array
function reducePath(path, arr) {
while (path.item) {
arr.push(path.item);
path = path.parent;
}
}
// flatten attribute object into comma deliminated string
function getAttrQuery(queryObj) {
return Object.keys(queryObj.attrs).map(function (key) {
return queryObj.attrs[key].table+'.'+queryObj.attrs[key].field+' AS '+queryObj.attrs[key].alias
}).join(',');
}
function addAttributes(struct, obj) {
Object.keys(struct).forEach(function (key) {
if (struct[key].$$struct) { return; }
var item = struct[key];
var id = getUID();
var name = item.resource.table+'_'+item.field;
// NOTE may want to make multipel hashes so we can lookup by mulple peices of info
obj.attrs[name] = {
id: id,
table: item.resource.table,
field: item.field,
alias: id+'_'+name,
config: item.resource[item.field]
};
});
}
// uuids for attribute aliases
var uid = 0;
function getUID() {
return ''+(uid++);
}
function logJoins(arr) {
arr.forEach(function (item) {
relLog.stash('join %s %s %s on %s.%s = %s.%s',
relLog.chalk.magenta(item.resource.name),
relLog.symbols.arrowRight,
relLog.chalk.magenta(item.root.name),
relLog.chalk.magenta(item.resource.table),
relLog.chalk.magenta(item.resourceField),
relLog.chalk.magenta(item.root.table),
relLog.chalk.magenta(item.rootField)
);
});
} | 'use strict';
var resourceManager = require('./resource-manager'); | random_line_split | |
query-builder.js | 'use strict';
var resourceManager = require('./resource-manager');
var debog = require('./debog');
var relLog = debog('relationships');
var queryLog = debog('query');
/**
* @name query-builder
* @module query-builder
* @description
* build and return query object
*
* @param {object} struct - structure
* @param {array} ids - array of ids for calling specific peices fo data
* @param {functon} callback - callback
*/
module.exports = function (struct, ids) {
ids = [].concat(ids || []);
var queryObj = buildQueries(struct);
var query = 'SELECT '+getAttrQuery(queryObj)+' FROM '+queryObj.root.$$resource.table+'\n'+queryObj.joins.join('\n');
if (ids && ids.length) {
query += '\nWHERE '+queryObj.root.$$resource.table+'.'+queryObj.root.$$resource.idField+' IN ('+ids.join(',')+')'
}
if (queryLog.active) { queryLog.sql(query); }
queryObj.query = query;
return queryObj;
};
function buildQueries(struct, obj) {
obj = obj || {
attrs: {},
root: struct,
joins: [],
joinedResources: [struct.$$resource.table]
};
addAttributes(struct, obj);
// add struct join
if (obj.root !== struct) { // is not root resource
var relations = findRelations(struct.$$resource, struct.$$parent.$$resource);
relations.forEach(function (rel) {
// block from adding the same resource because of multiple fields with the same resource
if (obj.joinedResources.indexOf(rel.resource.table) !== -1) { return; }
obj.joins.push(buildRelationJoin(rel));
obj.joinedResources.push(rel.resource.table);
});
}
// add field level joins
// there may be none if the fields contain no additional resources
Object.keys(struct).forEach(function (key) {
var item = struct[key];
if (item.$$struct) { return; }
if (obj.joinedResources.indexOf(item.resource.table) === -1) {
var relations = findRelations(item.resource, struct.$$resource);
relations.forEach(function (rel) {
// block from adding the same resource because of multiple fields with the same resource
if (obj.joinedResources.indexOf(rel.resource.table) !== -1) { return; }
obj.joins.push(buildRelationJoin(rel));
obj.joinedResources.push(rel.resource.table);
});
}
});
// recusively handle sub structures
Object.keys(struct).forEach(function (key) {
if (!struct[key].$$struct) { return; }
buildQueries(struct[key], obj);
});
return obj;
}
function buildRelationJoin(rel) {
return 'LEFT JOIN '+
rel.resource.table+
' ON '+
rel.resource.table+'.'+rel.resourceField+
' = '+
rel.root.table+'.'+rel.rootField;
}
// walk resources to find relationship path
function findRelations(resource, parentResource) {
var arr = [];
if (relLog.active) { debugRelations(resource, parentResource); }
// top down
var found = pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr);
if (found) {
arr = arr.reverse();
return arr;
}
// bottom up
pathFinder(resource, parentResource, {
added: [],
parent: resource,
children: {}
}, arr);
return arr;
}
function debugRelations(resource, parentResource) {
var arr = [];
relLog.stash('lookup', relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowRight, relLog.chalk.magenta(resource.name));
// top down
var found = pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr);
if (found) {
arr = arr.reverse();
logJoins(arr);
relLog.unstash(relLog.chalk.green('path resolved'));
return;
}
relLog.unstash(relLog.chalk.red('could not resolve path, will attemp'), relLog.chalk.yellow('reverse lookup'));
// bottom up
relLog.stash(relLog.chalk.yellow('reverse lookup'), relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowLeft, relLog.chalk.magenta(resource.name));
found = pathFinder(resource, parentResource, {
added: [],
parent: resource,
children: {}
}, arr);
logJoins(arr);
relLog.unstash(found ? relLog.chalk.green('path resolved') : relLog.chalk.red('could not resolve path'));
if (found) { return; }
// run path again and logout bad pathways
// this code is meant for debugging purposes only
relLog.stash(relLog.chalk.bgYellow(relLog.chalk.black('debug Code')), relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowRight, relLog.chalk.magenta(resource.name));
pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr, true);
relLog.unstash(relLog.chalk.bgYellow(relLog.chalk.black('debug Code')));
}
// TODO fix path finding. currently is not working with legs example
// walk resources for path
function pathFinder(a, b, path, arr, debug) |
// turn nested path into flat array
function reducePath(path, arr) {
while (path.item) {
arr.push(path.item);
path = path.parent;
}
}
// flatten attribute object into comma deliminated string
function getAttrQuery(queryObj) {
return Object.keys(queryObj.attrs).map(function (key) {
return queryObj.attrs[key].table+'.'+queryObj.attrs[key].field+' AS '+queryObj.attrs[key].alias
}).join(',');
}
function addAttributes(struct, obj) {
Object.keys(struct).forEach(function (key) {
if (struct[key].$$struct) { return; }
var item = struct[key];
var id = getUID();
var name = item.resource.table+'_'+item.field;
// NOTE may want to make multipel hashes so we can lookup by mulple peices of info
obj.attrs[name] = {
id: id,
table: item.resource.table,
field: item.field,
alias: id+'_'+name,
config: item.resource[item.field]
};
});
}
// uuids for attribute aliases
var uid = 0;
function getUID() {
return ''+(uid++);
}
function logJoins(arr) {
arr.forEach(function (item) {
relLog.stash('join %s %s %s on %s.%s = %s.%s',
relLog.chalk.magenta(item.resource.name),
relLog.symbols.arrowRight,
relLog.chalk.magenta(item.root.name),
relLog.chalk.magenta(item.resource.table),
relLog.chalk.magenta(item.resourceField),
relLog.chalk.magenta(item.root.table),
relLog.chalk.magenta(item.rootField)
);
});
}
| {
return (a.relationships || []).some(function (item) {
path.children[item.resource.name] = {
added: path.added,
parent: path,
item: item,
children: {}
};
// NOTE debug code
var subDebug = debug;
if (subDebug && relLog.active) {
if (subDebug === true) { // root
relLog.stash('Pathway that was used in attempt to find '+relLog.chalk.magenta(b.name)+' resource')
subDebug = relLog.nest(item.root.name);
} else { // create new groups for each layer
subDebug = subDebug.nest(item.root.name);
}
subDebug.stash(item.resource.name);
}
// END Debug Code
// path completed
// convert to flat array and return true for sucessfull
if (item.resource.name === b.name) {
reducePath(path.children[item.resource.name], arr);
return true;
}
// continue finding on paths that have not been explored yet
if (path.added.indexOf(item.resource.name) === -1) {
path.added.push(item.resource.name);
return pathFinder(item.resource, b, path.children[item.resource.name], arr, subDebug);
}
});
} | identifier_body |
query-builder.js | 'use strict';
var resourceManager = require('./resource-manager');
var debog = require('./debog');
var relLog = debog('relationships');
var queryLog = debog('query');
/**
* @name query-builder
* @module query-builder
* @description
* build and return query object
*
* @param {object} struct - structure
* @param {array} ids - array of ids for calling specific peices fo data
* @param {functon} callback - callback
*/
module.exports = function (struct, ids) {
ids = [].concat(ids || []);
var queryObj = buildQueries(struct);
var query = 'SELECT '+getAttrQuery(queryObj)+' FROM '+queryObj.root.$$resource.table+'\n'+queryObj.joins.join('\n');
if (ids && ids.length) {
query += '\nWHERE '+queryObj.root.$$resource.table+'.'+queryObj.root.$$resource.idField+' IN ('+ids.join(',')+')'
}
if (queryLog.active) { queryLog.sql(query); }
queryObj.query = query;
return queryObj;
};
function buildQueries(struct, obj) {
obj = obj || {
attrs: {},
root: struct,
joins: [],
joinedResources: [struct.$$resource.table]
};
addAttributes(struct, obj);
// add struct join
if (obj.root !== struct) { // is not root resource
var relations = findRelations(struct.$$resource, struct.$$parent.$$resource);
relations.forEach(function (rel) {
// block from adding the same resource because of multiple fields with the same resource
if (obj.joinedResources.indexOf(rel.resource.table) !== -1) { return; }
obj.joins.push(buildRelationJoin(rel));
obj.joinedResources.push(rel.resource.table);
});
}
// add field level joins
// there may be none if the fields contain no additional resources
Object.keys(struct).forEach(function (key) {
var item = struct[key];
if (item.$$struct) { return; }
if (obj.joinedResources.indexOf(item.resource.table) === -1) {
var relations = findRelations(item.resource, struct.$$resource);
relations.forEach(function (rel) {
// block from adding the same resource because of multiple fields with the same resource
if (obj.joinedResources.indexOf(rel.resource.table) !== -1) { return; }
obj.joins.push(buildRelationJoin(rel));
obj.joinedResources.push(rel.resource.table);
});
}
});
// recusively handle sub structures
Object.keys(struct).forEach(function (key) {
if (!struct[key].$$struct) { return; }
buildQueries(struct[key], obj);
});
return obj;
}
function buildRelationJoin(rel) {
return 'LEFT JOIN '+
rel.resource.table+
' ON '+
rel.resource.table+'.'+rel.resourceField+
' = '+
rel.root.table+'.'+rel.rootField;
}
// walk resources to find relationship path
function | (resource, parentResource) {
var arr = [];
if (relLog.active) { debugRelations(resource, parentResource); }
// top down
var found = pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr);
if (found) {
arr = arr.reverse();
return arr;
}
// bottom up
pathFinder(resource, parentResource, {
added: [],
parent: resource,
children: {}
}, arr);
return arr;
}
function debugRelations(resource, parentResource) {
var arr = [];
relLog.stash('lookup', relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowRight, relLog.chalk.magenta(resource.name));
// top down
var found = pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr);
if (found) {
arr = arr.reverse();
logJoins(arr);
relLog.unstash(relLog.chalk.green('path resolved'));
return;
}
relLog.unstash(relLog.chalk.red('could not resolve path, will attemp'), relLog.chalk.yellow('reverse lookup'));
// bottom up
relLog.stash(relLog.chalk.yellow('reverse lookup'), relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowLeft, relLog.chalk.magenta(resource.name));
found = pathFinder(resource, parentResource, {
added: [],
parent: resource,
children: {}
}, arr);
logJoins(arr);
relLog.unstash(found ? relLog.chalk.green('path resolved') : relLog.chalk.red('could not resolve path'));
if (found) { return; }
// run path again and logout bad pathways
// this code is meant for debugging purposes only
relLog.stash(relLog.chalk.bgYellow(relLog.chalk.black('debug Code')), relLog.chalk.magenta(parentResource.name), relLog.symbols.arrowRight, relLog.chalk.magenta(resource.name));
pathFinder(parentResource, resource, {
added: [],
parent: parentResource,
children: {}
}, arr, true);
relLog.unstash(relLog.chalk.bgYellow(relLog.chalk.black('debug Code')));
}
// TODO fix path finding. currently is not working with legs example
// walk resources for path
function pathFinder(a, b, path, arr, debug) {
return (a.relationships || []).some(function (item) {
path.children[item.resource.name] = {
added: path.added,
parent: path,
item: item,
children: {}
};
// NOTE debug code
var subDebug = debug;
if (subDebug && relLog.active) {
if (subDebug === true) { // root
relLog.stash('Pathway that was used in attempt to find '+relLog.chalk.magenta(b.name)+' resource')
subDebug = relLog.nest(item.root.name);
} else { // create new groups for each layer
subDebug = subDebug.nest(item.root.name);
}
subDebug.stash(item.resource.name);
}
// END Debug Code
// path completed
// convert to flat array and return true for sucessfull
if (item.resource.name === b.name) {
reducePath(path.children[item.resource.name], arr);
return true;
}
// continue finding on paths that have not been explored yet
if (path.added.indexOf(item.resource.name) === -1) {
path.added.push(item.resource.name);
return pathFinder(item.resource, b, path.children[item.resource.name], arr, subDebug);
}
});
}
// turn nested path into flat array
function reducePath(path, arr) {
while (path.item) {
arr.push(path.item);
path = path.parent;
}
}
// flatten attribute object into comma deliminated string
function getAttrQuery(queryObj) {
return Object.keys(queryObj.attrs).map(function (key) {
return queryObj.attrs[key].table+'.'+queryObj.attrs[key].field+' AS '+queryObj.attrs[key].alias
}).join(',');
}
function addAttributes(struct, obj) {
Object.keys(struct).forEach(function (key) {
if (struct[key].$$struct) { return; }
var item = struct[key];
var id = getUID();
var name = item.resource.table+'_'+item.field;
// NOTE may want to make multipel hashes so we can lookup by mulple peices of info
obj.attrs[name] = {
id: id,
table: item.resource.table,
field: item.field,
alias: id+'_'+name,
config: item.resource[item.field]
};
});
}
// uuids for attribute aliases
var uid = 0;
function getUID() {
return ''+(uid++);
}
function logJoins(arr) {
arr.forEach(function (item) {
relLog.stash('join %s %s %s on %s.%s = %s.%s',
relLog.chalk.magenta(item.resource.name),
relLog.symbols.arrowRight,
relLog.chalk.magenta(item.root.name),
relLog.chalk.magenta(item.resource.table),
relLog.chalk.magenta(item.resourceField),
relLog.chalk.magenta(item.root.table),
relLog.chalk.magenta(item.rootField)
);
});
}
| findRelations | identifier_name |
pact.ts | import * as q from 'q';
import * as path from 'path';
import serverFactory, { Server, ServerOptions } from './server';
import stubFactory, { Stub, StubOptions } from './stub';
import verifierFactory, { VerifierOptions } from './verifier';
import messageFactory, { MessageOptions } from './message';
import publisherFactory, { PublisherOptions } from './publisher';
import canDeployFactory, {
CanDeployOptions,
CanDeployResponse,
} from './can-deploy';
import pactEnvironment from './pact-environment';
import logger, { LogLevels, setLogLevel } from './logger';
import { AbstractService } from './service';
import * as _ from 'underscore';
import mkdirp = require('mkdirp');
import rimraf = require('rimraf');
export class Pact {
private __servers: Server[] = [];
private __stubs: Stub[] = [];
constructor() {
// Check to see if we hit into Windows Long Path issue
if (pactEnvironment.isWindows()) {
try {
// Trying to trigger windows error by creating path that's over 260 characters long
const name =
'Jctyo0NXwbPN6Y1o8p2TkicKma2kfqmXwVLw6ypBX47uktBPX9FM9kbPraQXsAUZuT6BvenTbnWczXzuN4js0KB9e7P5cccxvmXPYcFhJnBvPSKGH1FlTqEOsjl8djk3md';
const dir = mkdirp.sync(path.resolve(__dirname, name, name));
dir && rimraf.sync(dir);
} catch {
logger.warn(
'WARNING: Windows Long Paths is not enabled and might cause Pact to crash if the path is too long. ' +
'To fix this issue, please consult https://github.com/pact-foundation/pact-node#enable-long-paths`',
);
}
}
// Listen for Node exiting or someone killing the process
// Must remove all the instances of Pact mock service
process.once('exit', () => this.removeAll());
process.once('SIGINT', () => process.exit());
}
public logLevel(level?: LogLevels | number): number | void {
return setLogLevel(level);
}
// Creates server with specified options
public createServer(options: ServerOptions = {}): Server {
if (
options &&
options.port &&
_.some(this.__servers, (s: Server) => s.options.port === options.port)
) {
let msg = `Port '${options.port}' is already in use by another process.`;
logger.error(msg);
throw new Error(msg);
}
let server = serverFactory(options);
this.__servers.push(server);
logger.info(
`Creating Pact Server with options: \n${JSON.stringify(server.options)}`,
);
// Listen to server delete events, to remove from server list
server.once(AbstractService.Events.DELETE_EVENT, (s: Server) => {
logger.info(
`Deleting Pact Server with options: \n${JSON.stringify(s.options)}`,
);
this.__servers = _.without(this.__servers, s);
});
return server;
}
// Return arrays of all servers
public listServers(): Server[] {
return this.__servers;
}
// Remove all the servers that have been created
// Return promise of all others
public removeAllServers(): q.Promise<Server[]> {
if (this.__servers.length === 0) {
return q(this.__servers);
}
logger.info('Removing all Pact servers.');
return q.all<Server>(
_.map(
this.__servers,
(server: Server) => server.delete() as PromiseLike<Server>,
),
);
}
// Creates stub with specified options
public createStub(options: StubOptions = {}): Stub {
if (
options &&
options.port &&
_.some(this.__stubs, (s: Stub) => s.options.port === options.port)
) {
let msg = `Port '${options.port}' is already in use by another process.`;
logger.error(msg);
throw new Error(msg);
}
let stub = stubFactory(options);
this.__stubs.push(stub);
logger.info(
`Creating Pact Stub with options: \n${JSON.stringify(stub.options)}`, |
// Listen to stub delete events, to remove from stub list
stub.once(AbstractService.Events.DELETE_EVENT, (s: Stub) => {
logger.info(
`Deleting Pact Stub with options: \n${JSON.stringify(stub.options)}`,
);
this.__stubs = _.without(this.__stubs, s);
});
return stub;
}
// Return arrays of all stubs
public listStubs(): Stub[] {
return this.__stubs;
}
// Remove all the stubs that have been created
// Return promise of all others
public removeAllStubs(): q.Promise<Stub[]> {
if (this.__stubs.length === 0) {
return q(this.__stubs);
}
logger.info('Removing all Pact stubs.');
return q.all<Stub>(
_.map(this.__stubs, (stub: Stub) => stub.delete() as PromiseLike<Stub>),
);
}
// Remove all the servers and stubs
public removeAll(): q.Promise<AbstractService[]> {
return q.all<AbstractService>(
_.flatten([this.removeAllStubs(), this.removeAllServers()]),
);
// .tap(endDestination);
}
// Run the Pact Verification process
public verifyPacts(options: VerifierOptions): q.Promise<string> {
logger.info('Verifying Pacts.');
return verifierFactory(options).verify();
}
// Run the Message Pact creation process
public createMessage(options: MessageOptions): q.Promise<unknown> {
logger.info('Creating Message');
return messageFactory(options).createMessage();
}
// Publish Pacts to a Pact Broker
public publishPacts(options: PublisherOptions): q.Promise<string[]> {
logger.info('Publishing Pacts to Broker');
return publisherFactory(options).publish();
}
// Use can-i-deploy to determine if it is safe to deploy
public canDeploy(
options: CanDeployOptions,
): q.Promise<CanDeployResponse | string> {
logger.info('Checking if it it possible to deploy');
return canDeployFactory(options).canDeploy();
}
}
export default new Pact(); | ); | random_line_split |
pact.ts | import * as q from 'q';
import * as path from 'path';
import serverFactory, { Server, ServerOptions } from './server';
import stubFactory, { Stub, StubOptions } from './stub';
import verifierFactory, { VerifierOptions } from './verifier';
import messageFactory, { MessageOptions } from './message';
import publisherFactory, { PublisherOptions } from './publisher';
import canDeployFactory, {
CanDeployOptions,
CanDeployResponse,
} from './can-deploy';
import pactEnvironment from './pact-environment';
import logger, { LogLevels, setLogLevel } from './logger';
import { AbstractService } from './service';
import * as _ from 'underscore';
import mkdirp = require('mkdirp');
import rimraf = require('rimraf');
export class Pact {
private __servers: Server[] = [];
private __stubs: Stub[] = [];
constructor() {
// Check to see if we hit into Windows Long Path issue
if (pactEnvironment.isWindows()) {
try {
// Trying to trigger windows error by creating path that's over 260 characters long
const name =
'Jctyo0NXwbPN6Y1o8p2TkicKma2kfqmXwVLw6ypBX47uktBPX9FM9kbPraQXsAUZuT6BvenTbnWczXzuN4js0KB9e7P5cccxvmXPYcFhJnBvPSKGH1FlTqEOsjl8djk3md';
const dir = mkdirp.sync(path.resolve(__dirname, name, name));
dir && rimraf.sync(dir);
} catch {
logger.warn(
'WARNING: Windows Long Paths is not enabled and might cause Pact to crash if the path is too long. ' +
'To fix this issue, please consult https://github.com/pact-foundation/pact-node#enable-long-paths`',
);
}
}
// Listen for Node exiting or someone killing the process
// Must remove all the instances of Pact mock service
process.once('exit', () => this.removeAll());
process.once('SIGINT', () => process.exit());
}
public logLevel(level?: LogLevels | number): number | void {
return setLogLevel(level);
}
// Creates server with specified options
public createServer(options: ServerOptions = {}): Server {
if (
options &&
options.port &&
_.some(this.__servers, (s: Server) => s.options.port === options.port)
) {
let msg = `Port '${options.port}' is already in use by another process.`;
logger.error(msg);
throw new Error(msg);
}
let server = serverFactory(options);
this.__servers.push(server);
logger.info(
`Creating Pact Server with options: \n${JSON.stringify(server.options)}`,
);
// Listen to server delete events, to remove from server list
server.once(AbstractService.Events.DELETE_EVENT, (s: Server) => {
logger.info(
`Deleting Pact Server with options: \n${JSON.stringify(s.options)}`,
);
this.__servers = _.without(this.__servers, s);
});
return server;
}
// Return arrays of all servers
public listServers(): Server[] {
return this.__servers;
}
// Remove all the servers that have been created
// Return promise of all others
public removeAllServers(): q.Promise<Server[]> {
if (this.__servers.length === 0) {
return q(this.__servers);
}
logger.info('Removing all Pact servers.');
return q.all<Server>(
_.map(
this.__servers,
(server: Server) => server.delete() as PromiseLike<Server>,
),
);
}
// Creates stub with specified options
public createStub(options: StubOptions = {}): Stub {
if (
options &&
options.port &&
_.some(this.__stubs, (s: Stub) => s.options.port === options.port)
) {
let msg = `Port '${options.port}' is already in use by another process.`;
logger.error(msg);
throw new Error(msg);
}
let stub = stubFactory(options);
this.__stubs.push(stub);
logger.info(
`Creating Pact Stub with options: \n${JSON.stringify(stub.options)}`,
);
// Listen to stub delete events, to remove from stub list
stub.once(AbstractService.Events.DELETE_EVENT, (s: Stub) => {
logger.info(
`Deleting Pact Stub with options: \n${JSON.stringify(stub.options)}`,
);
this.__stubs = _.without(this.__stubs, s);
});
return stub;
}
// Return arrays of all stubs
public listStubs(): Stub[] {
return this.__stubs;
}
// Remove all the stubs that have been created
// Return promise of all others
public removeAllStubs(): q.Promise<Stub[]> {
if (this.__stubs.length === 0) |
logger.info('Removing all Pact stubs.');
return q.all<Stub>(
_.map(this.__stubs, (stub: Stub) => stub.delete() as PromiseLike<Stub>),
);
}
// Remove all the servers and stubs
public removeAll(): q.Promise<AbstractService[]> {
return q.all<AbstractService>(
_.flatten([this.removeAllStubs(), this.removeAllServers()]),
);
// .tap(endDestination);
}
// Run the Pact Verification process
public verifyPacts(options: VerifierOptions): q.Promise<string> {
logger.info('Verifying Pacts.');
return verifierFactory(options).verify();
}
// Run the Message Pact creation process
public createMessage(options: MessageOptions): q.Promise<unknown> {
logger.info('Creating Message');
return messageFactory(options).createMessage();
}
// Publish Pacts to a Pact Broker
public publishPacts(options: PublisherOptions): q.Promise<string[]> {
logger.info('Publishing Pacts to Broker');
return publisherFactory(options).publish();
}
// Use can-i-deploy to determine if it is safe to deploy
public canDeploy(
options: CanDeployOptions,
): q.Promise<CanDeployResponse | string> {
logger.info('Checking if it it possible to deploy');
return canDeployFactory(options).canDeploy();
}
}
export default new Pact();
| {
return q(this.__stubs);
} | conditional_block |
pact.ts | import * as q from 'q';
import * as path from 'path';
import serverFactory, { Server, ServerOptions } from './server';
import stubFactory, { Stub, StubOptions } from './stub';
import verifierFactory, { VerifierOptions } from './verifier';
import messageFactory, { MessageOptions } from './message';
import publisherFactory, { PublisherOptions } from './publisher';
import canDeployFactory, {
CanDeployOptions,
CanDeployResponse,
} from './can-deploy';
import pactEnvironment from './pact-environment';
import logger, { LogLevels, setLogLevel } from './logger';
import { AbstractService } from './service';
import * as _ from 'underscore';
import mkdirp = require('mkdirp');
import rimraf = require('rimraf');
export class Pact {
private __servers: Server[] = [];
private __stubs: Stub[] = [];
constructor() {
// Check to see if we hit into Windows Long Path issue
if (pactEnvironment.isWindows()) {
try {
// Trying to trigger windows error by creating path that's over 260 characters long
const name =
'Jctyo0NXwbPN6Y1o8p2TkicKma2kfqmXwVLw6ypBX47uktBPX9FM9kbPraQXsAUZuT6BvenTbnWczXzuN4js0KB9e7P5cccxvmXPYcFhJnBvPSKGH1FlTqEOsjl8djk3md';
const dir = mkdirp.sync(path.resolve(__dirname, name, name));
dir && rimraf.sync(dir);
} catch {
logger.warn(
'WARNING: Windows Long Paths is not enabled and might cause Pact to crash if the path is too long. ' +
'To fix this issue, please consult https://github.com/pact-foundation/pact-node#enable-long-paths`',
);
}
}
// Listen for Node exiting or someone killing the process
// Must remove all the instances of Pact mock service
process.once('exit', () => this.removeAll());
process.once('SIGINT', () => process.exit());
}
public | (level?: LogLevels | number): number | void {
return setLogLevel(level);
}
// Creates server with specified options
public createServer(options: ServerOptions = {}): Server {
if (
options &&
options.port &&
_.some(this.__servers, (s: Server) => s.options.port === options.port)
) {
let msg = `Port '${options.port}' is already in use by another process.`;
logger.error(msg);
throw new Error(msg);
}
let server = serverFactory(options);
this.__servers.push(server);
logger.info(
`Creating Pact Server with options: \n${JSON.stringify(server.options)}`,
);
// Listen to server delete events, to remove from server list
server.once(AbstractService.Events.DELETE_EVENT, (s: Server) => {
logger.info(
`Deleting Pact Server with options: \n${JSON.stringify(s.options)}`,
);
this.__servers = _.without(this.__servers, s);
});
return server;
}
// Return arrays of all servers
public listServers(): Server[] {
return this.__servers;
}
// Remove all the servers that have been created
// Return promise of all others
public removeAllServers(): q.Promise<Server[]> {
if (this.__servers.length === 0) {
return q(this.__servers);
}
logger.info('Removing all Pact servers.');
return q.all<Server>(
_.map(
this.__servers,
(server: Server) => server.delete() as PromiseLike<Server>,
),
);
}
// Creates stub with specified options
public createStub(options: StubOptions = {}): Stub {
if (
options &&
options.port &&
_.some(this.__stubs, (s: Stub) => s.options.port === options.port)
) {
let msg = `Port '${options.port}' is already in use by another process.`;
logger.error(msg);
throw new Error(msg);
}
let stub = stubFactory(options);
this.__stubs.push(stub);
logger.info(
`Creating Pact Stub with options: \n${JSON.stringify(stub.options)}`,
);
// Listen to stub delete events, to remove from stub list
stub.once(AbstractService.Events.DELETE_EVENT, (s: Stub) => {
logger.info(
`Deleting Pact Stub with options: \n${JSON.stringify(stub.options)}`,
);
this.__stubs = _.without(this.__stubs, s);
});
return stub;
}
// Return arrays of all stubs
public listStubs(): Stub[] {
return this.__stubs;
}
// Remove all the stubs that have been created
// Return promise of all others
public removeAllStubs(): q.Promise<Stub[]> {
if (this.__stubs.length === 0) {
return q(this.__stubs);
}
logger.info('Removing all Pact stubs.');
return q.all<Stub>(
_.map(this.__stubs, (stub: Stub) => stub.delete() as PromiseLike<Stub>),
);
}
// Remove all the servers and stubs
public removeAll(): q.Promise<AbstractService[]> {
return q.all<AbstractService>(
_.flatten([this.removeAllStubs(), this.removeAllServers()]),
);
// .tap(endDestination);
}
// Run the Pact Verification process
public verifyPacts(options: VerifierOptions): q.Promise<string> {
logger.info('Verifying Pacts.');
return verifierFactory(options).verify();
}
// Run the Message Pact creation process
public createMessage(options: MessageOptions): q.Promise<unknown> {
logger.info('Creating Message');
return messageFactory(options).createMessage();
}
// Publish Pacts to a Pact Broker
public publishPacts(options: PublisherOptions): q.Promise<string[]> {
logger.info('Publishing Pacts to Broker');
return publisherFactory(options).publish();
}
// Use can-i-deploy to determine if it is safe to deploy
public canDeploy(
options: CanDeployOptions,
): q.Promise<CanDeployResponse | string> {
logger.info('Checking if it it possible to deploy');
return canDeployFactory(options).canDeploy();
}
}
export default new Pact();
| logLevel | identifier_name |
pact.ts | import * as q from 'q';
import * as path from 'path';
import serverFactory, { Server, ServerOptions } from './server';
import stubFactory, { Stub, StubOptions } from './stub';
import verifierFactory, { VerifierOptions } from './verifier';
import messageFactory, { MessageOptions } from './message';
import publisherFactory, { PublisherOptions } from './publisher';
import canDeployFactory, {
CanDeployOptions,
CanDeployResponse,
} from './can-deploy';
import pactEnvironment from './pact-environment';
import logger, { LogLevels, setLogLevel } from './logger';
import { AbstractService } from './service';
import * as _ from 'underscore';
import mkdirp = require('mkdirp');
import rimraf = require('rimraf');
export class Pact {
private __servers: Server[] = [];
private __stubs: Stub[] = [];
constructor() {
// Check to see if we hit into Windows Long Path issue
if (pactEnvironment.isWindows()) {
try {
// Trying to trigger windows error by creating path that's over 260 characters long
const name =
'Jctyo0NXwbPN6Y1o8p2TkicKma2kfqmXwVLw6ypBX47uktBPX9FM9kbPraQXsAUZuT6BvenTbnWczXzuN4js0KB9e7P5cccxvmXPYcFhJnBvPSKGH1FlTqEOsjl8djk3md';
const dir = mkdirp.sync(path.resolve(__dirname, name, name));
dir && rimraf.sync(dir);
} catch {
logger.warn(
'WARNING: Windows Long Paths is not enabled and might cause Pact to crash if the path is too long. ' +
'To fix this issue, please consult https://github.com/pact-foundation/pact-node#enable-long-paths`',
);
}
}
// Listen for Node exiting or someone killing the process
// Must remove all the instances of Pact mock service
process.once('exit', () => this.removeAll());
process.once('SIGINT', () => process.exit());
}
public logLevel(level?: LogLevels | number): number | void |
// Creates server with specified options
public createServer(options: ServerOptions = {}): Server {
if (
options &&
options.port &&
_.some(this.__servers, (s: Server) => s.options.port === options.port)
) {
let msg = `Port '${options.port}' is already in use by another process.`;
logger.error(msg);
throw new Error(msg);
}
let server = serverFactory(options);
this.__servers.push(server);
logger.info(
`Creating Pact Server with options: \n${JSON.stringify(server.options)}`,
);
// Listen to server delete events, to remove from server list
server.once(AbstractService.Events.DELETE_EVENT, (s: Server) => {
logger.info(
`Deleting Pact Server with options: \n${JSON.stringify(s.options)}`,
);
this.__servers = _.without(this.__servers, s);
});
return server;
}
// Return arrays of all servers
public listServers(): Server[] {
return this.__servers;
}
// Remove all the servers that have been created
// Return promise of all others
public removeAllServers(): q.Promise<Server[]> {
if (this.__servers.length === 0) {
return q(this.__servers);
}
logger.info('Removing all Pact servers.');
return q.all<Server>(
_.map(
this.__servers,
(server: Server) => server.delete() as PromiseLike<Server>,
),
);
}
// Creates stub with specified options
public createStub(options: StubOptions = {}): Stub {
if (
options &&
options.port &&
_.some(this.__stubs, (s: Stub) => s.options.port === options.port)
) {
let msg = `Port '${options.port}' is already in use by another process.`;
logger.error(msg);
throw new Error(msg);
}
let stub = stubFactory(options);
this.__stubs.push(stub);
logger.info(
`Creating Pact Stub with options: \n${JSON.stringify(stub.options)}`,
);
// Listen to stub delete events, to remove from stub list
stub.once(AbstractService.Events.DELETE_EVENT, (s: Stub) => {
logger.info(
`Deleting Pact Stub with options: \n${JSON.stringify(stub.options)}`,
);
this.__stubs = _.without(this.__stubs, s);
});
return stub;
}
// Return arrays of all stubs
public listStubs(): Stub[] {
return this.__stubs;
}
// Remove all the stubs that have been created
// Return promise of all others
public removeAllStubs(): q.Promise<Stub[]> {
if (this.__stubs.length === 0) {
return q(this.__stubs);
}
logger.info('Removing all Pact stubs.');
return q.all<Stub>(
_.map(this.__stubs, (stub: Stub) => stub.delete() as PromiseLike<Stub>),
);
}
// Remove all the servers and stubs
public removeAll(): q.Promise<AbstractService[]> {
return q.all<AbstractService>(
_.flatten([this.removeAllStubs(), this.removeAllServers()]),
);
// .tap(endDestination);
}
// Run the Pact Verification process
public verifyPacts(options: VerifierOptions): q.Promise<string> {
logger.info('Verifying Pacts.');
return verifierFactory(options).verify();
}
// Run the Message Pact creation process
public createMessage(options: MessageOptions): q.Promise<unknown> {
logger.info('Creating Message');
return messageFactory(options).createMessage();
}
// Publish Pacts to a Pact Broker
public publishPacts(options: PublisherOptions): q.Promise<string[]> {
logger.info('Publishing Pacts to Broker');
return publisherFactory(options).publish();
}
// Use can-i-deploy to determine if it is safe to deploy
public canDeploy(
options: CanDeployOptions,
): q.Promise<CanDeployResponse | string> {
logger.info('Checking if it it possible to deploy');
return canDeployFactory(options).canDeploy();
}
}
export default new Pact();
| {
return setLogLevel(level);
} | identifier_body |
common.js | // shared config (dev and prod)
const {resolve} = require('path');
const {CheckerPlugin} = require('awesome-typescript-loader');
const HtmlWebpackPlugin = require('html-webpack-plugin');
module.exports = {
resolve: {
extensions: ['.ts', '.tsx', '.js', '.jsx'],
},
context: resolve(__dirname, '../../src'),
module: {
rules: [
{
test: /\.js$/,
use: ['babel-loader', 'source-map-loader'],
exclude: /node_modules/,
},
{
test: /\.tsx?$/,
use: ['babel-loader', 'awesome-typescript-loader'],
},
{
test: /\.css$/,
use: ['style-loader', { loader: 'css-loader', options: { importLoaders: 1 } }],
},
{
test: /\.scss$/,
loaders: [
'style-loader',
{ loader: 'css-loader', options: { importLoaders: 1 } },
'sass-loader',
],
},
{
test: /\.(jpe?g|png|gif|svg)$/i,
loaders: [
'file-loader?hash=sha512&digest=hex&name=img/[hash].[ext]',
'image-webpack-loader?bypassOnDebug&optipng.optimizationLevel=7&gifsicle.interlaced=false',
],
},
],
},
plugins: [
new CheckerPlugin(),
new HtmlWebpackPlugin({template: 'index.html.ejs',}),
],
externals: {
'react': 'React',
'react-dom': 'ReactDOM',
},
performance: { | }; | hints: false,
}, | random_line_split |
dbd_parsers.py | #!/usr/bin/env python
"""
@package glider_utils
@file glider_utils.py
@author Stuart Pearce & Chris Wingard
@brief Module containing glider utiliities
"""
__author__ = 'Stuart Pearce & Chris Wingard'
__license__ = 'Apache 2.0'
import numpy as np
import warnings
#import pdb
import re
#import pygsw.vectors as gsw
class DbaDataParser(object):
"""
A class that parses a glider data file and holds it in dictionaries.
GliderParsedData parses a Slocum Electric Glider data file that has
been converted to ASCII from binary, and holds the self describing
header data in a header dictionary and the data in a data dictionary
using the column labels as the dictionary keys.
Construct an instance of GliderParsedData using the filename of the
ASCII file containing the glider data.
E.g.:
glider_data = GliderParsedData('glider_data_file.mbd')
glider_data.hdr_dict holds the header dictionary with the self
describing ASCII tags from the file as keys.
data_dict holds a data dictionary with the variable names (column
labels) as keys.
A sub-dictionary holds the name of the variable (same as the key),
the data units, the number of binary bytes used to store each
variable type, the name of the variable, and the data using the
keys:
'Name'
'Units'
'Number_of_Bytes'
'Data'
For example, to retrieve the data for 'variable_name':
vn_data = glider_data.data_dict['variable_name]['Data']
"""
def __init__(self, filename):
self._fid = open(filename, 'r')
self.hdr_dict = {}
self.data_dict = {}
self._read_header()
self._read_data()
self._fid.close()
def _read_header(self):
"""
Read in the self describing header lines of an ASCII glider data
file.
"""
# There are usually 14 header lines, start with 14,
# and check the 'num_ascii_tags' line.
num_hdr_lines = 14
header_pattern = r'(.*): (.*)$'
header_re = re.compile(header_pattern)
#pdb.set_trace()
hdr_line = 1
while hdr_line <= num_hdr_lines:
line = self._fid.readline()
match = header_re.match(line)
if match:
key = match.group(1)
value = match.group(2)
value = value.strip()
if 'num_ascii_tags' in key:
num_hdr_lines = int(value)
self.hdr_dict[key] = value
hdr_line += 1
def _read_data(self):
"""
Read in the column labels, data type, number of bytes of each
data type, and the data from an ASCII glider data file.
"""
column_labels = self._fid.readline().split()
column_type = self._fid.readline().split()
column_num_bytes = self._fid.readline().split()
# read each row of data & use np.array's ability to grab a
# column of an array
data = []
#pdb.set_trace()
for line in self._fid.readlines():
data.append(line.split())
data_array = np.array(data, dtype=np.float) # NOTE: this is an array of strings
# warn if # of described data rows != to amount read in.
num_columns = int(self.hdr_dict['sensors_per_cycle'])
if num_columns != data_array.shape[1]:
warnings.warn('Glider data file does not have the same' +
'number of columns as described in header.\n' +
'described %d, actual %d' % (num_columns,
data_array.shape[1])
)
# extract data to dictionary
for ii in range(num_columns):
units = column_type[ii]
data_col = data_array[:, ii]
self.data_dict[column_labels[ii]] = {
'Name': column_labels[ii],
'Units': units,
'Number_of_Bytes': int(column_num_bytes[ii]),
'Data': data_col
}
# change ISO lat or lon format to decimal degrees
if units == 'lat' or units == 'lon':
|
self.data_keys = column_labels
class DataVizDataParser(DbaDataParser):
"""
A class that parses a glider data file and holds it in dictionaries.
GliderParsedData parses a Slocum Electric Glider data file that has
been converted to ASCII from binary, and holds the self describing
header data in a header dictionary and the data in a data dictionary
using the column labels as the dictionary keys.
Construct an instance of GliderParsedData using the filename of the
ASCII file containing the glider data.
E.g.:
glider_data = GliderParsedData('glider_data_file.mbd')
glider_data.hdr_dict holds the header dictionary with the self
describing ASCII tags from the file as keys.
data_dict holds a data dictionary with the variable names (column
labels) as keys.
A sub-dictionary holds the name of the variable (same as the key),
the data units, the number of binary bytes used to store each
variable type, the name of the variable, and the data using the
keys:
'Name'
'Units'
'Number_of_Bytes'
'Data'
For example, to retrieve the data for 'variable_name':
vn_data = glider_data.data_dict['variable_name]['Data']
"""
def _read_header(self):
pass
def _read_data(self):
"""
Read in the column labels, data type/units, and the data from an Data Visualizer data file.
"""
filename_hdr = self._fid.readline()
column_labels = self._fid.readline().split()
column_type = self._fid.readline().split()
#column_num_bytes = self._fid.readline().split()
# read each row of data & use np.array's ability to grab a
# column of an array
data = []
for line in self._fid.readlines():
data.append(line.split())
data_array = np.array(data) # NOTE: can't make floats because of lat & lon
num_columns = len(column_labels)
# extract data to dictionary
for ii in range(num_columns):
self.data_dict[column_labels[ii]] = {
'Name': column_labels[ii],
'Units': column_type[ii],
#'Number_of_Bytes': int(column_num_bytes[ii]),
'Data': data_array[:, ii]
}
self.data_keys = column_labels
class GliderData(dict):
""" An object specifically to store Slocum glider data.
"""
def __init__():
dict.__init__ | min_d100, deg = np.modf(data_col/100.)
deg_col = deg + (min_d100*100.)/60.
self.data_dict[column_labels[ii]]['Data_deg'] = deg_col | conditional_block |
dbd_parsers.py | #!/usr/bin/env python
"""
@package glider_utils
@file glider_utils.py
@author Stuart Pearce & Chris Wingard
@brief Module containing glider utiliities
"""
__author__ = 'Stuart Pearce & Chris Wingard'
__license__ = 'Apache 2.0'
import numpy as np
import warnings
#import pdb
import re
#import pygsw.vectors as gsw
class DbaDataParser(object):
"""
A class that parses a glider data file and holds it in dictionaries.
GliderParsedData parses a Slocum Electric Glider data file that has
been converted to ASCII from binary, and holds the self describing
header data in a header dictionary and the data in a data dictionary
using the column labels as the dictionary keys.
Construct an instance of GliderParsedData using the filename of the
ASCII file containing the glider data.
E.g.:
glider_data = GliderParsedData('glider_data_file.mbd')
glider_data.hdr_dict holds the header dictionary with the self
describing ASCII tags from the file as keys.
data_dict holds a data dictionary with the variable names (column
labels) as keys.
A sub-dictionary holds the name of the variable (same as the key),
the data units, the number of binary bytes used to store each
variable type, the name of the variable, and the data using the
keys:
'Name'
'Units'
'Number_of_Bytes'
'Data'
For example, to retrieve the data for 'variable_name':
vn_data = glider_data.data_dict['variable_name]['Data']
"""
def __init__(self, filename):
self._fid = open(filename, 'r')
self.hdr_dict = {}
self.data_dict = {}
self._read_header()
self._read_data()
self._fid.close()
def _read_header(self):
"""
Read in the self describing header lines of an ASCII glider data
file.
"""
# There are usually 14 header lines, start with 14,
# and check the 'num_ascii_tags' line.
num_hdr_lines = 14
header_pattern = r'(.*): (.*)$'
header_re = re.compile(header_pattern)
#pdb.set_trace()
hdr_line = 1
while hdr_line <= num_hdr_lines:
line = self._fid.readline()
match = header_re.match(line)
if match:
key = match.group(1)
value = match.group(2)
value = value.strip()
if 'num_ascii_tags' in key:
num_hdr_lines = int(value)
self.hdr_dict[key] = value
hdr_line += 1
def _read_data(self):
"""
Read in the column labels, data type, number of bytes of each
data type, and the data from an ASCII glider data file.
"""
column_labels = self._fid.readline().split()
column_type = self._fid.readline().split()
column_num_bytes = self._fid.readline().split()
# read each row of data & use np.array's ability to grab a
# column of an array
data = []
#pdb.set_trace()
for line in self._fid.readlines():
data.append(line.split())
data_array = np.array(data, dtype=np.float) # NOTE: this is an array of strings
# warn if # of described data rows != to amount read in.
num_columns = int(self.hdr_dict['sensors_per_cycle'])
if num_columns != data_array.shape[1]:
warnings.warn('Glider data file does not have the same' +
'number of columns as described in header.\n' +
'described %d, actual %d' % (num_columns,
data_array.shape[1])
)
# extract data to dictionary
for ii in range(num_columns):
units = column_type[ii]
data_col = data_array[:, ii]
self.data_dict[column_labels[ii]] = {
'Name': column_labels[ii],
'Units': units,
'Number_of_Bytes': int(column_num_bytes[ii]),
'Data': data_col
}
# change ISO lat or lon format to decimal degrees
if units == 'lat' or units == 'lon':
min_d100, deg = np.modf(data_col/100.)
deg_col = deg + (min_d100*100.)/60.
self.data_dict[column_labels[ii]]['Data_deg'] = deg_col
self.data_keys = column_labels
class | (DbaDataParser):
"""
A class that parses a glider data file and holds it in dictionaries.
GliderParsedData parses a Slocum Electric Glider data file that has
been converted to ASCII from binary, and holds the self describing
header data in a header dictionary and the data in a data dictionary
using the column labels as the dictionary keys.
Construct an instance of GliderParsedData using the filename of the
ASCII file containing the glider data.
E.g.:
glider_data = GliderParsedData('glider_data_file.mbd')
glider_data.hdr_dict holds the header dictionary with the self
describing ASCII tags from the file as keys.
data_dict holds a data dictionary with the variable names (column
labels) as keys.
A sub-dictionary holds the name of the variable (same as the key),
the data units, the number of binary bytes used to store each
variable type, the name of the variable, and the data using the
keys:
'Name'
'Units'
'Number_of_Bytes'
'Data'
For example, to retrieve the data for 'variable_name':
vn_data = glider_data.data_dict['variable_name]['Data']
"""
def _read_header(self):
pass
def _read_data(self):
"""
Read in the column labels, data type/units, and the data from an Data Visualizer data file.
"""
filename_hdr = self._fid.readline()
column_labels = self._fid.readline().split()
column_type = self._fid.readline().split()
#column_num_bytes = self._fid.readline().split()
# read each row of data & use np.array's ability to grab a
# column of an array
data = []
for line in self._fid.readlines():
data.append(line.split())
data_array = np.array(data) # NOTE: can't make floats because of lat & lon
num_columns = len(column_labels)
# extract data to dictionary
for ii in range(num_columns):
self.data_dict[column_labels[ii]] = {
'Name': column_labels[ii],
'Units': column_type[ii],
#'Number_of_Bytes': int(column_num_bytes[ii]),
'Data': data_array[:, ii]
}
self.data_keys = column_labels
class GliderData(dict):
""" An object specifically to store Slocum glider data.
"""
def __init__():
dict.__init__ | DataVizDataParser | identifier_name |
dbd_parsers.py | #!/usr/bin/env python
"""
@package glider_utils
@file glider_utils.py
@author Stuart Pearce & Chris Wingard
@brief Module containing glider utiliities
"""
__author__ = 'Stuart Pearce & Chris Wingard'
__license__ = 'Apache 2.0'
import numpy as np
import warnings
#import pdb
import re
#import pygsw.vectors as gsw
class DbaDataParser(object):
"""
A class that parses a glider data file and holds it in dictionaries.
GliderParsedData parses a Slocum Electric Glider data file that has
been converted to ASCII from binary, and holds the self describing
header data in a header dictionary and the data in a data dictionary
using the column labels as the dictionary keys.
Construct an instance of GliderParsedData using the filename of the
ASCII file containing the glider data.
E.g.:
glider_data = GliderParsedData('glider_data_file.mbd')
glider_data.hdr_dict holds the header dictionary with the self
describing ASCII tags from the file as keys.
data_dict holds a data dictionary with the variable names (column
labels) as keys.
A sub-dictionary holds the name of the variable (same as the key),
the data units, the number of binary bytes used to store each
variable type, the name of the variable, and the data using the
keys:
'Name'
'Units'
'Number_of_Bytes'
'Data'
For example, to retrieve the data for 'variable_name':
vn_data = glider_data.data_dict['variable_name]['Data']
"""
def __init__(self, filename):
self._fid = open(filename, 'r')
self.hdr_dict = {}
self.data_dict = {}
self._read_header()
self._read_data()
self._fid.close()
def _read_header(self):
"""
Read in the self describing header lines of an ASCII glider data
file.
"""
# There are usually 14 header lines, start with 14,
# and check the 'num_ascii_tags' line.
num_hdr_lines = 14
header_pattern = r'(.*): (.*)$'
header_re = re.compile(header_pattern)
#pdb.set_trace()
hdr_line = 1
while hdr_line <= num_hdr_lines:
line = self._fid.readline()
match = header_re.match(line)
if match:
key = match.group(1)
value = match.group(2)
value = value.strip()
if 'num_ascii_tags' in key:
num_hdr_lines = int(value)
self.hdr_dict[key] = value |
def _read_data(self):
"""
Read in the column labels, data type, number of bytes of each
data type, and the data from an ASCII glider data file.
"""
column_labels = self._fid.readline().split()
column_type = self._fid.readline().split()
column_num_bytes = self._fid.readline().split()
# read each row of data & use np.array's ability to grab a
# column of an array
data = []
#pdb.set_trace()
for line in self._fid.readlines():
data.append(line.split())
data_array = np.array(data, dtype=np.float) # NOTE: this is an array of strings
# warn if # of described data rows != to amount read in.
num_columns = int(self.hdr_dict['sensors_per_cycle'])
if num_columns != data_array.shape[1]:
warnings.warn('Glider data file does not have the same' +
'number of columns as described in header.\n' +
'described %d, actual %d' % (num_columns,
data_array.shape[1])
)
# extract data to dictionary
for ii in range(num_columns):
units = column_type[ii]
data_col = data_array[:, ii]
self.data_dict[column_labels[ii]] = {
'Name': column_labels[ii],
'Units': units,
'Number_of_Bytes': int(column_num_bytes[ii]),
'Data': data_col
}
# change ISO lat or lon format to decimal degrees
if units == 'lat' or units == 'lon':
min_d100, deg = np.modf(data_col/100.)
deg_col = deg + (min_d100*100.)/60.
self.data_dict[column_labels[ii]]['Data_deg'] = deg_col
self.data_keys = column_labels
class DataVizDataParser(DbaDataParser):
"""
A class that parses a glider data file and holds it in dictionaries.
GliderParsedData parses a Slocum Electric Glider data file that has
been converted to ASCII from binary, and holds the self describing
header data in a header dictionary and the data in a data dictionary
using the column labels as the dictionary keys.
Construct an instance of GliderParsedData using the filename of the
ASCII file containing the glider data.
E.g.:
glider_data = GliderParsedData('glider_data_file.mbd')
glider_data.hdr_dict holds the header dictionary with the self
describing ASCII tags from the file as keys.
data_dict holds a data dictionary with the variable names (column
labels) as keys.
A sub-dictionary holds the name of the variable (same as the key),
the data units, the number of binary bytes used to store each
variable type, the name of the variable, and the data using the
keys:
'Name'
'Units'
'Number_of_Bytes'
'Data'
For example, to retrieve the data for 'variable_name':
vn_data = glider_data.data_dict['variable_name]['Data']
"""
def _read_header(self):
pass
def _read_data(self):
"""
Read in the column labels, data type/units, and the data from an Data Visualizer data file.
"""
filename_hdr = self._fid.readline()
column_labels = self._fid.readline().split()
column_type = self._fid.readline().split()
#column_num_bytes = self._fid.readline().split()
# read each row of data & use np.array's ability to grab a
# column of an array
data = []
for line in self._fid.readlines():
data.append(line.split())
data_array = np.array(data) # NOTE: can't make floats because of lat & lon
num_columns = len(column_labels)
# extract data to dictionary
for ii in range(num_columns):
self.data_dict[column_labels[ii]] = {
'Name': column_labels[ii],
'Units': column_type[ii],
#'Number_of_Bytes': int(column_num_bytes[ii]),
'Data': data_array[:, ii]
}
self.data_keys = column_labels
class GliderData(dict):
""" An object specifically to store Slocum glider data.
"""
def __init__():
dict.__init__ | hdr_line += 1 | random_line_split |
dbd_parsers.py | #!/usr/bin/env python
"""
@package glider_utils
@file glider_utils.py
@author Stuart Pearce & Chris Wingard
@brief Module containing glider utiliities
"""
__author__ = 'Stuart Pearce & Chris Wingard'
__license__ = 'Apache 2.0'
import numpy as np
import warnings
#import pdb
import re
#import pygsw.vectors as gsw
class DbaDataParser(object):
"""
A class that parses a glider data file and holds it in dictionaries.
GliderParsedData parses a Slocum Electric Glider data file that has
been converted to ASCII from binary, and holds the self describing
header data in a header dictionary and the data in a data dictionary
using the column labels as the dictionary keys.
Construct an instance of GliderParsedData using the filename of the
ASCII file containing the glider data.
E.g.:
glider_data = GliderParsedData('glider_data_file.mbd')
glider_data.hdr_dict holds the header dictionary with the self
describing ASCII tags from the file as keys.
data_dict holds a data dictionary with the variable names (column
labels) as keys.
A sub-dictionary holds the name of the variable (same as the key),
the data units, the number of binary bytes used to store each
variable type, the name of the variable, and the data using the
keys:
'Name'
'Units'
'Number_of_Bytes'
'Data'
For example, to retrieve the data for 'variable_name':
vn_data = glider_data.data_dict['variable_name]['Data']
"""
def __init__(self, filename):
self._fid = open(filename, 'r')
self.hdr_dict = {}
self.data_dict = {}
self._read_header()
self._read_data()
self._fid.close()
def _read_header(self):
"""
Read in the self describing header lines of an ASCII glider data
file.
"""
# There are usually 14 header lines, start with 14,
# and check the 'num_ascii_tags' line.
num_hdr_lines = 14
header_pattern = r'(.*): (.*)$'
header_re = re.compile(header_pattern)
#pdb.set_trace()
hdr_line = 1
while hdr_line <= num_hdr_lines:
line = self._fid.readline()
match = header_re.match(line)
if match:
key = match.group(1)
value = match.group(2)
value = value.strip()
if 'num_ascii_tags' in key:
num_hdr_lines = int(value)
self.hdr_dict[key] = value
hdr_line += 1
def _read_data(self):
"""
Read in the column labels, data type, number of bytes of each
data type, and the data from an ASCII glider data file.
"""
column_labels = self._fid.readline().split()
column_type = self._fid.readline().split()
column_num_bytes = self._fid.readline().split()
# read each row of data & use np.array's ability to grab a
# column of an array
data = []
#pdb.set_trace()
for line in self._fid.readlines():
data.append(line.split())
data_array = np.array(data, dtype=np.float) # NOTE: this is an array of strings
# warn if # of described data rows != to amount read in.
num_columns = int(self.hdr_dict['sensors_per_cycle'])
if num_columns != data_array.shape[1]:
warnings.warn('Glider data file does not have the same' +
'number of columns as described in header.\n' +
'described %d, actual %d' % (num_columns,
data_array.shape[1])
)
# extract data to dictionary
for ii in range(num_columns):
units = column_type[ii]
data_col = data_array[:, ii]
self.data_dict[column_labels[ii]] = {
'Name': column_labels[ii],
'Units': units,
'Number_of_Bytes': int(column_num_bytes[ii]),
'Data': data_col
}
# change ISO lat or lon format to decimal degrees
if units == 'lat' or units == 'lon':
min_d100, deg = np.modf(data_col/100.)
deg_col = deg + (min_d100*100.)/60.
self.data_dict[column_labels[ii]]['Data_deg'] = deg_col
self.data_keys = column_labels
class DataVizDataParser(DbaDataParser):
"""
A class that parses a glider data file and holds it in dictionaries.
GliderParsedData parses a Slocum Electric Glider data file that has
been converted to ASCII from binary, and holds the self describing
header data in a header dictionary and the data in a data dictionary
using the column labels as the dictionary keys.
Construct an instance of GliderParsedData using the filename of the
ASCII file containing the glider data.
E.g.:
glider_data = GliderParsedData('glider_data_file.mbd')
glider_data.hdr_dict holds the header dictionary with the self
describing ASCII tags from the file as keys.
data_dict holds a data dictionary with the variable names (column
labels) as keys.
A sub-dictionary holds the name of the variable (same as the key),
the data units, the number of binary bytes used to store each
variable type, the name of the variable, and the data using the
keys:
'Name'
'Units'
'Number_of_Bytes'
'Data'
For example, to retrieve the data for 'variable_name':
vn_data = glider_data.data_dict['variable_name]['Data']
"""
def _read_header(self):
pass
def _read_data(self):
"""
Read in the column labels, data type/units, and the data from an Data Visualizer data file.
"""
filename_hdr = self._fid.readline()
column_labels = self._fid.readline().split()
column_type = self._fid.readline().split()
#column_num_bytes = self._fid.readline().split()
# read each row of data & use np.array's ability to grab a
# column of an array
data = []
for line in self._fid.readlines():
data.append(line.split())
data_array = np.array(data) # NOTE: can't make floats because of lat & lon
num_columns = len(column_labels)
# extract data to dictionary
for ii in range(num_columns):
self.data_dict[column_labels[ii]] = {
'Name': column_labels[ii],
'Units': column_type[ii],
#'Number_of_Bytes': int(column_num_bytes[ii]),
'Data': data_array[:, ii]
}
self.data_keys = column_labels
class GliderData(dict):
""" An object specifically to store Slocum glider data.
"""
def __init__():
| dict.__init__ | identifier_body | |
main.rs | mod sqrl_s4;
mod sqrl_crypto;
use sqrl_s4::SqrlS4Identity;
use sqrl_crypto::en_scrypt;
fn main() | {
//let identity = SqrlS4Identity{type1_length: 34, ..Default::default()};
//let mut identity = SqrlS4Identity::default();
let sqrlbinary = b"sqrldata}\x00\x01\x00-\x00\"wQ\x122\x0e\xb5\x891\xfep\x97\xef\xf2e]\xf6\x0fg\x07\x8c_\xda\xd4\xe0Z\xe0\xb8\t\x96\x00\x00\x00\xf3\x01\x04\x05\x0f\x00\x023\x88\xcd\xa0\xd7WN\xf7\x8a\xd19\xf8\x1c]\x13\x87\x06\xc6\xe8\xf8\xb08\xf6\x14\xd9m\x9e\xf6|\x94\xa4\x1fF\xab}\x0e\xd3\xbf\xa3r\xa3^\xb4\xfb\xcc\xe7\x8cQ\x8d\x8dyRl\x05\xf1\x19|\x90\x03\x06\t\xe0\xb3\x85H\x8c\xe0\xa6\x0fQm\xf6\x94q6-\xee\xe0\xe9I\x00\x02\x00\xea\xde\x04q\xa1\xfaO\x8f\x1c\xf5e\xea\xb3)-^\t\xa5\x00\x00\x00\xf9o$\"\x9e\x91\xa6\xa9k\xde\xe2z^&j\xa6\x15\xb5\x04\xf4P\x01e\xcc\xfa\xa8V\xd7\xf4\x94L\xea\xea\xdd><\xcbC\xc5+\xeb\xaf\x18\x88\xf9\xa6\xd4\xce";
let mut identity = SqrlS4Identity::from_binary(sqrlbinary);
println!("identity debug\n{:?}", identity);
println!("identity print\n{}", identity);
identity.type1_length = 125;
println!("identity.type1_length {}", identity.type1_length);
println!("{:?}", en_scrypt(b"", b"", 64, 1));
} | identifier_body | |
main.rs | mod sqrl_s4;
mod sqrl_crypto;
use sqrl_s4::SqrlS4Identity;
use sqrl_crypto::en_scrypt;
fn main() {
//let identity = SqrlS4Identity{type1_length: 34, ..Default::default()};
//let mut identity = SqrlS4Identity::default();
let sqrlbinary = b"sqrldata}\x00\x01\x00-\x00\"wQ\x122\x0e\xb5\x891\xfep\x97\xef\xf2e]\xf6\x0fg\x07\x8c_\xda\xd4\xe0Z\xe0\xb8\t\x96\x00\x00\x00\xf3\x01\x04\x05\x0f\x00\x023\x88\xcd\xa0\xd7WN\xf7\x8a\xd19\xf8\x1c]\x13\x87\x06\xc6\xe8\xf8\xb08\xf6\x14\xd9m\x9e\xf6|\x94\xa4\x1fF\xab}\x0e\xd3\xbf\xa3r\xa3^\xb4\xfb\xcc\xe7\x8cQ\x8d\x8dyRl\x05\xf1\x19|\x90\x03\x06\t\xe0\xb3\x85H\x8c\xe0\xa6\x0fQm\xf6\x94q6-\xee\xe0\xe9I\x00\x02\x00\xea\xde\x04q\xa1\xfaO\x8f\x1c\xf5e\xea\xb3)-^\t\xa5\x00\x00\x00\xf9o$\"\x9e\x91\xa6\xa9k\xde\xe2z^&j\xa6\x15\xb5\x04\xf4P\x01e\xcc\xfa\xa8V\xd7\xf4\x94L\xea\xea\xdd><\xcbC\xc5+\xeb\xaf\x18\x88\xf9\xa6\xd4\xce"; | println!("identity debug\n{:?}", identity);
println!("identity print\n{}", identity);
identity.type1_length = 125;
println!("identity.type1_length {}", identity.type1_length);
println!("{:?}", en_scrypt(b"", b"", 64, 1));
} | let mut identity = SqrlS4Identity::from_binary(sqrlbinary); | random_line_split |
main.rs | mod sqrl_s4;
mod sqrl_crypto;
use sqrl_s4::SqrlS4Identity;
use sqrl_crypto::en_scrypt;
fn | () {
//let identity = SqrlS4Identity{type1_length: 34, ..Default::default()};
//let mut identity = SqrlS4Identity::default();
let sqrlbinary = b"sqrldata}\x00\x01\x00-\x00\"wQ\x122\x0e\xb5\x891\xfep\x97\xef\xf2e]\xf6\x0fg\x07\x8c_\xda\xd4\xe0Z\xe0\xb8\t\x96\x00\x00\x00\xf3\x01\x04\x05\x0f\x00\x023\x88\xcd\xa0\xd7WN\xf7\x8a\xd19\xf8\x1c]\x13\x87\x06\xc6\xe8\xf8\xb08\xf6\x14\xd9m\x9e\xf6|\x94\xa4\x1fF\xab}\x0e\xd3\xbf\xa3r\xa3^\xb4\xfb\xcc\xe7\x8cQ\x8d\x8dyRl\x05\xf1\x19|\x90\x03\x06\t\xe0\xb3\x85H\x8c\xe0\xa6\x0fQm\xf6\x94q6-\xee\xe0\xe9I\x00\x02\x00\xea\xde\x04q\xa1\xfaO\x8f\x1c\xf5e\xea\xb3)-^\t\xa5\x00\x00\x00\xf9o$\"\x9e\x91\xa6\xa9k\xde\xe2z^&j\xa6\x15\xb5\x04\xf4P\x01e\xcc\xfa\xa8V\xd7\xf4\x94L\xea\xea\xdd><\xcbC\xc5+\xeb\xaf\x18\x88\xf9\xa6\xd4\xce";
let mut identity = SqrlS4Identity::from_binary(sqrlbinary);
println!("identity debug\n{:?}", identity);
println!("identity print\n{}", identity);
identity.type1_length = 125;
println!("identity.type1_length {}", identity.type1_length);
println!("{:?}", en_scrypt(b"", b"", 64, 1));
}
| main | identifier_name |
test_ivim.py | """
Testing the Intravoxel incoherent motion module
The values of the various parameters used in the tests are inspired by
the study of the IVIM model applied to MR images of the brain by
Federau, Christian, et al. [1].
References
----------
.. [1] Federau, Christian, et al. "Quantitative measurement
of brain perfusion with intravoxel incoherent motion
MR imaging." Radiology 265.3 (2012): 874-881.
"""
import warnings
import numpy as np
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
assert_raises, assert_array_less, run_module_suite,
assert_, assert_equal)
from dipy.testing import assert_greater_equal
import pytest
from dipy.reconst.ivim import ivim_prediction, IvimModel
from dipy.core.gradients import gradient_table, generate_bvecs
from dipy.sims.voxel import multi_tensor
from dipy.utils.optpkg import optional_package
cvxpy, have_cvxpy, _ = optional_package("cvxpy")
needs_cvxpy = pytest.mark.skipif(not have_cvxpy, reason="REQUIRES CVXPY")
def setup_module():
global gtab, ivim_fit_single, ivim_model_trr, data_single, params_trr, \
data_multi, ivim_params_trr, D_star, D, f, S0, gtab_with_multiple_b0, \
noisy_single, mevals, gtab_no_b0, ivim_fit_multi, ivim_model_VP, \
f_VP, D_star_VP, D_VP, params_VP
# Let us generate some data for testing.
bvals = np.array([0., 10., 20., 30., 40., 60., 80., 100.,
120., 140., 160., 180., 200., 300., 400.,
500., 600., 700., 800., 900., 1000.])
N = len(bvals)
bvecs = generate_bvecs(N)
gtab = gradient_table(bvals, bvecs.T, b0_threshold=0)
S0, f, D_star, D = 1000.0, 0.132, 0.00885, 0.000921
# params for a single voxel
params_trr = np.array([S0, f, D_star, D])
mevals = np.array(([D_star, D_star, D_star], [D, D, D]))
# This gives an isotropic signal.
signal = multi_tensor(gtab, mevals, snr=None, S0=S0,
fractions=[f * 100, 100 * (1 - f)])
# Single voxel data
data_single = signal[0]
data_multi = np.zeros((2, 2, 1, len(gtab.bvals)))
data_multi[0, 0, 0] = data_multi[0, 1, 0] = data_multi[
1, 0, 0] = data_multi[1, 1, 0] = data_single
ivim_params_trr = np.zeros((2, 2, 1, 4))
ivim_params_trr[0, 0, 0] = ivim_params_trr[0, 1, 0] = params_trr
ivim_params_trr[1, 0, 0] = ivim_params_trr[1, 1, 0] = params_trr
ivim_model_trr = IvimModel(gtab, fit_method='trr')
ivim_model_one_stage = IvimModel(gtab, fit_method='trr')
ivim_fit_single = ivim_model_trr.fit(data_single)
ivim_fit_multi = ivim_model_trr.fit(data_multi)
ivim_model_one_stage.fit(data_single)
ivim_model_one_stage.fit(data_multi)
bvals_no_b0 = np.array([5., 10., 20., 30., 40., 60., 80., 100.,
120., 140., 160., 180., 200., 300., 400.,
500., 600., 700., 800., 900., 1000.])
_ = generate_bvecs(N) # bvecs_no_b0
gtab_no_b0 = gradient_table(bvals_no_b0, bvecs.T, b0_threshold=0)
bvals_with_multiple_b0 = np.array([0., 0., 0., 0., 40., 60., 80., 100.,
120., 140., 160., 180., 200., 300.,
400., 500., 600., 700., 800., 900.,
1000.])
bvecs_with_multiple_b0 = generate_bvecs(N)
gtab_with_multiple_b0 = gradient_table(bvals_with_multiple_b0,
bvecs_with_multiple_b0.T,
b0_threshold=0)
noisy_single = np.array([4243.71728516, 4317.81298828, 4244.35693359,
4439.36816406, 4420.06201172, 4152.30078125,
4114.34912109, 4104.59375, 4151.61914062,
4003.58374023, 4013.68408203, 3906.39428711,
3909.06079102, 3495.27197266, 3402.57006836,
3163.10180664, 2896.04003906, 2663.7253418,
2614.87695312, 2316.55371094, 2267.7722168])
noisy_multi = np.zeros((2, 2, 1, len(gtab.bvals)))
noisy_multi[0, 1, 0] = noisy_multi[
1, 0, 0] = noisy_multi[1, 1, 0] = noisy_single
noisy_multi[0, 0, 0] = data_single
ivim_model_VP = IvimModel(gtab, fit_method='VarPro')
f_VP, D_star_VP, D_VP = 0.13, 0.0088, 0.000921
# params for a single voxel
params_VP = np.array([f, D_star, D])
ivim_params_VP = np.zeros((2, 2, 1, 3))
ivim_params_VP[0, 0, 0] = ivim_params_VP[0, 1, 0] = params_VP
ivim_params_VP[1, 0, 0] = ivim_params_VP[1, 1, 0] = params_VP
def single_exponential(S0, D, bvals):
return S0 * np.exp(-bvals * D)
def test_single_voxel_fit():
"""
Test the implementation of the fitting for a single voxel.
Here, we will use the multi_tensor function to generate a
bi-exponential signal. The multi_tensor generates a multi
tensor signal and expects eigenvalues of each tensor in mevals.
Our basic test requires a scalar signal isotropic signal and
hence we set the same eigenvalue in all three directions to
generate the required signal.
The bvals, f, D_star and D are inspired from the paper by
Federau, Christian, et al. We use the function "generate_bvecs"
to simulate bvectors corresponding to the bvalues.
In the two stage fitting routine, initially we fit the signal
values for bvals less than the specified split_b using the
TensorModel and get an intial guess for f and D. Then, using
these parameters we fit the entire data for all bvalues.
"""
est_signal = ivim_prediction(ivim_fit_single.model_params, gtab)
assert_array_equal(est_signal.shape, data_single.shape)
assert_array_almost_equal(ivim_fit_single.model_params, params_trr)
assert_array_almost_equal(est_signal, data_single)
# Test predict function for single voxel
p = ivim_fit_single.predict(gtab)
assert_array_equal(p.shape, data_single.shape)
assert_array_almost_equal(p, data_single)
def test_multivoxel():
"""Test fitting with multivoxel data.
We generate a multivoxel signal to test the fitting for multivoxel data.
This is to ensure that the fitting routine takes care of signals packed as
1D, 2D or 3D arrays.
"""
ivim_fit_multi = ivim_model_trr.fit(data_multi)
est_signal = ivim_fit_multi.predict(gtab, S0=1.)
assert_array_equal(est_signal.shape, data_multi.shape)
assert_array_almost_equal(ivim_fit_multi.model_params, ivim_params_trr)
assert_array_almost_equal(est_signal, data_multi)
def test_ivim_errors():
"""
Test if errors raised in the module are working correctly.
Scipy introduced bounded least squares fitting in the version 0.17
and is not supported by the older versions. Initializing an IvimModel
with bounds for older Scipy versions should raise an error.
"""
ivim_model_trr = IvimModel(gtab, bounds=([0., 0., 0., 0.],
[np.inf, 1., 1., 1.]),
fit_method='trr')
ivim_fit = ivim_model_trr.fit(data_multi)
est_signal = ivim_fit.predict(gtab, S0=1.)
assert_array_equal(est_signal.shape, data_multi.shape)
assert_array_almost_equal(ivim_fit.model_params, ivim_params_trr)
assert_array_almost_equal(est_signal, data_multi)
def test_mask():
"""
Test whether setting incorrect mask raises and error
"""
mask_correct = data_multi[..., 0] > 0.2
mask_not_correct = np.array([[False, True, False], [True, False]],
dtype=np.bool)
ivim_fit = ivim_model_trr.fit(data_multi, mask_correct)
est_signal = ivim_fit.predict(gtab, S0=1.)
assert_array_equal(est_signal.shape, data_multi.shape)
assert_array_almost_equal(est_signal, data_multi)
assert_array_almost_equal(ivim_fit.model_params, ivim_params_trr)
assert_raises(ValueError, ivim_model_trr.fit, data_multi,
mask=mask_not_correct)
def test_with_higher_S0():
"""
Test whether fitting works for S0 > 1.
"""
# params for a single voxel
S0_2 = 1000.
params2 = np.array([S0_2, f, D_star, D])
mevals2 = np.array(([D_star, D_star, D_star], [D, D, D]))
# This gives an isotropic signal.
signal2 = multi_tensor(gtab, mevals2, snr=None, S0=S0_2,
fractions=[f * 100, 100 * (1 - f)])
# Single voxel data
data_single2 = signal2[0]
ivim_fit = ivim_model_trr.fit(data_single2)
est_signal = ivim_fit.predict(gtab)
assert_array_equal(est_signal.shape, data_single2.shape)
assert_array_almost_equal(est_signal, data_single2)
assert_array_almost_equal(ivim_fit.model_params, params2)
def test_b0_threshold_greater_than0():
"""
Added test case for default b0_threshold set to 50.
Checks if error is thrown correctly.
"""
bvals_b0t = np.array([50., 10., 20., 30., 40., 60., 80., 100.,
120., 140., 160., 180., 200., 300., 400.,
500., 600., 700., 800., 900., 1000.])
N = len(bvals_b0t)
bvecs = generate_bvecs(N)
gtab = gradient_table(bvals_b0t, bvecs.T)
with assert_raises(ValueError) as vae:
_ = IvimModel(gtab, fit_method='trr')
b0_s = "The IVIM model requires a measurement at b==0. As of "
assert b0_s in vae.exception
def test_bounds_x0():
"""
Test to check if setting bounds for signal where initial value is
higher than subsequent values works.
These values are from the IVIM dataset which can be obtained by using
the `read_ivim` function from dipy.data.fetcher. These are values from
the voxel [160, 98, 33] which can be obtained by :
.. code-block:: python
from dipy.data.fetcher import read_ivim
img, gtab = read_ivim()
data = load_nifti_data(img)
signal = data[160, 98, 33, :]
"""
x0_test = np.array([1., 0.13, 0.001, 0.0001])
test_signal = ivim_prediction(x0_test, gtab)
ivim_fit = ivim_model_trr.fit(test_signal)
est_signal = ivim_fit.predict(gtab)
assert_array_equal(est_signal.shape, test_signal.shape)
def test_predict():
"""
Test the model prediction API.
The predict method is already used in previous tests for estimation of the
signal. But here, we will test is separately.
"""
assert_array_almost_equal(ivim_fit_single.predict(gtab),
data_single)
assert_array_almost_equal(ivim_model_trr.predict
(ivim_fit_single.model_params, gtab),
data_single)
ivim_fit_multi = ivim_model_trr.fit(data_multi)
assert_array_almost_equal(ivim_fit_multi.predict(gtab),
data_multi)
def test_fit_object():
"""
Test the method of IvimFit class
"""
assert_raises(IndexError, ivim_fit_single.__getitem__, (-.1, 0, 0))
# Check if the S0 called is matching
assert_array_almost_equal(
ivim_fit_single.__getitem__(0).model_params, 1000.)
ivim_fit_multi = ivim_model_trr.fit(data_multi)
# Should raise a TypeError if the arguments are not passed as tuple
assert_raises(TypeError, ivim_fit_multi.__getitem__, -.1, 0)
# Should return IndexError if invalid indices are passed
assert_raises(IndexError, ivim_fit_multi.__getitem__, (100, -0))
assert_raises(IndexError, ivim_fit_multi.__getitem__, (100, -0, 2))
assert_raises(IndexError, ivim_fit_multi.__getitem__, (-100, 0))
assert_raises(IndexError, ivim_fit_multi.__getitem__, [-100, 0])
assert_raises(IndexError, ivim_fit_multi.__getitem__, (1, 0, 0, 3, 4))
# Check if the get item returns the S0 value for voxel (1,0,0)
assert_array_almost_equal(
ivim_fit_multi.__getitem__((1, 0, 0)).model_params[0],
data_multi[1, 0, 0][0])
def test_shape():
"""
Test if `shape` in `IvimFit` class gives the correct output.
"""
assert_array_equal(ivim_fit_single.shape, ())
ivim_fit_multi = ivim_model_trr.fit(data_multi)
assert_array_equal(ivim_fit_multi.shape, (2, 2, 1))
def test_multiple_b0():
# Generate a signal with multiple b0
# This gives an isotropic signal.
signal = multi_tensor(gtab_with_multiple_b0, mevals, snr=None, S0=S0,
fractions=[f * 100, 100 * (1 - f)])
# Single voxel data
data_single = signal[0]
ivim_model_multiple_b0 = IvimModel(gtab_with_multiple_b0, fit_method='trr')
ivim_model_multiple_b0.fit(data_single)
# Test if all signals are positive
def test_no_b0():
assert_raises(ValueError, IvimModel, gtab_no_b0)
def test_noisy_fit():
"""
Test fitting for noisy signals. This tests whether the threshold condition
applies correctly and returns the linear fitting parameters.
For older scipy versions, the returned value of `f` from a linear fit is
around 135 and D and D_star values are equal. Hence doing a test based on
Scipy version.
"""
model_one_stage = IvimModel(gtab, fit_method='trr')
with warnings.catch_warnings(record=True) as w:
fit_one_stage = model_one_stage.fit(noisy_single)
assert_equal(len(w), 3)
for l_w in w:
assert_(issubclass(l_w.category, UserWarning))
assert_("" in str(w[0].message))
assert_("x0 obtained from linear fitting is not feasibile" in
str(w[0].message))
assert_("x0 is unfeasible" in str(w[1].message))
assert_("Bounds are violated for leastsq fitting" in str(w[2].message))
assert_array_less(fit_one_stage.model_params, [10000., 0.3, .01, 0.001])
def test_S0():
"""
Test if the `IvimFit` class returns the correct S0
"""
assert_array_almost_equal(ivim_fit_single.S0_predicted, S0)
assert_array_almost_equal(ivim_fit_multi.S0_predicted,
ivim_params_trr[..., 0])
def test_perfusion_fraction():
"""
Test if the `IvimFit` class returns the correct f
"""
assert_array_almost_equal(ivim_fit_single.perfusion_fraction, f)
assert_array_almost_equal(
ivim_fit_multi.perfusion_fraction, ivim_params_trr[..., 1])
def test_D_star():
"""
Test if the `IvimFit` class returns the correct D_star
"""
assert_array_almost_equal(ivim_fit_single.D_star, D_star)
assert_array_almost_equal(ivim_fit_multi.D_star, ivim_params_trr[..., 2])
def test_D():
"""
Test if the `IvimFit` class returns the correct D
"""
assert_array_almost_equal(ivim_fit_single.D, D)
assert_array_almost_equal(ivim_fit_multi.D, ivim_params_trr[..., 3])
def test_estimate_linear_fit():
"""
Test the linear estimates considering a single exponential fit.
"""
data_single_exponential_D = single_exponential(S0, D, gtab.bvals)
assert_array_almost_equal(ivim_model_trr.estimate_linear_fit(
data_single_exponential_D,
split_b=500.,
less_than=False),
(S0, D))
data_single_exponential_D_star = single_exponential(S0, D_star, gtab.bvals)
assert_array_almost_equal(ivim_model_trr.estimate_linear_fit(
data_single_exponential_D_star,
split_b=100.,
less_than=True),
(S0, D_star))
def test_estimate_f_D_star():
"""
Test if the `estimate_f_D_star` returns the correct parameters after a
non-linear fit.
"""
params_f_D = f + 0.001, D + 0.0001
assert_array_almost_equal(ivim_model_trr.estimate_f_D_star(params_f_D,
data_single, S0,
D),
(f, D_star))
def test_fit_one_stage():
"""
Test to check the results for the one_stage linear fit.
"""
model = IvimModel(gtab, two_stage=False)
fit = model.fit(data_single)
linear_fit_params = [9.88834140e+02, 1.19707191e-01, 7.91176970e-03,
9.30095210e-04]
linear_fit_signal = [988.83414044, 971.77122546, 955.46786293,
939.87125905, 924.93258982, 896.85182201,
870.90346447, 846.81187693, 824.34108781,
803.28900104, 783.48245048, 764.77297789,
747.03322866, 669.54798887, 605.03328304,
549.00852235, 499.21077611, 454.40299244,
413.83192296, 376.98072773, 343.45531017]
assert_array_almost_equal(fit.model_params, linear_fit_params)
assert_array_almost_equal(fit.predict(gtab), linear_fit_signal)
def test_leastsq_failing():
"""
Test for cases where leastsq fitting fails and the results from a linear
fit is returned.
"""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always", category=UserWarning)
fit_single = ivim_model_trr.fit(noisy_single)
assert_greater_equal(len(w), 3)
u_warn = [l_w for l_w in w if issubclass(l_w.category, UserWarning)]
assert_greater_equal(len(u_warn), 3)
message = ["x0 obtained from linear fitting is not feasibile",
"x0 is unfeasible",
"Bounds are violated for leastsq fitting"]
assert_greater_equal(len([lw for lw in u_warn for m in message
if m in str(lw.message)]), 3)
# Test for the S0 and D values
assert_array_almost_equal(fit_single.S0_predicted, 4356.268901117833)
assert_array_almost_equal(fit_single.D, 6.936684e-04)
def test_leastsq_error():
"""
Test error handling of the `_leastsq` method works when unfeasible x0 is
passed. If an unfeasible x0 value is passed using which leastsq fails, the
x0 value is returned as it is.
"""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always", category=UserWarning)
fit = ivim_model_trr._leastsq(data_single, [-1, -1, -1, -1])
assert_greater_equal(len(w), 1)
assert_(issubclass(w[-1].category, UserWarning))
assert_("" in str(w[-1].message))
assert_("x0 is unfeasible" in str(w[-1].message))
assert_array_almost_equal(fit, [-1, -1, -1, -1])
@needs_cvxpy
def test_perfusion_fraction_vp():
"""
Test if the `IvimFit` class returns the correct f
"""
ivim_fit_VP = ivim_model_VP.fit(data_single)
assert_array_almost_equal(ivim_fit_VP.perfusion_fraction, f_VP,
decimal=2)
@needs_cvxpy
def test_D_star_vp():
"""
Test if the `IvimFit` class returns the correct D_star
"""
ivim_fit_VP = ivim_model_VP.fit(data_single)
assert_array_almost_equal(ivim_fit_VP.D_star, D_star_VP, decimal=4)
@needs_cvxpy
def test_D_vp():
"""
Test if the `IvimFit` class returns the correct D
"""
ivim_fit_VP = ivim_model_VP.fit(data_single)
assert_array_almost_equal(ivim_fit_VP.D, D_VP, decimal=4)
if __name__ == '__main__':
| run_module_suite() | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.