prompt large_stringlengths 70 991k | completion large_stringlengths 0 1.02k |
|---|---|
<|file_name|>accounts_items.py<|end_file_name|><|fim▁begin|># -*-coding: utf-8-*-
import logging
from pyramid.view import view_config, view_defaults
from pyramid.httpexceptions import HTTPFound
from . import BaseView
from ..models import DBSession
from ..models.account_item import AccountItem
from ..lib.bl.subscriptions import subscribe_resource
from ..lib.utils.common_utils import translate as _
from ..forms.accounts_items import (
AccountItemForm,
AccountItemSearchForm
)
from ..lib.events.resources import (
ResourceCreated,
ResourceChanged,
ResourceDeleted,
)
log = logging.getLogger(__name__)
@view_defaults(
context='..resources.accounts_items.AccountsItemsResource',
)
class AccountsItemsView(BaseView):
@view_config(
request_method='GET',
renderer='travelcrm:templates/accounts_items/index.mako',
permission='view'
)
def index(self):
return {
'title': self._get_title(),
}
@view_config(
name='list',
xhr='True',
request_method='POST',
renderer='json',
permission='view'
)
def list(self):
form = AccountItemSearchForm(self.request, self.context)
form.validate()
qb = form.submit()
return qb.get_serialized()
@view_config(
name='view',
request_method='GET',
renderer='travelcrm:templates/accounts_items/form.mako',
permission='view'
)
def view(self):
if self.request.params.get('rid'):
resource_id = self.request.params.get('rid')
account_item = AccountItem.by_resource_id(resource_id)
return HTTPFound(
location=self.request.resource_url(
self.context, 'view', query={'id': account_item.id}
)
)
result = self.edit()
result.update({
'title': self._get_title(_(u'View')),
'readonly': True,
})
return result
@view_config(
name='add',
request_method='GET',
renderer='travelcrm:templates/accounts_items/form.mako',
permission='add'
)
def add(self):
return {
'title': self._get_title(_(u'Add')),
}
@view_config(
name='add',
request_method='POST',
renderer='json',
permission='add'
)
def _add(self):
form = AccountItemForm(self.request)
if form.validate():
account_item = form.submit()
DBSession.add(account_item)
DBSession.flush()
event = ResourceCreated(self.request, account_item)
event.registry()
return {
'success_message': _(u'Saved'),
'response': account_item.id
}
else:
return {
'error_message': _(u'Please, check errors'),
'errors': form.errors
}
@view_config(
name='edit',
request_method='GET',
renderer='travelcrm:templates/accounts_items/form.mako',
permission='edit'
)
def edit(self):
account_item = AccountItem.get(self.request.params.get('id'))
return {
'item': account_item,
'title': self._get_title(_(u'Edit')),
}
@view_config(
name='edit',
request_method='POST',
renderer='json',
permission='edit'
)
def _edit(self):
account_item = AccountItem.get(self.request.params.get('id'))
form = AccountItemForm(self.request)
if form.validate():
form.submit(account_item)
event = ResourceChanged(self.request, account_item)
event.registry()
return {
'success_message': _(u'Saved'),
'response': account_item.id
}
else:
return {
'error_message': _(u'Please, check errors'),
'errors': form.errors
}
@view_config(
name='copy',
request_method='GET',
renderer='travelcrm:templates/accounts_items/form.mako',
permission='add'
)
def copy(self):
account_item = AccountItem.get_copy(self.request.params.get('id'))
return {
'action': self.request.path_url,
'item': account_item,
'title': self._get_title(_(u'Copy')),
}
@view_config(
name='copy',
request_method='POST',
renderer='json',<|fim▁hole|> return self._add()
@view_config(
name='delete',
request_method='GET',
renderer='travelcrm:templates/accounts_items/delete.mako',
permission='delete'
)
def delete(self):
return {
'title': self._get_title(_(u'Delete')),
'rid': self.request.params.get('rid')
}
@view_config(
name='delete',
request_method='POST',
renderer='json',
permission='delete'
)
def _delete(self):
errors = False
ids = self.request.params.getall('id')
if ids:
try:
items = DBSession.query(AccountItem).filter(
AccountItem.id.in_(ids)
)
for item in items:
DBSession.delete(item)
event = ResourceDeleted(self.request, item)
event.registry()
DBSession.flush()
except:
errors=True
DBSession.rollback()
if errors:
return {
'error_message': _(
u'Some objects could not be delete'
),
}
return {'success_message': _(u'Deleted')}
@view_config(
name='subscribe',
request_method='GET',
renderer='travelcrm:templates/accounts_items/subscribe.mako',
permission='view'
)
def subscribe(self):
return {
'id': self.request.params.get('id'),
'title': self._get_title(_(u'Subscribe')),
}
@view_config(
name='subscribe',
request_method='POST',
renderer='json',
permission='view'
)
def _subscribe(self):
ids = self.request.params.getall('id')
for id in ids:
account_item = AccountItem.get(id)
subscribe_resource(self.request, account_item.resource)
return {
'success_message': _(u'Subscribed'),
}<|fim▁end|> | permission='add'
)
def _copy(self): |
<|file_name|>test_nsoltChannelConcatenation2dLayer.py<|end_file_name|><|fim▁begin|>import itertools
import unittest
from parameterized import parameterized
import torch
import torch.nn as nn
from nsoltChannelConcatenation2dLayer import NsoltChannelConcatenation2dLayer
nchs = [ [3, 3], [4, 4] ]
datatype = [ torch.float, torch.double ]
nrows = [ 4, 8, 16 ]<|fim▁hole|>ncols = [ 4, 8, 16 ]
class NsoltChannelConcatenation2dLayerTestCase(unittest.TestCase):
"""
NSOLTCHANNELCONCATENATION2DLAYERTESTCASE
2コンポーネント入力(nComponents=2のみサポート):
nSamples x nRows x nCols x (nChsTotal-1)
nSamples x nRows x nCols
1コンポーネント出力(nComponents=1のみサポート):
nSamples x nRows x nCols x nChsTotal
Requirements: Python 3.7.x, PyTorch 1.7.x
Copyright (c) 2020-2021, Shogo MURAMATSU
All rights reserved.
Contact address: Shogo MURAMATSU,
Faculty of Engineering, Niigata University,
8050 2-no-cho Ikarashi, Nishi-ku,
Niigata, 950-2181, JAPAN
http://msiplab.eng.niigata-u.ac.jp/
"""
def testConstructor(self):
# Expected values
expctdName = 'Cn'
expctdDescription = "Channel concatenation"
# Instantiation of target class
layer = NsoltChannelConcatenation2dLayer(
name=expctdName
)
# Actual values
actualName = layer.name
actualDescription = layer.description
# Evaluation
self.assertTrue(isinstance(layer, nn.Module))
self.assertEqual(actualName,expctdName)
self.assertEqual(actualDescription,expctdDescription)
@parameterized.expand(
list(itertools.product(nchs,nrows,ncols,datatype))
)
def testPredict(self,
nchs,nrows,ncols,datatype):
rtol,atol=1e-5,1e-8
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Parameters
nSamples = 8
nChsTotal = sum(nchs)
# nSamples x nRows x nCols x (nChsTotal-1)
Xac = torch.randn(nSamples,nrows,ncols,nChsTotal-1,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols
Xdc = torch.randn(nSamples,nrows,ncols,dtype=datatype,device=device,requires_grad=True)
# Expected values
# nSamples x nRows x nCols x nChsTotal
expctdZ = torch.cat((Xdc.unsqueeze(dim=3),Xac),dim=3)
# Instantiation of target class
layer = NsoltChannelConcatenation2dLayer(
name='Cn'
)
# Actual values
with torch.no_grad():
actualZ = layer.forward(Xac=Xac,Xdc=Xdc)
# Evaluation
self.assertEqual(actualZ.dtype,datatype)
self.assertTrue(torch.allclose(actualZ,expctdZ,rtol=rtol,atol=atol))
self.assertFalse(actualZ.requires_grad)
@parameterized.expand(
list(itertools.product(nchs,nrows,ncols,datatype))
)
def testPredictUnsqueezedXdc(self,
nchs,nrows,ncols,datatype):
rtol,atol=1e-5,1e-8
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Parameters
nSamples = 8
nChsTotal = sum(nchs)
# nSamples x nRows x nCols x (nChsTotal-1)
Xac = torch.randn(nSamples,nrows,ncols,nChsTotal-1,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols x 1
Xdc = torch.randn(nSamples,nrows,ncols,1,dtype=datatype,device=device,requires_grad=True)
# Expected values
# nSamples x nRows x nCols x nChsTotal
expctdZ = torch.cat((Xdc,Xac),dim=3)
# Instantiation of target class
layer = NsoltChannelConcatenation2dLayer(
name='Cn'
)
# Actual values
with torch.no_grad():
actualZ = layer.forward(Xac=Xac,Xdc=Xdc)
# Evaluation
self.assertEqual(actualZ.dtype,datatype)
self.assertTrue(torch.allclose(actualZ,expctdZ,rtol=rtol,atol=atol))
self.assertFalse(actualZ.requires_grad)
@parameterized.expand(
list(itertools.product(nchs,nrows,ncols,datatype))
)
def testBackward(self,
nchs,nrows,ncols,datatype):
rtol,atol=1e-5,1e-8
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Parameters
nSamples = 8
nChsTotal = sum(nchs)
# nSamples x nRows x nCols x (nChsTotal-1)
Xac = torch.randn(nSamples,nrows,ncols,nChsTotal-1,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols
Xdc = torch.randn(nSamples,nrows,ncols,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols x nChsTotal
dLdZ = torch.randn(nSamples,nrows,ncols,nChsTotal,dtype=datatype)
dLdZ = dLdZ.to(device)
# Expected values
# nSamples x nRows x nCols x (nChsTotal-1)
expctddLdXac = dLdZ[:,:,:,1:]
# nSamples x nRows x nCols x 1
expctddLdXdc = dLdZ[:,:,:,0]
# Instantiation of target class
layer = NsoltChannelConcatenation2dLayer(
name='Cn'
)
# Actual values
Z = layer.forward(Xac=Xac,Xdc=Xdc)
Z.backward(dLdZ)
actualdLdXac = Xac.grad
actualdLdXdc = Xdc.grad
# Evaluation
self.assertEqual(actualdLdXdc.dtype,datatype)
self.assertEqual(actualdLdXac.dtype,datatype)
self.assertTrue(torch.allclose(actualdLdXdc,expctddLdXdc,rtol=rtol,atol=atol))
self.assertTrue(torch.allclose(actualdLdXac,expctddLdXac,rtol=rtol,atol=atol))
self.assertTrue(Z.requires_grad)
@parameterized.expand(
list(itertools.product(nchs,nrows,ncols,datatype))
)
def testBackwardUnsqueezedXdc(self,
nchs,nrows,ncols,datatype):
rtol,atol=1e-5,1e-8
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Parameters
nSamples = 8
nChsTotal = sum(nchs)
# nSamples x nRows x nCols x (nChsTotal-1)
Xac = torch.randn(nSamples,nrows,ncols,nChsTotal-1,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols x 1
Xdc = torch.randn(nSamples,nrows,ncols,1,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols x nChsTotal
dLdZ = torch.randn(nSamples,nrows,ncols,nChsTotal,dtype=datatype)
dLdZ = dLdZ.to(device)
# Expected values
# nSamples x nRows x nCols x (nChsTotal-1)
expctddLdXac = dLdZ[:,:,:,1:]
# nSamples x nRows x nCols x 1
expctddLdXdc = dLdZ[:,:,:,0].unsqueeze(dim=3)
# Instantiation of target class
layer = NsoltChannelConcatenation2dLayer(
name='Cn'
)
# Actual values
Z = layer.forward(Xac=Xac,Xdc=Xdc)
Z.backward(dLdZ)
actualdLdXac = Xac.grad
actualdLdXdc = Xdc.grad
# Evaluation
self.assertEqual(actualdLdXdc.dtype,datatype)
self.assertEqual(actualdLdXac.dtype,datatype)
self.assertTrue(torch.allclose(actualdLdXdc,expctddLdXdc,rtol=rtol,atol=atol))
self.assertTrue(torch.allclose(actualdLdXac,expctddLdXac,rtol=rtol,atol=atol))
self.assertTrue(Z.requires_grad)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>script.py<|end_file_name|><|fim▁begin|>"""Helpers to execute scripts."""
import logging
from contextlib import suppress
from itertools import islice
from typing import Optional, Sequence
import voluptuous as vol
from homeassistant.core import HomeAssistant, Context, callback
from homeassistant.const import CONF_CONDITION, CONF_TIMEOUT
from homeassistant import exceptions
from homeassistant.helpers import (
service, condition, template as template,
config_validation as cv)
from homeassistant.helpers.event import (
async_track_point_in_utc_time, async_track_template)
from homeassistant.helpers.typing import ConfigType
import homeassistant.util.dt as date_util
from homeassistant.util.async_ import (
run_coroutine_threadsafe, run_callback_threadsafe)
_LOGGER = logging.getLogger(__name__)
CONF_ALIAS = 'alias'
CONF_SERVICE = 'service'
CONF_SERVICE_DATA = 'data'
CONF_SEQUENCE = 'sequence'
CONF_EVENT = 'event'
CONF_EVENT_DATA = 'event_data'
CONF_EVENT_DATA_TEMPLATE = 'event_data_template'
CONF_DELAY = 'delay'
CONF_WAIT_TEMPLATE = 'wait_template'
CONF_CONTINUE = 'continue_on_timeout'
ACTION_DELAY = 'delay'
ACTION_WAIT_TEMPLATE = 'wait_template'
ACTION_CHECK_CONDITION = 'condition'
ACTION_FIRE_EVENT = 'event'
ACTION_CALL_SERVICE = 'call_service'
def _determine_action(action):
"""Determine action type."""
if CONF_DELAY in action:
return ACTION_DELAY
if CONF_WAIT_TEMPLATE in action:
return ACTION_WAIT_TEMPLATE
if CONF_CONDITION in action:
return ACTION_CHECK_CONDITION
if CONF_EVENT in action:
return ACTION_FIRE_EVENT
return ACTION_CALL_SERVICE
def call_from_config(hass: HomeAssistant, config: ConfigType,
variables: Optional[Sequence] = None,
context: Optional[Context] = None) -> None:
"""Call a script based on a config entry."""
Script(hass, cv.SCRIPT_SCHEMA(config)).run(variables, context)
class _StopScript(Exception):
"""Throw if script needs to stop."""
class _SuspendScript(Exception):
"""Throw if script needs to suspend."""
class Script():
"""Representation of a script."""
def __init__(self, hass: HomeAssistant, sequence, name: str = None,
change_listener=None) -> None:
"""Initialize the script."""
self.hass = hass
self.sequence = sequence
template.attach(hass, self.sequence)
self.name = name
self._change_listener = change_listener
self._cur = -1
self._exception_step = None
self.last_action = None
self.last_triggered = None
self.can_cancel = any(CONF_DELAY in action or CONF_WAIT_TEMPLATE
in action for action in self.sequence)
self._async_listener = []
self._template_cache = {}
self._config_cache = {}
self._actions = {
ACTION_DELAY: self._async_delay,
ACTION_WAIT_TEMPLATE: self._async_wait_template,
ACTION_CHECK_CONDITION: self._async_check_condition,
ACTION_FIRE_EVENT: self._async_fire_event,
ACTION_CALL_SERVICE: self._async_call_service,
}
@property
def is_running(self) -> bool:
"""Return true if script is on."""
return self._cur != -1
def run(self, variables=None, context=None):
"""Run script."""
run_coroutine_threadsafe(
self.async_run(variables, context), self.hass.loop).result()
async def async_run(self, variables: Optional[Sequence] = None,
context: Optional[Context] = None) -> None:
"""Run script.
This method is a coroutine.
"""
self.last_triggered = date_util.utcnow()
if self._cur == -1:
self._log('Running script')
self._cur = 0
# Unregister callback if we were in a delay or wait but turn on is
# called again. In that case we just continue execution.
self._async_remove_listener()
for cur, action in islice(enumerate(self.sequence), self._cur, None):
try:
await self._handle_action(action, variables, context)
except _SuspendScript:
# Store next step to take and notify change listeners
self._cur = cur + 1
if self._change_listener:
self.hass.async_add_job(self._change_listener)
return
except _StopScript:
break
except Exception:
# Store the step that had an exception
self._exception_step = cur
# Set script to not running
self._cur = -1
self.last_action = None
# Pass exception on.
raise
# Set script to not-running.
self._cur = -1
self.last_action = None
if self._change_listener:
self.hass.async_add_job(self._change_listener)
def stop(self) -> None:
"""Stop running script."""
run_callback_threadsafe(self.hass.loop, self.async_stop).result()
def async_stop(self) -> None:
"""Stop running script."""
if self._cur == -1:
return
self._cur = -1
self._async_remove_listener()
if self._change_listener:
self.hass.async_add_job(self._change_listener)
@callback
def async_log_exception(self, logger, message_base, exception):
"""Log an exception for this script.
Should only be called on exceptions raised by this scripts async_run.
"""
# pylint: disable=protected-access
step = self._exception_step
action = self.sequence[step]
action_type = _determine_action(action)
error = None
meth = logger.error
if isinstance(exception, vol.Invalid):
error_desc = "Invalid data"
elif isinstance(exception, exceptions.TemplateError):
error_desc = "Error rendering template"
elif isinstance(exception, exceptions.Unauthorized):
error_desc = "Unauthorized"
elif isinstance(exception, exceptions.ServiceNotFound):
error_desc = "Service not found"
else:
# Print the full stack trace, unknown error
error_desc = 'Unknown error'
meth = logger.exception
error = ""
if error is None:
error = str(exception)
meth("%s. %s for %s at pos %s: %s",
message_base, error_desc, action_type, step + 1, error)
async def _handle_action(self, action, variables, context):
"""Handle an action."""
await self._actions[_determine_action(action)](
action, variables, context)
async def _async_delay(self, action, variables, context):
"""Handle delay."""
# Call ourselves in the future to continue work
unsub = None
@callback
def async_script_delay(now):
"""Handle delay."""
# pylint: disable=cell-var-from-loop
with suppress(ValueError):
self._async_listener.remove(unsub)
self.hass.async_create_task(
self.async_run(variables, context))
delay = action[CONF_DELAY]
try:
if isinstance(delay, template.Template):
delay = vol.All(
cv.time_period,
cv.positive_timedelta)(
delay.async_render(variables))
elif isinstance(delay, dict):
delay_data = {}
delay_data.update(
template.render_complex(delay, variables))
delay = cv.time_period(delay_data)
except (exceptions.TemplateError, vol.Invalid) as ex:
_LOGGER.error("Error rendering '%s' delay template: %s",
self.name, ex)
raise _StopScript
self.last_action = action.get(
CONF_ALIAS, 'delay {}'.format(delay))
self._log("Executing step %s" % self.last_action)
unsub = async_track_point_in_utc_time(
self.hass, async_script_delay,
date_util.utcnow() + delay
)
self._async_listener.append(unsub)
raise _SuspendScript
async def _async_wait_template(self, action, variables, context):
"""Handle a wait template."""
# Call ourselves in the future to continue work
wait_template = action[CONF_WAIT_TEMPLATE]
wait_template.hass = self.hass
self.last_action = action.get(CONF_ALIAS, 'wait template')
self._log("Executing step %s" % self.last_action)
# check if condition already okay
if condition.async_template(
self.hass, wait_template, variables):
return
@callback
def async_script_wait(entity_id, from_s, to_s):
"""Handle script after template condition is true."""
self._async_remove_listener()
self.hass.async_create_task(
self.async_run(variables, context))
self._async_listener.append(async_track_template(<|fim▁hole|> self.hass, wait_template, async_script_wait, variables))
if CONF_TIMEOUT in action:
self._async_set_timeout(
action, variables, context,
action.get(CONF_CONTINUE, True))
raise _SuspendScript
async def _async_call_service(self, action, variables, context):
"""Call the service specified in the action.
This method is a coroutine.
"""
self.last_action = action.get(CONF_ALIAS, 'call service')
self._log("Executing step %s" % self.last_action)
await service.async_call_from_config(
self.hass, action,
blocking=True,
variables=variables,
validate_config=False,
context=context
)
async def _async_fire_event(self, action, variables, context):
"""Fire an event."""
self.last_action = action.get(CONF_ALIAS, action[CONF_EVENT])
self._log("Executing step %s" % self.last_action)
event_data = dict(action.get(CONF_EVENT_DATA, {}))
if CONF_EVENT_DATA_TEMPLATE in action:
try:
event_data.update(template.render_complex(
action[CONF_EVENT_DATA_TEMPLATE], variables))
except exceptions.TemplateError as ex:
_LOGGER.error('Error rendering event data template: %s', ex)
self.hass.bus.async_fire(action[CONF_EVENT],
event_data, context=context)
async def _async_check_condition(self, action, variables, context):
"""Test if condition is matching."""
config_cache_key = frozenset((k, str(v)) for k, v in action.items())
config = self._config_cache.get(config_cache_key)
if not config:
config = condition.async_from_config(action, False)
self._config_cache[config_cache_key] = config
self.last_action = action.get(CONF_ALIAS, action[CONF_CONDITION])
check = config(self.hass, variables)
self._log("Test condition {}: {}".format(self.last_action, check))
if not check:
raise _StopScript
def _async_set_timeout(self, action, variables, context,
continue_on_timeout):
"""Schedule a timeout to abort or continue script."""
timeout = action[CONF_TIMEOUT]
unsub = None
@callback
def async_script_timeout(now):
"""Call after timeout is retrieve."""
with suppress(ValueError):
self._async_listener.remove(unsub)
# Check if we want to continue to execute
# the script after the timeout
if continue_on_timeout:
self.hass.async_create_task(
self.async_run(variables, context))
else:
self._log("Timeout reached, abort script.")
self.async_stop()
unsub = async_track_point_in_utc_time(
self.hass, async_script_timeout,
date_util.utcnow() + timeout
)
self._async_listener.append(unsub)
def _async_remove_listener(self):
"""Remove point in time listener, if any."""
for unsub in self._async_listener:
unsub()
self._async_listener.clear()
def _log(self, msg):
"""Logger helper."""
if self.name is not None:
msg = "Script {}: {}".format(self.name, msg)
_LOGGER.info(msg)<|fim▁end|> | |
<|file_name|>csection.py<|end_file_name|><|fim▁begin|># coding=utf8
r"""
csection.py -- Create a tree of contents, organized by sections and inside
sections the exercises unique_name.
AUTHOR:
- Pedro Cruz (2012-01): initial version
- Pedro Cruz (2016-03): improvment for smc
An exercise could contain um its %summary tag line a description of section
in form::
%sumary section descriptive text; subsection descriptive text; etc
The class transform contents of some MegUA database into a tree of sections specifying exercises as leaves.
Then, this tree can be flushed out to some file or output system.
STRUTURE SAMPLE::
contents -> { 'Section1': Section('Section1',0), 'Section2': Section('Section2',0) }
For each Section object see below in this file.
A brief description is:
* a SectionClassifier is the "book" made with keys (chapter names) that are keys of a dictionary.
* SectionClassifier is a dictionary: keys are the chapter names and the values are Section objects.
* a Section object is defined by
* a name (the key of the SectionClassifiers appears again in sec_name)
* level (0 if it is top level sections: chapters, and so on)
* a list of exercises beloging to the section and
* a dictionary of subsections (again Section objects)
* Section = (sec_name, level, [list of exercises names], dict( subsections ) )
EXAMPLES:
Test with:
::
sage -t csection.py
Create or edit a database:
::
sage: from megua.megbook import MegBook
sage: meg = MegBook(r'_input/csection.sqlite')
Save a new or changed exercise
::
sage: txt=r'''
....: %Summary Primitives; Imediate primitives; Trigonometric
....:
....: Here, is a summary.
....:
....: %Problem Some Name
....: What is the primitive of $a x + b@()$ ?
....:
....: %Answer
....: The answer is $prim+C$, for $C in \mathbb{R}$.
....:
....: class E28E28_pimtrig_001(ExerciseBase):
....: pass
....: '''
sage: meg.save(txt)
-------------------------------
Instance of: E28E28_pimtrig_001
-------------------------------
==> Summary:
Here, is a summary.
==> Problem instance
What is the primitive of $a x + b$ ?
==> Answer instance
The answer is $prim+C$, for $C in \mathbb{R}$.
sage: txt=r'''
....: %Summary Primitives; Imediate primitives; Trigonometric
....:
....: Here, is a summary.
....:
....: %Problem Some Name2
....: What is the primitive of $a x + b@()$ ?
....:
....: %Answer
....: The answer is $prim+C$, for $C in \mathbb{R}$.
....:
....: class E28E28_pimtrig_002(ExerciseBase):
....: pass
....: '''
sage: meg.save(txt)
-------------------------------
Instance of: E28E28_pimtrig_002
-------------------------------
==> Summary:
Here, is a summary.
==> Problem instance
What is the primitive of $a x + b$ ?
==> Answer instance
The answer is $prim+C$, for $C in \mathbb{R}$.
sage: txt=r'''
....: %Summary Primitives; Imediate primitives; Polynomial
....:
....: Here, is a summary.
....:
....: %Problem Some Problem 1
....: What is the primitive of $a x + b@()$ ?
....:
....: %Answer
....: The answer is $prim+C$, for $C in \mathbb{R}$.
....:
....: class E28E28_pdirect_001(ExerciseBase):
....: pass
....: '''
sage: meg.save(txt)
-------------------------------
Instance of: E28E28_pdirect_001
-------------------------------
==> Summary:
Here, is a summary.
==> Problem instance
What is the primitive of $a x + b$ ?
==> Answer instance
The answer is $prim+C$, for $C in \mathbb{R}$.
sage: txt=r'''
....: %Summary
....:
....: Here, is a summary.
....:
....: %Problem
....: What is the primitive of $a x + b@()$ ?
....:
....: %Answer
....: The answer is $prim+C$, for $C in \mathbb{R}$.
....:
....: class E28E28_pdirect_003(ExerciseBase):
....: pass
....: '''
sage: meg.save(txt)
Each exercise can belong to a section/subsection/subsubsection.
Write sections using ';' in the '%summary' line. For ex., '%summary Section; Subsection; Subsubsection'.
<BLANKLINE>
Each problem can have a suggestive name.
Write in the '%problem' line a name, for ex., '%problem The Fish Problem'.
<BLANKLINE>
Check exercise E28E28_pdirect_003 for the above warnings.
-------------------------------
Instance of: E28E28_pdirect_003
-------------------------------
==> Summary:
Here, is a summary.
==> Problem instance
What is the primitive of $a x + b$ ?
==> Answer instance
The answer is $prim+C$, for $C in \mathbb{R}$.
Travel down the tree sections:
::
sage: s = SectionClassifier(meg.megbook_store)
sage: s.textprint()
Primitives
Imediate primitives
Polynomial
> E28E28_pdirect_001
Trigonometric
> E28E28_pimtrig_001
> E28E28_pimtrig_002
E28E28_pdirect
> E28E28_pdirect_003
Testing a recursive iterator:
::
sage: meg = MegBook("_input/paula.sqlite")
sage: s = SectionClassifier(meg.megbook_store)
sage: for section in s.section_iterator():
....: print section
<|fim▁hole|>"""
#*****************************************************************************
# Copyright (C) 2011,2016 Pedro Cruz <PedroCruz@ua.pt>
#
# Distributed under the terms of the GNU General Public License (GPL)
# http://www.gnu.org/licenses/
#*****************************************************************************
#PYHTON modules
import collections
#MEGUA modules
from megua.localstore import ExIter
class SectionClassifier:
"""
"""
def __init__(self,megbook_store,max_level=4,debug=False,exerset=None):
#save megstore reference
self.megbook_store = megbook_store
self.max_level = max_level
#Exercise set or none for all
self.exercise_set = exerset
#dictionary of sections
self.contents = dict()
self.classify()
def classify(self):
"""
Classify by sections.
"""
for row in ExIter(self.megbook_store):
if self.exercise_set and not row['unique_name'] in self.exercise_set:
continue
#get a list in form ["section", "subsection", "subsubsection", ...]
sec_list = str_to_list(row['sections_text'])
if sec_list == [] or sec_list == [u'']:
sec_list = [ first_part(row['unique_name']) ]
#sec_list contain at least one element.
if not sec_list[0] in self.contents:
self.contents[sec_list[0]] = Section(sec_list[0])
#sec_list contains less than `max_level` levels
subsec_list = sec_list[1:self.max_level]
self.contents[sec_list[0]].add(row['unique_name'],subsec_list)
def textprint(self):
"""
Textual print of all the contents.
"""
for c in self.contents:
self.contents[c].textprint()
def section_iterator(self):
r"""
OUTPUT:
- an iterator yielding (secname, sorted exercises)
"""
# A stack-based alternative to the traverse_tree method above.
od_top = collections.OrderedDict(sorted(self.contents.items()))
stack = []
for secname,section in od_top.iteritems():
stack.append(section)
while stack:
section_top = stack.pop(0) #remove left element
yield section_top
od_sub = collections.OrderedDict(sorted(section_top.subsections.items()))
desc = []
for secname,section in od_sub.iteritems():
desc.append(section)
stack[:0] = desc #add elemnts from desc list at left (":0")
class Section:
r"""
Section = (sec_name, level, [list of exercises names], dict( subsections ) )
"""
def __init__(self,sec_name,level=0):
self.sec_name = sec_name
self.level = level
#Exercises of this section (self).
self.exercises = []
#This section (self) can have subsections.
self.subsections = dict()
def __str__(self):
return self.level*" " + self.sec_name.encode("utf8") + " has " + str(len(self.exercises))
def __repr__(self):
return self.level*" " + self.sec_name.encode("utf8") + " has " + str(len(self.exercises))
def add(self,exname,sections):
r"""
Recursive function to add an exercise to """
if sections == []:
self.exercises.append(exname)
self.exercises.sort()
return
if not sections[0] in self.subsections:
self.subsections[sections[0]] = Section(sections[0],self.level+1)
self.subsections[sections[0]].add(exname,sections[1:])
def textprint(self):
"""
Textual print of the contents of this section and, recursivly, of the subsections.
"""
sp = " "*self.level
print sp + self.sec_name
for e in self.exercises:
print sp+r"> "+e
for sub in self.subsections:
self.subsections[sub].textprint()
def str_to_list(s):
"""
Convert::
'section description; subsection description; subsubsection description'
into::
[ 'section description', 'subsection description', 'subsubsection description']
"""
sl = s.split(';')
for i in range(len(sl)):
sl[i] = sl[i].strip()
return sl
def first_part(s):
"""
Usually exercise are named like `E12X34_name_001` and this routine extracts `E12X34` or `top` if no underscore is present.
"""
p = s.find("_")
p = s.find("_",p+1)
if p!=-1:
s = s[:p]
if s=='':
s = 'top'
return s<|fim▁end|> | |
<|file_name|>workerpool.go<|end_file_name|><|fim▁begin|>package fasthttp
import (
"net"
"runtime"
"runtime/debug"
"strings"
"sync"
"time"
)
// workerPool serves incoming connections via a pool of workers
// in FILO order, i.e. the most recently stopped worker will serve the next
// incoming connection.
//
// Such a scheme keeps CPU caches hot (in theory).
type workerPool struct {
// Function for serving server connections.
// It must leave c unclosed.
WorkerFunc func(c net.Conn) error
MaxWorkersCount int
LogAllErrors bool
Logger Logger
lock sync.Mutex
workersCount int
mustStop bool
ready []*workerChan
stopCh chan struct{}
}
type workerChan struct {
t time.Time
ch chan net.Conn
}
func (wp *workerPool) Start() {
if wp.stopCh != nil {<|fim▁hole|> stopCh := wp.stopCh
go func() {
for {
select {
case <-stopCh:
return
default:
time.Sleep(10 * time.Second)
}
wp.clean()
}
}()
}
func (wp *workerPool) Stop() {
if wp.stopCh == nil {
panic("BUG: workerPool wasn't started")
}
close(wp.stopCh)
wp.stopCh = nil
// Stop all the workers waiting for incoming connections.
// Do not wait for busy workers - they will stop after
// serving the connection and noticing wp.mustStop = true.
wp.lock.Lock()
for _, ch := range wp.ready {
ch.ch <- nil
}
wp.ready = nil
wp.mustStop = true
wp.lock.Unlock()
}
const maxIdleWorkerDuration = 10 * time.Second
func (wp *workerPool) clean() {
// Clean least recently used workers if they didn't serve connections
// for more than maxIdleWorkerDuration.
wp.lock.Lock()
ready := wp.ready
for len(ready) > 1 && time.Since(ready[0].t) > maxIdleWorkerDuration {
// notify the worker to stop.
ready[0].ch <- nil
ready = ready[1:]
wp.workersCount--
}
if len(ready) < len(wp.ready) {
copy(wp.ready, ready)
for i := len(ready); i < len(wp.ready); i++ {
wp.ready[i] = nil
}
wp.ready = wp.ready[:len(ready)]
}
wp.lock.Unlock()
}
func (wp *workerPool) Serve(c net.Conn) bool {
ch := wp.getCh()
if ch == nil {
return false
}
ch.ch <- c
return true
}
var workerChanCap = func() int {
// Use blocking workerChan if GOMAXPROCS=1.
// This immediately switches Serve to WorkerFunc, which results
// in higher performance (under go1.5 at least).
if runtime.GOMAXPROCS(0) == 1 {
return 0
}
// Use non-blocking workerChan if GOMAXPROCS>1,
// since otherwise the Serve caller (Acceptor) may lag accepting
// new connections if WorkerFunc is CPU-bound.
return 1
}()
func (wp *workerPool) getCh() *workerChan {
var ch *workerChan
createWorker := false
wp.lock.Lock()
ready := wp.ready
n := len(ready) - 1
if n < 0 {
if wp.workersCount < wp.MaxWorkersCount {
createWorker = true
wp.workersCount++
}
} else {
ch = ready[n]
wp.ready = ready[:n]
}
wp.lock.Unlock()
if ch == nil {
if !createWorker {
return nil
}
vch := workerChanPool.Get()
if vch == nil {
vch = &workerChan{
ch: make(chan net.Conn, workerChanCap),
}
}
ch = vch.(*workerChan)
go func() {
wp.workerFunc(ch)
workerChanPool.Put(vch)
}()
}
return ch
}
func (wp *workerPool) release(ch *workerChan) bool {
ch.t = time.Now()
wp.lock.Lock()
if wp.mustStop {
wp.lock.Unlock()
return false
}
wp.ready = append(wp.ready, ch)
wp.lock.Unlock()
return true
}
var workerChanPool sync.Pool
func (wp *workerPool) workerFunc(ch *workerChan) {
var c net.Conn
var err error
defer func() {
if r := recover(); r != nil {
wp.Logger.Printf("panic: %s\nStack trace:\n%s", r, debug.Stack())
}
if c != nil {
c.Close()
wp.release(ch)
}
}()
for c = range ch.ch {
if c == nil {
break
}
if err = wp.WorkerFunc(c); err != nil && err != errHijacked {
errStr := err.Error()
if wp.LogAllErrors || !(strings.Contains(errStr, "broken pipe") ||
strings.Contains(errStr, "reset by peer") ||
strings.Contains(errStr, "i/o timeout")) {
wp.Logger.Printf("error when serving connection %q<->%q: %s", c.LocalAddr(), c.RemoteAddr(), err)
}
}
if err != errHijacked {
c.Close()
}
c = nil
if !wp.release(ch) {
break
}
}
}<|fim▁end|> | panic("BUG: workerPool already started")
}
wp.stopCh = make(chan struct{}) |
<|file_name|>3_2_HelloWorld.py<|end_file_name|><|fim▁begin|>#%%
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from tensorflow.examples.tutorials.mnist import input_data<|fim▁hole|>
print(mnist.train.images.shape, mnist.train.labels.shape)
print(mnist.test.images.shape, mnist.test.labels.shape)
print(mnist.validation.images.shape, mnist.validation.labels.shape)
import tensorflow as tf
sess = tf.InteractiveSession()
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(x, W) + b)
y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
tf.global_variables_initializer().run()
for i in range(1000):
batch_xs, batch_ys = mnist.train.next_batch(100)
train_step.run({x: batch_xs, y_: batch_ys})
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(accuracy.eval({x: mnist.test.images, y_: mnist.test.labels}))<|fim▁end|> | mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) |
<|file_name|>supplier.js<|end_file_name|><|fim▁begin|>$(document).ready(function () {
//selecting supplier
// $('body').off('click','supplier_id.select_popup').on('click','supplier_id.select_popup' ,function () {
// void window.open('select.php?class_name=supplier', '_blank',
// 'width=1200,height=1000,TOOLBAR=no,MENUBAR=no,SCROLLBARS=yes,RESIZABLE=yes,LOCATION=no,DIRECTORIES=no,STATUS=no');
// return false;
// });
//Popup for selecting address
$('body').off('click','.address_popup').on('click','.address_popup',function (e) {
e.preventDefault();
var rowClass = $(this).closest('div').prop('class');
localStorage.setItem("addressPopupDivClass", rowClass);
void window.open('form.php?class_name=address&mode=9&window_type=popup', '_blank',
'width=1200,height=1000,TOOLBAR=no,MENUBAR=no,SCROLLBARS=yes,RESIZABLE=yes,LOCATION=no,DIRECTORIES=no,STATUS=no');
return false;
});
$("#supplier_site_name").on("change", function () {<|fim▁hole|> $(".show.supplier_site_id").hide();
$("#supplier_site_id").val("");
$("#supplier_site_number").val("");
}
}
});
});<|fim▁end|> | if ($(this).val() == 'newentry') {
if (confirm("Do you want to create a new supplier site?")) {
$(this).replaceWith('<input id="supplier_site_name" class="textfield supplier_site_name" type="text" size="25" maxlength="50" name="supplier_site_name[]">'); |
<|file_name|>impl_gcd.py<|end_file_name|><|fim▁begin|>__author__ = 'Nishanth'
from juliabox.cloud import JBPluginCloud
from juliabox.jbox_util import JBoxCfg, retry_on_errors
from googleapiclient.discovery import build
from oauth2client.client import GoogleCredentials
import threading
class JBoxGCD(JBPluginCloud):
provides = [JBPluginCloud.JBP_DNS, JBPluginCloud.JBP_DNS_GCD]
threadlocal = threading.local()
INSTALLID = None
REGION = None
DOMAIN = None
<|fim▁hole|> cloud_host = JBoxCfg.get('cloud_host')
JBoxGCD.INSTALLID = cloud_host['install_id']
JBoxGCD.REGION = cloud_host['region']
JBoxGCD.DOMAIN = cloud_host['domain']
@staticmethod
def domain():
if JBoxGCD.DOMAIN is None:
JBoxGCD.configure()
return JBoxGCD.DOMAIN
@staticmethod
def connect():
c = getattr(JBoxGCD.threadlocal, 'conn', None)
if c is None:
JBoxGCD.configure()
creds = GoogleCredentials.get_application_default()
JBoxGCD.threadlocal.conn = c = build("dns", "v1", credentials=creds)
return c
@staticmethod
@retry_on_errors(retries=2)
def add_cname(name, value):
JBoxGCD.connect().changes().create(
project=JBoxGCD.INSTALLID, managedZone=JBoxGCD.REGION,
body={'kind': 'dns#change',
'additions': [
{'rrdatas': [value],
'kind': 'dns#resourceRecordSet',
'type': 'A',
'name': name,
'ttl': 300} ] }).execute()
@staticmethod
@retry_on_errors(retries=2)
def delete_cname(name):
resp = JBoxGCD.connect().resourceRecordSets().list(
project=JBoxGCD.INSTALLID, managedZone=JBoxGCD.REGION,
name=name, type='A').execute()
if len(resp['rrsets']) == 0:
JBoxGCD.log_debug('No prior dns registration found for %s', name)
else:
cname = resp['rrsets'][0]['rrdatas'][0]
ttl = resp['rrsets'][0]['ttl']
JBoxGCD.connect().changes().create(
project=JBoxGCD.INSTALLID, managedZone=JBoxGCD.REGION,
body={'kind': 'dns#change',
'deletions': [
{'rrdatas': [str(cname)],
'kind': 'dns#resourceRecordSet',
'type': 'A',
'name': name,
'ttl': ttl} ] }).execute()
JBoxGCD.log_warn('Prior dns registration was found for %s', name)<|fim▁end|> | @staticmethod
def configure(): |
<|file_name|>test_isotime.py<|end_file_name|><|fim▁begin|># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import unittest
from st2common.util import isotime
class TestTimeUtil(unittest.TestCase):
def test_add_utc_tz_info(self):
dt = datetime.datetime.utcnow()
self.assertIsNone(dt.tzinfo)
dt = isotime.add_utc_tz(dt)<|fim▁hole|> self.assertIsNotNone(dt.tzinfo)
self.assertEqual(dt.tzinfo.tzname(None), 'UTC')
def test_validate(self):
self.assertTrue(isotime.validate('2000-01-01 12:00:00Z'))
self.assertTrue(isotime.validate('2000-01-01 12:00:00+00'))
self.assertTrue(isotime.validate('2000-01-01 12:00:00+0000'))
self.assertTrue(isotime.validate('2000-01-01 12:00:00+00:00'))
self.assertTrue(isotime.validate('2000-01-01 12:00:00.000000Z'))
self.assertTrue(isotime.validate('2000-01-01 12:00:00.000000+00'))
self.assertTrue(isotime.validate('2000-01-01 12:00:00.000000+0000'))
self.assertTrue(isotime.validate('2000-01-01 12:00:00.000000+00:00'))
self.assertTrue(isotime.validate('2000-01-01T12:00:00Z'))
self.assertTrue(isotime.validate('2000-01-01T12:00:00.000000Z'))
self.assertTrue(isotime.validate('2000-01-01T12:00:00+00:00'))
self.assertTrue(isotime.validate('2000-01-01T12:00:00.000000+00:00'))
self.assertTrue(isotime.validate('2015-02-10T21:21:53.399Z'))
self.assertFalse(isotime.validate('2000-01-01', raise_exception=False))
self.assertFalse(isotime.validate('2000-01-01T12:00:00', raise_exception=False))
self.assertFalse(isotime.validate('2000-01-01T12:00:00+00:00Z', raise_exception=False))
self.assertFalse(isotime.validate('2000-01-01T12:00:00.000000', raise_exception=False))
self.assertFalse(isotime.validate('Epic!', raise_exception=False))
self.assertFalse(isotime.validate(object(), raise_exception=False))
self.assertRaises(ValueError, isotime.validate, 'Epic!', True)
def test_parse(self):
dt = isotime.add_utc_tz(datetime.datetime(2000, 1, 1, 12))
self.assertEqual(isotime.parse('2000-01-01 12:00:00Z'), dt)
self.assertEqual(isotime.parse('2000-01-01 12:00:00+00'), dt)
self.assertEqual(isotime.parse('2000-01-01 12:00:00+0000'), dt)
self.assertEqual(isotime.parse('2000-01-01 12:00:00+00:00'), dt)
self.assertEqual(isotime.parse('2000-01-01 12:00:00.000000Z'), dt)
self.assertEqual(isotime.parse('2000-01-01 12:00:00.000000+00'), dt)
self.assertEqual(isotime.parse('2000-01-01 12:00:00.000000+0000'), dt)
self.assertEqual(isotime.parse('2000-01-01 12:00:00.000000+00:00'), dt)
self.assertEqual(isotime.parse('2000-01-01T12:00:00Z'), dt)
self.assertEqual(isotime.parse('2000-01-01T12:00:00+00:00'), dt)
self.assertEqual(isotime.parse('2000-01-01T12:00:00.000000Z'), dt)
self.assertEqual(isotime.parse('2000-01-01T12:00:00.000000+00:00'), dt)
self.assertEqual(isotime.parse('2000-01-01T12:00:00.000Z'), dt)
def test_format(self):
dt = isotime.add_utc_tz(datetime.datetime(2000, 1, 1, 12))
dt_str_usec_offset = '2000-01-01T12:00:00.000000+00:00'
dt_str_usec = '2000-01-01T12:00:00.000000Z'
dt_str_offset = '2000-01-01T12:00:00+00:00'
dt_str = '2000-01-01T12:00:00Z'
dt_unicode = u'2000-01-01T12:00:00Z'
self.assertEqual(isotime.format(dt, usec=True, offset=True), dt_str_usec_offset)
self.assertEqual(isotime.format(dt, usec=True, offset=False), dt_str_usec)
self.assertEqual(isotime.format(dt, usec=False, offset=True), dt_str_offset)
self.assertEqual(isotime.format(dt, usec=False, offset=False), dt_str)
self.assertEqual(isotime.format(dt_str, usec=False, offset=False), dt_str)
self.assertEqual(isotime.format(dt_unicode, usec=False, offset=False), dt_unicode)
def test_format_tz_naive(self):
dt1 = datetime.datetime.utcnow()
dt2 = isotime.parse(isotime.format(dt1, usec=True))
self.assertEqual(dt2, isotime.add_utc_tz(dt1))
def test_format_tz_aware(self):
dt1 = isotime.add_utc_tz(datetime.datetime.utcnow())
dt2 = isotime.parse(isotime.format(dt1, usec=True))
self.assertEqual(dt2, dt1)
def test_format_sec_truncated(self):
dt1 = isotime.add_utc_tz(datetime.datetime.utcnow())
dt2 = isotime.parse(isotime.format(dt1, usec=False))
dt3 = datetime.datetime(dt1.year, dt1.month, dt1.day, dt1.hour, dt1.minute, dt1.second)
self.assertLess(dt2, dt1)
self.assertEqual(dt2, isotime.add_utc_tz(dt3))<|fim▁end|> | |
<|file_name|>world_script_loader.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2017 TrinityCore <http://www.trinitycore.org/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "World.h"
// This is where scripts' loading functions should be declared:
// world
void AddSC_areatrigger_scripts();
void AddSC_emerald_dragons();
void AddSC_generic_creature();
void AddSC_go_scripts();
void AddSC_guards();
void AddSC_item_scripts();
void AddSC_npc_professions();
void AddSC_npc_innkeeper();
void AddSC_npcs_special();
void AddSC_achievement_scripts();
void AddSC_action_ip_logger();
void AddSC_scene_scripts();
// player
void AddSC_chat_log();
void AddSC_duel_reset();
// The name of this function should match:
// void Add${NameOfDirectory}Scripts()
void AddWorldScripts()
{
AddSC_areatrigger_scripts();
AddSC_emerald_dragons();
AddSC_generic_creature();
AddSC_go_scripts();
AddSC_guards();
AddSC_item_scripts();
AddSC_npc_professions();
AddSC_npc_innkeeper();
AddSC_npcs_special();
AddSC_achievement_scripts();
AddSC_chat_log(); // location: scripts\World\chat_log.cpp
AddSC_scene_scripts();
// FIXME: This should be moved in a script validation hook.
// To avoid duplicate code, we check once /*ONLY*/ if logging is permitted or not.
if (sWorld->getBoolConfig(CONFIG_IP_BASED_ACTION_LOGGING))<|fim▁hole|><|fim▁end|> | AddSC_action_ip_logger(); // location: scripts\World\action_ip_logger.cpp
AddSC_duel_reset();
} |
<|file_name|>test_compressing_file_reader.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Tests for swift.common.compressing_file_reader """
import unittest
import cStringIO
from slogging.compressing_file_reader import CompressingFileReader
class TestCompressingFileReader(unittest.TestCase):
def test_read(self):
plain = 'obj\ndata'
s = cStringIO.StringIO(plain)
expected = '\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff\xcaO\xca\xe2JI,'\
'I\x04\x00\x00\x00\xff\xff\x03\x00P(\xa8\x1f\x08\x00\x00'\
'\x00'
x = CompressingFileReader(s)
compressed = ''.join(iter(lambda: x.read(), ''))
self.assertEquals(compressed, expected)
self.assertEquals(x.read(), '')<|fim▁end|> | # Unless required by applicable law or agreed to in writing, software |
<|file_name|>_util.py<|end_file_name|><|fim▁begin|>import matplotlib.transforms
import numpy
<|fim▁hole|>
def get_legend_text(obj):
"""Check if line is in legend."""
leg = obj.axes.get_legend()
if leg is None:
return None
keys = [h.get_label() for h in leg.legendHandles if h is not None]
values = [t.get_text() for t in leg.texts]
label = obj.get_label()
d = dict(zip(keys, values))
if label in d:
return d[label]
return None
def transform_to_data_coordinates(obj, xdata, ydata):
"""The coordinates might not be in data coordinates, but could be sometimes in axes
coordinates. For example, the matplotlib command
axes.axvline(2)
will have the y coordinates set to 0 and 1, not to the limits. Therefore, a
two-stage transform has to be applied:
1. first transforming to display coordinates, then
2. from display to data.
"""
if obj.axes is not None and obj.get_transform() != obj.axes.transData:
points = numpy.array([xdata, ydata]).T
transform = matplotlib.transforms.composite_transform_factory(
obj.get_transform(), obj.axes.transData.inverted()
)
return transform.transform(points).T
return xdata, ydata<|fim▁end|> | def has_legend(axes):
return axes.get_legend() is not None
|
<|file_name|>Badge.tsx<|end_file_name|><|fim▁begin|>import React, {forwardRef} from 'react'
import cn from 'classnames'
import {Text} from '../Typography/Text'
import type {HTMLAttributes, PropsWithRef} from 'react'
import type {OmuiColors} from '@/styles/colorType'
export type BadgeVariantProp = 'gray' | 'primary' | 'tertiary' | 'quaternary' | 'danger' | 'success'
export type BadgeTypeProp = 'outline' | 'subtle' | 'solid'
interface Props extends HTMLAttributes<HTMLDivElement> {
/**
* The variant of the badge.
* @defaultValue primary
*/
variant: BadgeVariantProp
/**
* The type of the badge.
* @defaultValue outline
*/
type: BadgeTypeProp
/**
* Indicates if the inner text should be truncated.
* @defaultValue outline
*/
truncate?: boolean
}
export type BadgeProps = PropsWithRef<Props>
const VARIANT_COLORS: Record<BadgeVariantProp, OmuiColors> = {
gray: 'gray',
primary: 'primary',
tertiary: 'violet',
quaternary: 'blue',
danger: 'error',
success: 'success'
} as const
type Badges = {
[K in BadgeVariantProp]: {
[O in BadgeTypeProp]: string | string[]
}
}
const badges: Badges = Object.assign(
{},
...(Object.keys(VARIANT_COLORS) as Array<BadgeVariantProp>).map(variant => {
const color = VARIANT_COLORS[variant]
return {
[variant]: {
outline: [
'py-px border',
`border-${color}-500 text-${color}-600 dark:border-${color}-200 dark:text-${color}-200`
],
subtle: ['py-0.5', `bg-${color}-100 text-${color}-600`],
solid: ['py-0.5', color === 'gray' ? `text-primary-200 bg-gray-800` : `text-white bg-${color}-500`]
}
}
})
)
const defaultClass = 'inline-block h-5.5 px-1.5 rounded-sm leading-normal'
const Badge = forwardRef<HTMLDivElement, Props>(function Badge(
{className, children, variant = 'primary', type = 'outline', truncate = false, ...props},
ref
) {
const classes = cn(defaultClass, badges[variant]?.[type], truncate && 'truncate', className)
return (
<div className={classes} ref={ref} {...props}>
<Text bold size="xs" as="p" className={cn(truncate && 'truncate')}>
{children}
</Text>
</div>
)
})<|fim▁hole|><|fim▁end|> |
export {Badge} |
<|file_name|>quest.ts<|end_file_name|><|fim▁begin|>import Convolvr from "../../model/world";
import Component from "../../model/component";
export default class QuestSystem {
private world: Convolvr;
constructor (world: Convolvr) {
this.world = world
}
init(component: Component) {
let attr = component.attrs.quest,
state: any = {}
//TODO: implement
return state<|fim▁hole|><|fim▁end|> |
}
} |
<|file_name|>context_managers.py<|end_file_name|><|fim▁begin|>"""useful context managers"""
from contextlib import suppress
with suppress(ModuleNotFoundError):
from lag import *
import os
import contextlib
def clog(*args, condition=True, log_func=print, **kwargs):
if condition:
return log_func(*args, **kwargs)
@contextlib.contextmanager
def cd(newdir, verbose=True):<|fim▁hole|> """Change your working directory, do stuff, and change back to the original"""
_clog = partial(clog, condition=verbose, log_func=print)
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
_clog(f'cd {newdir}')
yield
finally:
_clog(f'cd {prevdir}')
os.chdir(prevdir)
# from pathlib import Path
# _clog("Called before cd", Path().absolute())
# with cd(Path.home()):
# if verbose: print("Called under cd", Path().absolute())
# _clog("Called after cd and same as before", Path().absolute())<|fim▁end|> | |
<|file_name|>timeline.js<|end_file_name|><|fim▁begin|>var events = {};
function showEvent(e) {
eid = e.getAttribute('data-event-id');
fid = e.getAttribute('data-frame-id');
var url = '?view=event&eid='+eid+'&fid='+fid;
url += filterQuery;
window.location.href = url;
//video element is blocking video elements elsewhere in chrome possible interaction with mouseover event?
//FIXME unless an exact cause can be determined should store all video controls and do something to the other controls when we want to load a new video seek etc or whatever may block
/*var vid= $('preview');
vid.oncanplay=null;
// vid.currentTime=vid.currentTime-0.1;
vid.pause();*/
}
function createEventHtml(zm_event, frame) {
var eventHtml = new Element('div');
if ( zm_event.Archived > 0 ) {
eventHtml.addClass('archived');
}
new Element('p').inject(eventHtml).set('text', monitors[zm_event.MonitorId].Name);
new Element('p').inject(eventHtml).set('text', zm_event.Name+(frame?('('+frame.FrameId+')'):''));
new Element('p').inject(eventHtml).set('text', zm_event.StartTime+' - '+zm_event.Length+'s');
new Element('p').inject(eventHtml).set('text', zm_event.Cause);
if ( event.Notes ) {
new Element('p').inject(eventHtml).set('text', event.Notes);
}
if ( event.Archived > 0 ) {
new Element('p').inject(eventHtml).set( 'text', archivedString);
}
return eventHtml;
}
function showEventDetail( eventHtml ) {
$('instruction').addClass( 'hidden' );
$('eventData').empty();
$('eventData').adopt( eventHtml );
$('eventData').removeClass( 'hidden' );
}
function eventDataResponse( respObj, respText ) {
var zm_event = respObj.event;
if ( !zm_event ) {
console.log('Null event');
return;
}
events[zm_event.Id] = zm_event;
if ( respObj.loopback ) {
requestFrameData(zm_event.Id, respObj.loopback);
}
}
function frameDataResponse( respObj, respText ) {
var frame = respObj.frameimage;
if ( !frame.FrameId ) {
console.log('Null frame');
return;
}
var zm_event = events[frame.EventId];
if ( !zm_event ) {
console.error('No event '+frame.eventId+' found');
return;
}
if ( !zm_event['frames'] ) {
console.log("No frames data in event response");
console.log(zm_event);
console.log(respObj);
zm_event['frames'] = {};
}
zm_event['frames'][frame.FrameId] = frame;
zm_event['frames'][frame.FrameId]['html'] = createEventHtml( zm_event, frame );
showEventData(frame.EventId, frame.FrameId);
}
function showEventData(eventId, frameId) {
if ( events[eventId] ) {
var zm_event = events[eventId];
if ( zm_event['frames'] ) {
if ( zm_event['frames'][frameId] ) {
showEventDetail( zm_event['frames'][frameId]['html'] );
var imagePath = 'index.php?view=image&eid='+eventId+'&fid='+frameId;
var videoName = zm_event.DefaultVideo;
loadEventImage( imagePath, eventId, frameId, zm_event.Width, zm_event.Height, zm_event.Frames/zm_event.Length, videoName, zm_event.Length, zm_event.StartTime, monitors[zm_event.MonitorId]);
return;
} else {
console.log('No frames for ' + frameId);
}
} else {
console.log('No frames');
}
} else {
console.log('No event for ' + eventId);
}
}
var eventQuery = new Request.JSON({
url: thisUrl,
method: 'get',
timeout: AJAX_TIMEOUT,
link: 'cancel',
onSuccess: eventDataResponse
});
var frameQuery = new Request.JSON({
url: thisUrl,
method: 'get',
timeout: AJAX_TIMEOUT,
link: 'cancel',
onSuccess: frameDataResponse
});
function requestFrameData( eventId, frameId ) {
if ( !events[eventId] ) {
eventQuery.options.data = "view=request&request=status&entity=event&id="+eventId+"&loopback="+frameId;
eventQuery.send();
} else {
frameQuery.options.data = "view=request&request=status&entity=frameimage&id[0]="+eventId+"&id[1]="+frameId;
frameQuery.send();
}
}
function previewEvent(slot) {
eventId = slot.getAttribute('data-event-id');
frameId = slot.getAttribute('data-frame-id');
if ( events[eventId] ) {
showEventData(eventId, frameId);
} else {
requestFrameData(eventId, frameId);
}
}
function loadEventImage( imagePath, eid, fid, width, height, fps, videoName, duration, startTime, Monitor ) {
var vid = $('preview');
var imageSrc = $('imageSrc');
if ( videoName && vid ) {
vid.show();
imageSrc.hide();
var newsource=imagePath.slice(0, imagePath.lastIndexOf('/'))+'/'+videoName;
//console.log(newsource);
//console.log(sources[0].src.slice(-newsource.length));
if ( newsource != vid.currentSrc.slice(-newsource.length) || vid.readyState == 0 ) {
//console.log("loading new");
//it is possible to set a long source list here will that be unworkable?
var sources = vid.getElementsByTagName('source');
sources[0].src = newsource;
var tracks = vid.getElementsByTagName('track');
if (tracks.length) {
tracks[0].parentNode.removeChild(tracks[0]);
}
vid.load();
addVideoTimingTrack(vid, Monitor.LabelFormat, Monitor.Name, duration, startTime);
vid.currentTime = fid/fps;
} else {
if ( ! vid.seeking ) {
vid.currentTime=fid/fps;
}
}
} else {
if ( vid ) vid.hide();
imageSrc.show();
imageSrc.setProperty('src', imagePath);
imageSrc.setAttribute('data-event-id', eid);
imageSrc.setAttribute('data-frame-id', fid);
imageSrc.onclick=window['showEvent'].bind(imageSrc, imageSrc);
}
var eventData = $('eventData');<|fim▁hole|>
function tlZoomBounds( minTime, maxTime ) {
location.replace('?view='+currentView+filterQuery+'&minTime='+minTime+'&maxTime='+maxTime);
}
function tlZoomOut() {
location.replace('?view='+currentView+filterQuery+'&midTime='+midTime+'&range='+zoom_range);
}
function tlPanLeft() {
location.replace('?view='+currentView+filterQuery+'&midTime='+minTime+'&range='+range);
}
function tlPanRight() {
location.replace('?view='+currentView+filterQuery+'&midTime='+maxTime+'&range='+range);
}
window.addEventListener("DOMContentLoaded", function() {
document.querySelectorAll("div.event").forEach(function(el) {
el.onclick = window[el.getAttribute('data-on-click-this')].bind(el, el);
el.onmouseover = window[el.getAttribute('data-on-mouseover-this')].bind(el, el);
});
document.querySelectorAll("div.activity").forEach(function(el) {
el.onclick = window[el.getAttribute('data-on-click-this')].bind(el, el);
el.onmouseover = window[el.getAttribute('data-on-mouseover-this')].bind(el, el);
});
});<|fim▁end|> | eventData.removeEvent('click');
eventData.addEvent('click', showEvent.pass());
} |
<|file_name|>version.go<|end_file_name|><|fim▁begin|>package network
import "github.com/Azure/azure-sdk-for-go/version"
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.<|fim▁hole|>// UserAgent returns the UserAgent string to use when sending http.Requests.
func UserAgent() string {
return "Azure-SDK-For-Go/" + Version() + " network/2020-11-01"
}
// Version returns the semantic version (see http://semver.org) of the client.
func Version() string {
return version.Number
}<|fim▁end|> | // Changes may cause incorrect behavior and will be lost if the code is regenerated.
|
<|file_name|>hash_check.py<|end_file_name|><|fim▁begin|>from flask_bcrypt import generate_password_hash
<|fim▁hole|># 0.25 and 0.5 seconds to run.
generate_password_hash('password1', 8)<|fim▁end|> |
# Change the number of rounds (second argument) until it takes between
|
<|file_name|>init.go<|end_file_name|><|fim▁begin|>package command
import (
"fmt"
"io/ioutil"
"os"
"strings"
)
const (
// DefaultInitName is the default name we use when
// initializing the example file
DefaultInitName = "example.nomad"
)
// InitCommand generates a new job template that you can customize to your
// liking, like vagrant init
type InitCommand struct {
Meta
}
func (c *InitCommand) Help() string {
helpText := `
Usage: nomad init
Creates an example job file that can be used as a starting
point to customize further.
`
return strings.TrimSpace(helpText)
}
func (c *InitCommand) Synopsis() string {
return "Create an example job file"<|fim▁hole|>
func (c *InitCommand) Run(args []string) int {
// Check if the file already exists
_, err := os.Stat(DefaultInitName)
if err == nil || !os.IsNotExist(err) {
c.Ui.Error(fmt.Sprintf("Job '%s' already exists", DefaultInitName))
return 1
} else if !os.IsNotExist(err) {
c.Ui.Error(fmt.Sprintf("Failed to stat '%s': %v", DefaultInitName, err))
return 1
}
// Write out the example
err = ioutil.WriteFile(DefaultInitName, []byte(defaultJob), 0660)
if err != nil {
c.Ui.Error(fmt.Sprintf("Failed to write '%s': %v", DefaultInitName, err))
return 1
}
// Success
c.Ui.Output(fmt.Sprintf("Example job file written to %s", DefaultInitName))
return 0
}
const defaultJob = `
# There can only be a single job definition per file.
# Create a job with ID and Name 'example'
job "example" {
# Run the job in the global region, which is the default.
# region = "global"
# Specify the datacenters within the region this job can run in.
datacenters = ["dc1"]
# Service type jobs optimize for long-lived services. This is
# the default but we can change to batch for short-lived tasks.
# type = "service"
# Priority controls our access to resources and scheduling priority.
# This can be 1 to 100, inclusively, and defaults to 50.
# priority = 50
# Restrict our job to only linux. We can specify multiple
# constraints as needed.
constraint {
attribute = "$attr.kernel.name"
value = "linux"
}
# Configure the job to do rolling updates
update {
# Stagger updates every 10 seconds
stagger = "10s"
# Update a single task at a time
max_parallel = 1
}
# Create a 'cache' group. Each task in the group will be
# scheduled onto the same machine.
group "cache" {
# Control the number of instances of this groups.
# Defaults to 1
# count = 1
# Define a task to run
task "redis" {
# Use Docker to run the task.
driver = "docker"
# Configure Docker driver with the image
config {
image = "redis:latest"
}
# We must specify the resources required for
# this task to ensure it runs on a machine with
# enough capacity.
resources {
cpu = 500 # 500 Mhz
memory = 256 # 256MB
network {
mbits = 10
dynamic_ports = ["redis"]
}
}
}
}
}
`<|fim▁end|> | } |
<|file_name|>dircolors.rs<|end_file_name|><|fim▁begin|>#![crate_name = "uu_dircolors"]
// This file is part of the uutils coreutils package.
//
// (c) Jian Zeng <anonymousknight96@gmail.com>
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
//
extern crate glob;
#[macro_use]
extern crate uucore;
use std::fs::File;
use std::io::{BufRead, BufReader, Write};
use std::borrow::Borrow;
use std::env;
static SYNTAX: &'static str = "[OPTION]... [FILE]";
static SUMMARY: &'static str = "Output commands to set the LS_COLORS environment variable.";
static LONG_HELP: &'static str = "
If FILE is specified, read it to determine which colors to use for which
file types and extensions. Otherwise, a precompiled database is used.
For details on the format of these files, run 'dircolors --print-database'
";
mod colors;
use colors::INTERNAL_DB;
#[derive(PartialEq, Debug)]
pub enum OutputFmt {
Shell,
CShell,
Unknown,
}
pub fn guess_syntax() -> OutputFmt {
use std::path::Path;
match env::var("SHELL") {
Ok(ref s) if !s.is_empty() => {
let shell_path: &Path = s.as_ref();
if let Some(name) = shell_path.file_name() {
if name == "csh" || name == "tcsh" {
OutputFmt::CShell
} else {
OutputFmt::Shell
}
} else {
OutputFmt::Shell
}
}
_ => OutputFmt::Unknown,
}
}
pub fn uumain(args: Vec<String>) -> i32 {
let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)
.optflag("b", "sh", "output Bourne shell code to set LS_COLORS")
.optflag("",
"bourne-shell",
"output Bourne shell code to set LS_COLORS")
.optflag("c", "csh", "output C shell code to set LS_COLORS")
.optflag("", "c-shell", "output C shell code to set LS_COLORS")
.optflag("p", "print-database", "print the byte counts")
.parse(args);
if (matches.opt_present("csh") || matches.opt_present("c-shell") ||
matches.opt_present("sh") || matches.opt_present("bourne-shell")) &&
matches.opt_present("print-database") {
disp_err!("the options to output dircolors' internal database and\nto select a shell \
syntax are mutually exclusive");
return 1;
}
if matches.opt_present("print-database") {
if !matches.free.is_empty() {
disp_err!("extra operand ‘{}’\nfile operands cannot be combined with \
--print-database (-p)",
matches.free[0]);
return 1;
}
println!("{}", INTERNAL_DB);
return 0;
}
let mut out_format = OutputFmt::Unknown;
if matches.opt_present("csh") || matches.opt_present("c-shell") {
out_format = OutputFmt::CShell;
} else if matches.opt_present("sh") || matches.opt_present("bourne-shell") {
out_format = OutputFmt::Shell;
}
if out_format == OutputFmt::Unknown {
match guess_syntax() {
OutputFmt::Unknown => {
show_info!("no SHELL environment variable, and no shell type option given");
return 1;
}
fmt => out_format = fmt,
}
}
let result;
if matches.free.is_empty() {
result = parse(INTERNAL_DB.lines(), out_format, "")
} else {
if matches.free.len() > 1 {
disp_err!("extra operand ‘{}’", matches.free[1]);
return 1;
}
match File::open(matches.free[0].as_str()) {
Ok(f) => {
let fin = BufReader::new(f);
result = parse(fin.lines().filter_map(|l| l.ok()),
out_format,
matches.free[0].as_str())
}
Err(e) => {
show_info!("{}: {}", matches.free[0], e);
return 1;
}
}
}
match result {
Ok(s) => {
println!("{}", s);
0
}
Err(s) => {
show_info!("{}", s);
1
}
}
}
pub trait StrUtils {
/// Remove comments and trim whitespaces
fn purify(&self) -> &Self;
/// Like split_whitespace() but only produce 2 components
fn split_two(&self) -> (&str, &str);
fn fnmatch(&self, pattern: &str) -> bool;
}
impl StrUtils for str {
fn purify(&self) -> &Self {
let mut line = self;
for (n, c) in self.chars().enumerate() {
if c != '#' {
continue;
}
// Ignore if '#' is at the beginning of line
if n == 0 {
line = &self[..0];
break;
}
// Ignore the content after '#'
// only if it is preceded by at least one whitespace
if self.chars().nth(n - 1).unwrap().is_whitespace() {
line = &self[..n];
}
}
line.trim()
}
fn split_two(&self) -> (&str, &str) {
if let Some(b) = self.find(char::is_whitespace) {
let key = &self[..b];
if let Some(e) = self[b..].find(|c: char| !c.is_whitespace()) {
(key, &self[b + e..])
} else {
(key, "")
}
} else {
("", "")
}
}
fn fnmatch(&self, pat: &str) -> bool {
pat.parse::<glob::Pattern>().unwrap().matches(self)
}
}
#[derive(PartialEq)]
enum ParseState {
Global,
Matched,
Continue,
Pass,
}
use std::collections::HashMap;
fn parse<T>(lines: T, fmt: OutputFmt, fp: &str) -> Result<String, String>
where T: IntoIterator,
T::Item: Borrow<str>
{
// 1440 > $(dircolors | wc -m)
let mut result = String::with_capacity(1440);
match fmt {
OutputFmt::Shell => result.push_str("LS_COLORS='"),
OutputFmt::CShell => result.push_str("setenv LS_COLORS '"),
_ => unreachable!(),
}
let mut table: HashMap<&str, &str> = HashMap::with_capacity(48);
table.insert("normal", "no");
table.insert("norm", "no");
table.insert("file", "fi");
table.insert("reset", "rs");
table.insert("dir", "di");
table.insert("lnk", "ln");
table.insert("link", "ln");
table.insert("symlink", "ln");
table.insert("orphan", "or");
table.insert("missing", "mi");
table.insert("fifo", "pi");
table.insert("pipe", "pi");
table.insert("sock", "so");
table.insert("blk", "bd");
table.insert("block", "bd");
table.insert("chr", "cd");
table.insert("char", "cd");
table.insert("door", "do");
table.insert("exec", "ex");
table.insert("left", "lc");
table.insert("leftcode", "lc");
table.insert("right", "rc");
table.insert("rightcode", "rc");
table.insert("end", "ec");
table.insert("endcode", "ec");
table.insert("suid", "su");
table.insert("setuid", "su");
table.insert("sgid", "sg");
table.insert("setgid", "sg");
table.insert("sticky", "st");
table.insert("other_writable", "ow");
table.insert("owr", "ow");
table.insert("sticky_other_writable", "tw");
table.insert("owt", "tw");
table.insert("capability", "ca");
table.insert("multihardlink", "mh");
table.insert("clrtoeol", "cl");
let term = env::var("TERM").unwrap_or("none".to_owned());
let term = term.as_str();
let mut state = ParseState::Global;
for (num, line) in lines.into_iter().enumerate() {
let num = num + 1;
let line = line.borrow().purify();
if line.is_empty() {<|fim▁hole|>
let (key, val) = line.split_two();
if val.is_empty() {
return Err(format!("{}:{}: invalid line; missing second token", fp, num));
}
let lower = key.to_lowercase();
if lower == "term" {
if term.fnmatch(val) {
state = ParseState::Matched;
} else if state != ParseState::Matched {
state = ParseState::Pass;
}
} else {
if state == ParseState::Matched {
// prevent subsequent mismatched TERM from
// cancelling the input
state = ParseState::Continue;
}
if state != ParseState::Pass {
if key.starts_with(".") {
result.push_str(format!("*{}={}:", key, val).as_str());
} else if key.starts_with("*") {
result.push_str(format!("{}={}:", key, val).as_str());
} else if lower == "options" || lower == "color" || lower == "eightbit" {
// Slackware only. Ignore
} else {
if let Some(s) = table.get(lower.as_str()) {
result.push_str(format!("{}={}:", s, val).as_str());
} else {
return Err(format!("{}:{}: unrecognized keyword {}", fp, num, key));
}
}
}
}
}
match fmt {
OutputFmt::Shell => result.push_str("';\nexport LS_COLORS"),
OutputFmt::CShell => result.push('\''),
_ => unreachable!(),
}
Ok(result)
}<|fim▁end|> | continue;
} |
<|file_name|>ticTacToe.py<|end_file_name|><|fim▁begin|>import random
import os
# TicTacToe
def createNewField():
result = []
for i in range(3):
tmp = []
for i2 in range(3):
tmp.append(' ')
result.append(tmp)
return result
def printField(field):
print ''
for element in field:
print element
print ''
def isFieldFull(field):
occupiedPlaces = 0
for row in field:
for place in row:
if place != ' ':
occupiedPlaces += 1
elif place == ' ':
return False
if occupiedPlaces == 9:
return True
def KI_Turn(field):
fieldStatus = isFieldFull(field)
if fieldStatus == True:
return field
result = field
running = True
"It is the turn of the computer."
while running == True:
row = random.randint(0,2)
column = random.randint(0,2)
if field[row][column] == ' ':
result[row][column] = 'O'
running = False
else:
pass
return result
def USER_Turn(field):
fieldStatus = isFieldFull(field)
if fieldStatus == True:
return field
result = field
running = True
print "User it's your turn"
while running == True:
row = int(raw_input('Which row? '))
column = int(raw_input('Which column? '))
if field[row][column] == ' ':
result[row][column] = 'X'
running = False
else:
print 'This place is occupied!'
return result
def Winner(field):
winner = ''
for row in field:
if row == ['X','X','X']:
winner = 'User'
return winner
elif row == ['O','O','O']:
winner = 'Computer'
return winner
else:
winner = ''
columns = [[],[],[]]
for row in field:
columns[0].append(row[0])
columns[1].append(row[1])
columns[2].append(row[2])
for col in columns:
if col == ['X','X','X']:
winner = 'User'
return winner
elif col == ['O','O','O']:
winner = 'Computer'
return winner
else:
winner = ''
dia1 = [field[0][0],field[1][1],field[2][2]]
dia2 = [field[0][2],field[1][1],field[2][0]]
if dia1 == ['X','X','X'] or dia2 == ['X','X','X']:
winner = 'User'
return winner
elif dia1 == ['O','O','O'] or dia2 == ['O','O','O']:
winner = 'Computer'
return winner
else:
winner = ''
fieldStatus = isFieldFull(field)
if fieldStatus == True:
return "Nobody"
return winner
# Time to play!
userScore = 0
computerScore = 0
answer = ''
while answer != 'q':
print 'User: ' + str(userScore)
print 'Computer: ' + str(computerScore)
print 'Press q to exit or anything else to continue'
answer = raw_input(': ')
if answer == 'q':
break
os.system('clear')
field = createNewField()
win = Winner(field)
turn = 0
while win == '':
if win == 'Nobody':
print 'There is no winner.'
break
turn += 1
print 'Turn: ' + str(turn)
printField(field)
field = USER_Turn(field)
win = Winner(field)
if win == 'User':
break<|fim▁hole|> print 'Turn: ' + str(turn)
printField(field)
field = KI_Turn(field)
win = Winner(field)
if win == 'Computer':
break
os.system('clear')
printField(field)
print 'The winner is: ' + win
if win == 'User':
userScore += (10-turn)
elif win == 'Computer':
computerScore += (10-turn)
print "User: " + str(userScore)
print "Computer: " + str(computerScore)<|fim▁end|> | os.system('clear')
turn += 1 |
<|file_name|>privileges.js<|end_file_name|><|fim▁begin|>/**
* PhilaeCMS
* Copyright (C) 2014 Daniel Budick
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
/**
* These functions check privileges of the user.
* Privileges:
* - isRoot: User is a godlike individual and can write, view and update everything
* - isAdmin: User can write, view and update almost everything
* - isEditor: User can change content (Plaintext and HTML)
* - isDesigner: User can change css
* - canUpload: User can upload data
* - isBackendUser: User can visit the backend
* - isUserManager: User can write, view and update user-data. Even Admin cannot do this. Important for countries with high privacy standards.
* - isIntern: Interns should not be able to destroy anything. They can create content, but cannot write HTML
*/
Privilege = {
/**
* @param privilege is the name of the privilege, that is to check
* @param user is the Userobject which permission is to check
* @returns {boolean} false: the user has not the permission. true, the user has the permission.<|fim▁hole|> return false;
if (user.profile === undefined) {
throwError('User has no profile. This should not happen.');
return false;
}
if (user.profile.privileges === undefined) {
throwError('User has no privileges. This should not happen.');
return false;
}
if (user.profile.privileges.isRoot === true)
return true; //overwrites everything. This user has godlike powers!
if (user.profile.privileges[privilege] === undefined)
return false;
if (user.profile.privileges[privilege] === true)
return true;
return false; //the user has not the sufficient permission
},
/**
* isAdmin: User can write, view and update almost everything
* @param user is the Userobject which permission is to check
* @returns {boolean} false: the user is not an admin.
*/
isAdmin: function (user) {
return this.checkPrivilege('isAdmin', user);
},
/**
* isEditor: User can change content (Plaintext and HTML)
* @param user is the Userobject which permission is to check
* @returns {boolean} false: the user cannot change content
*/
isEditor: function (user) {
return this.checkPrivilege('isEditor', user);
},
/**
* isDesigner: User can change CSS
* @param user is the Userobject which permission is to check
* @returns {boolean} false: the user cannot change CSS
*/
isDesigner: function (user) {
return this.checkPrivilege('isDesigner', user);
},
/**
* canUpload: User can upload data
* @param user is the Userobject which permission is to check
* @returns {boolean} false: the user cannot upload data
*/
canUpload: function (user) {
return this.checkPrivilege('canUpload', user);
},
/**
* isBackendUser: User can visit the backend
* @param user is the Userobject which permission is to check
* @returns {boolean} false: the user cannot access the backend
*/
isBackendUser: function (user) {
return this.checkPrivilege('isBackendUser', user);
},
/**
* isUserManager: User can write, view and update user-data. Even Admin cannot do this. Important for countries with high privacy standards.
* @param user is the Userobject which permission is to check
* @returns {boolean} false: the user is not an UserManager
*/
isUserManager: function (user) {
return this.checkPrivilege('isUserManager', user);
},
/**
* isIntern: Interns should not be able to destroy anything. They can create content, but cannot write HTML
* @param user is the Userobject which permission is to check
* @returns {boolean} false: the user is not an intern
*/
isIntern: function (user) {
return this.checkPrivilege('isIntern', user);
}
};<|fim▁end|> | */
checkPrivilege: function (privilege, user) {
if (user === null || user === undefined) |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.core import serializers
from rest_framework.response import Response
from django.http import JsonResponse
try:
from urllib import quote_plus # python 2
except:
pass
try:
from urllib.parse import quote_plus # python 3
except:
pass
from django.contrib import messages
from django.contrib.contenttypes.models import ContentType
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.db.models import Q
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from comments.forms import CommentForm
from comments.models import Comment
from .forms import PostForm
from .models import Post
def post_create(request):
if not request.user.is_staff or not request.user.is_superuser:
raise Http404
form = PostForm(request.POST or None, request.FILES or None)
if form.is_valid():<|fim▁hole|> # message success
messages.success(request, "Successfully Created")
return HttpResponseRedirect(instance.get_absolute_url())
context = {
"form": form,
}
return render(request, "post_form.html", context)
def post_detail(request, slug=None):
instance = get_object_or_404(Post, slug=slug)
if instance.publish > timezone.now().date() or instance.draft:
if not request.user.is_staff or not request.user.is_superuser:
raise Http404
share_string = quote_plus(instance.content)
initial_data = {
"content_type": instance.get_content_type,
"object_id": instance.id
}
form = CommentForm(request.POST or None, initial=initial_data)
if form.is_valid() and request.user.is_authenticated():
c_type = form.cleaned_data.get("content_type")
content_type = ContentType.objects.get(model=c_type)
obj_id = form.cleaned_data.get('object_id')
content_data = form.cleaned_data.get("content")
parent_obj = None
try:
parent_id = int(request.POST.get("parent_id"))
except:
parent_id = None
if parent_id:
parent_qs = Comment.objects.filter(id=parent_id)
if parent_qs.exists() and parent_qs.count() == 1:
parent_obj = parent_qs.first()
new_comment, created = Comment.objects.get_or_create(
user=request.user,
content_type=content_type,
object_id=obj_id,
content=content_data,
parent=parent_obj,
)
return HttpResponseRedirect(new_comment.content_object.get_absolute_url())
comments = instance.comments
context = {
"title": instance.title,
"instance": instance,
"share_string": share_string,
"comments": comments,
"comment_form": form,
}
return render(request, "post_detail.html", context)
def post_list(request):
today = timezone.now().date()
queryset_list = Post.objects.active() # .order_by("-timestamp")
if request.user.is_staff or request.user.is_superuser:
queryset_list = Post.objects.all()
query = request.GET.get("q")
if query:
queryset_list = queryset_list.filter(
Q(title__icontains=query) |
Q(content__icontains=query) |
Q(user__first_name__icontains=query) |
Q(user__last_name__icontains=query)
).distinct()
paginator = Paginator(queryset_list, 8) # Show 25 contacts per page
page_request_var = "page"
page = request.GET.get(page_request_var)
try:
queryset = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
queryset = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
queryset = paginator.page(paginator.num_pages)
context = {
"object_list": queryset,
"title": "List",
"page_request_var": page_request_var,
"today": today,
}
return render(request, "post_list.html", context)
def post_update(request, slug=None):
if not request.user.is_staff or not request.user.is_superuser:
raise Http404
instance = get_object_or_404(Post, slug=slug)
form = PostForm(request.POST or None,
request.FILES or None, instance=instance)
if form.is_valid():
instance = form.save(commit=False)
instance.save()
messages.success(request, "<a href='#'>Item</a> Saved",
extra_tags='html_safe')
return HttpResponseRedirect(instance.get_absolute_url())
context = {
"title": instance.title,
"instance": instance,
"form": form,
}
return render(request, "post_form.html", context)
def post_delete(request, slug=None):
if not request.user.is_staff or not request.user.is_superuser:
raise Http404
instance = get_object_or_404(Post, slug=slug)
instance.delete()
messages.success(request, "Successfully deleted")
return redirect("posts:list")<|fim▁end|> | instance = form.save(commit=False)
instance.user = request.user
instance.save() |
<|file_name|>01a_quick_example.rs<|end_file_name|><|fim▁begin|>extern crate clap;
use clap::{App, SubCommand};
fn main() {
// This example shows how to create an application with several arguments using usage strings, which can be
// far less verbose that shown in 01b_QuickExample.rs, but is more readable. The downside is you cannot set
// the more advanced configuration options using this method (well...actually you can, you'll see ;) )
//
// The example below is functionally identical to the 01b_quick_example.rs and 01c_quick_example.rs
//
// Create an application with 5 possible arguments (2 auto generated) and 2 subcommands (1 auto generated)
// - A config file
// + Uses "-c filename" or "--config filename"
// - An output file
// + A positional argument (i.e. "$ myapp output_filename")
// - A debug flag
// + Uses "-d" or "--debug"
// + Allows multiple occurrences of such as "-dd" (for vary levels of debugging, as an example)
// - A help flag (automatically generated by clap)
// + Uses "-h" or "--help" (Only autogenerated if you do NOT specify your own "-h" or "--help")
// - A version flag (automatically generated by clap)
// + Uses "-V" or "--version" (Only autogenerated if you do NOT specify your own "-V" or "--version")
// - A subcommand "test" (subcommands behave like their own apps, with their own arguments
// + Used by "$ myapp test" with the following arguments
// > A list flag
// = Uses "-l" (usage is "$ myapp test -l"
// > A help flag (automatically generated by clap
// = Uses "-h" or "--help" (full usage "$ myapp test -h" or "$ myapp test --help")
// > A version flag (automatically generated by clap
// = Uses "-V" or "--version" (full usage "$ myapp test -V" or "$ myapp test --version")
// - A subcommand "help" (automatically generated by clap because we specified a subcommand of our own)
// + Used by "$ myapp help" (same functionality as "-h" or "--help")
let matches = App::new("MyApp")
.version("1.0")
.author("Kevin K. <kbknapp@gmail.com>")
.about("Does awesome things")
.args_from_usage("-c, --config=[FILE] 'Sets a custom config file'
<output> 'Sets an optional output file'
-d... 'Turn debugging information on'")
.subcommand(SubCommand::with_name("test")
.about("does testing things")
.arg_from_usage("-l, --list 'lists test values'"))
.get_matches();
// You can check the value provided by positional arguments, or option arguments
if let Some(o) = matches.value_of("output") {
println!("Value for output: {}", o);
}
if let Some(c) = matches.value_of("config") {
println!("Value for config: {}", c);
}
// You can see how many times a particular flag or argument occurred
// Note, only flags can have multiple occurrences
match matches.occurrences_of("d") {
0 => println!("Debug mode is off"),
1 => println!("Debug mode is kind of on"),
2 => println!("Debug mode is on"),
3 | _ => println!("Don't be crazy"),
}
// You can check for the existence of subcommands, and if found use their
// matches just as you would the top level app
if let Some(matches) = matches.subcommand_matches("test") {
// "$ myapp test" was run
if matches.is_present("list") {
// "$ myapp test -l" was run
println!("Printing testing lists...");
} else {
println!("Not printing testing lists...");
}
}
<|fim▁hole|> // Continued program logic goes here...
}<|fim▁end|> | |
<|file_name|>Component.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
return UIComponent.extend("sap.ui.webc.main.sample.FileUploader.Component", {
metadata: {
manifest: "json"
}
});
});<|fim▁end|> | sap.ui.define([
"sap/ui/core/UIComponent"
], function(UIComponent) {
"use strict"; |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![crate_type="dylib"]
#![feature(plugin_registrar, rustc_private)]
extern crate syntax;
extern crate rustc;
extern crate rustc_plugin;
extern crate rand;
use rand::Rng;
use std::ops::Deref;
use std::rc::Rc;
use syntax::ptr::P;
use syntax::parse::token;
use syntax::tokenstream::TokenTree;
use syntax::ast::{Expr, LitKind, Ident};
use syntax::ext::quote::rt::Span;
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
use syntax::ext::build::AstBuilder;
use rustc_plugin::Registry;
fn encrypt_str(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) -> Box<MacResult + 'static> {
let text = match args[0] {
TokenTree::Token(_, token::Literal(token::Lit::Str_(s), _)) => s,
_ => {
cx.span_err(sp, "argument must be a string literal");
return DummyResult::any(sp);
}
};
let text = text.as_str();
let text = text.deref();
let (cipher_text, key) = encrypt_str_with_rand_key(text);
let cipher_text_expr = get_expr_from_bytes(cx, sp, cipher_text);
let key_expr = get_expr_from_bytes(cx, sp, key);
// Generate a call to the decryption function in string-decryption
MacEager::expr(cx.expr_call(
sp,
cx.expr_path(cx.path_global(sp, vec![Ident::from_str("d")])),
vec![cx.expr_tuple(sp, vec![cipher_text_expr, key_expr])]
))
}
fn encrypt_str_with_rand_key(text: &str) -> (Vec<u8>, Vec<u8>) {
let text = String::from(text);
let mut text = text.into_bytes();
// Generate a random key using the rand crate
let mut random = rand::thread_rng();
let mut key = vec![0; text.len()];
random.fill_bytes(&mut key);
// Encrypt the text using XOR
for i in 0..text.len() {
text[i] = text[i] ^ key[i];
}
(text, key)
}
fn get_expr_from_bytes(cx: &mut ExtCtxt, sp: Span, bytes: Vec<u8>) -> P<Expr> {
cx.expr_lit(sp, LitKind::ByteStr(Rc::new(bytes)))
}<|fim▁hole|>
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_macro("e", encrypt_str);
}<|fim▁end|> | |
<|file_name|>json2.js<|end_file_name|><|fim▁begin|>/*
json2.js
2013-05-26
Public Domain.
NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
See http://www.JSON.org/js.html
This code should be minified before deployment.
See http://javascript.crockford.com/jsmin.html
USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
NOT CONTROL.
This file creates a global JSON object containing two methods: stringify
and parse.
JSON.stringify(value, replacer, space)
value any JavaScript value, usually an object or array.
replacer an optional parameter that determines how object
values are stringified for objects. It can be a
function or an array of strings.
space an optional parameter that specifies the indentation
of nested structures. If it is omitted, the text will
be packed without extra whitespace. If it is a number,
it will specify the number of spaces to indent at each
level. If it is a string (such as '\t' or ' '),
it contains the characters used to indent at each level.
This method produces a JSON text from a JavaScript value.
When an object value is found, if the object contains a toJSON
method, its toJSON method will be called and the result will be
stringified. A toJSON method does not serialize: it returns the
value represented by the name/value pair that should be serialized,
or undefined if nothing should be serialized. The toJSON method
will be passed the key associated with the value, and this will be
bound to the value
For example, this would serialize Dates as ISO strings.
Date.prototype.toJSON = function (key) {
function f(n) {
// Format integers to have at least two digits.
return n < 10 ? '0' + n : n;
}
return this.getUTCFullYear() + '-' +
f(this.getUTCMonth() + 1) + '-' +
f(this.getUTCDate()) + 'T' +
f(this.getUTCHours()) + ':' +
f(this.getUTCMinutes()) + ':' +
f(this.getUTCSeconds()) + 'Z';
};
You can provide an optional replacer method. It will be passed the
key and value of each member, with this bound to the containing
object. The value that is returned from your method will be
serialized. If your method returns undefined, then the member will
be excluded from the serialization.
If the replacer parameter is an array of strings, then it will be
used to select the members to be serialized. It filters the results
such that only members with keys listed in the replacer array are
stringified.
Values that do not have JSON representations, such as undefined or
functions, will not be serialized. Such values in objects will be
dropped; in arrays they will be replaced with null. You can use
a replacer function to replace those with JSON values.
JSON.stringify(undefined) returns undefined.
The optional space parameter produces a stringification of the
value that is filled with line breaks and indentation to make it
easier to read.
If the space parameter is a non-empty string, then that string will
be used for indentation. If the space parameter is a number, then
the indentation will be that many spaces.
Example:
text = JSON.stringify(['e', {pluribus: 'unum'}]);
// text is '["e",{"pluribus":"unum"}]'
text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
// text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
text = JSON.stringify([new Date()], function (key, value) {
return this[key] instanceof Date ?
'Date(' + this[key] + ')' : value;
});
// text is '["Date(---current time---)"]'
JSON.parse(text, reviver)
This method parses a JSON text to produce an object or array.
It can throw a SyntaxError exception.
The optional reviver parameter is a function that can filter and
transform the results. It receives each of the keys and values,
and its return value is used instead of the original value.
If it returns what it received, then the structure is not modified.
If it returns undefined then the member is deleted.
Example:
// Parse the text. Values that look like ISO date strings will
// be converted to Date objects.
myData = JSON.parse(text, function (key, value) {
var a;
if (typeof value === 'string') {
a =
/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
if (a) {
return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
+a[5], +a[6]));
}
}
return value;
});
myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
var d;
if (typeof value === 'string' &&
value.slice(0, 5) === 'Date(' &&
value.slice(-1) === ')') {
d = new Date(value.slice(5, -1));
if (d) {
return d;
}
}
return value;
});
This is a reference implementation. You are free to copy, modify, or
redistribute.
*/
/*jslint evil: true, regexp: true */
/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
lastIndex, length, parse, prototype, push, replace, slice, stringify,
test, toJSON, toString, valueOf
*/
// Create a JSON object only if one does not already exist. We create the
// methods in a closure to avoid creating global variables.
if (typeof JSON !== 'object') {
JSON = {};
}
(function () {
'use strict';
function f(n) {
// Format integers to have at least two digits.
return n < 10 ? '0' + n : n;
}
if (typeof Date.prototype.toJSON !== 'function') {
Date.prototype.toJSON = function () {
return isFinite(this.valueOf())
? this.getUTCFullYear() + '-' +
f(this.getUTCMonth() + 1) + '-' +
f(this.getUTCDate()) + 'T' +
f(this.getUTCHours()) + ':' +
f(this.getUTCMinutes()) + ':' +
f(this.getUTCSeconds()) + 'Z'
: null;
};
String.prototype.toJSON =
Number.prototype.toJSON =
Boolean.prototype.toJSON = function () {
return this.valueOf();
};
}
var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
gap,
indent,
meta = { // table of character substitutions
'\b': '\\b',
'\t': '\\t',
'\n': '\\n',
'\f': '\\f',
'\r': '\\r',
'"': '\\"',
'\\': '\\\\'
},
rep;
function quote(string) {
// If the string contains no control characters, no quote characters, and no
// backslash characters, then we can safely slap some quotes around it.
// Otherwise we must also replace the offending characters with safe escape
// sequences.
escapable.lastIndex = 0;
return escapable.test(string) ? '"' + string.replace(escapable, function (a) {
var c = meta[a];
return typeof c === 'string'
? c
: '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
}) + '"' : '"' + string + '"';
}
function str(key, holder) {
// Produce a string from holder[key].
var i, // The loop counter.
k, // The member key.
v, // The member value.<|fim▁hole|> value = holder[key];
// If the value has a toJSON method, call it to obtain a replacement value.
if (value && typeof value === 'object' &&
typeof value.toJSON === 'function') {
value = value.toJSON(key);
}
// If we were called with a replacer function, then call the replacer to
// obtain a replacement value.
if (typeof rep === 'function') {
value = rep.call(holder, key, value);
}
// What happens next depends on the value's type.
switch (typeof value) {
case 'string':
return quote(value);
case 'number':
// JSON numbers must be finite. Encode non-finite numbers as null.
return isFinite(value) ? String(value) : 'null';
case 'boolean':
case 'null':
// If the value is a boolean or null, convert it to a string. Note:
// typeof null does not produce 'null'. The case is included here in
// the remote chance that this gets fixed someday.
return String(value);
// If the type is 'object', we might be dealing with an object or an array or
// null.
case 'object':
// Due to a specification blunder in ECMAScript, typeof null is 'object',
// so watch out for that case.
if (!value) {
return 'null';
}
// Make an array to hold the partial results of stringifying this object value.
gap += indent;
partial = [];
// Is the value an array?
if (Object.prototype.toString.apply(value) === '[object Array]') {
// The value is an array. Stringify every element. Use null as a placeholder
// for non-JSON values.
length = value.length;
for (i = 0; i < length; i += 1) {
partial[i] = str(i, value) || 'null';
}
// Join all of the elements together, separated with commas, and wrap them in
// brackets.
v = partial.length === 0
? '[]'
: gap
? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']'
: '[' + partial.join(',') + ']';
gap = mind;
return v;
}
// If the replacer is an array, use it to select the members to be stringified.
if (rep && typeof rep === 'object') {
length = rep.length;
for (i = 0; i < length; i += 1) {
if (typeof rep[i] === 'string') {
k = rep[i];
v = str(k, value);
if (v) {
partial.push(quote(k) + (gap ? ': ' : ':') + v);
}
}
}
} else {
// Otherwise, iterate through all of the keys in the object.
for (k in value) {
if (Object.prototype.hasOwnProperty.call(value, k)) {
v = str(k, value);
if (v) {
partial.push(quote(k) + (gap ? ': ' : ':') + v);
}
}
}
}
// Join all of the member texts together, separated with commas,
// and wrap them in braces.
v = partial.length === 0
? '{}'
: gap
? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}'
: '{' + partial.join(',') + '}';
gap = mind;
return v;
}
}
// If the JSON object does not yet have a stringify method, give it one.
if (typeof JSON.stringify !== 'function') {
JSON.stringify = function (value, replacer, space) {
// The stringify method takes a value and an optional replacer, and an optional
// space parameter, and returns a JSON text. The replacer can be a function
// that can replace values, or an array of strings that will select the keys.
// A default replacer method can be provided. Use of the space parameter can
// produce text that is more easily readable.
var i;
gap = '';
indent = '';
// If the space parameter is a number, make an indent string containing that
// many spaces.
if (typeof space === 'number') {
for (i = 0; i < space; i += 1) {
indent += ' ';
}
// If the space parameter is a string, it will be used as the indent string.
} else if (typeof space === 'string') {
indent = space;
}
// If there is a replacer, it must be a function or an array.
// Otherwise, throw an error.
rep = replacer;
if (replacer && typeof replacer !== 'function' &&
(typeof replacer !== 'object' ||
typeof replacer.length !== 'number')) {
throw new Error('JSON.stringify');
}
// Make a fake root object containing our value under the key of ''.
// Return the result of stringifying the value.
return str('', { '': value });
};
}
// If the JSON object does not yet have a parse method, give it one.
if (typeof JSON.parse !== 'function') {
    JSON.parse = function (text, reviver) {

// The parse method takes a text and an optional reviver function, and returns
// a JavaScript value if the text is a valid JSON text.

        var j;

        function walk(holder, key) {

// The walk method is used to recursively walk the resulting structure so
// that modifications can be made. Returning undefined from the reviver
// deletes the corresponding member.

            var k, v, value = holder[key];
            if (value && typeof value === 'object') {
                for (k in value) {
                    if (Object.prototype.hasOwnProperty.call(value, k)) {
                        v = walk(value, k);
                        if (v !== undefined) {
                            value[k] = v;
                        } else {
                            delete value[k];
                        }
                    }
                }
            }
            return reviver.call(holder, key, value);
        }

// Parsing happens in four stages. In the first stage, we replace certain
// Unicode characters with escape sequences. JavaScript handles many characters
// incorrectly, either silently deleting them, or treating them as line endings.
// (cx is a file-level RegExp of those problematic characters, defined earlier
// in this file.)

        text = String(text);
        cx.lastIndex = 0;
        if (cx.test(text)) {
            text = text.replace(cx, function (a) {
                return '\\u' +
                    ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
            });
        }

// In the second stage, we run the text against regular expressions that look
// for non-JSON patterns. We are especially concerned with '()' and 'new'
// because they can cause invocation, and '=' because it can cause mutation.
// But just to be safe, we want to reject all unexpected forms.

// We split the second stage into 4 regexp operations in order to work around
// crippling inefficiencies in IE's and Safari's regexp engines. First we
// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
// replace all simple value tokens with ']' characters. Third, we delete all
// open brackets that follow a colon or comma or that begin the text. Finally,
// we look to see that the remaining characters are only whitespace or ']' or
// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.

        if (/^[\],:{}\s]*$/
                .test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@')
                    .replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']')
                    .replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {

// In the third stage we use the eval function to compile the text into a
// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
// in JavaScript: it can begin a block or an object literal. We wrap the text
// in parens to eliminate the ambiguity.

            j = eval('(' + text + ')');

// In the optional fourth stage, we recursively walk the new structure, passing
// each name/value pair to a reviver function for possible transformation.

            return typeof reviver === 'function'
                ? walk({ '': j }, '')
                : j;
        }

// If the text is not JSON parseable, then a SyntaxError is thrown.

        throw new SyntaxError('JSON.parse');
    };
}
} ());<|fim▁end|> | length,
mind = gap,
partial, |
<|file_name|>settings-input-hdmi.js<|end_file_name|><|fim▁begin|>'use strict';
<|fim▁hole|>var React = require('react');
var mui = require('material-ui');
var SvgIcon = mui.SvgIcon;
// Material UI SVG icon component: "settings input HDMI" glyph.
// All props are forwarded unchanged to the underlying SvgIcon.
var ActionSettingsInputHdmi = React.createClass({
  displayName: 'ActionSettingsInputHdmi',

  render: function render() {
    // Single-path HDMI connector outline.
    var hdmiPath = React.createElement('path', {
      d: 'M18 7V4c0-1.1-.9-2-2-2H8c-1.1 0-2 .9-2 2v3H5v6l3 6v3h8v-3l3-6V7h-1zM8 4h8v3h-2V5h-1v2h-2V5h-1v2H8V4z'
    });
    return React.createElement(SvgIcon, this.props, hdmiPath);
  }
});
module.exports = ActionSettingsInputHdmi;
<|file_name|>aptBackend.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Provides an apt backend to PackageKit
Copyright (C) 2007 Ali Sabil <ali.sabil@gmail.com>
Copyright (C) 2007 Tom Parker <palfrey@tevp.net>
Copyright (C) 2008-2009 Sebastian Heinlein <glatzor@ubuntu.com>
Copyright (C) 2012 Martin Pitt <martin.pitt@ubuntu.com>
Licensed under the GNU General Public License Version 2
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = "Sebastian Heinlein <devel@glatzor.de>"
import datetime
import errno
import fcntl
import fnmatch
import functools
import gdbm
import glob
import gzip
import locale
import logging
import logging.handlers
import os
import pty
import re
import signal
import socket
import stat
import string
import subprocess
import sys
import time

import apt
import apt.debfile
import apt_pkg
try:
import pkg_resources
except ImportError:
# no plugin support available
pkg_resources = None
from packagekit.backend import (PackageKitBaseBackend, format_string)
from packagekit import enums
logging.basicConfig(format="%(levelname)s:%(message)s")
pklog = logging.getLogger("PackageKitBackend")
pklog.setLevel(logging.NOTSET)
try:
_syslog = logging.handlers.SysLogHandler("/dev/log",
logging.handlers.SysLogHandler.LOG_DAEMON)
formatter = logging.Formatter('PackageKit: %(levelname)s: %(message)s')
_syslog.setFormatter(formatter)
pklog.addHandler(_syslog)
except:
pass
# Xapian database is optionally used to speed up package description search
XAPIAN_DB_PATH = os.environ.get("AXI_DB_PATH", "/var/lib/apt-xapian-index")
XAPIAN_DB = XAPIAN_DB_PATH + "/index"
XAPIAN_DB_VALUES = XAPIAN_DB_PATH + "/values"
XAPIAN_SUPPORT = False
try:
import xapian
except ImportError:
pass
else:
if os.access(XAPIAN_DB, os.R_OK):
pklog.debug("Use XAPIAN for the search")
XAPIAN_SUPPORT = True
# SoftwareProperties is required to proivde information about repositories
try:
import softwareproperties.SoftwareProperties
except ImportError:
REPOS_SUPPORT = False
else:
REPOS_SUPPORT = True
# Check if update-manager-core is installed to get aware of the
# latest distro releases
try:
from UpdateManager.Core.MetaRelease import MetaReleaseCore
except ImportError:
META_RELEASE_SUPPORT = False
else:
META_RELEASE_SUPPORT = True
# Set a timeout for the changelog download
socket.setdefaulttimeout(2)
# Required for daemon mode
os.putenv("PATH",
"/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin")
# Map Debian sections to the PackageKit group name space
SECTION_GROUP_MAP = {
"admin" :enums.GROUP_ADMIN_TOOLS,
"base" :enums.GROUP_SYSTEM,
"comm" :enums.GROUP_COMMUNICATION,
"devel" :enums.GROUP_PROGRAMMING,
"doc" :enums.GROUP_DOCUMENTATION,
"editors" :enums.GROUP_PUBLISHING,
"electronics" :enums.GROUP_ELECTRONICS,
"embedded" :enums.GROUP_SYSTEM,
"games" :enums.GROUP_GAMES,
"gnome" :enums.GROUP_DESKTOP_GNOME,
"graphics" :enums.GROUP_GRAPHICS,
"hamradio" :enums.GROUP_COMMUNICATION,
"interpreters" :enums.GROUP_PROGRAMMING,
"kde" :enums.GROUP_DESKTOP_KDE,
"libdevel" :enums.GROUP_PROGRAMMING,
"libs" :enums.GROUP_SYSTEM,
"mail" :enums.GROUP_INTERNET,
"math" :enums.GROUP_SCIENCE,
"misc" :enums.GROUP_OTHER,
"net" :enums.GROUP_NETWORK,
"news" :enums.GROUP_INTERNET,
"oldlibs" :enums.GROUP_LEGACY,
"otherosfs" :enums.GROUP_SYSTEM,
"perl" :enums.GROUP_PROGRAMMING,
"python" :enums.GROUP_PROGRAMMING,
"science" :enums.GROUP_SCIENCE,
"shells" :enums.GROUP_SYSTEM,
"sound" :enums.GROUP_MULTIMEDIA,
"tex" :enums.GROUP_PUBLISHING,
"text" :enums.GROUP_PUBLISHING,
"utils" :enums.GROUP_ACCESSORIES,
"web" :enums.GROUP_INTERNET,
"x11" :enums.GROUP_DESKTOP_OTHER,
"unknown" :enums.GROUP_UNKNOWN,
"alien" :enums.GROUP_UNKNOWN,
"translations" :enums.GROUP_LOCALIZATION,
"metapackages" :enums.GROUP_COLLECTIONS,
}
# Regular expressions to detect bug numbers in changelogs according to the
# Debian Policy Chapter 4.4. For details see the footnote 16:
# http://www.debian.org/doc/debian-policy/footnotes.html#f16
MATCH_BUG_CLOSES_DEBIAN=r"closes:\s*(?:bug)?\#?\s?\d+(?:,\s*(?:bug)?\#?\s?\d+)*"
MATCH_BUG_NUMBERS=r"\#?\s?(\d+)"
# URL pointing to a bug in the Debian bug tracker
HREF_BUG_DEBIAN="http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=%s"
MATCH_BUG_CLOSES_UBUNTU = r"lp:\s+\#\d+(?:,\s*\#\d+)*"
HREF_BUG_UBUNTU = "https://bugs.launchpad.net/bugs/%s"
# Regular expression to find cve references
MATCH_CVE="CVE-\d{4}-\d{4}"
HREF_CVE="http://web.nvd.nist.gov/view/vuln/detail?vulnId=%s"
# Pin file written by synaptic; honored when reading repository settings
SYNAPTIC_PIN_FILE = "/var/lib/synaptic/preferences"
# After the given amount of seconds without any updates on the console or
# progress kill the installation
# NOTE(review): 10 * 60 * 10000 is 6,000,000 seconds (~69 days), not the
# 10 minutes the expression suggests — this effectively disables the idle
# kill in update_interface(); confirm whether the "* 10000" is intentional
# (the install progress class calls it an "insanly long timeout").
TIMEOUT_IDLE_INSTALLATION = 10 * 60 * 10000
# Required to get translated descriptions
try:
    locale.setlocale(locale.LC_ALL, "")
except locale.Error:
    pklog.debug("Failed to unset LC_ALL")
# Required to parse RFC822 time stamps
try:
    locale.setlocale(locale.LC_TIME, "C")
except locale.Error:
    pklog.debug("Failed to unset LC_TIME")
def catch_pkerror(func):
    """Decorator that reports a PKError raised by the wrapped backend
    method to the PackageKit daemon instead of letting it escape.

    The wrapped callable must be a backend method: its first positional
    argument is the backend instance whose error() method is used for
    the reporting.
    """
    # functools.wraps keeps the wrapped method's name/docstring intact
    @functools.wraps(func)
    def _catch_error(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except PKError as error:
            backend = args[0]
            backend.error(error.enum, error.msg, error.exit)
    return _catch_error
def lock_cache(func):
    """Decorator that acquires the system package cache lock before
    executing the wrapped backend method and releases it afterwards.

    Blocks (polling every three seconds) until both the archive
    download directory lock could be probed and the main package
    system lock could be taken.  The first positional argument of the
    wrapped callable has to be the backend instance.
    """
    @functools.wraps(func)
    def _locked_cache(*args, **kwargs):
        backend = args[0]
        backend.status(enums.STATUS_WAITING_FOR_LOCK)
        while True:
            try:
                # see if the lock for the download dir can be acquired
                # (work around bug in python-apt/apps that call
                # _fetchArchives)
                lockfile = apt_pkg.config.find_dir("Dir::Cache::Archives") + \
                           "lock"
                lock = apt_pkg.get_lock(lockfile)
                if lock < 0:
                    raise SystemError("failed to lock '%s'" % lockfile)
                else:
                    # The download lock is only probed, not held
                    os.close(lock)
                # then lock the main package system
                apt_pkg.pkgsystem_lock()
            except SystemError:
                time.sleep(3)
            else:
                break
        try:
            # Pass the result through so decorated methods can return values
            return func(*args, **kwargs)
        finally:
            backend._unlock_cache()
    return _locked_cache
class PKError(Exception):
    """Exception carrying a PackageKit error enum, a human readable
    message and a flag controlling whether the transaction aborts."""

    def __init__(self, enum, msg, exit=True):
        self.enum = enum
        self.msg = msg
        self.exit = exit

    def __str__(self):
        return "{0}: {1}".format(self.enum, self.msg)
class PackageKitOpProgress(apt.progress.base.OpProgress):
    """Handle the cache opening progress.

    Maps the cache's own 0-100 progress onto the [start, end] segment
    of the overall transaction progress, advancing through fixed
    sub-steps (12%, 25%, 50%, 75%, 100%) for consecutive phases.
    """

    def __init__(self, backend, start=0, end=100, progress=True):
        self._backend = backend
        apt.progress.base.OpProgress.__init__(self)
        self.steps = []
        for val in [0.12, 0.25, 0.50, 0.75, 1.00]:
            step = start + (end - start) * val
            self.steps.append(step)
        self.pstart = float(start)
        self.pend = self.steps.pop(0)
        # Last percentage that was emitted (None until the first update)
        self.pprev = None
        self.show_progress = progress

    # OpProgress callbacks

    def update(self, percent=None):
        """Emit the overall percentage, only on forward movement."""
        if percent is None:
            return
        progress = int(self.pstart + percent / 100 * (self.pend - self.pstart))
        # Only report strictly increasing values to avoid flickering
        if self.show_progress and self.pprev < progress:
            self._backend.percentage(progress)
            self.pprev = progress

    def done(self):
        """Advance to the next progress segment."""
        self.pstart = self.pend
        try:
            self.pend = self.steps.pop(0)
        except IndexError:
            # More phases occurred than anticipated steps; was a bare
            # "except:" before, which also swallowed unrelated errors
            pklog.warning("An additional step to open the cache is required")
class PackageKitAcquireProgress(apt.progress.base.AcquireProgress):
    """Handle the package download progress.

    Maps apt's acquire progress onto the [start, end] segment of the
    overall transaction progress and emits per-item progress signals.

    TODO: Add a progress for Updating the cache.
    """

    def __init__(self, backend, start=0, end=100):
        self._backend = backend
        apt.progress.base.AcquireProgress.__init__(self)
        self.start_progress = start
        self.end_progress = end
        # Last overall percentage that was emitted (None until first pulse)
        self.last_progress = None
        self.last_sub_progress = None
        self.package_states = {}
        # Set to a (medium, drive) tuple if a media change was requested
        self.media_change_required = None

    def pulse(self, owner):
        #TODO: port to pulse(owner)
        # NOTE(review): would raise ZeroDivisionError if total_bytes were 0
        # at this point -- presumably apt always knows the total here;
        # confirm.
        percent = self.current_bytes * 100.0 / self.total_bytes
        progress = int(self.start_progress + percent / 100 *
                       (self.end_progress - self.start_progress))
        # A backwards running progress is reported as a not available progress
        if self.last_progress > progress:
            self._backend.percentage()
        else:
            self._backend.percentage(progress)
        self.last_progress = progress
        # Emit per-package download progress for every active worker
        for worker in owner.workers:
            if not worker.current_item or not worker.total_size:
                continue
            item_id = "%s;;;" % worker.current_item.shortdesc
            item_percent = worker.current_size * 100 / worker.total_size
            self._backend.item_progress(item_id, item_percent)
        return True

    def fetch(self, item):
        # Announce the package whose download just started
        info = enums.INFO_DOWNLOADING
        try:
            pkg = self._backend._cache[item.shortdesc]
        # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit;
        # presumably only KeyError (item not a cached package) is expected
        except:
            # Not a package in the cache, emit a fake package id
            self._backend.package("%s;;;" % item.shortdesc, info, "")
        else:
            self._backend._emit_package(pkg, info)

    def start(self):
        # Download phase begins: allow the user to cancel while fetching
        self._backend.status(enums.STATUS_DOWNLOAD)
        self._backend.allow_cancel(True)

    def stop(self):
        # Download phase finished: jump to the segment end and lock cancel
        self._backend.percentage(self.end_progress)
        self._backend.allow_cancel(False)

    def media_change(self, medium, drive):
        #FIXME: Perhaps use gudev to show a nicer drive name
        self._backend.media_change_required(enums.MEDIA_TYPE_DISC, medium,
                                            drive)
        # Remember the request; returning False aborts the acquire run
        self.media_change_required = medium, drive
        return False
class PackageKitAcquireRepoProgress(PackageKitAcquireProgress):
    """Track the download of repository index files."""

    def pulse(self, owner):
        # Repository refreshes have no meaningful overall percentage
        self._backend.percentage(None)
        return True

    def fetch(self, item):
        # Individual index files are not reported as packages
        pass

    def start(self):
        self._backend.status(enums.STATUS_DOWNLOAD_REPOSITORY)
        self._backend.allow_cancel(True)
class PackageKitInstallProgress(apt.progress.base.InstallProgress):
    """Handle the installation and removal process.

    Translates APT/dpkg status lines into PackageKit package and
    progress signals and supervises the forked dpkg child on a pty.
    """

    def __init__(self, backend, start=0, end=100):
        apt.progress.base.InstallProgress.__init__(self)
        self._backend = backend
        self.pstart = start
        self.pend = end
        self.pprev = None
        self.last_activity = None
        self.conffile_prompts = set()
        # insanly long timeout to be able to kill hanging maintainer scripts
        self.start_time = None
        self.output = ""
        self.master_fd = None
        self.child_pid = None
        self.last_pkg = None
        self.last_item_percentage = 0

    def status_change(self, pkg_name, percent, status):
        """Callback for APT status updates."""
        self.last_activity = time.time()
        progress = self.pstart + percent / 100 * (self.pend - self.pstart)
        if self.pprev < progress:
            self._backend.percentage(int(progress))
            self.pprev = progress
        # INSTALL/UPDATE lifecycle (taken from aptcc)
        # - Starts:
        #   - "Running dpkg"
        # - Loops:
        #   - "Installing pkg" (0%)
        #   - "Preparing pkg" (25%)
        #   - "Unpacking pkg" (50%)
        #   - "Preparing to configure pkg" (75%)
        #   - Some packages have:
        #     - "Runnung post-installation"
        #     - "Running dpkg"
        #   - Loops:
        #     - "Configuring pkg" (0%)
        #     - Sometimes "Configuring pkg" (+25%)
        #   - "Installed pkg"
        # - Afterwards:
        #   - "Running post-installation"
        #
        # REMOVING lifecylce
        # - Starts:
        #   - "Running dpkg"
        # - loops:
        #   - "Removing pkg" (25%)
        #   - "Preparing for removal" (50%)
        #   - "Removing pkg" (75%)
        #   - "Removed pkg" (100%)
        # - Afterwards:
        #   - "Running post-installation"
        # Emit a Package signal for the currently processed package
        if status.startswith("Preparing"):
            item_percentage = self.last_item_percentage + 25
            info = enums.INFO_PREPARING
        elif status.startswith("Installing"):
            item_percentage = 0
            info = enums.INFO_INSTALLING
        elif status.startswith("Installed"):
            item_percentage = 100
            info = enums.INFO_FINISHED
        elif status.startswith("Configuring"):
            # BUGFIX: the reset to 0 was previously a dead store that was
            # immediately overwritten by the "+ 25" assignment.  Per the
            # lifecycle above, configuring restarts at 0% after the package
            # reached 100% and advances by 25% on repeats.
            if self.last_item_percentage >= 100:
                item_percentage = 0
            else:
                item_percentage = self.last_item_percentage + 25
            info = enums.INFO_INSTALLING
        elif status.startswith("Removing"):
            item_percentage = self.last_item_percentage + 25
            info = enums.INFO_REMOVING
        elif status.startswith("Removed"):
            item_percentage = 100
            info = enums.INFO_FINISHED
        elif status.startswith("Completely removing"):
            item_percentage = self.last_item_percentage + 25
            info = enums.INFO_REMOVING
        elif status.startswith("Completely removed"):
            item_percentage = 100
            info = enums.INFO_FINISHED
        elif status.startswith("Unpacking"):
            item_percentage = 50
            info = enums.INFO_DECOMPRESSING
        elif status.startswith("Noting disappearance of"):
            item_percentage = self.last_item_percentage
            info = enums.INFO_UNKNOWN
        elif status.startswith("Running"):
            item_percentage = self.last_item_percentage
            info = enums.INFO_CLEANUP
        else:
            item_percentage = self.last_item_percentage
            info = enums.INFO_UNKNOWN
        try:
            pkg = self._backend._cache[pkg_name]
        except KeyError:
            # Emit a fake package
            id = "%s;;;" % pkg_name
            self._backend.package(id, info, "")
            self._backend.item_progress(id, item_percentage)
        else:
            # Always use the candidate - except for removals
            self._backend._emit_package(pkg, info, not pkg.marked_delete)
            if pkg.marked_delete:
                version = pkg.installed
            else:
                version = pkg.candidate
            id = self._backend._get_id_from_version(version)
            self._backend.item_progress(id, item_percentage)
        self.last_pkg = pkg_name
        self.last_item_percentage = item_percentage

    def processing(self, pkg_name, status):
        """Callback for dpkg status updates."""
        if status == "install":
            info = enums.INFO_INSTALLING
        elif status == "configure":
            info = enums.INFO_INSTALLING
        elif status == "remove":
            info = enums.INFO_REMOVING
        elif status == "purge":
            info = enums.INFO_PURGING
        elif status == "disappear":
            info = enums.INFO_CLEANINGUP
        elif status == "upgrade":
            info = enums.INFO_UPDATING
        elif status == "trigproc":
            info = enums.INFO_CLEANINGUP
        else:
            info = enums.INFO_UNKNOWN
        self._backend.package("%s;;;" % pkg_name, info, "")

    def start_update(self):
        """Prepare for the commit phase."""
        # The apt system lock was set by _lock_cache() before
        self._backend._unlock_cache()
        self._backend.status(enums.STATUS_COMMIT)
        self.last_activity = time.time()
        self.start_time = time.time()

    def fork(self):
        """Fork the dpkg child on a pseudo terminal."""
        pklog.debug("fork()")
        (pid, self.master_fd) = pty.fork()
        if pid != 0:
            fcntl.fcntl(self.master_fd, fcntl.F_SETFL, os.O_NONBLOCK)
        else:
            def interrupt_handler(signum, frame):
                # Exit the child immediately if we receive the interrupt
                # signal or a Ctrl+C - to avoid that atexit would be called
                os._exit(apt_pkg.PackageManager.RESULT_FAILED)
            # Restore the exception handler to avoid catches by apport
            sys.excepthook = sys.__excepthook__
            signal.signal(signal.SIGINT, interrupt_handler)
            # Avoid questions as far as possible
            os.environ["APT_LISTCHANGES_FRONTEND"] = "none"
            os.environ["APT_LISTBUGS_FRONTEND"] = "none"
            # Check if debconf communication can be piped to the client
            frontend_socket = os.getenv("FRONTEND_SOCKET", None)
            if frontend_socket:
                os.environ["DEBCONF_PIPE"] = frontend_socket
                os.environ["DEBIAN_FRONTEND"] = "passthrough"
            else:
                os.environ["DEBIAN_FRONTEND"] = "noninteractive"
            # Force terminal messages in dpkg to be untranslated, status-fd
            # or debconf prompts won't be affected
            os.environ["DPKG_UNTRANSLATED_MESSAGES"] = "1"
            # We also want untranslated status messages from apt on status-fd
            locale.setlocale(locale.LC_ALL, "C")
        return pid

    def update_interface(self):
        """Collect terminal output and watch for a hanging installation."""
        apt.progress.base.InstallProgress.update_interface(self)
        # Collect the output from the package manager
        try:
            out = os.read(self.master_fd, 512)
            self.output = self.output + out
            pklog.debug("APT out: %s " % out)
        except OSError:
            pass
        # catch a time out by sending crtl+c
        if self.last_activity + TIMEOUT_IDLE_INSTALLATION < time.time():
            pklog.critical("no activity for %s seconds sending ctrl-c" \
                           % TIMEOUT_IDLE_INSTALLATION)
            os.write(self.master_fd, chr(3))
            msg = "Transaction was cancelled since the installation " \
                  "of a package hung.\n" \
                  "This can be caused by maintainer scripts which " \
                  "require input on the terminal:\n%s" % self.output
            raise PKError(enums.ERROR_PACKAGE_FAILED_TO_CONFIGURE,
                          format_string(msg))

    def conffile(self, current, new):
        """Record a config file prompt; it is answered with 'keep old'."""
        pklog.warning("Config file prompt: '%s' (sending no)" % current)
        self.conffile_prompts.add(new)

    def error(self, pkg, msg):
        """Raise a PKError matching the failed operation on pkg."""
        try:
            pkg = self._backend._cache[pkg]
        except KeyError:
            err_enum = enums.ERROR_TRANSACTION_FAILED
        else:
            if pkg.marked_delete:
                err_enum = enums.ERROR_PACKAGE_FAILED_TO_REMOVE
            elif pkg.marked_keep:
                # Should be called in the case of triggers
                err_enum = enums.ERROR_PACKAGE_FAILED_TO_CONFIGURE
            else:
                err_enum = enums.ERROR_PACKAGE_FAILED_TO_INSTALL
        raise PKError(err_enum, self.output)

    def finish_update(self):
        """Report skipped conffile prompts and required restarts."""
        pklog.debug("finishUpdate()")
        if self.conffile_prompts:
            self._backend.message(enums.MESSAGE_CONFIG_FILES_CHANGED,
                                  "The following conffile prompts were found "
                                  "and need investigation: %s" % \
                                  "\n".join(self.conffile_prompts))
        # Check for required restarts
        if os.path.exists("/var/run/reboot-required") and \
           os.path.getmtime("/var/run/reboot-required") > self.start_time:
            self._backend.require_restart(enums.RESTART_SYSTEM, "")
class PackageKitDpkgInstallProgress(PackageKitInstallProgress):
    """Initiate and monitor the installation of local package files
    with a plain dpkg command line call.
    """

    def recover(self):
        """Finish interrupted installations via 'dpkg --configure -a'."""
        dpkg = apt_pkg.config.find_file("Dir::Bin::Dpkg")
        cmd = [dpkg,
               "--status-fd", str(self.writefd),
               "--root", apt_pkg.config["Dir"],
               "--force-confdef", "--force-confold"]
        cmd += apt_pkg.config.value_list("Dpkg::Options")
        cmd += ["--configure", "-a"]
        self.run(cmd)

    def install(self, filenames):
        """Install the given package files via 'dpkg -i'."""
        dpkg = apt_pkg.config.find_file("Dir::Bin::Dpkg")
        cmd = [dpkg,
               "--force-confdef", "--force-confold",
               "--status-fd", str(self.writefd),
               "--root", apt_pkg.config["Dir"]]
        cmd += apt_pkg.config.value_list("Dpkg::Options")
        cmd.append("-i")
        cmd += [str(name) for name in filenames]
        self.run(cmd)

    def run(self, cmd):
        """Spawn a dpkg child on a pseudo terminal and wait for it."""
        pklog.debug("Executing: %s" % cmd)
        self.master_fd, slave_fd = pty.openpty()
        fcntl.fcntl(self.master_fd, fcntl.F_SETFL, os.O_NONBLOCK)
        proc = subprocess.Popen(cmd, stdout=slave_fd, stdin=slave_fd)
        self.child_pid = proc.pid
        return self.wait_child()
# Only defined when python-software-properties could be imported
if REPOS_SUPPORT == True:
    class PackageKitSoftwareProperties(softwareproperties.SoftwareProperties.SoftwareProperties):
        """
        Helper class to fix a silly bug in python-software-properties:
        actually write the modified sources.list to disk instead of only
        marking it as changed.
        """
        def set_modified_sourceslist(self):
            # Persist the change immediately
            self.save_sourceslist()
class PackageKitAptBackend(PackageKitBaseBackend):
"""PackageKit backend for APT"""
def __init__(self, cmds=""):
    """Initialize the APT backend.

    Connects SIGQUIT, prepares the (not yet opened) package cache and
    forces non-interactive conffile handling for dpkg.
    """
    pklog.info("Initializing APT backend")
    signal.signal(signal.SIGQUIT, self._sigquit)
    # The apt cache; presumably opened lazily by _check_init() -- confirm
    self._cache = None
    self._last_cache_refresh = None
    apt_pkg.init_config()
    # Keep old conffiles by default to avoid blocking dpkg prompts
    apt_pkg.config.set("DPkg::Options::", '--force-confdef')
    apt_pkg.config.set("DPkg::Options::", '--force-confold')
    PackageKitBaseBackend.__init__(self, cmds)
    self._init_plugins()
# Methods ( client -> engine -> backend )
@catch_pkerror
def search_file(self, filters, filenames):
    """Search for files in packages.

    Works only for installed files if apt-file isn't installed.

    :param filters: PackageKit filter names applied to the results.
    :param filenames: file names or absolute paths to look up.
    """
    pklog.info("Searching for file: %s" % filenames)
    self.status(enums.STATUS_QUERY)
    self.percentage(None)
    self._check_init(progress=False)
    self.allow_cancel(True)
    result_names = set()
    # Optionally make use of apt-file's Contents cache to search for not
    # installed files. But still search for installed files additionally
    # to make sure that we provide up-to-date results
    if (os.path.exists("/usr/bin/apt-file") and
        enums.FILTER_INSTALLED not in filters):
        #FIXME: Make use of rapt-file on Debian if the network is available
        #FIXME: Show a warning to the user if the apt-file cache is several
        #       weeks old
        pklog.debug("Using apt-file")
        filenames_regex = []
        for filename in filenames:
            if filename.startswith("/"):
                # NOTE(review): the leading slash is stripped here but kept
                # in the installed-files pattern below -- presumably this
                # matches apt-file's Contents index layout; confirm.
                pattern = "^%s$" % filename[1:].replace("/", "\/")
            else:
                pattern = "\/%s$" % filename
            filenames_regex.append(pattern)
        cmd = ["/usr/bin/apt-file", "--regexp", "--non-interactive",
               "--package-only", "find", "|".join(filenames_regex)]
        pklog.debug("Calling: %s" % cmd)
        apt_file = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
        stdout, stderr = apt_file.communicate()
        if apt_file.returncode == 0:
            #FIXME: Actually we should check if the file is part of the
            #       candidate, e.g. if unstable and experimental are
            #       enabled and a file would only be part of the
            #       experimental version
            result_names.update(stdout.split())
            self._emit_visible_packages_by_name(filters, result_names)
        else:
            raise PKError(enums.ERROR_INTERNAL_ERROR,
                          format_string("%s %s" % (stdout, stderr)))
    # Search for installed files
    filenames_regex = []
    for filename in filenames:
        if filename.startswith("/"):
            pattern = "^%s$" % filename.replace("/", "\/")
        else:
            pattern = ".*\/%s$" % filename
        filenames_regex.append(pattern)
    files_pattern = re.compile("|".join(filenames_regex))
    for pkg in self._cache:
        if pkg.name in result_names:
            # Already reported through the apt-file result set
            continue
        for installed_file in pkg.installed_files:
            if files_pattern.match(installed_file):
                self._emit_visible_package(filters, pkg)
                break
@catch_pkerror
def search_group(self, filters, groups):
    """Emit every visible package whose mapped group is in *groups*."""
    pklog.info("Searching for groups: %s" % groups)
    self.status(enums.STATUS_QUERY)
    self.percentage(None)
    self._check_init(progress=False)
    self.allow_cancel(True)
    matching = (pkg for pkg in self._cache
                if self._get_package_group(pkg) in groups)
    for pkg in matching:
        self._emit_visible_package(filters, pkg)
@catch_pkerror
def search_name(self, filters, values):
    """Emit all versions of packages whose name contains every search
    term in *values*."""
    pklog.info("Searching for package name: %s" % values)
    self.status(enums.STATUS_QUERY)
    self.percentage(None)
    self._check_init(progress=False)
    self.allow_cancel(True)
    for pkg_name in self._cache.keys():
        # A package matches only if each term is a substring of its name
        if all(term in pkg_name for term in values):
            self._emit_all_visible_pkg_versions(filters,
                                                self._cache[pkg_name])
@catch_pkerror
def search_details(self, filters, values):
    """Search packages by details (name and long description).

    Uses the apt-xapian-index database when available, otherwise falls
    back to a linear scan over the cache descriptions.
    """
    pklog.info("Searching for package details: %s" % values)
    self.status(enums.STATUS_QUERY)
    self.percentage(None)
    self._check_init(progress=False)
    self.allow_cancel(True)
    # NOTE(review): "results" is never used in this method
    results = []
    if XAPIAN_SUPPORT == True:
        search_flags = (xapian.QueryParser.FLAG_BOOLEAN |
                        xapian.QueryParser.FLAG_PHRASE |
                        xapian.QueryParser.FLAG_LOVEHATE |
                        xapian.QueryParser.FLAG_BOOLEAN_ANY_CASE)
        pklog.debug("Performing xapian db based search")
        db = xapian.Database(XAPIAN_DB)
        parser = xapian.QueryParser()
        parser.set_default_op(xapian.Query.OP_AND)
        query = parser.parse_query(" ".join(values), search_flags)
        enquire = xapian.Enquire(db)
        enquire.set_query(query)
        # NOTE(review): this result set is unused; the generator below
        # issues get_mset() a second time
        matches = enquire.get_mset(0, 1000)
        for pkg_name in (match.document.get_data()
                         for match in enquire.get_mset(0, 1000)):
            if pkg_name in self._cache:
                self._emit_visible_package(filters, self._cache[pkg_name])
    else:
        def matches(searches, text):
            # True only if every search term occurs in the text
            for search in searches:
                if not search in text:
                    return False
            return True
        pklog.debug("Performing apt cache based search")
        values = [val.lower() for val in values]
        for pkg in self._cache:
            txt = pkg.name
            try:
                txt += pkg.candidate.raw_description.lower()
                txt += pkg.candidate._translated_records.long_desc.lower()
            except AttributeError:
                # No candidate or no description records available
                pass
            if matches(values, txt):
                self._emit_visible_package(filters, pkg)
@catch_pkerror
def get_distro_upgrades(self):
    """Emit the latest available distribution release, if any.

    Implements the {backend}-get-distro-upgrades functionality.
    Requires update-manager-core (MetaReleaseCore) to be installed.

    :raises PKError: if meta release support is not available.
    """
    pklog.info("Get distro upgrades")
    self.status(enums.STATUS_INFO)
    self.allow_cancel(False)
    self.percentage(None)
    if not META_RELEASE_SUPPORT:
        # Both branches raise; the old unreachable "return" was removed
        if "update-manager-core" in self._cache and \
           self._cache["update-manager-core"].is_installed == False:
            raise PKError(enums.ERROR_INTERNAL_ERROR,
                          "Please install the package update-manager-core "
                          "to get notified of the latest distribution "
                          "releases.")
        else:
            # BUGFIX: the two literals previously concatenated to
            # "...update-manager-core iscorrectly installed."
            raise PKError(enums.ERROR_INTERNAL_ERROR,
                          "Please make sure that update-manager-core is "
                          "correctly installed.")
    #FIXME Evil to start the download during init
    meta_release = MetaReleaseCore(False, False)
    #FIXME: should use a lock
    while meta_release.downloading:
        time.sleep(1)
    #FIXME: Add support for description
    if meta_release.new_dist is not None:
        self.distro_upgrade("stable",
                            "%s %s" % (meta_release.new_dist.name,
                                       meta_release.new_dist.version),
                            "The latest stable release")
@catch_pkerror
def get_updates(self, filters):
    """Emit the available package updates, classified as security,
    enhancement, bug fix, normal or blocked."""
    def succeeds_security_update(pkg):
        """
        Return True if an update succeeds a previous security update

        An example would be a package with version 1.1 in the security
        archive and 1.1.1 in the archive of proposed updates or the
        same version in both archives.
        """
        for version in pkg.versions:
            # Only check versions between the installed and the candidate
            # BUGFIX: this skip condition used "and", which no version can
            # satisfy (it cannot be both <= installed and > candidate), so
            # nothing was ever filtered out
            if (pkg.installed and
                (apt_pkg.version_compare(version.version,
                                         pkg.installed.version) <= 0 or
                 apt_pkg.version_compare(version.version,
                                         pkg.candidate.version) > 0)):
                continue
            for origin in version.origins:
                if origin.origin in ["Debian", "Ubuntu"] and \
                   (origin.archive.endswith("-security") or
                    origin.label == "Debian-Security") and \
                   origin.trusted:
                    return True
        return False
    #FIXME: Implment the basename filter
    pklog.info("Get updates")
    self.status(enums.STATUS_QUERY)
    self.allow_cancel(True)
    self.percentage(None)
    self._check_init(progress=False)
    # Start with a safe upgrade
    self._cache.upgrade(dist_upgrade=True)
    # Search for upgrades which are not already part of the safe upgrade
    # but would only require the installation of additional packages
    for pkg in self._cache:
        if not pkg.is_upgradable:
            continue
        # This may occur on pinned packages which have been updated to
        # later version than the pinned one
        if not pkg.candidate.origins:
            continue
        if not pkg.marked_upgrade:
            #FIXME: Would be nice to have a reason here why
            self._emit_package(pkg, enums.INFO_BLOCKED,
                               force_candidate=True)
            # BUGFIX: without this continue a blocked update was emitted
            # a second time below as an installable update
            continue
        # The update can be safely installed
        info = enums.INFO_NORMAL
        # Detect the nature of the upgrade (e.g. security, enhancement)
        candidate_origin = pkg.candidate.origins[0]
        archive = candidate_origin.archive
        origin = candidate_origin.origin
        trusted = candidate_origin.trusted
        label = candidate_origin.label
        if origin in ["Debian", "Ubuntu"] and trusted:
            if archive.endswith("-security") or label == "Debian-Security":
                info = enums.INFO_SECURITY
            elif succeeds_security_update(pkg):
                pklog.debug("Update of %s succeeds a security update. "
                            "Raising its priority." % pkg.name)
                info = enums.INFO_SECURITY
            elif archive.endswith("-backports"):
                info = enums.INFO_ENHANCEMENT
            elif archive.endswith("-updates"):
                info = enums.INFO_BUGFIX
        if origin in ["Backports.org archive"] and trusted:
            info = enums.INFO_ENHANCEMENT
        self._emit_package(pkg, info, force_candidate=True)
    self._cache.clear()
@catch_pkerror
def get_update_detail(self, pkg_ids):
    """Emit UpdateDetail signals (changelog, closed bugs, CVE links,
    required restart) for the given package ids."""
    def get_bug_urls(changelog):
        """Return a list of urls pointing to closed bugs in the
        changelog.
        """
        urls = []
        for r in re.findall(MATCH_BUG_CLOSES_DEBIAN, changelog,
                            re.IGNORECASE | re.MULTILINE):
            urls.extend([HREF_BUG_DEBIAN % bug for bug in \
                         re.findall(MATCH_BUG_NUMBERS, r)])
        for r in re.findall(MATCH_BUG_CLOSES_UBUNTU, changelog,
                            re.IGNORECASE | re.MULTILINE):
            urls.extend([HREF_BUG_UBUNTU % bug for bug in \
                         re.findall(MATCH_BUG_NUMBERS, r)])
        return urls
    def get_cve_urls(changelog):
        """Return a list of urls pointing to CVEs reports referred to in
        the changelog.
        """
        return [HREF_CVE % c for c in re.findall(MATCH_CVE, changelog,
                                                 re.MULTILINE)]
    pklog.info("Get update details of %s" % pkg_ids)
    self.status(enums.STATUS_DOWNLOAD_CHANGELOG)
    self.percentage(0)
    self.allow_cancel(True)
    self._check_init(progress=False)
    total = len(pkg_ids)
    for count, pkg_id in enumerate(pkg_ids):
        self.percentage(count * 100 / total)
        pkg = self._get_package_by_id(pkg_id)
        # FIXME add some real data
        if pkg.installed.origins:
            installed_origin = pkg.installed.origins[0].label
        else:
            installed_origin = ""
        updates = "%s;%s;%s;%s" % (pkg.name, pkg.installed.version,
                                   pkg.installed.architecture,
                                   installed_origin)
        obsoletes = ""
        vendor_url = ""
        restart = "none"
        update_text = ""
        state = ""
        issued = ""
        updated = ""
        #FIXME: make this more configurable. E.g. a dbus update requires
        #       a reboot on Ubuntu but not on Debian
        if pkg.name.startswith("linux-image-") or \
           pkg.name in ["libc6"]:
            # BUGFIX: was "restart == enums.RESTART_SYSTEM", a no-op
            # comparison, so a system restart was never reported
            restart = enums.RESTART_SYSTEM
        #FIXME: Should be part of python-apt
        changelog_dir = apt_pkg.config.find_dir("Dir::Cache::Changelogs")
        if changelog_dir == "/":
            changelog_dir = os.path.join(apt_pkg.config.find_dir("Dir::"
                                                                 "Cache"),
                                         "Changelogs")
        filename = os.path.join(changelog_dir,
                                "%s_%s.gz" % (pkg.name,
                                              pkg.candidate.version))
        changelog_raw = ""
        if os.path.exists(filename):
            pklog.debug("Reading changelog from cache")
            changelog_file = gzip.open(filename, "rb")
            try:
                changelog_raw = changelog_file.read().decode("UTF-8")
            finally:
                changelog_file.close()
        if not changelog_raw:
            pklog.debug("Downloading changelog")
            changelog_raw = pkg.get_changelog()
            # The internal download error string of python-apt ist not
            # provided as unicode object
            if not isinstance(changelog_raw, unicode):
                changelog_raw = changelog_raw.decode("UTF-8")
            else:
                # Write the changelog to the cache
                if not os.path.exists(changelog_dir):
                    os.makedirs(changelog_dir)
                # Remove obsolete cached changelogs of the same package
                pattern = os.path.join(changelog_dir, "%s_*.gz" % pkg.name)
                for old_changelog in glob.glob(pattern):
                    os.remove(os.path.join(changelog_dir, old_changelog))
                with gzip.open(filename, mode="wb") as changelog_file:
                    changelog_file.write(changelog_raw.encode("UTF-8"))
        # Convert the changelog to markdown syntax
        changelog = ""
        for line in changelog_raw.split("\n"):
            if line == "":
                changelog += " \n"
            else:
                changelog += " %s \n" % line
            if line.startswith(pkg.candidate.source_name):
                # Header line: "source (version) dist; urgency=..."
                match = re.match(r"(?P<source>.+) \((?P<version>.*)\) "
                                 "(?P<dist>.+); urgency=(?P<urgency>.+)",
                                 line)
                update_text += "%s\n%s\n\n" % (match.group("version"),
                                               "=" * \
                                               len(match.group("version")))
            elif line.startswith(" "):
                update_text += " %s \n" % line
            elif line.startswith(" --"):
                #FIXME: Add %z for the time zone - requires Python 2.6
                update_text += " \n"
                match = re.match("^ -- (?P<maintainer>.+) (?P<mail><.+>) "
                                 "(?P<date>.+) (?P<offset>[-\+][0-9]+)$",
                                 line)
                date = datetime.datetime.strptime(match.group("date"),
                                                  "%a, %d %b %Y %H:%M:%S")
                issued = date.isoformat()
                if not updated:
                    updated = date.isoformat()
        if issued == updated:
            updated = ""
        bugzilla_url = ";;".join(get_bug_urls(changelog))
        cve_url = ";;".join(get_cve_urls(changelog))
        self.update_detail(pkg_id, updates, obsoletes, vendor_url,
                           bugzilla_url, cve_url, restart,
                           format_string(update_text),
                           format_string(changelog),
                           state, issued, updated)
@catch_pkerror
def get_details(self, pkg_ids):
"""Emit details about packages."""
pklog.info("Get details of %s" % pkg_ids)
self.status(enums.STATUS_INFO)
self.percentage(None)
self.allow_cancel(True)
self._check_init(progress=False)
total = len(pkg_ids)
for count, pkg_id in enumerate(pkg_ids):
self.percentage(count * 100 / total)
version = self._get_version_by_id(pkg_id)
#FIXME: We need more fine grained license information!
if (version.origins and
version.origins[0].component in ["main", "universe"] and
version.origins[0].origin in ["Debian", "Ubuntu"]):
license = "free"
else:
license = "unknown"
group = self._get_package_group(pkg)
self.details(pkg_id, license, group,
format_string(version.description),
version.homepage, version.size)
    @catch_pkerror
    @lock_cache
    def update_system(self, only_trusted):
        """Upgrade the system.

        Marks every upgradable package and commits the transaction.

        :param only_trusted: fail if an untrusted (unsigned) package would
            be installed
        """
        pklog.info("Upgrading system")
        self.status(enums.STATUS_UPDATE)
        self.allow_cancel(False)
        self.percentage(0)
        self._check_init()
        # Mark all upgradable packages (plain upgrade, not dist-upgrade)
        self._cache.upgrade()
        #FIXME: Emit message about skipped updates
        # for pkg in self._cache:
        #     if pkg.is_upgradable and pkg.marked_upgrade:
        #         continue
        self._check_trusted(only_trusted)
        self._commit_changes()
@catch_pkerror
@lock_cache
def repair_system(self, only_trusted):
"""Recover from broken dependencies."""
pklog.info("Repairing system")
self.status(enums.STATUS_DEP_RESOLVE)
self.allow_cancel(False)
self.percentage(0)
self._check_init(fail_broken=False)
try:
self._cache._depcache.fix_broken()
except SystemError:
broken = [pkg.name for pkg in self._cache if pkg.is_inst_broken]
raise PKError(enums.ERROR_DEP_RESOLUTION_FAILED,
"The following packages would break and so block"
"the removal: %s" % " ".join(broken))
self._check_trusted(only_trusted)
self._commit_changes()
@catch_pkerror
def simulate_repair_system(self):
"""Simulate recovery from broken dependencies."""
pklog.info("Simulating system repair")
self.status(enums.STATUS_DEP_RESOLVE)
self.allow_cancel(False)
self.percentage(0)
self._check_init(fail_broken=False)
try:
self._cache._depcache.fix_broken()
except SystemError:
broken = [pkg.name for pkg in self._cache if pkg.is_inst_broken]
raise PKError(enums.ERROR_DEP_RESOLUTION_FAILED,
"The following packages would break and so block"
"the removal: %s" % " ".join(broken))
self._emit_changes()
@catch_pkerror
@lock_cache
def remove_packages(self, allow_deps, auto_remove, ids):
"""Remove packages."""
pklog.info("Removing package(s): id %s" % ids)
self.status(enums.STATUS_REMOVE)
self.allow_cancel(False)
self.percentage(0)
self._check_init()
if auto_remove:
auto_removables = [pkg.name for pkg in self._cache \
if pkg.is_auto_removable]
pkgs = self._mark_for_removal(ids)
# Check if the removal would remove further packages
if not allow_deps and self._cache.delete_count != len(ids):
dependencies = [pkg.name for pkg in self._cache.get_changes() \
if pkg.name not in pkgs]
raise PKError(enums.ERROR_DEP_RESOLUTION_FAILED,
"The following packages would have also to be "
"removed: %s" % " ".join(dependencies))
if auto_remove:
self._check_obsoleted_dependencies()
#FIXME: Should support only_trusted
self._commit_changes(install_start=10, install_end=90)
self._open_cache(start=90, end=99)
for pkg_name in pkgs:
if pkg_name in self._cache and self._cache[pkg_name].is_installed:
raise PKError(enums.ERROR_PACKAGE_FAILED_TO_INSTALL,
"%s is still installed" % pkg_name)
self.percentage(100)
def _check_obsoleted_dependencies(self):
"""Check for no longer required dependencies which should be removed
too.
"""
installed_deps = set()
with self._cache.actiongroup():
for pkg in self._cache:
if pkg.marked_delete:
installed_deps = self._installed_dependencies(pkg.name,
installed_deps)
for dep_name in installed_deps:
if dep_name in self._cache:
pkg = self._cache[dep_name]
if pkg.is_installed and pkg.is_auto_removable:
pkg.mark_delete(False)
def _installed_dependencies(self, pkg_name, all_deps=None):
"""Recursivly return all installed dependencies of a given package."""
#FIXME: Should be part of python-apt
# apt.packagek.Version.get_dependencies(recursive=True)
if not all_deps:
all_deps = set()
if not pkg_name in self._cache:
return all_deps
cur = self._cache[pkg_name]._pkg.current_ver
if not cur:
return all_deps
for sec in ("PreDepends", "Depends", "Recommends"):
try:
for dep in cur.depends_list[sec]:
dep_name = dep[0].target_pkg.name
if not dep_name in all_deps:
all_deps.add(dep_name)
all_deps |= self._installed_dependencies(dep_name,
all_deps)
except KeyError:
pass
return all_deps
    @catch_pkerror
    def simulate_remove_packages(self, ids):
        """Emit the change required for the removal of the given packages.

        Only a simulation: the transaction is not committed.

        :param ids: ids of the packages whose removal is simulated
        """
        pklog.info("Simulating removal of package with id %s" % ids)
        self.status(enums.STATUS_DEP_RESOLVE)
        self.allow_cancel(True)
        self.percentage(None)
        self._check_init(progress=False)
        pkgs = self._mark_for_removal(ids)
        self._emit_changes(pkgs)
def _mark_for_removal(self, ids):
"""Resolve the given package ids and mark the packages for removal."""
pkgs = []
with self._cache.actiongroup():
resolver = apt.cache.ProblemResolver(self._cache)
for id in ids:
version = self._get_version_by_id(id)
pkg = version.package
if not pkg.is_installed:
raise PKError(enums.ERROR_PACKAGE_NOT_INSTALLED,
"Package %s isn't installed" % pkg.name)
if pkg.installed != version:
raise PKError(enums.ERROR_PACKAGE_NOT_INSTALLED,
"Version %s of %s isn't installed" % \
(version.version, pkg.name))
if pkg.essential == True:
raise PKError(enums.ERROR_CANNOT_REMOVE_SYSTEM_PACKAGE,
"Package %s cannot be removed." % pkg.name)
pkgs.append(pkg.name[:])
pkg.mark_delete(False, False)
resolver.clear(pkg)
resolver.protect(pkg)
resolver.remove(pkg)
try:
resolver.resolve()
except SystemError as error:
broken = [pkg.name for pkg in self._cache if pkg.is_inst_broken]
raise PKError(enums.ERROR_DEP_RESOLUTION_FAILED,
"The following packages would break and so block"
"the removal: %s" % " ".join(broken))
return pkgs
    @catch_pkerror
    def get_repo_list(self, filters):
        """
        Implement the {backend}-get-repo-list functionality

        Emits the distribution components, the distro's child (update)
        repositories, cdrom sources, a virtual source-code repository and
        third party (ISV) sources as repo_detail signals.

        FIXME: should we use the abstration of software-properties or provide
        low level access using pure aptsources?
        """
        pklog.info("Getting repository list: %s" % filters)
        self.status(enums.STATUS_INFO)
        self.allow_cancel(False)
        self.percentage(0)
        # software-properties could not be imported at module load time
        if REPOS_SUPPORT == False:
            if ("python-software-properties" in self._cache and
                not self._cache["python-software-properties"].is_installed):
                raise PKError(enums.ERROR_INTERNAL_ERROR,
                              "Please install the package "
                              "python-software-properties to handle "
                              "repositories")
            else:
                raise PKError(enums.ERROR_INTERNAL_ERROR,
                              "Please make sure that python-software-properties" " is correctly installed.")
        repos = PackageKitSoftwareProperties()
        # Emit distro components as virtual repositories
        for comp in repos.distro.source_template.components:
            repo_id = "%s_comp_%s" % (repos.distro.id, comp.name)
            description = "%s %s - %s (%s)" % (repos.distro.id,
                                               repos.distro.release,
                                               comp.get_description().decode("UTF-8"),
                                               comp.name)
            #FIXME: There is no inconsitent state in PackageKit
            enabled = repos.get_comp_download_state(comp)[0]
            if not enums.FILTER_DEVELOPMENT in filters:
                self.repo_detail(repo_id,
                                 format_string(description),
                                 enabled)
        # Emit distro's virtual update repositories
        for template in repos.distro.source_template.children:
            repo_id = "%s_child_%s" % (repos.distro.id, template.name)
            description = "%s %s - %s (%s)" % (repos.distro.id,
                                               repos.distro.release,
                                               template.description.decode("UTF-8"),
                                               template.name)
            #FIXME: There is no inconsitent state in PackageKit
            enabled = repos.get_comp_child_state(template)[0]
            if not enums.FILTER_DEVELOPMENT in filters:
                self.repo_detail(repo_id,
                                 format_string(description),
                                 enabled)
        # Emit distro's cdrom sources
        for source in repos.get_cdrom_sources():
            if enums.FILTER_NOT_DEVELOPMENT in filters and \
               source.type in ("deb-src", "rpm-src"):
                continue
            enabled = not source.disabled
            # Remove markups from the description
            description = re.sub(r"</?b>", "", repos.render_source(source))
            repo_id = "cdrom_%s_%s" % (source.uri, source.dist)
            # NOTE(review): str.join returns a new string which is discarded
            # here, so the component suffixes never become part of repo_id.
            # repo_enable builds its ids the same way, so the pair still
            # matches - confirm before changing either side.
            repo_id.join(["_%s" % c for c in source.comps])
            self.repo_detail(repo_id, format_string(description), enabled)
        # Emit distro's virtual source code repositoriy
        if not enums.FILTER_NOT_DEVELOPMENT in filters:
            repo_id = "%s_source" % repos.distro.id
            enabled = repos.get_source_code_state() or False
            #FIXME: no translation :(
            description = "%s %s - Source code" % (repos.distro.id,
                                                   repos.distro.release)
            self.repo_detail(repo_id, format_string(description), enabled)
        # Emit third party repositories
        for source in repos.get_isv_sources():
            if enums.FILTER_NOT_DEVELOPMENT in filters and \
               source.type in ("deb-src", "rpm-src"):
                continue
            enabled = not source.disabled
            # Remove markups from the description
            description = re.sub(r"</?b>", "", repos.render_source(source))
            repo_id = "isv_%s_%s" % (source.uri, source.dist)
            # NOTE(review): join result discarded here as well - see above
            repo_id.join(["_%s" % c for c in source.comps])
            self.repo_detail(repo_id,
                             format_string(description.decode("UTF-8")),
                             enabled)
@catch_pkerror
def repo_enable(self, repo_id, enable):
"""
Implement the {backend}-repo-enable functionality
FIXME: should we use the abstration of software-properties or provide
low level access using pure aptsources?
"""
pklog.info("Enabling repository: %s %s" % (repo_id, enable))
self.status(enums.STATUS_RUNNING)
self.allow_cancel(False)
self.percentage(0)
if REPOS_SUPPORT == False:
if ("python-software-properties" in self._cache and
not self._cache["python-software-properties"].is_installed):
raise PKError(enums.ERROR_INTERNAL_ERROR,
"Please install the package "
"python-software-properties to handle "
"repositories")
else:
raise PKError(enums.ERROR_INTERNAL_ERROR,
"Please make sure that python-software-properties"
"is correctly installed.")
return
repos = PackageKitSoftwareProperties()
found = False
# Check if the repo_id matches a distro component, e.g. main
if repo_id.startswith("%s_comp_" % repos.distro.id):
for comp in repos.distro.source_template.components:
if repo_id == "%s_comp_%s" % (repos.distro.id, comp.name):
if enable == repos.get_comp_download_state(comp)[0]:
pklog.debug("Repository is already enabled")
pass
if enable == True:
repos.enable_component(comp.name)
else:
repos.disable_component(comp.name)
found = True
break
# Check if the repo_id matches a distro child repository, e.g.
# hardy-updates
elif repo_id.startswith("%s_child_" % repos.distro.id):
for template in repos.distro.source_template.children:
if repo_id == "%s_child_%s" % (repos.distro.id, template.name):
if enable == repos.get_comp_child_state(template)[0]:
pklog.debug("Repository is already enabled")
pass
elif enable == True:
repos.enable_child_source(template)
else:
repos.disable_child_source(template)
found = True
break
# Check if the repo_id matches a cdrom repository
elif repo_id.startswith("cdrom_"):
for source in repos.get_isv_sources():
source_id = "cdrom_%s_%s" % (source.uri, source.dist)
source_id.join(["_%s" % c for c in source.comps])
if repo_id == source_id:
if source.disabled == enable:
source.disabled = not enable
repos.save_sourceslist()
else:
pklog.debug("Repository is already enabled")
found = True
break
# Check if the repo_id matches an isv repository
elif repo_id.startswith("isv_"):
for source in repos.get_isv_sources():
source_id = "isv_%s_%s" % (source.uri, source.dist)
source_id.join(["_%s" % c for c in source.comps])
if repo_id == source_id:
if source.disabled == enable:
source.disabled = not enable
repos.save_sourceslist()
else:
pklog.debug("Repository is already enabled")
found = True
break
if found == False:
raise PKError(enums.ERROR_REPO_NOT_AVAILABLE,
"The repository %s isn't available" % repo_id)
    @catch_pkerror
    @lock_cache
    def update_packages(self, only_trusted, ids):
        """Update packages.

        :param only_trusted: fail if an untrusted package would be installed
        :param ids: ids of the packages to update
        :raises PKError: if a package was not updated successfully
        """
        pklog.info("Updating package with id %s" % ids)
        self.status(enums.STATUS_UPDATE)
        self.allow_cancel(False)
        self.percentage(0)
        self._check_init()
        pkgs = self._mark_for_upgrade(ids)
        self._check_trusted(only_trusted)
        self._commit_changes()
        self._open_cache(start=90, end=100)
        self.percentage(100)
        pklog.debug("Checking success of operation")
        # Each requested package must now be installed and no longer
        # upgradable (i.e. at the latest version)
        for pkg_name in pkgs:
            if (pkg_name not in self._cache or
                not self._cache[pkg_name].is_installed or
                self._cache[pkg_name].is_upgradable):
                raise PKError(enums.ERROR_PACKAGE_FAILED_TO_INSTALL,
                              "%s was not updated" % pkg_name)
        pklog.debug("Sending success signal")
    @catch_pkerror
    def simulate_update_packages(self, ids):
        """Emit the changes required for the upgrade of the given packages.

        Only a simulation: the transaction is not committed.

        :param ids: ids of the packages whose upgrade is simulated
        """
        pklog.info("Simulating update of package with id %s" % ids)
        self.status(enums.STATUS_DEP_RESOLVE)
        self.allow_cancel(True)
        self.percentage(None)
        self._check_init(progress=False)
        pkgs = self._mark_for_upgrade(ids)
        self._emit_changes(pkgs)
def _mark_for_upgrade(self, ids):
"""Resolve the given package ids and mark the packages for upgrade."""
pkgs = []
with self._cache.actiongroup():
resolver = apt.cache.ProblemResolver(self._cache)
for id in ids:
version = self._get_version_by_id(id)
pkg = version.package
if not pkg.is_installed:
raise PKError(enums.ERROR_PACKAGE_NOT_INSTALLED,
"%s isn't installed" % pkg.name)
# Check if the specified version is an update
if apt_pkg.version_compare(pkg.installed.version,
version.version) >= 0:
raise PKError(enums.ERROR_UPDATE_NOT_FOUND,
"The version %s of %s isn't an update to the "
"current %s" % (version.version, pkg.name,
pkg.installed.version))
pkg.candidate = version
pkgs.append(pkg.name[:])
pkg.mark_install(False, True)
resolver.clear(pkg)
resolver.protect(pkg)
try:
resolver.resolve()
except SystemError as error:
broken = [pkg.name for pkg in self._cache if pkg.is_inst_broken]
raise PKError(enums.ERROR_DEP_RESOLUTION_FAILED,
"The following packages block the installation: "
"%s" % " ".join(broken))
return pkgs
    @catch_pkerror
    def download_packages(self, dest, ids):
        """Download packages to the given destination.

        :param dest: writable directory to store the .deb files in
        :param ids: ids of the package versions to download
        :raises PKError: if a package isn't downloadable, the destination
            isn't writable or a download fails
        """
        def get_download_details(ids):
            """Calculate the start and end point of a package download
            progress.
            """
            total = 0
            downloaded = 0
            versions = []
            # Check if all ids are valid and calculate the total download size
            for id in ids:
                pkg_ver = self._get_pkg_version_by_id(id)
                if not pkg_ver.downloadable:
                    raise PKError(enums.ERROR_PACKAGE_DOWNLOAD_FAILED,
                                  "package %s isn't downloadable" % id)
                total += pkg_ver.size
                versions.append((id, pkg_ver))
            # Map each download onto its share of the overall progress
            # (integer division on purpose here - percentages)
            for id, ver in versions:
                start = downloaded * 100 / total
                end = start + ver.size * 100 / total
                yield id, ver, start, end
                downloaded += ver.size
        pklog.info("Downloading packages: %s" % ids)
        self.status(enums.STATUS_DOWNLOAD)
        self.allow_cancel(True)
        self.percentage(0)
        # Check the destination directory
        if not os.path.isdir(dest) or not os.access(dest, os.W_OK):
            raise PKError(enums.ERROR_INTERNAL_ERROR,
                          "The directory '%s' is not writable" % dest)
        # Setup the fetcher
        self._check_init()
        # Start the download
        for id, ver, start, end in get_download_details(ids):
            progress = PackageKitAcquireProgress(self, start, end)
            self._emit_pkg_version(ver, enums.INFO_DOWNLOADING)
            try:
                ver.fetch_binary(dest, progress)
            except Exception as error:
                raise PKError(enums.ERROR_PACKAGE_DOWNLOAD_FAILED,
                              format_string(str(error)))
            else:
                # Report the path of the downloaded file
                self.files(id, os.path.join(dest,
                                            os.path.basename(ver.filename)))
                self._emit_pkg_version(ver, enums.INFO_FINISHED)
        self.percentage(100)
@catch_pkerror
@lock_cache
def install_packages(self, only_trusted, ids):
"""Install the given packages."""
pklog.info("Installing package with id %s" % ids)
self.status(enums.STATUS_INSTALL)
self.allow_cancel(False)
self.percentage(0)
self._check_init()
pkgs = self._mark_for_installation(ids)
self._check_trusted(only_trusted)
self._commit_changes()
self._open_cache(start=90, end=100)
self.percentage(100)
pklog.debug("Checking success of operation")
for p in pkgs:
if p not in self._cache or not self._cache[p].is_installed:
raise PKError(enums.ERROR_PACKAGE_FAILED_TO_INSTALL,
"%s was not installed" % p)
    @catch_pkerror
    def simulate_install_packages(self, ids):
        """Emit the changes required for the installation of the given
        packages.

        Only a simulation: the transaction is not committed.

        :param ids: ids of the packages whose installation is simulated
        """
        pklog.info("Simulating installing package with id %s" % ids)
        self.status(enums.STATUS_DEP_RESOLVE)
        self.allow_cancel(True)
        self.percentage(None)
        self._check_init(progress=False)
        pkgs = self._mark_for_installation(ids)
        self._emit_changes(pkgs)
def _mark_for_installation(self, ids):
"""Resolve the given package ids and mark the packages for
installation.
"""
pkgs = []
with self._cache.actiongroup():
resolver = apt.cache.ProblemResolver(self._cache)
for id in ids:
version = self._get_version_by_id(id)
pkg = version.package
pkg.candidate = version
if pkg.installed == version:
raise PKError(enums.ERROR_PACKAGE_ALREADY_INSTALLED,
"Package %s is already installed" % pkg.name)
pkgs.append(pkg.name[:])
pkg.mark_install(False, True, True)
resolver.clear(pkg)
resolver.protect(pkg)
try:
resolver.resolve()
except SystemError as error:
broken = [pkg.name for pkg in self._cache if pkg.is_inst_broken]
raise PKError(enums.ERROR_DEP_RESOLUTION_FAILED,
"The following packages block the installation: "
"%s" % " ".join(broken))
return pkgs
    @catch_pkerror
    @lock_cache
    def install_files(self, only_trusted, inst_files):
        """Install local Debian package files.

        First installs any missing dependencies from the repositories, then
        installs the files themselves via dpkg.

        :param only_trusted: fail if an untrusted dependency would be
            installed
        :param inst_files: paths of the local .deb files to install
        :raises PKError: if a file is invalid, dependencies cannot be
            satisfied or dpkg fails
        """
        pklog.info("Installing package files: %s" % inst_files)
        self.status(enums.STATUS_INSTALL)
        self.allow_cancel(False)
        self.percentage(0)
        self._check_init()
        packages = []
        # Collect all dependencies which need to be installed
        self.status(enums.STATUS_DEP_RESOLVE)
        for path in inst_files:
            deb = apt.debfile.DebPackage(path, self._cache)
            packages.append(deb)
            if not deb.check():
                raise PKError(enums.ERROR_LOCAL_INSTALL_FAILED,
                              format_string(deb._failure_string))
            (install, remove, unauthenticated) = deb.required_changes
            pklog.debug("Changes: Install %s, Remove %s, Unauthenticated "
                        "%s" % (install, remove, unauthenticated))
            # Removals caused by a local install are not allowed
            if len(remove) > 0:
                raise PKError(enums.ERROR_DEP_RESOLUTION_FAILED,
                              "Remove the following packages "
                              "before: %s" % remove)
            if (deb.compare_to_version_in_cache() ==
                apt.debfile.DebPackage.VERSION_OUTDATED):
                self.message(enums.MESSAGE_NEWER_PACKAGE_EXISTS,
                             "There is a later version of %s "
                             "available in the repositories." % deb.pkgname)
        # Install missing dependencies from the repositories first
        if self._cache.get_changes():
            self._check_trusted(only_trusted)
            self._commit_changes(fetch_start=10, fetch_end=25,
                                 install_start=25, install_end=50)
        # Install the Debian package files
        progress = PackageKitDpkgInstallProgress(self, start=50, end=90)
        try:
            progress.start_update()
            progress.install(inst_files)
            progress.finish_update()
        except PKError as error:
            # Clean up the dpkg state before forwarding the backend error
            self._recover()
            raise error
        except Exception as error:
            self._recover()
            raise PKError(enums.ERROR_INTERNAL_ERROR, format_string(str(error)))
        self.percentage(100)
@catch_pkerror
def simulate_install_files(self, inst_files):
"""Emit the change required for the installation of the given package
files.
"""
pklog.info("Simulating installation of the package files: "
"%s" % inst_files)
self.status(enums.STATUS_DEP_RESOLVE)
self.allow_cancel(True)
self.percentage(None)
self._check_init(progress=False)
pkgs = []
for path in inst_files:
deb = apt.debfile.DebPackage(path, self._cache)
pkgs.append(deb.pkgname)
if not deb.check():
raise PKError(enums.ERROR_LOCAL_INSTALL_FAILED,
format_string(deb._failure_string))
self._emit_changes(pkgs)
@catch_pkerror
@lock_cache
def refresh_cache(self, force):
"""Update the package cache."""
# TODO: use force ?
pklog.info("Refresh cache")
self.status(enums.STATUS_REFRESH_CACHE)
self.last_action_time = time.time()
self.allow_cancel(False);
self.percentage(0)
self._check_init()
progress = PackageKitAcquireRepoProgress(self, start=10, end=95)
try:
ret = self._cache.update(progress)
except Exception as error:
# FIXME: Unluckily python-apt doesn't provide a real good error
# reporting. We only receive a failure string.
# FIXME: Doesn't detect if all downloads failed - bug in python-apt
self.message(enums.MESSAGE_REPO_METADATA_DOWNLOAD_FAILED,
format_string(str(error)))
self._open_cache(start=95, end=100)
self.percentage(100)
@catch_pkerror
def get_packages(self, filters):
"""Get packages."""
pklog.info("Get all packages")
self.status(enums.STATUS_QUERY)
self.percentage(None)
self._check_init(progress=False)
self.allow_cancel(True)
total = len(self._cache)
for count, pkg in enumerate(self._cache):
self.percentage(count / 100 * total)
if self._is_package_visible(pkg, filters):
self._emit_package(pkg)
@catch_pkerror
def resolve(self, filters, names):
"""<|fim▁hole|> self.percentage(None)
self._check_init(progress=False)
self.allow_cancel(False)
for name in names:
try:
self._emit_visible_package(filters, self._cache[name])
except KeyError:
raise PKError(enums.ERROR_PACKAGE_NOT_FOUND,
"Package name %s could not be resolved" % name)
@catch_pkerror
def get_depends(self, filters, ids, recursive):
"""Emit all dependencies of the given package ids.
Doesn't support recursive dependency resolution.
"""
def emit_blocked_dependency(base_dependency, pkg=None,
filters=[]):
"""Send a blocked package signal for the given
apt.package.BaseDependency.
"""
if enums.FILTER_INSTALLED in filters:
return
if pkg:
summary = pkg.summary
try:
filters.remove(enums.FILTER_NOT_INSTALLED)
except ValueError:
pass
if not self._is_package_visible(pkg, filters):
return
else:
summary = ""
if base_dependency.relation:
version = "%s%s" % (base_dependency.relation,
base_dependency.version)
else:
version = base_dependency.version
self.package("%s;%s;;" % (base_dependency.name, version),
enums.INFO_BLOCKED, summary)
def check_dependency(pkg, base_dep):
"""Check if the given apt.package.Package can satisfy the
BaseDepenendcy and emit the corresponding package signals.
"""
if not self._is_package_visible(pkg, filters):
return
if base_dep.version:
satisfied = False
# Sort the version list to check the installed
# and candidate before the other ones
ver_list = list(pkg.versions)
if pkg.installed:
ver_list.remove(pkg.installed)
ver_list.insert(0, pkg.installed)
if pkg.candidate:
ver_list.remove(pkg.candidate)
ver_list.insert(0, pkg.candidate)
for dep_ver in ver_list:
if apt_pkg.check_dep(dep_ver.version,
base_dep.relation,
base_dep.version):
self._emit_pkg_version(dep_ver)
satisfied = True
break
if not satisfied:
emit_blocked_dependency(base_dep, pkg, filters)
else:
self._emit_package(pkg)
# Setup the transaction
pklog.info("Get depends (%s,%s,%s)" % (filter, ids, recursive))
self.status(enums.STATUS_QUERY)
self.percentage(None)
self._check_init(progress=False)
self.allow_cancel(True)
dependency_types = ["PreDepends", "Depends"]
if apt_pkg.config["APT::Install-Recommends"]:
dependency_types.append("Recommends")
total = len(ids)
for count, id in enumerate(ids):
self.percentage(count / 100 * total)
version = self._get_version_by_id(id)
for dependency in version.get_dependencies(*dependency_types):
# Walk through all or_dependencies
for base_dep in dependency.or_dependencies:
if self._cache.is_virtual_package(base_dep.name):
# Check each proivider of a virtual package
for provider in \
self._cache.get_providing_packages(base_dep.name):
check_dependency(provider, base_dep)
elif base_dep.name in self._cache:
check_dependency(self._cache[base_dep.name], base_dep)
else:
# The dependency does not exist
emit_blocked_dependency(base_dep, filters=filters)
@catch_pkerror
def get_requires(self, filters, ids, recursive):
"""Emit all packages which depend on the given ids.
Recursive searching is not supported.
"""
pklog.info("Get requires (%s,%s,%s)" % (filter, ids, recursive))
self.status(enums.STATUS_DEP_RESOLVE)
self.percentage(None)
self._check_init(progress=False)
self.allow_cancel(True)
total = len(ids)
for count, id in enumerate(ids):
self.percentage(count / 100 * total)
version = self._get_version_by_id(id)
for pkg in self._cache:
if not self._is_package_visible(pkg, filters):
continue
if pkg.is_installed:
pkg_ver = pkg.installed
elif pkg.candidate:
pkg_ver = pkg.candidate
for dependency in pkg_ver.dependencies:
satisfied = False
for base_dep in dependency.or_dependencies:
if version.package.name == base_dep.name or \
base_dep.name in version.provides:
satisfied = True
break
if satisfied:
self._emit_package(pkg)
break
    @catch_pkerror
    def what_provides(self, filters, provides_type, search):
        """Emit all packages which provide the given codecs, mime types or
        modaliases (or anything a loaded plugin can handle).

        :param filters: package filters to apply to the emitted packages
        :param provides_type: one of the enums.PROVIDES_* constants
        :param search: list of search terms (gstreamer caps, mime types,
            modalias strings, ...)
        :raises PKError: if the provides type or a search term is
            unsupported
        """
        def get_mapping_db(path):
            """
            Return the gdbm database at the given path or send an
            appropriate error message
            """
            if not os.access(path, os.R_OK):
                # NOTE(review): the %s placeholder has no argument and is
                # logged literally - "path" was probably meant to be
                # interpolated here
                pklog.warning("list of applications that can handle files of the given type %s does not exist")
                return None
            try:
                db = gdbm.open(path)
            # NOTE(review): bare except - also swallows KeyboardInterrupt
            except:
                raise PKError(enums.ERROR_INTERNAL_ERROR,
                              "The list of applications that can handle "
                              "files of the given type cannot be opened.\n"
                              "Try to reinstall the package "
                              "app-install-data.")
            else:
                return db
        def extract_gstreamer_request(search):
            # The search term from PackageKit daemon:
            # gstreamer0.10(urisource-foobar)
            # gstreamer0.10(decoder-audio/x-wma)(wmaversion=3)
            match = re.match("^gstreamer(?P<version>[0-9\.]+)"
                             "\((?P<kind>.+?)-(?P<data>.+?)\)"
                             "(\((?P<opt>.*)\))?",
                             search)
            caps = None
            if not match:
                raise PKError(enums.ERROR_NOT_SUPPORTED,
                              "The search term is invalid: %s" % search)
            if match.group("opt"):
                caps_str = "%s, %s" % (match.group("data"), match.group("opt"))
                # gst.Caps.__init__ cannot handle unicode instances
                caps = gst.Caps(str(caps_str))
            record = GSTREAMER_RECORD_MAP[match.group("kind")]
            return match.group("version"), record, match.group("data"), caps
        self.status(enums.STATUS_QUERY)
        self.percentage(None)
        self._check_init(progress=False)
        self.allow_cancel(False)
        supported_type = False
        if provides_type in (enums.PROVIDES_CODEC, enums.PROVIDES_ANY):
            supported_type = True
            # Search for provided gstreamer plugins using the package
            # metadata
            import gst
            GSTREAMER_RECORD_MAP = {"encoder": "Gstreamer-Encoders",
                                    "decoder": "Gstreamer-Decoders",
                                    "urisource": "Gstreamer-Uri-Sources",
                                    "urisink": "Gstreamer-Uri-Sinks",
                                    "element": "Gstreamer-Elements"}
            for search_item in search:
                try:
                    gst_version, gst_record, gst_data, gst_caps = \
                        extract_gstreamer_request(search_item)
                except PKError:
                    if provides_type == enums.PROVIDES_ANY:
                        # NOTE(review): "break" also skips the remaining
                        # search items - "continue" may have been intended
                        break # ignore invalid codec query, probably for other types
                    else:
                        raise
                for pkg in self._cache:
                    # Prefer the installed version's record, else candidate
                    if pkg.installed:
                        version = pkg.installed
                    elif pkg.candidate:
                        version = pkg.candidate
                    else:
                        continue
                    if not "Gstreamer-Version" in version.record:
                        continue
                    if version.record["Gstreamer-Version"] != gst_version:
                        continue
                    if gst_caps:
                        try:
                            pkg_caps = gst.Caps(version.record[gst_record])
                        except KeyError:
                            continue
                        if gst_caps.intersect(pkg_caps):
                            self._emit_visible_package(filters, pkg)
                    else:
                        try:
                            elements = version.record[gst_record]
                        except KeyError:
                            continue
                        if gst_data in elements:
                            self._emit_visible_package(filters, pkg)
        if provides_type in (enums.PROVIDES_MIMETYPE, enums.PROVIDES_ANY):
            supported_type = True
            # Emit packages that contain an application that can handle
            # the given mime type
            for search_item in search:
                handlers = set()
                db = get_mapping_db("/var/lib/PackageKit/mime-map.gdbm")
                if db == None:
                    if provides_type != enums.PROVIDES_ANY:
                        raise PKError(enums.ERROR_INTERNAL_ERROR,
                                      "The list of applications that can handle "
                                      "files of the given type cannot be opened.")
                    else:
                        break
                if search_item in db:
                    pklog.debug("Mime type is registered: %s" % db[search_item])
                    # The mime type handler db stores the packages as a string
                    # separated by spaces. Each package has its section
                    # prefixed and separated by a slash
                    # FIXME: Should make use of the section and emit a
                    #        RepositoryRequired signal if the package does
                    #        not exist
                    handlers = [s.split("/")[1] for s in db[search_item].split(" ")]
                    self._emit_visible_packages_by_name(filters, handlers)
        if provides_type in (enums.PROVIDES_MODALIAS, enums.PROVIDES_ANY):
            supported_type = True
            system_architecture = apt_pkg.get_architectures()[0]
            # Emit packages that contain an application that can handle
            # the given modalias
            valid_modalias_re = re.compile('^[a-z0-9]+:')
            for search_item in search:
                if not valid_modalias_re.match(search_item):
                    if provides_type != enums.PROVIDES_ANY:
                        raise PKError(enums.ERROR_NOT_SUPPORTED,
                                      "The search term is invalid: %s" % search_item)
                    else:
                        continue
                for package in self._cache:
                    # skip foreign architectures, we usually only want native
                    # driver packages
                    if (not package.candidate or
                        package.candidate.architecture not in ("all",
                                                               system_architecture)):
                        continue
                    try:
                        m = package.candidate.record['Modaliases']
                    except (KeyError, AttributeError):
                        continue
                    # Modaliases header: "module1(alias1, alias2) module2(...)"
                    try:
                        pkg_matches = False
                        for part in m.split(')'):
                            part = part.strip(', ')
                            if not part:
                                continue
                            module, lst = part.split('(')
                            for alias in lst.split(','):
                                alias = alias.strip()
                                if fnmatch.fnmatch(search_item, alias):
                                    self._emit_visible_package(filters, package)
                                    pkg_matches = True
                                    break
                            if pkg_matches:
                                break
                    except ValueError:
                        pklog.warning("Package %s has invalid modalias header: %s" % (
                            package.name, m))
        # run plugins
        for plugin in self.plugins.get("what_provides", []):
            pklog.debug("calling what_provides plugin %s" % str(plugin))
            for search_item in search:
                try:
                    for package in plugin(self._cache, provides_type, search_item):
                        self._emit_visible_package(filters, package)
                    supported_type = True
                except NotImplementedError:
                    pass # keep supported_type as False
        if not supported_type and provides_type != enums.PROVIDES_ANY:
            raise PKError(enums.ERROR_NOT_SUPPORTED,
                          "This function is not implemented in this backend")
@catch_pkerror
def get_files(self, package_ids):
"""Emit the Files signal which includes the files included in a package
Apt only supports this for installed packages
"""
self.status(enums.STATUS_INFO)
total = len(package_ids)
self._check_init(progress=False)
for count, id in enumerate(package_ids):
self.percentage(count / 100 * total)
pkg = self._get_package_by_id(id)
files = ";".join(pkg.installed_files)
self.files(id, files)
# Helpers
def _init_plugins(self):
"""Initialize plugins."""
self.plugins = {} # plugin_name -> [plugin_fn1, ...]
if not pkg_resources:
return
# just look in standard Python paths for now
dists, errors = pkg_resources.working_set.find_plugins(pkg_resources.Environment())
for dist in dists:
pkg_resources.working_set.add(dist)
for plugin_name in ["what_provides"]:
for entry_point in pkg_resources.iter_entry_points(
"packagekit.apt.plugins", plugin_name):
try:
plugin = entry_point.load()
except Exception as e:
pklog.warning("Failed to load %s from plugin %s: %s" % (
plugin_name, str(entry_point.dist), str(e)))
continue
pklog.debug("Loaded %s from plugin %s" % (
plugin_name, str(entry_point.dist)))
self.plugins.setdefault(plugin_name, []).append(plugin)
def _unlock_cache(self):
"""Unlock the system package cache."""
try:
apt_pkg.pkgsystem_unlock()
except SystemError:
return False
return True
def _open_cache(self, start=0, end=100, progress=True, fail_broken=True):
"""(Re)Open the APT cache."""
pklog.debug("Open APT cache")
self.status(enums.STATUS_LOADING_CACHE)
rootdir = os.getenv("ROOT", "/")
if rootdir == "/":
rootdir = None
try:
self._cache = apt.Cache(PackageKitOpProgress(self, start, end,
progress),
rootdir=rootdir)
except Exception as error:
raise PKError(enums.ERROR_NO_CACHE,
"Package cache could not be opened:%s" % error)
if self._cache.broken_count > 0 and fail_broken:
raise PKError(enums.ERROR_DEP_RESOLUTION_FAILED,
"There are broken dependecies on your system. "
"Please use an advanced package manage e.g. "
"Synaptic or aptitude to resolve this situation.")
if rootdir:
apt_pkg.config.clear("DPkg::Post-Invoke")
apt_pkg.config.clear("DPkg::Options")
apt_pkg.config["DPkg::Options::"] = "--root=%s" % rootdir
dpkg_log = "--log=%s/var/log/dpkg.log" % rootdir
apt_pkg.config["DPkg::Options::"] = dpkg_log
self._last_cache_refresh = time.time()
def _recover(self, start=95, end=100):
    """Try to recover from a package manager failure.

    Runs the dpkg recovery step and reopens the cache, reporting
    progress between *start* and *end*.
    """
    self.status(enums.STATUS_CLEANUP)
    self.percentage(None)
    try:
        d = PackageKitDpkgInstallProgress(self)
        d.start_update()
        d.recover()
        d.finish_update()
    except Exception:
        # recovery is best effort -- a failure here must not mask the
        # original error that triggered it (narrowed from a bare except
        # so KeyboardInterrupt/SystemExit still propagate)
        pass
    # BUG FIX: the progress range was previously hard-coded to 95..100,
    # silently ignoring the caller-supplied start/end parameters
    self._open_cache(start=start, end=end)
def _check_trusted(self, only_trusted):
    """Check if only trusted packages are allowed and fail if
    untrusted packages would be installed in this case.
    """
    if not only_trusted:
        return
    untrusted = []
    for pkg in self._cache:
        has_pending_change = (pkg.marked_install or pkg.marked_upgrade or
                              pkg.marked_downgrade or pkg.marked_reinstall)
        if not has_pending_change:
            continue
        # a package is trusted as soon as any of its origins is trusted
        if not any(origin.trusted for origin in pkg.candidate.origins):
            untrusted.append(pkg.name)
    if untrusted:
        raise PKError(enums.ERROR_MISSING_GPG_SIGNATURE,
                      " ".join(untrusted))
def _commit_changes(self, fetch_start=10, fetch_end=50,
                    install_start=50, install_end=90):
    """Commit the marked changes to the system.

    Progress is split into a fetch phase (*fetch_start*..*fetch_end*)
    and an install phase (*install_start*..*install_end*).  Failures
    are translated into PKError; after an installation failure a
    recovery run is attempted before re-raising.
    """
    acquire_prog = PackageKitAcquireProgress(self, fetch_start, fetch_end)
    inst_prog = PackageKitInstallProgress(self, install_start, install_end)
    try:
        self._cache.commit(acquire_prog, inst_prog)
    except apt.cache.FetchFailedException as err:
        # a pending media change is reported differently from a plain
        # download failure
        if acquire_prog.media_change_required:
            raise PKError(enums.ERROR_MEDIA_CHANGE_REQUIRED,
                          format_string(err.message))
        else:
            pklog.critical(format_string(err.message))
            raise PKError(enums.ERROR_PACKAGE_DOWNLOAD_FAILED,
                          format_string(err.message))
    except apt.cache.FetchCancelledException:
        raise PKError(enums.TRANSACTION_CANCELLED)
    except PKError as error:
        # already classified -- clean up and pass it on unchanged
        self._recover()
        raise error
    except SystemError as error:
        # low-level dpkg/apt failure: recover and surface the installer
        # output alongside the error text
        self._recover()
        raise PKError(enums.ERROR_INTERNAL_ERROR,
                      format_string("%s\n%s" % (str(error),
                                                inst_prog.output)))
def _get_id_from_version(self, version):
    """Build the PackageKit package id ("name;version;arch;origin")
    for an apt.package.Version instance.
    """
    origin_label = version.origins[0].label if version.origins else ""
    return "%s;%s;%s;%s" % (version.package.name, version.version,
                            version.architecture, origin_label)
def _check_init(self, start=0, end=10, progress=True, fail_broken=True):
    """Check if the backend was initialized well and try to recover from
    a broken setup.

    Reopens the cache when it is missing, contains broken packages, or
    is older than the dpkg status file or the on-disk apt cache files.
    """
    pklog.debug("Checking apt cache and xapian database")
    pkg_cache = os.path.join(apt_pkg.config["Dir"],
                             apt_pkg.config["Dir::Cache"],
                             apt_pkg.config["Dir::Cache::pkgcache"])
    src_cache = os.path.join(apt_pkg.config["Dir"],
                             apt_pkg.config["Dir::Cache"],
                             apt_pkg.config["Dir::Cache::srcpkgcache"])
    # Check if the cache instance is of the correct class type, contains
    # any broken packages and if the dpkg status or apt cache files have
    # been changed since the last refresh
    if not isinstance(self._cache, apt.cache.Cache) or \
       (self._cache.broken_count > 0) or \
       (os.stat(apt_pkg.config["Dir::State::status"])[stat.ST_MTIME] > \
        self._last_cache_refresh) or \
       (os.stat(pkg_cache)[stat.ST_MTIME] > self._last_cache_refresh) or \
       (os.stat(src_cache)[stat.ST_MTIME] > self._last_cache_refresh):
        pklog.debug("Reloading the cache is required")
        self._open_cache(start, end, progress, fail_broken)
    else:
        pass
    # Read the pin file of Synaptic if available
    self._cache._depcache.read_pinfile()
    if os.path.exists(SYNAPTIC_PIN_FILE):
        self._cache._depcache.read_pinfile(SYNAPTIC_PIN_FILE)
    # Reset the depcache so no stale markings survive between transactions
    self._cache.clear()
def _emit_package(self, pkg, info=None, force_candidate=False):
    """Send the Package signal for a given APT package.

    Prefers the candidate version for uninstalled packages (or whenever
    *force_candidate* is set) and falls back to the installed version.
    """
    prefer_candidate = force_candidate or not pkg.is_installed
    if prefer_candidate and pkg.candidate:
        version = pkg.candidate
    elif pkg.is_installed:
        version = pkg.installed
    else:
        pklog.debug("Package %s hasn't got any version." % pkg.name)
        return
    self._emit_pkg_version(version, info)
def _emit_pkg_version(self, version, info=None):
    """Emit the Package signal of the given apt.package.Version.

    When *info* is not supplied it is derived from the version's state:
    installed vs. available, collection (metapackage) vs. plain package.
    """
    pkg_id = self._get_id_from_version(version)
    section = version.section.split("/")[-1]
    if not info:
        is_installed = version == version.package.installed
        is_collection = section == "metapackages"
        if is_installed:
            info = (enums.INFO_COLLECTION_INSTALLED if is_collection
                    else enums.INFO_INSTALLED)
        else:
            info = (enums.INFO_COLLECTION_AVAILABLE if is_collection
                    else enums.INFO_AVAILABLE)
    self.package(pkg_id, info, version.summary)
def _emit_all_visible_pkg_versions(self, filters, pkg):
    """Emit every version of *pkg* that passes *filters*.

    With FILTER_NEWEST only the candidate (or, failing that, the
    installed) version is emitted instead of the full version list.
    """
    if not self._is_package_visible(pkg, filters):
        return
    if enums.FILTER_NEWEST not in filters:
        for version in pkg.versions:
            self._emit_pkg_version(version)
        return
    newest = pkg.candidate or pkg.installed
    if newest:
        self._emit_pkg_version(newest)
def _emit_visible_package(self, filters, pkg, info=None):
    """Emit *pkg* unless it is filtered out by *filters*."""
    if not self._is_package_visible(pkg, filters):
        return
    self._emit_package(pkg, info)
def _emit_visible_packages(self, filters, pkgs, info=None):
    """Emit every package in *pkgs* that passes *filters*."""
    visible = (pkg for pkg in pkgs
               if self._is_package_visible(pkg, filters))
    for pkg in visible:
        self._emit_package(pkg, info)
def _emit_visible_packages_by_name(self, filters, pkgs, info=None):
    """Look up each package name from *pkgs* in the cache, then filter
    and emit it.

    NOTE(review): a name missing from the cache raises KeyError here --
    callers appear to rely on only passing valid names.
    """
    for pkg_name in pkgs:
        candidate = self._cache[pkg_name]
        if self._is_package_visible(candidate, filters):
            self._emit_package(candidate, info)
def _emit_changes(self, ignore_pkgs=None):
    """Emit a Package signal for every package with a pending change.

    :param ignore_pkgs: optional iterable of package names to skip.
    """
    # BUG FIX: the default used to be a shared mutable list ([]);
    # a frozenset also makes the per-package membership test O(1)
    ignored = frozenset(ignore_pkgs or ())
    for pkg in self._cache:
        if pkg.name in ignored:
            continue
        if pkg.marked_delete:
            self._emit_package(pkg, enums.INFO_REMOVING, False)
        elif pkg.marked_install:
            self._emit_package(pkg, enums.INFO_INSTALLING, True)
        elif pkg.marked_upgrade:
            self._emit_package(pkg, enums.INFO_UPDATING, True)
        elif pkg.marked_downgrade:
            self._emit_package(pkg, enums.INFO_DOWNGRADING, True)
        elif pkg.marked_reinstall:
            self._emit_package(pkg, enums.INFO_REINSTALLING, True)
def _is_package_visible(self, pkg, filters):
    """Return True if the package should be shown in the user
    interface, i.e. if it satisfies every filter in *filters*.

    FILTER_NONE disables filtering entirely; unknown filters are
    ignored (as before).
    """
    if filters == [enums.FILTER_NONE]:
        return True
    # Predicate a package must satisfy to stay visible under each filter.
    # BUG FIX: the old expression used "not not self._is_package_free(pkg)"
    # for FILTER_FREE, which inverted the test and hid exactly the free
    # packages it was supposed to keep.
    keep_predicates = {
        enums.FILTER_INSTALLED: lambda: pkg.is_installed,
        enums.FILTER_NOT_INSTALLED: lambda: not pkg.is_installed,
        enums.FILTER_SUPPORTED: lambda: self._is_package_supported(pkg),
        enums.FILTER_NOT_SUPPORTED:
            lambda: not self._is_package_supported(pkg),
        enums.FILTER_FREE: lambda: self._is_package_free(pkg),
        enums.FILTER_NOT_FREE: lambda: self._is_package_not_free(pkg),
        enums.FILTER_GUI: lambda: self._has_package_gui(pkg),
        enums.FILTER_NOT_GUI: lambda: not self._has_package_gui(pkg),
        enums.FILTER_COLLECTIONS: lambda: self._is_package_collection(pkg),
        enums.FILTER_NOT_COLLECTIONS:
            lambda: not self._is_package_collection(pkg),
        enums.FILTER_DEVELOPMENT: lambda: self._is_package_devel(pkg),
        enums.FILTER_NOT_DEVELOPMENT:
            lambda: not self._is_package_devel(pkg),
    }
    for pk_filter in filters:
        predicate = keep_predicates.get(pk_filter)
        if predicate is not None and not predicate():
            return False
    return True
def _is_package_not_free(self, pkg):
    """Return True if we can be sure that the package's license isn't any
    free one
    """
    #FIXME: Should check every origin
    origins = pkg.candidate.origins
    if origins is None:
        return False
    first = origins[0]
    in_nonfree_component = (
        (first.origin == "Ubuntu" and
         first.component in ["multiverse", "restricted"]) or
        (first.origin == "Debian" and
         first.component in ["contrib", "non-free"]))
    return bool(in_nonfree_component and first.trusted == True)
def _is_package_collection(self, pkg):
    """Return True if the package is a metapackage."""
    return pkg.section.rpartition("/")[2] == "metapackages"
def _is_package_free(self, pkg):
    """Return True if we can be sure that the package has got a free
    license.
    """
    #FIXME: Should check every origin
    origins = pkg.candidate.origins
    # BUG FIX: the old code checked "origins[0] != None", indexing the
    # list before verifying it -- an empty origin list raised IndexError
    # and a None list was never caught at the right level
    if not origins:
        return False
    first = origins[0]
    return (((first.origin == "Ubuntu" and
              first.component in ["main", "universe"]) or
             (first.origin == "Debian" and
              first.component == "main")) and
            first.trusted == True)
def _has_package_gui(self, pkg):
    #FIXME: should go to a modified Package class
    #FIXME: take application data into account. perhaps checking for
    #       property in the xapian database
    gui_sections = ('x11', 'gnome', 'kde')
    return pkg.section.rpartition('/')[2].lower() in gui_sections
def _is_package_devel(self, pkg):
    #FIXME: should go to a modified Package class
    if pkg.name.endswith(("-dev", "-dbg")):
        return True
    return pkg.section.rpartition('/')[2].lower() in ('devel', 'libdevel')
def _is_package_supported(self, pkg):
    """Return True when the candidate version comes from a trusted
    Ubuntu main/restricted origin."""
    #FIXME: iterate on all origins
    origins = pkg.candidate.origins
    if origins is None:
        return False
    first = origins[0]
    return (first.origin == "Ubuntu" and
            first.component in ["main", "restricted"] and
            first.trusted == True)
def _get_pkg_version_by_id(self, id):
    """Return a package version matching the given package id or None."""
    name, version, arch, data = id.split(";", 4)
    try:
        candidates = self._cache[name].versions
    except KeyError:
        # unknown package name
        return None
    for pkg_ver in candidates:
        if pkg_ver.version == version and pkg_ver.architecture == arch:
            return pkg_ver
    return None
def _get_package_by_id(self, id):
    """Return the apt.package.Package corresponding to the given
    package id.

    Errors out (via _get_version_by_id) if the package isn't available.
    """
    return self._get_version_by_id(id).package
def _get_version_by_id(self, id):
    """Return the apt.package.Version corresponding to the given
    package id ("name;version;arch;data").

    Raises PKError(ERROR_PACKAGE_NOT_FOUND) when the package, the
    version or the architecture isn't available.
    """
    name, version_string, arch, data = id.split(";", 4)
    try:
        pkg = self._cache[name]
    except KeyError:
        # narrowed from a bare except: only a missing cache entry means
        # "package not found"; other failures should propagate
        raise PKError(enums.ERROR_PACKAGE_NOT_FOUND,
                      "There isn't any package named %s" % name)
    try:
        version = pkg.versions[version_string]
    except KeyError:
        # also fixes the "verion" typo in the user-visible message
        raise PKError(enums.ERROR_PACKAGE_NOT_FOUND,
                      "There isn't any version %s of %s" % (version_string,
                                                            name))
    if version.architecture != arch:
        # BUG FIX: the arguments were previously (pkg.name,
        # version.version, arch), so the message printed the package
        # name where the version belongs and vice versa
        raise PKError(enums.ERROR_PACKAGE_NOT_FOUND,
                      "Version %s of %s isn't available for architecture "
                      "%s" % (version.version, pkg.name, arch))
    return version
def _get_package_group(self, pkg):
    """
    Return the packagekit group corresponding to the package's section
    """
    section = pkg.section.rpartition("/")[2]
    try:
        return SECTION_GROUP_MAP[section]
    except KeyError:
        pklog.debug("Unkown package section %s of %s" % (pkg.section,
                                                         pkg.name))
        return enums.GROUP_UNKNOWN
def _sigquit(self, signum, frame):
    """SIGQUIT handler: release the cache lock before terminating."""
    self._unlock_cache()
    sys.exit(1)
def main():
    """Entry point: run the backend's command dispatcher on the CLI args."""
    backend = PackageKitAptBackend()
    backend.dispatcher(sys.argv[1:])

if __name__ == '__main__':
    main()
# vim: ts=4 et sts=4<|fim▁end|> | Implement the apt2-resolve functionality
"""
pklog.info("Resolve")
self.status(enums.STATUS_QUERY) |
<|file_name|>marquise_throughput.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''use marquise_telemetry to build throughput info as visible from the client
e.g.:
$ marquise_telemetry broker | marquise_throughput.py
'''
import sys
from time import *
import os
import fcntl
class TimeAware(object):
    '''Simple timing-aware mixin.

    Divides wall-clock time into fixed-length "ticks" and invokes
    on_tick_change() once per elapsed tick.  The default implementation
    of on_tick_change() calls every function passed to the constructor
    in tick_handlers.
    '''
    def __init__(self, ticklen=1, tick_handlers=None):
        self.last_tick = self.start_time = time()
        self.ticklen = ticklen
        # BUG FIX: the default used to be a mutable list ([]) shared by
        # every instance constructed without explicit handlers
        self.tick_handlers = [] if tick_handlers is None else tick_handlers
        self.n_ticks = 0
        self.totalticktime = 0
    def check_for_tick_changed(self):
        '''run on_tick_change once for every ticklen seconds that has
        passed since last_tick
        '''
        tnow = time()
        while tnow - self.last_tick >= self.ticklen:
            self.n_ticks += 1
            self.totalticktime += self.ticklen
            self.last_tick += self.ticklen
            self.on_tick_change()
    def on_tick_change(self):
        '''handler for a tick change

        the timestamp marking the 'tick' being handled is in self.last_tick
        The current time may however be significantly after self.last_tick if
        check_for_tick_changed is not called more often than self.ticklen
        '''
        for f in self.tick_handlers: f()
    def run_forever(self, sleep_time=None):
        '''run in a loop regularly calling on_tick_change
        '''
        if sleep_time is None:
            sleep_time = self.ticklen / 10.0
        while True:
            self.check_for_tick_changed()
            sleep(sleep_time)

class TimeHistogram(TimeAware):
    '''Rolling histogram: nbins bins, each covering seconds_per_bin
    seconds; the oldest bin is recycled as time advances.'''
    def __init__(self, nbins, seconds_per_bin=1):
        TimeAware.__init__(self, seconds_per_bin)
        self.nbins = nbins
        self._bins = [0 for n in range(nbins)]
        self.current_bin = 0
    def on_tick_change(self):
        # advance to (and clear) the next bin each tick
        self.current_bin = (self.current_bin + 1) % self.nbins
        self._bins[self.current_bin] = 0
    def add(self, n=1):
        '''add 'n' to the current histogram bin
        '''
        self.check_for_tick_changed()
        self._bins[self.current_bin] += n
    def sum(self, k=60):
        '''return the total entries per second over the last k seconds
        '''
        # BUG FIX: use floor division so the slice bound stays an int;
        # true division yields a float (Python 3 or float ticklens),
        # which is not a valid slice index
        bins_to_check = int(k // self.ticklen)
        return sum(self.bins[-bins_to_check:])
    def mean(self, k=60):
        '''return the mean entries per second over the last k seconds
        '''
        if self.totalticktime < k:
            k = self.totalticktime  # Only average over the time we've been running
        bins_to_check = int(k // self.ticklen)
        return self.sum(k) / float(bins_to_check) if bins_to_check else 0
    @property
    def bins(self):
        '''get bins in time order, oldest to newest'''
        self.check_for_tick_changed()
        return self._bins[self.current_bin+1:] + self._bins[:self.current_bin+1]
class ThroughputCounter(object):
    '''Consume marquise telemetry lines and track burst/ack throughput,
    end-to-end latency and spool (defer-to-disk) activity.

    Every histogram holds 600 one-second bins, i.e. ten minutes of data.
    outstanding_bursts maps a burst tag (identity + message id) to its
    start timestamp and point count until the matching ACK arrives.
    '''
    def __init__(self, input_stream=sys.stdin):
        self.input_stream=input_stream
        self.point_hist = TimeHistogram(600)
        self.burst_hist = TimeHistogram(600)
        self.acked_burst_hist = TimeHistogram(600)
        self.latency_hist = TimeHistogram(600)
        self.ack_hist = TimeHistogram(600)
        self.defer_write_points_hist = TimeHistogram(600)
        self.defer_read_points_hist = TimeHistogram(600)
        self.timed_out_points_hist = TimeHistogram(600)
        self.outstanding_points = 0
        self.outstanding_bursts = {} # burstid -> start timestamp,points
        self._reader_state = {}
        self.using_marquised = set() # Hosts that relay through marquised
    def get_outstanding(self,last_n_seconds=[600,60,1]):
        # NOTE: the default list is only read, never mutated
        total_burst_counts = map(self.point_hist.sum, last_n_seconds)
        total_ack_counts = map(self.ack_hist.sum, last_n_seconds)
        return [nbursts-nacks for nbursts,nacks in zip(total_burst_counts,total_ack_counts)]
    def get_total_outstanding_points(self):
        return sum(points for timestamp,points in self.outstanding_bursts.itervalues())
    def get_points_per_seconds(self,over_seconds=[600,60,1]):
        return map(self.point_hist.mean, over_seconds)
    def get_total_bursts(self,over_seconds=[600,60,1]):
        return map(self.burst_hist.mean, over_seconds)
    def get_acks_per_second(self,over_seconds=[600,60,1]):
        return map(self.ack_hist.mean, over_seconds)
    def get_deferred_points_written_per_second(self,over_seconds=[600,60,1]):
        return map(self.defer_write_points_hist.mean, over_seconds)
    def get_timed_out_points_per_second(self,over_seconds=[600,60,1]):
        return map(self.timed_out_points_hist.mean, over_seconds)
    def get_deferred_points_read_per_second(self,over_seconds=[600,60,1]):
        return map(self.defer_read_points_hist.mean, over_seconds)
    def get_average_latencies(self,over_seconds=[600,60,1]):
        # mean latency per acked burst over each averaging window
        burst_counts = map(self.acked_burst_hist.sum, over_seconds)
        latency_sums = map(self.latency_hist.sum, over_seconds)
        return [latencysum/float(nbursts) if nbursts > 0 else 0 for latencysum,nbursts in zip(latency_sums,burst_counts)]
    def process_burst(self, data):
        # a new databurst was created: remember it until its ACK arrives
        if not all(k in data for k in ('identity','message id','points')):
            print >> sys.stderr, 'malformed databurst info. ignoring'
            return
        msgtag = data['identity']+data['message id']
        points = int(data['points'])
        timestamp = time()
        self.outstanding_bursts[msgtag] = timestamp,points
        self.outstanding_points += points
        self.burst_hist.add(1)
        self.point_hist.add(points)
    def _msg_tag_from_data(self, data):
        # strip the marquised: prefix so relayed bursts match their ACKs
        return (data['identity'].replace('marquised:',''))+data['message id']
    def process_deferred_write(self, data):
        msgtag = self._msg_tag_from_data(data)
        burst_timestamp,points = self.outstanding_bursts.get(msgtag,(None,None))
        if burst_timestamp is not None:
            self.defer_write_points_hist.add(points)
    def process_deferred_read(self, data):
        msgtag = self._msg_tag_from_data(data)
        burst_timestamp,points = self.outstanding_bursts.get(msgtag,(None,None))
        if burst_timestamp is not None:
            self.defer_read_points_hist.add(points)
    def process_send_timeout(self, data):
        msgtag = self._msg_tag_from_data(data)
        burst_timestamp,points = self.outstanding_bursts.get(msgtag,(None,None))
        if burst_timestamp is not None:
            self.timed_out_points_hist.add(points)
    def process_ack(self, data):
        if not all(k in data for k in ('identity','message id')):
            print >> sys.stderr, 'malformed ack info. ignoring'
            return
        if data['identity'][:10] == 'marquised:':
            # ACK is coming back to marquised from the broker
            host = data['identity'][10:]
            self.using_marquised.add(host)
        else:
            host = data['identity']
            if host in self.using_marquised:
                # If a client is using marquised, that client will
                # recieve an ack back from marquised immediately.
                #
                # We ignore this ack here, and wait for the one
                # received by marquised
                return
        msgtag = host+data['message id']
        burst_timestamp,points = self.outstanding_bursts.pop(msgtag,(None,None))
        if burst_timestamp == None:
            # Got an ACK we didn't see the burst for. Ignoring it.
            return
        latency = time() - burst_timestamp
        self.ack_hist.add(points)
        self.acked_burst_hist.add(1)
        self.latency_hist.add(latency)
        self.outstanding_points -= points
    def process_line(self, line):
        '''process a line of marquise telemetry

        At the moment, only look at bursts being created by the collate_thread
        and acked by the marquise poller_thread

        sample:
        fishhook.engineroom.anchor.net.au 1395212041732118000 8c087c0b collator_thread created_databurst frames = 1618 compressed_bytes = 16921
        ....
        marquised:astrolabe.syd1.anchor.net.au 1395375377705126042 c87ba112 poller_thread rx_msg_from collate_thread
        ....
        fishhook.engineroom.anchor.net.au 1395212082492520000 8c087c0b poller_thread rx_ack_from broker msg_id = 5553

        CAVEAT: In the above, the marquised 'collate_thread' is actually the
        collate thread in a different process, received by marquised. We can
        use the knowledge that this is happening to note that astrolabe is
        passing stuff through marquised, and to ignore the ACK that marquised
        sends back to the original client on astrolabe when tracking end-to-end
        latency
        '''
        fields = line.strip().split()
        if len(fields) < 4: return
        # Keep track of hosts using marquised. This is a bit bruteforce, but we need to catch this
        # sort of thing early to not accidentally double-track ACKs
        #
        if fields[0][:10] == 'marquised:':
            self.using_marquised.add(fields[0][10:])
        key = ' '.join(fields[3:6])
        if key == 'collator_thread created_databurst frames':
            identity,message_id,points = fields[0],fields[2],int(fields[7])
            self.process_burst({ 'identity': identity, 'message id': message_id, 'points': points })
        # Anything past here is only in the poller thread. Skips a lot of stuff
        if fields[3] != 'poller_thread': return
        if key == 'poller_thread rx_ack_from broker':
            identity,message_id = fields[0],fields[2]
            self.process_ack({ 'identity': identity, 'message id': message_id })
        elif fields[4] == 'defer_to_disk':
            identity,message_id = fields[0],fields[2]
            self.process_deferred_write({ 'identity': identity, 'message id': message_id })
            if fields[5] == 'timeout_waiting_for_ack':
                self.process_send_timeout({ 'identity': identity, 'message id': message_id })
        elif fields[4] == 'read_from_disk':
            identity,message_id = fields[0],fields[2]
            self.process_deferred_read({ 'identity': identity, 'message id': message_id })
    def process_lines_from_stream(self):
        '''process any lines from our streams that are available to read'''
        while True:
            try:
                l = self.input_stream.readline()
                if not l:
                    # BUG FIX: at EOF readline() returns '' without
                    # raising IOError, which previously spun this loop
                    # forever; a non-blocking stream with no data raises
                    # IOError instead and is handled below
                    return
                self.process_line(l)
            except IOError:
                # Nothing left to read at the moment
                return
class ThroughputPrinter(object):
    '''Render the statistics collected by a ThroughputCounter as a
    fixed-width table, re-printing the column headers every 20 rows.'''
    def __init__(self, counter, outstream=sys.stdout, avgtimes=(600,60,1)):
        self.counter = counter
        self.outstream = outstream
        self.avgtimes = avgtimes  # averaging windows, in seconds
        self.lines_printed = 0
    def print_header(self):
        '''Write the three header rows: column-group titles, per-window
        sub-headers and a separator rule.'''
        colbreak = " " * 3
        header = '#'
        header += "mean points per second".center(29) + colbreak
        header += "mean acks per second".center(30) + colbreak
        header += "mean latency per point".center(30) + colbreak
        header += "deferred points written/s".center(30) + colbreak
        header += "deferred points read/s".center(30) + colbreak
        header += "points timed out sending/s".center(30) + colbreak
        header += "unacked".rjust(10) + '\n'
        header += "#"
        # the first group drops one character to make room for the '#'
        header += "".join(("(%dsec)" % secs).rjust(10) for secs in self.avgtimes)[1:]
        header += colbreak
        header += "".join(("(%dsec)" % secs).rjust(10) for secs in self.avgtimes)
        header += colbreak
        header += "".join(("(%dsec)" % secs).rjust(10) for secs in self.avgtimes)
        header += colbreak
        header += "".join(("(%dsec)" % secs).rjust(10) for secs in self.avgtimes)
        header += colbreak
        header += "".join(("(%dsec)" % secs).rjust(10) for secs in self.avgtimes)
        header += colbreak
        header += "".join(("(%dsec)" % secs).rjust(10) for secs in self.avgtimes)
        header += colbreak + "points".rjust(10) + '\n'
        header += '# ' + '-'*28 + colbreak + '-'*30 + colbreak + '-'*30
        header += colbreak + '-'*30 + colbreak + '-'*30 + colbreak + '-'*30
        header += colbreak + '-'*10 + '\n'
        self.outstream.write(header)
        self.outstream.flush()
    def print_throughput(self):
        '''Gather one row of statistics from the counter and write it.'''
        bursts_per_second = self.counter.get_points_per_seconds(self.avgtimes)
        acks_per_second = self.counter.get_acks_per_second(self.avgtimes)
        mean_latencies = self.counter.get_average_latencies(self.avgtimes)
        outstanding_points = self.counter.get_total_outstanding_points()
        points_deferred_to_disk = self.counter.get_deferred_points_written_per_second(self.avgtimes)
        points_read_from_disk = self.counter.get_deferred_points_read_per_second(self.avgtimes)
        points_timed_out_sending = self.counter.get_timed_out_points_per_second(self.avgtimes)
        # RENDER ALL THE THINGS!
        out = ""
        colbreak = " " * 3
        out += "".join((" %9.2f" % b for b in bursts_per_second))
        out += colbreak
        out += "".join((" %9.2f" % b for b in acks_per_second))
        out += colbreak
        out += "".join((" %9.2f" % b for b in mean_latencies))
        out += colbreak
        out += "".join((" %9.2f" % b for b in points_deferred_to_disk))
        out += colbreak
        out += "".join((" %9.2f" % b for b in points_read_from_disk))
        out += colbreak
        out += "".join((" %9.2f" % b for b in points_timed_out_sending))
        out += colbreak
        out += "%10d" % outstanding_points + '\n'
        # re-print the header every 20 data rows
        if self.lines_printed % 20 == 0:
            self.print_header()
        self.outstream.write(out)
        self.outstream.flush()
        self.lines_printed += 1
if __name__ == '__main__':
    # Make stdin non-blocking so process_lines_from_stream can drain
    # whatever is available without stalling the event loop
    fd = sys.stdin.fileno()
    fl = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
    reader = ThroughputCounter(sys.stdin)
    writer = ThroughputPrinter(reader, sys.stdout)
    # Run an event loop to process outstanding input every second
    # and then output the processed data
    event_loop = TimeAware(1, [ reader.process_lines_from_stream,
                                writer.print_throughput ])
    event_loop.run_forever()
# vim: set tabstop=4 expandtab:<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Tests for sample currency service with inflation. Similar to the `test_currency`
//! integration test, with the difference that the balance of each created wallet increases by 1
//! on each block. Correspondingly, the initial wallet balance is set to 0.
use exonum::{
crypto::{KeyPair, PublicKey},
helpers::Height,
};
use exonum_testkit::{ApiKind, Spec, TestKit, TestKitApi, TestKitBuilder};
use futures::{
stream::{self, StreamExt},
FutureExt,
};
use pretty_assertions::assert_eq;
use rand::Rng;
use crate::inflating_cryptocurrency::{
CreateWallet, CurrencyInterface, CurrencyService, Transfer, SERVICE_ID,
};
mod inflating_cryptocurrency;
/// Build a testkit emulating a 4-validator network with the inflating
/// cryptocurrency service deployed as its default instance.
fn init_testkit() -> TestKit {
    TestKitBuilder::validator()
        .with_validators(4)
        .with(Spec::new(CurrencyService).with_default_instance())
        .build()
}
/// Create a wallet named `name` via a signed `CreateWallet` transaction
/// committed in its own block; returns the owner's key pair.
fn create_wallet(testkit: &mut TestKit, name: &str) -> KeyPair {
    let keypair = KeyPair::random();
    // Create a pre-signed transaction
    let tx = keypair.create_wallet(SERVICE_ID, CreateWallet::new(name));
    let block = testkit.create_block_with_transaction(tx);
    // the wallet-creation transaction must have executed successfully
    block[0].status().unwrap();
    keypair
}
/// Query the service's public HTTP API for the balance of `pubkey`.
async fn get_balance(api: &TestKitApi, pubkey: &PublicKey) -> u64 {
    api.public(ApiKind::Service("cryptocurrency"))
        .get(&format!("v1/balance?pub_key={}", pubkey.to_hex()))
        .await
        .unwrap()
}
/// Each committed block should increase every wallet's balance by 1.
#[tokio::test]
async fn test_inflation() {
    let mut testkit = init_testkit();
    let alice = create_wallet(&mut testkit, "Alice");
    let api = testkit.api();
    // wallet creation itself produced one block -> balance 1
    assert_eq!(get_balance(&api, &alice.public_key()).await, 1);
    testkit.create_blocks_until(Height(10));
    assert_eq!(get_balance(&api, &alice.public_key()).await, 10);
}
/// Walk transfer orderings (separate blocks, adjacent blocks, same block)
/// and verify the resulting balances, using checkpoint/rollback to reset
/// blockchain state between scenarios.  Inline `// A: x + y` comments read
/// as "transferred balance + accumulated inflation".
#[tokio::test]
async fn test_transfer_scenarios() {
    let mut testkit = init_testkit();
    let api = testkit.api();
    // Create 2 wallets
    let alice = KeyPair::random();
    let tx_alice = alice.create_wallet(SERVICE_ID, CreateWallet::new("alice"));
    let bob = KeyPair::random();
    let tx_bob = bob.create_wallet(SERVICE_ID, CreateWallet::new("Bob"));
    testkit.create_block_with_transactions(vec![tx_alice, tx_bob]);
    testkit.create_blocks_until(Height(9));
    // Check that the initial Alice's and Bob's balances are persisted by the service
    assert_eq!(get_balance(&api, &alice.public_key()).await, 9);
    assert_eq!(get_balance(&api, &bob.public_key()).await, 9);
    // Transfer funds
    let tx_a_to_b = alice.transfer(
        SERVICE_ID,
        Transfer {
            to: bob.public_key(),
            amount: 5,
            seed: 0,
        },
    );
    let next_tx_a_to_b = alice.transfer(
        SERVICE_ID,
        Transfer {
            to: bob.public_key(),
            amount: 6,
            seed: 1,
        },
    );
    // Put transactions from A to B in separate blocks, allowing them both to succeed.
    testkit.checkpoint();
    testkit.create_block_with_transactions(vec![tx_a_to_b.clone()]); // A: 4 + 1, B: 14 + 1
    testkit.create_block_with_transactions(vec![]); // A: 4 + 2, B: 14 + 2
    testkit.create_block_with_transactions(vec![next_tx_a_to_b.clone()]); // A: 0 + 1, B: 20 + 3
    assert_eq!(get_balance(&api, &alice.public_key()).await, 1); // 0 + 1
    assert_eq!(get_balance(&api, &bob.public_key()).await, 23); // 20 + 3
    testkit.rollback();
    // If there is no block separating transactions, Alice's balance is insufficient
    // to complete the second transaction.
    testkit.checkpoint();
    testkit.create_block_with_transactions(vec![tx_a_to_b.clone()]); // A: 4 + 1, B: 14 + 1
    testkit.create_block_with_transactions(vec![next_tx_a_to_b.clone()]); // fails
    assert_eq!(get_balance(&api, &alice.public_key()).await, 6); // 4 + 2<|fim▁hole|> assert_eq!(get_balance(&api, &bob.public_key()).await, 16); // 14 + 2
    testkit.rollback();
    testkit.checkpoint();
    testkit.create_block_with_transactions(vec![next_tx_a_to_b.clone()]); // A: 3 + 1, B: 15 + 1
    testkit.create_block_with_transactions(vec![tx_a_to_b.clone()]); // fails
    assert_eq!(get_balance(&api, &alice.public_key()).await, 5); // 3 + 2
    assert_eq!(get_balance(&api, &bob.public_key()).await, 17); // 15 + 2
    testkit.rollback();
    // If the transactions are put in the same block, only the first transaction should succeed
    testkit.checkpoint();
    testkit.create_block_with_transactions(vec![tx_a_to_b.clone(), next_tx_a_to_b.clone()]);
    assert_eq!(get_balance(&api, &alice.public_key()).await, 5); // 4 + 1
    assert_eq!(get_balance(&api, &bob.public_key()).await, 15); // 14 + 1
    testkit.rollback();
    // Same here
    testkit.checkpoint();
    testkit.create_block_with_transactions(vec![next_tx_a_to_b, tx_a_to_b]);
    assert_eq!(get_balance(&api, &alice.public_key()).await, 4); // 3 + 1
    assert_eq!(get_balance(&api, &bob.public_key()).await, 16); // 15 + 1
    testkit.rollback();
}
/// Test randomly generated transfers among users without blockchain rollbacks.
#[tokio::test]
async fn test_fuzz_transfers() {
    const USERS: usize = 10;
    let mut rng = rand::thread_rng();
    let mut testkit = init_testkit();
    let api = testkit.api();
    // First, create users
    let keys_and_txs: Vec<_> = (0..USERS)
        .map(|i| {
            let keypair = KeyPair::random();
            let tx = keypair.create_wallet(SERVICE_ID, CreateWallet::new(format!("User #{}", i)));
            (keypair, tx)
        })
        .collect();
    let pubkeys: Vec<_> = keys_and_txs.iter().map(|(_, tx)| tx.author()).collect();
    testkit.create_block_with_transactions(keys_and_txs.iter().map(|(_, tx)| tx.clone()));
    for _ in 0..64 {
        // Invariant: inflation mints one point per wallet per block and
        // transfers only move funds, so the total is USERS * height.
        let total_balance: u64 = stream::iter(&pubkeys)
            .fold(0, |acc, key| get_balance(&api, key).map(move |x| x + acc))
            .await;
        assert_eq!(total_balance, (USERS as u64) * testkit.height().0);
        // up to 15 random transfers between random users per block
        let tx_count = rng.gen::<u32>() & 15;
        let height = testkit.height().0;
        let txs = (0..tx_count).map(|_| {
            let sender_idx = rng.gen_range(0..USERS);
            let (sender, _) = &keys_and_txs[sender_idx];
            let receiver = &pubkeys[rng.gen_range(0..USERS)];
            let amount = rng.gen_range(1..2 * height);
            sender.transfer(
                SERVICE_ID,
                Transfer {
                    to: *receiver,
                    amount,
                    seed: rng.gen::<u64>(),
                },
            )
        });
        testkit.create_block_with_transactions(txs);
    }
}
<|file_name|>cssparse.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public<|fim▁hole|>
/// Some little helpers for hooking up the HTML parser with the CSS parser.
use std::cell::Cell;
use std::comm;
use std::comm::Port;
use std::task;
use style::Stylesheet;
use servo_net::resource_task::{Load, ProgressMsg, Payload, Done, ResourceTask};
use extra::url::Url;
/// Where a style sheet comes from.
pub enum StylesheetProvenance {
    /// Fetch the sheet over the resource task from the given URL.
    UrlProvenance(Url),
    /// Inline sheet contents, plus the page URL for reference.
    InlineProvenance(Url, ~str),
}
/// Spawn a task that parses a style sheet (fetched from a URL or taken
/// from inline content) and delivers the finished `Stylesheet` on the
/// returned port.
pub fn spawn_css_parser(provenance: StylesheetProvenance,
                        resource_task: ResourceTask)
                     -> Port<Stylesheet> {
    let (result_port, result_chan) = comm::stream();
    // move the provenance into the spawned task via a Cell
    let provenance_cell = Cell::new(provenance);
    do task::spawn {
        // TODO: CSS parsing should take a base URL.
        let _url = do provenance_cell.with_ref |p| {
            match *p {
                UrlProvenance(ref the_url) => (*the_url).clone(),
                InlineProvenance(ref the_url, _) => (*the_url).clone()
            }
        };
        let sheet = match provenance_cell.take() {
            UrlProvenance(url) => {
                debug!("cssparse: loading style sheet at {:s}", url.to_str());
                let (input_port, input_chan) = comm::stream();
                resource_task.send(Load(url, input_chan));
                // stream the incoming data chunks straight into the parser
                Stylesheet::from_iter(ProgressMsgPortIterator {
                    progress_port: input_port.recv().progress_port
                })
            }
            InlineProvenance(_, data) => {
                Stylesheet::from_str(data)
            }
        };
        result_chan.send(sheet);
    }
    return result_port;
}
/// Adapts a port of resource-task progress messages into an iterator of
/// data chunks; iteration ends when `Done` arrives.
struct ProgressMsgPortIterator {
    progress_port: Port<ProgressMsg>
}
impl Iterator<~[u8]> for ProgressMsgPortIterator {
    fn next(&mut self) -> Option<~[u8]> {
        match self.progress_port.recv() {
            Payload(data) => Some(data),
            Done(*) => None
        }
    }
}<|fim▁end|> | * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
<|file_name|>zbytes.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub struct Bytes {
/// The underlying data
pub bytes: Vec<u8>
}
impl Bytes {
    /// Returns the length of the byte array.
    pub fn len(&self) -> usize {
        self.bytes.len()
    }

    /// Writes the byte (u8) to the index specified.
    ///
    /// If the vector isn't large enough it fills everything up to the index with zeros.
    pub fn write_byte(&mut self, byte: u8, index: usize) {
        // grow in a single allocation instead of pushing zeros one by one
        if self.bytes.len() <= index {
            self.bytes.resize(index + 1, 0);
        }
        self.bytes[index] = byte;
    }

    /// Appends a byte to the end of the data.
    pub fn append_byte(&mut self, byte: u8) {
        let index: usize = self.bytes.len();
        self.write_byte(byte, index);
    }

    /// Writes a u16 in two bytes with the correct byte-order for the Z-Machine
    /// (big-endian) at the specified index.
    pub fn write_u16(&mut self, value: u16, index: usize) {
        self.write_byte((value >> 8) as u8, index);
        self.write_byte((value & 0xff) as u8, index + 1);
    }

    /// Appends a u16 to the end of the data.
    pub fn append_u16(&mut self, value: u16) {
        let index: usize = self.bytes.len();
        self.write_u16(value, index);
    }

    /// Writes multiple bytes at the specified index.
    pub fn write_bytes(&mut self, bytes: &[u8], to_index: usize) {
        for (offset, &byte) in bytes.iter().enumerate() {
            self.write_byte(byte, to_index + offset);
        }
    }

    /// Appends an array of bytes at the end of the data.
    pub fn append_bytes(&mut self, bytes: &[u8]) {
        let index: usize = self.bytes.len();
        self.write_bytes(bytes, index);
    }

    /// Fills everything with zeros until but not including the index.
    ///
    /// `=> [index-1] == 0; [index] == nil;`
    pub fn write_zero_until(&mut self, index: usize) {
        // resize only when growing; never truncate existing data
        if self.bytes.len() < index {
            self.bytes.resize(index, 0);
        }
    }

    /// Prints the underlying byte array
    pub fn print(&self) {
        debug!("bytes: {:?}", self.bytes);
    }
}<|fim▁end|> | //! The `zbyte` module contains code
//! to deal with opcodes and zcode.
/// A struct that holds an array of bytes and provides some convenience functions. |
<|file_name|>mainEventbus.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
const eventbus = new TyphonLoggedEvents();
eventbus.setEventbusName('mainEventbus');
/**
* Exports an instance of `TyphonLoggedEvents` which adds asynchronous capabilities to `Backbone.Events` which is used
* as a main eventbus. Note that an instance of `TyphonLoggedEvents` is exported and is also associated to a mapped
* path, `mainEventbus` in the SystemJS extra configuration data loaded by all TyphonJS repos in
* `./config/config-app-paths.js`. By using a mapped path any other module may import the main eventbus via:
* `import eventbus from 'mainEventbus';`
*
* Note the above creation of `const eventbus` is a workaround for an ESDoc bug:
* https://github.com/esdoc/esdoc/issues/166
*
* Normally we can write `export default new TyphonLoggedEvents();`, but this currently breaks ESDoc.
*/
export default eventbus;<|fim▁end|> | 'use strict';
import TyphonLoggedEvents from './TyphonLoggedEvents.js'; |
<|file_name|>compat.hpp<|end_file_name|><|fim▁begin|>#ifndef UTILITY_COMPAT_HPP
#define UTILITY_COMPAT_HPP
<|fim▁hole|>namespace utility {
struct tm strptime( const char* timestamp );
}
#endif<|fim▁end|> | #include <boost/date_time/posix_time/posix_time.hpp>
|
<|file_name|>generate.go<|end_file_name|><|fim▁begin|>// Package generate implements functions generating container config files.
package generate
import (
"encoding/json"
"fmt"
"io"
"os"
"strings"
rspec "github.com/opencontainers/runtime-spec/specs-go"
"github.com/opencontainers/runtime-tools/generate/seccomp"
"github.com/opencontainers/runtime-tools/validate"
"github.com/syndtr/gocapability/capability"
)
var (
// Namespaces include the names of supported namespaces.
Namespaces = []string{"network", "pid", "mount", "ipc", "uts", "user", "cgroup"}
// we don't care about order...and this is way faster...
removeFunc = func(s []string, i int) []string {
s[i] = s[len(s)-1]
return s[:len(s)-1]
}
)
// Generator represents a generator for a container config.
type Generator struct {
Config *rspec.Spec
HostSpecific bool
// This is used to keep a cache of the ENVs added to improve
// performance when adding a huge number of ENV variables
envMap map[string]int
}
// ExportOptions have toggles for exporting only certain parts of the specification
type ExportOptions struct {
Seccomp bool // seccomp toggles if only seccomp should be exported
}
// New creates a configuration Generator with the default
// configuration for the target operating system.
func New(os string) (generator Generator, err error) {
if os != "linux" && os != "solaris" && os != "windows" {
return generator, fmt.Errorf("no defaults configured for %s", os)
}
config := rspec.Spec{
Version: rspec.Version,
Hostname: "mrsdalloway",
}
if os == "windows" {
config.Process = &rspec.Process{
Args: []string{
"cmd",
},
Cwd: `C:\`,
}
config.Windows = &rspec.Windows{}
} else {
config.Root = &rspec.Root{
Path: "rootfs",
Readonly: false,
}
config.Process = &rspec.Process{
Terminal: false,
Args: []string{
"sh",
},
}
}
if os == "linux" || os == "solaris" {
config.Process.User = rspec.User{}
config.Process.Env = []string{
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"TERM=xterm",
}
config.Process.Cwd = "/"
config.Process.Rlimits = []rspec.POSIXRlimit{
{
Type: "RLIMIT_NOFILE",
Hard: uint64(1024),
Soft: uint64(1024),
},
}
}
if os == "linux" {
config.Process.Capabilities = &rspec.LinuxCapabilities{
Bounding: []string{
"CAP_CHOWN",
"CAP_DAC_OVERRIDE",
"CAP_FSETID",
"CAP_FOWNER",
"CAP_MKNOD",
"CAP_NET_RAW",
"CAP_SETGID",
"CAP_SETUID",
"CAP_SETFCAP",
"CAP_SETPCAP",
"CAP_NET_BIND_SERVICE",<|fim▁hole|> },
Permitted: []string{
"CAP_CHOWN",
"CAP_DAC_OVERRIDE",
"CAP_FSETID",
"CAP_FOWNER",
"CAP_MKNOD",
"CAP_NET_RAW",
"CAP_SETGID",
"CAP_SETUID",
"CAP_SETFCAP",
"CAP_SETPCAP",
"CAP_NET_BIND_SERVICE",
"CAP_SYS_CHROOT",
"CAP_KILL",
"CAP_AUDIT_WRITE",
},
Inheritable: []string{
"CAP_CHOWN",
"CAP_DAC_OVERRIDE",
"CAP_FSETID",
"CAP_FOWNER",
"CAP_MKNOD",
"CAP_NET_RAW",
"CAP_SETGID",
"CAP_SETUID",
"CAP_SETFCAP",
"CAP_SETPCAP",
"CAP_NET_BIND_SERVICE",
"CAP_SYS_CHROOT",
"CAP_KILL",
"CAP_AUDIT_WRITE",
},
Effective: []string{
"CAP_CHOWN",
"CAP_DAC_OVERRIDE",
"CAP_FSETID",
"CAP_FOWNER",
"CAP_MKNOD",
"CAP_NET_RAW",
"CAP_SETGID",
"CAP_SETUID",
"CAP_SETFCAP",
"CAP_SETPCAP",
"CAP_NET_BIND_SERVICE",
"CAP_SYS_CHROOT",
"CAP_KILL",
"CAP_AUDIT_WRITE",
},
Ambient: []string{
"CAP_CHOWN",
"CAP_DAC_OVERRIDE",
"CAP_FSETID",
"CAP_FOWNER",
"CAP_MKNOD",
"CAP_NET_RAW",
"CAP_SETGID",
"CAP_SETUID",
"CAP_SETFCAP",
"CAP_SETPCAP",
"CAP_NET_BIND_SERVICE",
"CAP_SYS_CHROOT",
"CAP_KILL",
"CAP_AUDIT_WRITE",
},
}
config.Mounts = []rspec.Mount{
{
Destination: "/proc",
Type: "proc",
Source: "proc",
Options: []string{"nosuid", "noexec", "nodev"},
},
{
Destination: "/dev",
Type: "tmpfs",
Source: "tmpfs",
Options: []string{"nosuid", "noexec", "strictatime", "mode=755", "size=65536k"},
},
{
Destination: "/dev/pts",
Type: "devpts",
Source: "devpts",
Options: []string{"nosuid", "noexec", "newinstance", "ptmxmode=0666", "mode=0620", "gid=5"},
},
{
Destination: "/dev/shm",
Type: "tmpfs",
Source: "shm",
Options: []string{"nosuid", "noexec", "nodev", "mode=1777", "size=65536k"},
},
{
Destination: "/dev/mqueue",
Type: "mqueue",
Source: "mqueue",
Options: []string{"nosuid", "noexec", "nodev"},
},
{
Destination: "/sys",
Type: "sysfs",
Source: "sysfs",
Options: []string{"nosuid", "noexec", "nodev", "ro"},
},
}
config.Linux = &rspec.Linux{
Resources: &rspec.LinuxResources{
Devices: []rspec.LinuxDeviceCgroup{
{
Allow: false,
Access: "rwm",
},
},
},
Namespaces: []rspec.LinuxNamespace{
{
Type: "pid",
},
{
Type: "network",
},
{
Type: "ipc",
},
{
Type: "uts",
},
{
Type: "mount",
},
},
Seccomp: seccomp.DefaultProfile(&config),
}
}
envCache := map[string]int{}
if config.Process != nil {
envCache = createEnvCacheMap(config.Process.Env)
}
return Generator{Config: &config, envMap: envCache}, nil
}
// NewFromSpec creates a configuration Generator from a given
// configuration.
//
// Deprecated: Replace with:
//
// generator := Generator{Config: config}
func NewFromSpec(config *rspec.Spec) Generator {
envCache := map[string]int{}
if config != nil && config.Process != nil {
envCache = createEnvCacheMap(config.Process.Env)
}
return Generator{
Config: config,
envMap: envCache,
}
}
// NewFromFile loads the template specified in a file into a
// configuration Generator.
func NewFromFile(path string) (Generator, error) {
cf, err := os.Open(path)
if err != nil {
if os.IsNotExist(err) {
return Generator{}, fmt.Errorf("template configuration at %s not found", path)
}
return Generator{}, err
}
defer cf.Close()
return NewFromTemplate(cf)
}
// NewFromTemplate loads the template from io.Reader into a
// configuration Generator.
func NewFromTemplate(r io.Reader) (Generator, error) {
var config rspec.Spec
if err := json.NewDecoder(r).Decode(&config); err != nil {
return Generator{}, err
}
envCache := map[string]int{}
if config.Process != nil {
envCache = createEnvCacheMap(config.Process.Env)
}
return Generator{
Config: &config,
envMap: envCache,
}, nil
}
// createEnvCacheMap creates a hash map with the ENV variables given by the config
func createEnvCacheMap(env []string) map[string]int {
envMap := make(map[string]int, len(env))
for i, val := range env {
envMap[val] = i
}
return envMap
}
// SetSpec sets the configuration in the Generator g.
//
// Deprecated: Replace with:
//
// Use generator.Config = config
func (g *Generator) SetSpec(config *rspec.Spec) {
g.Config = config
}
// Spec gets the configuration from the Generator g.
//
// Deprecated: Replace with generator.Config.
func (g *Generator) Spec() *rspec.Spec {
return g.Config
}
// Save writes the configuration into w.
func (g *Generator) Save(w io.Writer, exportOpts ExportOptions) (err error) {
var data []byte
if g.Config.Linux != nil {
buf, err := json.Marshal(g.Config.Linux)
if err != nil {
return err
}
if string(buf) == "{}" {
g.Config.Linux = nil
}
}
if exportOpts.Seccomp {
data, err = json.MarshalIndent(g.Config.Linux.Seccomp, "", "\t")
} else {
data, err = json.MarshalIndent(g.Config, "", "\t")
}
if err != nil {
return err
}
_, err = w.Write(data)
if err != nil {
return err
}
return nil
}
// SaveToFile writes the configuration into a file.
func (g *Generator) SaveToFile(path string, exportOpts ExportOptions) error {
f, err := os.Create(path)
if err != nil {
return err
}
defer f.Close()
return g.Save(f, exportOpts)
}
// SetVersion sets g.Config.Version.
func (g *Generator) SetVersion(version string) {
g.initConfig()
g.Config.Version = version
}
// SetRootPath sets g.Config.Root.Path.
func (g *Generator) SetRootPath(path string) {
g.initConfigRoot()
g.Config.Root.Path = path
}
// SetRootReadonly sets g.Config.Root.Readonly.
func (g *Generator) SetRootReadonly(b bool) {
g.initConfigRoot()
g.Config.Root.Readonly = b
}
// SetHostname sets g.Config.Hostname.
func (g *Generator) SetHostname(s string) {
g.initConfig()
g.Config.Hostname = s
}
// SetOCIVersion sets g.Config.Version.
func (g *Generator) SetOCIVersion(s string) {
g.initConfig()
g.Config.Version = s
}
// ClearAnnotations clears g.Config.Annotations.
func (g *Generator) ClearAnnotations() {
if g.Config == nil {
return
}
g.Config.Annotations = make(map[string]string)
}
// AddAnnotation adds an annotation into g.Config.Annotations.
func (g *Generator) AddAnnotation(key, value string) {
g.initConfigAnnotations()
g.Config.Annotations[key] = value
}
// RemoveAnnotation remove an annotation from g.Config.Annotations.
func (g *Generator) RemoveAnnotation(key string) {
if g.Config == nil || g.Config.Annotations == nil {
return
}
delete(g.Config.Annotations, key)
}
// RemoveHostname removes g.Config.Hostname, setting it to an empty string.
func (g *Generator) RemoveHostname() {
if g.Config == nil {
return
}
g.Config.Hostname = ""
}
// SetProcessConsoleSize sets g.Config.Process.ConsoleSize.
func (g *Generator) SetProcessConsoleSize(width, height uint) {
g.initConfigProcessConsoleSize()
g.Config.Process.ConsoleSize.Width = width
g.Config.Process.ConsoleSize.Height = height
}
// SetProcessUID sets g.Config.Process.User.UID.
func (g *Generator) SetProcessUID(uid uint32) {
g.initConfigProcess()
g.Config.Process.User.UID = uid
}
// SetProcessUsername sets g.Config.Process.User.Username.
func (g *Generator) SetProcessUsername(username string) {
g.initConfigProcess()
g.Config.Process.User.Username = username
}
// SetProcessUmask sets g.Config.Process.User.Umask.
func (g *Generator) SetProcessUmask(umask uint32) {
g.initConfigProcess()
u := umask
g.Config.Process.User.Umask = &u
}
// SetProcessGID sets g.Config.Process.User.GID.
func (g *Generator) SetProcessGID(gid uint32) {
g.initConfigProcess()
g.Config.Process.User.GID = gid
}
// SetProcessCwd sets g.Config.Process.Cwd.
func (g *Generator) SetProcessCwd(cwd string) {
g.initConfigProcess()
g.Config.Process.Cwd = cwd
}
// SetProcessNoNewPrivileges sets g.Config.Process.NoNewPrivileges.
func (g *Generator) SetProcessNoNewPrivileges(b bool) {
g.initConfigProcess()
g.Config.Process.NoNewPrivileges = b
}
// SetProcessTerminal sets g.Config.Process.Terminal.
func (g *Generator) SetProcessTerminal(b bool) {
g.initConfigProcess()
g.Config.Process.Terminal = b
}
// SetProcessApparmorProfile sets g.Config.Process.ApparmorProfile.
func (g *Generator) SetProcessApparmorProfile(prof string) {
g.initConfigProcess()
g.Config.Process.ApparmorProfile = prof
}
// SetProcessArgs sets g.Config.Process.Args.
func (g *Generator) SetProcessArgs(args []string) {
g.initConfigProcess()
g.Config.Process.Args = args
}
// ClearProcessEnv clears g.Config.Process.Env.
func (g *Generator) ClearProcessEnv() {
if g.Config == nil || g.Config.Process == nil {
return
}
g.Config.Process.Env = []string{}
// Clear out the env cache map as well
g.envMap = map[string]int{}
}
// AddProcessEnv adds name=value into g.Config.Process.Env, or replaces an
// existing entry with the given name.
func (g *Generator) AddProcessEnv(name, value string) {
if name == "" {
return
}
g.initConfigProcess()
g.addEnv(fmt.Sprintf("%s=%s", name, value), name)
}
// AddMultipleProcessEnv adds multiple name=value into g.Config.Process.Env, or replaces
// existing entries with the given name.
func (g *Generator) AddMultipleProcessEnv(envs []string) {
g.initConfigProcess()
for _, val := range envs {
split := strings.SplitN(val, "=", 2)
g.addEnv(val, split[0])
}
}
// addEnv looks through adds ENV to the Process and checks envMap for
// any duplicates
// This is called by both AddMultipleProcessEnv and AddProcessEnv
func (g *Generator) addEnv(env, key string) {
if idx, ok := g.envMap[key]; ok {
// The ENV exists in the cache, so change its value in g.Config.Process.Env
g.Config.Process.Env[idx] = env
} else {
// else the env doesn't exist, so add it and add it's index to g.envMap
g.Config.Process.Env = append(g.Config.Process.Env, env)
g.envMap[key] = len(g.Config.Process.Env) - 1
}
}
// AddProcessRlimits adds rlimit into g.Config.Process.Rlimits.
func (g *Generator) AddProcessRlimits(rType string, rHard uint64, rSoft uint64) {
g.initConfigProcess()
for i, rlimit := range g.Config.Process.Rlimits {
if rlimit.Type == rType {
g.Config.Process.Rlimits[i].Hard = rHard
g.Config.Process.Rlimits[i].Soft = rSoft
return
}
}
newRlimit := rspec.POSIXRlimit{
Type: rType,
Hard: rHard,
Soft: rSoft,
}
g.Config.Process.Rlimits = append(g.Config.Process.Rlimits, newRlimit)
}
// RemoveProcessRlimits removes a rlimit from g.Config.Process.Rlimits.
func (g *Generator) RemoveProcessRlimits(rType string) {
if g.Config == nil || g.Config.Process == nil {
return
}
for i, rlimit := range g.Config.Process.Rlimits {
if rlimit.Type == rType {
g.Config.Process.Rlimits = append(g.Config.Process.Rlimits[:i], g.Config.Process.Rlimits[i+1:]...)
return
}
}
}
// ClearProcessRlimits clear g.Config.Process.Rlimits.
func (g *Generator) ClearProcessRlimits() {
if g.Config == nil || g.Config.Process == nil {
return
}
g.Config.Process.Rlimits = []rspec.POSIXRlimit{}
}
// ClearProcessAdditionalGids clear g.Config.Process.AdditionalGids.
func (g *Generator) ClearProcessAdditionalGids() {
if g.Config == nil || g.Config.Process == nil {
return
}
g.Config.Process.User.AdditionalGids = []uint32{}
}
// AddProcessAdditionalGid adds an additional gid into g.Config.Process.AdditionalGids.
func (g *Generator) AddProcessAdditionalGid(gid uint32) {
g.initConfigProcess()
for _, group := range g.Config.Process.User.AdditionalGids {
if group == gid {
return
}
}
g.Config.Process.User.AdditionalGids = append(g.Config.Process.User.AdditionalGids, gid)
}
// SetProcessSelinuxLabel sets g.Config.Process.SelinuxLabel.
func (g *Generator) SetProcessSelinuxLabel(label string) {
g.initConfigProcess()
g.Config.Process.SelinuxLabel = label
}
// SetLinuxCgroupsPath sets g.Config.Linux.CgroupsPath.
func (g *Generator) SetLinuxCgroupsPath(path string) {
g.initConfigLinux()
g.Config.Linux.CgroupsPath = path
}
// SetLinuxIntelRdtClosID sets g.Config.Linux.IntelRdt.ClosID
func (g *Generator) SetLinuxIntelRdtClosID(clos string) {
g.initConfigLinuxIntelRdt()
g.Config.Linux.IntelRdt.ClosID = clos
}
// SetLinuxIntelRdtL3CacheSchema sets g.Config.Linux.IntelRdt.L3CacheSchema
func (g *Generator) SetLinuxIntelRdtL3CacheSchema(schema string) {
g.initConfigLinuxIntelRdt()
g.Config.Linux.IntelRdt.L3CacheSchema = schema
}
// SetLinuxMountLabel sets g.Config.Linux.MountLabel.
func (g *Generator) SetLinuxMountLabel(label string) {
g.initConfigLinux()
g.Config.Linux.MountLabel = label
}
// SetProcessOOMScoreAdj sets g.Config.Process.OOMScoreAdj.
func (g *Generator) SetProcessOOMScoreAdj(adj int) {
g.initConfigProcess()
g.Config.Process.OOMScoreAdj = &adj
}
// SetLinuxResourcesBlockIOLeafWeight sets g.Config.Linux.Resources.BlockIO.LeafWeight.
func (g *Generator) SetLinuxResourcesBlockIOLeafWeight(weight uint16) {
g.initConfigLinuxResourcesBlockIO()
g.Config.Linux.Resources.BlockIO.LeafWeight = &weight
}
// AddLinuxResourcesBlockIOLeafWeightDevice adds or sets g.Config.Linux.Resources.BlockIO.WeightDevice.LeafWeight.
func (g *Generator) AddLinuxResourcesBlockIOLeafWeightDevice(major int64, minor int64, weight uint16) {
g.initConfigLinuxResourcesBlockIO()
for i, weightDevice := range g.Config.Linux.Resources.BlockIO.WeightDevice {
if weightDevice.Major == major && weightDevice.Minor == minor {
g.Config.Linux.Resources.BlockIO.WeightDevice[i].LeafWeight = &weight
return
}
}
weightDevice := new(rspec.LinuxWeightDevice)
weightDevice.Major = major
weightDevice.Minor = minor
weightDevice.LeafWeight = &weight
g.Config.Linux.Resources.BlockIO.WeightDevice = append(g.Config.Linux.Resources.BlockIO.WeightDevice, *weightDevice)
}
// DropLinuxResourcesBlockIOLeafWeightDevice drops a item form g.Config.Linux.Resources.BlockIO.WeightDevice.LeafWeight
func (g *Generator) DropLinuxResourcesBlockIOLeafWeightDevice(major int64, minor int64) {
if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Resources == nil || g.Config.Linux.Resources.BlockIO == nil {
return
}
for i, weightDevice := range g.Config.Linux.Resources.BlockIO.WeightDevice {
if weightDevice.Major == major && weightDevice.Minor == minor {
if weightDevice.Weight != nil {
newWeightDevice := new(rspec.LinuxWeightDevice)
newWeightDevice.Major = major
newWeightDevice.Minor = minor
newWeightDevice.Weight = weightDevice.Weight
g.Config.Linux.Resources.BlockIO.WeightDevice[i] = *newWeightDevice
} else {
g.Config.Linux.Resources.BlockIO.WeightDevice = append(g.Config.Linux.Resources.BlockIO.WeightDevice[:i], g.Config.Linux.Resources.BlockIO.WeightDevice[i+1:]...)
}
return
}
}
}
// SetLinuxResourcesBlockIOWeight sets g.Config.Linux.Resources.BlockIO.Weight.
func (g *Generator) SetLinuxResourcesBlockIOWeight(weight uint16) {
g.initConfigLinuxResourcesBlockIO()
g.Config.Linux.Resources.BlockIO.Weight = &weight
}
// AddLinuxResourcesBlockIOWeightDevice adds or sets g.Config.Linux.Resources.BlockIO.WeightDevice.Weight.
func (g *Generator) AddLinuxResourcesBlockIOWeightDevice(major int64, minor int64, weight uint16) {
g.initConfigLinuxResourcesBlockIO()
for i, weightDevice := range g.Config.Linux.Resources.BlockIO.WeightDevice {
if weightDevice.Major == major && weightDevice.Minor == minor {
g.Config.Linux.Resources.BlockIO.WeightDevice[i].Weight = &weight
return
}
}
weightDevice := new(rspec.LinuxWeightDevice)
weightDevice.Major = major
weightDevice.Minor = minor
weightDevice.Weight = &weight
g.Config.Linux.Resources.BlockIO.WeightDevice = append(g.Config.Linux.Resources.BlockIO.WeightDevice, *weightDevice)
}
// DropLinuxResourcesBlockIOWeightDevice drops a item form g.Config.Linux.Resources.BlockIO.WeightDevice.Weight
func (g *Generator) DropLinuxResourcesBlockIOWeightDevice(major int64, minor int64) {
if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Resources == nil || g.Config.Linux.Resources.BlockIO == nil {
return
}
for i, weightDevice := range g.Config.Linux.Resources.BlockIO.WeightDevice {
if weightDevice.Major == major && weightDevice.Minor == minor {
if weightDevice.LeafWeight != nil {
newWeightDevice := new(rspec.LinuxWeightDevice)
newWeightDevice.Major = major
newWeightDevice.Minor = minor
newWeightDevice.LeafWeight = weightDevice.LeafWeight
g.Config.Linux.Resources.BlockIO.WeightDevice[i] = *newWeightDevice
} else {
g.Config.Linux.Resources.BlockIO.WeightDevice = append(g.Config.Linux.Resources.BlockIO.WeightDevice[:i], g.Config.Linux.Resources.BlockIO.WeightDevice[i+1:]...)
}
return
}
}
}
// AddLinuxResourcesBlockIOThrottleReadBpsDevice adds or sets g.Config.Linux.Resources.BlockIO.ThrottleReadBpsDevice.
func (g *Generator) AddLinuxResourcesBlockIOThrottleReadBpsDevice(major int64, minor int64, rate uint64) {
g.initConfigLinuxResourcesBlockIO()
throttleDevices := addOrReplaceBlockIOThrottleDevice(g.Config.Linux.Resources.BlockIO.ThrottleReadBpsDevice, major, minor, rate)
g.Config.Linux.Resources.BlockIO.ThrottleReadBpsDevice = throttleDevices
}
// DropLinuxResourcesBlockIOThrottleReadBpsDevice drops a item from g.Config.Linux.Resources.BlockIO.ThrottleReadBpsDevice.
func (g *Generator) DropLinuxResourcesBlockIOThrottleReadBpsDevice(major int64, minor int64) {
if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Resources == nil || g.Config.Linux.Resources.BlockIO == nil {
return
}
throttleDevices := dropBlockIOThrottleDevice(g.Config.Linux.Resources.BlockIO.ThrottleReadBpsDevice, major, minor)
g.Config.Linux.Resources.BlockIO.ThrottleReadBpsDevice = throttleDevices
}
// AddLinuxResourcesBlockIOThrottleReadIOPSDevice adds or sets g.Config.Linux.Resources.BlockIO.ThrottleReadIOPSDevice.
func (g *Generator) AddLinuxResourcesBlockIOThrottleReadIOPSDevice(major int64, minor int64, rate uint64) {
g.initConfigLinuxResourcesBlockIO()
throttleDevices := addOrReplaceBlockIOThrottleDevice(g.Config.Linux.Resources.BlockIO.ThrottleReadIOPSDevice, major, minor, rate)
g.Config.Linux.Resources.BlockIO.ThrottleReadIOPSDevice = throttleDevices
}
// DropLinuxResourcesBlockIOThrottleReadIOPSDevice drops a item from g.Config.Linux.Resources.BlockIO.ThrottleReadIOPSDevice.
func (g *Generator) DropLinuxResourcesBlockIOThrottleReadIOPSDevice(major int64, minor int64) {
if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Resources == nil || g.Config.Linux.Resources.BlockIO == nil {
return
}
throttleDevices := dropBlockIOThrottleDevice(g.Config.Linux.Resources.BlockIO.ThrottleReadIOPSDevice, major, minor)
g.Config.Linux.Resources.BlockIO.ThrottleReadIOPSDevice = throttleDevices
}
// AddLinuxResourcesBlockIOThrottleWriteBpsDevice adds or sets g.Config.Linux.Resources.BlockIO.ThrottleWriteBpsDevice.
func (g *Generator) AddLinuxResourcesBlockIOThrottleWriteBpsDevice(major int64, minor int64, rate uint64) {
g.initConfigLinuxResourcesBlockIO()
throttleDevices := addOrReplaceBlockIOThrottleDevice(g.Config.Linux.Resources.BlockIO.ThrottleWriteBpsDevice, major, minor, rate)
g.Config.Linux.Resources.BlockIO.ThrottleWriteBpsDevice = throttleDevices
}
// DropLinuxResourcesBlockIOThrottleWriteBpsDevice drops a item from g.Config.Linux.Resources.BlockIO.ThrottleWriteBpsDevice.
func (g *Generator) DropLinuxResourcesBlockIOThrottleWriteBpsDevice(major int64, minor int64) {
if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Resources == nil || g.Config.Linux.Resources.BlockIO == nil {
return
}
throttleDevices := dropBlockIOThrottleDevice(g.Config.Linux.Resources.BlockIO.ThrottleWriteBpsDevice, major, minor)
g.Config.Linux.Resources.BlockIO.ThrottleWriteBpsDevice = throttleDevices
}
// AddLinuxResourcesBlockIOThrottleWriteIOPSDevice adds or sets g.Config.Linux.Resources.BlockIO.ThrottleWriteIOPSDevice.
func (g *Generator) AddLinuxResourcesBlockIOThrottleWriteIOPSDevice(major int64, minor int64, rate uint64) {
g.initConfigLinuxResourcesBlockIO()
throttleDevices := addOrReplaceBlockIOThrottleDevice(g.Config.Linux.Resources.BlockIO.ThrottleWriteIOPSDevice, major, minor, rate)
g.Config.Linux.Resources.BlockIO.ThrottleWriteIOPSDevice = throttleDevices
}
// DropLinuxResourcesBlockIOThrottleWriteIOPSDevice drops a item from g.Config.Linux.Resources.BlockIO.ThrottleWriteIOPSDevice.
func (g *Generator) DropLinuxResourcesBlockIOThrottleWriteIOPSDevice(major int64, minor int64) {
if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Resources == nil || g.Config.Linux.Resources.BlockIO == nil {
return
}
throttleDevices := dropBlockIOThrottleDevice(g.Config.Linux.Resources.BlockIO.ThrottleWriteIOPSDevice, major, minor)
g.Config.Linux.Resources.BlockIO.ThrottleWriteIOPSDevice = throttleDevices
}
// SetLinuxResourcesCPUShares sets g.Config.Linux.Resources.CPU.Shares.
func (g *Generator) SetLinuxResourcesCPUShares(shares uint64) {
g.InitConfigLinuxResourcesCPU()
g.Config.Linux.Resources.CPU.Shares = &shares
}
// SetLinuxResourcesCPUQuota sets g.Config.Linux.Resources.CPU.Quota.
func (g *Generator) SetLinuxResourcesCPUQuota(quota int64) {
g.InitConfigLinuxResourcesCPU()
g.Config.Linux.Resources.CPU.Quota = "a
}
// SetLinuxResourcesCPUPeriod sets g.Config.Linux.Resources.CPU.Period.
func (g *Generator) SetLinuxResourcesCPUPeriod(period uint64) {
g.InitConfigLinuxResourcesCPU()
g.Config.Linux.Resources.CPU.Period = &period
}
// SetLinuxResourcesCPURealtimeRuntime sets g.Config.Linux.Resources.CPU.RealtimeRuntime.
func (g *Generator) SetLinuxResourcesCPURealtimeRuntime(time int64) {
g.InitConfigLinuxResourcesCPU()
g.Config.Linux.Resources.CPU.RealtimeRuntime = &time
}
// SetLinuxResourcesCPURealtimePeriod sets g.Config.Linux.Resources.CPU.RealtimePeriod.
func (g *Generator) SetLinuxResourcesCPURealtimePeriod(period uint64) {
g.InitConfigLinuxResourcesCPU()
g.Config.Linux.Resources.CPU.RealtimePeriod = &period
}
// SetLinuxResourcesCPUCpus sets g.Config.Linux.Resources.CPU.Cpus.
func (g *Generator) SetLinuxResourcesCPUCpus(cpus string) {
g.InitConfigLinuxResourcesCPU()
g.Config.Linux.Resources.CPU.Cpus = cpus
}
// SetLinuxResourcesCPUMems sets g.Config.Linux.Resources.CPU.Mems.
func (g *Generator) SetLinuxResourcesCPUMems(mems string) {
g.InitConfigLinuxResourcesCPU()
g.Config.Linux.Resources.CPU.Mems = mems
}
// AddLinuxResourcesHugepageLimit adds or sets g.Config.Linux.Resources.HugepageLimits.
func (g *Generator) AddLinuxResourcesHugepageLimit(pageSize string, limit uint64) {
hugepageLimit := rspec.LinuxHugepageLimit{
Pagesize: pageSize,
Limit: limit,
}
g.initConfigLinuxResources()
for i, pageLimit := range g.Config.Linux.Resources.HugepageLimits {
if pageLimit.Pagesize == pageSize {
g.Config.Linux.Resources.HugepageLimits[i].Limit = limit
return
}
}
g.Config.Linux.Resources.HugepageLimits = append(g.Config.Linux.Resources.HugepageLimits, hugepageLimit)
}
// DropLinuxResourcesHugepageLimit drops a hugepage limit from g.Config.Linux.Resources.HugepageLimits.
func (g *Generator) DropLinuxResourcesHugepageLimit(pageSize string) {
if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Resources == nil {
return
}
for i, pageLimit := range g.Config.Linux.Resources.HugepageLimits {
if pageLimit.Pagesize == pageSize {
g.Config.Linux.Resources.HugepageLimits = append(g.Config.Linux.Resources.HugepageLimits[:i], g.Config.Linux.Resources.HugepageLimits[i+1:]...)
return
}
}
}
// AddLinuxResourcesUnified sets the g.Config.Linux.Resources.Unified
func (g *Generator) SetLinuxResourcesUnified(unified map[string]string) {
g.initConfigLinuxResourcesUnified()
for k, v := range unified {
g.Config.Linux.Resources.Unified[k] = v
}
}
// AddLinuxResourcesUnified adds or updates the key-value pair from g.Config.Linux.Resources.Unified
func (g *Generator) AddLinuxResourcesUnified(key, val string) {
g.initConfigLinuxResourcesUnified()
g.Config.Linux.Resources.Unified[key] = val
}
// DropLinuxResourcesUnified drops a key-value pair from g.Config.Linux.Resources.Unified
func (g *Generator) DropLinuxResourcesUnified(key string) {
if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Resources == nil || g.Config.Linux.Resources.Unified == nil {
return
}
delete(g.Config.Linux.Resources.Unified, key)
}
// SetLinuxResourcesMemoryLimit sets g.Config.Linux.Resources.Memory.Limit.
func (g *Generator) SetLinuxResourcesMemoryLimit(limit int64) {
g.initConfigLinuxResourcesMemory()
g.Config.Linux.Resources.Memory.Limit = &limit
}
// SetLinuxResourcesMemoryReservation sets g.Config.Linux.Resources.Memory.Reservation.
func (g *Generator) SetLinuxResourcesMemoryReservation(reservation int64) {
g.initConfigLinuxResourcesMemory()
g.Config.Linux.Resources.Memory.Reservation = &reservation
}
// SetLinuxResourcesMemorySwap sets g.Config.Linux.Resources.Memory.Swap.
func (g *Generator) SetLinuxResourcesMemorySwap(swap int64) {
g.initConfigLinuxResourcesMemory()
g.Config.Linux.Resources.Memory.Swap = &swap
}
// SetLinuxResourcesMemoryKernel sets g.Config.Linux.Resources.Memory.Kernel.
func (g *Generator) SetLinuxResourcesMemoryKernel(kernel int64) {
g.initConfigLinuxResourcesMemory()
g.Config.Linux.Resources.Memory.Kernel = &kernel
}
// SetLinuxResourcesMemoryKernelTCP sets g.Config.Linux.Resources.Memory.KernelTCP.
func (g *Generator) SetLinuxResourcesMemoryKernelTCP(kernelTCP int64) {
g.initConfigLinuxResourcesMemory()
g.Config.Linux.Resources.Memory.KernelTCP = &kernelTCP
}
// SetLinuxResourcesMemorySwappiness sets g.Config.Linux.Resources.Memory.Swappiness.
func (g *Generator) SetLinuxResourcesMemorySwappiness(swappiness uint64) {
g.initConfigLinuxResourcesMemory()
g.Config.Linux.Resources.Memory.Swappiness = &swappiness
}
// SetLinuxResourcesMemoryDisableOOMKiller sets g.Config.Linux.Resources.Memory.DisableOOMKiller.
func (g *Generator) SetLinuxResourcesMemoryDisableOOMKiller(disable bool) {
g.initConfigLinuxResourcesMemory()
g.Config.Linux.Resources.Memory.DisableOOMKiller = &disable
}
// SetLinuxResourcesNetworkClassID sets g.Config.Linux.Resources.Network.ClassID.
func (g *Generator) SetLinuxResourcesNetworkClassID(classid uint32) {
g.initConfigLinuxResourcesNetwork()
g.Config.Linux.Resources.Network.ClassID = &classid
}
// AddLinuxResourcesNetworkPriorities adds or sets g.Config.Linux.Resources.Network.Priorities.
func (g *Generator) AddLinuxResourcesNetworkPriorities(name string, prio uint32) {
g.initConfigLinuxResourcesNetwork()
for i, netPriority := range g.Config.Linux.Resources.Network.Priorities {
if netPriority.Name == name {
g.Config.Linux.Resources.Network.Priorities[i].Priority = prio
return
}
}
interfacePrio := new(rspec.LinuxInterfacePriority)
interfacePrio.Name = name
interfacePrio.Priority = prio
g.Config.Linux.Resources.Network.Priorities = append(g.Config.Linux.Resources.Network.Priorities, *interfacePrio)
}
// DropLinuxResourcesNetworkPriorities drops one item from g.Config.Linux.Resources.Network.Priorities.
func (g *Generator) DropLinuxResourcesNetworkPriorities(name string) {
if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Resources == nil || g.Config.Linux.Resources.Network == nil {
return
}
for i, netPriority := range g.Config.Linux.Resources.Network.Priorities {
if netPriority.Name == name {
g.Config.Linux.Resources.Network.Priorities = append(g.Config.Linux.Resources.Network.Priorities[:i], g.Config.Linux.Resources.Network.Priorities[i+1:]...)
return
}
}
}
// SetLinuxResourcesPidsLimit sets g.Config.Linux.Resources.Pids.Limit.
func (g *Generator) SetLinuxResourcesPidsLimit(limit int64) {
g.initConfigLinuxResourcesPids()
g.Config.Linux.Resources.Pids.Limit = limit
}
// ClearLinuxSysctl clears g.Config.Linux.Sysctl.
func (g *Generator) ClearLinuxSysctl() {
if g.Config == nil || g.Config.Linux == nil {
return
}
g.Config.Linux.Sysctl = make(map[string]string)
}
// AddLinuxSysctl adds a new sysctl config into g.Config.Linux.Sysctl.
func (g *Generator) AddLinuxSysctl(key, value string) {
g.initConfigLinuxSysctl()
g.Config.Linux.Sysctl[key] = value
}
// RemoveLinuxSysctl removes a sysctl config from g.Config.Linux.Sysctl.
func (g *Generator) RemoveLinuxSysctl(key string) {
if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Sysctl == nil {
return
}
delete(g.Config.Linux.Sysctl, key)
}
// ClearLinuxUIDMappings clear g.Config.Linux.UIDMappings.
// No-op when the config or its Linux section has not been initialized.
func (g *Generator) ClearLinuxUIDMappings() {
	if g.Config == nil || g.Config.Linux == nil {
		return
	}
	g.Config.Linux.UIDMappings = []rspec.LinuxIDMapping{}
}

// AddLinuxUIDMapping adds uidMap into g.Config.Linux.UIDMappings.
// hid is the host-side ID, cid the container-side ID, and size the number
// of consecutive IDs covered by the mapping.
func (g *Generator) AddLinuxUIDMapping(hid, cid, size uint32) {
	idMapping := rspec.LinuxIDMapping{
		HostID:      hid,
		ContainerID: cid,
		Size:        size,
	}
	g.initConfigLinux()
	g.Config.Linux.UIDMappings = append(g.Config.Linux.UIDMappings, idMapping)
}

// ClearLinuxGIDMappings clear g.Config.Linux.GIDMappings.
// No-op when the config or its Linux section has not been initialized.
func (g *Generator) ClearLinuxGIDMappings() {
	if g.Config == nil || g.Config.Linux == nil {
		return
	}
	g.Config.Linux.GIDMappings = []rspec.LinuxIDMapping{}
}

// AddLinuxGIDMapping adds gidMap into g.Config.Linux.GIDMappings.
// hid is the host-side ID, cid the container-side ID, and size the number
// of consecutive IDs covered by the mapping.
func (g *Generator) AddLinuxGIDMapping(hid, cid, size uint32) {
	idMapping := rspec.LinuxIDMapping{
		HostID:      hid,
		ContainerID: cid,
		Size:        size,
	}
	g.initConfigLinux()
	g.Config.Linux.GIDMappings = append(g.Config.Linux.GIDMappings, idMapping)
}
// SetLinuxRootPropagation sets g.Config.Linux.RootfsPropagation.
// Accepts the empty string or any of (r)private, (r)slave, (r)shared,
// (r)unbindable; anything else is rejected with an error.
func (g *Generator) SetLinuxRootPropagation(rp string) error {
	switch rp {
	case "", "private", "rprivate", "slave", "rslave", "shared", "rshared", "unbindable", "runbindable":
		// valid propagation mode — fall through to assignment
	default:
		return fmt.Errorf("rootfs-propagation %q must be empty or one of (r)private|(r)slave|(r)shared|(r)unbindable", rp)
	}
	g.initConfigLinux()
	g.Config.Linux.RootfsPropagation = rp
	return nil
}
// ClearPreStartHooks clear g.Config.Hooks.Prestart.
// No-op when the config or its Hooks section has not been initialized.
func (g *Generator) ClearPreStartHooks() {
	if g.Config == nil || g.Config.Hooks == nil {
		return
	}
	g.Config.Hooks.Prestart = []rspec.Hook{}
}

// AddPreStartHook add a prestart hook into g.Config.Hooks.Prestart.
func (g *Generator) AddPreStartHook(preStartHook rspec.Hook) {
	g.initConfigHooks()
	g.Config.Hooks.Prestart = append(g.Config.Hooks.Prestart, preStartHook)
}

// ClearPostStopHooks clear g.Config.Hooks.Poststop.
// No-op when the config or its Hooks section has not been initialized.
func (g *Generator) ClearPostStopHooks() {
	if g.Config == nil || g.Config.Hooks == nil {
		return
	}
	g.Config.Hooks.Poststop = []rspec.Hook{}
}

// AddPostStopHook adds a poststop hook into g.Config.Hooks.Poststop.
func (g *Generator) AddPostStopHook(postStopHook rspec.Hook) {
	g.initConfigHooks()
	g.Config.Hooks.Poststop = append(g.Config.Hooks.Poststop, postStopHook)
}

// ClearPostStartHooks clear g.Config.Hooks.Poststart.
// No-op when the config or its Hooks section has not been initialized.
func (g *Generator) ClearPostStartHooks() {
	if g.Config == nil || g.Config.Hooks == nil {
		return
	}
	g.Config.Hooks.Poststart = []rspec.Hook{}
}

// AddPostStartHook adds a poststart hook into g.Config.Hooks.Poststart.
func (g *Generator) AddPostStartHook(postStartHook rspec.Hook) {
	g.initConfigHooks()
	g.Config.Hooks.Poststart = append(g.Config.Hooks.Poststart, postStartHook)
}

// AddMount adds a mount into g.Config.Mounts. Duplicate destinations are
// not checked here; the mount is always appended.
func (g *Generator) AddMount(mnt rspec.Mount) {
	g.initConfig()
	g.Config.Mounts = append(g.Config.Mounts, mnt)
}

// RemoveMount removes a mount point on the dest directory.
// Only the first mount whose Destination matches is removed.
func (g *Generator) RemoveMount(dest string) {
	g.initConfig()
	for index, mount := range g.Config.Mounts {
		if mount.Destination == dest {
			g.Config.Mounts = append(g.Config.Mounts[:index], g.Config.Mounts[index+1:]...)
			return
		}
	}
}

// Mounts returns the list of mounts.
// Note: this returns the live slice, not a copy; callers can mutate it.
func (g *Generator) Mounts() []rspec.Mount {
	g.initConfig()
	return g.Config.Mounts
}

// ClearMounts clear g.Config.Mounts.
// No-op when the config has not been initialized.
func (g *Generator) ClearMounts() {
	if g.Config == nil {
		return
	}
	g.Config.Mounts = []rspec.Mount{}
}
// SetupPrivileged sets up the privilege-related fields inside g.Config.
// When privileged is true it grants every capability from capability.List()
// in all five capability sets, clears the SELinux label and AppArmor
// profile on the process, and drops any seccomp configuration. When
// privileged is false this is a no-op.
func (g *Generator) SetupPrivileged(privileged bool) {
	if privileged { // Add all capabilities in privileged mode.
		var finalCapList []string
		for _, cap := range capability.List() {
			// In host-specific mode, skip capabilities beyond what the
			// validator reports as the last known capability.
			if g.HostSpecific && cap > validate.LastCap() {
				continue
			}
			finalCapList = append(finalCapList, fmt.Sprintf("CAP_%s", strings.ToUpper(cap.String())))
		}
		g.initConfigLinux()
		g.initConfigProcessCapabilities()
		// Clear first so the full list below replaces (not extends) any
		// previously configured capabilities.
		g.ClearProcessCapabilities()
		g.Config.Process.Capabilities.Bounding = append(g.Config.Process.Capabilities.Bounding, finalCapList...)
		g.Config.Process.Capabilities.Effective = append(g.Config.Process.Capabilities.Effective, finalCapList...)
		g.Config.Process.Capabilities.Inheritable = append(g.Config.Process.Capabilities.Inheritable, finalCapList...)
		g.Config.Process.Capabilities.Permitted = append(g.Config.Process.Capabilities.Permitted, finalCapList...)
		g.Config.Process.Capabilities.Ambient = append(g.Config.Process.Capabilities.Ambient, finalCapList...)
		g.Config.Process.SelinuxLabel = ""
		g.Config.Process.ApparmorProfile = ""
		g.Config.Linux.Seccomp = nil
	}
}
// ClearProcessCapabilities clear g.Config.Process.Capabilities.
// All five capability sets are reset to empty (non-nil) slices; a missing
// config/process/capabilities section makes this a no-op.
func (g *Generator) ClearProcessCapabilities() {
	if g.Config == nil || g.Config.Process == nil || g.Config.Process.Capabilities == nil {
		return
	}
	g.Config.Process.Capabilities.Bounding = []string{}
	g.Config.Process.Capabilities.Effective = []string{}
	g.Config.Process.Capabilities.Inheritable = []string{}
	g.Config.Process.Capabilities.Permitted = []string{}
	g.Config.Process.Capabilities.Ambient = []string{}
}
// AddProcessCapability adds a process capability into all 5 capability sets
// (Ambient, Bounding, Effective, Inheritable, Permitted).
// The name is upper-cased before validation and insertion; a set is left
// unchanged if it already holds the capability (compared case-insensitively).
// Returns the validation error, if any, without touching the config.
//
// The original implementation repeated the same find-or-append loop five
// times; the shared closure below removes that duplication.
func (g *Generator) AddProcessCapability(c string) error {
	cp := strings.ToUpper(c)
	if err := validate.CapValid(cp, g.HostSpecific); err != nil {
		return err
	}
	g.initConfigProcessCapabilities()
	// ensure appends cp to list unless an equivalent entry already exists.
	ensure := func(list []string) []string {
		for _, existing := range list {
			if strings.ToUpper(existing) == cp {
				return list
			}
		}
		return append(list, cp)
	}
	caps := g.Config.Process.Capabilities
	caps.Ambient = ensure(caps.Ambient)
	caps.Bounding = ensure(caps.Bounding)
	caps.Effective = ensure(caps.Effective)
	caps.Inheritable = ensure(caps.Inheritable)
	caps.Permitted = ensure(caps.Permitted)
	return nil
}
// AddProcessCapabilityAmbient adds a process capability into g.Config.Process.Capabilities.Ambient.
// The capability name is upper-cased and deduplicated case-insensitively.
func (g *Generator) AddProcessCapabilityAmbient(c string) error {
	cp := strings.ToUpper(c)
	if err := validate.CapValid(cp, g.HostSpecific); err != nil {
		return err
	}
	g.initConfigProcessCapabilities()
	for _, existing := range g.Config.Process.Capabilities.Ambient {
		if strings.ToUpper(existing) == cp {
			return nil // already present
		}
	}
	g.Config.Process.Capabilities.Ambient = append(g.Config.Process.Capabilities.Ambient, cp)
	return nil
}

// AddProcessCapabilityBounding adds a process capability into g.Config.Process.Capabilities.Bounding.
// The capability name is upper-cased and deduplicated case-insensitively.
func (g *Generator) AddProcessCapabilityBounding(c string) error {
	cp := strings.ToUpper(c)
	if err := validate.CapValid(cp, g.HostSpecific); err != nil {
		return err
	}
	g.initConfigProcessCapabilities()
	for _, existing := range g.Config.Process.Capabilities.Bounding {
		if strings.ToUpper(existing) == cp {
			return nil // already present
		}
	}
	g.Config.Process.Capabilities.Bounding = append(g.Config.Process.Capabilities.Bounding, cp)
	return nil
}

// AddProcessCapabilityEffective adds a process capability into g.Config.Process.Capabilities.Effective.
// The capability name is upper-cased and deduplicated case-insensitively.
func (g *Generator) AddProcessCapabilityEffective(c string) error {
	cp := strings.ToUpper(c)
	if err := validate.CapValid(cp, g.HostSpecific); err != nil {
		return err
	}
	g.initConfigProcessCapabilities()
	for _, existing := range g.Config.Process.Capabilities.Effective {
		if strings.ToUpper(existing) == cp {
			return nil // already present
		}
	}
	g.Config.Process.Capabilities.Effective = append(g.Config.Process.Capabilities.Effective, cp)
	return nil
}

// AddProcessCapabilityInheritable adds a process capability into g.Config.Process.Capabilities.Inheritable.
// The capability name is upper-cased and deduplicated case-insensitively.
func (g *Generator) AddProcessCapabilityInheritable(c string) error {
	cp := strings.ToUpper(c)
	if err := validate.CapValid(cp, g.HostSpecific); err != nil {
		return err
	}
	g.initConfigProcessCapabilities()
	for _, existing := range g.Config.Process.Capabilities.Inheritable {
		if strings.ToUpper(existing) == cp {
			return nil // already present
		}
	}
	g.Config.Process.Capabilities.Inheritable = append(g.Config.Process.Capabilities.Inheritable, cp)
	return nil
}

// AddProcessCapabilityPermitted adds a process capability into g.Config.Process.Capabilities.Permitted.
// The capability name is upper-cased and deduplicated case-insensitively.
func (g *Generator) AddProcessCapabilityPermitted(c string) error {
	cp := strings.ToUpper(c)
	if err := validate.CapValid(cp, g.HostSpecific); err != nil {
		return err
	}
	g.initConfigProcessCapabilities()
	for _, existing := range g.Config.Process.Capabilities.Permitted {
		if strings.ToUpper(existing) == cp {
			return nil // already present
		}
	}
	g.Config.Process.Capabilities.Permitted = append(g.Config.Process.Capabilities.Permitted, cp)
	return nil
}
// DropProcessCapability drops a process capability from all 5 capability sets.
// Matching is case-insensitive and, unlike the previous implementation,
// every occurrence is removed: removing by index while ranging shifted the
// remaining indices, so duplicate entries could be skipped or the wrong
// element removed.
// The name is still validated afterwards (host-independent), matching the
// original behavior of scrubbing the sets even for unknown names.
func (g *Generator) DropProcessCapability(c string) error {
	if g.Config == nil || g.Config.Process == nil || g.Config.Process.Capabilities == nil {
		return nil
	}
	cp := strings.ToUpper(c)
	// without returns list with every case-insensitive match of cp removed.
	without := func(list []string) []string {
		kept := list[:0]
		for _, existing := range list {
			if strings.ToUpper(existing) != cp {
				kept = append(kept, existing)
			}
		}
		return kept
	}
	caps := g.Config.Process.Capabilities
	caps.Ambient = without(caps.Ambient)
	caps.Bounding = without(caps.Bounding)
	caps.Effective = without(caps.Effective)
	caps.Inheritable = without(caps.Inheritable)
	caps.Permitted = without(caps.Permitted)
	return validate.CapValid(cp, false)
}
// DropProcessCapabilityAmbient drops a process capability from g.Config.Process.Capabilities.Ambient.
// Every case-insensitive match is removed; the previous remove-by-index-
// while-ranging approach mishandled duplicate entries.
func (g *Generator) DropProcessCapabilityAmbient(c string) error {
	if g.Config == nil || g.Config.Process == nil || g.Config.Process.Capabilities == nil {
		return nil
	}
	cp := strings.ToUpper(c)
	kept := g.Config.Process.Capabilities.Ambient[:0]
	for _, existing := range g.Config.Process.Capabilities.Ambient {
		if strings.ToUpper(existing) != cp {
			kept = append(kept, existing)
		}
	}
	g.Config.Process.Capabilities.Ambient = kept
	return validate.CapValid(cp, false)
}

// DropProcessCapabilityBounding drops a process capability from g.Config.Process.Capabilities.Bounding.
// Every case-insensitive match is removed (see DropProcessCapabilityAmbient).
func (g *Generator) DropProcessCapabilityBounding(c string) error {
	if g.Config == nil || g.Config.Process == nil || g.Config.Process.Capabilities == nil {
		return nil
	}
	cp := strings.ToUpper(c)
	kept := g.Config.Process.Capabilities.Bounding[:0]
	for _, existing := range g.Config.Process.Capabilities.Bounding {
		if strings.ToUpper(existing) != cp {
			kept = append(kept, existing)
		}
	}
	g.Config.Process.Capabilities.Bounding = kept
	return validate.CapValid(cp, false)
}

// DropProcessCapabilityEffective drops a process capability from g.Config.Process.Capabilities.Effective.
// Every case-insensitive match is removed (see DropProcessCapabilityAmbient).
func (g *Generator) DropProcessCapabilityEffective(c string) error {
	if g.Config == nil || g.Config.Process == nil || g.Config.Process.Capabilities == nil {
		return nil
	}
	cp := strings.ToUpper(c)
	kept := g.Config.Process.Capabilities.Effective[:0]
	for _, existing := range g.Config.Process.Capabilities.Effective {
		if strings.ToUpper(existing) != cp {
			kept = append(kept, existing)
		}
	}
	g.Config.Process.Capabilities.Effective = kept
	return validate.CapValid(cp, false)
}

// DropProcessCapabilityInheritable drops a process capability from g.Config.Process.Capabilities.Inheritable.
// Every case-insensitive match is removed (see DropProcessCapabilityAmbient).
func (g *Generator) DropProcessCapabilityInheritable(c string) error {
	if g.Config == nil || g.Config.Process == nil || g.Config.Process.Capabilities == nil {
		return nil
	}
	cp := strings.ToUpper(c)
	kept := g.Config.Process.Capabilities.Inheritable[:0]
	for _, existing := range g.Config.Process.Capabilities.Inheritable {
		if strings.ToUpper(existing) != cp {
			kept = append(kept, existing)
		}
	}
	g.Config.Process.Capabilities.Inheritable = kept
	return validate.CapValid(cp, false)
}

// DropProcessCapabilityPermitted drops a process capability from g.Config.Process.Capabilities.Permitted.
// Every case-insensitive match is removed (see DropProcessCapabilityAmbient).
func (g *Generator) DropProcessCapabilityPermitted(c string) error {
	if g.Config == nil || g.Config.Process == nil || g.Config.Process.Capabilities == nil {
		return nil
	}
	cp := strings.ToUpper(c)
	kept := g.Config.Process.Capabilities.Permitted[:0]
	for _, existing := range g.Config.Process.Capabilities.Permitted {
		if strings.ToUpper(existing) != cp {
			kept = append(kept, existing)
		}
	}
	g.Config.Process.Capabilities.Permitted = kept
	return validate.CapValid(cp, false)
}
// mapStrToNamespace translates a user-facing namespace name into the
// corresponding runtime-spec LinuxNamespace carrying the given path.
// Unknown names yield a zero-value namespace and an error.
func mapStrToNamespace(ns string, path string) (rspec.LinuxNamespace, error) {
	nsTypes := map[string]rspec.LinuxNamespaceType{
		"network": rspec.NetworkNamespace,
		"pid":     rspec.PIDNamespace,
		"mount":   rspec.MountNamespace,
		"ipc":     rspec.IPCNamespace,
		"uts":     rspec.UTSNamespace,
		"user":    rspec.UserNamespace,
		"cgroup":  rspec.CgroupNamespace,
	}
	nsType, ok := nsTypes[ns]
	if !ok {
		return rspec.LinuxNamespace{}, fmt.Errorf("unrecognized namespace %q", ns)
	}
	return rspec.LinuxNamespace{Type: nsType, Path: path}, nil
}
// ClearLinuxNamespaces clear g.Config.Linux.Namespaces.
// No-op when the config or its Linux section has not been initialized.
func (g *Generator) ClearLinuxNamespaces() {
	if g.Config == nil || g.Config.Linux == nil {
		return
	}
	g.Config.Linux.Namespaces = []rspec.LinuxNamespace{}
}
// AddOrReplaceLinuxNamespace adds or replaces a namespace inside
// g.Config.Linux.Namespaces. At most one namespace of each type is kept:
// an existing entry of the same type is overwritten in place, otherwise
// the new namespace is appended.
func (g *Generator) AddOrReplaceLinuxNamespace(ns string, path string) error {
	namespace, err := mapStrToNamespace(ns, path)
	if err != nil {
		return err
	}
	g.initConfigLinux()
	// Note: loop variable renamed so it no longer shadows the ns parameter.
	for i, existing := range g.Config.Linux.Namespaces {
		if existing.Type == namespace.Type {
			g.Config.Linux.Namespaces[i] = namespace
			return nil
		}
	}
	g.Config.Linux.Namespaces = append(g.Config.Linux.Namespaces, namespace)
	return nil
}
// RemoveLinuxNamespace removes a namespace from g.Config.Linux.Namespaces.
// The name is validated first (so an unknown namespace errors even when the
// config is empty); a missing Linux section is then a silent no-op.
func (g *Generator) RemoveLinuxNamespace(ns string) error {
	namespace, err := mapStrToNamespace(ns, "")
	if err != nil {
		return err
	}
	if g.Config == nil || g.Config.Linux == nil {
		return nil
	}
	namespaces := g.Config.Linux.Namespaces
	for i := range namespaces {
		if namespaces[i].Type == namespace.Type {
			g.Config.Linux.Namespaces = append(namespaces[:i], namespaces[i+1:]...)
			return nil
		}
	}
	return nil
}
// AddDevice - add a device into g.Config.Linux.Devices.
// A device with the same Path replaces the existing entry in place;
// otherwise the device is appended.
func (g *Generator) AddDevice(device rspec.LinuxDevice) {
	g.initConfigLinux()

	for i, dev := range g.Config.Linux.Devices {
		if dev.Path == device.Path {
			g.Config.Linux.Devices[i] = device
			return
		}
	}

	g.Config.Linux.Devices = append(g.Config.Linux.Devices, device)
}

// RemoveDevice remove a device from g.Config.Linux.Devices.
// Only the first device whose Path matches is removed; missing config
// sections make this a no-op.
func (g *Generator) RemoveDevice(path string) {
	if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Devices == nil {
		return
	}

	for i, device := range g.Config.Linux.Devices {
		if device.Path == path {
			g.Config.Linux.Devices = append(g.Config.Linux.Devices[:i], g.Config.Linux.Devices[i+1:]...)
			return
		}
	}
}

// ClearLinuxDevices clears g.Config.Linux.Devices.
func (g *Generator) ClearLinuxDevices() {
	if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Devices == nil {
		return
	}

	g.Config.Linux.Devices = []rspec.LinuxDevice{}
}

// AddLinuxResourcesDevice - add a device into g.Config.Linux.Resources.Devices.
// major/minor are optional (nil means unspecified); entries are always
// appended without dedupe.
func (g *Generator) AddLinuxResourcesDevice(allow bool, devType string, major, minor *int64, access string) {
	g.initConfigLinuxResources()

	device := rspec.LinuxDeviceCgroup{
		Allow:  allow,
		Type:   devType,
		Access: access,
		Major:  major,
		Minor:  minor,
	}
	g.Config.Linux.Resources.Devices = append(g.Config.Linux.Resources.Devices, device)
}
// RemoveLinuxResourcesDevice - remove a device from g.Config.Linux.Resources.Devices.
// A device matches when allow, type, access and the optional major/minor
// numbers all agree; only the first match is removed.
//
// The original condition repeated `devType == device.Type` (and likewise
// for access) inside a redundant sub-clause — for plain strings the extra
// clause was a tautology — so it is simplified here. The pointer fields
// keep the "same pointer (incl. both nil) or equal values" semantics.
func (g *Generator) RemoveLinuxResourcesDevice(allow bool, devType string, major, minor *int64, access string) {
	if g.Config == nil || g.Config.Linux == nil || g.Config.Linux.Resources == nil {
		return
	}
	// ptrMatch reports whether two optional numbers are the same pointer
	// (including both nil) or point at equal values.
	ptrMatch := func(a, b *int64) bool {
		return a == b || (a != nil && b != nil && *a == *b)
	}
	for i, device := range g.Config.Linux.Resources.Devices {
		if device.Allow == allow &&
			devType == device.Type &&
			access == device.Access &&
			ptrMatch(major, device.Major) &&
			ptrMatch(minor, device.Minor) {
			g.Config.Linux.Resources.Devices = append(g.Config.Linux.Resources.Devices[:i], g.Config.Linux.Resources.Devices[i+1:]...)
			return
		}
	}
}
// SetSyscallAction adds rules for syscalls with the specified action.
// Parsing and insertion are delegated to the seccomp helper package.
func (g *Generator) SetSyscallAction(arguments seccomp.SyscallOpts) error {
	g.initConfigLinuxSeccomp()
	return seccomp.ParseSyscallFlag(arguments, g.Config.Linux.Seccomp)
}

// SetDefaultSeccompAction sets the default action for all syscalls not defined
// and then removes any syscall rules with this action already specified.
func (g *Generator) SetDefaultSeccompAction(action string) error {
	g.initConfigLinuxSeccomp()
	return seccomp.ParseDefaultAction(action, g.Config.Linux.Seccomp)
}

// SetDefaultSeccompActionForce only sets the default action for all syscalls
// not defined (existing rules with that action are left in place).
func (g *Generator) SetDefaultSeccompActionForce(action string) error {
	g.initConfigLinuxSeccomp()
	return seccomp.ParseDefaultActionForce(action, g.Config.Linux.Seccomp)
}

// SetSeccompArchitecture sets the supported seccomp architectures.
func (g *Generator) SetSeccompArchitecture(architecture string) error {
	g.initConfigLinuxSeccomp()
	return seccomp.ParseArchitectureFlag(architecture, g.Config.Linux.Seccomp)
}

// RemoveSeccompRule removes rules for any specified syscalls.
func (g *Generator) RemoveSeccompRule(arguments string) error {
	g.initConfigLinuxSeccomp()
	return seccomp.RemoveAction(arguments, g.Config.Linux.Seccomp)
}

// RemoveAllSeccompRules removes all syscall rules.
func (g *Generator) RemoveAllSeccompRules() error {
	g.initConfigLinuxSeccomp()
	return seccomp.RemoveAllSeccompRules(g.Config.Linux.Seccomp)
}
// AddLinuxMaskedPaths adds masked paths into g.Config.Linux.MaskedPaths.
func (g *Generator) AddLinuxMaskedPaths(path string) {
	g.initConfigLinux()
	g.Config.Linux.MaskedPaths = append(g.Config.Linux.MaskedPaths, path)
}

// AddLinuxReadonlyPaths adds readonly paths into g.Config.Linux.ReadonlyPaths.
// (The previous comment incorrectly said MaskedPaths.)
func (g *Generator) AddLinuxReadonlyPaths(path string) {
	g.initConfigLinux()
	g.Config.Linux.ReadonlyPaths = append(g.Config.Linux.ReadonlyPaths, path)
}
// addOrReplaceBlockIOThrottleDevice updates the rate of the throttle device
// identified by (major, minor) when present, or appends a new entry.
func addOrReplaceBlockIOThrottleDevice(tmpList []rspec.LinuxThrottleDevice, major int64, minor int64, rate uint64) []rspec.LinuxThrottleDevice {
	devices := tmpList
	for i := range devices {
		if devices[i].Major == major && devices[i].Minor == minor {
			devices[i].Rate = rate
			return devices
		}
	}
	var dev rspec.LinuxThrottleDevice
	dev.Major = major
	dev.Minor = minor
	dev.Rate = rate
	return append(devices, dev)
}

// dropBlockIOThrottleDevice removes the first throttle device identified by
// (major, minor); the list is returned unchanged when no entry matches.
func dropBlockIOThrottleDevice(tmpList []rspec.LinuxThrottleDevice, major int64, minor int64) []rspec.LinuxThrottleDevice {
	devices := tmpList
	for i := range devices {
		if devices[i].Major == major && devices[i].Minor == minor {
			return append(devices[:i], devices[i+1:]...)
		}
	}
	return devices
}
// AddSolarisAnet adds network into g.Config.Solaris.Anet.
func (g *Generator) AddSolarisAnet(anet rspec.SolarisAnet) {
	g.initConfigSolaris()
	g.Config.Solaris.Anet = append(g.Config.Solaris.Anet, anet)
}

// SetSolarisCappedCPUNcpus sets g.Config.Solaris.CappedCPU.Ncpus.
func (g *Generator) SetSolarisCappedCPUNcpus(ncpus string) {
	g.initConfigSolarisCappedCPU()
	g.Config.Solaris.CappedCPU.Ncpus = ncpus
}

// SetSolarisCappedMemoryPhysical sets g.Config.Solaris.CappedMemory.Physical.
func (g *Generator) SetSolarisCappedMemoryPhysical(physical string) {
	g.initConfigSolarisCappedMemory()
	g.Config.Solaris.CappedMemory.Physical = physical
}

// SetSolarisCappedMemorySwap sets g.Config.Solaris.CappedMemory.Swap.
func (g *Generator) SetSolarisCappedMemorySwap(swap string) {
	g.initConfigSolarisCappedMemory()
	g.Config.Solaris.CappedMemory.Swap = swap
}

// SetSolarisLimitPriv sets g.Config.Solaris.LimitPriv.
func (g *Generator) SetSolarisLimitPriv(limitPriv string) {
	g.initConfigSolaris()
	g.Config.Solaris.LimitPriv = limitPriv
}

// SetSolarisMaxShmMemory sets g.Config.Solaris.MaxShmMemory.
func (g *Generator) SetSolarisMaxShmMemory(memory string) {
	g.initConfigSolaris()
	g.Config.Solaris.MaxShmMemory = memory
}

// SetSolarisMilestone sets g.Config.Solaris.Milestone.
func (g *Generator) SetSolarisMilestone(milestone string) {
	g.initConfigSolaris()
	g.Config.Solaris.Milestone = milestone
}

// SetVMHypervisorPath sets g.Config.VM.Hypervisor.Path.
// The path must be absolute (start with "/").
func (g *Generator) SetVMHypervisorPath(path string) error {
	if !strings.HasPrefix(path, "/") {
		return fmt.Errorf("hypervisorPath %v is not an absolute path", path)
	}
	g.initConfigVM()
	g.Config.VM.Hypervisor.Path = path
	return nil
}

// SetVMHypervisorParameters sets g.Config.VM.Hypervisor.Parameters.
func (g *Generator) SetVMHypervisorParameters(parameters []string) {
	g.initConfigVM()
	g.Config.VM.Hypervisor.Parameters = parameters
}

// SetVMKernelPath sets g.Config.VM.Kernel.Path.
// The path must be absolute (start with "/").
func (g *Generator) SetVMKernelPath(path string) error {
	if !strings.HasPrefix(path, "/") {
		return fmt.Errorf("kernelPath %v is not an absolute path", path)
	}
	g.initConfigVM()
	g.Config.VM.Kernel.Path = path
	return nil
}

// SetVMKernelParameters sets g.Config.VM.Kernel.Parameters.
func (g *Generator) SetVMKernelParameters(parameters []string) {
	g.initConfigVM()
	g.Config.VM.Kernel.Parameters = parameters
}

// SetVMKernelInitRD sets g.Config.VM.Kernel.InitRD.
// The initrd path must be absolute (start with "/").
func (g *Generator) SetVMKernelInitRD(initrd string) error {
	if !strings.HasPrefix(initrd, "/") {
		return fmt.Errorf("kernelInitrd %v is not an absolute path", initrd)
	}
	g.initConfigVM()
	g.Config.VM.Kernel.InitRD = initrd
	return nil
}

// SetVMImagePath sets g.Config.VM.Image.Path.
// The path must be absolute (start with "/").
func (g *Generator) SetVMImagePath(path string) error {
	if !strings.HasPrefix(path, "/") {
		return fmt.Errorf("imagePath %v is not an absolute path", path)
	}
	g.initConfigVM()
	g.Config.VM.Image.Path = path
	return nil
}
// SetVMImageFormat sets g.Config.VM.Image.Format.
// Only raw, qcow2, vdi, vmdk and vhd are accepted.
func (g *Generator) SetVMImageFormat(format string) error {
	switch format {
	case "raw", "qcow2", "vdi", "vmdk", "vhd":
		// supported image format — fall through to assignment
	default:
		return fmt.Errorf("Commonly supported formats are: raw, qcow2, vdi, vmdk, vhd")
	}
	g.initConfigVM()
	g.Config.VM.Image.Format = format
	return nil
}
// SetWindowsHypervUntilityVMPath sets g.Config.Windows.HyperV.UtilityVMPath.
// (Function name spelling "Untility" is kept [sic] for API compatibility.)
func (g *Generator) SetWindowsHypervUntilityVMPath(path string) {
	g.initConfigWindowsHyperV()
	g.Config.Windows.HyperV.UtilityVMPath = path
}

// SetWindowsIgnoreFlushesDuringBoot sets g.Config.Windows.IgnoreFlushesDuringBoot.
func (g *Generator) SetWindowsIgnoreFlushesDuringBoot(ignore bool) {
	g.initConfigWindows()
	g.Config.Windows.IgnoreFlushesDuringBoot = ignore
}

// AddWindowsLayerFolders adds layer folders into g.Config.Windows.LayerFolders.
func (g *Generator) AddWindowsLayerFolders(folder string) {
	g.initConfigWindows()
	g.Config.Windows.LayerFolders = append(g.Config.Windows.LayerFolders, folder)
}
// AddWindowsDevices adds or sets g.Config.Windows.Devices.
// Windows only supports idType "class"; an existing device with the same
// ID has its IDType updated in place, otherwise a new device is appended.
func (g *Generator) AddWindowsDevices(id, idType string) error {
	if idType != "class" {
		return fmt.Errorf("Invalid idType value: %s. Windows only supports a value of class", idType)
	}
	g.initConfigWindows()
	// Index-based loop avoids the original's loop variable shadowing the
	// freshly built device value.
	for i := range g.Config.Windows.Devices {
		if g.Config.Windows.Devices[i].ID == id {
			g.Config.Windows.Devices[i].IDType = idType
			return nil
		}
	}
	g.Config.Windows.Devices = append(g.Config.Windows.Devices, rspec.WindowsDevice{
		ID:     id,
		IDType: idType,
	})
	return nil
}
// SetWindowsNetwork sets g.Config.Windows.Network.
// Note: stores the address of the (copied) argument.
func (g *Generator) SetWindowsNetwork(network rspec.WindowsNetwork) {
	g.initConfigWindows()
	g.Config.Windows.Network = &network
}

// SetWindowsNetworkAllowUnqualifiedDNSQuery sets g.Config.Windows.Network.AllowUnqualifiedDNSQuery.
func (g *Generator) SetWindowsNetworkAllowUnqualifiedDNSQuery(setting bool) {
	g.initConfigWindowsNetwork()
	g.Config.Windows.Network.AllowUnqualifiedDNSQuery = setting
}

// SetWindowsNetworkNamespace sets g.Config.Windows.Network.NetworkNamespace.
func (g *Generator) SetWindowsNetworkNamespace(path string) {
	g.initConfigWindowsNetwork()
	g.Config.Windows.Network.NetworkNamespace = path
}

// SetWindowsResourcesCPU sets g.Config.Windows.Resources.CPU.
// Note: stores the address of the (copied) argument.
func (g *Generator) SetWindowsResourcesCPU(cpu rspec.WindowsCPUResources) {
	g.initConfigWindowsResources()
	g.Config.Windows.Resources.CPU = &cpu
}

// SetWindowsResourcesMemoryLimit sets g.Config.Windows.Resources.Memory.Limit.
func (g *Generator) SetWindowsResourcesMemoryLimit(limit uint64) {
	g.initConfigWindowsResourcesMemory()
	g.Config.Windows.Resources.Memory.Limit = &limit
}

// SetWindowsResourcesStorage sets g.Config.Windows.Resources.Storage.
// Note: stores the address of the (copied) argument.
func (g *Generator) SetWindowsResourcesStorage(storage rspec.WindowsStorageResources) {
	g.initConfigWindowsResources()
	g.Config.Windows.Resources.Storage = &storage
}
// SetWindowsServicing sets g.Config.Windows.Servicing.
func (g *Generator) SetWindowsServicing(servicing bool) {
g.initConfigWindows()
g.Config.Windows.Servicing = servicing
}<|fim▁end|> | "CAP_SYS_CHROOT",
"CAP_KILL",
"CAP_AUDIT_WRITE", |
<|file_name|>request.js<|end_file_name|><|fim▁begin|>/* jshint sub: true */
/* global exports: true */
'use strict';
// //////////////////////////////////////////////////////////////////////////////
// / @brief node-request-style HTTP requests
// /
// / @file
// /
// / DISCLAIMER
// /
// / Copyright 2015 triAGENS GmbH, Cologne, Germany
// /
// / Licensed under the Apache License, Version 2.0 (the "License")
// / you may not use this file except in compliance with the License.
// / You may obtain a copy of the License at
// /
// / http://www.apache.org/licenses/LICENSE-2.0
// /
// / Unless required by applicable law or agreed to in writing, software
// / distributed under the License is distributed on an "AS IS" BASIS,
// / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// / See the License for the specific language governing permissions and
// / limitations under the License.
// /
// / Copyright holder is triAGENS GmbH, Cologne, Germany
// /
// / @author Alan Plum
// / @author Copyright 2015, triAGENS GmbH, Cologne, Germany
// //////////////////////////////////////////////////////////////////////////////
const internal = require('internal');
const Buffer = require('buffer').Buffer;
const extend = require('lodash').extend;
const httperr = require('http-errors');
const is = require('@arangodb/is');
const querystring = require('querystring');
const qs = require('qs');
const url = require('url');
class Response {
  // Wrap a raw internal.download result. encoding === null keeps the raw
  // body bytes; otherwise the body is decoded (default utf-8) and, when
  // json is truthy, additionally parsed into this.json (undefined on
  // parse failure).
  constructor (res, encoding, json) {
    this.status = this.statusCode = res.code;
    this.message = res.message;
    this.headers = res.headers ? res.headers : {};
    this.body = this.rawBody = res.body;
    if (this.body && encoding !== null) {
      this.body = this.body.toString(encoding || 'utf-8');
      if (json) {
        try {
          this.json = JSON.parse(this.body);
        } catch (e) {
          this.json = undefined;
        }
      }
    }
  }

  // Raise an HTTP error (with this response attached as details) when the
  // status indicates failure; does nothing for statuses below 400.
  throw (msg) {
    if (this.status >= 400) {
      throw Object.assign(
        httperr(this.status, msg || this.message),
        {details: this}
      );
    }
  }
}
function querystringify (query, useQuerystring) {
if (!query) {
return '';
}
if (typeof query === 'string') {
return query.charAt(0) === '?' ? query.slice(1) : query;
}
return (useQuerystring ? querystring : qs).stringify(query)
.replace(/[!'()*]/g, function (c) {
// Stricter RFC 3986 compliance
return '%' + c.charCodeAt(0).toString(16);
});
}
// Perform a node-request-style HTTP request via internal.download and wrap
// the result in a Response. Accepts either a URL string (implies GET) or an
// options object with url/uri, method, qs, body/json/form, headers, auth,
// timeout, followRedirect, maxRedirects, sslProtocol and encoding.
// Throws when the URL is empty or a multipart body is requested.
function request (req) {
  if (typeof req === 'string') {
    req = {url: req, method: 'GET'};
  }

  let path = req.url || req.uri;
  if (!path) {
    throw new Error('Request URL must not be empty.');
  }

  let pathObj = typeof path === 'string' ? url.parse(path) : path;
  if (pathObj.auth) {
    // Credentials embedded in the URL ("user:pass@host") become req.auth
    // unless the caller already supplied auth (extend keeps req's value).
    let auth = pathObj.auth.split(':');
    req = extend({
      auth: {
        username: decodeURIComponent(auth[0]),
        password: decodeURIComponent(auth[1])
      }
    }, req);
    delete pathObj.auth;
  }
  let query = typeof req.qs === 'string' ? req.qs : querystringify(req.qs, req.useQuerystring);
  if (query) {
    pathObj.search = query;
  }
  path = url.format(pathObj);

  let contentType;
  let body = req.body;
  if (req.json) {
    body = JSON.stringify(body);
    contentType = 'application/json';
  } else if (typeof body === 'string') {
    contentType = 'text/plain; charset=utf-8';
  } else if (typeof body === 'object' && body instanceof Buffer) {
    contentType = 'application/octet-stream';
  } else if (!body) {
    if (req.form) {
      contentType = 'application/x-www-form-urlencoded';
      body = typeof req.form === 'string' ? req.form : querystringify(req.form, req.useQuerystring);
    } else if (req.formData) {
      // contentType = 'multipart/form-data'
      // body = formData(req.formData)
      throw new Error('Multipart form encoding is currently not supported.');
    } else if (req.multipart) {
      // contentType = 'multipart/related'
      // body = multipart(req.multipart)
      throw new Error('Multipart encoding is currently not supported.');
    }
  }

  // Header names are lower-cased; caller headers can override content-type.
  const headers = {};

  if (contentType) {
    headers['content-type'] = contentType;
  }

  if (req.headers) {
    Object.keys(req.headers).forEach(function (name) {
      headers[name.toLowerCase()] = req.headers[name];
    });
  }

  if (req.auth) {
    headers['authorization'] = ( // eslint-disable-line dot-notation
      req.auth.bearer ?
      'Bearer ' + req.auth.bearer :
      'Basic ' + new Buffer(
        req.auth.username + ':' +
        req.auth.password
      ).toString('base64')
    );
  }

  let options = {
    method: (req.method || 'get').toUpperCase(),
    headers: headers,
    returnBodyAsBuffer: true,
    returnBodyOnError: req.returnBodyOnError !== false
  };
  if (is.existy(req.timeout)) {
    options.timeout = req.timeout;
  }
  if (is.existy(req.followRedirect)) {
    options.followRedirects = req.followRedirect; // [sic] node-request compatibility
  }
  if (is.existy(req.maxRedirects)) {
    options.maxRedirects = req.maxRedirects;
  } else {
    // Default redirect cap when the caller did not specify one.
    options.maxRedirects = 10;
  }
  if (req.sslProtocol) {
    options.sslProtocol = req.sslProtocol;
  }
  let result = internal.download(path, body, options);

  return new Response(result, req.encoding, req.json);
}
exports = request;
exports.request = request;
exports.Response = Response;

// Convenience verb helpers: request.get(url, opts), request.post(opts), …
// Each accepts (url, options) or a single options object.
for (const method of ['delete', 'get', 'head', 'patch', 'post', 'put']) {
  exports[method] = function (url, options) {
    if (typeof url === 'object') {
      options = url;
      url = undefined;
    } else if (typeof url === 'string') {
      options = extend({}, options, {url: url});
    }
    return request(extend({method: method.toUpperCase()}, options));
  };
}

module.exports = exports;
<|file_name|>ui_codificadores_POT.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/Users/chernomirdinmacuvele/Documents/workspace/PscArt2.0.X/UserInt/ui_codificadores_POT.ui'
#
# Created by: PyQt5 UI code generator 5.8.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(306, 332)
self.gridLayout = QtWidgets.QGridLayout(Form)
self.gridLayout.setObjectName("gridLayout")
self.label = QtWidgets.QLabel(Form)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.LECodigo = QtWidgets.QLineEdit(Form)
self.LECodigo.setMaxLength(3)
self.LECodigo.setObjectName("LECodigo")
self.gridLayout.addWidget(self.LECodigo, 0, 1, 1, 1)
self.label_3 = QtWidgets.QLabel(Form)
self.label_3.setObjectName("label_3")
self.gridLayout.addWidget(self.label_3, 1, 0, 1, 1)
self.LENome = QtWidgets.QLineEdit(Form)
self.LENome.setMaxLength(15)
self.LENome.setObjectName("LENome")
self.gridLayout.addWidget(self.LENome, 1, 1, 1, 1)
self.label_4 = QtWidgets.QLabel(Form)
self.label_4.setObjectName("label_4")
self.gridLayout.addWidget(self.label_4, 2, 0, 1, 1)
self.PTEDescricao = QtWidgets.QPlainTextEdit(Form)
self.PTEDescricao.setObjectName("PTEDescricao")
self.gridLayout.addWidget(self.PTEDescricao, 2, 1, 1, 1)
self.label_5 = QtWidgets.QLabel(Form)
self.label_5.setObjectName("label_5")
self.gridLayout.addWidget(self.label_5, 3, 0, 1, 1)
self.PTEComentarios = QtWidgets.QPlainTextEdit(Form)
self.PTEComentarios.setObjectName("PTEComentarios")
self.gridLayout.addWidget(self.PTEComentarios, 3, 1, 1, 1)
self.CHBActivo = QtWidgets.QCheckBox(Form)
self.CHBActivo.setObjectName("CHBActivo")
self.gridLayout.addWidget(self.CHBActivo, 4, 1, 1, 1)
self.splitter = QtWidgets.QSplitter(Form)
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.splitter.setObjectName("splitter")
self.PBGuardar = QtWidgets.QPushButton(self.splitter)
self.PBGuardar.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/newPrefix/Icons/002-save.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.PBGuardar.setIcon(icon)
self.PBGuardar.setObjectName("PBGuardar")
self.PBCancelar = QtWidgets.QPushButton(self.splitter)
self.PBCancelar.setText("")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/newPrefix/Icons/003-error.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.PBCancelar.setIcon(icon1)
self.PBCancelar.setObjectName("PBCancelar")
self.gridLayout.addWidget(self.splitter, 5, 0, 1, 2)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "Codificador"))
self.label.setText(_translate("Form", "Codigo:"))
self.LECodigo.setPlaceholderText(_translate("Form", "Ex:AAA"))
self.label_3.setText(_translate("Form", "Nome:"))
self.LENome.setPlaceholderText(_translate("Form", "Ex:Qualquer Coisa"))
self.label_4.setText(_translate("Form", "Descricao:"))
self.PTEDescricao.setPlaceholderText(_translate("Form", "Ex:O que faz…"))
self.label_5.setText(_translate("Form", "Comentarios:"))
self.PTEComentarios.setPlaceholderText(_translate("Form", "Ex:Nota, Obs…"))<|fim▁hole|>
import icons_rc<|fim▁end|> | self.CHBActivo.setText(_translate("Form", "Activo")) |
<|file_name|>UnityYAML.js<|end_file_name|><|fim▁begin|>/**
* @author Richard Davey <rich@photonstorm.com>
* @copyright 2019 Photon Storm Ltd.
* @license {@link https://opensource.org/licenses/MIT|MIT License}
*/
var imageHeight = 0;
/**
* @function addFrame
* @private
* @since 3.0.0
*/
var addFrame = function (texture, sourceIndex, name, frame)
{
// The frame values are the exact coordinates to cut the frame out of the atlas from
var y = imageHeight - frame.y - frame.height;
<|fim▁hole|> texture.add(name, sourceIndex, frame.x, y, frame.width, frame.height);
// These are the original (non-trimmed) sprite values
/*
if (src.trimmed)
{
newFrame.setTrim(
src.sourceSize.w,
src.sourceSize.h,
src.spriteSourceSize.x,
src.spriteSourceSize.y,
src.spriteSourceSize.w,
src.spriteSourceSize.h
);
}
*/
};
/**
* Parses a Unity YAML File and creates Frames in the Texture.
* For more details about Sprite Meta Data see https://docs.unity3d.com/ScriptReference/SpriteMetaData.html
*
* @function Phaser.Textures.Parsers.UnityYAML
* @memberof Phaser.Textures.Parsers
* @private
* @since 3.0.0
*
* @param {Phaser.Textures.Texture} texture - The Texture to add the Frames to.
* @param {integer} sourceIndex - The index of the TextureSource.
* @param {object} yaml - The YAML data.
*
* @return {Phaser.Textures.Texture} The Texture modified by this parser.
*/
var UnityYAML = function (texture, sourceIndex, yaml)
{
// Add in a __BASE entry (for the entire atlas)
var source = texture.source[sourceIndex];
texture.add('__BASE', sourceIndex, 0, 0, source.width, source.height);
imageHeight = source.height;
var data = yaml.split('\n');
var lineRegExp = /^[ ]*(- )*(\w+)+[: ]+(.*)/;
var prevSprite = '';
var currentSprite = '';
var rect = { x: 0, y: 0, width: 0, height: 0 };
// var pivot = { x: 0, y: 0 };
// var border = { x: 0, y: 0, z: 0, w: 0 };
for (var i = 0; i < data.length; i++)
{
var results = data[i].match(lineRegExp);
if (!results)
{
continue;
}
var isList = (results[1] === '- ');
var key = results[2];
var value = results[3];
if (isList)
{
if (currentSprite !== prevSprite)
{
addFrame(texture, sourceIndex, currentSprite, rect);
prevSprite = currentSprite;
}
rect = { x: 0, y: 0, width: 0, height: 0 };
}
if (key === 'name')
{
// Start new list
currentSprite = value;
continue;
}
switch (key)
{
case 'x':
case 'y':
case 'width':
case 'height':
rect[key] = parseInt(value, 10);
break;
// case 'pivot':
// pivot = eval('var obj = ' + value);
// break;
// case 'border':
// border = eval('var obj = ' + value);
// break;
}
}
if (currentSprite !== prevSprite)
{
addFrame(texture, sourceIndex, currentSprite, rect);
}
return texture;
};
module.exports = UnityYAML;
/*
Example data:
TextureImporter:
spritePivot: {x: .5, y: .5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
spriteSheet:
sprites:
- name: asteroids_0
rect:
serializedVersion: 2
x: 5
y: 328
width: 65
height: 82
alignment: 0
pivot: {x: 0, y: 0}
border: {x: 0, y: 0, z: 0, w: 0}
- name: asteroids_1
rect:
serializedVersion: 2
x: 80
y: 322
width: 53
height: 88
alignment: 0
pivot: {x: 0, y: 0}
border: {x: 0, y: 0, z: 0, w: 0}
spritePackingTag: Asteroids
*/<|fim▁end|> | |
<|file_name|>provisioning_block.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
from neutron_lib.db import standard_attr
import sqlalchemy as sa<|fim▁hole|>class ProvisioningBlock(model_base.BASEV2):
# the standard attr id of the thing we want to block
standard_attr_id = (
sa.Column(sa.BigInteger().with_variant(sa.Integer(), 'sqlite'),
sa.ForeignKey(standard_attr.StandardAttribute.id,
ondelete="CASCADE"),
primary_key=True))
# the entity that wants to block the status change (e.g. L2 Agent)
entity = sa.Column(sa.String(255), nullable=False, primary_key=True)<|fim▁end|> | |
<|file_name|>build.sahana.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# run as:
# python web2py.py -S eden -M -R applications/eden/static/scripts/tools/build.sahana.py
# or
# python web2py.py -S eden -M -R applications/eden/static/scripts/tools/build.sahana.py -A gis
#
#
# Built with code/inspiration from MapFish, OpenLayers & Michael Crute
#
try:
theme = settings.get_theme()
except:
print "ERROR: File now needs to be run in the web2py environment in order to pick up which theme to build"
exit()
import os
import sys
import shutil
SCRIPTPATH = os.path.join(request.folder, "static", "scripts", "tools")
os.chdir(SCRIPTPATH)
sys.path.append("./")
# For JS
import getopt
import jsmin, mergejs
# For CSS
import re
def mergeCSS(inputFilenames, outputFilename):
output = ""
for inputFilename in inputFilenames:
output += open(inputFilename, "r").read()
open(outputFilename, "w").write(output)
return outputFilename
def cleanline(theLine):
""" Kills line breaks, tabs, and double spaces """
p = re.compile("(\n|\r|\t|\f|\v)+")
m = p.sub("", theLine)
# Kills double spaces
p = re.compile("( )+")
m = p.sub(" ", m)
# Removes last semicolon before }
p = re.compile("(; }|;})+")
m = p.sub("}", m)
# Removes space before {
p = re.compile("({ )+")
m = p.sub("{", m)
# Removes all comments
p = re.compile("/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/")
m = p.sub("", m)
# Strip off the Charset
p = re.compile("@CHARSET .*;")
m = p.sub("", m)
# Strip spaces before the {
p = re.compile(" {")
m = p.sub("{", m)
# Strip space after :
p = re.compile(": ")
m = p.sub(":", m)
# Strip space after ,
p = re.compile(", ")
m = p.sub(",", m)
# Strip space after ;
p = re.compile("; ")
m = p.sub(";", m)
return m
def compressCSS(inputFilename, outputFilename):
theFile = open(inputFilename, "r").read()
output = ""
for line in theFile:
output = output + cleanline(line)
# Once more, clean the entire file string
_output = cleanline(output)
open(outputFilename, "w").write(_output)
return
def dojs(dogis = False, warnings = True):
""" Minifies the JavaScript """
# Do we have local version of the Closure Compiler available?
use_compressor = "jsmin" # Fallback
try:
import closure
use_compressor = "closure"
print "using local Closure Compiler"
except Exception, E:
print "No closure (%s)" % E
print "Download from http://closure-compiler.googlecode.com/files/compiler-latest.zip"
try:
import closure_ws
use_compressor = "closure_ws"
print "Using Closure via Web Service - limited to files < 1Mb!"
except ImportError:
print "No closure_ws"
if use_compressor == "closure":
if not warnings:
closure.extra_params = "--warning_level QUIET"
minimize = closure.minimize
elif use_compressor == "closure_ws":
minimize = closure_ws.minimize
elif use_compressor == "jsmin":
minimize = jsmin.jsmin
sourceDirectory = ".."
configFilename = "sahana.js.cfg"
outputFilename = "S3.min.js"
# Merge JS files
print "Merging Core libraries."
merged = mergejs.run(sourceDirectory, None, configFilename)
# Compress JS files
print "Compressing - JS"
minimized = minimize(merged)
# Add license
print "Adding license file."
minimized = open("license.txt").read() + minimized
# Print to output files
print "Writing to %s." % outputFilename
open(outputFilename, "w").write(minimized)
# Remove old JS files
print "Deleting %s." % outputFilename
try:
os.remove("../S3/%s" % outputFilename)
except:
pass
# Move new JS files
print "Moving new JS files"
shutil.move(outputFilename, "../S3")
# dataTables
print "Compressing dataTables"
sourceDirectorydataTables = ".."
configFilenamedataTables = "sahana.js.dataTables.cfg"
outputFilenamedataTables = "s3.dataTables.min.js"
mergeddataTables = mergejs.run(sourceDirectorydataTables,
None,
configFilenamedataTables)
minimizeddataTables = minimize(mergeddataTables)
open(outputFilenamedataTables, "w").write(minimizeddataTables)
try:
os.remove("../S3/%s" % outputFilenamedataTables)
except:
pass
shutil.move(outputFilenamedataTables, "../S3")
# Vulnerability
print "Compressing Vulnerability"
sourceDirectoryVulnerability = ".."
configFilenameVulnerability = "sahana.js.vulnerability.cfg"
outputFilenameVulnerability = "s3.vulnerability.min.js"
mergedVulnerability = mergejs.run(sourceDirectoryVulnerability,
None,
configFilenameVulnerability)
minimizedVulnerability = minimize(mergedVulnerability)
open(outputFilenameVulnerability, "w").write(minimizedVulnerability)
try:
os.remove("../S3/%s" % outputFilenameVulnerability)
except:
pass
shutil.move(outputFilenameVulnerability, "../S3")
print "Compressing Vulnerability GIS"
sourceDirectoryVulnerability = "../../themes/Vulnerability/js"
configFilenameVulnerability = "sahana.js.vulnerability_gis.cfg"
outputFilenameVulnerability = "OpenLayers.js"
mergedVulnerability = mergejs.run(sourceDirectoryVulnerability,
None,
configFilenameVulnerability)
minimizedVulnerability = minimize(mergedVulnerability)
open(outputFilenameVulnerability, "w").write(minimizedVulnerability)
try:
os.remove("../../themes/Vulnerability/js/%s" % outputFilenameVulnerability)
except:
pass
shutil.move(outputFilenameVulnerability, "../../themes/Vulnerability/js")
# Single scripts
for filename in [<|fim▁hole|> "popup",
"report",
"select_person",
"timeline",
]:
print "Compressing s3.%s.js" % filename
inputFilename = os.path.join("..", "S3", "s3.%s.js" % filename)
outputFilename = "s3.%s.min.js" % filename
input = open(inputFilename, "r").read()
minimized = minimize(input)
open(outputFilename, "w").write(minimized)
try:
os.remove("../S3/%s" % outputFilename)
except:
pass
shutil.move(outputFilename, "../S3")
if dogis:
sourceDirectoryGIS = "../S3"
sourceDirectoryOpenLayers = "../gis/openlayers/lib"
sourceDirectoryOpenLayersExten = "../gis"
sourceDirectoryMGRS = "../gis"
sourceDirectoryGeoExt = "../gis/GeoExt/lib"
sourceDirectoryGeoExtux = "../gis/GeoExt/ux"
sourceDirectoryGxp = "../gis/gxp"
#sourceDirectoryGeoExplorer = "../gis/GeoExplorer"
configFilenameGIS = "sahana.js.gis.cfg"
configFilenameOpenLayers = "sahana.js.ol.cfg"
configFilenameOpenLayersExten = "sahana.js.ol_exten.cfg"
configFilenameMGRS = "sahana.js.mgrs.cfg"
configFilenameGeoExt = "sahana.js.geoext.cfg"
configFilenameGeoExtux = "sahana.js.geoextux.cfg"
configFilenameGxpMin = "sahana.js.gxp.cfg"
configFilenameGxpFull = "sahana.js.gxpfull.cfg"
#configFilenameGeoExplorer = "sahana.js.geoexplorer.cfg"
outputFilenameGIS = "s3.gis.min.js"
outputFilenameOpenLayers = "OpenLayers.js"
outputFilenameMGRS = "MGRS.min.js"
outputFilenameGeoExt = "GeoExt.js"
outputFilenameGxp = "gxp.js"
#outputFilenameGeoExplorer = "GeoExplorer.js"
# Merge GIS JS Files
print "Merging GIS scripts."
mergedGIS = mergejs.run(sourceDirectoryGIS,
None,
configFilenameGIS)
print "Merging OpenLayers libraries."
mergedOpenLayers = mergejs.run(sourceDirectoryOpenLayers,
None,
configFilenameOpenLayers)
mergedOpenLayersExten = mergejs.run(sourceDirectoryOpenLayersExten,
None,
configFilenameOpenLayersExten)
print "Merging MGRS libraries."
mergedMGRS = mergejs.run(sourceDirectoryMGRS,
None,
configFilenameMGRS)
print "Merging GeoExt libraries."
mergedGeoExt = mergejs.run(sourceDirectoryGeoExt,
None,
configFilenameGeoExt)
mergedGeoExtux = mergejs.run(sourceDirectoryGeoExtux,
None,
configFilenameGeoExtux)
print "Merging gxp libraries."
mergedGxpMin = mergejs.run(sourceDirectoryGxp,
None,
configFilenameGxpMin)
mergedGxpFull = mergejs.run(sourceDirectoryGxp,
None,
configFilenameGxpFull)
#print "Merging GeoExplorer libraries."
#mergedGeoExplorer = mergejs.run(sourceDirectoryGeoExplorer,
# None,
# configFilenameGeoExplorer)
# Compress JS files
print "Compressing - GIS JS"
minimizedGIS = minimize(mergedGIS)
print "Compressing - OpenLayers JS"
if use_compressor == "closure_ws":
# Limited to files < 1Mb!
minimizedOpenLayers = jsmin.jsmin("%s\n%s" % (mergedOpenLayers,
mergedOpenLayersExten))
else:
minimizedOpenLayers = minimize("%s\n%s" % (mergedOpenLayers,
mergedOpenLayersExten))
print "Compressing - MGRS JS"
minimizedMGRS = minimize(mergedMGRS)
print "Compressing - GeoExt JS"
minimizedGeoExt = minimize("%s\n%s\n%s" % (mergedGeoExt,
mergedGeoExtux,
mergedGxpMin))
print "Compressing - gxp JS"
minimizedGxp = minimize(mergedGxpFull)
#print "Compressing - GeoExplorer JS"
#minimizedGeoExplorer = minimize(mergedGeoExplorer)
# Add license
#minimizedGIS = open("license.gis.txt").read() + minimizedGIS
# Print to output files
print "Writing to %s." % outputFilenameGIS
open(outputFilenameGIS, "w").write(minimizedGIS)
print "Writing to %s." % outputFilenameOpenLayers
open(outputFilenameOpenLayers, "w").write(minimizedOpenLayers)
print "Writing to %s." % outputFilenameMGRS
open(outputFilenameMGRS, "w").write(minimizedMGRS)
print "Writing to %s." % outputFilenameGeoExt
open(outputFilenameGeoExt, "w").write(minimizedGeoExt)
print "Writing to %s." % outputFilenameGxp
open(outputFilenameGxp, "w").write(minimizedGxp)
#print "Writing to %s." % outputFilenameGeoExplorer
#open(outputFilenameGeoExplorer, "w").write(minimizedGeoExplorer)
# Move new JS files
print "Deleting %s." % outputFilenameGIS
try:
os.remove("../S3/%s" % outputFilenameGIS)
except:
pass
print "Moving new GIS JS files"
shutil.move(outputFilenameGIS, "../S3")
print "Deleting %s." % outputFilenameOpenLayers
try:
os.remove("../gis/%s" % outputFilenameOpenLayers)
except:
pass
print "Moving new OpenLayers JS files"
shutil.move(outputFilenameOpenLayers, "../gis")
print "Deleting %s." % outputFilenameMGRS
try:
os.remove("../gis/%s" % outputFilenameMGRS)
except:
pass
print "Moving new MGRS JS files"
shutil.move(outputFilenameMGRS, "../gis")
print "Deleting %s." % outputFilenameGeoExt
try:
os.remove("../gis/%s" % outputFilenameGeoExt)
except:
pass
print "Moving new GeoExt JS files"
shutil.move(outputFilenameGeoExt, "../gis")
print "Deleting %s." % outputFilenameGxp
try:
os.remove("../gis/%s" % outputFilenameGxp)
except:
pass
print "Moving new gxp JS files"
shutil.move(outputFilenameGxp, "../gis")
#print "Deleting %s." % outputFilenameGeoExplorer
#try:
# os.remove("../gis/%s" % outputFilenameGeoExplorer)
#except:
# pass
#print "Moving new GeoExplorer JS files"
#shutil.move(outputFilenameGeoExplorer, "../gis")
def docss():
""" Compresses the CSS files """
listCSS = []
theme = settings.get_theme()
print "Using theme %s" % theme
css_cfg = os.path.join("..", "..", "..", "private", "templates", theme, "css.cfg")
f = open(css_cfg, "r")
files = f.readlines()
f.close()
for file in files[:-1]:
p = re.compile("(\n|\r|\t|\f|\v)+")
file = p.sub("", file)
listCSS.append("../../styles/%s" % file)
outputFilenameCSS = "eden.min.css"
# Merge CSS files
print "Merging Core styles."
mergedCSS = mergeCSS(listCSS, outputFilenameCSS)
# Compress CSS files
print "Writing to %s." % outputFilenameCSS
compressCSS(mergedCSS, outputFilenameCSS)
# Move files to correct locations
print "Deleting %s." % outputFilenameCSS
try:
os.remove("../../themes/%s/%s" % (theme, outputFilenameCSS))
except:
pass
print "Moving new %s." % outputFilenameCSS
shutil.move(outputFilenameCSS, "../../themes/%s" % theme)
def main(argv):
try:
parameter1 = argv[0]
except:
parameter1 = "ALL"
try:
if(argv[1] == "DOGIS"):
parameter2 = True
else:
parameter2 = False
except:
parameter2 = True
closure_warnings = True
if "NOWARN" in argv:
closure_warnings = False
if parameter1 in ("ALL", "NOWARN"):
dojs(warnings=closure_warnings)
docss()
else:
if parameter1 == "CSS":
docss()
else:
dojs(parameter2, warnings=closure_warnings)
docss()
print "Done."
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))<|fim▁end|> | "contacts",
"embed_component",
"inline_component",
"locationselector.widget", |
<|file_name|>fe_series_05.cc<|end_file_name|><|fim▁begin|>// ---------------------------------------------------------------------
//
// Copyright (C) 2016 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------
// Test Legendre expansion in 2D and 3D for a function given using Legendre
// coefficients.
#include "../tests.h"
#include <iostream>
#include <deal.II/base/function.h>
#include <deal.II/base/quadrature_lib.h>
#include <deal.II/grid/tria.h>
#include <deal.II/grid/grid_generator.h>
#include <deal.II/hp/dof_handler.h>
#include <deal.II/fe/fe_series.h>
#include <deal.II/fe/fe_q.h>
#include <deal.II/lac/vector.h>
#include <deal.II/hp/q_collection.h>
#include <deal.II/numerics/vector_tools.h>
#include <gsl/gsl_sf_legendre.h>
using namespace dealii;
template <int dim>
class LegendreFunction : public Function<dim>
{
public:
LegendreFunction(const Table<dim,double> &coefficients)
:
dealii::Function<dim>(1),
coefficients(coefficients)
{
}
virtual double value(const Point<dim> &point,
const unsigned int component = 0 ) const;
const Table<dim,double> &get_coefficients() const
{
return coefficients;
}
private:
const Table<dim,double> coefficients;
};
// copy-paste from fe_series.cc
template <int dim>
double Lh(const Point<dim> &x_q,
const TableIndices<dim> &indices)
{
double res = 1.0;
for (unsigned int d = 0; d < dim; d++)
{
const double x = 2.0*(x_q[d]-0.5);
Assert ( (x_q[d] <= 1.0) && (x_q[d] >= 0.),
ExcMessage("x_q is not in [0,1]" +
Utilities::to_string(x_q[d])));
const int ind = indices[d];
res *= sqrt(2.0) * gsl_sf_legendre_Pl (ind, x);
}
return res;
}
template <>
double LegendreFunction<2>::value(const dealii::Point<2> &point,
const unsigned int ) const
{
double f = 0.0;
for (unsigned int i = 0; i < coefficients.size(0); i++)
for (unsigned int j = 0; j < coefficients.size(1); j++)
f+= Lh(point, TableIndices<2>(i,j)) * coefficients(i,j);
return f;
}
template <>
double LegendreFunction<3>::value(const dealii::Point<3> &point,
const unsigned int ) const
{
double f = 0.0;
for (unsigned int i = 0; i < coefficients.size(0); i++)
for (unsigned int j = 0; j < coefficients.size(1); j++)
for (unsigned int k = 0; k < coefficients.size(2); k++)
f+= Lh(point, TableIndices<3>(i,j,k)) * coefficients(i,j,k);
return f;
}
void print(const Table<2,double> &coeff)
{
for (unsigned int i = 0; i < coeff.size(0); i++)
for (unsigned int j = 0; j < coeff.size(1); j++)
deallog << coeff(i,j) << " ";
deallog << std::endl;
}
void print(const Table<3,double> &coeff)
{
for (unsigned int i = 0; i < coeff.size(0); i++)
for (unsigned int j = 0; j < coeff.size(1); j++)
for (unsigned int k = 0; k < coeff.size(2); k++)
deallog << coeff(i,j,k) << " ";
deallog << std::endl;
}
void resize(Table<2,double> &coeff, const unsigned int N)
{
coeff.reinit(N,N);
}
void resize(Table<3,double> &coeff, const unsigned int N)
{
TableIndices<3> size;
for (unsigned int d=0; d<3; d++)
size[d] = N;
coeff.reinit(size);
}
template <int dim>
void test(const LegendreFunction<dim> &func,
const unsigned int poly_degree)
{
const unsigned int max_poly = poly_degree+3;
deallog <<"-----------------------------------"<<std::endl;
deallog << dim <<"d, p="<<poly_degree<<", max_p="<<max_poly<<std::endl;
deallog <<"-----------------------------------"<<std::endl;
Triangulation<dim> triangulation;
hp::DoFHandler<dim> dof_handler(triangulation);
hp::FECollection<dim> fe_collection;
hp::QCollection<dim> quadrature_formula;
// add some extra FEs in fe_collection
for (unsigned int p = 1; p <= max_poly; p++)
{
fe_collection.push_back(FE_Q<dim>(p));
quadrature_formula.push_back(QGauss<dim>(p+1+5));
}
GridGenerator::hyper_cube (triangulation,0.0,1.0); // reference cell
const unsigned int fe_index = poly_degree-1;
dof_handler.begin_active()->set_active_fe_index(fe_index);
dof_handler.distribute_dofs (fe_collection);
Vector<double> values(dof_handler.n_dofs());
VectorTools::interpolate (dof_handler,func,values);
const unsigned int N = poly_degree+1;
FESeries::Legendre<dim> legendre(N,<|fim▁hole|> quadrature_formula);
const Table<dim,double> &coeff_in = func.get_coefficients();
Table<dim,double> coeff_out;
resize(coeff_out,N);
Vector<double> local_dof_values;
typename hp::DoFHandler<dim>::active_cell_iterator
cell = dof_handler.begin_active();
{
const unsigned int cell_n_dofs = cell->get_fe().dofs_per_cell;
const unsigned int cell_active_fe_index = cell->active_fe_index();
local_dof_values.reinit (cell_n_dofs);
cell->get_dof_values (values, local_dof_values);
legendre.calculate(local_dof_values,
cell_active_fe_index,
coeff_out);
}
deallog << "calculated:" << std::endl;
print(coeff_out);
deallog <<"exact:"<<std::endl;
print(coeff_in);
dof_handler.clear();
}
int main ()
{
std::ofstream logfile("output");
dealii::deallog.attach(logfile,/*do not print job id*/false);
dealii::deallog.depth_console(0);
{
const unsigned int dim = 2;
const unsigned int coeff_1d = 2;
const unsigned int p = 1;
Table<dim,double> coeff_in(coeff_1d,coeff_1d);
unsigned int ind = 0;
for (unsigned int i = 0; i < coeff_1d; i++)
for (unsigned int j = 0; j < coeff_1d; j++)
coeff_in(i,j) = 1.0 + ind++;
LegendreFunction<dim> function(coeff_in);
test(function,p);
}
{
const unsigned int dim = 2;
const unsigned int coeff_1d = 3;
const unsigned int p = 2;
Table<dim,double> coeff_in(coeff_1d,coeff_1d);
unsigned int ind = 0;
for (unsigned int i = 0; i < coeff_1d; i++)
for (unsigned int j = 0; j < coeff_1d; j++)
coeff_in(i,j) = 1.0 + ind++;
LegendreFunction<dim> function(coeff_in);
test(function,p);
}
{
const unsigned int dim = 3;
const unsigned int coeff_1d = 2;
const unsigned int p = 1;
Table<dim,double> coeff_in(coeff_1d,coeff_1d,coeff_1d);
unsigned int ind = 0;
for (unsigned int i = 0; i < coeff_1d; i++)
for (unsigned int j = 0; j < coeff_1d; j++)
for (unsigned int k = 0; k < coeff_1d; k++)
coeff_in(i,j,k) = 1.0 + ind++;
LegendreFunction<dim> function(coeff_in);
test(function,p);
}
{
const unsigned int dim = 3;
const unsigned int coeff_1d = 3;
const unsigned int p = 2;
Table<dim,double> coeff_in(coeff_1d,coeff_1d,coeff_1d);
unsigned int ind = 0;
for (unsigned int i = 0; i < coeff_1d; i++)
for (unsigned int j = 0; j < coeff_1d; j++)
for (unsigned int k = 0; k < coeff_1d; k++)
coeff_in(i,j,k) = 1.0 + ind++;
LegendreFunction<dim> function(coeff_in);
test(function,p);
}
dealii::deallog << "Ok"<<std::endl;
}<|fim▁end|> | fe_collection, |
<|file_name|>DB.py<|end_file_name|><|fim▁begin|>from Timeline.Server.Constants import TIMELINE_LOGGER
from twisted.internet.defer import Deferred, inlineCallbacks, returnValue
from twistar.dbobject import DBObject
from twistar.registry import Registry
from collections import deque
import logging, time, json
class Penguin(DBObject):
HASONE = ['avatar', 'currency', 'ninja']
HASMANY = ['assets', 'bans', 'careItems', 'coins', 'friends', 'ignores', 'requests', 'inventories', 'mails', 'memberships',
'musicTracks', 'puffles', 'stamps', 'stampCovers', 'igloos']
class Coin(DBObject):
pass
class Igloo(DBObject):
HASMANY = ['iglooFurnitures', 'iglooLikes']
@inlineCallbacks
def get_likes_count(self):
likes = yield Registry.getConfig().execute("SELECT COALESCE(SUM(likes), 0) FROM igloo_likes where "
"igloo_id = %s" % (self.id))
returnValue(likes[0][0])
@inlineCallbacks
def get_furnitures(self):
furnitures = yield self.iglooFurnitures.get()
returnValue(furnitures)
@inlineCallbacks
def get_furnitures_string(self):
furnitures = yield self.get_furnitures()
furn_data = map(lambda i: '|'.join(map(str, map(int, [i.furn_id, i.x, i.y, i.rotate, i.frame]))), furnitures)
returnValue(','.join(furn_data))
@inlineCallbacks
def updateFurnitures(self, furnitures):
yield self.refresh()
yield IglooFurniture.deleteAll(where=['igloo_id = ?', self.id])
furn = [IglooFurniture(igloo_id=self.id, furn_id=x[0], x=x[1], y=x[2], rotate=x[3], frame=x[4])
for x in furnitures]
[(yield i.save()) for i in furn]
yield self.iglooFurnitures.set(furn)
class IglooFurniture(DBObject):
pass
class IglooLike(DBObject):
def get_time(self):
return int(time.mktime(self.time.timetuple()))
class Avatar(DBObject):
pass
class Currency(DBObject):
pass
class Ninja(DBObject):
pass
class Asset(DBObject):
def getPurchasedTimestamp(self):
return int(time.mktime(self.purchased.timetuple()))
class Ban(DBObject):
def banned(self):
return hours > 0
def hours(self):
expire = int(time.mktime(self.expire.timetuple()))
hours = (expire - time.time()) / (60 * 60.0) if expire > time.time() else 0
return hours
class CareItem(DBObject):
pass
class Friend(DBObject):
friend_id = -1
class Ignore(DBObject):
pass
class Request(DBObject):
pass
class Inventory(DBObject):
pass
class Mail(DBObject):
def get_sent_on(self):
return int(time.mktime(self.sent_on.timetuple()))
class Membership(DBObject):
pass
class MusicTrack(DBObject):
shared = False
def __len__(self):
return self.length
<|fim▁hole|> return '%'.join(map(str, [self.id, self.name, int(self.shared), self.notes, self.hash, self.likes]))
def __int__(self):
return self.id
class Puffle(DBObject):
state = x = y = 0
def __str__(self):
# puffle id|type|sub_type|name|adoption|food|play|rest|clean|hat|x|y|is_walking
return '|'.join(map(str, [int(self.id), int(self.type), self.subtype if int(self.subtype) != 0 else '',
self.name, self.adopt(), int(self.food), int(self.play), int(self.rest),
int(self.clean), int(self.hat), int(self.x), int(self.y), int(self.walking)]))
def adopt(self):
return int(time.mktime(self.adopted.timetuple()))
def updatePuffleStats(self, engine):
care_history = json.loads(self.lastcare)
now = time.time()
if care_history is None or len(care_history) < 1 or bool(int(self.backyard)) or self.walking:
care_history['food'] = care_history['play'] = care_history['bath'] = now
self.lastcare = json.dumps(care_history)
self.save()
return # ULTIMATE PUFFLE <indefinite health and energy>
last_fed = care_history['food']
last_played = care_history['play']
last_bathed = care_history['bath']
food, play, clean = int(self.food), int(self.play), int(self.clean)
puffleCrumb = engine.puffleCrumbs[self.subtype]
max_food, max_play, max_clean = puffleCrumb.hunger, 100, puffleCrumb.health
self.rest = 100 # It's in the igloo all this time?
self.save()
''' It afterall is a poor creature to be taken care of.
if not int(puffle.id) in self.penguin.engine.puffleCrumbs.defautPuffles:
return # They aren't to be taken care of
'''
'''
if remaining % < 10 : send a postcard blaming (hungry, dirty, or unhappy)
if remaining % < 2 : move puffle to pet store, delete puffle, send a postcard, sue 1000 coins as penalty
'''
fed_percent = food - 5 * ((now - last_fed)/86400) # delta_food = -5% per day
play_percent = play - 5 * ((now - last_played)/86400) # delta_play = -5% per day
clean_percent = clean - 10 * ((now - last_bathed)/86400) # delta_clean = -10% per day
total_percent = (fed_percent + play_percent + clean_percent) / 3.0
if fed_percent < 3 or total_percent < 6:
self.backyard = 1
self.food = 100
self.play = 100
self.clean = 100
self.save()
return
if fed_percent < 10:
pid = self.penguin_id
pname = self.name
def sendMail(mail):
if mail is not None:
sent = mail.sent_on
delta = (time.time() - sent)/3600/12
if delta < 1:
return
Mail(penguin_id=pid, from_user=0, type=110, description=str(pname)).save()
last_mail = Mail.find(where=['penguin_id = ? AND type = 110 AND description = ?', self.penguin_id, self.name], orderby='sent_on DESC', limit=1).addCallback(sendMail)
self.food = fed_percent
self.play = play_percent
self.clean = clean_percent
care_history['food'] = care_history['play'] = care_history['bath'] = now
self.lastcare = json.dumps(care_history)
self.save()
class Stamp(DBObject):
def __int__(self):
return int(self.stamp)
class StampCover(DBObject):
pass
class EPFCom(DBObject):
TABLENAME = 'epfcoms'
def getTime(self):
return int(time.mktime(self.time.timetuple()))
def __str__(self):
return '|'.join(map(str, [self.message, self.getTime(), self.mascot]))
class PenguinDB(object):
"""
<Server.Penguin> will extend this to get db operations
Syntax:
def db_<FunctionName> (*a, **kwa): << must be deferred and mustreturn a defer
> recommended to use with inlineCallbacks
"""
def __init__(self):
self.logger = logging.getLogger(TIMELINE_LOGGER)
self.dbpenguin = None
@inlineCallbacks
def db_init(self):
if self.dbpenguin is None:
column, value = 'username', self.penguin.username
if not self.penguin.id is None:
column, value = 'ID', self.penguin.id
elif not self.penguin.swid is None:
column, value = 'swid', self.penguin.swid
self.dbpenguin = yield Penguin.find(where = ['%s = ?' % column, value], limit = 1)
if self.dbpenguin is None:
raise Exception("[TE201] Penguin not found with {1} - {0}".format(value, column))
returnValue(True)
@inlineCallbacks
def db_nicknameUpdate(self, nick):
p_nickname = self.dbpenguin.nickname
self.dbpenguin.nickname = nick
done = self.dbpenguin.save()
if len(done.errors) > 0:
self.dbpenguin.nickname = p_nickname
for error in done.errors:
self.log('error', "[TE200] MySQL update nickname failed. Error :", error)
returnValue(False)
else:
returnValue(True)
@inlineCallbacks
def db_penguinExists(self, criteria = 'ID', value = None):
exists = yield Penguin.exists(["`%s` = ?" % criteria, value])
returnValue(exists)
@inlineCallbacks
def db_getPenguin(self, criteria, *values):
wh = [criteria] + list(values)
p = yield Penguin.find(where = wh, limit = 1)
returnValue(p)
@inlineCallbacks
def db_refresh(self):
yield self.dbpenguin.refresh()<|fim▁end|> | def __str__(self, withNotes = False):
if not withNotes:
return '|'.join(map(str, [self.id, self.name, int(self.shared), self.likes]))
|
<|file_name|>test_content_app.py<|end_file_name|><|fim▁begin|>"""
To run this test, type this in command line <kolibri manage test -- kolibri.core.content>
"""
import datetime
import unittest
import uuid
import mock
import requests
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.utils import timezone
from le_utils.constants import content_kinds
from rest_framework import status
from rest_framework.test import APITestCase
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.test.helpers import provision_device
from kolibri.core.content import models as content
from kolibri.core.content.test.test_channel_upgrade import ChannelBuilder
from kolibri.core.device.models import DevicePermissions
from kolibri.core.device.models import DeviceSettings
from kolibri.core.logger.models import ContentSessionLog
from kolibri.core.logger.models import ContentSummaryLog
DUMMY_PASSWORD = "password"
class ContentNodeTestBase(object):
    """
    Shared assertion suite for content metadata behaviour; mixed into
    concrete TestCases that load the content fixtures.
    """
    def test_get_prerequisites_for(self):
        """
        test the directional characteristic of prerequisite relationship
        """
        c1 = content.ContentNode.objects.get(title="c1")
        root = content.ContentNode.objects.get(title="root")
        # if root is the prerequisite of c1
        expected_output = content.ContentNode.objects.filter(title__in=["root"])
        actual_output = content.ContentNode.objects.filter(prerequisite_for=c1)
        self.assertEqual(set(expected_output), set(actual_output))
        # then c1 should not be the prerequisite of root
        unexpected_output = content.ContentNode.objects.filter(title__in=["c1"])
        actual_output = content.ContentNode.objects.filter(prerequisite_for=root)
        self.assertNotEqual(set(actual_output), set(unexpected_output))
    def test_get_has_prerequisites(self):
        """
        test the directional characteristic of prerequisite relationship
        """
        c1 = content.ContentNode.objects.get(title="c1")
        root = content.ContentNode.objects.get(title="root")
        # if root is the prerequisite of c1
        expected_output = content.ContentNode.objects.filter(title__in=["c1"])
        actual_output = content.ContentNode.objects.filter(has_prerequisite=root)
        self.assertEqual(set(expected_output), set(actual_output))
        # then c1 should not be the prerequisite of root
        unexpected_output = content.ContentNode.objects.filter(title__in=["root"])
        actual_output = content.ContentNode.objects.filter(has_prerequisite=c1)
        self.assertNotEqual(set(actual_output), set(unexpected_output))
    def test_get_all_related(self):
        """
        test the nondirectional characteristic of related relationship
        """
        c1 = content.ContentNode.objects.get(title="c1")
        c2 = content.ContentNode.objects.get(title="c2")
        # if c1 is related to c2
        expected_output = content.ContentNode.objects.filter(title__in=["c2"])
        actual_output = content.ContentNode.objects.filter(related=c1)
        self.assertEqual(set(expected_output), set(actual_output))
        # then c2 should be related to c1
        expected_output = content.ContentNode.objects.filter(title__in=["c1"])
        actual_output = content.ContentNode.objects.filter(related=c2)
        self.assertEqual(set(expected_output), set(actual_output))
    def test_descendants_of_kind(self):
        # the fixture's only VIDEO descendant of root is c1
        p = content.ContentNode.objects.get(title="root")
        expected_output = content.ContentNode.objects.filter(title__in=["c1"])
        actual_output = p.get_descendants(include_self=False).filter(
            kind=content_kinds.VIDEO
        )
        self.assertEqual(set(expected_output), set(actual_output))
    def test_get_top_level_topics(self):
        # direct topic children of root equal a parent-filtered query
        p = content.ContentNode.objects.get(title="root")
        expected_output = content.ContentNode.objects.filter(
            parent=p, kind=content_kinds.TOPIC
        )
        actual_output = (
            content.ContentNode.objects.get(title="root")
            .get_children()
            .filter(kind=content_kinds.TOPIC)
        )
        self.assertEqual(set(expected_output), set(actual_output))
    def test_tag_str(self):
        # test for ContentTag __str__
        p = content.ContentTag.objects.get(tag_name="tag_2")
        self.assertEqual(str(p), "tag_2")
    def test_lang_str(self):
        # test for Language __str__
        p = content.Language.objects.get(lang_code="en")
        self.assertEqual(str(p), "English-Test")
    def test_channelmetadata_str(self):
        # test for ChannelMetadata __str__
        p = content.ChannelMetadata.objects.get(name="testing")
        self.assertEqual(str(p), "testing")
    def test_tags(self):
        # per-node tag counts as declared in content_test.json
        root_tag_count = content.ContentNode.objects.get(title="root").tags.count()
        self.assertEqual(root_tag_count, 3)
        c1_tag_count = content.ContentNode.objects.get(title="c1").tags.count()
        self.assertEqual(c1_tag_count, 1)
        c2_tag_count = content.ContentNode.objects.get(title="c2").tags.count()
        self.assertEqual(c2_tag_count, 1)
        c2c1_tag_count = content.ContentNode.objects.get(title="c2c1").tags.count()
        self.assertEqual(c2c1_tag_count, 0)
    def test_local_files(self):
        # all LocalFile ids from the fixture must be present
        self.assertTrue(
            content.LocalFile.objects.filter(
                id="9f9438fe6b0d42dd8e913d7d04cfb2b2"
            ).exists()
        )
        self.assertTrue(
            content.LocalFile.objects.filter(
                id="725257a0570044acbd59f8cf6a68b2be"
            ).exists()
        )
        self.assertTrue(
            content.LocalFile.objects.filter(
                id="e00699f859624e0f875ac6fe1e13d648"
            ).exists()
        )
        self.assertTrue(
            content.LocalFile.objects.filter(
                id="4c30dc7619f74f97ae2ccd4fffd09bf2"
            ).exists()
        )
        self.assertTrue(
            content.LocalFile.objects.filter(
                id="8ad3fffedf144cba9492e16daec1e39a"
            ).exists()
        )
    def test_delete_tree(self):
        # deleting the channel tree removes all of its nodes and files
        channel = content.ChannelMetadata.objects.first()
        channel_id = channel.id
        channel.delete_content_tree_and_files()
        self.assertFalse(
            content.ContentNode.objects.filter(channel_id=channel_id).exists()
        )
        self.assertFalse(content.File.objects.all().exists())
class ContentNodeQuerysetTestCase(TestCase):
    """Tests for the custom ContentNode queryset helpers (filter_by_uuids)."""
    fixtures = ["content_test.json"]
    # Channel id baked into the fixture.
    the_channel_id = "6199dde695db4ee4ab392222d5af1e5c"
    @classmethod
    def setUpTestData(cls):
        # Provision the device and create a facility admin once per class.
        provision_device()
        cls.facility = Facility.objects.create(name="facility")
        cls.admin = FacilityUser.objects.create(username="admin", facility=cls.facility)
        cls.admin.set_password(DUMMY_PASSWORD)
        cls.admin.save()
        cls.facility.add_admin(cls.admin)
    def test_filter_uuid(self):
        # filter_by_uuids with all valid ids returns every node
        content_ids = content.ContentNode.objects.values_list("id", flat=True)
        self.assertEqual(
            content.ContentNode.objects.filter_by_uuids(content_ids).count(),
            len(content_ids),
        )
    def test_filter_uuid_bad_uuid(self):
        # a malformed (SQL-injection-style) id must cause zero matches,
        # not fall through to raw SQL
        content_ids = list(content.ContentNode.objects.values_list("id", flat=True))
        content_ids[0] = '7d1bOR"1"="1"d08e29c36115f1af3da99'
        self.assertEqual(
            content.ContentNode.objects.filter_by_uuids(content_ids).count(), 0
        )
# Maps a le_utils content kind to the single learning-activity label that
# infer_learning_activity() reports for it; kinds not listed map to [].
kind_activity_map = {
    content_kinds.EXERCISE: "practice",
    content_kinds.VIDEO: "watch",
    content_kinds.AUDIO: "listen",
    content_kinds.DOCUMENT: "read",
    content_kinds.HTML5: "explore",
}
def infer_learning_activity(kind):
    """Return a one-element list with the activity for `kind`, else []."""
    activity = kind_activity_map.get(kind)
    return [activity] if activity else []
class ContentNodeAPITestCase(APITestCase):
"""
Testcase for content API methods
"""
fixtures = ["content_test.json"]
the_channel_id = "6199dde695db4ee4ab392222d5af1e5c"
    @classmethod
    def setUpTestData(cls):
        # Provision the device and create a facility admin so tests that
        # need authentication can log in with DUMMY_PASSWORD.
        provision_device()
        cls.facility = Facility.objects.create(name="facility")
        cls.admin = FacilityUser.objects.create(username="admin", facility=cls.facility)
        cls.admin.set_password(DUMMY_PASSWORD)
        cls.admin.save()
        cls.facility.add_admin(cls.admin)
def test_prerequisite_for_filter(self):
c1_id = content.ContentNode.objects.get(title="c1").id
response = self.client.get(
reverse("kolibri:core:contentnode-list"), data={"prerequisite_for": c1_id}
)
self.assertEqual(response.data[0]["title"], "root")
def test_has_prerequisite_filter(self):
root_id = content.ContentNode.objects.get(title="root").id
response = self.client.get(
reverse("kolibri:core:contentnode-list"), data={"has_prerequisite": root_id}
)
self.assertEqual(response.data[0]["title"], "c1")
def test_related_filter(self):
c1_id = content.ContentNode.objects.get(title="c1").id
response = self.client.get(
reverse("kolibri:core:contentnode-list"), data={"related": c1_id}
)
self.assertEqual(response.data[0]["title"], "c2")
def map_language(self, lang):
if lang:
return {<|fim▁hole|> for f in [
"id",
"lang_code",
"lang_subcode",
"lang_name",
"lang_direction",
]
}
def _assert_node(self, actual, expected):
assessmentmetadata = (
expected.assessmentmetadata.all()
.values(
"assessment_item_ids",
"number_of_assessments",
"mastery_model",
"randomize",
"is_manipulable",
"contentnode",
)
.first()
)
files = []
for f in expected.files.all():
"local_file__id",
"local_file__available",
"local_file__file_size",
"local_file__extension",
"lang_id",
file = {}
for field in [
"id",
"priority",
"preset",
"supplementary",
"thumbnail",
]:
file[field] = getattr(f, field)
file["checksum"] = f.local_file_id
for field in [
"available",
"file_size",
"extension",
]:
file[field] = getattr(f.local_file, field)
file["lang"] = self.map_language(f.lang)
file["storage_url"] = f.get_storage_url()
files.append(file)
self.assertEqual(
actual,
{
"id": expected.id,
"available": expected.available,
"author": expected.author,
"channel_id": expected.channel_id,
"coach_content": expected.coach_content,
"content_id": expected.content_id,
"description": expected.description,
"duration": None,
"learning_activities": infer_learning_activity(expected.kind),
"kind": expected.kind,
"lang": self.map_language(expected.lang),
"license_description": expected.license_description,
"license_name": expected.license_name,
"license_owner": expected.license_owner,
"num_coach_contents": expected.num_coach_contents,
"options": expected.options,
"parent": expected.parent_id,
"sort_order": expected.sort_order,
"title": expected.title,
"lft": expected.lft,
"rght": expected.rght,
"tree_id": expected.tree_id,
"ancestors": list(expected.get_ancestors().values("id", "title")),
"tags": list(
expected.tags.all()
.order_by("tag_name")
.values_list("tag_name", flat=True)
),
"assessmentmetadata": assessmentmetadata,
"is_leaf": expected.kind != "topic",
"files": files,
},
)
def _assert_nodes(self, data, nodes):
for actual, expected in zip(data, nodes):
self._assert_node(actual, expected)
def test_contentnode_list(self):
root = content.ContentNode.objects.get(title="root")
nodes = root.get_descendants(include_self=True).filter(available=True)
expected_output = len(nodes)
response = self.client.get(reverse("kolibri:core:contentnode-list"))
self.assertEqual(len(response.data), expected_output)
self._assert_nodes(response.data, nodes)
@unittest.skipIf(
getattr(settings, "DATABASES")["default"]["ENGINE"]
== "django.db.backends.postgresql",
"Skipping postgres as not as vulnerable to large queries and large insertions are less performant",
)
def test_contentnode_list_long(self):
# This will make > 1000 nodes which should test our ancestor batching behaviour
builder = ChannelBuilder(num_children=10)
builder.insert_into_default_db()
content.ContentNode.objects.update(available=True)
nodes = content.ContentNode.objects.filter(available=True)
expected_output = len(nodes)
self.assertGreater(expected_output, 1000)
response = self.client.get(reverse("kolibri:core:contentnode-list"))
self.assertEqual(len(response.data), expected_output)
self._assert_nodes(response.data, nodes)
    def _recurse_and_assert(self, data, nodes, recursion_depth=0):
        # Walk the serialized tree response alongside the matching ORM
        # nodes, asserting each node and its (possibly paginated) children.
        for actual, expected in zip(data, nodes):
            children = actual.pop("children", None)
            self._assert_node(actual, expected)
            if children:
                child_nodes = content.ContentNode.objects.filter(
                    available=True, parent=expected
                )
                if children["more"] is None:
                    # Unpaginated: every available child was returned.
                    self.assertEqual(len(child_nodes), len(children["results"]))
                else:
                    # Paginated: fewer than all children returned; "more"
                    # carries a lft__gt cursor just past the 25th child and
                    # a depth that shrinks by one per recursion level.
                    self.assertGreater(len(child_nodes), len(children["results"]))
                    self.assertEqual(children["more"]["id"], expected.id)
                    self.assertEqual(
                        children["more"]["params"]["lft__gt"], child_nodes[24].rght
                    )
                    self.assertEqual(
                        children["more"]["params"]["depth"], 2 - recursion_depth
                    )
                    self._recurse_and_assert(
                        children["results"],
                        child_nodes,
                        recursion_depth=recursion_depth + 1,
                    )
def test_contentnode_tree(self):
root = content.ContentNode.objects.get(title="root")
response = self.client.get(
reverse("kolibri:core:contentnode_tree-detail", kwargs={"pk": root.id})
)
self._recurse_and_assert([response.data], [root])
@unittest.skipIf(
getattr(settings, "DATABASES")["default"]["ENGINE"]
== "django.db.backends.postgresql",
"Skipping postgres as not as vulnerable to large queries and large insertions are less performant",
)
def test_contentnode_tree_long(self):
builder = ChannelBuilder(levels=2, num_children=30)
builder.insert_into_default_db()
content.ContentNode.objects.all().update(available=True)
root = content.ContentNode.objects.get(id=builder.root_node["id"])
response = self.client.get(
reverse("kolibri:core:contentnode_tree-detail", kwargs={"pk": root.id})
)
self._recurse_and_assert([response.data], [root])
def test_contentnode_tree_depth_1(self):
root = content.ContentNode.objects.get(title="root")
response = self.client.get(
reverse("kolibri:core:contentnode_tree-detail", kwargs={"pk": root.id}),
data={"depth": 1},
)
self._recurse_and_assert([response.data], [root])
@unittest.skipIf(
getattr(settings, "DATABASES")["default"]["ENGINE"]
== "django.db.backends.postgresql",
"Skipping postgres as not as vulnerable to large queries and large insertions are less performant",
)
def test_contentnode_tree_lft__gt(self):
builder = ChannelBuilder(levels=2, num_children=30)
builder.insert_into_default_db()
content.ContentNode.objects.all().update(available=True)
root = content.ContentNode.objects.get(id=builder.root_node["id"])
lft__gt = content.ContentNode.objects.filter(parent=root)[24].rght
response = self.client.get(
reverse("kolibri:core:contentnode_tree-detail", kwargs={"pk": root.id}),
data={"lft__gt": lft__gt},
)
self.assertEqual(len(response.data["children"]["results"]), 5)
self.assertIsNone(response.data["children"]["more"])
first_node = content.ContentNode.objects.filter(parent=root)[25]
self._recurse_and_assert(
[response.data["children"]["results"][0]], [first_node], recursion_depth=1
)
@unittest.skipIf(
getattr(settings, "DATABASES")["default"]["ENGINE"]
== "django.db.backends.postgresql",
"Skipping postgres as not as vulnerable to large queries and large insertions are less performant",
)
def test_contentnode_tree_more(self):
builder = ChannelBuilder(levels=2, num_children=30)
builder.insert_into_default_db()
content.ContentNode.objects.all().update(available=True)
root = content.ContentNode.objects.get(id=builder.root_node["id"])
response = self.client.get(
reverse("kolibri:core:contentnode_tree-detail", kwargs={"pk": root.id})
)
first_child = response.data["children"]["results"][0]
self.assertEqual(first_child["children"]["more"]["params"]["depth"], 1)
nested_page_response = self.client.get(
reverse(
"kolibri:core:contentnode_tree-detail",
kwargs={"pk": first_child["children"]["more"]["id"]},
),
data=first_child["children"]["more"]["params"],
)
self.assertEqual(len(nested_page_response.data["children"]["results"]), 5)
self.assertIsNone(nested_page_response.data["children"]["more"])
@mock.patch("kolibri.core.content.api.get_channel_stats_from_studio")
def test_contentnode_granular_network_import(self, stats_mock):
c1 = content.ContentNode.objects.get(title="root")
c1_id = c1.id
c2_id = content.ContentNode.objects.get(title="c1").id
c3_id = content.ContentNode.objects.get(title="c2").id
content.ContentNode.objects.all().update(available=False)
stats = {
c1_id: {
"total_resources": 2,
"coach_content": False,
"num_coach_contents": 0,
},
c2_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
c3_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
}
stats_mock.return_value = stats
response = self.client.get(
reverse("kolibri:core:contentnode_granular-detail", kwargs={"pk": c1_id})
)
self.assertEqual(
response.data,
{
"id": c1_id,
"title": "root",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 2,
"on_device_resources": 0,
"coach_content": False,
"importable": True,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
"ancestors": list(c1.get_ancestors().values("id", "title")),
"children": [
{
"id": c2_id,
"title": "c1",
"kind": "video",
"is_leaf": True,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
{
"id": c3_id,
"title": "c2",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
],
},
)
@mock.patch("kolibri.core.content.api.get_channel_stats_from_disk")
def test_contentnode_granular_local_import(self, stats_mock):
content.LocalFile.objects.update(available=False)
content.ContentNode.objects.update(available=False)
c1 = content.ContentNode.objects.get(title="root")
c1_id = c1.id
c2_id = content.ContentNode.objects.get(title="c1").id
c3_id = content.ContentNode.objects.get(title="c2").id
stats = {
c1_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
c3_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
}
stats_mock.return_value = stats
response = self.client.get(
reverse("kolibri:core:contentnode_granular-detail", kwargs={"pk": c1_id}),
{"importing_from_drive_id": "123"},
)
self.assertEqual(
response.data,
{
"id": c1_id,
"title": "root",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
"ancestors": list(c1.get_ancestors().values("id", "title")),
"children": [
{
"id": c2_id,
"title": "c1",
"kind": "video",
"is_leaf": True,
"available": False,
"total_resources": 0,
"on_device_resources": 0,
"importable": False,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
{
"id": c3_id,
"title": "c2",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
],
},
)
@mock.patch("kolibri.core.content.api.get_channel_stats_from_peer")
def test_contentnode_granular_remote_import(self, stats_mock):
content.LocalFile.objects.update(available=False)
content.ContentNode.objects.update(available=False)
c1 = content.ContentNode.objects.get(title="root")
c1_id = c1.id
c2_id = content.ContentNode.objects.get(title="c1").id
c3_id = content.ContentNode.objects.get(title="c2").id
stats = {
c1_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
c3_id: {
"total_resources": 1,
"coach_content": False,
"num_coach_contents": 0,
},
}
stats_mock.return_value = stats
response = self.client.get(
reverse("kolibri:core:contentnode_granular-detail", kwargs={"pk": c1_id}),
{"importing_from_peer_id": "test"},
)
self.assertEqual(
response.data,
{
"id": c1_id,
"title": "root",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
"ancestors": list(c1.get_ancestors().values("id", "title")),
"children": [
{
"id": c2_id,
"title": "c1",
"kind": "video",
"is_leaf": True,
"available": False,
"total_resources": 0,
"on_device_resources": 0,
"importable": False,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
{
"id": c3_id,
"title": "c2",
"kind": "topic",
"is_leaf": False,
"available": False,
"total_resources": 1,
"on_device_resources": 0,
"importable": True,
"coach_content": False,
"num_coach_contents": 0,
"new_resource": False,
"num_new_resources": 0,
"updated_resource": False,
},
],
},
)
def test_contentnode_granular_export_available(self):
c1 = content.ContentNode.objects.get(title="c1")
c1_id = c1.id
content.ContentNode.objects.filter(title="c1").update(on_device_resources=1)
response = self.client.get(
reverse("kolibri:core:contentnode_granular-detail", kwargs={"pk": c1_id}),
data={"for_export": True},
)
self.assertEqual(
response.data,
{
"id": c1_id,
"title": "c1",
"kind": "video",
"is_leaf": True,
"available": True,
"total_resources": 1,
"on_device_resources": 1,
"importable": None,
"children": [],
"coach_content": False,
"num_coach_contents": 0,
"new_resource": None,
"num_new_resources": None,
"updated_resource": None,
"ancestors": list(c1.get_ancestors().values("id", "title")),
},
)
def test_contentnode_granular_export_unavailable(self):
c1 = content.ContentNode.objects.get(title="c1")
c1_id = c1.id
content.ContentNode.objects.filter(title="c1").update(available=False)
response = self.client.get(
reverse("kolibri:core:contentnode_granular-detail", kwargs={"pk": c1_id}),
data={"for_export": True},
)
self.assertEqual(
response.data,
{
"id": c1_id,
"title": "c1",
"kind": "video",
"is_leaf": True,
"available": False,
"total_resources": 0,
"on_device_resources": 0,
"importable": None,
"children": [],
"coach_content": False,
"num_coach_contents": 0,
"new_resource": None,
"num_new_resources": None,
"updated_resource": None,
"ancestors": list(c1.get_ancestors().values("id", "title")),
},
)
def test_contentnode_retrieve(self):
c1_id = content.ContentNode.objects.get(title="c1").id
response = self.client.get(
reverse("kolibri:core:contentnode-detail", kwargs={"pk": c1_id})
)
self.assertEqual(response.data["id"], c1_id.__str__())
def test_contentnode_descendants_assessments_exercise_node(self):
c1 = content.ContentNode.objects.filter(kind=content_kinds.EXERCISE).first()
c1_id = c1.id
response = self.client.get(
reverse("kolibri:core:contentnode-descendants-assessments"),
data={"ids": c1_id},
)
self.assertEqual(
next(
item["num_assessments"] for item in response.data if item["id"] == c1_id
),
c1.assessmentmetadata.first().number_of_assessments,
)
def test_contentnode_descendants_assessments_exercise_parent(self):
c1 = content.ContentNode.objects.filter(kind=content_kinds.EXERCISE).first()
parent = c1.parent
parent_id = parent.id
response = self.client.get(
reverse("kolibri:core:contentnode-descendants-assessments"),
data={"ids": parent_id},
)
self.assertEqual(
next(
item["num_assessments"]
for item in response.data
if item["id"] == parent_id
),
c1.assessmentmetadata.first().number_of_assessments,
)
def test_contentnode_descendants_assessments_exercise_root(self):
c1 = content.ContentNode.objects.filter(kind=content_kinds.EXERCISE).first()
root = content.ContentNode.objects.get(parent__isnull=True)
root_id = root.id
response = self.client.get(
reverse("kolibri:core:contentnode-descendants-assessments"),
data={"ids": root_id},
)
self.assertEqual(
next(
item["num_assessments"]
for item in response.data
if item["id"] == root_id
),
c1.assessmentmetadata.first().number_of_assessments,
)
def test_contentnode_descendants_assessments_exercise_parent_sum_siblings(self):
c1 = content.ContentNode.objects.filter(kind=content_kinds.EXERCISE).first()
parent = c1.parent
parent_id = parent.id
sibling = content.ContentNode.objects.create(
pk="6a406ac66b224106aa2e93f73a94333d",
channel_id=c1.channel_id,
content_id="ded4a083e75f4689b386fd2b706e792a",
kind=content_kinds.EXERCISE,
parent=parent,
title="sibling exercise",
available=True,
)
sibling_assessment_metadata = content.AssessmentMetaData.objects.create(
id="6a406ac66b224106aa2e93f73a94333d",
contentnode=sibling,
number_of_assessments=5,
)
response = self.client.get(
reverse("kolibri:core:contentnode-descendants-assessments"),
data={"ids": parent_id},
)
self.assertEqual(
next(
item["num_assessments"]
for item in response.data
if item["id"] == parent_id
),
c1.assessmentmetadata.first().number_of_assessments
+ sibling_assessment_metadata.number_of_assessments,
)
def test_contentnode_descendants_assessments_exercise_parent_sum_siblings_one_unavailable(
self,
):
c1 = content.ContentNode.objects.filter(kind=content_kinds.EXERCISE).first()
c1.available = False
c1.save()
parent = c1.parent
parent_id = parent.id
sibling = content.ContentNode.objects.create(
pk="6a406ac66b224106aa2e93f73a94333d",
channel_id=c1.channel_id,
content_id="ded4a083e75f4689b386fd2b706e792a",
kind=content_kinds.EXERCISE,
parent=parent,
title="sibling exercise",
available=True,
)
sibling_assessment_metadata = content.AssessmentMetaData.objects.create(
id="6a406ac66b224106aa2e93f73a94333d",
contentnode=sibling,
number_of_assessments=5,
)
response = self.client.get(
reverse("kolibri:core:contentnode-descendants-assessments"),
data={"ids": parent_id},
)
self.assertEqual(
next(
item["num_assessments"]
for item in response.data
if item["id"] == parent_id
),
sibling_assessment_metadata.number_of_assessments,
)
def test_contentnode_descendants_topic_siblings_ancestor_ids(self):
root = content.ContentNode.objects.get(parent__isnull=True)
topics = content.ContentNode.objects.filter(
parent=root, kind=content_kinds.TOPIC
)
topic_ids = topics.values_list("id", flat=True)
response = self.client.get(
reverse("kolibri:core:contentnode-descendants"),
data={"ids": ",".join(topic_ids)},
)
for datum in response.data:
topic = topics.get(id=datum["ancestor_id"])
self.assertTrue(topic.get_descendants().filter(id=datum["id"]).exists())
def test_contentnode_descendants_topic_siblings_kind_filter(self):
root = content.ContentNode.objects.get(parent__isnull=True)
topics = content.ContentNode.objects.filter(
parent=root, kind=content_kinds.TOPIC
)
topic_ids = topics.values_list("id", flat=True)
response = self.client.get(
reverse("kolibri:core:contentnode-descendants"),
data={
"ids": ",".join(topic_ids),
"descendant_kind": content_kinds.EXERCISE,
},
)
for datum in response.data:
topic = topics.get(id=datum["ancestor_id"])
self.assertTrue(
topic.get_descendants()
.filter(id=datum["id"], kind=content_kinds.EXERCISE)
.exists()
)
def test_contentnode_descendants_topic_parent_child_ancestor_ids(self):
root = content.ContentNode.objects.get(parent__isnull=True)
topic = content.ContentNode.objects.filter(
parent=root, kind=content_kinds.TOPIC, children__isnull=False
).first()
response = self.client.get(
reverse("kolibri:core:contentnode-descendants"),
data={"ids": ",".join((root.id, topic.id))},
)
topic_items = [
datum for datum in response.data if datum["ancestor_id"] == topic.id
]
for node in topic.get_descendants(include_self=False).filter(available=True):
self.assertTrue(next(item for item in topic_items if item["id"] == node.id))
root_items = [
datum for datum in response.data if datum["ancestor_id"] == root.id
]
for node in root.get_descendants(include_self=False).filter(available=True):
self.assertTrue(next(item for item in root_items if item["id"] == node.id))
def test_contentnode_descendants_availability(self):
content.ContentNode.objects.all().update(available=False)
root = content.ContentNode.objects.get(parent__isnull=True)
topics = content.ContentNode.objects.filter(
parent=root, kind=content_kinds.TOPIC
)
topic_ids = topics.values_list("id", flat=True)
response = self.client.get(
reverse("kolibri:core:contentnode-descendants"),
data={"ids": ",".join(topic_ids)},
)
self.assertEqual(len(response.data), 0)
def test_contentnode_node_assessments_available(self):
content.ContentNode.objects.all().update(available=True)
root = content.ContentNode.objects.get(parent__isnull=True)
exercise_ids = (
root.get_descendants()
.filter(kind=content_kinds.EXERCISE)
.values_list("id", flat=True)
)
response = self.client.get(
reverse("kolibri:core:contentnode-node-assessments"),
data={"ids": ",".join(exercise_ids)},
)
self.assertEqual(response.data, 1)
def test_contentnode_node_assessments_not_available(self):
content.ContentNode.objects.all().update(available=False)
root = content.ContentNode.objects.get(parent__isnull=True)
exercise_ids = (
root.get_descendants()
.filter(kind=content_kinds.EXERCISE)
.values_list("id", flat=True)
)
response = self.client.get(
reverse("kolibri:core:contentnode-node-assessments"),
data={"ids": ",".join(exercise_ids)},
)
self.assertEqual(response.data, 0)
def test_contentnode_recommendations(self):
node_id = content.ContentNode.objects.get(title="c2c2").id
response = self.client.get(
reverse(
"kolibri:core:contentnode-recommendations-for", kwargs={"pk": node_id}
)
)
self.assertEqual(len(response.data), 2)
def test_contentnode_recommendations_does_error_for_unavailable_node(self):
node = content.ContentNode.objects.get(title="c2c2")
node.available = False
node.save()
node_id = node.id
response = self.client.get(
reverse(
"kolibri:core:contentnode-recommendations-for", kwargs={"pk": node_id}
)
)
self.assertEqual(response.status_code, 404)
def test_contentnode_ids(self):
titles = ["c2c2", "c2c3"]
nodes = [content.ContentNode.objects.get(title=title) for title in titles]
response = self.client.get(
reverse("kolibri:core:contentnode-list"),
data={"ids": ",".join([n.id for n in nodes])},
)
self.assertEqual(len(response.data), 2)
for i in range(len(titles)):
self.assertEqual(response.data[i]["title"], titles[i])
def test_contentnode_parent(self):
parent = content.ContentNode.objects.get(title="c2")
children = parent.get_children()
response = self.client.get(
reverse("kolibri:core:contentnode-list"),
data={"parent": parent.id, "include_coach_content": False},
)
self.assertEqual(len(response.data), children.count())
for i in range(len(children)):
self.assertEqual(response.data[i]["title"], children[i].title)
def test_contentnode_tags(self):
expected = {
"root": ["tag_1", "tag_2", "tag_3"],
"c1": ["tag_1"],
"c2": ["tag_2"],
}
for title, tags in expected.items():
node = content.ContentNode.objects.get(title=title)
response = self.client.get(
reverse("kolibri:core:contentnode-detail", kwargs={"pk": node.id})
)
self.assertEqual(set(response.data["tags"]), set(tags))
def test_channelmetadata_list(self):
response = self.client.get(reverse("kolibri:core:channel-list", kwargs={}))
self.assertEqual(response.data[0]["name"], "testing")
def test_channelmetadata_retrieve(self):
data = content.ChannelMetadata.objects.values()[0]
response = self.client.get(
reverse("kolibri:core:channel-detail", kwargs={"pk": data["id"]})
)
self.assertEqual(response.data["name"], "testing")
def test_channelmetadata_langfield(self):
data = content.ChannelMetadata.objects.first()
root_lang = content.Language.objects.get(pk=1)
data.root.lang = root_lang
data.root.save()
response = self.client.get(
reverse("kolibri:core:channel-detail", kwargs={"pk": data.id})
)
self.assertEqual(response.data["lang_code"], root_lang.lang_code)
self.assertEqual(response.data["lang_name"], root_lang.lang_name)
def test_channelmetadata_langfield_none(self):
data = content.ChannelMetadata.objects.first()
response = self.client.get(
reverse("kolibri:core:channel-detail", kwargs={"pk": data.id})
)
self.assertEqual(response.data["lang_code"], None)
self.assertEqual(response.data["lang_name"], None)
def test_channelmetadata_content_available_param_filter_lowercase_true(self):
response = self.client.get(
reverse("kolibri:core:channel-list"), {"available": "true"}
)
self.assertEqual(response.data[0]["id"], "6199dde695db4ee4ab392222d5af1e5c")
def test_channelmetadata_content_available_param_filter_uppercase_true(self):
response = self.client.get(
reverse("kolibri:core:channel-list"), {"available": True}
)
self.assertEqual(response.data[0]["id"], "6199dde695db4ee4ab392222d5af1e5c")
def test_channelmetadata_content_unavailable_param_filter_false(self):
content.ContentNode.objects.filter(title="root").update(available=False)
response = self.client.get(
reverse("kolibri:core:channel-list"), {"available": False}
)
self.assertEqual(response.data[0]["id"], "6199dde695db4ee4ab392222d5af1e5c")
def test_channelmetadata_content_available_field_true(self):
response = self.client.get(reverse("kolibri:core:channel-list"))
self.assertEqual(response.data[0]["available"], True)
def test_channelmetadata_content_available_field_false(self):
content.ContentNode.objects.filter(title="root").update(available=False)
response = self.client.get(reverse("kolibri:core:channel-list"))
self.assertEqual(response.data[0]["available"], False)
def test_channelmetadata_has_exercises_filter(self):
# Has nothing else for that matter...
no_exercise_channel = content.ContentNode.objects.create(
pk="6a406ac66b224106aa2e93f73a94333d",
channel_id="f8ec4a5d14cd4716890999da596032d2",
content_id="ded4a083e75f4689b386fd2b706e792a",
kind="topic",
title="no exercise channel",
)
content.ChannelMetadata.objects.create(
id="63acff41781543828861ade41dbdd7ff",
name="no exercise channel metadata",
root=no_exercise_channel,
)
no_filter_response = self.client.get(reverse("kolibri:core:channel-list"))
self.assertEqual(len(no_filter_response.data), 2)
with_filter_response = self.client.get(
reverse("kolibri:core:channel-list"), {"has_exercise": True}
)
self.assertEqual(len(with_filter_response.data), 1)
self.assertEqual(with_filter_response.data[0]["name"], "testing")
def test_file_list(self):
response = self.client.get(reverse("kolibri:core:file-list"))
self.assertEqual(len(response.data), 5)
def test_file_retrieve(self):
response = self.client.get(
reverse(
"kolibri:core:file-detail",
kwargs={"pk": "6bdfea4a01830fdd4a585181c0b8068c"},
)
)
self.assertEqual(response.data["preset"], "high_res_video")
def _setup_contentnode_progress(self):
# set up data for testing progress_fraction field on content node endpoint
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="learner", facility=facility)
user.set_password("pass")
user.save()
root = content.ContentNode.objects.get(title="root")
c1 = content.ContentNode.objects.get(title="c1")
c2 = content.ContentNode.objects.get(title="c2")
c2c1 = content.ContentNode.objects.get(title="c2c1")
c2c3 = content.ContentNode.objects.get(title="c2c3")
for node, progress in [(c2c1, 0.7), (c2c3, 0.5)]:
ContentSummaryLog.objects.create(
user=user,
content_id=node.content_id,
progress=progress,
channel_id=self.the_channel_id,
start_timestamp=datetime.datetime.now(),
)
return facility, root, c1, c2, c2c1, c2c3
def test_contentnode_progress_detail_endpoint(self):
facility, root, c1, c2, c2c1, c2c3 = self._setup_contentnode_progress()
def assert_progress(node, progress):
response = self.client.get(
reverse(
"kolibri:core:contentnodeprogress-detail", kwargs={"pk": node.id}
)
)
self.assertEqual(response.data["progress_fraction"], progress)
# check that there is no progress when not logged in
assert_progress(root, 0)
assert_progress(c1, 0)
assert_progress(c2, 0)
assert_progress(c2c1, 0)
# check that progress is calculated appropriately when user is logged in
self.client.login(username="learner", password="pass", facility=facility)
# The progress endpoint is used, so should report progress for topics
assert_progress(root, 0.24)
assert_progress(c1, 0)
assert_progress(c2, 0.4)
assert_progress(c2c1, 0.7)
def test_contentnode_progress_list_endpoint(self):
facility, root, c1, c2, c2c1, c2c3 = self._setup_contentnode_progress()
response = self.client.get(reverse("kolibri:core:contentnodeprogress-list"))
def get_progress_fraction(node):
return list(filter(lambda x: x["id"] == node.id, response.data))[0][
"progress_fraction"
]
# check that there is no progress when not logged in
self.assertEqual(get_progress_fraction(root), 0)
self.assertEqual(get_progress_fraction(c1), 0)
self.assertEqual(get_progress_fraction(c2), 0)
self.assertEqual(get_progress_fraction(c2c1), 0)
# check that progress is calculated appropriately when user is logged in
self.client.login(username="learner", password="pass", facility=facility)
response = self.client.get(reverse("kolibri:core:contentnodeprogress-list"))
# The progress endpoint is used, so should report progress for topics
self.assertEqual(get_progress_fraction(root), 0.24)
self.assertEqual(get_progress_fraction(c1), 0)
self.assertEqual(get_progress_fraction(c2), 0.4)
self.assertEqual(get_progress_fraction(c2c1), 0.7)
def test_filtering_coach_content_anon(self):
response = self.client.get(
reverse("kolibri:core:contentnode-list"),
data={"include_coach_content": False},
)
# TODO make content_test.json fixture more organized. Here just, hardcoding the correct count
self.assertEqual(len(response.data), 7)
def test_filtering_coach_content_admin(self):
self.client.login(username=self.admin.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-list"),
data={"include_coach_content": True},
)
expected_output = content.ContentNode.objects.exclude(
available=False
).count() # coach_content node should be returned
self.assertEqual(len(response.data), expected_output)
def test_copies(self):
# the pk is actually a content id
response = self.client.get(
reverse(
"kolibri:core:contentnode-copies",
kwargs={"pk": "c6f49ea527824f398f4d5d26faf19396"},
)
)
expected_titles = set(["root", "c1", "copy"])
response_titles = set()
for node in response.data[0]:
response_titles.add(node["title"])
self.assertSetEqual(expected_titles, response_titles)
def test_available_copies(self):
# the pk is actually a content id
response = self.client.get(
reverse(
"kolibri:core:contentnode-copies",
kwargs={"pk": "f2332710c2fd483386cdeb5dcbdda81a"},
)
)
# no results should be returned for unavailable content node
self.assertEqual(len(response.data), 0)
def test_copies_count(self):
response = self.client.get(
reverse("kolibri:core:contentnode-copies-count"),
data={
"content_ids": "f2332710c2fd483386cdeb5dcbdda81f,c6f49ea527824f398f4d5d26faf15555,f2332710c2fd483386cdeb5dcbdda81a"
},
)
# assert non existent content id does not show up in results
# no results should be returned for unavailable content node
self.assertEqual(len(response.data), 1)
self.assertEqual(
response.data[0]["count"],
content.ContentNode.objects.filter(
content_id="f2332710c2fd483386cdeb5dcbdda81f"
).count(),
)
def test_search_total_results(self):
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "root"}
)
self.assertEqual(response.data["total_results"], 1)
def test_search_kinds(self):
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "root"}
)
self.assertEqual(list(response.data["content_kinds"]), [content_kinds.TOPIC])
def test_search_repeated_kinds(self):
# Ensure that each kind is only returned once.
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "c"}
)
kinds = response.data["content_kinds"][:]
self.assertEqual(len(kinds), len(set(kinds)))
def test_search_channels(self):
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "root"}
)
self.assertEqual(response.data["channel_ids"][:], [self.the_channel_id])
def test_search_repeated_channels(self):
# Ensure that each channel_id is only returned once.
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "c"}
)
channel_ids = response.data["channel_ids"][:]
self.assertEqual(len(channel_ids), len(set(channel_ids)))
def test_search(self):
# ensure search works when there are no words not defined
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "!?,"}
)
self.assertEqual(len(response.data["results"]), 0)
# ensure search words when there is only stopwords
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "or"}
)
self.assertEqual(len(response.data["results"]), 0)
# regular search
response = self.client.get(
reverse("kolibri:core:contentnode_search-list"), data={"search": "root"}
)
self.assertEqual(len(response.data["results"]), 1)
def _create_session_logs(self):
content_ids = (
"f2332710c2fd483386cdeb5ecbdda81f",
"ce603df7c46b424b934348995e1b05fb",
"481e1bda1faa445d801ceb2afbd2f42f",
)
channel_id = "6199dde695db4ee4ab392222d5af1e5c"
[
ContentSessionLog.objects.create(
channel_id=channel_id,
content_id=content_ids[0],
start_timestamp=timezone.now(),
kind="audio",
)
for _ in range(50)
]
[
ContentSessionLog.objects.create(
channel_id=channel_id,
content_id=content_ids[1],
start_timestamp=timezone.now(),
kind="exercise",
)
for _ in range(25)
]
[
ContentSessionLog.objects.create(
channel_id=channel_id,
content_id=content_ids[2],
start_timestamp=timezone.now(),
kind="document",
)
for _ in range(1)
]
# create log for non existent content id
# should not show up in api response
ContentSessionLog.objects.create(
channel_id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
start_timestamp=timezone.now(),
kind="content",
)
return content_ids
def test_popular(self):
expected_content_ids = self._create_session_logs()
response = self.client.get(reverse("kolibri:core:contentnode-popular"))
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_popular_no_coach_content(self):
expected_content_ids = self._create_session_logs()
node = content.ContentNode.objects.get(content_id=expected_content_ids[0])
node.coach_content = True
node.save()
expected_content_ids = expected_content_ids[1:]
response = self.client.get(
reverse("kolibri:core:contentnode-popular"),
data={"include_coach_content": False},
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_popular_coach_has_coach_content(self):
coach = FacilityUser.objects.create(username="coach", facility=self.facility)
coach.set_password(DUMMY_PASSWORD)
coach.save()
self.facility.add_coach(coach)
expected_content_ids = self._create_session_logs()
node = content.ContentNode.objects.get(content_id=expected_content_ids[0])
node.coach_content = True
node.save()
self.client.login(username="coach", password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-popular"),
data={"include_coach_content": True},
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_popular_ten_minute_cache(self):
self._create_session_logs()
response = self.client.get(reverse("kolibri:core:contentnode-popular"))
self.assertEqual(response["Cache-Control"], "max-age=600")
def _create_summary_logs(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
content_ids = ("f2332710c2fd483386cdeb5ecbdda81f",)
channel_id = "6199dde695db4ee4ab392222d5af1e5c"
ContentSummaryLog.objects.create(
channel_id=channel_id,
content_id=content_ids[0],
user_id=user.id,
start_timestamp=timezone.now(),
kind="audio",
)
# create log with progress of 1
# should not show up in api response
ContentSummaryLog.objects.create(
channel_id=channel_id,
content_id="ce603df7c46b424b934348995e1b05fb",
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
# create log for non existent content id
# should not show up in api response
ContentSummaryLog.objects.create(
channel_id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
user_id=user.id,
start_timestamp=timezone.now(),
kind="content",
)
user.set_password(DUMMY_PASSWORD)
user.save()
return user, content_ids
def test_resume(self):
user, expected_content_ids = self._create_summary_logs()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-resume", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_resume_wrong_id(self):
user, expected_content_ids = self._create_summary_logs()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-resume", kwargs={"pk": "wrong"})
)
response_content_ids = [node["content_id"] for node in response.json()]
self.assertEqual([], response_content_ids)
def test_resume_zero_cache(self):
user, expected_content_ids = self._create_summary_logs()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-resume", kwargs={"pk": user.id})
)
self.assertEqual(response["Cache-Control"], "max-age=0")
def test_next_steps_prereq(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
post_req = root.prerequisite_for.first()
expected_content_ids = (post_req.content_id,)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_next_steps_prereq_zero_cache(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
self.assertEqual(response["Cache-Control"], "max-age=0")
def test_next_steps_prereq_wrong_id(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": "wrong"})
)
response_content_ids = [node["content_id"] for node in response.json()]
self.assertEqual([], response_content_ids)
def test_next_steps_prereq_in_progress(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
post_req = root.prerequisite_for.first()
ContentSummaryLog.objects.create(
channel_id=post_req.channel_id,
content_id=post_req.content_id,
user_id=user.id,
progress=0.5,
start_timestamp=timezone.now(),
kind="audio",
)
expected_content_ids = []
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_next_steps_prereq_coach_content_not_coach(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
post_req = root.prerequisite_for.first()
post_req.coach_content = True
post_req.save()
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(), response_content_ids)
def test_next_steps_prereq_coach_content_coach(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
facility.add_coach(user)
root = content.ContentNode.objects.get(title="root")
ContentSummaryLog.objects.create(
channel_id=root.channel_id,
content_id=root.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
post_req = root.prerequisite_for.first()
post_req.coach_content = True
post_req.save()
expected_content_ids = (post_req.content_id,)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_next_steps_sibling(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
node = content.ContentNode.objects.get(
content_id="ce603df7c46b424b934348995e1b05fb"
)
ContentSummaryLog.objects.create(
channel_id=node.channel_id,
content_id=node.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
sibling = node.get_next_sibling()
expected_content_ids = (sibling.content_id,)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_next_steps_sibling_in_progress(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
node = content.ContentNode.objects.get(
content_id="ce603df7c46b424b934348995e1b05fb"
)
ContentSummaryLog.objects.create(
channel_id=node.channel_id,
content_id=node.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
sibling = node.get_next_sibling()
ContentSummaryLog.objects.create(
channel_id=sibling.channel_id,
content_id=sibling.content_id,
user_id=user.id,
progress=0.5,
start_timestamp=timezone.now(),
kind="audio",
)
expected_content_ids = []
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def test_next_steps_sibling_coach_content_not_coach(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
node = content.ContentNode.objects.get(
content_id="ce603df7c46b424b934348995e1b05fb"
)
ContentSummaryLog.objects.create(
channel_id=node.channel_id,
content_id=node.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
sibling = node.get_next_sibling()
sibling.coach_content = True
sibling.save()
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(), response_content_ids)
def test_next_steps_sibling_coach_content_coach(self):
facility = Facility.objects.create(name="MyFac")
user = FacilityUser.objects.create(username="user", facility=facility)
facility.add_coach(user)
node = content.ContentNode.objects.get(
content_id="ce603df7c46b424b934348995e1b05fb"
)
ContentSummaryLog.objects.create(
channel_id=node.channel_id,
content_id=node.content_id,
user_id=user.id,
progress=1,
start_timestamp=timezone.now(),
kind="audio",
)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
sibling = node.get_next_sibling()
sibling.coach_content = True
sibling.save()
expected_content_ids = (sibling.content_id,)
response = self.client.get(
reverse("kolibri:core:contentnode-next-steps", kwargs={"pk": user.id})
)
response_content_ids = set(node["content_id"] for node in response.json())
self.assertSetEqual(set(expected_content_ids), response_content_ids)
def tearDown(self):
"""
clean up files/folders created during the test
"""
cache.clear()
super(ContentNodeAPITestCase, self).tearDown()
def mock_patch_decorator(func):
def wrapper(*args, **kwargs):
mock_object = mock.Mock()
mock_object.json.return_value = [{"id": 1, "name": "studio"}]
with mock.patch.object(requests, "get", return_value=mock_object):
return func(*args, **kwargs)
return wrapper
class KolibriStudioAPITestCase(APITestCase):
@classmethod
def setUpTestData(cls):
DeviceSettings.objects.create(is_provisioned=True)
cls.facility = Facility.objects.create(name="facility")
superuser = FacilityUser.objects.create(
username="superuser", facility=cls.facility
)
superuser.set_password(DUMMY_PASSWORD)
superuser.save()
cls.superuser = superuser
DevicePermissions.objects.create(user=superuser, is_superuser=True)
def setUp(self):
self.client.login(username=self.superuser.username, password=DUMMY_PASSWORD)
@mock_patch_decorator
def test_channel_list(self):
response = self.client.get(
reverse("kolibri:core:remotechannel-list"), format="json"
)
self.assertEqual(response.data[0]["id"], 1)
@mock_patch_decorator
def test_channel_retrieve_list(self):
response = self.client.get(
reverse("kolibri:core:remotechannel-retrieve-list", kwargs={"pk": 1}),
format="json",
)
self.assertEqual(response.data[0]["id"], 1)
@mock_patch_decorator
def test_no_permission_non_superuser_channel_list(self):
user = FacilityUser.objects.create(username="user", facility=self.facility)
user.set_password(DUMMY_PASSWORD)
user.save()
self.client.logout()
self.client.login(username=user.username, password=DUMMY_PASSWORD)
response = self.client.get(
reverse("kolibri:core:remotechannel-list"), format="json"
)
self.assertEqual(response.status_code, 403)
@mock_patch_decorator
def test_channel_retrieve(self):
response = self.client.get(
reverse("kolibri:core:remotechannel-detail", kwargs={"pk": "abc"}),
format="json",
)
self.assertEqual(response.data["name"], "studio")
@mock_patch_decorator
def test_channel_info_404(self):
mock_object = mock.Mock()
mock_object.status_code = 404
requests.get.return_value = mock_object
response = self.client.get(
reverse("kolibri:core:remotechannel-detail", kwargs={"pk": "abc"}),
format="json",
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def tearDown(self):
cache.clear()<|fim▁end|> | f: getattr(lang, f) |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>import os
DIRNAME = os.path.dirname(__file__)
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}<|fim▁hole|> "django.contrib.contenttypes",
"pqauth.pqauth_django_server"
)
SECRET_KEY = "chicken butt"
PQAUTH_SERVER_KEY = os.path.join(DIRNAME, "server.key")
ROOT_URLCONF = "pqauth.pqauth_django_server.urls"
TEST_CLIENT_KEY = os.path.join(DIRNAME, "client.key")
TEST_EVIL_KEY = os.path.join(DIRNAME, "evil.key")<|fim▁end|> | }
INSTALLED_APPS = (
"django.contrib.auth", |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# state_machine documentation build configuration file, created by
# sphinx-quickstart on Thu Apr 3 08:46:47 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'state_machine'
copyright = u'2014, Jonathan Tushman'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2.9'
# The full version, including alpha/beta/rc tags.
release = '0.2.9'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.<|fim▁hole|># of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'state_machinedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'state_machine.tex', u'state\\_machine Documentation',
u'Jonathan Tushman', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'state_machine', u'state_machine Documentation',
[u'Jonathan Tushman'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'state_machine', u'state_machine Documentation',
u'Jonathan Tushman', 'state_machine', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|> | #html_short_title = None
# The name of an image file (relative to this directory) to place at the top |
<|file_name|>edits-reducer.js<|end_file_name|><|fim▁begin|>/** @format */
/**
* External dependencies
*/
import { compact, isEqual } from 'lodash';
/**
* Internal dependencies
*/
import { createReducer } from 'state/utils';
import {
WOOCOMMERCE_PRODUCT_CATEGORY_CREATE,
WOOCOMMERCE_PRODUCT_CATEGORY_UPDATE,
WOOCOMMERCE_PRODUCT_CATEGORY_EDIT,
WOOCOMMERCE_PRODUCT_CATEGORY_EDIT_CLEAR,
WOOCOMMERCE_PRODUCT_CATEGORY_UPDATED,
} from 'woocommerce/state/action-types';
import { getBucket } from '../helpers';
export default createReducer( null, {
[ WOOCOMMERCE_PRODUCT_CATEGORY_EDIT ]: editProductCategoryAction,
[ WOOCOMMERCE_PRODUCT_CATEGORY_EDIT_CLEAR ]: clearEditsAction,
[ WOOCOMMERCE_PRODUCT_CATEGORY_UPDATED ]: productCategoryUpdatedAction,
} );
function productCategoryUpdatedAction( edits, action ) {
const { originatingAction } = action;
if ( WOOCOMMERCE_PRODUCT_CATEGORY_CREATE === originatingAction.type ) {
const prevCategoryId = originatingAction.category.id;
const prevEdits = edits || {};
const prevCreates = prevEdits.creates || [];
const newCreates = compact(
prevCreates.map( category => {
if ( isEqual( prevCategoryId, category.id ) ) {
// Remove this create, it's no longer needed.
return undefined;
}
return category;
} )
);
return {
...prevEdits,
creates: newCreates.length ? newCreates : undefined,
};
}
if ( WOOCOMMERCE_PRODUCT_CATEGORY_UPDATE === originatingAction.type ) {
const prevCategoryId = originatingAction.category.id;
const prevEdits = edits || {};
const prevUpdates = prevEdits.updates || [];
const newUpdates = compact(
prevUpdates.map( category => {
if ( isEqual( prevCategoryId, category.id ) ) {
return undefined;
}
return category;
} )
);
return {
...prevEdits,
updates: newUpdates.length ? newUpdates : undefined,
};
}
// TODO: Add support for delete.
return edits;
}
<|fim▁hole|>
function editProductCategoryAction( edits, action ) {
const { category, data } = action;
const prevEdits = edits || {};
const bucket = getBucket( category );
const newArray = editProductCategory( prevEdits[ bucket ], category, data );
return {
...prevEdits,
[ bucket ]: newArray,
currentlyEditingId: category.id,
};
}
function editProductCategory( array, category, data ) {
const prevArray = array || [];
let found = false;
// Look for this object in the appropriate create or edit array first.
const newArray = compact(
prevArray.map( c => {
if ( isEqual( category.id, c.id ) ) {
found = true;
// If data is null, remove this edit, otherwise update the edit data.
return data ? { ...c, ...data } : undefined;
}
return c;
} )
);
if ( ! found ) {
// update or create not already in edit state, so add it now.
newArray.push( { id: category.id, ...data } );
}
return newArray;
}<|fim▁end|> | function clearEditsAction() {
return null;
} |
<|file_name|>test.go<|end_file_name|><|fim▁begin|>/*§
===========================================================================
MoonDeploy
===========================================================================
Copyright (C) 2015-2016 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.<|fim▁hole|> http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package test
/*
It is not trivial to perform automated tests for MoonDeploy.
Currently, testing is based on its actual execution for existing programs
such as GraphsJ, Chronos IDE and KnapScal.
*/<|fim▁end|> | You may obtain a copy of the License at
|
<|file_name|>lgt_spiders.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
##### ##### ===== 포함 파일 =====
# 개인적인 아이디, 비밀번호 파일.
from personal.jconfig import LOGIN_ID, LOGIN_PW
# scrapy item 파일.
from joonggonara.items import JoonggonaraItem
# 로그인을 위한 FormRequest.
# 로그인 이후 크롤링을 위한 Request.
from scrapy.http import FormRequest, Request
# 게시판 페이지에서 각 게시글 url을 얻어온 후 url을 Spider에 넣어주기 위한 urljoin.
from urlparse import urljoin
# scrapy를 사용하기 위한 scrapy.
import scrapy
# response에서 ArticleNumber를 얻어내기위한 re.
import re
# file의 존재유무 체크를 위한 os.path
import os.path
# 랜덤 sleep을 위한 time, random
import time
import random
# Database를 위한 sqlite3
import sqlite3
##### ##### ===== 포함 파일 끝 =====
##### ##### ===== 전역 변수 지역 =====
CRAWL_TARGET = 0
CRAWL_COUNT = 0
MAX_PAGE = 0
DOWNLOAD_DELAY = 2
conn = None
cur = None
##### ##### ===== 전역 변수 지역 끝 =====
##### ##### ===== 프로젝트별 변수 =====
# 주요 변수<|fim▁hole|>
DATABASE_NAME = 'joonggonara.sqlite'
LIST_DB = 'list_lgt'
DOWNLOADED_DB = 'downloaded_lgt'
# 임시 변수
TARGET_FILE = 'target_lgt.txt'
MAX_FILE = 'max_lgt.txt'
LOGIN_FILE = 'output/login_lgt.html'
ARTICLE_AHREF = '//a[contains(@href, "articleid") and not(contains(@href, "specialmenutype"))]/@href'
SAVE_LOCATION = 'output/lgt/'
##### ##### ===== 프로젝트별 변수 끝 =====
##### ##### ===== 클래스 선언 지역 =====
##### ----- -----
##### 중고나라 스파이더 클래스
##### ----- -----
class Spider(scrapy.Spider):
name = SPIDER_NAME
global CRAWL_TARGET
global CRAWL_COUNT
global MAX_PAGE
global conn
global cur
# 딜레이 설정
download_delay = DOWNLOAD_DELAY
# 로그인을 하고 시작해야함
# 따라서 로그인 페이지에서 시작
start_urls = [
START_URL
]
# 파일로부터 수집할 개수를 읽어옴
# 이렇게 하는 것이 소스코드 수정 없이 수집양을 조절할 수 있음
target_file = open(TARGET_FILE, 'r')
CRAWL_TARGET = int(target_file.readline())
target_file.close()
max_file = open(MAX_FILE, 'r')
MAX_PAGE = int(max_file.readline())
max_file.close()
# 로그인을 하는 함수
def parse(self, response):
# 로그인을 수정하기 위한 부분
# 각 폼에 맞게 id와 pw를 입력
# 이후의 쿠키는 scrapy가 알아서 관리해줌
return scrapy.FormRequest.from_response(
response,
formname='frmNIDLogin',
formdata={'id': LOGIN_ID, 'pw': LOGIN_PW},
clickdata={'nr': 0},
callback=self.after_login
)
# 로그인이후 게시판 List에서 각 게시글 URL을 얻기위한 함수
def after_login(self, response):
# 글로벌 변수를 불러옴
global CRAWL_TARGET
global CRAWL_COUNT
global MAX_PAGE
global conn
global cur
# 로그인 디버깅 용
with open(LOGIN_FILE, 'wb') as f:
f.write(response.body)
f.close()
# Create Database Connector
conn = sqlite3.connect(DATABASE_NAME)
# Create Database Cursor
cur = conn.cursor()
# Create Table
cur.executescript('''
CREATE TABLE IF NOT EXISTS ''' + LIST_DB + ''' (
article_num INTEGER PRIMARY KEY NOT NULL UNIQUE);
''' +
'''
CREATE TABLE IF NOT EXISTS ''' + DOWNLOADED_DB + ''' (
article_num INTEGER PRIMARY KEY NOT NULL UNIQUE);
'''
)
conn.commit()
# 이전 수집때 목표로 저장해둔 리스트 수 불러오기
cur.execute('''
SELECT COUNT(*) FROM ''' + LIST_DB
)
CRAWL_COUNT = CRAWL_COUNT + int(cur.fetchone()[0])
# 로그인 성공 후 게시판에서 각 게시글의 URL을 따옴
return Request(url=BOARD_PAGE_URL + str(1), callback=self.parse_list)
# 수집한 게시판 정보에서 공지사항을 제외한 게시글 URL을 파싱
def parse_list(self, response):
# 글로벌 변수를 불러옴
global CRAWL_TARGET
global CRAWL_COUNT
global MAX_PAGE
global conn
global cur
# 사용자가 작성한 게시글 파악
for ahref in response.xpath(ARTICLE_AHREF).extract():
# 수집 목표량을 채웠을 경우 탈출
if CRAWL_COUNT >= CRAWL_TARGET:
break
# 게시글 번호 파싱
article_num = re.split(r'[?=&]', ahref)[12]
# 이미 받은 게시글일 경우 패스
cur.execute('SELECT * FROM ' + DOWNLOADED_DB + ' WHERE article_num = ' + str(article_num)
)
if cur.fetchone() is not None:
print 'tartget skip: ' + str(article_num)
continue
# 다운로드 대상에 입력
cur.execute('INSERT OR IGNORE INTO ' + LIST_DB + ' (article_num) VALUES (' + str(article_num) + ')'
)
conn.commit()
CRAWL_COUNT = CRAWL_COUNT + 1
# 목표 개수 만큼 리스트를 채웠는지 체크
page_num = int(re.split(r'[=]', response.url)[8])
if ((CRAWL_COUNT >= CRAWL_TARGET) or (page_num >= MAX_PAGE)):
return self.crawl_article()
else:
# 목표 개수 미달인 경우 다음 페이지 불러오기
next_url = BOARD_PAGE_URL + str(page_num+1)
return Request(url=next_url, callback=self.parse_list)
# 게시글 수집
def crawl_article(self):
# 글로벌 변수를 불러옴
global CRAWL_TARGET
global CRAWL_COUNT
global MAX_PAGE
global conn
global cur
# 다운로드 대상 리스트 불러오기
# 참고: yield로 Request를 전송하기 때문에 cur가 동시에 사용될 가능성이 있다
# 따라서 fetchall()로 데이터를 모두 가져와야 한다
cur.execute('SELECT * FROM ' + LIST_DB)
target_list = cur.fetchall()
# Request 보내기
for data in target_list:
# request_url 조립
article_num = data[0]
request_url = ARTICLE_URL + str(article_num)
# Request를 날리기 전 다운로드 대상 리스트에서 제거
cur.execute('DELETE FROM ' + LIST_DB + ' WHERE article_num = ' + str(article_num)
)
conn.commit()
# 랜덤 sleep
time.sleep(random.randint(0, 1))
# 요청 전송
yield Request(request_url, callback = self.parse_article)
# 각 게시글의 원본을 저장
def parse_article(self, response):
# 글로벌 변수를 불러옴.
global CRAWL_TARGET
global CRAWL_COUNT
global MAX_PAGE
global conn
global cur
# 수집한 게시글 다운로드 완료 리스트에 저장
article_num = re.split(r'[?=&]', response.url)[10]
cur.execute('INSERT OR IGNORE INTO ' + DOWNLOADED_DB + ' (article_num) VALUES (' + str(article_num) + ')'
)
conn.commit()
# 수집한 게시글을 파일로 저장
with open(SAVE_LOCATION + article_num + '.html', 'wb') as f:
f.write(response.body)
f.close()
##### ##### ===== 클래스 선언 지역 끝 =====<|fim▁end|> | SPIDER_NAME = 'lgt'
START_URL = 'http://nid.naver.com/nidlogin.login'
BOARD_PAGE_URL = 'http://cafe.naver.com/ArticleList.nhn?search.boardtype=L&userDisplay=50&search.menuid=425&search.questionTab=A&search.clubid=10050146&search.specialmenutype=&search.totalCount=501&search.page=' # SKT - 339, KT - 424, LGT - 425, 여성상의(fup) - 356, 남성상의(mup) - 358
ARTICLE_URL = 'http://cafe.naver.com/ArticleRead.nhn?clubid=10050146&page=1&menuid=425&boardtype=L&articleid=' |
<|file_name|>features_dense_longint_modular.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from modshogun import LongIntFeatures
from numpy import array, int64, all
# create dense matrix A
matrix=array([[1,2,3],[4,0,0],[0,0,0],[0,5,0],[0,0,6],[9,9,9]], dtype=int64)
parameter_list = [[matrix]]
# ... of type LongInt
def features_dense_longint_modular (A=matrix):
a=LongIntFeatures(A)<|fim▁hole|> # get matrix
a_out = a.get_feature_matrix()
assert(all(a_out==A))
return a_out
if __name__=='__main__':
print('dense_longint')
features_dense_longint_modular(*parameter_list[0])<|fim▁end|> | # get first feature vector and set it
a.set_feature_vector(array([1,4,0,0,0,9], dtype=int64), 0)
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Call vendor to add the dependencies to the classpath
import vendor
vendor.add('lib')
# Import the Flask Framework
from flask import Flask, render_template, url_for, request, jsonify
app = Flask(__name__)
import translate
# Root directory
@app.route('/')
def index_route():
phrase = request.args.get("q")
if not phrase:
return render_template("index.html", phrase="")
return render_template("index.html", phrase=phrase)
@app.route("/translate")
def translate_route():
phrase = request.args.get("text")
fro = request.args.get("from")
to = request.args.get("to")
translated_text = translate.get_translation(phrase, lang=fro + "-" + to)
if translated_text == None:
return "Failed to translate", 404
return translated_text
if __name__ == '__main__':
#app.run(host="0.0.0.0") # For development<|fim▁hole|><|fim▁end|> | app.run() # For production |
<|file_name|>bids.py<|end_file_name|><|fim▁begin|># emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
#
# Copyright 2021 The NiPreps Developers <nipreps@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# We support and encourage derived works from this project, please read
# about our expectations at
#
# https://www.nipreps.org/community/licensing/
#
"""Interfaces for handling BIDS-like neuroimaging structures."""
from collections import defaultdict
from json import dumps, loads
from pathlib import Path
from shutil import copytree, rmtree
from pkg_resources import resource_filename as _pkgres
import re
import nibabel as nb
import numpy as np
from nipype import logging
from nipype.interfaces.base import (
traits,
isdefined,
Undefined,
TraitedSpec,
BaseInterfaceInputSpec,
DynamicTraitedSpec,
File,
Directory,
InputMultiObject,
OutputMultiObject,
Str,
SimpleInterface,
)
from nipype.interfaces.io import add_traits
from templateflow.api import templates as _get_template_list
from ..utils.bids import _init_layout, relative_to_root
from ..utils.images import set_consumables, unsafe_write_nifti_header_and_data
from ..utils.misc import splitext as _splitext, _copy_any
regz = re.compile(r"\.gz$")
_pybids_spec = loads(Path(_pkgres("niworkflows", "data/nipreps.json")).read_text())
BIDS_DERIV_ENTITIES = frozenset({e["name"] for e in _pybids_spec["entities"]})
BIDS_DERIV_PATTERNS = tuple(_pybids_spec["default_path_patterns"])
STANDARD_SPACES = _get_template_list()
LOGGER = logging.getLogger("nipype.interface")
def _none():
return None
# Automatically coerce certain suffixes (DerivativesDataSink)
DEFAULT_DTYPES = defaultdict(
_none,
(
("mask", "uint8"),
("dseg", "int16"),
("probseg", "float32"),
("boldref", "source"),
),
)
class _BIDSBaseInputSpec(BaseInterfaceInputSpec):
bids_dir = traits.Either(
(None, Directory(exists=True)), usedefault=True, desc="optional bids directory"
)
bids_validate = traits.Bool(True, usedefault=True, desc="enable BIDS validator")
class _BIDSInfoInputSpec(_BIDSBaseInputSpec):
in_file = File(mandatory=True, desc="input file, part of a BIDS tree")
class _BIDSInfoOutputSpec(DynamicTraitedSpec):
subject = traits.Str()
session = traits.Str()
task = traits.Str()
acquisition = traits.Str()<|fim▁hole|>
class BIDSInfo(SimpleInterface):
"""
Extract BIDS entities from a BIDS-conforming path.
This interface uses only the basename, not the path, to determine the
subject, session, task, run, acquisition or reconstruction.
>>> bids_info = BIDSInfo(bids_dir=str(datadir / 'ds054'), bids_validate=False)
>>> bids_info.inputs.in_file = '''\
sub-01/func/ses-retest/sub-01_ses-retest_task-covertverbgeneration_bold.nii.gz'''
>>> res = bids_info.run()
>>> res.outputs
<BLANKLINE>
acquisition = <undefined>
reconstruction = <undefined>
run = <undefined>
session = retest
subject = 01
suffix = bold
task = covertverbgeneration
<BLANKLINE>
>>> bids_info = BIDSInfo(bids_dir=str(datadir / 'ds054'), bids_validate=False)
>>> bids_info.inputs.in_file = '''\
sub-01/func/ses-retest/sub-01_ses-retest_task-covertverbgeneration_rec-MB_acq-AP_run-1_bold.nii.gz'''
>>> res = bids_info.run()
>>> res.outputs
<BLANKLINE>
acquisition = AP
reconstruction = MB
run = 1
session = retest
subject = 01
suffix = bold
task = covertverbgeneration
<BLANKLINE>
>>> bids_info = BIDSInfo(bids_dir=str(datadir / 'ds054'), bids_validate=False)
>>> bids_info.inputs.in_file = '''\
sub-01/func/ses-retest/sub-01_ses-retest_task-covertverbgeneration_acq-AP_run-01_bold.nii.gz'''
>>> res = bids_info.run()
>>> res.outputs
<BLANKLINE>
acquisition = AP
reconstruction = <undefined>
run = 1
session = retest
subject = 01
suffix = bold
task = covertverbgeneration
<BLANKLINE>
>>> bids_info = BIDSInfo(bids_validate=False)
>>> bids_info.inputs.in_file = str(
... datadir / 'ds114' / 'sub-01' / 'ses-retest' /
... 'func' / 'sub-01_ses-retest_task-covertverbgeneration_bold.nii.gz')
>>> res = bids_info.run()
>>> res.outputs
<BLANKLINE>
acquisition = <undefined>
reconstruction = <undefined>
run = <undefined>
session = retest
subject = 01
suffix = bold
task = covertverbgeneration
<BLANKLINE>
>>> bids_info = BIDSInfo(bids_validate=False)
>>> bids_info.inputs.in_file = '''\
sub-01/func/ses-retest/sub-01_ses-retest_task-covertverbgeneration_bold.nii.gz'''
>>> res = bids_info.run()
>>> res.outputs
<BLANKLINE>
acquisition = <undefined>
reconstruction = <undefined>
run = <undefined>
session = retest
subject = 01
suffix = bold
task = covertverbgeneration
<BLANKLINE>
"""
input_spec = _BIDSInfoInputSpec
output_spec = _BIDSInfoOutputSpec
def _run_interface(self, runtime):
from bids.layout import parse_file_entities
bids_dir = self.inputs.bids_dir
in_file = self.inputs.in_file
if bids_dir is not None:
try:
in_file = str(Path(in_file).relative_to(bids_dir))
except ValueError:
pass
params = parse_file_entities(in_file)
self._results = {
key: params.get(key, Undefined)
for key in _BIDSInfoOutputSpec().get().keys()
}
return runtime
class _BIDSDataGrabberInputSpec(BaseInterfaceInputSpec):
subject_data = traits.Dict(Str, traits.Any)
subject_id = Str()
class _BIDSDataGrabberOutputSpec(TraitedSpec):
out_dict = traits.Dict(desc="output data structure")
fmap = OutputMultiObject(desc="output fieldmaps")
bold = OutputMultiObject(desc="output functional images")
sbref = OutputMultiObject(desc="output sbrefs")
t1w = OutputMultiObject(desc="output T1w images")
roi = OutputMultiObject(desc="output ROI images")
t2w = OutputMultiObject(desc="output T2w images")
flair = OutputMultiObject(desc="output FLAIR images")
class BIDSDataGrabber(SimpleInterface):
"""
Collect files from a BIDS directory structure.
>>> bids_src = BIDSDataGrabber(anat_only=False)
>>> bids_src.inputs.subject_data = bids_collect_data(
... str(datadir / 'ds114'), '01', bids_validate=False)[0]
>>> bids_src.inputs.subject_id = '01'
>>> res = bids_src.run()
>>> res.outputs.t1w # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
['.../ds114/sub-01/ses-retest/anat/sub-01_ses-retest_T1w.nii.gz',
'.../ds114/sub-01/ses-test/anat/sub-01_ses-test_T1w.nii.gz']
"""
input_spec = _BIDSDataGrabberInputSpec
output_spec = _BIDSDataGrabberOutputSpec
_require_funcs = True
def __init__(self, *args, **kwargs):
anat_only = kwargs.pop("anat_only")
anat_derivatives = kwargs.pop("anat_derivatives", None)
super(BIDSDataGrabber, self).__init__(*args, **kwargs)
if anat_only is not None:
self._require_funcs = not anat_only
self._require_t1w = anat_derivatives is None
def _run_interface(self, runtime):
bids_dict = self.inputs.subject_data
self._results["out_dict"] = bids_dict
self._results.update(bids_dict)
if self._require_t1w and not bids_dict['t1w']:
raise FileNotFoundError(
"No T1w images found for subject sub-{}".format(self.inputs.subject_id)
)
if self._require_funcs and not bids_dict["bold"]:
raise FileNotFoundError(
"No functional images found for subject sub-{}".format(
self.inputs.subject_id
)
)
for imtype in ["bold", "t2w", "flair", "fmap", "sbref", "roi"]:
if not bids_dict[imtype]:
LOGGER.info(
'No "%s" images found for sub-%s', imtype, self.inputs.subject_id
)
return runtime
class _DerivativesDataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
base_directory = traits.Directory(
desc="Path to the base directory for storing data."
)
check_hdr = traits.Bool(True, usedefault=True, desc="fix headers of NIfTI outputs")
compress = InputMultiObject(
traits.Either(None, traits.Bool),
usedefault=True,
desc="whether ``in_file`` should be compressed (True), uncompressed (False) "
"or left unmodified (None, default).",
)
data_dtype = Str(
desc="NumPy datatype to coerce NIfTI data to, or `source` to"
"match the input file dtype"
)
dismiss_entities = InputMultiObject(
traits.Either(None, Str),
usedefault=True,
desc="a list entities that will not be propagated from the source file",
)
in_file = InputMultiObject(
File(exists=True), mandatory=True, desc="the object to be saved"
)
meta_dict = traits.DictStrAny(desc="an input dictionary containing metadata")
source_file = InputMultiObject(
File(exists=False), mandatory=True, desc="the source file(s) to extract entities from")
class _DerivativesDataSinkOutputSpec(TraitedSpec):
out_file = OutputMultiObject(File(exists=True, desc="written file path"))
out_meta = OutputMultiObject(File(exists=True, desc="written JSON sidecar path"))
compression = OutputMultiObject(
traits.Either(None, traits.Bool),
desc="whether ``in_file`` should be compressed (True), uncompressed (False) "
"or left unmodified (None).",
)
fixed_hdr = traits.List(traits.Bool, desc="whether derivative header was fixed")
class DerivativesDataSink(SimpleInterface):
"""
Store derivative files.
Saves the ``in_file`` into a BIDS-Derivatives folder provided
by ``base_directory``, given the input reference ``source_file``.
>>> import tempfile
>>> tmpdir = Path(tempfile.mkdtemp())
>>> tmpfile = tmpdir / 'a_temp_file.nii.gz'
>>> tmpfile.open('w').close() # "touch" the file
>>> t1w_source = bids_collect_data(
... str(datadir / 'ds114'), '01', bids_validate=False)[0]['t1w'][0]
>>> dsink = DerivativesDataSink(base_directory=str(tmpdir), check_hdr=False)
>>> dsink.inputs.in_file = str(tmpfile)
>>> dsink.inputs.source_file = t1w_source
>>> dsink.inputs.desc = 'denoised'
>>> dsink.inputs.compress = False
>>> res = dsink.run()
>>> res.outputs.out_file # doctest: +ELLIPSIS
'.../niworkflows/sub-01/ses-retest/anat/sub-01_ses-retest_desc-denoised_T1w.nii'
>>> tmpfile = tmpdir / 'a_temp_file.nii'
>>> tmpfile.open('w').close() # "touch" the file
>>> dsink = DerivativesDataSink(base_directory=str(tmpdir), check_hdr=False,
... allowed_entities=("custom",))
>>> dsink.inputs.in_file = str(tmpfile)
>>> dsink.inputs.source_file = t1w_source
>>> dsink.inputs.custom = 'noise'
>>> res = dsink.run()
>>> res.outputs.out_file # doctest: +ELLIPSIS
'.../niworkflows/sub-01/ses-retest/anat/sub-01_ses-retest_custom-noise_T1w.nii'
>>> dsink = DerivativesDataSink(base_directory=str(tmpdir), check_hdr=False,
... allowed_entities=("custom",))
>>> dsink.inputs.in_file = [str(tmpfile), str(tmpfile)]
>>> dsink.inputs.source_file = t1w_source
>>> dsink.inputs.custom = [1, 2]
>>> dsink.inputs.compress = True
>>> res = dsink.run()
>>> res.outputs.out_file # doctest: +ELLIPSIS
['.../niworkflows/sub-01/ses-retest/anat/sub-01_ses-retest_custom-1_T1w.nii.gz',
'.../niworkflows/sub-01/ses-retest/anat/sub-01_ses-retest_custom-2_T1w.nii.gz']
>>> dsink = DerivativesDataSink(base_directory=str(tmpdir), check_hdr=False,
... allowed_entities=("custom1", "custom2"))
>>> dsink.inputs.in_file = [str(tmpfile)] * 2
>>> dsink.inputs.source_file = t1w_source
>>> dsink.inputs.custom1 = [1, 2]
>>> dsink.inputs.custom2 = "b"
>>> res = dsink.run()
>>> res.outputs.out_file # doctest: +ELLIPSIS
['.../niworkflows/sub-01/ses-retest/anat/sub-01_ses-retest_custom1-1_custom2-b_T1w.nii',
'.../niworkflows/sub-01/ses-retest/anat/sub-01_ses-retest_custom1-2_custom2-b_T1w.nii']
When multiple source files are passed, only common entities are passed down.
For example, if two T1w images from different sessions are used to generate
a single image, the session entity is removed automatically.
>>> bids_dir = tmpdir / 'bidsroot'
>>> multi_source = [
... bids_dir / 'sub-02/ses-A/anat/sub-02_ses-A_T1w.nii.gz',
... bids_dir / 'sub-02/ses-B/anat/sub-02_ses-B_T1w.nii.gz']
>>> for source_file in multi_source:
... source_file.parent.mkdir(parents=True, exist_ok=True)
... _ = source_file.write_text("")
>>> dsink = DerivativesDataSink(base_directory=str(tmpdir), check_hdr=False)
>>> dsink.inputs.in_file = str(tmpfile)
>>> dsink.inputs.source_file = list(map(str, multi_source))
>>> dsink.inputs.desc = 'preproc'
>>> res = dsink.run()
>>> res.outputs.out_file # doctest: +ELLIPSIS
'.../niworkflows/sub-02/anat/sub-02_desc-preproc_T1w.nii'
If, on the other hand, only one is used, the session is preserved:
>>> dsink.inputs.source_file = str(multi_source[0])
>>> res = dsink.run()
>>> res.outputs.out_file # doctest: +ELLIPSIS
'.../niworkflows/sub-02/ses-A/anat/sub-02_ses-A_desc-preproc_T1w.nii'
>>> bids_dir = tmpdir / 'bidsroot' / 'sub-02' / 'ses-noanat' / 'func'
>>> bids_dir.mkdir(parents=True, exist_ok=True)
>>> tricky_source = bids_dir / 'sub-02_ses-noanat_task-rest_run-01_bold.nii.gz'
>>> tricky_source.open('w').close()
>>> dsink = DerivativesDataSink(base_directory=str(tmpdir), check_hdr=False)
>>> dsink.inputs.in_file = str(tmpfile)
>>> dsink.inputs.source_file = str(tricky_source)
>>> dsink.inputs.desc = 'preproc'
>>> res = dsink.run()
>>> res.outputs.out_file # doctest: +ELLIPSIS
'.../niworkflows/sub-02/ses-noanat/func/sub-02_ses-noanat_task-rest_run-1_\
desc-preproc_bold.nii'
>>> bids_dir = tmpdir / 'bidsroot' / 'sub-02' / 'ses-noanat' / 'func'
>>> bids_dir.mkdir(parents=True, exist_ok=True)
>>> tricky_source = bids_dir / 'sub-02_ses-noanat_task-rest_run-1_bold.nii.gz'
>>> tricky_source.open('w').close()
>>> dsink = DerivativesDataSink(base_directory=str(tmpdir), check_hdr=False)
>>> dsink.inputs.in_file = str(tmpfile)
>>> dsink.inputs.source_file = str(tricky_source)
>>> dsink.inputs.desc = 'preproc'
>>> dsink.inputs.RepetitionTime = 0.75
>>> res = dsink.run()
>>> res.outputs.out_meta # doctest: +ELLIPSIS
'.../niworkflows/sub-02/ses-noanat/func/sub-02_ses-noanat_task-rest_run-1_\
desc-preproc_bold.json'
>>> Path(res.outputs.out_meta).read_text().splitlines()[1]
' "RepetitionTime": 0.75'
>>> bids_dir = tmpdir / 'bidsroot' / 'sub-02' / 'ses-noanat' / 'func'
>>> bids_dir.mkdir(parents=True, exist_ok=True)
>>> tricky_source = bids_dir / 'sub-02_ses-noanat_task-rest_run-01_bold.nii.gz'
>>> tricky_source.open('w').close()
>>> dsink = DerivativesDataSink(base_directory=str(tmpdir), check_hdr=False,
... SkullStripped=True)
>>> dsink.inputs.in_file = str(tmpfile)
>>> dsink.inputs.source_file = str(tricky_source)
>>> dsink.inputs.desc = 'preproc'
>>> dsink.inputs.space = 'MNI152NLin6Asym'
>>> dsink.inputs.resolution = '01'
>>> dsink.inputs.RepetitionTime = 0.75
>>> res = dsink.run()
>>> res.outputs.out_meta # doctest: +ELLIPSIS
'.../niworkflows/sub-02/ses-noanat/func/sub-02_ses-noanat_task-rest_run-1_\
space-MNI152NLin6Asym_res-01_desc-preproc_bold.json'
>>> lines = Path(res.outputs.out_meta).read_text().splitlines()
>>> lines[1]
' "RepetitionTime": 0.75,'
>>> lines[2]
' "SkullStripped": true'
>>> bids_dir = tmpdir / 'bidsroot' / 'sub-02' / 'ses-noanat' / 'func'
>>> bids_dir.mkdir(parents=True, exist_ok=True)
>>> tricky_source = bids_dir / 'sub-02_ses-noanat_task-rest_run-01_bold.nii.gz'
>>> tricky_source.open('w').close()
>>> dsink = DerivativesDataSink(base_directory=str(tmpdir), check_hdr=False,
... SkullStripped=True)
>>> dsink.inputs.in_file = str(tmpfile)
>>> dsink.inputs.source_file = str(tricky_source)
>>> dsink.inputs.desc = 'preproc'
>>> dsink.inputs.resolution = 'native'
>>> dsink.inputs.space = 'MNI152NLin6Asym'
>>> dsink.inputs.RepetitionTime = 0.75
>>> dsink.inputs.meta_dict = {'RepetitionTime': 1.75, 'SkullStripped': False, 'Z': 'val'}
>>> res = dsink.run()
>>> res.outputs.out_meta # doctest: +ELLIPSIS
'.../niworkflows/sub-02/ses-noanat/func/sub-02_ses-noanat_task-rest_run-1_\
space-MNI152NLin6Asym_desc-preproc_bold.json'
>>> lines = Path(res.outputs.out_meta).read_text().splitlines()
>>> lines[1]
' "RepetitionTime": 0.75,'
>>> lines[2]
' "SkullStripped": true,'
>>> lines[3]
' "Z": "val"'
"""
input_spec = _DerivativesDataSinkInputSpec
output_spec = _DerivativesDataSinkOutputSpec
out_path_base = "niworkflows"
_always_run = True
_allowed_entities = set(BIDS_DERIV_ENTITIES)
def __init__(self, allowed_entities=None, out_path_base=None, **inputs):
"""Initialize the SimpleInterface and extend inputs with custom entities."""
self._allowed_entities = set(allowed_entities or []).union(
self._allowed_entities
)
if out_path_base:
self.out_path_base = out_path_base
self._metadata = {}
self._static_traits = self.input_spec.class_editable_traits() + sorted(
self._allowed_entities
)
for dynamic_input in set(inputs) - set(self._static_traits):
self._metadata[dynamic_input] = inputs.pop(dynamic_input)
# First regular initialization (constructs InputSpec object)
super().__init__(**inputs)
add_traits(self.inputs, self._allowed_entities)
for k in self._allowed_entities.intersection(list(inputs.keys())):
# Add additional input fields (self.inputs is an object)
setattr(self.inputs, k, inputs[k])
    def _run_interface(self, runtime):
        """Build the BIDS-derivatives name(s), copy/fix the data, write sidecar JSON."""
        from bids.layout import parse_file_entities
        from bids.layout.writing import build_path
        from bids.utils import listify
        # Ready the output folder
        base_directory = runtime.cwd
        if isdefined(self.inputs.base_directory):
            base_directory = self.inputs.base_directory
        base_directory = Path(base_directory).absolute()
        out_path = base_directory / self.out_path_base
        out_path.mkdir(exist_ok=True, parents=True)
        # Ensure we have a list
        in_file = listify(self.inputs.in_file)
        # Read in the dictionary of metadata
        if isdefined(self.inputs.meta_dict):
            meta = self.inputs.meta_dict
            # inputs passed in construction take priority
            meta.update(self._metadata)
            self._metadata = meta
        # Initialize entities with those from the source file.
        in_entities = [
            parse_file_entities(str(relative_to_root(source_file)))
            for source_file in self.inputs.source_file
        ]
        # Keep only entities on which every source file agrees.
        out_entities = {k: v for k, v in in_entities[0].items()
                        if all(ent.get(k) == v for ent in in_entities[1:])}
        for drop_entity in listify(self.inputs.dismiss_entities or []):
            out_entities.pop(drop_entity, None)
        # Override extension with that of the input file(s)
        out_entities["extension"] = [
            # _splitext does not accept .surf.gii (for instance)
            "".join(Path(orig_file).suffixes).lstrip(".")
            for orig_file in in_file
        ]
        compress = listify(self.inputs.compress) or [None]
        if len(compress) == 1:
            compress = compress * len(in_file)
        for i, ext in enumerate(out_entities["extension"]):
            if compress[i] is not None:
                ext = regz.sub("", ext)
                out_entities["extension"][i] = f"{ext}.gz" if compress[i] else ext
        # Override entities with those set as inputs
        for key in self._allowed_entities:
            value = getattr(self.inputs, key)
            if value is not None and isdefined(value):
                out_entities[key] = value
        # Clean up native resolution with space
        if out_entities.get("resolution") == "native" and out_entities.get("space"):
            out_entities.pop("resolution", None)
        if len(set(out_entities["extension"])) == 1:
            out_entities["extension"] = out_entities["extension"][0]
        # Insert custom (non-BIDS) entities from allowed_entities.
        custom_entities = set(out_entities.keys()) - set(BIDS_DERIV_ENTITIES)
        patterns = BIDS_DERIV_PATTERNS
        if custom_entities:
            # Example: f"{key}-{{{key}}}" -> "task-{task}"
            custom_pat = "_".join(f"{key}-{{{key}}}" for key in sorted(custom_entities))
            patterns = [
                pat.replace("_{suffix", "_".join(("", custom_pat, "{suffix")))
                for pat in patterns
            ]
        # Prepare SimpleInterface outputs object
        self._results["out_file"] = []
        self._results["compression"] = []
        self._results["fixed_hdr"] = [False] * len(in_file)
        dest_files = build_path(out_entities, path_patterns=patterns)
        if not dest_files:
            raise ValueError(f"Could not build path with entities {out_entities}.")
        # Make sure the interpolated values is embedded in a list, and check
        dest_files = listify(dest_files)
        if len(in_file) != len(dest_files):
            raise ValueError(
                f"Input files ({len(in_file)}) not matched "
                f"by interpolated patterns ({len(dest_files)})."
            )
        for i, (orig_file, dest_file) in enumerate(zip(in_file, dest_files)):
            out_file = out_path / dest_file
            out_file.parent.mkdir(exist_ok=True, parents=True)
            self._results["out_file"].append(str(out_file))
            self._results["compression"].append(str(dest_file).endswith(".gz"))
            # Set data and header iff changes need to be made. If these are
            # still None when it's time to write, just copy.
            new_data, new_header = None, None
            is_nifti = out_file.name.endswith(
                (".nii", ".nii.gz")
            ) and not out_file.name.endswith((".dtseries.nii", ".dtseries.nii.gz"))
            data_dtype = self.inputs.data_dtype or DEFAULT_DTYPES[self.inputs.suffix]
            if is_nifti and any((self.inputs.check_hdr, data_dtype)):
                nii = nb.load(orig_file)
                if self.inputs.check_hdr:
                    hdr = nii.header
                    curr_units = tuple(
                        [None if u == "unknown" else u for u in hdr.get_xyzt_units()]
                    )
                    curr_codes = (int(hdr["qform_code"]), int(hdr["sform_code"]))
                    # Default to mm, use sec if data type is bold
                    units = (
                        curr_units[0] or "mm",
                        "sec" if out_entities["suffix"] == "bold" else None,
                    )
                    xcodes = (1, 1)  # Derivative in its original scanner space
                    if self.inputs.space:
                        xcodes = (
                            (4, 4) if self.inputs.space in STANDARD_SPACES else (2, 2)
                        )
                    if curr_codes != xcodes or curr_units != units:
                        self._results["fixed_hdr"][i] = True
                        new_header = hdr.copy()
                        new_header.set_qform(nii.affine, xcodes[0])
                        new_header.set_sform(nii.affine, xcodes[1])
                        new_header.set_xyzt_units(*units)
                if data_dtype == "source":  # match source dtype
                    try:
                        data_dtype = nb.load(self.inputs.source_file[0]).get_data_dtype()
                    except Exception:
                        LOGGER.warning(
                            f"Could not get data type of file {self.inputs.source_file[0]}"
                        )
                        data_dtype = None
                if data_dtype:
                    data_dtype = np.dtype(data_dtype)
                    orig_dtype = nii.get_data_dtype()
                    if orig_dtype != data_dtype:
                        LOGGER.warning(
                            f"Changing {out_file} dtype from {orig_dtype} to {data_dtype}"
                        )
                        # coerce dataobj to new data dtype
                        if np.issubdtype(data_dtype, np.integer):
                            new_data = np.rint(nii.dataobj).astype(data_dtype)
                        else:
                            new_data = np.asanyarray(nii.dataobj, dtype=data_dtype)
                        # and set header to match
                        if new_header is None:
                            new_header = nii.header.copy()
                        new_header.set_data_dtype(data_dtype)
                del nii
            if new_data is new_header is None:
                _copy_any(orig_file, str(out_file))
            else:
                orig_img = nb.load(orig_file)
                if new_data is None:
                    set_consumables(new_header, orig_img.dataobj)
                    new_data = orig_img.dataobj.get_unscaled()
                else:
                    # Without this, we would be writing nans
                    # This is our punishment for hacking around nibabel defaults
                    new_header.set_slope_inter(slope=1., inter=0.)
                unsafe_write_nifti_header_and_data(
                    fname=out_file,
                    header=new_header,
                    data=new_data
                )
                del orig_img
        if len(self._results["out_file"]) == 1:
            meta_fields = self.inputs.copyable_trait_names()
            self._metadata.update(
                {
                    k: getattr(self.inputs, k)
                    for k in meta_fields
                    if k not in self._static_traits
                }
            )
            if self._metadata:
                out_file = Path(self._results["out_file"][0])
                # 1.3.x hack
                # For dtseries, we have been generating weird non-BIDS JSON files.
                # We can safely keep producing them to avoid breaking derivatives, but
                # only the existing keys should keep going into them.
                if out_file.name.endswith(".dtseries.nii"):
                    legacy_metadata = {}
                    for key in ("grayordinates", "space", "surface", "surface_density", "volume"):
                        if key in self._metadata:
                            legacy_metadata[key] = self._metadata.pop(key)
                    if legacy_metadata:
                        sidecar = out_file.parent / f"{_splitext(str(out_file))[0]}.json"
                        sidecar.write_text(dumps(legacy_metadata, sort_keys=True, indent=2))
                # The future: the extension is the first . and everything after
                sidecar = out_file.parent / f"{out_file.name.split('.', 1)[0]}.json"
                sidecar.write_text(dumps(self._metadata, sort_keys=True, indent=2))
                self._results["out_meta"] = str(sidecar)
        return runtime
class _ReadSidecarJSONInputSpec(_BIDSBaseInputSpec):
    """Input specification: the imaging file whose JSON sidecar will be read."""
    in_file = File(exists=True, mandatory=True, desc="the input nifti file")
class _ReadSidecarJSONOutputSpec(_BIDSInfoOutputSpec):
    """Output specification: BIDS entities plus the full metadata dictionary."""
    out_dict = traits.Dict()
class ReadSidecarJSON(SimpleInterface):
    """
    Read JSON sidecar files of a BIDS tree.
    >>> fmap = str(datadir / 'ds054' / 'sub-100185' / 'fmap' /
    ...            'sub-100185_phasediff.nii.gz')
    >>> meta = ReadSidecarJSON(in_file=fmap, bids_dir=str(datadir / 'ds054'),
    ...                        bids_validate=False).run()
    >>> meta.outputs.subject
    '100185'
    >>> meta.outputs.suffix
    'phasediff'
    >>> meta.outputs.out_dict['Manufacturer']
    'SIEMENS'
    >>> meta = ReadSidecarJSON(in_file=fmap, fields=['Manufacturer'],
    ...                        bids_dir=str(datadir / 'ds054'),
    ...                        bids_validate=False).run()
    >>> meta.outputs.out_dict['Manufacturer']
    'SIEMENS'
    >>> meta.outputs.Manufacturer
    'SIEMENS'
    >>> meta.outputs.OtherField  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    AttributeError:
    >>> meta = ReadSidecarJSON(
    ...     in_file=fmap, fields=['MadeUpField'],
    ...     bids_dir=str(datadir / 'ds054'),
    ...     bids_validate=False).run()  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    KeyError:
    >>> meta = ReadSidecarJSON(in_file=fmap, fields=['MadeUpField'],
    ...                        undef_fields=True,
    ...                        bids_dir=str(datadir / 'ds054'),
    ...                        bids_validate=False).run()
    >>> meta.outputs.MadeUpField
    <undefined>
    """
    input_spec = _ReadSidecarJSONInputSpec
    output_spec = _ReadSidecarJSONOutputSpec
    # Cached pybids layout; may be (re)assigned in _run_interface.
    layout = None
    _always_run = True
    def __init__(self, fields=None, undef_fields=False, **inputs):
        """
        Parameters
        ----------
        fields : str or list of str, optional
            Metadata keys to expose as dedicated dynamic outputs.
        undef_fields : bool, optional
            If ``True``, requested fields missing from the sidecar yield
            ``Undefined`` instead of raising ``KeyError``.
        """
        from bids.utils import listify
        super(ReadSidecarJSON, self).__init__(**inputs)
        self._fields = listify(fields or [])
        self._undef_fields = undef_fields
    def _outputs(self):
        # Extend the static output spec with one trait per requested field.
        base = super(ReadSidecarJSON, self)._outputs()
        if self._fields:
            base = add_traits(base, self._fields)
        return base
    def _run_interface(self, runtime):
        """Parse BIDS entities and sidecar metadata of ``in_file`` into outputs."""
        self.layout = self.inputs.bids_dir or self.layout
        self.layout = _init_layout(
            self.inputs.in_file, self.layout, self.inputs.bids_validate
        )
        # Fill in BIDS entities of the output ("*_id")
        output_keys = list(_BIDSInfoOutputSpec().get().keys())
        params = self.layout.parse_file_entities(self.inputs.in_file)
        self._results = {
            key: params.get(key.split("_")[0], Undefined) for key in output_keys
        }
        # Fill in metadata
        metadata = self.layout.get_metadata(self.inputs.in_file)
        self._results["out_dict"] = metadata
        # Set dynamic outputs if fields input is present
        for fname in self._fields:
            if not self._undef_fields and fname not in metadata:
                raise KeyError(
                    'Metadata field "%s" not found for file %s'
                    % (fname, self.inputs.in_file)
                )
            self._results[fname] = metadata.get(fname, Undefined)
        return runtime
class _BIDSFreeSurferDirInputSpec(BaseInterfaceInputSpec):
    """Input specification for :class:`BIDSFreeSurferDir`."""
    # Relative subjects_dir paths are resolved against this directory.
    derivatives = Directory(
        exists=True, mandatory=True, desc="BIDS derivatives directory"
    )
    # $FREESURFER_HOME; fsaverage templates are copied from its subjects/ folder.
    freesurfer_home = Directory(
        exists=True, mandatory=True, desc="FreeSurfer installation directory"
    )
    subjects_dir = traits.Either(
        traits.Str(),
        Directory(),
        default="freesurfer",
        usedefault=True,
        desc="Name of FreeSurfer subjects directory",
    )
    spaces = traits.List(traits.Str, desc="Set of output spaces to prepare")
    overwrite_fsaverage = traits.Bool(
        False, usedefault=True, desc="Overwrite fsaverage directories, if present"
    )
class _BIDSFreeSurferDirOutputSpec(TraitedSpec):
    """Output specification for :class:`BIDSFreeSurferDir`."""
    subjects_dir = traits.Directory(exists=True, desc="FreeSurfer subjects directory")
class BIDSFreeSurferDir(SimpleInterface):
    """
    Prepare a FreeSurfer subjects directory for use in a BIDS context.
    Constructs a subjects directory path, creating if necessary, and copies
    fsaverage subjects (if necessary or forced via ``overwrite_fsaverage``)
    into from the local FreeSurfer distribution.
    If ``subjects_dir`` is an absolute path, then it is returned as the output
    ``subjects_dir``.
    If it is a relative path, it will be resolved relative to the
    ``derivatives`` directory.
    Regardless of the path, if ``fsaverage`` spaces are provided, they will be
    verified to exist, or copied from ``$FREESURFER_HOME/subjects``, if missing.
    The output ``subjects_dir`` is intended to be passed to ``ReconAll`` and
    other FreeSurfer interfaces.
    """
    input_spec = _BIDSFreeSurferDirInputSpec
    output_spec = _BIDSFreeSurferDirOutputSpec
    # Always run: the interface's effect is on the filesystem, not cached results.
    _always_run = True
def _run_interface(self, runtime):
subjects_dir = Path(self.inputs.subjects_dir)
if not subjects_dir.is_absolute():
subjects_dir = Path(self.inputs.derivatives) / subjects_dir
subjects_dir.mkdir(parents=True, exist_ok=True)
self._results["subjects_dir"] = str(subjects_dir)
orig_subjects_dir = Path(self.inputs.freesurfer_home) / "subjects"
# Source is target, so just quit
if subjects_dir == orig_subjects_dir:
return runtime
spaces = list(self.inputs.spaces)
# Always copy fsaverage, for proper recon-all functionality
if "fsaverage" not in spaces:
spaces.append("fsaverage")
for space in spaces:
# Skip non-freesurfer spaces and fsnative
if not space.startswith("fsaverage"):
continue
source = orig_subjects_dir / space
dest = subjects_dir / space
# Edge case, but give a sensible error
if not source.exists():
if dest.exists():
continue
else:
raise FileNotFoundError("Expected to find '%s' to copy" % source)
# Finesse is overrated. Either leave it alone or completely clobber it.
if dest.exists() and self.inputs.overwrite_fsaverage:
rmtree(dest)
if not dest.exists():
try:
copytree(source, dest)
except FileExistsError:
LOGGER.warning(
"%s exists; if multiple jobs are running in parallel"
", this can be safely ignored",
dest,
)
return runtime<|fim▁end|> | reconstruction = traits.Str()
run = traits.Int()
suffix = traits.Str()
|
<|file_name|>page_test_runner.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import sys
from telemetry.core import browser_finder
from telemetry.core import browser_options
from telemetry.page import page_test
from telemetry.page import page_runner
from telemetry.page import page_set
from telemetry.test import discover
def Main(test_dir, page_set_filenames):
"""Turns a PageTest into a command-line program.
Args:
test_dir: Path to directory containing PageTests.
"""
tests = discover.DiscoverClasses(test_dir,
os.path.join(test_dir, '..'),
page_test.PageTest)
# Naively find the test. If we use the browser options parser, we run
# the risk of failing to parse if we use a test-specific parameter.
test_name = None
for arg in sys.argv:
if arg in tests:
test_name = arg
options = browser_options.BrowserOptions()
parser = options.CreateParser('%prog [options] <test> <page_set>')
page_runner.PageRunner.AddCommandLineOptions(parser)
test = None
if test_name is not None:<|fim▁hole|> sys.exit(1)
test = tests[test_name]()
test.AddCommandLineOptions(parser)
_, args = parser.parse_args()
if test is None or len(args) != 2:
parser.print_usage()
print >> sys.stderr, 'Available tests:\n%s\n' % ',\n'.join(
sorted(tests.keys()))
print >> sys.stderr, 'Available page_sets:\n%s\n' % ',\n'.join(
sorted([os.path.relpath(f)
for f in page_set_filenames]))
sys.exit(1)
ps = page_set.PageSet.FromFile(args[1])
results = page_test.PageTestResults()
return RunTestOnPageSet(options, ps, test, results)
def RunTestOnPageSet(options, ps, test, results):
test.CustomizeBrowserOptions(options)
possible_browser = browser_finder.FindBrowser(options)
if not possible_browser:
print >> sys.stderr, """No browser found.\n
Use --browser=list to figure out which are available.\n"""
sys.exit(1)
with page_runner.PageRunner(ps) as runner:
runner.Run(options, possible_browser, test, results)
print '%i pages succeed\n' % len(results.page_successes)
if len(results.page_failures):
logging.warning('Failed pages: %s', '\n'.join(
[failure['page'].url for failure in results.page_failures]))
if len(results.skipped_pages):
logging.warning('Skipped pages: %s', '\n'.join(
[skipped['page'].url for skipped in results.skipped_pages]))
return min(255, len(results.page_failures))<|fim▁end|> | if test_name not in tests:
sys.stderr.write('No test name %s found' % test_name) |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = 'wechat-python-sdk',
version = '0.5.7',
keywords = ('wechat', 'sdk', 'wechat sdk'),
description = u'微信公众平台Python开发包',
long_description = open("README.rst").read(),
license = 'BSD License',
url = 'https://github.com/doraemonext/wechat-python-sdk',<|fim▁hole|> author_email = 'doraemonext@gmail.com',
packages = find_packages(),
include_package_data = True,
platforms = 'any',
install_requires=open("requirements.txt").readlines(),
)<|fim▁end|> | author = 'doraemonext', |
<|file_name|>266658781c00_instances_nullable_in_equipments_provider.py<|end_file_name|><|fim▁begin|>"""
column 'instances' will be deleted later. Has to be nullable for transition
Revision ID: 266658781c00
Revises: 204aae05372a<|fim▁hole|>
"""
# revision identifiers, used by Alembic.
revision = '266658781c00'
down_revision = '204aae05372a'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    # Relax NOT NULL: 'instances' must accept NULLs during the transition
    # window before the column is removed entirely (see module docstring).
    op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True)
def downgrade():
op.alter_column(
'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False
)<|fim▁end|> | Create Date: 2019-04-15 16:27:22.362244 |
<|file_name|>ioutils.cc<|end_file_name|><|fim▁begin|>/* -*- Mode: C; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */
/*
* Copyright 2014-2020 Couchbase, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef _WIN32
#include <cerrno>
#endif
#include "connect.h"
#include "ioutils.h"
#include "hostlist.h"
#include "iotable.h"
#include "ssl.h"
/**
 * Map an OS-level connect(2) status code to a simplified connection-state
 * code: CONNECTED for 0/EISCONN, BUSY for still-in-progress conditions,
 * INTR for EINTR, and EFAIL for anything unrecognized.
 */
lcbio_CSERR lcbio_mkcserr(int syserr)
{
    switch (syserr) {
        case 0:
            return LCBIO_CSERR_CONNECTED;
        case EINTR:
            return LCBIO_CSERR_INTR;
        case EWOULDBLOCK:
#ifdef USE_EAGAIN
        case EAGAIN:
#endif
        case EINPROGRESS:
        case EALREADY:
            return LCBIO_CSERR_BUSY;
        case EISCONN:
            return LCBIO_CSERR_CONNECTED;
#ifdef _WIN32
        case EINVAL:
            /* NOTE(review): Windows-only mapping — presumably covers a
             * Winsock EINVAL from a not-yet-completed connect; confirm. */
            return LCBIO_CSERR_EINVAL;
#endif
        default:
            return LCBIO_CSERR_EFAIL;
    }
}
void lcbio_mksyserr(lcbio_OSERR in, lcbio_OSERR *out)
{
switch (in) {
case EINTR:
case EWOULDBLOCK:
#ifdef USE_EAGAIN
case EAGAIN:
#endif
case EINVAL:
case EINPROGRESS:
case EISCONN:
case EALREADY:
return;
default:
*out = in;<|fim▁hole|>}
/*
 * Translate an OS-level error code into a libcouchbase status code.
 * A zero code maps to LCB_ERR_SOCKET_SHUTDOWN; codes with no direct
 * equivalent are logged and collapsed into LCB_ERR_NETWORK.
 */
static lcb_STATUS ioerr2lcberr(lcbio_OSERR in, const lcb_settings *settings)
{
    switch (in) {
        case 0:
            return LCB_ERR_SOCKET_SHUTDOWN;
        case ECONNREFUSED:
            return LCB_ERR_CONNECTION_REFUSED;
        case ENETUNREACH:
        case EHOSTUNREACH:
        case EHOSTDOWN:
            return LCB_ERR_NODE_UNREACHABLE;
        case EMFILE:
        case ENFILE:
            return LCB_ERR_FD_LIMIT_REACHED;
        case EADDRINUSE:
        case EADDRNOTAVAIL:
            return LCB_ERR_CANNOT_GET_PORT;
        case ECONNRESET:
        case ECONNABORTED:
            return LCB_ERR_CONNECTION_RESET;
        default:
            lcb_log(settings, "lcbio", LCB_LOG_WARN, __FILE__, __LINE__,
                    "OS errno %d (%s) does not have a direct client error code equivalent. Using NETWORK_ERROR", in,
                    strerror(in));
            return LCB_ERR_NETWORK;
    }
}
/* Public wrapper over ioerr2lcberr(): when detailed network errors are
 * disabled in the settings, every failure collapses into LCB_ERR_NETWORK
 * (with a log line recording the detailed translation). */
lcb_STATUS lcbio_mklcberr(lcbio_OSERR in, const lcb_settings *settings)
{
    if (settings->detailed_neterr == 0) {
        lcb_log(settings, "lcbio", LCB_LOG_WARN, __FILE__, __LINE__, "Translating errno=%d (%s), %s to LCB_ERR_NETWORK",
                in, strerror(in), lcb_strerror_short(ioerr2lcberr(in, settings)));
        return LCB_ERR_NETWORK;
    }
    return ioerr2lcberr(in, settings);
}
/*
 * Create an event-style socket from the first usable addrinfo entry.
 * On success *ai points at the entry actually used; on failure the
 * function returns INVALID_SOCKET and *connerr holds the last errno.
 */
lcb_socket_t lcbio_E_ai2sock(lcbio_TABLE *io, struct addrinfo **ai, int *connerr)
{
    lcb_socket_t ret = INVALID_SOCKET;
    *connerr = 0;
    for (; *ai; *ai = (*ai)->ai_next) {
        ret = io->E_socket(*ai);
        if (ret != INVALID_SOCKET) {
            return ret;
        } else {
            *connerr = io->get_errno();
        }
    }
    return ret;
}
/*
 * Completion-style counterpart of lcbio_E_ai2sock(): create a socket from
 * the first usable addrinfo entry. On success *ai points at the entry used;
 * on failure nullptr is returned and *connerr holds the last plugin error.
 */
lcb_sockdata_t *lcbio_C_ai2sock(lcbio_TABLE *io, struct addrinfo **ai, int *connerr)
{
    lcb_sockdata_t *ret = nullptr;
    /* Zero the out-parameter up front, mirroring lcbio_E_ai2sock().
     * Previously *connerr was left uninitialized when the addrinfo list was
     * empty or the first attempt succeeded, so callers could read an
     * indeterminate value. */
    *connerr = 0;
    for (; *ai; *ai = (*ai)->ai_next) {
        ret = io->C_socket(*ai);
        if (ret) {
            return ret;
        } else {
            *connerr = IOT_ERRNO(io);
        }
    }
    return ret;
}
/* Thin wrapper over getnameinfo(): format a sockaddr as numeric host and
 * port strings. Returns getnameinfo()'s status (0 on success). */
static int saddr_to_host_and_port(struct sockaddr *saddr, int len, char *host, lcb_size_t nhost, char *port,
                                  lcb_size_t nport)
{
    return getnameinfo(saddr, len, host, nhost, port, nport, NI_NUMERICHOST | NI_NUMERICSERV);
}
/*
 * Render a sockaddr as "host;port" into buf. Returns 1 on success, 0 on
 * failure.
 *
 * Fix: getnameinfo() signals failure with ANY nonzero return value — the
 * EAI_* constants are negative on glibc but positive on other platforms
 * (notably Winsock) — so the check must be `rv != 0`. The previous
 * `rv < 0` let positive error codes through and formatted garbage buffers.
 */
static int saddr_to_string(struct sockaddr *saddr, int len, char *buf, lcb_size_t nbuf)
{
    char h[NI_MAXHOST + 1];
    char p[NI_MAXSERV + 1];
    int rv;
    rv = saddr_to_host_and_port(saddr, len, h, sizeof(h), p, sizeof(p));
    if (rv != 0) {
        return 0;
    }
    if (snprintf(buf, nbuf, "%s;%s", h, p) < 0) {
        return 0;
    }
    return 1;
}
/* Format the locally-bound address held in sock->sa_local into the printable
 * ep_local (host, port) fields and the combined "host:port" string.
 * Handles AF_INET and AF_INET6; other families leave host/port untouched.
 * NOTE(review): strncpy does not NUL-terminate on truncation; the host
 * buffer is assumed large enough for any numeric address — confirm. */
static void lcbio_cache_local_name(lcbio_CONNINFO *sock)
{
    char addr_str[NI_MAXHOST + 1];
    switch (sock->sa_local.ss_family) {
        case AF_INET: {
            auto *addr = (struct sockaddr_in *)&sock->sa_local;
            inet_ntop(AF_INET, &(addr->sin_addr), addr_str, sizeof(addr_str));
            strncpy(sock->ep_local.host, addr_str, sizeof(sock->ep_local.host));
            snprintf(sock->ep_local.port, sizeof(sock->ep_local.port), "%d", (int)ntohs(addr->sin_port));
        } break;
        case AF_INET6: {
            auto *addr = (struct sockaddr_in6 *)&sock->sa_local;
            inet_ntop(AF_INET6, &(addr->sin6_addr), addr_str, sizeof(addr_str));
            strncpy(sock->ep_local.host, addr_str, sizeof(sock->ep_local.host));
            snprintf(sock->ep_local.port, sizeof(sock->ep_local.port), "%d", (int)ntohs(addr->sin6_port));
        } break;
    }
    snprintf(sock->ep_local_host_and_port, sizeof(sock->ep_local_host_and_port), "%s:%s", sock->ep_local.host,
             sock->ep_local.port);
}
/*
 * Populate sock->info with the local and remote sockaddrs of a connected
 * socket, then cache the printable local endpoint. Uses the plugin's v1
 * nameinfo callback for completion-style I/O, and getsockname/getpeername
 * for event-style I/O. Returns silently on any failure.
 */
void lcbio__load_socknames(lcbio_SOCKET *sock)
{
    int n_salocal, n_saremote, rv;
    struct lcb_nameinfo_st ni {
    };
    lcbio_CONNINFO *info = sock->info;
    n_salocal = sizeof(info->sa_local);
    n_saremote = sizeof(info->sa_remote);
    ni.local.name = (struct sockaddr *)&info->sa_local;
    ni.local.len = &n_salocal;
    ni.remote.name = (struct sockaddr *)&info->sa_remote;
    ni.remote.len = &n_saremote;
    if (!IOT_IS_EVENT(sock->io)) {
        if (!sock->u.sd) {
            return;
        }
        rv = IOT_V1(sock->io).nameinfo(IOT_ARG(sock->io), sock->u.sd, &ni);
        /* NOTE(review): ni.local.len / ni.remote.len were just assigned the
         * addresses of locals above and can never be nullptr here, so this
         * condition effectively only checks rv < 0 — confirm whether a
         * `*len == 0` test was intended. */
        if (ni.local.len == nullptr || ni.remote.len == nullptr || rv < 0) {
            return;
        }
    } else {
        socklen_t sl_tmp = sizeof(info->sa_local);
        if (sock->u.fd == INVALID_SOCKET) {
            return;
        }
        rv = getsockname(sock->u.fd, ni.local.name, &sl_tmp);
        n_salocal = sl_tmp;
        if (rv < 0) {
            return;
        }
        rv = getpeername(sock->u.fd, ni.remote.name, &sl_tmp);
        n_saremote = sl_tmp;
        if (rv < 0) {
            return;
        }
    }
    info->naddr = n_salocal;
    lcbio_cache_local_name(info);
}
/* Fill nistrs with printable "host;port" strings for the socket's remote
 * and local endpoints. Returns 1 on success, 0 if the address info is
 * missing or either endpoint cannot be formatted. */
int lcbio_get_nameinfo(lcbio_SOCKET *sock, struct lcbio_NAMEINFO *nistrs)
{
    lcbio_CONNINFO *conninfo = sock->info;
    /* Nothing to format without cached address data. */
    if (conninfo == nullptr || conninfo->naddr == 0) {
        return 0;
    }
    int ok = saddr_to_string((struct sockaddr *)&conninfo->sa_remote, conninfo->naddr, nistrs->remote,
                             sizeof(nistrs->remote));
    if (ok) {
        ok = saddr_to_string((struct sockaddr *)&conninfo->sa_local, conninfo->naddr, nistrs->local,
                             sizeof(nistrs->local));
    }
    return ok ? 1 : 0;
}
/* Report whether the underlying OS socket has been closed, dispatching on
 * whether the I/O table is event-style (fd) or completion-style (sd). */
int lcbio_is_netclosed(lcbio_SOCKET *sock, int flags)
{
    lcbio_pTABLE table = sock->io;
    return table->is_E() ? table->E_check_closed(sock->u.fd, flags) : table->C_check_closed(sock->u.sd, flags);
}
/*
 * Turn on a boolean socket option (cntl is one of the LCB_IO_CNTL_*
 * constants; see lcbio_strsockopt() for names). Returns
 * LCB_ERR_UNSUPPORTED_OPERATION when the I/O plugin exposes no cntl hook,
 * otherwise the translated result of the plugin call.
 */
lcb_STATUS lcbio_enable_sockopt(lcbio_SOCKET *s, int cntl)
{
    lcbio_pTABLE iot = s->io;
    int rv;
    int value = 1;
    if (!iot->has_cntl()) {
        return LCB_ERR_UNSUPPORTED_OPERATION;
    }
    if (iot->is_E()) {
        rv = iot->E_cntl(s->u.fd, LCB_IO_CNTL_SET, cntl, &value);
    } else {
        rv = iot->C_cntl(s->u.sd, LCB_IO_CNTL_SET, cntl, &value);
    }
    if (rv != 0) {
        return lcbio_mklcberr(IOT_ERRNO(iot), s->settings);
    } else {
        return LCB_SUCCESS;
    }
}
/* Human-readable name for a socket-option control code (used in logs). */
const char *lcbio_strsockopt(int cntl)
{
    if (cntl == LCB_IO_CNTL_TCP_KEEPALIVE) {
        return "TCP_KEEPALIVE";
    }
    if (cntl == LCB_IO_CNTL_TCP_NODELAY) {
        return "TCP_NODELAY";
    }
    return "FIXME: Unknown option";
}
/* Compile-time answer: 1 when the library was built with SSL support,
 * 0 when built with LCB_NO_SSL. */
int lcbio_ssl_supported(void)
{
#ifdef LCB_NO_SSL
    return 0;
#else
    return 1;
#endif
}
/* Fallback SSL-context factory used when SSL is unavailable: reports
 * LCB_ERR_SDK_FEATURE_UNAVAILABLE through errp (if given) and returns
 * no context. */
lcbio_pSSLCTX lcbio_ssl_new__fallback(const char *, const char *, const char *, int, lcb_STATUS *errp, lcb_settings *)
{
    if (errp) {
        *errp = LCB_ERR_SDK_FEATURE_UNAVAILABLE;
    }
    return nullptr;
}
#ifdef LCB_NO_SSL
/* No-op SSL API surface for builds without SSL (LCB_NO_SSL). */
void lcbio_ssl_free(lcbio_pSSLCTX) {}
lcb_STATUS lcbio_ssl_apply(lcbio_SOCKET *, lcbio_pSSLCTX)
{
    return LCB_ERR_SDK_FEATURE_UNAVAILABLE;
}
/* A socket is never SSL-enabled when SSL is compiled out. */
int lcbio_ssl_check(lcbio_SOCKET *)
{
    return 0;
}
lcb_STATUS lcbio_ssl_get_error(lcbio_SOCKET *)
{
    return LCB_SUCCESS;
}
void lcbio_ssl_global_init(void) {}
lcb_STATUS lcbio_sslify_if_needed(lcbio_SOCKET *, lcb_settings *)
{
    return LCB_SUCCESS;
}
#endif<|fim▁end|> | break;
} |
<|file_name|>list.py<|end_file_name|><|fim▁begin|># The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
import json
import logging
from multiprocessing.dummy import Pool
from pip._vendor import six
from pip._vendor.requests.adapters import DEFAULT_POOLSIZE
from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import IndexGroupCommand
from pip._internal.exceptions import CommandError
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.self_outdated_check import make_link_collector
from pip._internal.utils.misc import (
dist_is_editable,
get_installed_distributions,
tabulate,
write_output,
)
from pip._internal.utils.packaging import get_installer
logger = logging.getLogger(__name__)
class ListCommand(IndexGroupCommand):
    """
    List installed packages, including editables.
    Packages are listed in a case-insensitive sorted order.
    """
    usage = """
      %prog [options]"""
    def __init__(self, *args, **kw):
        """Register all `pip list` command-line options."""
        super(ListCommand, self).__init__(*args, **kw)
        cmd_opts = self.cmd_opts
        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        cmd_opts.add_option(cmdoptions.list_path())
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )
        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            default="columns",
            choices=('columns', 'freeze', 'json'),
            help="Select the output format among: columns (default), freeze, "
                 "or json",
        )
        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )
        cmd_opts.add_option(
            '--exclude-editable',
            action='store_false',
            dest='include_editable',
            help='Exclude editable package from output.',
        )
        cmd_opts.add_option(
            '--include-editable',
            action='store_true',
            dest='include_editable',
            help='Include editable package from output.',
            default=True,
        )
        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group, self.parser
        )
        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)
    def _build_package_finder(self, options, session):
        """
        Create a package finder appropriate to this list command.
        """
        link_collector = make_link_collector(session, options=options)
        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
        )
        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
        )
    def run(self, options, args):
        """Entry point: gather installed dists, filter, and render them."""
        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")
        cmdoptions.check_list_path_option(options)
        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
            include_editables=options.include_editable,
            paths=options.path,
        )
        # get_not_required must be called firstly in order to find and
        # filter out all dependencies correctly. Otherwise a package
        # can't be identified as requirement because some parent packages
        # could be filtered out before.
        if options.not_required:
            packages = self.get_not_required(packages, options)
        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)
        self.output_package_listing(packages, options)
    def get_outdated(self, packages, options):
        """Keep only dists whose latest available version exceeds the installed one."""
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]
    def get_uptodate(self, packages, options):
        """Keep only dists already at their latest available version."""
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]
    def get_not_required(self, packages, options):
        """Keep only dists that no other installed dist depends on."""
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        return {pkg for pkg in packages if pkg.key not in dep_keys}
    def iter_packages_latest_infos(self, packages, options):
        """Yield each dist annotated with its latest version and file type."""
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            def latest_info(dist):
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]
                evaluator = finder.make_candidate_evaluator(
                    project_name=dist.project_name,
                )
                best_candidate = evaluator.sort_best_candidate(all_candidates)
                if best_candidate is None:
                    return None
                remote_version = best_candidate.version
                if best_candidate.link.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                return dist
            # This is done for 2x speed up of requests to pypi.org
            # so that "real time" of this function
            # is almost equal to "user time"
            pool = Pool(DEFAULT_POOLSIZE)
            for dist in pool.imap_unordered(latest_info, packages):
                if dist is not None:
                    yield dist
            pool.close()
            pool.join()
    def output_package_listing(self, packages, options):
        """Render *packages* in the format selected via ``--format``."""
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                if options.verbose >= 1:
                    write_output("%s==%s (%s)", dist.project_name,
                                 dist.version, dist.location)
                else:
                    write_output("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            write_output(format_for_json(packages, options))
    def output_package_listing_columns(self, data, header):
        """Write rows as aligned columns, separated from the header by dashes."""
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)
        pkg_strings, sizes = tabulate(data)
        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
        for val in pkg_strings:
            write_output(val)
def format_for_columns(pkgs, options):
"""
Convert the package data into something usable
by output_package_listing_columns.
"""
running_outdated = options.outdated
# Adjust the header for the `pip list --outdated` case.
if running_outdated:
header = ["Package", "Version", "Latest", "Type"]
else:
header = ["Package", "Version"]
data = []
if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
header.append("Location")
if options.verbose >= 1:
header.append("Installer")
for proj in pkgs:
# if we're working on the 'outdated' list, separate out the
# latest_version and type
row = [proj.project_name, proj.version]
if running_outdated:
row.append(proj.latest_version)
row.append(proj.latest_filetype)<|fim▁hole|> row.append(get_installer(proj))
data.append(row)
return data, header
def format_for_json(packages, options):
data = []
for dist in packages:
info = {
'name': dist.project_name,
'version': six.text_type(dist.version),
}
if options.verbose >= 1:
info['location'] = dist.location
info['installer'] = get_installer(dist)
if options.outdated:
info['latest_version'] = six.text_type(dist.latest_version)
info['latest_filetype'] = dist.latest_filetype
data.append(info)
return json.dumps(data)<|fim▁end|> |
if options.verbose >= 1 or dist_is_editable(proj):
row.append(proj.location)
if options.verbose >= 1: |
<|file_name|>LandingCtrl.js<|end_file_name|><|fim▁begin|>(function() {
function LandingCtrl() {
this.heroTitle = "Turn the Music Up!";
}<|fim▁hole|>})();<|fim▁end|> |
angular
.module('blocJams')
.controller('LandingCtrl', LandingCtrl); |
<|file_name|>gce.go<|end_file_name|><|fim▁begin|>package provider
////// SET UP API ACCESS:
//
// 1) In the Google Developer Console navigate to:
// Permissions > Service accounts
//
// 2) Create or use an existing Service Account
//
// 3) For your Service Account, create and save a key as "~/.gce/quilt.json"
//
// 4) In the Google Developer Console navigate to:
// Permissions > Permissions
//
// 5) If the Service Account is not already, assign it the "Editor" role.
// You select the account by email.
import (
"errors"
"fmt"
"net/http"
"os"
"path/filepath"
"reflect"
"sort"
"strings"
"time"
"github.com/NetSys/quilt/constants"
"github.com/NetSys/quilt/db"
"github.com/NetSys/quilt/stitch"
log "github.com/Sirupsen/logrus"
"github.com/satori/go.uuid"
"golang.org/x/net/context"
"golang.org/x/oauth2/google"
compute "google.golang.org/api/compute/v1"
)
const computeBaseURL string = "https://www.googleapis.com/compute/v1/projects"
const (
// These are the various types of Operations that the GCE API returns
local = iota
global
)
var supportedZones = []string{"us-central1-a", "us-east1-b", "europe-west1-b"}
var gAuthClient *http.Client // the oAuth client
var gceService *compute.Service // gce service
type gceCluster struct {
projID string // gce project ID
imgURL string // gce url to the VM image
baseURL string // gce project specific url prefix
ipv4Range string // ipv4 range of the internal network
intFW string // gce internal firewall name
extFW string // gce external firewall name
ns string // cluster namespace
id int // the id of the cluster, used externally
}
// Create a GCE cluster.
//
// Clusters are differentiated (namespace) by setting the description and
// filtering off of that.
//
// XXX: A lot of the fields are hardcoded.
func (clst *gceCluster) Connect(namespace string) error {
if err := gceInit(); err != nil {
log.WithError(err).Debug("failed to start up gce")
return err
}
clst.projID = "declarative-infrastructure"
clst.ns = namespace
clst.imgURL = fmt.Sprintf(
"%s/%s",
computeBaseURL,
"ubuntu-os-cloud/global/images/ubuntu-1510-wily-v20160310")
clst.baseURL = fmt.Sprintf("%s/%s", computeBaseURL, clst.projID)
clst.ipv4Range = "192.168.0.0/16"
clst.intFW = fmt.Sprintf("%s-internal", clst.ns)
clst.extFW = fmt.Sprintf("%s-external", clst.ns)
if err := clst.netInit(); err != nil {
log.WithError(err).Debug("failed to start up gce network")
return err
}
if err := clst.fwInit(); err != nil {
log.WithError(err).Debug("failed to start up gce firewalls")
return err
}
return nil
}
// Get a list of machines from the cluster
//
// XXX: This doesn't use the instance group listing functionality because
// listing that way doesn't get you information about the instances
func (clst *gceCluster) List() ([]Machine, error) {
var mList []Machine
for _, zone := range supportedZones {
list, err := gceService.Instances.List(clst.projID, zone).
Filter(fmt.Sprintf("description eq %s", clst.ns)).Do()
if err != nil {
return nil, err
}
for _, item := range list.Items {
// XXX: This make some iffy assumptions about NetworkInterfaces
machineSplitURL := strings.Split(item.MachineType, "/")
mtype := machineSplitURL[len(machineSplitURL)-1]
mList = append(mList, Machine{
ID: item.Name,
PublicIP: item.NetworkInterfaces[0].
AccessConfigs[0].NatIP,
PrivateIP: item.NetworkInterfaces[0].NetworkIP,
Size: mtype,
Region: zone,
Provider: db.Google,
})
}
}
return mList, nil
}
// Boots instances, it is a blocking call.
//
// XXX: currently ignores cloudConfig
// XXX: should probably have a better clean up routine if an error is encountered
func (clst *gceCluster) Boot(bootSet []Machine) error {
var names []string
for _, m := range bootSet {
name := "quilt-" + uuid.NewV4().String()
_, err := clst.instanceNew(name, m.Size, m.Region,
cloudConfigUbuntu(m.SSHKeys, "wily"))
if err != nil {
log.WithFields(log.Fields{
"error": err,
"id": m.ID,
}).Error("Failed to start instance.")
continue
}
names = append(names, name)
}
if err := clst.wait(names, true); err != nil {
return err
}
return nil
}
// Deletes the instances, it is a blocking call.
//
// If an error occurs while deleting, it will finish the ones that have
// successfully started before returning.
//
// XXX: should probably have a better clean up routine if an error is encountered
func (clst *gceCluster) Stop(machines []Machine) error {
var names []string
for _, m := range machines {
if _, err := clst.instanceDel(m.ID, m.Region); err != nil {
log.WithFields(log.Fields{
"error": err,
"id": m.ID,
}).Error("Failed to delete instance.")
continue
}
names = append(names, m.ID)
}
if err := clst.wait(names, false); err != nil {
return err
}
return nil
}
func (clst *gceCluster) ChooseSize(ram stitch.Range, cpu stitch.Range,
maxPrice float64) string {
return pickBestSize(constants.GoogleDescriptions, ram, cpu, maxPrice)
}
// Get() and operationWait() don't always present the same results, so
// Boot() and Stop() must have a special wait to stay in sync with Get().
func (clst *gceCluster) wait(names []string, live bool) error {
if len(names) == 0 {
return nil
}
after := time.After(3 * time.Minute)
tick := time.NewTicker(3 * time.Second)
defer tick.Stop()
for range tick.C {
select {
case <-after:
return errors.New("wait(): timeout")
default:
}
for len(names) > 0 {
name := names[0]
instances, err := clst.List()
if err != nil {
return err
}
exists := false
for _, ist := range instances {
if name == ist.ID {
exists = true
}
}
if live == exists {
names = append(names[:0], names[1:]...)
}
}
if len(names) == 0 {
return nil
}
}
return nil
}
// Blocking wait with a hardcoded timeout.
//
// Waits on operations, the type of which is indicated by 'domain'. All
// operations must be of the same 'domain'
//
// XXX: maybe not hardcode timeout, and retry interval
func (clst *gceCluster) operationWait(ops []*compute.Operation, domain int) error {
if len(ops) == 0 {
return nil
}
after := time.After(3 * time.Minute)
tick := time.NewTicker(3 * time.Second)
defer tick.Stop()
var op *compute.Operation
var err error
for {
select {
case <-after:
return fmt.Errorf("operationWait(): timeout")
case <-tick.C:
for len(ops) > 0 {
switch {
case domain == local:
op, err = gceService.ZoneOperations.
Get(clst.projID, ops[0].Zone,
ops[0].Name).Do()
case domain == global:
op, err = gceService.GlobalOperations.
Get(clst.projID, ops[0].Name).Do()
}
if err != nil {
return err
}
if op.Status != "DONE" {
break
}
ops = append(ops[:0], ops[1:]...)
}
if len(ops) == 0 {
return nil
}
}
}
}
// Get a GCE instance.
func (clst *gceCluster) instanceGet(name, zone string) (*compute.Instance, error) {
ist, err := gceService.Instances.
Get(clst.projID, zone, name).Do()
return ist, err
}
// Create new GCE instance.
//
// Does not check if the operation succeeds.
//
// XXX: all kinds of hardcoded junk in here
// XXX: currently only defines the bare minimum
func (clst *gceCluster) instanceNew(name string, size string, zone string,
cloudConfig string) (*compute.Operation, error) {
instance := &compute.Instance{
Name: name,
Description: clst.ns,
MachineType: fmt.Sprintf("%s/zones/%s/machineTypes/%s",
clst.baseURL,
zone,
size),
Disks: []*compute.AttachedDisk{
{
Boot: true,
AutoDelete: true,
InitializeParams: &compute.AttachedDiskInitializeParams{
SourceImage: clst.imgURL,
},
},
},
NetworkInterfaces: []*compute.NetworkInterface{
{
AccessConfigs: []*compute.AccessConfig{
{
Type: "ONE_TO_ONE_NAT",
Name: "External NAT",
},
},
Network: fmt.Sprintf("%s/global/networks/%s",
clst.baseURL,
clst.ns),
},
},
Metadata: &compute.Metadata{
Items: []*compute.MetadataItems{
{
Key: "startup-script",
Value: &cloudConfig,
},
},
},
}
op, err := gceService.Instances.
Insert(clst.projID, zone, instance).Do()
if err != nil {
return nil, err
}
return op, nil
}
// Delete a GCE instance.
//
// Does not check if the operation succeeds
func (clst *gceCluster) instanceDel(name, zone string) (*compute.Operation, error) {
op, err := gceService.Instances.Delete(clst.projID, zone, name).Do()
return op, err
}
func (clst *gceCluster) SetACLs(acls []string) error {
list, err := gceService.Firewalls.List(clst.projID).Do()
if err != nil {
return err
}
var fw *compute.Firewall
for _, val := range list.Items {
if val.Name == clst.extFW {
fw = val
break
}
}
sort.Strings(fw.SourceRanges)
sort.Strings(acls)
if fw == nil || reflect.DeepEqual(fw.SourceRanges, acls) {<|fim▁hole|> if err != nil {
return err
}
if err = clst.operationWait([]*compute.Operation{op}, global); err != nil {
return err
}
return nil
}
// Creates the network for the cluster.
func (clst *gceCluster) networkNew(name string) (*compute.Operation, error) {
network := &compute.Network{
Name: name,
IPv4Range: clst.ipv4Range,
}
op, err := gceService.Networks.Insert(clst.projID, network).Do()
return op, err
}
func (clst *gceCluster) networkExists(name string) (bool, error) {
list, err := gceService.Networks.List(clst.projID).Do()
if err != nil {
return false, err
}
for _, val := range list.Items {
if val.Name == name {
return true, nil
}
}
return false, nil
}
// This creates a firewall but does nothing else
//
// XXX: Assumes there is only one network
func (clst *gceCluster) insertFirewall(name, sourceRange string) (
*compute.Operation, error) {
firewall := &compute.Firewall{
Name: name,
Network: fmt.Sprintf("%s/global/networks/%s",
clst.baseURL,
clst.ns),
Allowed: []*compute.FirewallAllowed{
{
IPProtocol: "tcp",
Ports: []string{"0-65535"},
},
{
IPProtocol: "udp",
Ports: []string{"0-65535"},
},
{
IPProtocol: "icmp",
},
},
SourceRanges: []string{sourceRange},
}
op, err := gceService.Firewalls.Insert(clst.projID, firewall).Do()
return op, err
}
func (clst *gceCluster) firewallExists(name string) (bool, error) {
list, err := gceService.Firewalls.List(clst.projID).Do()
if err != nil {
return false, err
}
for _, val := range list.Items {
if val.Name == name {
return true, nil
}
}
return false, nil
}
// Updates the firewall using PATCH semantics.
//
// The IP addresses must be in CIDR notation.
// XXX: Assumes there is only one network
// XXX: Assumes the firewall only needs to adjust the IP addrs affected
func (clst *gceCluster) firewallPatch(name string,
ips []string) (*compute.Operation, error) {
firewall := &compute.Firewall{
Name: name,
Network: fmt.Sprintf("%s/global/networks/%s",
clst.baseURL,
clst.ns),
SourceRanges: ips,
}
op, err := gceService.Firewalls.Patch(clst.projID, name, firewall).Do()
return op, err
}
// Initialize GCE.
//
// Authenication and the client are things that are re-used across clusters.
//
// Idempotent, can call multiple times but will only initialize once.
//
// XXX: ^but should this be the case? maybe we can just have the user call it?
func gceInit() error {
if gAuthClient == nil {
log.Debug("GCE initializing...")
keyfile := filepath.Join(
os.Getenv("HOME"),
".gce",
"quilt.json")
err := os.Setenv("GOOGLE_APPLICATION_CREDENTIALS", keyfile)
if err != nil {
return err
}
srv, err := newComputeService(context.Background())
if err != nil {
return err
}
gceService = srv
} else {
log.Debug("GCE already initialized! Skipping...")
}
log.Debug("GCE initialize success")
return nil
}
func newComputeService(ctx context.Context) (*compute.Service, error) {
client, err := google.DefaultClient(ctx, compute.ComputeScope)
if err != nil {
return nil, err
}
computeService, err := compute.New(client)
if err != nil {
return nil, err
}
return computeService, nil
}
// Initializes the network for the cluster
//
// XXX: Currently assumes that each cluster is entirely behind 1 network
func (clst *gceCluster) netInit() error {
exists, err := clst.networkExists(clst.ns)
if err != nil {
return err
}
if exists {
log.Debug("Network already exists")
return nil
}
log.Debug("Creating network")
op, err := clst.networkNew(clst.ns)
if err != nil {
return err
}
err = clst.operationWait([]*compute.Operation{op}, global)
if err != nil {
return err
}
return nil
}
// Initializes the firewall for the cluster
//
// XXX: Currently assumes that each cluster is entirely behind 1 network
func (clst *gceCluster) fwInit() error {
var ops []*compute.Operation
if exists, err := clst.firewallExists(clst.intFW); err != nil {
return err
} else if exists {
log.Debug("internal firewall already exists")
} else {
log.Debug("creating internal firewall")
op, err := clst.insertFirewall(clst.intFW, clst.ipv4Range)
if err != nil {
return err
}
ops = append(ops, op)
}
if exists, err := clst.firewallExists(clst.extFW); err != nil {
return err
} else if exists {
log.Debug("external firewall already exists")
} else {
log.Debug("creating external firewall")
op, err := clst.insertFirewall(clst.extFW, "127.0.0.1/32")
if err != nil {
return err
}
ops = append(ops, op)
}
if err := clst.operationWait(ops, global); err != nil {
return err
}
return nil
}<|fim▁end|> | return nil
}
op, err := clst.firewallPatch(clst.extFW, acls) |
<|file_name|>recurrent.py<|end_file_name|><|fim▁begin|># Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Haiku recurrent core."""
import abc
import types
from typing import Any, NamedTuple, Optional, Sequence, Tuple, Union
from haiku._src import base
from haiku._src import basic
from haiku._src import conv
from haiku._src import initializers
from haiku._src import module
from haiku._src import stateful
import jax
import jax.nn
import jax.numpy as jnp
# If you are forking replace this with `import haiku as hk`.
hk = types.ModuleType("haiku")
hk.initializers = initializers
hk.Linear = basic.Linear
hk.ConvND = conv.ConvND
hk.get_parameter = base.get_parameter
hk.Module = module.Module
hk.scan = stateful.scan
inside_transform = base.inside_transform
del base, basic, conv, initializers, module
class RNNCore(hk.Module):
"""Base class for RNN cores.
This class defines the basic functionality that every core should
implement: :meth:`initial_state`, used to construct an example of the
core state; and :meth:`__call__` which applies the core parameterized
by a previous state to an input.
Cores may be used with :func:`dynamic_unroll` and :func:`static_unroll` to
iteratively construct an output sequence from the given input sequence.
"""
@abc.abstractmethod
def __call__(self, inputs, prev_state) -> Tuple[Any, Any]:
"""Run one step of the RNN.
Args:
inputs: An arbitrarily nested structure.
prev_state: Previous core state.
Returns:
A tuple with two elements ``output, next_state``. ``output`` is an
arbitrarily nested structure. ``next_state`` is the next core state, this
must be the same shape as ``prev_state``.
"""
@abc.abstractmethod
def initial_state(self, batch_size: Optional[int]):
"""Constructs an initial state for this core.
Args:
batch_size: Optional int or an integral scalar tensor representing
batch size. If None, the core may either fail or (experimentally)
return an initial state without a batch dimension.
Returns:
Arbitrarily nested initial state for this core.
"""
def static_unroll(core, input_sequence, initial_state, time_major=True):
"""Performs a static unroll of an RNN.
An *unroll* corresponds to calling the core on each element of the
input sequence in a loop, carrying the state through::
state = initial_state
for t in range(len(input_sequence)):
outputs, state = core(input_sequence[t], state)
A *static* unroll replaces a loop with its body repeated multiple
times when executed inside :func:`jax.jit`::
state = initial_state
outputs0, state = core(input_sequence[0], state)
outputs1, state = core(input_sequence[1], state)
outputs2, state = core(input_sequence[2], state)
...
See :func:`dynamic_unroll` for a loop-preserving unroll function.
Args:
core: An :class:`RNNCore` to unroll.
input_sequence: An arbitrarily nested structure of tensors of shape
``[T, ...]`` if time-major=True, or ``[B, T, ...]`` if time_major=False,
where ``T`` is the number of time steps.
initial_state: An initial state of the given core.
time_major: If True, inputs are expected time-major, otherwise they are
expected batch-major.
Returns:
A tuple with two elements:
* **output_sequence** - An arbitrarily nested structure of tensors
of shape ``[T, ...]`` if time-major, otherwise ``[B, T, ...]``.
* **final_state** - Core state at time step ``T``.
"""
output_sequence = []
time_axis = 0 if time_major else 1
num_steps = jax.tree_leaves(input_sequence)[0].shape[time_axis]
state = initial_state
for t in range(num_steps):
if time_major:
inputs = jax.tree_map(lambda x, _t=t: x[_t], input_sequence)
else:
inputs = jax.tree_map(lambda x, _t=t: x[:, _t], input_sequence)
outputs, state = core(inputs, state)
output_sequence.append(outputs)
# Stack outputs along the time axis.
output_sequence = jax.tree_multimap(
lambda *args: jnp.stack(args, axis=time_axis),
*output_sequence)
return output_sequence, state
def _swap_batch_time(inputs):
"""Swaps batch and time axes, assumed to be the first two axes."""
return jax.tree_map(lambda x: jnp.swapaxes(x, 0, 1), inputs)
def dynamic_unroll(core,
input_sequence,
initial_state,
time_major=True,
reverse=False,
return_all_states=False):
"""Performs a dynamic unroll of an RNN.
An *unroll* corresponds to calling the core on each element of the
input sequence in a loop, carrying the state through::
state = initial_state
for t in range(len(input_sequence)):
outputs, state = core(input_sequence[t], state)
A *dynamic* unroll preserves the loop structure when executed inside
:func:`jax.jit`. See :func:`static_unroll` for an unroll function which
replaces a loop with its body repeated multiple times.
Args:
core: An :class:`RNNCore` to unroll.
input_sequence: An arbitrarily nested structure of tensors of shape
``[T, ...]`` if time-major=True, or ``[B, T, ...]`` if time_major=False,
where ``T`` is the number of time steps.
initial_state: An initial state of the given core.
time_major: If True, inputs are expected time-major, otherwise they are
expected batch-major.
reverse: If True, inputs are scanned in the reversed order. Equivalent to
reversing the time dimension in both inputs and outputs. See
https://jax.readthedocs.io/en/latest/_autosummary/jax.lax.scan.html for
more details.
return_all_states: If True, all intermediate states are returned rather than
only the last one in time.
Returns:
A tuple with two elements:
* **output_sequence** - An arbitrarily nested structure of tensors
of shape ``[T, ...]`` if time-major, otherwise ``[B, T, ...]``.
* **state_sequence** - If return_all_states is True, returns the sequence
of core states. Otherwise, core state at time step ``T``.
"""
scan = hk.scan if inside_transform() else jax.lax.scan
# Swap the input and output of core.
def scan_f(prev_state, inputs):
outputs, next_state = core(inputs, prev_state)
if return_all_states:
return next_state, (outputs, next_state)
return next_state, outputs
# TODO(hamzamerzic): Remove axis swapping once scan supports time axis arg.
if not time_major:
input_sequence = _swap_batch_time(input_sequence)
scan_result = scan(
scan_f, initial_state, input_sequence, reverse=reverse)
if return_all_states:
_, (output_sequence, state_sequence) = scan_result
else:
last_state, output_sequence = scan_result
if not time_major:
output_sequence = _swap_batch_time(output_sequence)
if return_all_states:
state_sequence = _swap_batch_time(state_sequence)
if return_all_states:
return output_sequence, state_sequence
return output_sequence, last_state
def add_batch(nest, batch_size: Optional[int]):
"""Adds a batch dimension at axis 0 to the leaves of a nested structure."""
broadcast = lambda x: jnp.broadcast_to(x, (batch_size,) + x.shape)
return jax.tree_map(broadcast, nest)
class VanillaRNN(RNNCore):
r"""Basic fully-connected RNN core.
Given :math:`x_t` and the previous hidden state :math:`h_{t-1}` the
core computes
.. math::
h_t = \operatorname{ReLU}(w_i x_t + b_i + w_h h_{t-1} + b_h)
The output is equal to the new state, :math:`h_t`.
"""
def __init__(
self,
hidden_size: int,
double_bias: bool = True,
name: Optional[str] = None
):
"""Constructs a vanilla RNN core.
Args:
hidden_size: Hidden layer size.
double_bias: Whether to use a bias in the two linear layers. This changes
nothing to the learning performance of the cell. However, doubling will
create two sets of bias parameters rather than one.
name: Name of the module.
"""
super().__init__(name=name)
self.hidden_size = hidden_size
self.double_bias = double_bias
def __call__(self, inputs, prev_state):
input_to_hidden = hk.Linear(self.hidden_size)
# TODO(b/173771088): Consider changing default to double_bias=False.
hidden_to_hidden = hk.Linear(self.hidden_size, with_bias=self.double_bias)
out = jax.nn.relu(input_to_hidden(inputs) + hidden_to_hidden(prev_state))
return out, out
def initial_state(self, batch_size: Optional[int]):
state = jnp.zeros([self.hidden_size])
if batch_size is not None:
state = add_batch(state, batch_size)
return state
class LSTMState(NamedTuple):
"""An LSTM core state consists of hidden and cell vectors.
Attributes:
hidden: Hidden state.
cell: Cell state.
"""
hidden: jnp.ndarray
cell: jnp.ndarray
class LSTM(RNNCore):
r"""Long short-term memory (LSTM) RNN core.
The implementation is based on :cite:`zaremba2014recurrent`. Given
:math:`x_t` and the previous state :math:`(h_{t-1}, c_{t-1})` the core
computes
.. math::
\begin{array}{ll}
i_t = \sigma(W_{ii} x_t + W_{hi} h_{t-1} + b_i) \\
f_t = \sigma(W_{if} x_t + W_{hf} h_{t-1} + b_f) \\
g_t = \tanh(W_{ig} x_t + W_{hg} h_{t-1} + b_g) \\
o_t = \sigma(W_{io} x_t + W_{ho} h_{t-1} + b_o) \\
c_t = f_t c_{t-1} + i_t g_t \\
h_t = o_t \tanh(c_t)
\end{array}
where :math:`i_t`, :math:`f_t`, :math:`o_t` are input, forget and
output gate activations, and :math:`g_t` is a vector of cell updates.
The output is equal to the new hidden, :math:`h_t`.
Notes:
Forget gate initialization:
Following :cite:`jozefowicz2015empirical` we add 1.0 to :math:`b_f`
after initialization in order to reduce the scale of forgetting in
the beginning of the training.
"""
def __init__(self, hidden_size: int, name: Optional[str] = None):
"""Constructs an LSTM.
Args:
hidden_size: Hidden layer size.
name: Name of the module.
"""
super().__init__(name=name)
self.hidden_size = hidden_size
def __call__(
self,
inputs: jnp.ndarray,
prev_state: LSTMState,
) -> Tuple[jnp.ndarray, LSTMState]:
if len(inputs.shape) > 2 or not inputs.shape:
raise ValueError("LSTM input must be rank-1 or rank-2.")
x_and_h = jnp.concatenate([inputs, prev_state.hidden], axis=-1)
gated = hk.Linear(4 * self.hidden_size)(x_and_h)
# TODO(slebedev): Consider aligning the order of gates with Sonnet.
# i = input, g = cell_gate, f = forget_gate, o = output_gate
i, g, f, o = jnp.split(gated, indices_or_sections=4, axis=-1)
f = jax.nn.sigmoid(f + 1) # Forget bias, as in sonnet.
c = f * prev_state.cell + jax.nn.sigmoid(i) * jnp.tanh(g)
h = jax.nn.sigmoid(o) * jnp.tanh(c)
return h, LSTMState(h, c)
def initial_state(self, batch_size: Optional[int]) -> LSTMState:
state = LSTMState(hidden=jnp.zeros([self.hidden_size]),
cell=jnp.zeros([self.hidden_size]))
if batch_size is not None:
state = add_batch(state, batch_size)
return state
class ConvNDLSTM(RNNCore):
r"""``num_spatial_dims``-D convolutional LSTM.
The implementation is based on :cite:`xingjian2015convolutional`.
Given :math:`x_t` and the previous state :math:`(h_{t-1}, c_{t-1})`
the core computes
.. math::
\begin{array}{ll}
i_t = \sigma(W_{ii} * x_t + W_{hi} * h_{t-1} + b_i) \\
f_t = \sigma(W_{if} * x_t + W_{hf} * h_{t-1} + b_f) \\
g_t = \tanh(W_{ig} * x_t + W_{hg} * h_{t-1} + b_g) \\
o_t = \sigma(W_{io} * x_t + W_{ho} * h_{t-1} + b_o) \\
c_t = f_t c_{t-1} + i_t g_t \\
h_t = o_t \tanh(c_t)
\end{array}
where :math:`*` denotes the convolution operator; :math:`i_t`,
:math:`f_t`, :math:`o_t` are input, forget and output gate activations,
and :math:`g_t` is a vector of cell updates.
The output is equal to the new hidden state, :math:`h_t`.
Notes:
Forget gate initialization:
Following :cite:`jozefowicz2015empirical` we add 1.0 to :math:`b_f`
after initialization in order to reduce the scale of forgetting in
the beginning of the training.
"""
def __init__(
self,
num_spatial_dims: int,
input_shape: Sequence[int],
output_channels: int,
kernel_shape: Union[int, Sequence[int]],
name: Optional[str] = None,
):
"""Constructs a convolutional LSTM.
Args:
num_spatial_dims: Number of spatial dimensions of the input.
input_shape: Shape of the inputs excluding batch size.
output_channels: Number of output channels.
kernel_shape: Sequence of kernel sizes (of length ``num_spatial_dims``),
or an int. ``kernel_shape`` will be expanded to define a kernel size in
all dimensions.
name: Name of the module.
"""
super().__init__(name=name)
self.num_spatial_dims = num_spatial_dims
self.input_shape = tuple(input_shape)<|fim▁hole|> def __call__(
self,
inputs,
state: LSTMState,
) -> Tuple[jnp.ndarray, LSTMState]:
input_to_hidden = hk.ConvND(
num_spatial_dims=self.num_spatial_dims,
output_channels=4 * self.output_channels,
kernel_shape=self.kernel_shape,
name="input_to_hidden")
hidden_to_hidden = hk.ConvND(
num_spatial_dims=self.num_spatial_dims,
output_channels=4 * self.output_channels,
kernel_shape=self.kernel_shape,
name="hidden_to_hidden")
gates = input_to_hidden(inputs) + hidden_to_hidden(state.hidden)
i, g, f, o = jnp.split(gates, indices_or_sections=4, axis=-1)
f = jax.nn.sigmoid(f + 1)
c = f * state.cell + jax.nn.sigmoid(i) * jnp.tanh(g)
h = jax.nn.sigmoid(o) * jnp.tanh(c)
return h, LSTMState(h, c)
def initial_state(self, batch_size: Optional[int]) -> LSTMState:
shape = self.input_shape + (self.output_channels,)
state = LSTMState(jnp.zeros(shape), jnp.zeros(shape))
if batch_size is not None:
state = add_batch(state, batch_size)
return state
class Conv1DLSTM(ConvNDLSTM): # pylint: disable=empty-docstring
__doc__ = ConvNDLSTM.__doc__.replace("``num_spatial_dims``", "1")
def __init__(
self,
input_shape: Sequence[int],
output_channels: int,
kernel_shape: Union[int, Sequence[int]],
name: Optional[str] = None,
):
"""Constructs a 1-D convolutional LSTM.
Args:
input_shape: Shape of the inputs excluding batch size.
output_channels: Number of output channels.
kernel_shape: Sequence of kernel sizes (of length 1), or an int.
``kernel_shape`` will be expanded to define a kernel size in all
dimensions.
name: Name of the module.
"""
super().__init__(
num_spatial_dims=1,
input_shape=input_shape,
output_channels=output_channels,
kernel_shape=kernel_shape,
name=name)
class Conv2DLSTM(ConvNDLSTM): # pylint: disable=empty-docstring
__doc__ = ConvNDLSTM.__doc__.replace("``num_spatial_dims``", "2")
def __init__(
self,
input_shape: Sequence[int],
output_channels: int,
kernel_shape: Union[int, Sequence[int]],
name: Optional[str] = None,
):
"""Constructs a 2-D convolutional LSTM.
Args:
input_shape: Shape of the inputs excluding batch size.
output_channels: Number of output channels.
kernel_shape: Sequence of kernel sizes (of length 2), or an int.
``kernel_shape`` will be expanded to define a kernel size in all
dimensions.
name: Name of the module.
"""
super().__init__(
num_spatial_dims=2,
input_shape=input_shape,
output_channels=output_channels,
kernel_shape=kernel_shape,
name=name)
class Conv3DLSTM(ConvNDLSTM): # pylint: disable=empty-docstring
__doc__ = ConvNDLSTM.__doc__.replace("``num_spatial_dims``", "3")
def __init__(
self,
input_shape: Sequence[int],
output_channels: int,
kernel_shape: Union[int, Sequence[int]],
name: Optional[str] = None,
):
"""Constructs a 3-D convolutional LSTM.
Args:
input_shape: Shape of the inputs excluding batch size.
output_channels: Number of output channels.
kernel_shape: Sequence of kernel sizes (of length 3), or an int.
``kernel_shape`` will be expanded to define a kernel size in all
dimensions.
name: Name of the module.
"""
super().__init__(
num_spatial_dims=3,
input_shape=input_shape,
output_channels=output_channels,
kernel_shape=kernel_shape,
name=name)
class GRU(RNNCore):
r"""Gated Recurrent Unit.
The implementation is based on: https://arxiv.org/pdf/1412.3555v1.pdf with
biases.
Given :math:`x_t` and the previous state :math:`h_{t-1}` the core computes
.. math::
\begin{array}{ll}
z_t &= \sigma(W_{iz} x_t + W_{hz} h_{t-1} + b_z) \\
r_t &= \sigma(W_{ir} x_t + W_{hr} h_{t-1} + b_r) \\
a_t &= \tanh(W_{ia} x_t + W_{ha} (r_t \bigodot h_{t-1}) + b_a) \\
h_t &= (1 - z_t) \bigodot h_{t-1} + z_t \bigodot a_t
\end{array}
where :math:`z_t` and :math:`r_t` are reset and update gates.
The output is equal to the new hidden state, :math:`h_t`.
"""
def __init__(
self,
hidden_size: int,
w_i_init: Optional[hk.initializers.Initializer] = None,
w_h_init: Optional[hk.initializers.Initializer] = None,
b_init: Optional[hk.initializers.Initializer] = None,
name: Optional[str] = None,
):
super().__init__(name=name)
self.hidden_size = hidden_size
self.w_i_init = w_i_init or hk.initializers.VarianceScaling()
self.w_h_init = w_h_init or hk.initializers.VarianceScaling()
self.b_init = b_init or jnp.zeros
def __call__(self, inputs, state):
if inputs.ndim not in (1, 2):
raise ValueError("GRU input must be rank-1 or rank-2.")
input_size = inputs.shape[-1]
hidden_size = self.hidden_size
w_i = hk.get_parameter("w_i", [input_size, 3 * hidden_size], inputs.dtype,
init=self.w_i_init)
w_h = hk.get_parameter("w_h", [hidden_size, 3 * hidden_size], inputs.dtype,
init=self.w_h_init)
b = hk.get_parameter("b", [3 * hidden_size], inputs.dtype, init=self.b_init)
w_h_z, w_h_a = jnp.split(w_h, indices_or_sections=[2 * hidden_size], axis=1)
b_z, b_a = jnp.split(b, indices_or_sections=[2 * hidden_size], axis=0)
gates_x = jnp.matmul(inputs, w_i)
zr_x, a_x = jnp.split(
gates_x, indices_or_sections=[2 * hidden_size], axis=-1)
zr_h = jnp.matmul(state, w_h_z)
zr = zr_x + zr_h + jnp.broadcast_to(b_z, zr_h.shape)
z, r = jnp.split(jax.nn.sigmoid(zr), indices_or_sections=2, axis=-1)
a_h = jnp.matmul(r * state, w_h_a)
a = jnp.tanh(a_x + a_h + jnp.broadcast_to(b_a, a_h.shape))
next_state = (1 - z) * state + z * a
return next_state, next_state
def initial_state(self, batch_size: Optional[int]):
state = jnp.zeros([self.hidden_size])
if batch_size is not None:
state = add_batch(state, batch_size)
return state
class IdentityCore(RNNCore):
"""A recurrent core that forwards the inputs and an empty state.
This is commonly used when switching between recurrent and feedforward
versions of a model while preserving the same interface.
"""
def __call__(self, inputs, state):
return inputs, state
def initial_state(self, batch_size: Optional[int]):
return ()
def _validate_and_conform(should_reset, state):
"""Ensures that should_reset is compatible with state."""
if should_reset.shape == state.shape[:should_reset.ndim]:
broadcast_shape = should_reset.shape + (1,)*(state.ndim - should_reset.ndim)
return jnp.reshape(should_reset, broadcast_shape)
raise ValueError(
"should_reset signal shape {} is not compatible with "
"state shape {}".format(should_reset.shape, state.shape))
class ResetCore(RNNCore):
"""A wrapper for managing state resets during unrolls.
When unrolling an :class:`RNNCore` on a batch of inputs sequences it may be
necessary to reset the core's state at different timesteps for different
elements of the batch. The :class:`ResetCore` class enables this by taking a
batch of ``should_reset`` booleans in addition to the batch of inputs, and
conditionally resetting the core's state for individual elements of the batch.
You may also reset individual entries of the state by passing a
``should_reset`` nest compatible with the state structure.
"""
def __init__(self, core: RNNCore, name: Optional[str] = None):
super().__init__(name=name)
self.core = core
def __call__(self, inputs, state):
"""Run one step of the wrapped core, handling state reset.
Args:
inputs: Tuple with two elements, ``inputs, should_reset``, where
``should_reset`` is the signal used to reset the wrapped core's state.
``should_reset`` can be either tensor or nest. If nest, ``should_reset``
must match the state structure, and its components' shapes must be
prefixes of the corresponding entries tensors' shapes in the state nest.
If tensor, supported shapes are all commom shape prefixes of the state
component tensors, e.g. ``[batch_size]``.
state: Previous wrapped core state.
Returns:
Tuple of the wrapped core's ``output, next_state``.
"""
inputs, should_reset = inputs
if jax.treedef_is_leaf(jax.tree_structure(should_reset)):
# Equivalent to not tree.is_nested, but with support for Jax extensible
# pytrees.
should_reset = jax.tree_map(lambda _: should_reset, state)
# We now need to manually pad 'on the right' to ensure broadcasting operates
# correctly.
# Automatic broadcasting would in fact implicitly pad 'on the left',
# resulting in the signal to trigger resets for parts of the state
# across batch entries. For example:
#
# import jax
# import jax.numpy as jnp
#
# shape = (2, 2, 2)
# x = jnp.zeros(shape)
# y = jnp.ones(shape)
# should_reset = jnp.array([False, True])
# v = jnp.where(should_reset, x, y)
# for batch_entry in range(shape[0]):
# print("batch_entry {}:\n".format(batch_entry), v[batch_entry])
#
# >> batch_entry 0:
# >> [[1. 0.]
# >> [1. 0.]]
# >> batch_entry 1:
# >> [[1. 0.]
# >> [1. 0.]]
#
# Note how manually padding the should_reset tensor yields the desired
# behavior.
#
# import jax
# import jax.numpy as jnp
#
# shape = (2, 2, 2)
# x = jnp.zeros(shape)
# y = jnp.ones(shape)
# should_reset = jnp.array([False, True])
# dims_to_add = x.ndim - should_reset.ndim
# should_reset = should_reset.reshape(should_reset.shape + (1,)*dims_to_add)
# v = jnp.where(should_reset, x, y)
# for batch_entry in range(shape[0]):
# print("batch_entry {}:\n".format(batch_entry), v[batch_entry])
#
# >> batch_entry 0:
# >> [[1. 1.]
# >> [1. 1.]]
# >> batch_entry 1:
# >> [[0. 0.]
# >> [0. 0.]]
should_reset = jax.tree_multimap(_validate_and_conform, should_reset, state)
if self._is_batched(state):
batch_size = jax.tree_leaves(inputs)[0].shape[0]
else:
batch_size = None
initial_state = jax.tree_multimap(
lambda s, i: i.astype(s.dtype), state, self.initial_state(batch_size))
state = jax.tree_multimap(jnp.where, should_reset, initial_state, state)
return self.core(inputs, state)
def initial_state(self, batch_size: Optional[int]):
return self.core.initial_state(batch_size)
def _is_batched(self, state):
state = jax.tree_leaves(state)
if not state: # Empty state is treated as unbatched.
return False
batched = jax.tree_leaves(self.initial_state(batch_size=1))
return all(b.shape[1:] == s.shape[1:] for b, s in zip(batched, state))
class _DeepRNN(RNNCore):
"""Underlying implementation of DeepRNN with skip connections."""
def __init__(
self,
layers: Sequence[Any],
skip_connections: bool,
name: Optional[str] = None
):
super().__init__(name=name)
self.layers = layers
self.skip_connections = skip_connections
if skip_connections:
for layer in layers:
if not isinstance(layer, RNNCore):
raise ValueError("skip_connections requires for all layers to be "
"`hk.RNNCore`s. Layers is: {}".format(layers))
def __call__(self, inputs, state):
current_inputs = inputs
next_states = []
outputs = []
state_idx = 0
concat = lambda *args: jnp.concatenate(args, axis=-1)
for idx, layer in enumerate(self.layers):
if self.skip_connections and idx > 0:
current_inputs = jax.tree_multimap(concat, inputs, current_inputs)
if isinstance(layer, RNNCore):
current_inputs, next_state = layer(current_inputs, state[state_idx])
outputs.append(current_inputs)
next_states.append(next_state)
state_idx += 1
else:
current_inputs = layer(current_inputs)
if self.skip_connections:
out = jax.tree_multimap(concat, *outputs)
else:
out = current_inputs
return out, tuple(next_states)
def initial_state(self, batch_size: Optional[int]):
return tuple(
layer.initial_state(batch_size)
for layer in self.layers
if isinstance(layer, RNNCore))
class DeepRNN(_DeepRNN):
r"""Wraps a sequence of cores and callables as a single core.
>>> deep_rnn = hk.DeepRNN([
... hk.LSTM(hidden_size=4),
... jax.nn.relu,
... hk.LSTM(hidden_size=2),
... ])
The state of a :class:`DeepRNN` is a tuple with one element per
:class:`RNNCore`. If no layers are :class:`RNNCore`\ s, the state is an empty
tuple.
"""
def __init__(self, layers: Sequence[Any], name: Optional[str] = None):
super().__init__(layers, skip_connections=False, name=name)
def deep_rnn_with_skip_connections(layers: Sequence[RNNCore],
name: Optional[str] = None) -> RNNCore:
r"""Constructs a :class:`DeepRNN` with skip connections.
Skip connections alter the dependency structure within a :class:`DeepRNN`.
Specifically, input to the i-th layer (i > 0) is given by a
concatenation of the core's inputs and the outputs of the (i-1)-th layer.
The output of the :class:`DeepRNN` is the concatenation of the outputs of all
cores.
.. code-block:: python
outputs0, ... = layers[0](inputs, ...)
outputs1, ... = layers[1](tf.concat([inputs, outputs0], axis=-1], ...)
outputs2, ... = layers[2](tf.concat([inputs, outputs1], axis=-1], ...)
...
Args:
layers: List of :class:`RNNCore`\ s.
name: Name of the module.
Returns:
A :class:`_DeepRNN` with skip connections.
Raises:
ValueError: If any of the layers is not an :class:`RNNCore`.
"""
return _DeepRNN(layers, skip_connections=True, name=name)<|fim▁end|> | self.output_channels = output_channels
self.kernel_shape = kernel_shape
|
<|file_name|>migrate_transcripts.py<|end_file_name|><|fim▁begin|>"""
Command to migrate transcripts to django storage.
"""
import logging
from django.core.management import BaseCommand, CommandError
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import CourseLocator
from cms.djangoapps.contentstore.tasks import (
DEFAULT_ALL_COURSES,
DEFAULT_FORCE_UPDATE,
DEFAULT_COMMIT,
enqueue_async_migrate_transcripts_tasks
)
from openedx.core.lib.command_utils import get_mutually_exclusive_required_option, parse_course_keys
from openedx.core.djangoapps.video_config.models import TranscriptMigrationSetting
from xmodule.modulestore.django import modulestore
log = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Example usage:
$ ./manage.py cms migrate_transcripts --all-courses --force-update --commit
$ ./manage.py cms migrate_transcripts --course-id 'Course1' --course-id 'Course2' --commit
$ ./manage.py cms migrate_transcripts --from-settings
"""
help = 'Migrates transcripts to S3 for one or more courses.'
def add_arguments(self, parser):
"""
Add arguments to the command parser.
"""<|fim▁hole|> help=u'Migrates transcripts for the list of courses.'
)
parser.add_argument(
'--all-courses', '--all', '--all_courses',
dest='all_courses',
action='store_true',
default=DEFAULT_ALL_COURSES,
help=u'Migrates transcripts to the configured django storage for all courses.'
)
parser.add_argument(
'--from-settings', '--from_settings',
dest='from_settings',
help='Migrate Transcripts with settings set via django admin',
action='store_true',
default=False,
)
parser.add_argument(
'--force-update', '--force_update',
dest='force_update',
action='store_true',
default=DEFAULT_FORCE_UPDATE,
help=u'Force migrate transcripts for the requested courses, overwrite if already present.'
)
parser.add_argument(
'--commit',
dest='commit',
action='store_true',
default=DEFAULT_COMMIT,
help=u'Commits the discovered video transcripts to django storage. '
u'Without this flag, the command will return the transcripts discovered for migration.'
)
def _parse_course_key(self, raw_value):
""" Parses course key from string """
try:
result = CourseKey.from_string(raw_value)
except InvalidKeyError:
raise CommandError("Invalid course_key: '%s'." % raw_value)
if not isinstance(result, CourseLocator):
raise CommandError(u"Argument {0} is not a course key".format(raw_value))
return result
def _get_migration_options(self, options):
"""
Returns the command arguments configured via django admin.
"""
force_update = options['force_update']
commit = options['commit']
courses_mode = get_mutually_exclusive_required_option(options, 'course_ids', 'all_courses', 'from_settings')
if courses_mode == 'all_courses':
course_keys = [course.id for course in modulestore().get_course_summaries()]
elif courses_mode == 'course_ids':
course_keys = map(self._parse_course_key, options['course_ids'])
else:
if self._latest_settings().all_courses:
course_keys = [course.id for course in modulestore().get_course_summaries()]
else:
course_keys = parse_course_keys(self._latest_settings().course_ids.split())
force_update = self._latest_settings().force_update
commit = self._latest_settings().commit
return course_keys, force_update, commit
def _latest_settings(self):
"""
Return the latest version of the TranscriptMigrationSetting
"""
return TranscriptMigrationSetting.current()
def handle(self, *args, **options):
"""
Invokes the migrate transcripts enqueue function.
"""
course_keys, force_update, commit = self._get_migration_options(options)
command_run = self._latest_settings().increment_run() if commit else -1
enqueue_async_migrate_transcripts_tasks(
course_keys=course_keys, commit=commit, command_run=command_run, force_update=force_update
)<|fim▁end|> | parser.add_argument(
'--course-id', '--course_id',
dest='course_ids',
action='append', |
<|file_name|>DynamoDocumentStoreTemplateTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014 Click Travel Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.clicktravel.infrastructure.persistence.aws.dynamodb;
import static com.clicktravel.common.random.Randoms.randomId;
import static com.clicktravel.common.random.Randoms.randomString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.powermock.api.mockito.PowerMockito.whenNew;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.document.Item;
import com.amazonaws.services.dynamodbv2.document.Table;
import com.amazonaws.services.dynamodbv2.document.spec.DeleteItemSpec;
import com.amazonaws.services.dynamodbv2.document.spec.GetItemSpec;
import com.amazonaws.services.dynamodbv2.document.spec.PutItemSpec;
import com.amazonaws.services.dynamodbv2.model.ConditionalCheckFailedException;
import com.clicktravel.cheddar.infrastructure.persistence.database.ItemId;
import com.clicktravel.cheddar.infrastructure.persistence.database.configuration.DatabaseSchemaHolder;
import com.clicktravel.cheddar.infrastructure.persistence.database.configuration.ItemConfiguration;
import com.clicktravel.cheddar.infrastructure.persistence.database.exception.NonExistentItemException;
import com.clicktravel.cheddar.infrastructure.persistence.database.exception.OptimisticLockException;
import com.clicktravel.common.random.Randoms;
@RunWith(PowerMockRunner.class)
@PrepareForTest({ DynamoDocumentStoreTemplate.class })
public class DynamoDocumentStoreTemplateTest {
private DatabaseSchemaHolder mockDatabaseSchemaHolder;
private String schemaName;
private String tableName;
private AmazonDynamoDB mockAmazonDynamoDbClient;
private DynamoDB mockDynamoDBClient;
@Before
public void setup() throws Exception {
schemaName = randomString(10);
tableName = randomString(10);
mockDatabaseSchemaHolder = mock(DatabaseSchemaHolder.class);
when(mockDatabaseSchemaHolder.schemaName()).thenReturn(schemaName);
mockAmazonDynamoDbClient = mock(AmazonDynamoDB.class);
mockDynamoDBClient = mock(DynamoDB.class);
whenNew(DynamoDB.class).withParameterTypes(AmazonDynamoDB.class).withArguments(eq(mockAmazonDynamoDbClient))
.thenReturn(mockDynamoDBClient);
}
@SuppressWarnings("deprecation")
@Test
public void shouldCreate_withItem() {
// Given
final ItemId itemId = new ItemId(randomId());
final StubItem stubItem = generateRandomStubItem(itemId);
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Item mockTableItem = mock(Item.class);
when(mockTableItem.toJSON()).thenReturn(dynamoDocumentStoreTemplate.itemToString(stubItem));
// When
final StubItem returnedItem = dynamoDocumentStoreTemplate.create(stubItem);
// Then
final ArgumentCaptor<PutItemSpec> getItemRequestCaptor = ArgumentCaptor.forClass(PutItemSpec.class);
verify(mockTable).putItem(getItemRequestCaptor.capture());
final PutItemSpec spec = getItemRequestCaptor.getValue();
assertEquals(itemId.value(), spec.getItem().get("id"));
assertEquals(itemId.value(), returnedItem.getId());
assertEquals(stubItem.getStringProperty(), returnedItem.getStringProperty());
assertEquals(stubItem.getStringProperty2(), returnedItem.getStringProperty2());
assertEquals(stubItem.getStringSetProperty(), returnedItem.getStringSetProperty());
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotCreate_withItem() {
// Given
final ItemId itemId = new ItemId(randomId());
final StubItem stubItem = generateRandomStubItem(itemId);
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Item mockTableItem = mock(Item.class);
when(mockTableItem.toJSON()).thenReturn(dynamoDocumentStoreTemplate.itemToString(stubItem));
doThrow(RuntimeException.class).when(mockTable).putItem(any(PutItemSpec.class));
RuntimeException thrownException = null;
// When
try {
dynamoDocumentStoreTemplate.create(stubItem);
} catch (final RuntimeException runtimeException) {
thrownException = runtimeException;
}
// Then
assertNotNull(thrownException);
}
@SuppressWarnings("deprecation")
@Test
public void shouldRead_withItemIdAndItemClass() throws Exception {
// Given
final ItemId itemId = new ItemId(randomId());
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final Item mockTableItem = mock(Item.class);
when(mockTable.getItem(any(GetItemSpec.class))).thenReturn(mockTableItem);
final StubItem stubItem = generateRandomStubItem(itemId);
when(mockTableItem.toJSON()).thenReturn(dynamoDocumentStoreTemplate.itemToString(stubItem));
// When
final StubItem returnedItem = dynamoDocumentStoreTemplate.read(itemId, StubItem.class);
// Then
final ArgumentCaptor<GetItemSpec> getItemRequestCaptor = ArgumentCaptor.forClass(GetItemSpec.class);
verify(mockTable).getItem(getItemRequestCaptor.capture());
final GetItemSpec spec = getItemRequestCaptor.getValue();
assertEquals(1, spec.getKeyComponents().size());
assertEquals(itemId.value(), spec.getKeyComponents().iterator().next().getValue());
assertEquals(itemId.value(), returnedItem.getId());
assertEquals(stubItem.getStringProperty(), returnedItem.getStringProperty());
assertEquals(stubItem.getStringProperty2(), returnedItem.getStringProperty2());
assertEquals(stubItem.getStringSetProperty(), returnedItem.getStringSetProperty());
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotRead_withNonExistentItemExceptionNoItem() throws Exception {
// Given
final ItemId itemId = new ItemId(randomId());
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);<|fim▁hole|> final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
when(mockTable.getItem(any(GetItemSpec.class))).thenReturn(null);
NonExistentItemException thrownException = null;
// When
try {
dynamoDocumentStoreTemplate.read(itemId, StubItem.class);
} catch (final NonExistentItemException nonExistentItemException) {
thrownException = nonExistentItemException;
}
// Then
assertNotNull(thrownException);
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotRead_withNonExistentItemExceptionNoContent() throws Exception {
// Given
final ItemId itemId = new ItemId(randomId());
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final Item mockTableItem = mock(Item.class);
when(mockTable.getItem(any(GetItemSpec.class))).thenReturn(mockTableItem);
when(mockTableItem.toJSON()).thenReturn("");
NonExistentItemException thrownException = null;
// When
try {
dynamoDocumentStoreTemplate.read(itemId, StubItem.class);
} catch (final NonExistentItemException nonExistentItemException) {
thrownException = nonExistentItemException;
}
// Then
assertNotNull(thrownException);
}
@SuppressWarnings("deprecation")
@Test
public void shouldUpdate_withItem() {
// Given
final ItemId itemId = new ItemId(randomId());
final StubItem stubItem = generateRandomStubItem(itemId);
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Item mockTableItem = mock(Item.class);
when(mockTableItem.toJSON()).thenReturn(dynamoDocumentStoreTemplate.itemToString(stubItem));
// When
final StubItem returnedItem = dynamoDocumentStoreTemplate.update(stubItem);
// Then
final ArgumentCaptor<PutItemSpec> getItemRequestCaptor = ArgumentCaptor.forClass(PutItemSpec.class);
verify(mockTable).putItem(getItemRequestCaptor.capture());
final PutItemSpec spec = getItemRequestCaptor.getValue();
assertEquals(itemId.value(), spec.getItem().get("id"));
assertEquals(itemId.value(), returnedItem.getId());
assertEquals(stubItem.getStringProperty(), returnedItem.getStringProperty());
assertEquals(stubItem.getStringProperty2(), returnedItem.getStringProperty2());
assertEquals(stubItem.getStringSetProperty(), returnedItem.getStringSetProperty());
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotUpdate_withItem() {
// Given
final ItemId itemId = new ItemId(randomId());
final StubItem stubItem = generateRandomStubItem(itemId);
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Item mockTableItem = mock(Item.class);
when(mockTableItem.toJSON()).thenReturn(dynamoDocumentStoreTemplate.itemToString(stubItem));
doThrow(ConditionalCheckFailedException.class).when(mockTable).putItem(any(PutItemSpec.class));
OptimisticLockException thrownException = null;
// When
try {
dynamoDocumentStoreTemplate.update(stubItem);
} catch (final OptimisticLockException optimisticLockException) {
thrownException = optimisticLockException;
}
// Then
assertNotNull(thrownException);
}
@SuppressWarnings("deprecation")
@Test
public void shouldDelete_withItem() {
// Given
final ItemId itemId = new ItemId(randomId());
final StubItem stubItem = generateRandomStubItem(itemId);
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
// When
dynamoDocumentStoreTemplate.delete(stubItem);
// Then
final ArgumentCaptor<DeleteItemSpec> getItemRequestCaptor = ArgumentCaptor.forClass(DeleteItemSpec.class);
verify(mockTable).deleteItem(getItemRequestCaptor.capture());
}
private StubItem generateRandomStubItem(final ItemId itemId) {
final StubItem item = new StubItem();
item.setBooleanProperty(Randoms.randomBoolean());
item.setId(itemId.value());
item.setStringProperty(Randoms.randomString());
item.setStringProperty2(Randoms.randomString());
item.setVersion(Randoms.randomLong());
final Set<String> stringSet = new HashSet<String>();
for (int i = 0; i < Randoms.randomInt(20); i++) {
stringSet.add(Randoms.randomString());
}
item.setStringSetProperty(stringSet);
return item;
}
}<|fim▁end|> | final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
|
<|file_name|>sys_test_params.py<|end_file_name|><|fim▁begin|>'''-------------------------------------------------------------------------<|fim▁hole|>You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
Limitations under the License.
-------------------------------------------------------------------------'''
'''
IMPORTANT: Make sure the variables AUTH_PI and KEYSTONE_IP point to the system
you are testing!!!
'''
'''------------------------------------------------------------------------'''
# Establishing Swift connection, user ID, etc
PROXY_PROTOCOL = 'HTTP'
AUTH_PROTOCOL = 'HTTP'
DEV_AUTH_IP = '9.26.19.179'
AUTH_IP = DEV_AUTH_IP
PROXY_PORT = '80'
AUTH_PORT = '5000'
ACCOUNT = 'service'
USER_NAME = 'swift'
PASSWORD = 'passw0rd'<|fim▁end|> | Copyright IBM Corp. 2015, 2015 All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. |
<|file_name|>graphviz.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module provides linkage between rustc::middle::graph and
//! libgraphviz traits, specialized to attaching borrowck analysis
//! data to rendered labels.
/// For clarity, rename the graphviz crate locally to dot.
use graphviz as dot;
pub use middle::cfg::graphviz::{Node, Edge};
use middle::cfg::graphviz as cfg_dot;
use middle::borrowck;
use middle::borrowck::{BorrowckCtxt, LoanPath};
use middle::cfg::{CFGIndex};
use middle::dataflow::{DataFlowOperator, DataFlowContext, EntryOrExit};
use middle::dataflow;
use std::rc::Rc;
use std::str;
#[deriving(Show)]
pub enum Variant {
Loans,
Moves,
Assigns,
}
impl Variant {
pub fn short_name(&self) -> &'static str {
match *self {
Loans => "loans",
Moves => "moves",
Assigns => "assigns",
}
}
}
pub struct DataflowLabeller<'a, 'tcx: 'a> {
pub inner: cfg_dot::LabelledCFG<'a, 'tcx>,
pub variants: Vec<Variant>,
pub borrowck_ctxt: &'a BorrowckCtxt<'a, 'tcx>,
pub analysis_data: &'a borrowck::AnalysisData<'a, 'tcx>,
}
impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
fn dataflow_for(&self, e: EntryOrExit, n: &Node<'a>) -> String {
let id = n.val1().data.id;
debug!("dataflow_for({}, id={}) {}", e, id, self.variants);
let mut sets = "".to_string();
let mut seen_one = false;
for &variant in self.variants.iter() {
if seen_one { sets.push_str(" "); } else { seen_one = true; }
sets.push_str(variant.short_name());
sets.push_str(": ");
sets.push_str(self.dataflow_for_variant(e, n, variant).as_slice());
}
sets
}
fn dataflow_for_variant(&self, e: EntryOrExit, n: &Node, v: Variant) -> String {
let cfgidx = n.val0();
match v {
Loans => self.dataflow_loans_for(e, cfgidx),
Moves => self.dataflow_moves_for(e, cfgidx),
Assigns => self.dataflow_assigns_for(e, cfgidx),
}
}
fn build_set<O:DataFlowOperator>(&self,
e: EntryOrExit,
cfgidx: CFGIndex,
dfcx: &DataFlowContext<'a, 'tcx, O>,
to_lp: |uint| -> Rc<LoanPath>) -> String {
let mut saw_some = false;
let mut set = "{".to_string();
dfcx.each_bit_for_node(e, cfgidx, |index| {
let lp = to_lp(index);
if saw_some {
set.push_str(", ");
}
let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp);
set.push_str(loan_str.as_slice());
saw_some = true;
true
});
set.append("}")
}
fn dataflow_loans_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.loans;
let loan_index_to_path = |loan_index| {
let all_loans = &self.analysis_data.all_loans;
all_loans.get(loan_index).loan_path()
};
self.build_set(e, cfgidx, dfcx, loan_index_to_path)
}
fn dataflow_moves_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.move_data.dfcx_moves;
let move_index_to_path = |move_index| {
let move_data = &self.analysis_data.move_data.move_data;
let moves = move_data.moves.borrow();
let the_move = moves.get(move_index);
move_data.path_loan_path(the_move.path)
};
self.build_set(e, cfgidx, dfcx, move_index_to_path)<|fim▁hole|> let dfcx = &self.analysis_data.move_data.dfcx_assign;
let assign_index_to_path = |assign_index| {
let move_data = &self.analysis_data.move_data.move_data;
let assignments = move_data.var_assignments.borrow();
let assignment = assignments.get(assign_index);
move_data.path_loan_path(assignment.path)
};
self.build_set(e, cfgidx, dfcx, assign_index_to_path)
}
}
impl<'a, 'tcx> dot::Labeller<'a, Node<'a>, Edge<'a>> for DataflowLabeller<'a, 'tcx> {
fn graph_id(&'a self) -> dot::Id<'a> { self.inner.graph_id() }
fn node_id(&'a self, n: &Node<'a>) -> dot::Id<'a> { self.inner.node_id(n) }
fn node_label(&'a self, n: &Node<'a>) -> dot::LabelText<'a> {
let prefix = self.dataflow_for(dataflow::Entry, n);
let suffix = self.dataflow_for(dataflow::Exit, n);
let inner_label = self.inner.node_label(n);
inner_label
.prefix_line(dot::LabelStr(str::Owned(prefix)))
.suffix_line(dot::LabelStr(str::Owned(suffix)))
}
fn edge_label(&'a self, e: &Edge<'a>) -> dot::LabelText<'a> { self.inner.edge_label(e) }
}
impl<'a, 'tcx> dot::GraphWalk<'a, Node<'a>, Edge<'a>> for DataflowLabeller<'a, 'tcx> {
fn nodes(&self) -> dot::Nodes<'a, Node<'a>> { self.inner.nodes() }
fn edges(&self) -> dot::Edges<'a, Edge<'a>> { self.inner.edges() }
fn source(&self, edge: &Edge<'a>) -> Node<'a> { self.inner.source(edge) }
fn target(&self, edge: &Edge<'a>) -> Node<'a> { self.inner.target(edge) }
}<|fim▁end|> | }
fn dataflow_assigns_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String { |
<|file_name|>SFFerrors.py<|end_file_name|><|fim▁begin|><|fim▁hole|> self.msg = msg<|fim▁end|> | class Error(Exception):
def __init__(self, msg): |
<|file_name|>database.go<|end_file_name|><|fim▁begin|>package database
import (
"errors"
"time"
// DBの共通化のため
"github.com/Sirupsen/logrus"
_ "github.com/go-sql-driver/mysql"
// genmaiのサポートが危うくなっているため規模の大きいgormに移行
"fmt"
"github.com/cs3238-tsuzu/popcon-sc/lib/filesystem"
"github.com/cs3238-tsuzu/popcon-sc/lib/redis"
"github.com/jinzhu/gorm"
)
// DatabaseManager is a connector to this database
type DatabaseManager struct {
db *gorm.DB
fs *fs.MongoFSManager
redis *redis.RedisManager
logger func() *logrus.Entry
transactionStarted bool
}
func (dm *DatabaseManager) Close() {
dm.db.Close()
}
// NewDatabaseManager is a function to initialize database connections
// static function
func NewDatabaseManager(addr string, debugMode bool, fs *fs.MongoFSManager, redis *redis.RedisManager, logger func() *logrus.Entry) (*DatabaseManager, error) {
specifyError := func(cat string, err error) error {
return errors.New("In " + cat + ", " + err.Error())
}
dm := &DatabaseManager{
redis: redis,
fs: fs,
logger: logger,
}
var err error
cnt := 0
const RetryingMax = 1000
RETRY:
if cnt != 0 {
logger().Info("Waiting for MySQL Server Launching...", err.Error())
time.Sleep(3 * time.Second)
}
cnt++
// Database
dm.db, err = gorm.Open("mysql", addr)
if err != nil {
if cnt > RetryingMax {
return nil, specifyError("connection", err)
}
goto RETRY
}
dm.db.DB().SetConnMaxLifetime(3 * time.Minute)
dm.db.DB().SetMaxIdleConns(150)
dm.db.DB().SetMaxOpenConns(150)<|fim▁hole|> }
err = dm.db.DB().Ping()
if err != nil {
if cnt > RetryingMax {
return nil, specifyError("connection", err)
}
dm.db.Close()
goto RETRY
}
// user_and_group.go
// Create Users Table
err = dm.CreateUserTable()
if err != nil {
return nil, specifyError("user", err)
}
// session.go
// Create Sessions Table
err = dm.CreateSessionTable()
if err != nil {
return nil, specifyError("session", err)
}
// group.go
err = dm.CreateGroupTable()
if err != nil {
return nil, specifyError("group", err)
}
// news.go
err = dm.CreateNewsTable()
if err != nil {
return nil, specifyError("news", err)
}
err = dm.CreateContestTable()
if err != nil {
return nil, specifyError("contest", err)
}
err = dm.CreateContestProblemTable()
if err != nil {
return nil, specifyError("contest_problem", err)
}
err = dm.CreateSubmissionTable()
if err != nil {
return nil, specifyError("submission", err)
}
err = dm.CreateContestParticipationTable()
if err != nil {
return nil, specifyError("contest_participation", err)
}
err = dm.CreateLanguageTable()
if err != nil {
return nil, specifyError("language", err)
}
return dm, err
}
func (dm *DatabaseManager) BeginDM(f func(dm *DatabaseManager) error) (ret error) {
if dm.transactionStarted {
return ErrAlreadyTransactionBegun
}
db := dm.db.Begin()
if db.Error != nil {
return db.Error
}
clone := dm.Clone(db)
clone.transactionStarted = true
defer func() {
if err := recover(); err != nil {
db.Rollback()
e, ok := err.(error)
if ok {
ret = e
} else {
ret = errors.New(fmt.Sprint(err))
}
}
}()
err := f(clone)
if err != nil {
db.Rollback()
return err
}
err = db.Commit().Error
return err
}
func (dm *DatabaseManager) BeginDMIfNotStarted(f func(dm *DatabaseManager) error) error {
if err := dm.BeginDM(f); err != nil {
if err == ErrAlreadyTransactionBegun {
return f(dm)
}
return err
}
return nil
}
func (dm *DatabaseManager) Begin(f func(db *gorm.DB) error) error {
return dm.BeginDM(func(dm *DatabaseManager) error {
return f(dm.db)
})
}
func (dm *DatabaseManager) BeginIfNotStarted(f func(dm *gorm.DB) error) error {
if err := dm.Begin(f); err != nil {
if err == ErrAlreadyTransactionBegun {
return f(dm.db)
}
return err
}
return nil
}
func (dm *DatabaseManager) Logger() *logrus.Entry {
return dm.logger()
}
func (dm *DatabaseManager) Clone(db *gorm.DB) *DatabaseManager {
return &DatabaseManager{
db: db,
fs: dm.fs,
redis: dm.redis,
logger: dm.logger,
}
}
func (dm *DatabaseManager) DB() *gorm.DB {
return dm.db
}<|fim▁end|> | if debugMode {
dm.db.LogMode(true) |
<|file_name|>protectionIntentResource.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* Base class for backup ProtectionIntent.
*
* @extends models['Resource']
*/
class ProtectionIntentResource extends models['Resource'] {
/**
* Create a ProtectionIntentResource.
* @member {object} [properties] ProtectionIntentResource properties
* @member {string} [properties.backupManagementType] Type of backup
* managemenent for the backed up item. Possible values include: 'Invalid',
* 'AzureIaasVM', 'MAB', 'DPM', 'AzureBackupServer', 'AzureSql',
* 'AzureStorage', 'AzureWorkload', 'DefaultBackup'
* @member {string} [properties.sourceResourceId] ARM ID of the resource to
* be backed up.
* @member {string} [properties.itemId] ID of the item which is getting
* protected, In case of Azure Vm , it is ProtectedItemId
* @member {string} [properties.policyId] ID of the backup policy with which
* this item is backed up.
* @member {string} [properties.protectionState] Backup state of this backup
* item. Possible values include: 'Invalid', 'NotProtected', 'Protecting',
* 'Protected', 'ProtectionFailed'
* @member {string} [properties.protectionIntentItemType] Polymorphic
* Discriminator
*/
constructor() {
super();
}
/**
* Defines the metadata of ProtectionIntentResource
*
* @returns {object} metadata of ProtectionIntentResource
*
*/
mapper() {
return {
required: false,
serializedName: 'ProtectionIntentResource',
type: {
name: 'Composite',
className: 'ProtectionIntentResource',
modelProperties: {
id: {
required: false,
readOnly: true,
serializedName: 'id',
type: {
name: 'String'
}
},
name: {
required: false,
readOnly: true,
serializedName: 'name',
type: {
name: 'String'
}
},
type: {
required: false,
readOnly: true,
serializedName: 'type',
type: {
name: 'String'
}
},
location: {
required: false,
serializedName: 'location',
type: {
name: 'String'
}
},
tags: {
required: false,
serializedName: 'tags',
type: {
name: 'Dictionary',
value: {
required: false,
serializedName: 'StringElementType',
type: {
name: 'String'
}
}
}
},
eTag: {
required: false,
serializedName: 'eTag',
type: {
name: 'String'
}<|fim▁hole|> required: false,
serializedName: 'properties',
type: {
name: 'Composite',
polymorphicDiscriminator: {
serializedName: 'protectionIntentItemType',
clientName: 'protectionIntentItemType'
},
uberParent: 'ProtectionIntent',
className: 'ProtectionIntent'
}
}
}
}
};
}
}
module.exports = ProtectionIntentResource;<|fim▁end|> | },
properties: { |
<|file_name|>relations.py<|end_file_name|><|fim▁begin|>from flask import Blueprint, render_template, request
from werkzeug.exceptions import NotFound
from grano.lib.pager import Pager
from grano.model import Relation<|fim▁hole|>
@relations.route('/relations/<id>')
def view(id):
relation = Relation.by_id(id)
if relation is None:
raise NotFound()
return render_template('relation.html', relation=relation)<|fim▁end|> |
relations = Blueprint('relations', __name__, static_folder='../static', template_folder='../templates')
|
<|file_name|>push_link.js<|end_file_name|><|fim▁begin|>import { get_definition } from './../base';
export const push_link = (
oid, linkurl, linkname, onmenu='true', instance_name,
when, additional_args, description
) => get_definition({
oid,
linkurl,
linkname,<|fim▁hole|> onmenu,
instance_name
},
{
label: 'VersionOne - Push Link',
method: 'push_link',
module: 'main',
name: 'v1plugin'
},
when, additional_args, description
);<|fim▁end|> | |
<|file_name|>customformlayout.tsx<|end_file_name|><|fim▁begin|>import React, { useState } from 'react';
import { AutoFields, ColorPickerField } from '@react-page/editor';
// The editor core
import type { Value, CellPlugin } from '@react-page/editor';
import Editor, { createValue } from '@react-page/editor';
// import the main css, uncomment this: (this is commented in the example because of https://github.com/vercel/next.js/issues/19717)
// import '@react-page/editor/lib/index.css';
// The rich text area plugin
import slate from '@react-page/plugins-slate';
import PageLayout from '../../components/PageLayout';
const customContentPluginWithSpecialForm: CellPlugin<{
firstName: string;
lastName: string;
street: string;
zip: string;
city: string;
country: string;
age: number;
}> = {
Renderer: ({ data }) => (
<div>
<h3>Name</h3>
<p>Firstname: {data.firstName}</p>
<p>Lastname: {data.lastName}</p>
<p>Age: {data.age}</p>
<h3>Adress</h3>
<p>{data.street}</p>
<p>{data.lastName}</p>
<p>Age: {data.age}</p>
</div>
),
id: 'custom-content-plugin-with-custom-layout',
title: 'Custom content plugin',
description: 'Some custom content plugin with multiple controls',
version: 1,
controls: {
type: 'autoform',
schema: {
properties: {
firstName: { type: 'string' },
lastName: { type: 'string' },
street: { type: 'string' },
zip: { type: 'string' },
city: { type: 'string' },
country: { type: 'string' },
age: {
title: 'Age in years',
type: 'integer',
minimum: 0,
},
},
required: [],<|fim▁hole|> <p>Personal information:</p>
<AutoFields fields={['firstName', 'lastName', 'age']} />
</div>
<div style={{ flex: 1 }}>
<p>Adress:</p>
<AutoFields omitFields={['firstName', 'lastName', 'age']} />
</div>
</div>
),
},
};
const cellPlugins = [slate(), customContentPluginWithSpecialForm];
const INITIAL_VALUE = createValue(
{
rows: [
[
{
plugin: customContentPluginWithSpecialForm,
},
],
],
},
{
cellPlugins,
lang: 'default',
}
);
export default function CustomFormLayout() {
const [value, setValue] = useState<Value>(INITIAL_VALUE);
return (
<PageLayout>
<Editor cellPlugins={cellPlugins} value={value} onChange={setValue} />
</PageLayout>
);
}<|fim▁end|> | },
Content: () => (
<div style={{ display: 'flex' }}>
<div style={{ flex: 1, marginRight: 20 }}> |
<|file_name|>fcm.py<|end_file_name|><|fim▁begin|>import logging
import sqlite3
from pyfcm import FCMNotification
def insert_token(token):
try:
con = sqlite3.connect('fcm.db')
cur = con.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS tokens(token TEXT)')
cur.execute('INSERT INTO tokens VALUES (?)', (token, ))<|fim▁hole|> if cur:
cur.close()
if con:
con.close()
def notify_all(message_title=None, message_body=None):
con = sqlite3.connect('fcm.db')
con.row_factory = lambda cursor, row: row[0]
cur = con.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS tokens(token TEXT)')
cur.execute('SELECT * FROM tokens')
registration_ids = [row for row in cur.fetchall()]
if len(registration_ids) > 0:
noti = FCMNotification('API-KEY')
result = noti.notify_multiple_devices(registration_ids=registration_ids,
message_title=message_title,
message_body=message_body)
return result<|fim▁end|> |
con.commit()
finally: |
<|file_name|>CollectorStream.js<|end_file_name|><|fim▁begin|>/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.<|fim▁hole|>
/*
* Copyright (c) 2015, Joyent, Inc.
*/
/*
* CollectorStream: transform that collects all input and makes it available as
* a single string
*/
var readable = require('readable-stream');
var util = require('util');
module.exports = CollectorStream;
function CollectorStream(options)
{
readable.Transform.call(this, options);
this.data = '';
}
util.inherits(CollectorStream, readable.Transform);
CollectorStream.prototype._transform = function (chunk, encoding, done)
{
this.data += chunk;
done();
};
CollectorStream.prototype._flush = function (callback)
{
callback();
};<|fim▁end|> | */ |
<|file_name|>Deploy.py<|end_file_name|><|fim▁begin|>import pathlib
from typing import Optional
from cumulusci.core.exceptions import TaskOptionsError
from cumulusci.core.utils import process_bool_arg, process_list_arg
from cumulusci.salesforce_api.metadata import ApiDeploy
from cumulusci.salesforce_api.package_zip import MetadataPackageZipBuilder
from cumulusci.tasks.salesforce.BaseSalesforceMetadataApiTask import (
BaseSalesforceMetadataApiTask,
)
class Deploy(BaseSalesforceMetadataApiTask):
api_class = ApiDeploy
task_options = {
"path": {
"description": "The path to the metadata source to be deployed",
"required": True,
},
"unmanaged": {
"description": "If True, changes namespace_inject to replace tokens with a blank string"
},
"namespace_inject": {
"description": "If set, the namespace tokens in files and filenames are replaced with the namespace's prefix"
},
"namespace_strip": {
"description": "If set, all namespace prefixes for the namespace specified are stripped from files and filenames"
},
"check_only": {
"description": "If True, performs a test deployment (validation) of components without saving the components in the target org"
},
"test_level": {
"description": "Specifies which tests are run as part of a deployment. Valid values: NoTestRun, RunLocalTests, RunAllTestsInOrg, RunSpecifiedTests."
},
"specified_tests": {
"description": "Comma-separated list of test classes to run upon deployment. Applies only with test_level set to RunSpecifiedTests."
},
"static_resource_path": {
"description": "The path where decompressed static resources are stored. Any subdirectories found will be zipped and added to the staticresources directory of the build."
},
"namespaced_org": {
"description": "If True, the tokens %%%NAMESPACED_ORG%%% and ___NAMESPACED_ORG___ will get replaced with the namespace. The default is false causing those tokens to get stripped and replaced with an empty string. Set this if deploying to a namespaced scratch org or packaging org."
},
"clean_meta_xml": {
"description": "Defaults to True which strips the <packageVersions/> element from all meta.xml files. The packageVersion element gets added automatically by the target org and is set to whatever version is installed in the org. To disable this, set this option to False"
},
}
namespaces = {"sf": "http://soap.sforce.com/2006/04/metadata"}
def _init_options(self, kwargs):
super(Deploy, self)._init_options(kwargs)
self.check_only = process_bool_arg(self.options.get("check_only", False))
self.test_level = self.options.get("test_level")
if self.test_level and self.test_level not in [
"NoTestRun",
"RunLocalTests",
"RunAllTestsInOrg",
"RunSpecifiedTests",
]:
raise TaskOptionsError(
f"Specified test run level {self.test_level} is not valid."
)
self.specified_tests = process_list_arg(self.options.get("specified_tests", []))
if bool(self.specified_tests) != (self.test_level == "RunSpecifiedTests"):
raise TaskOptionsError(
"The specified_tests option and test_level RunSpecifiedTests must be used together."
)
self.options["namespace_inject"] = (
self.options.get("namespace_inject")
or self.project_config.project__package__namespace
)
def _get_api(self, path=None):
if not path:
path = self.options.get("path")
package_zip = self._get_package_zip(path)
if package_zip is not None:
self.logger.info("Payload size: {} bytes".format(len(package_zip)))
else:
self.logger.warning("Deployment package is empty; skipping deployment.")
return
return self.api_class(
self,
package_zip,
purge_on_delete=False,
check_only=self.check_only,
test_level=self.test_level,
run_tests=self.specified_tests,
)
def _has_namespaced_package(self, ns: Optional[str]) -> bool:
if "unmanaged" in self.options:
return not process_bool_arg(self.options.get("unmanaged", True))
return bool(ns) and ns in self.org_config.installed_packages
def _is_namespaced_org(self, ns: Optional[str]) -> bool:
if "namespaced_org" in self.options:
return process_bool_arg(self.options.get("namespaced_org", False))
return bool(ns) and ns == self.org_config.namespace
def _get_package_zip(self, path):
assert path, f"Path should be specified for {self.__class__.name}"
if not pathlib.Path(path).exists():
self.logger.warning(f"{path} not found.")
return
namespace = self.options["namespace_inject"]
options = {
**self.options,
"clean_meta_xml": process_bool_arg(
self.options.get("clean_meta_xml", True)
),
"namespace_inject": namespace,
"unmanaged": not self._has_namespaced_package(namespace),
"namespaced_org": self._is_namespaced_org(namespace),
}
package_zip = MetadataPackageZipBuilder(
path=path, options=options, logger=self.logger
)
if not package_zip.zf.namelist():<|fim▁hole|> return
return package_zip.as_base64()
def freeze(self, step):
steps = super(Deploy, self).freeze(step)
for step in steps:
if step["kind"] == "other":
step["kind"] = "metadata"
return steps<|fim▁end|> | |
<|file_name|>gpio.py<|end_file_name|><|fim▁begin|>def gpio_init(pin, output):<|fim▁hole|> f.write(b"out" if output else b"in")
except Exception as e:
print(f"Failed to set gpio {pin} direction: {e}")
def gpio_set(pin, high):
try:
with open(f"/sys/class/gpio/gpio{pin}/value", 'wb') as f:
f.write(b"1" if high else b"0")
except Exception as e:
print(f"Failed to set gpio {pin} value: {e}")<|fim▁end|> | try:
with open(f"/sys/class/gpio/gpio{pin}/direction", 'wb') as f: |
<|file_name|>comp-3104.component.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-comp-3104',
templateUrl: './comp-3104.component.html',
styleUrls: ['./comp-3104.component.css']
})
export class Comp3104Component implements OnInit {
constructor() { }
<|fim▁hole|>
}<|fim▁end|> | ngOnInit() {
} |
<|file_name|>dead-code-one-arm-if.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|>pub fn main() { if 1 == 1 { return; } println!("Paul is dead"); }<|fim▁end|> | // except according to those terms.
// ignore-test #12920
|
<|file_name|>simdty.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 chacha20-poly1305-aead Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
#![allow(dead_code)]
#![allow(non_camel_case_types)]
use as_bytes::Safe;
#[cfg(feature = "simd")]
macro_rules! decl_simd {
($($decl:item)*) => {
$(
#[derive(Clone, Copy, Debug, Default)]
#[repr(simd)]
$decl
)*
}
}
#[cfg(not(feature = "simd"))]
macro_rules! decl_simd {
($($decl:item)*) => {
$(
#[derive(Clone, Copy, Debug, Default)]
#[repr(C)]
$decl
)*
}
}
decl_simd! {
pub struct Simd4<T>(pub T, pub T, pub T, pub T);
pub struct Simd8<T>(pub T, pub T, pub T, pub T,
pub T, pub T, pub T, pub T);
pub struct Simd16<T>(pub T, pub T, pub T, pub T,
pub T, pub T, pub T, pub T,
pub T, pub T, pub T, pub T,
pub T, pub T, pub T, pub T);
}
pub type u32x4 = Simd4<u32>;
pub type u16x8 = Simd8<u16>;
pub type u8x16 = Simd16<u8>;<|fim▁hole|>#[cfg_attr(feature = "clippy", allow(inline_always))]
impl<T> Simd4<T> {
#[inline(always)]
pub fn new(e0: T, e1: T, e2: T, e3: T) -> Simd4<T> {
Simd4(e0, e1, e2, e3)
}
}
unsafe impl<T: Safe> Safe for Simd4<T> {}
unsafe impl<T: Safe> Safe for Simd8<T> {}
unsafe impl<T: Safe> Safe for Simd16<T> {}<|fim▁end|> | |
<|file_name|>all_79.js<|end_file_name|><|fim▁begin|>var searchData=
[<|fim▁hole|><|fim▁end|> | ['y',['y',['../group___accelerometer_service.html#a58008ae111afcb60b47419541c48aa91',1,'AccelData::y()'],['../group___graphics_types.html#a94afc39fa39df567b9e78702d1f07b3e',1,'GPoint::y()']]],
['year_5funit',['YEAR_UNIT',['../group___wall_time.html#gga0423d00e0eb199de523a92031b5a1107a652f00ea2c629930a32a684f9d8c876d',1,'pebble.h']]]
]; |
<|file_name|>network.go<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2015, Brian Hummer (brian@redq.me)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE<|fim▁hole|>
package neat
import "math"
// Represents a neural network
type Network interface {
// Activates the neural network using the inputs. Returns the ouput values.
Activate(inputs []float64) (outputs []float64, err error)
}
type NeuronType byte
const (
Bias NeuronType = iota + 1 // 1
Input // 2
Hidden // 3
Output // 4
)
func (n NeuronType) String() string {
switch n {
case Bias:
return "Bias"
case Input:
return "Input"
case Hidden:
return "Hidden"
case Output:
return "Output"
default:
return "Unknown NeuronType"
}
}
type ActivationType byte
const (
Direct ActivationType = iota + 1 // 1
SteependSigmoid // 2
Sigmoid // 3
Tanh // 4
InverseAbs // 5
)
var (
Activations []ActivationType = []ActivationType{SteependSigmoid, Sigmoid, Tanh, InverseAbs}
)
func (a ActivationType) String() string {
switch a {
case Direct:
return "Direct"
case SteependSigmoid:
return "Steepend Sigmoid"
case Sigmoid:
return "Sigmoid"
case Tanh:
return "Tanh"
case InverseAbs:
return "Inverse ABS"
default:
return "Unknown ActivationType"
}
}
func (a ActivationType) Range() (float64, float64) {
switch a {
case Direct:
return math.Inf(-1), math.Inf(1)
case SteependSigmoid:
return 0, 1.0
case Sigmoid:
return 0, 1.0
case Tanh:
return -1.0, 1.0
case InverseAbs:
return -1.0, 1.0
default:
return math.NaN(), math.NaN()
}
}
func DirectActivation(x float64) float64 { return x }
func SigmoidActivation(x float64) float64 { return 1.0 / (1.0 + exp1(-x)) }
func SteependSigmoidActivation(x float64) float64 { return 1.0 / (1.0 + exp1(-4.9*x)) }
func TanhActivation(x float64) float64 { return math.Tanh(0.9 * x) }
func InverseAbsActivation(x float64) float64 { return x / (1.0 + math.Abs(x)) }
// Speed up over math.Exp by using less precision
// https://codingforspeed.com/using-faster-exponential-approximation/
func exp1(x float64) float64 {
x = 1.0 + x/256.0
x *= x
x *= x
x *= x
x *= x
x *= x
x *= x
x *= x
x *= x
return x
}
func exp2(x float64) float64 {
x = 1.0 + x/1024
x *= x
x *= x
x *= x
x *= x
x *= x
x *= x
x *= x
x *= x
x *= x
x *= x
return x
}<|fim▁end|> | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ |
<|file_name|>resourceConfigurationImpl.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import Event, { Emitter } from 'vs/base/common/event';
import { Disposable } from 'vs/base/common/lifecycle';
import URI from 'vs/base/common/uri';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { ITextResourceConfigurationService } from 'vs/editor/common/services/resourceConfiguration';
import { IPosition, Position } from 'vs/editor/common/core/position';
import { IModeService } from 'vs/editor/common/services/modeService';
import { IModelService } from 'vs/editor/common/services/modelService';
export class TextResourceConfigurationService extends Disposable implements ITextResourceConfigurationService {
public _serviceBrand: any;
private readonly _onDidUpdateConfiguration: Emitter<void> = this._register(new Emitter<void>());
public readonly onDidUpdateConfiguration: Event<void> = this._onDidUpdateConfiguration.event;<|fim▁hole|> @IModeService private modeService: IModeService,
) {
super();
this._register(this.configurationService.onDidUpdateConfiguration(() => this._onDidUpdateConfiguration.fire()));
}
getConfiguration<T>(resource: URI, section?: string): T
getConfiguration<T>(resource: URI, at?: IPosition, section?: string): T
getConfiguration<T>(resource: URI, arg2?: any, arg3?: any): T {
const position: IPosition = Position.isIPosition(arg2) ? arg2 : null;
const section: string = position ? (typeof arg3 === 'string' ? arg3 : void 0) : (typeof arg2 === 'string' ? arg2 : void 0);
const language = resource ? this.getLanguage(resource, position) : void 0;
return this.configurationService.getConfiguration<T>(section, { resource, overrideIdentifier: language });
}
private getLanguage(resource: URI, position: IPosition): string {
const model = this.modelService.getModel(resource);
if (model) {
return position ? this.modeService.getLanguageIdentifier(model.getLanguageIdAtPosition(position.lineNumber, position.column)).language : model.getLanguageIdentifier().language;
}
return this.modeService.getModeIdByFilenameOrFirstLine(resource.fsPath);
}
}<|fim▁end|> |
constructor(
@IConfigurationService private configurationService: IConfigurationService,
@IModelService private modelService: IModelService, |
<|file_name|>application.reducer.spec.ts<|end_file_name|><|fim▁begin|>import {applicationReducer} from "./application.reducer";
import {ApplicationContainerState} from "../../state/ContainersState";
import {EnableBusyFlag, DisableBusyFlag} from "../../actions/containers/application";
import {Dispatcher} from "@ngrx/store";
let deepfreeze = require("deep-freeze");
describe("reducer: containers > applicationReducer", () => {
describe("case CONTAINER_APPLICATION_ENABLE_BUSY_FLAG", () => {
it("should return a new applicationstate with the isBusyflag to true", () => {
let initialState: ApplicationContainerState = {
isBusy: false
};
deepfreeze(initialState);
let changedState: ApplicationContainerState = applicationReducer(initialState, new EnableBusyFlag());
expect(initialState).not.toBe(changedState);
expect(changedState.isBusy).toBe(true);
});
});
describe("case CONTAINER_APPLICATION_DISABLE_BUSY_FLAG", () => {<|fim▁hole|> let initialState: ApplicationContainerState = {
isBusy: false
};
deepfreeze(initialState);
let changedState: ApplicationContainerState = applicationReducer(initialState, new DisableBusyFlag());
expect(initialState).not.toBe(changedState);
expect(changedState.isBusy).toBe(false);
});
});
describe("case default", () => {
it("should return the exact same reference as before", () => {
let initialState: ApplicationContainerState = {
isBusy: false
};
deepfreeze(initialState);
let changedState: ApplicationContainerState = applicationReducer(initialState, {type: null} as any);
expect(changedState).toBe(initialState);
});
});
describe("case @ngrx/store/init", () => {
it("should return the default value for the state param", () => {
let changedState: ApplicationContainerState = applicationReducer(undefined, {type: Dispatcher.INIT} as any);
expect(changedState.isBusy).toBeFalsy();
});
});
});<|fim▁end|> | it("should return a new applicationstate with the isBusyflag to true", () => { |
<|file_name|>test_yaml.py<|end_file_name|><|fim▁begin|>#!/bin/env python
#-*- coding=utf-8 -*-
import yaml
data = """
f5:
# 蓝汛f5信息
- ip: 192.168.0.1
dc: lx
credentials:
username: huzichun
password: huzichun!@#
role: master
# 世纪互联f5信息
- ip: 192.168.0.2
dc: sjhl
credentials:
username: huzichun
password: huzichun!@#
role: slave
# 移动f5信息
- ip: 192.168.0.3
dc: yd
credentials:
username: huzichun
password: huzichun!@#
role: master
"""
print yaml.dump(yaml.load(data))
f = file('f5.yaml', 'w')
yaml.dump(yaml.load(data), f)
f.close()
f = file('f5.yaml', 'r')
infos = yaml.load(f)
#print infos
for info in infos["f5"]:
if info["ip"] == "192.168.0.2":
print info["credentials"]["username"], info["credentials"]["password"]<|fim▁hole|><|fim▁end|> |
f.close() |
<|file_name|>alertshandler.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# Copyright (c) 2022 Roberto Riggio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Exposes a RESTful interface ."""
import uuid
import empower_core.apimanager.apimanager as apimanager
# pylint: disable=W0223
class AlertsHandler(apimanager.APIHandler):
"""Alerts handler"""
URLS = [r"/api/v1/alerts/?",
r"/api/v1/alerts/([a-zA-Z0-9-]*)/?"]
@apimanager.validate(min_args=0, max_args=1)
def get(self, *args, **kwargs):
"""Lists all the alerts.
Args:
[0], the alert id (optional)
Example URLs:
GET /api/v1/alerts
GET /api/v1/alerts/52313ecb-9d00-4b7d-b873-b55d3d9ada26
"""
return self.service.alerts \
if not args else self.service.alerts[uuid.UUID(args[0])]
@apimanager.validate(returncode=201, min_args=0, max_args=1)
def post(self, *args, **kwargs):
"""Create a new alert.
Args:
[0], the alert id (optional)
Request:
version: protocol version (1.0)
alert: the alert
"""
<|fim▁hole|> else:
alert = self.service.create(uuid=alert_id)
self.set_header("Location", "/api/v1/alerts/%s" % alert.uuid)
@apimanager.validate(returncode=204, min_args=0, max_args=1)
def delete(self, *args, **kwargs):
"""Delete one or all alerts.
Args:
[0], the alert id (optional)
Example URLs:
DELETE /api/v1/alerts
DELETE /api/v1/alerts/52313ecb-9d00-4b7d-b873-b55d3d9ada26
"""
if args:
self.service.remove(uuid.UUID(args[0]))
else:
self.service.remove_all()<|fim▁end|> | alert_id = uuid.UUID(args[0]) if args else uuid.uuid4()
if 'alert' in kwargs:
alert = self.service.create(uuid=alert_id, alert=kwargs['alert']) |
<|file_name|>BaseContainer.ts<|end_file_name|><|fim▁begin|>import { Component } from 'react'
<|fim▁hole|>
export default class extends Component<any,any> {
}<|fim▁end|> | |
<|file_name|>WeatherDomainTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2017, Miguel Gamboa
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import org.junit.Test;
import util.Countify;
import util.FileRequest;
import util.ICounter;
import weather.WeatherService;
import weather.data.WeatherWebApi;
import weather.model.Location;
import weather.model.WeatherInfo;
import java.time.LocalDate;
import static java.lang.System.out;
import static java.time.LocalDate.of;
import static org.junit.Assert.assertEquals;
import static util.queries.LazyQueries.count;
import static util.queries.LazyQueries.distinct;
import static util.queries.LazyQueries.filter;
import static util.queries.LazyQueries.map;<|fim▁hole|>import static util.queries.LazyQueries.skip;
/**
* @author Miguel Gamboa
* created on 29-03-2017
*/
public class WeatherDomainTest {
@Test
public void testWeatherService(){
/**
* Arrange WeatherService --> WeatherWebApi --> Countify --> FileRequest
*/
ICounter<String, Iterable<String>> req = Countify.of(new FileRequest()::getContent);
WeatherService api = new WeatherService(new WeatherWebApi(req::apply));
/**
* Act and Assert
* Counts 0 request while iterator() is not consumed
*/
Iterable<Location> locals = api.search("Porto");
assertEquals(0, req.getCount());
locals = filter(locals, l -> l.getLatitude() > 0 );
assertEquals(0, req.getCount());
/**
* Counts 1 request when iterate to get the first Location
*/
Location loc = locals.iterator().next();
assertEquals(1, req.getCount());
Iterable<WeatherInfo> infos = api.pastWeather(loc.getLatitude(), loc.getLongitude(), of(2017,02,01), of(2017,02,28));
assertEquals(1, req.getCount());
infos = filter(infos, info -> info.getDescription().toLowerCase().contains("sun"));
assertEquals(1, req.getCount());
Iterable<Integer> temps = map(infos, WeatherInfo::getTempC);
assertEquals(1, req.getCount());
temps = distinct(temps);
assertEquals(1, req.getCount());
/**
* When we iterate over the pastWeather then we make one more request
*/
assertEquals(5, count(temps)); // iterates all items
assertEquals(2, req.getCount());
assertEquals((long) 21, (long) skip(temps, 2).iterator().next()); // another iterator
assertEquals(3, req.getCount());
temps.forEach(System.out::println); // iterates all items
assertEquals(4, req.getCount());
}
}<|fim▁end|> | |
<|file_name|>runReadyImage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# This file should be compatible with both Python 2 and 3.
# If it is not, please file a bug report.
"""
Contains code that prepairs a subuser's image to be run.
"""
#external imports
import os
#internal imports
from subuserlib.classes.userOwnedObject import UserOwnedObject
class RunReadyImage(UserOwnedObject):
def __init__(self,user,subuser):
self.__subuser = subuser
self.__id = None
UserOwnedObject.__init__(self,user)
def setup(self):
if not "run-ready-image-id" in self.getSubuser().getRuntimeCache():
self.__id = self.build()
self.getSubuser().getRuntimeCache()["run-ready-image-id"] = self.__id
self.getSubuser().getRuntimeCache().save()
def getSubuser(self):
return self.__subuser
def getId(self):
if not self.__id:
self.__id = self.getSubuser().getRuntimeCache()["run-ready-image-id"]<|fim▁hole|> def generateImagePreparationDockerfile(self):
"""
There is still some preparation that needs to be done before an image is ready to be run. But this preparation requires run time information, so we cannot preform that preparation at build time.
"""
dockerfileContents = "FROM "+self.getSubuser().getImageId()+"\n"
dockerfileContents += "RUN useradd --uid="+str(self.getUser().getEndUser().uid)+" "+self.getUser().getEndUser().name+" ;export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo uid exists ; elif [ $exitstatus -eq 9 ]; then echo username exists. ; else exit $exitstatus ; fi\n"
dockerfileContents += "RUN test -d "+self.getUser().getEndUser().homeDir+" || mkdir "+self.getUser().getEndUser().homeDir+" && chown "+self.getUser().getEndUser().name+" "+self.getUser().getEndUser().homeDir+"\n"
if self.getSubuser().getPermissions()["serial-devices"]:
dockerfileContents += "RUN groupadd dialout; export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo gid exists ; elif [ $exitstatus -eq 9 ]; then echo groupname exists. ; else exit $exitstatus ; fi\n"
dockerfileContents += "RUN groupadd uucp; export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo gid exists ; elif [ $exitstatus -eq 9 ]; then echo groupname exists. ; else exit $exitstatus ; fi\n"
dockerfileContents += "RUN usermod -a -G dialout "+self.getUser().getEndUser().name+"\n"
dockerfileContents += "RUN usermod -a -G uucp "+self.getUser().getEndUser().name+"\n"
if self.getSubuser().getPermissions()["sudo"]:
dockerfileContents += "RUN (umask 337; echo \""+self.getUser().getEndUser().name+" ALL=(ALL) NOPASSWD: ALL\" > /etc/sudoers.d/allowsudo )\n"
return dockerfileContents
def build(self):
"""
Returns the Id of the Docker image to be run.
"""
return self.getUser().getDockerDaemon().build(None,quietClient=True,useCache=True,forceRm=True,rm=True,dockerfile=self.generateImagePreparationDockerfile())<|fim▁end|> | return self.__id
|
<|file_name|>command.rs<|end_file_name|><|fim▁begin|>use std::ffi::OsStr;
use std::fs::File;
use std::io::{self, Write, BufRead, BufReader, Seek, SeekFrom};
use std::process::{self, Command, Stdio};
use std::time::Instant;
use regex::Regex;
use tempfile::tempfile;
use Cfg;
use errors::*;
use notifications::*;
use rustup_utils;
use telemetry::{Telemetry, TelemetryEvent};
pub fn run_command_for_dir<S: AsRef<OsStr>>(cmd: Command,
arg0: &str,
args: &[S],
cfg: &Cfg) -> Result<()> {
if (arg0 == "rustc" || arg0 == "rustc.exe") && try!(cfg.telemetry_enabled()) {
return telemetry_rustc(cmd, arg0, args, cfg);
}
run_command_for_dir_without_telemetry(cmd, arg0, args)<|fim▁hole|>fn telemetry_rustc<S: AsRef<OsStr>>(mut cmd: Command,
arg0: &str,
args: &[S], cfg: &Cfg) -> Result<()> {
#[cfg(unix)]
fn file_as_stdio(file: &File) -> Stdio {
use std::os::unix::io::{AsRawFd, FromRawFd};
unsafe { Stdio::from_raw_fd(file.as_raw_fd()) }
}
#[cfg(windows)]
fn file_as_stdio(file: &File) -> Stdio {
use std::os::windows::io::{AsRawHandle, FromRawHandle};
unsafe { Stdio::from_raw_handle(file.as_raw_handle()) }
}
let now = Instant::now();
cmd.args(args);
let has_color_args = args.iter().any(|e| {
let e = e.as_ref().to_str().unwrap_or("");
e.starts_with("--color")
});
if stderr_isatty() && !has_color_args
{
cmd.arg("--color");
cmd.arg("always");
}
let mut cmd_err_file = tempfile().unwrap();
let cmd_err_stdio = file_as_stdio(&cmd_err_file);
// FIXME rust-lang/rust#32254. It's not clear to me
// when and why this is needed.
let mut cmd = cmd.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(cmd_err_stdio)
.spawn()
.unwrap();
let status = cmd.wait();
let duration = now.elapsed();
let ms = (duration.as_secs() as u64 * 1000) + (duration.subsec_nanos() as u64 / 1000 / 1000);
let t = Telemetry::new(cfg.rustup_dir.join("telemetry"));
match status {
Ok(status) => {
let exit_code = status.code().unwrap_or(1);
let re = Regex::new(r"\[(?P<error>E.{4})\]").unwrap();
let mut buffer = String::new();
// Chose a HashSet instead of a Vec to avoid calls to sort() and dedup().
// The HashSet should be faster if there are a lot of errors, too.
let mut errors: Vec<String> = Vec::new();
let stderr = io::stderr();
let mut handle = stderr.lock();
cmd_err_file.seek(SeekFrom::Start(0)).unwrap();
let mut buffered_stderr = BufReader::new(cmd_err_file);
while buffered_stderr.read_line(&mut buffer).unwrap() > 0 {
let b = buffer.to_owned();
buffer.clear();
let _ = handle.write(b.as_bytes());
if let Some(caps) = re.captures(&b) {
if !caps.is_empty() {
errors.push(caps.name("error").unwrap_or("").to_owned());
}
};
}
let e = if errors.is_empty() { None } else { Some(errors) };
let te = TelemetryEvent::RustcRun { duration_ms: ms,
exit_code: exit_code,
errors: e };
let _ = t.log_telemetry(te).map_err(|xe| {
(cfg.notify_handler)(Notification::TelemetryCleanupError(&xe));
});
process::exit(exit_code);
},
Err(e) => {
let exit_code = e.raw_os_error().unwrap_or(1);
let te = TelemetryEvent::RustcRun { duration_ms: ms,
exit_code: exit_code,
errors: None };
let _ = t.log_telemetry(te).map_err(|xe| {
(cfg.notify_handler)(Notification::TelemetryCleanupError(&xe));
});
Err(e).chain_err(|| rustup_utils::ErrorKind::RunningCommand {
name: OsStr::new(arg0).to_owned(),
})
},
}
}
fn run_command_for_dir_without_telemetry<S: AsRef<OsStr>>(
mut cmd: Command, arg0: &str, args: &[S]) -> Result<()>
{
cmd.args(args);
// FIXME rust-lang/rust#32254. It's not clear to me
// when and why this is needed.
cmd.stdin(process::Stdio::inherit());
match cmd.status() {
Ok(status) => {
// Ensure correct exit code is returned
let code = status.code().unwrap_or(1);
process::exit(code);
}
Err(e) => {
Err(e).chain_err(|| rustup_utils::ErrorKind::RunningCommand {
name: OsStr::new(arg0).to_owned(),
})
}
}
}
#[cfg(unix)]
fn stderr_isatty() -> bool {
use libc;
unsafe { libc::isatty(libc::STDERR_FILENO) != 0 }
}
#[cfg(windows)]
fn stderr_isatty() -> bool {
type DWORD = u32;
type BOOL = i32;
type HANDLE = *mut u8;
const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD;
extern "system" {
fn GetStdHandle(which: DWORD) -> HANDLE;
fn GetConsoleMode(hConsoleHandle: HANDLE,
lpMode: *mut DWORD) -> BOOL;
}
unsafe {
let handle = GetStdHandle(STD_ERROR_HANDLE);
let mut out = 0;
GetConsoleMode(handle, &mut out) != 0
}
}<|fim▁end|> | }
|
<|file_name|>client.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__docformat__="restructuredtext"
from rest import RestClient, Result, ResponseFormats
from datetime import datetime
class NeocortexRestClient(object):
BASE_URL = "http://api.meaningtool.com/0.2/neocortex"
__builder__ = None
class Builder(RestClient):
_functions = {}
_params = {}
_input = None
_format = None
_tree_key = None
def format(self, value):
self._format = value
return self
def input(self, text):
self._input = self._params["input"] = text
return self
def categories(self, tree_key=None, additionals=None):
params = dict(additionals or [])
if tree_key is not None:
params.update(dict(tree_key=tree_key))
self._functions["categories"] = params
return self
def keywords(self):
self._functions["keywords"] = True
return self
def entities(self):
self._functions["entities"] = True
return self
def language(self):
self._functions["language"] = True
return self
def meaningfy(self):
fs = []
for k,v in self._functions.items():
kk = k
if isinstance(v, dict):
if v.has_key("additionals"):
for a in v["additionals"]:
kk = "%s+%s" % (kk, a)
if v.has_key("tree_key") and v["tree_key"] is not None and kk == 'categories':
self._params["tree_key"] = v["tree_key"]
fs.append(kk)
fs = ";".join(fs)
url = "%s.%s" % (fs, self._format)
try:
res = self.post(url, self._params, response_format=self._format)
except Exception, e:
raise e
finally:
self._reset()
return res
def _reset(self):
self._functions = {}
self._params = {}
def __init__(self, api_key, base_url=None):
self.api_key = api_key
self.BASE_URL = base_url or self.BASE_URL
def get_builder(self):
if self.__builder__ is None:
self.__builder__ = NeocortexRestClient.Builder(self.BASE_URL, self.api_key).format(ResponseFormats.JSON)
self.__builder__._reset()
return self.__builder__
def categories(self, input, tree_key=None, additionals=None):
builder = self.get_builder()
return builder.format(ResponseFormats.JSON).input(input).categories(tree_key, additionals).meaningfy().payload["categories"]
def keywords(self, input):
builder = self.get_builder()<|fim▁hole|> return builder.format(ResponseFormats.JSON).input(input).entities().meaningfy().payload["entities"]
def language(self, input):
builder = self.get_builder()
return builder.format(ResponseFormats.JSON).input(input).language().meaningfy().payload["language"]<|fim▁end|> | return builder.format(ResponseFormats.JSON).input(input).keywords().meaningfy().payload["keywords"]
def entities(self, input):
builder = self.get_builder() |
<|file_name|>cfgs-on-items.rs<|end_file_name|><|fim▁begin|>// run-pass
// compile-flags: --cfg fooA --cfg fooB
// fooA AND !bar
#[cfg(all(fooA, not(bar)))]
fn foo1() -> isize { 1 }
// !fooA AND !bar<|fim▁hole|>// fooC OR (fooB AND !bar)
#[cfg(any(fooC, all(fooB, not(bar))))]
fn foo2() -> isize { 3 }
// fooA AND bar
#[cfg(all(fooA, bar))]
fn foo3() -> isize { 2 }
// !(fooA AND bar)
#[cfg(not(all(fooA, bar)))]
fn foo3() -> isize { 3 }
pub fn main() {
assert_eq!(1, foo1());
assert_eq!(3, foo2());
assert_eq!(3, foo3());
}<|fim▁end|> | #[cfg(all(not(fooA), not(bar)))]
fn foo2() -> isize { 2 }
|
<|file_name|>baked.py<|end_file_name|><|fim▁begin|># sqlalchemy/ext/baked.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Baked query extension.
Provides a creational pattern for the :class:`.query.Query` object which
allows the fully constructed object, Core select statement, and string
compiled result to be fully cached.
"""
import copy
import logging
from .. import exc as sa_exc
from .. import util
from ..orm import exc as orm_exc
from ..orm import strategy_options
from ..orm.query import Query
from ..orm.session import Session
from ..sql import func
from ..sql import literal_column
from ..sql import util as sql_util
log = logging.getLogger(__name__)
class Bakery(object):
"""Callable which returns a :class:`.BakedQuery`.
This object is returned by the class method
:meth:`.BakedQuery.bakery`. It exists as an object
so that the "cache" can be easily inspected.
.. versionadded:: 1.2
"""
__slots__ = "cls", "cache"
def __init__(self, cls_, cache):
self.cls = cls_
self.cache = cache
def __call__(self, initial_fn, *args):
return self.cls(self.cache, initial_fn, args)
class BakedQuery(object):
"""A builder object for :class:`.query.Query` objects."""
__slots__ = "steps", "_bakery", "_cache_key", "_spoiled"
def __init__(self, bakery, initial_fn, args=()):
self._cache_key = ()
self._update_cache_key(initial_fn, args)
self.steps = [initial_fn]
self._spoiled = False
self._bakery = bakery
@classmethod
def bakery(cls, size=200, _size_alert=None):
"""Construct a new bakery.
:return: an instance of :class:`.Bakery`
"""
return Bakery(cls, util.LRUCache(size, size_alert=_size_alert))
def _clone(self):
b1 = BakedQuery.__new__(BakedQuery)
b1._cache_key = self._cache_key
b1.steps = list(self.steps)
b1._bakery = self._bakery
b1._spoiled = self._spoiled
return b1
def _update_cache_key(self, fn, args=()):
self._cache_key += (fn.__code__,) + args
def __iadd__(self, other):
if isinstance(other, tuple):
self.add_criteria(*other)
else:
self.add_criteria(other)
return self
def __add__(self, other):
if isinstance(other, tuple):
return self.with_criteria(*other)
else:
return self.with_criteria(other)
def add_criteria(self, fn, *args):
"""Add a criteria function to this :class:`.BakedQuery`.
This is equivalent to using the ``+=`` operator to
modify a :class:`.BakedQuery` in-place.
"""
self._update_cache_key(fn, args)
self.steps.append(fn)
return self
def with_criteria(self, fn, *args):
"""Add a criteria function to a :class:`.BakedQuery` cloned from this one.
This is equivalent to using the ``+`` operator to
produce a new :class:`.BakedQuery` with modifications.
"""
return self._clone().add_criteria(fn, *args)
def for_session(self, session):
"""Return a :class:`.Result` object for this :class:`.BakedQuery`.
This is equivalent to calling the :class:`.BakedQuery` as a
Python callable, e.g. ``result = my_baked_query(session)``.
"""
return Result(self, session)
def __call__(self, session):
return self.for_session(session)
def spoil(self, full=False):
"""Cancel any query caching that will occur on this BakedQuery object.
The BakedQuery can continue to be used normally, however additional
creational functions will not be cached; they will be called
on every invocation.
This is to support the case where a particular step in constructing
a baked query disqualifies the query from being cacheable, such
as a variant that relies upon some uncacheable value.
:param full: if False, only functions added to this
:class:`.BakedQuery` object subsequent to the spoil step will be
non-cached; the state of the :class:`.BakedQuery` up until
this point will be pulled from the cache. If True, then the
entire :class:`_query.Query` object is built from scratch each
time, with all creational functions being called on each
invocation.
"""
if not full and not self._spoiled:
_spoil_point = self._clone()
_spoil_point._cache_key += ("_query_only",)
self.steps = [_spoil_point._retrieve_baked_query]
self._spoiled = True
return self
def _effective_key(self, session):
"""Return the key that actually goes into the cache dictionary for
this :class:`.BakedQuery`, taking into account the given
:class:`.Session`.
This basically means we also will include the session's query_class,
as the actual :class:`_query.Query` object is part of what's cached
and needs to match the type of :class:`_query.Query` that a later
session will want to use.
"""
return self._cache_key + (session._query_cls,)
def _with_lazyload_options(self, options, effective_path, cache_path=None):
"""Cloning version of _add_lazyload_options.
"""
q = self._clone()
q._add_lazyload_options(options, effective_path, cache_path=cache_path)
return q
def _add_lazyload_options(self, options, effective_path, cache_path=None):
"""Used by per-state lazy loaders to add options to the
"lazy load" query from a parent query.
Creates a cache key based on given load path and query options;
if a repeatable cache key cannot be generated, the query is
"spoiled" so that it won't use caching.
"""
key = ()
if not cache_path:
cache_path = effective_path
if cache_path.path[0].is_aliased_class:
# paths that are against an AliasedClass are unsafe to cache
# with since the AliasedClass is an ad-hoc object.
self.spoil(full=True)
else:
for opt in options:
cache_key = opt._generate_cache_key(cache_path)
if cache_key is False:
self.spoil(full=True)
elif cache_key is not None:
key += cache_key
self.add_criteria(
lambda q: q._with_current_path(
effective_path
)._conditional_options(*options),
cache_path.path,
key,
)
def _retrieve_baked_query(self, session):
query = self._bakery.get(self._effective_key(session), None)
if query is None:
query = self._as_query(session)
self._bakery[self._effective_key(session)] = query.with_session(
None
)
return query.with_session(session)
def _bake(self, session):
query = self._as_query(session)
context = query._compile_context()
self._bake_subquery_loaders(session, context)
context.session = None
context.query = query = context.query.with_session(None)
query._execution_options = query._execution_options.union(
{"compiled_cache": self._bakery}
)
# we'll be holding onto the query for some of its state,
# so delete some compilation-use-only attributes that can take up
# space
for attr in (
"_correlate",
"_from_obj",
"_mapper_adapter_map",
"_joinpath",
"_joinpoint",
):
query.__dict__.pop(attr, None)
# if the query is not safe to cache, we still do everything as though
# we did cache it, since the receiver of _bake() assumes subqueryload
# context was set up, etc.
if context.query._bake_ok:
self._bakery[self._effective_key(session)] = context
return context
def to_query(self, query_or_session):
"""Return the :class:`_query.Query` object for use as a subquery.
This method should be used within the lambda callable being used
to generate a step of an enclosing :class:`.BakedQuery`. The
parameter should normally be the :class:`_query.Query` object that
is passed to the lambda::
sub_bq = self.bakery(lambda s: s.query(User.name))
sub_bq += lambda q: q.filter(
User.id == Address.user_id).correlate(Address)
main_bq = self.bakery(lambda s: s.query(Address))
main_bq += lambda q: q.filter(
sub_bq.to_query(q).exists())
In the case where the subquery is used in the first callable against
a :class:`.Session`, the :class:`.Session` is also accepted::
sub_bq = self.bakery(lambda s: s.query(User.name))
sub_bq += lambda q: q.filter(
User.id == Address.user_id).correlate(Address)
main_bq = self.bakery(
lambda s: s.query(Address.id, sub_bq.to_query(q).as_scalar())
)
:param query_or_session: a :class:`_query.Query` object or a class
:class:`.Session` object, that is assumed to be within the context
of an enclosing :class:`.BakedQuery` callable.
.. versionadded:: 1.3
"""
if isinstance(query_or_session, Session):
session = query_or_session
elif isinstance(query_or_session, Query):
session = query_or_session.session
if session is None:
raise sa_exc.ArgumentError(
"Given Query needs to be associated with a Session"
)
else:
raise TypeError(
"Query or Session object expected, got %r."
% type(query_or_session)
)
return self._as_query(session)
def _as_query(self, session):
query = self.steps[0](session)
for step in self.steps[1:]:
query = step(query)
return query
def _bake_subquery_loaders(self, session, context):
"""convert subquery eager loaders in the cache into baked queries.
For subquery eager loading to work, all we need here is that the
Query point to the correct session when it is run. However, since
we are "baking" anyway, we may as well also turn the query into
a "baked" query so that we save on performance too.
"""
context.attributes["baked_queries"] = baked_queries = []
for k, v in list(context.attributes.items()):
if isinstance(v, Query):
if "subquery" in k:
bk = BakedQuery(self._bakery, lambda *args: v)
bk._cache_key = self._cache_key + k
bk._bake(session)
baked_queries.append((k, bk._cache_key, v))
del context.attributes[k]
def _unbake_subquery_loaders(
self, session, context, params, post_criteria
):
"""Retrieve subquery eager loaders stored by _bake_subquery_loaders
and turn them back into Result objects that will iterate just
like a Query object.
"""
if "baked_queries" not in context.attributes:
return
for k, cache_key, query in context.attributes["baked_queries"]:
bk = BakedQuery(
self._bakery, lambda sess, q=query: q.with_session(sess)
)
bk._cache_key = cache_key
q = bk.for_session(session)
for fn in post_criteria:
q = q.with_post_criteria(fn)
context.attributes[k] = q.params(**params)
class Result(object):
"""Invokes a :class:`.BakedQuery` against a :class:`.Session`.
The :class:`.Result` object is where the actual :class:`.query.Query`
object gets created, or retrieved from the cache,
against a target :class:`.Session`, and is then invoked for results.
"""
__slots__ = "bq", "session", "_params", "_post_criteria"
def __init__(self, bq, session):
self.bq = bq
self.session = session
self._params = {}
self._post_criteria = []
def params(self, *args, **kw):
"""Specify parameters to be replaced into the string SQL statement."""
if len(args) == 1:
kw.update(args[0])
elif len(args) > 0:
raise sa_exc.ArgumentError(
"params() takes zero or one positional argument, "
"which is a dictionary."
)
self._params.update(kw)
return self
def _using_post_criteria(self, fns):
if fns:
self._post_criteria.extend(fns)
return self
def with_post_criteria(self, fn):
"""Add a criteria function that will be applied post-cache.
This adds a function that will be run against the
:class:`_query.Query` object after it is retrieved from the
cache. Functions here can be used to alter the query in ways
that **do not affect the SQL output**, such as execution options
and shard identifiers (when using a shard-enabled query object)
.. warning:: :meth:`.Result.with_post_criteria` functions are applied
to the :class:`_query.Query`
object **after** the query's SQL statement
object has been retrieved from the cache. Any operations here
which intend to modify the SQL should ensure that
:meth:`.BakedQuery.spoil` was called first.
.. versionadded:: 1.2
"""
return self._using_post_criteria([fn])
def _as_query(self):
q = self.bq._as_query(self.session).params(self._params)
for fn in self._post_criteria:
q = fn(q)
return q
def __str__(self):
return str(self._as_query())
def __iter__(self):
bq = self.bq
if not self.session.enable_baked_queries or bq._spoiled:
return iter(self._as_query())
baked_context = bq._bakery.get(bq._effective_key(self.session), None)
if baked_context is None:
baked_context = bq._bake(self.session)
context = copy.copy(baked_context)
context.session = self.session
context.attributes = context.attributes.copy()
bq._unbake_subquery_loaders(
self.session, context, self._params, self._post_criteria
)
context.statement.use_labels = True
if context.autoflush and not context.populate_existing:
self.session._autoflush()
q = context.query.params(self._params).with_session(self.session)
for fn in self._post_criteria:
q = fn(q)
return q._execute_and_instances(context)
def count(self):
"""return the 'count'.
Equivalent to :meth:`_query.Query.count`.
Note this uses a subquery to ensure an accurate count regardless
of the structure of the original statement.
.. versionadded:: 1.1.6
"""
col = func.count(literal_column("*"))
bq = self.bq.with_criteria(lambda q: q.from_self(col))
return bq.for_session(self.session).params(self._params).scalar()
def scalar(self):
"""Return the first element of the first result or None
if no rows present. If multiple rows are returned,
raises MultipleResultsFound.
Equivalent to :meth:`_query.Query.scalar`.
.. versionadded:: 1.1.6
"""
try:
ret = self.one()
if not isinstance(ret, tuple):
return ret
return ret[0]
except orm_exc.NoResultFound:
return None
def first(self):
"""Return the first row.
Equivalent to :meth:`_query.Query.first`.
"""
bq = self.bq.with_criteria(lambda q: q.slice(0, 1))
ret = list(
bq.for_session(self.session)
.params(self._params)
._using_post_criteria(self._post_criteria)
)
if len(ret) > 0:
return ret[0]
else:
return None
def one(self):
"""Return exactly one result or raise an exception.
Equivalent to :meth:`_query.Query.one`.
"""
try:
ret = self.one_or_none()
except orm_exc.MultipleResultsFound as err:
util.raise_(
orm_exc.MultipleResultsFound(
"Multiple rows were found for one()"
),
replace_context=err,
)
else:
if ret is None:
raise orm_exc.NoResultFound("No row was found for one()")
return ret
def one_or_none(self):
"""Return one or zero results, or raise an exception for multiple
rows.
Equivalent to :meth:`_query.Query.one_or_none`.
.. versionadded:: 1.0.9
"""
ret = list(self)
l = len(ret)
if l == 1:
return ret[0]
elif l == 0:
return None
else:
raise orm_exc.MultipleResultsFound(
"Multiple rows were found for one_or_none()"
)
def all(self):
"""Return all rows.
Equivalent to :meth:`_query.Query.all`.
"""
return list(self)
def get(self, ident):
"""Retrieve an object based on identity.
Equivalent to :meth:`_query.Query.get`.
"""
query = self.bq.steps[0](self.session)
return query._get_impl(ident, self._load_on_pk_identity)
def _load_on_pk_identity(self, query, primary_key_identity):
"""Load the given primary key identity from the database."""
mapper = query._mapper_zero()
_get_clause, _get_params = mapper._get_clause
def setup(query):
_lcl_get_clause = _get_clause
q = query._clone()
q._get_condition()
q._order_by = None
# None present in ident - turn those comparisons
# into "IS NULL"
if None in primary_key_identity:
nones = set(
[
_get_params[col].key
for col, value in zip(
mapper.primary_key, primary_key_identity
)
if value is None
]
)
_lcl_get_clause = sql_util.adapt_criterion_to_null(
_lcl_get_clause, nones
)
_lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False)
q._criterion = _lcl_get_clause
for fn in self._post_criteria:
q = fn(q)
return q
# cache the query against a key that includes
# which positions in the primary key are NULL
# (remember, we can map to an OUTER JOIN)
bq = self.bq
# add the clause we got from mapper._get_clause to the cache
# key so that if a race causes multiple calls to _get_clause,
# we've cached on ours
bq = bq._clone()
bq._cache_key += (_get_clause,)
bq = bq.with_criteria(
setup, tuple(elem is None for elem in primary_key_identity)
)
params = dict(
[
(_get_params[primary_key].key, id_val)
for id_val, primary_key in zip(
primary_key_identity, mapper.primary_key
)
]
)
result = list(bq.for_session(self.session).params(**params))
l = len(result)
if l > 1:
raise orm_exc.MultipleResultsFound()
elif l:
return result[0]
else:
return None
@util.deprecated(
"1.2", "Baked lazy loading is now the default implementation."
)
def bake_lazy_loaders():
"""Enable the use of baked queries for all lazyloaders systemwide.
The "baked" implementation of lazy loading is now the sole implementation
for the base lazy loader; this method has no effect except for a warning.
<|fim▁hole|>
@util.deprecated(
"1.2", "Baked lazy loading is now the default implementation."
)
def unbake_lazy_loaders():
"""Disable the use of baked queries for all lazyloaders systemwide.
This method now raises NotImplementedError() as the "baked" implementation
is the only lazy load implementation. The
:paramref:`_orm.relationship.bake_queries` flag may be used to disable
the caching of queries on a per-relationship basis.
"""
raise NotImplementedError(
"Baked lazy loading is now the default implementation"
)
@strategy_options.loader_option()
def baked_lazyload(loadopt, attr):
"""Indicate that the given attribute should be loaded using "lazy"
loading with a "baked" query used in the load.
"""
return loadopt.set_relationship_strategy(attr, {"lazy": "baked_select"})
@baked_lazyload._add_unbound_fn
@util.deprecated(
"1.2",
"Baked lazy loading is now the default "
"implementation for lazy loading.",
)
def baked_lazyload(*keys):
return strategy_options._UnboundLoad._from_keys(
strategy_options._UnboundLoad.baked_lazyload, keys, False, {}
)
@baked_lazyload._add_unbound_all_fn
@util.deprecated(
"1.2",
"Baked lazy loading is now the default "
"implementation for lazy loading.",
)
def baked_lazyload_all(*keys):
return strategy_options._UnboundLoad._from_keys(
strategy_options._UnboundLoad.baked_lazyload, keys, True, {}
)
baked_lazyload = baked_lazyload._unbound_fn
baked_lazyload_all = baked_lazyload_all._unbound_all_fn
bakery = BakedQuery.bakery<|fim▁end|> | """
pass
|
<|file_name|>Dataset.py<|end_file_name|><|fim▁begin|>import math
import random
import onmt
from torch.autograd import Variable
class Dataset(object):
def __init__(self, srcData, tgtData, batchSize, cuda, volatile=False):
self.src = srcData
if tgtData:
self.tgt = tgtData
assert(len(self.src) == len(self.tgt))
else:
self.tgt = None
self.cuda = cuda
self.batchSize = batchSize
self.numBatches = math.ceil(len(self.src)/batchSize)
self.volatile = volatile
def _batchify(self, data, align_right=False):
max_length = max(x.size(0) for x in data)
out = data[0].new(len(data), max_length).fill_(onmt.Constants.PAD)
for i in range(len(data)):
data_length = data[i].size(0)
offset = max_length - data_length if align_right else 0
out[i].narrow(0, offset, data_length).copy_(data[i])<|fim▁hole|> out = out.t().contiguous()
if self.cuda:
out = out.cuda()
v = Variable(out, volatile=self.volatile)
return v
def __getitem__(self, index):
assert index < self.numBatches, "%d > %d" % (index, self.numBatches)
srcBatch = self._batchify(
self.src[index*self.batchSize:(index+1)*self.batchSize], align_right=True)
if self.tgt:
tgtBatch = self._batchify(
self.tgt[index*self.batchSize:(index+1)*self.batchSize])
else:
tgtBatch = None
return srcBatch, tgtBatch
def __len__(self):
return self.numBatches
def shuffle(self):
zipped = list(zip(self.src, self.tgt))
random.shuffle(zipped)
self.src, self.tgt = [x[0] for x in zipped], [x[1] for x in zipped]<|fim▁end|> | |
<|file_name|>table_view_text_box.py<|end_file_name|><|fim▁begin|>import tkinter as tk<|fim▁hole|> tk.Tk.__init__(self)
t = SimpleTable(self, 10,2)
t.pack(side="top", fill="x")
t.set(0,0,"Hello, world")
class SimpleTable(tk.Frame):
def __init__(self, parent, rows=10, columns=2):
# use black background so it "peeks through" to
# form grid lines
tk.Frame.__init__(self, parent, background="black")
self._widgets = []
for row in range(rows):
current_row = []
for column in range(columns):
label = tk.Label(self, text="%s/%s" % (row, column),
borderwidth=0, width=10, height = 10)
label.grid(row=row, column=column, sticky="nsew", padx=1, pady=1)
current_row.append(label)
self._widgets.append(current_row)
for column in range(columns):
self.grid_columnconfigure(column, weight=1)
def set(self, row, column, value):
widget = self._widgets[row][column]
widget.configure(text=value)
if __name__ == "__main__":
app = ExampleApp()
app.mainloop()<|fim▁end|> |
class ExampleApp(tk.Tk):
def __init__(self): |
<|file_name|>file_loader.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# (C) Copyright 2015 by Marek Hakala <hakala.marek@gmail.com>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class FileLoader(object):<|fim▁hole|> def loadFile(self):
self.file = open(self.filename, mode='rb')
self.fileContent = self.file.read()
def closeFile(self):
self.file.close()
def getContent(self):
return self.fileContent<|fim▁end|> | def __init__(self, filename):
self.filename = filename
|
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, absolute_import
import numpy as np
from numba import cuda, int32, float32
from numba.cuda.testing import unittest
from numba.config import ENABLE_CUDASIM
def useless_sync(ary):
i = cuda.grid(1)
cuda.syncthreads()
ary[i] = i
def simple_smem(ary):
N = 100
sm = cuda.shared.array(N, int32)
i = cuda.grid(1)
if i == 0:
for j in range(N):
sm[j] = j
cuda.syncthreads()
ary[i] = sm[i]
def coop_smem2d(ary):
i, j = cuda.grid(2)
sm = cuda.shared.array((10, 20), float32)
sm[i, j] = (i + 1) / (j + 1)
cuda.syncthreads()
ary[i, j] = sm[i, j]
def dyn_shared_memory(ary):
i = cuda.grid(1)
sm = cuda.shared.array(0, float32)
sm[i] = i * 2
cuda.syncthreads()
ary[i] = sm[i]
def use_threadfence(ary):
ary[0] += 123
cuda.threadfence()
ary[0] += 321
def use_threadfence_block(ary):
ary[0] += 123
cuda.threadfence_block()
ary[0] += 321
def use_threadfence_system(ary):
ary[0] += 123
cuda.threadfence_system()
ary[0] += 321
class TestCudaSync(unittest.TestCase):
def test_useless_sync(self):
compiled = cuda.jit("void(int32[::1])")(useless_sync)
nelem = 10
ary = np.empty(nelem, dtype=np.int32)
exp = np.arange(nelem, dtype=np.int32)
compiled[1, nelem](ary)
self.assertTrue(np.all(ary == exp))
def test_simple_smem(self):
compiled = cuda.jit("void(int32[::1])")(simple_smem)
nelem = 100
ary = np.empty(nelem, dtype=np.int32)
compiled[1, nelem](ary)
self.assertTrue(np.all(ary == np.arange(nelem, dtype=np.int32)))
def test_coop_smem2d(self):
compiled = cuda.jit("void(float32[:,::1])")(coop_smem2d)
shape = 10, 20
ary = np.empty(shape, dtype=np.float32)
compiled[1, shape](ary)
exp = np.empty_like(ary)
for i in range(ary.shape[0]):
for j in range(ary.shape[1]):
exp[i, j] = (i + 1) / (j + 1)
self.assertTrue(np.allclose(ary, exp))
def test_dyn_shared_memory(self):
compiled = cuda.jit("void(float32[::1])")(dyn_shared_memory)
shape = 50
ary = np.empty(shape, dtype=np.float32)
compiled[1, shape, 0, ary.size * 4](ary)
self.assertTrue(np.all(ary == 2 * np.arange(ary.size, dtype=np.int32)))
def test_threadfence_codegen(self):
# Does not test runtime behavior, just the code generation.
compiled = cuda.jit("void(int32[:])")(use_threadfence)
ary = np.zeros(10, dtype=np.int32)
compiled[1, 1](ary)
self.assertEqual(123 + 321, ary[0])
if not ENABLE_CUDASIM:
self.assertIn("membar.gl;", compiled.ptx)
def test_threadfence_block_codegen(self):
# Does not test runtime behavior, just the code generation.
compiled = cuda.jit("void(int32[:])")(use_threadfence_block)
ary = np.zeros(10, dtype=np.int32)
compiled[1, 1](ary)
self.assertEqual(123 + 321, ary[0])
if not ENABLE_CUDASIM:
self.assertIn("membar.cta;", compiled.ptx)
def test_threadfence_system_codegen(self):
# Does not test runtime behavior, just the code generation.
compiled = cuda.jit("void(int32[:])")(use_threadfence_system)<|fim▁hole|> ary = np.zeros(10, dtype=np.int32)
compiled[1, 1](ary)
self.assertEqual(123 + 321, ary[0])
if not ENABLE_CUDASIM:
self.assertIn("membar.sys;", compiled.ptx)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>UserProfileDB.py<|end_file_name|><|fim▁begin|>""" UserProfileDB class is a front-end to the User Profile Database
"""
__RCSID__ = "$Id$"
import types
import os
import sys
import hashlib
from DIRAC import S_OK, S_ERROR, gLogger, gConfig
from DIRAC.Core.Utilities import Time
from DIRAC.ConfigurationSystem.Client.Helpers import Registry
from DIRAC.Core.Base.DB import DB
class UserProfileDB( DB ):
""" UserProfileDB class is a front-end to the User Profile Database
"""
tableDict = { 'up_Users' : { 'Fields' : { 'Id' : 'INTEGER AUTO_INCREMENT NOT NULL',
'UserName' : 'VARCHAR(32) NOT NULL',
'LastAccess' : 'DATETIME',
},
'PrimaryKey' : 'Id',
'UniqueIndexes' : { 'U' : [ 'UserName' ] },
'Engine': 'InnoDB',
},
'up_Groups': { 'Fields' : { 'Id' : 'INTEGER AUTO_INCREMENT NOT NULL',
'UserGroup' : 'VARCHAR(32) NOT NULL',
'LastAccess' : 'DATETIME',
},
'PrimaryKey' : 'Id',
'UniqueIndexes' : { 'G' : [ 'UserGroup' ] },
'Engine': 'InnoDB',
},
'up_VOs': { 'Fields' : { 'Id' : 'INTEGER AUTO_INCREMENT NOT NULL',
'VO' : 'VARCHAR(32) NOT NULL',
'LastAccess' : 'DATETIME',
},
'PrimaryKey' : 'Id',
'UniqueIndexes' : { 'VO' : [ 'VO' ] },
'Engine': 'InnoDB',
},
'up_ProfilesData': { 'Fields' : { 'UserId' : 'INTEGER',
'GroupId' : 'INTEGER',
'VOId' : 'INTEGER',
'Profile' : 'VARCHAR(255) NOT NULL',
'VarName' : 'VARCHAR(255) NOT NULL',
'Data' : 'BLOB',
'ReadAccess' : 'VARCHAR(10) DEFAULT "USER"',
'PublishAccess' : 'VARCHAR(10) DEFAULT "USER"',
},
'PrimaryKey' : [ 'UserId', 'GroupId', 'Profile', 'VarName' ],
'Indexes' : { 'ProfileKey' : [ 'UserId', 'GroupId', 'Profile' ],
'UserKey' : [ 'UserId' ] ,
},
'Engine': 'InnoDB',
},
'up_HashTags': { 'Fields' : { 'UserId' : 'INTEGER',
'GroupId' : 'INTEGER',
'VOId' : 'INTEGER',
'HashTag' : 'VARCHAR(32) NOT NULL',
'TagName' : 'VARCHAR(255) NOT NULL',
'LastAccess' : 'DATETIME',
},
'PrimaryKey' : [ 'UserId', 'GroupId', 'TagName' ],
'Indexes' : { 'HashKey' : [ 'UserId', 'HashTag' ] },
'Engine': 'InnoDB',
},
}
  def __init__( self ):
    """ Constructor: connect to Framework/UserProfileDB and make sure the
        up_* tables exist.  Raises if the tables cannot be created.
    """
    # Valid values/attributes for the per-variable access permissions
    self.__permValues = [ 'USER', 'GROUP', 'VO', 'ALL' ]
    self.__permAttrs = [ 'ReadAccess', 'PublishAccess' ]
    DB.__init__( self, 'UserProfileDB', 'Framework/UserProfileDB', 10 )
    retVal = self.__initializeDB()
    if not retVal[ 'OK' ]:
      raise Exception( "Can't create tables: %s" % retVal[ 'Message' ] )
  def _checkTable( self ):
    """ Make sure the tables are created
    """
    # Thin public wrapper around the private initializer (used by tests).
    return self.__initializeDB()
def __initializeDB( self ):
"""
Create the tables
"""
retVal = self._query( "show tables" )
if not retVal[ 'OK' ]:
return retVal
tablesInDB = [ t[0] for t in retVal[ 'Value' ] ]
tablesD = {}
if 'up_Users' not in tablesInDB:
tablesD[ 'up_Users' ] = self.tableDict['up_Users']
if 'up_Groups' not in tablesInDB:
tablesD[ 'up_Groups' ] = self.tableDict[ 'up_Groups']
if 'up_VOs' not in tablesInDB:
tablesD[ 'up_VOs' ] = self.tableDict['up_VOs']
if 'up_ProfilesData' not in tablesInDB:
tablesD[ 'up_ProfilesData' ] = self.tableDict['up_ProfilesData']
if 'up_HashTags' not in tablesInDB:
tablesD[ 'up_HashTags' ] = self.tableDict['up_HashTags']
return self._createTables( tablesD )
  def __getUserId( self, userName, insertIfMissing = True ):
    """Resolve (and optionally create) the up_Users id for userName."""
    return self.__getObjId( userName, 'UserName', 'up_Users', insertIfMissing )
  def __getGroupId( self, groupName, insertIfMissing = True ):
    """Resolve (and optionally create) the up_Groups id for groupName."""
    return self.__getObjId( groupName, 'UserGroup', 'up_Groups', insertIfMissing )
  def __getVOId( self, voName, insertIfMissing = True ):
    """Resolve (and optionally create) the up_VOs id for voName."""
    return self.__getObjId( voName, 'VO', 'up_VOs', insertIfMissing )
  def __getObjId( self, objValue, varName, tableName, insertIfMissing = True ):
    """Return S_OK( id ) of the row where varName == objValue in tableName.

    Touches LastAccess on every lookup.  If the row is absent it is either
    inserted (insertIfMissing) or an S_ERROR is returned.
    """
    result = self.getFields( tableName, ['Id'], { varName: objValue } )
    if not result[ 'OK' ]:
      return result
    data = result[ 'Value' ]
    if len( data ) > 0:
      objId = data[0][0]
      # Best-effort access-time update; its result is deliberately ignored
      self.updateFields( tableName, ['LastAccess'], ['UTC_TIMESTAMP()'], { 'Id': objId } )
      return S_OK( objId )
    if not insertIfMissing:
      return S_ERROR( "No entry %s for %s defined in the DB" % ( objValue, varName ) )
    result = self.insertFields( tableName, [ varName, 'LastAccess' ], [ objValue, 'UTC_TIMESTAMP()' ] )
    if not result[ 'OK' ]:
      return result
    # AUTO_INCREMENT id of the freshly inserted row
    return S_OK( result[ 'lastRowId' ] )
def getUserGroupIds( self, userName, userGroup, insertIfMissing = True ):
result = self.__getUserId( userName, insertIfMissing )
if not result[ 'OK' ]:
return result
userId = result[ 'Value' ]
result = self.__getGroupId( userGroup, insertIfMissing )
if not result[ 'OK' ]:
return result
groupId = result[ 'Value' ]
userVO = Registry.getVOForGroup( userGroup )
if not userVO:
userVO = "undefined"
result = self.__getVOId( userVO, insertIfMissing )
if not result[ 'OK' ]:
return result
voId = result[ 'Value' ]
return S_OK( ( userId, groupId, voId ) )
  def deleteUserProfile( self, userName, userGroup = False ):
    """
    Delete the profiles for a user.

    Without userGroup all profile data of the user is removed; with a
    group only that ( user, group ) slice of up_ProfilesData is removed.
    """
    result = self.__getUserId( userName )
    if not result[ 'OK' ]:
      return result
    userId = result[ 'Value' ]
    condDict = { 'UserId': userId }
    if userGroup:
      result = self.__getGroupId( userGroup )
      if not result[ 'OK' ]:
        return result
      groupId = result[ 'Value' ]
      condDict['GroupId'] = groupId
    result = self.deleteEntries( 'up_ProfilesData', condDict )
    if not result[ 'OK' ] or not userGroup:
      return result
    # NOTE(review): when a group was given this also drops the whole
    # up_Users row for the user, even though other groups may still have
    # data referencing it -- confirm this asymmetry is intended.
    return self.deleteEntries( 'up_Users', { 'Id': userId } )
def __webProfileUserDataCond( self, userIds, sqlProfileName = False, sqlVarName = False ):
condSQL = [ '`up_ProfilesData`.UserId=%s' % userIds[0],
'`up_ProfilesData`.GroupId=%s' % userIds[1],
'`up_ProfilesData`.VOId=%s' % userIds[2] ]
if sqlProfileName:
condSQL.append( '`up_ProfilesData`.Profile=%s' % sqlProfileName )
if sqlVarName:
condSQL.append( '`up_ProfilesData`.VarName=%s' % sqlVarName )
return " AND ".join( condSQL )
<|fim▁hole|> def __webProfileReadAccessDataCond( self, userIds, ownerIds, sqlProfileName, sqlVarName = False, match = False ):
permCondSQL = []
sqlCond = []
if match:
sqlCond.append( '`up_ProfilesData`.UserId = %s AND `up_ProfilesData`.GroupId = %s' % ( ownerIds[0], ownerIds[1] ) )
else:
permCondSQL.append( '`up_ProfilesData`.UserId = %s AND `up_ProfilesData`.GroupId = %s' % ( ownerIds[0], ownerIds[1] ) )
permCondSQL.append( '`up_ProfilesData`.GroupId=%s AND `up_ProfilesData`.ReadAccess="GROUP"' % userIds[1] )
permCondSQL.append( '`up_ProfilesData`.VOId=%s AND `up_ProfilesData`.ReadAccess="VO"' % userIds[2] )
permCondSQL.append( '`up_ProfilesData`.ReadAccess="ALL"' )
sqlCond.append( '`up_ProfilesData`.Profile = %s' % sqlProfileName )
if sqlVarName:
sqlCond.append( "`up_ProfilesData`.VarName = %s" % ( sqlVarName ) )
#Perms
sqlCond.append( "( ( %s ) )" % " ) OR ( ".join( permCondSQL ) )
return " AND ".join( sqlCond )
  def __parsePerms( self, perms, addMissing = True ):
    """Normalize a permissions dict against the known attributes/values.

    Unknown or missing entries fall back to the first permitted value
    ('USER') when addMissing is set, otherwise they are dropped.
    """
    normPerms = {}
    for pName in self.__permAttrs:
      if not perms or pName not in perms:
        if addMissing:
          normPerms[ pName ] = self.__permValues[0]
        continue
      else:
        permVal = perms[ pName ].upper()
        # Accept only values from the whitelist, case-insensitively
        for nV in self.__permValues:
          if nV == permVal:
            normPerms[ pName ] = nV
            break
        # Invalid value supplied: fall back to the default if requested
        if pName not in normPerms and addMissing:
          normPerms[ pName ] = self.__permValues[0]
    return normPerms
  def retrieveVarById( self, userIds, ownerIds, profileName, varName ):
    """
    Get a data entry for a profile.

    userIds is the requesting identity, ownerIds the identity whose data
    is being read; the read-access condition enforces the permissions.
    """
    result = self._escapeString( profileName )
    if not result[ 'OK' ]:
      return result
    sqlProfileName = result[ 'Value' ]
    result = self._escapeString( varName )
    if not result[ 'OK' ]:
      return result
    sqlVarName = result[ 'Value' ]
    sqlCond = self.__webProfileReadAccessDataCond( userIds, ownerIds, sqlProfileName, sqlVarName, True )
    #when we retrieve the user profile we have to take into account the user.
    selectSQL = "SELECT data FROM `up_ProfilesData` WHERE %s" % sqlCond
    result = self._query( selectSQL )
    if not result[ 'OK' ]:
      return result
    data = result[ 'Value' ]
    if len( data ) > 0:
      return S_OK( data[0][0] )
    return S_ERROR( "No data for userIds %s profileName %s varName %s" % ( userIds, profileName, varName ) )
def retrieveAllUserVarsById( self, userIds, profileName ):
"""
Get a data entry for a profile
"""
result = self._escapeString( profileName )
if not result[ 'OK' ]:
return result
sqlProfileName = result[ 'Value' ]
sqlCond = self.__webProfileUserDataCond( userIds, sqlProfileName )
selectSQL = "SELECT varName, data FROM `up_ProfilesData` WHERE %s" % sqlCond
result = self._query( selectSQL )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
return S_OK( dict( data ) )
def retrieveUserProfilesById( self, userIds ):
"""
Get all profiles and data for a user
"""
sqlCond = self.__webProfileUserDataCond( userIds )
selectSQL = "SELECT Profile, varName, data FROM `up_ProfilesData` WHERE %s" % sqlCond
result = self._query( selectSQL )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
dataDict = {}
for row in data:
if row[0] not in dataDict:
dataDict[ row[0] ] = {}
dataDict[ row[0] ][ row[1] ] = row[2 ]
return S_OK( dataDict )
  def retrieveVarPermsById( self, userIds, ownerIds, profileName, varName ):
    """
    Get the permission attributes ( ReadAccess, PublishAccess ) of one
    profile variable, subject to the read-access rules.
    """
    result = self._escapeString( profileName )
    if not result[ 'OK' ]:
      return result
    sqlProfileName = result[ 'Value' ]
    result = self._escapeString( varName )
    if not result[ 'OK' ]:
      return result
    sqlVarName = result[ 'Value' ]
    sqlCond = self.__webProfileReadAccessDataCond( userIds, ownerIds, sqlProfileName, sqlVarName )
    selectSQL = "SELECT %s FROM `up_ProfilesData` WHERE %s" % ( ", ".join( self.__permAttrs ), sqlCond )
    result = self._query( selectSQL )
    if not result[ 'OK' ]:
      return result
    data = result[ 'Value' ]
    if len( data ) > 0:
      # Zip the selected columns back into an attribute -> value dict
      permDict = {}
      for i in range( len( self.__permAttrs ) ):
        permDict[ self.__permAttrs[ i ] ] = data[0][i]
      return S_OK( permDict )
    return S_ERROR( "No data for userIds %s profileName %s varName %s" % ( userIds, profileName, varName ) )
def deleteVarByUserId( self, userIds, profileName, varName ):
"""
Remove a data entry for a profile
"""
result = self._escapeString( profileName )
if not result[ 'OK' ]:
return result
sqlProfileName = result[ 'Value' ]
result = self._escapeString( varName )
if not result[ 'OK' ]:
return result
sqlVarName = result[ 'Value' ]
sqlCond = self.__webProfileUserDataCond( userIds, sqlProfileName, sqlVarName )
selectSQL = "DELETE FROM `up_ProfilesData` WHERE %s" % sqlCond
return self._update( selectSQL )
  def storeVarByUserId( self, userIds, profileName, varName, data, perms ):
    """
    Set a data entry for a profile: INSERT first, and on a duplicate-key
    error fall back to an UPDATE of the ( Data, permissions ) columns.
    """
    sqlInsertValues = []
    sqlInsertKeys = []
    # Key columns identify the row; value columns are what an UPDATE touches
    sqlInsertKeys.append( ( 'UserId', userIds[0] ) )
    sqlInsertKeys.append( ( 'GroupId', userIds[1] ) )
    sqlInsertKeys.append( ( 'VOId', userIds[2] ) )
    result = self._escapeString( profileName )
    if not result[ 'OK' ]:
      return result
    sqlProfileName = result[ 'Value' ]
    sqlInsertKeys.append( ( 'Profile', sqlProfileName ) )
    result = self._escapeString( varName )
    if not result[ 'OK' ]:
      return result
    sqlVarName = result[ 'Value' ]
    sqlInsertKeys.append( ( 'VarName', sqlVarName ) )
    result = self._escapeString( data )
    if not result[ 'OK' ]:
      return result
    sqlInsertValues.append( ( 'Data', result[ 'Value' ] ) )
    # Permissions are normalized to the whitelist before being stored
    normPerms = self.__parsePerms( perms )
    for k in normPerms:
      sqlInsertValues.append( ( k, '"%s"' % normPerms[ k ] ) )
    sqlInsert = sqlInsertKeys + sqlInsertValues
    insertSQL = "INSERT INTO `up_ProfilesData` ( %s ) VALUES ( %s )" % ( ", ".join( [ f[0] for f in sqlInsert ] ),
                                                                         ", ".join( [ str( f[1] ) for f in sqlInsert ] ) )
    result = self._update( insertSQL )
    if result[ 'OK' ]:
      return result
    #If error and not duplicate -> real error
    if result[ 'Message' ].find( "Duplicate entry" ) == -1:
      return result
    # Row already exists: overwrite only the value/permission columns
    updateSQL = "UPDATE `up_ProfilesData` SET %s WHERE %s" % ( ", ".join( [ "%s=%s" % f for f in sqlInsertValues ] ),
                                                               self.__webProfileUserDataCond( userIds,
                                                                                              sqlProfileName,
                                                                                              sqlVarName ) )
    return self._update( updateSQL )
  def setUserVarPermsById( self, userIds, profileName, varName, perms ):
    """Update the permission columns of one profile variable.

    Invalid/unknown permission entries are silently ignored; with nothing
    valid to set the call is a no-op returning S_OK().
    """
    result = self._escapeString( profileName )
    if not result[ 'OK' ]:
      return result
    sqlProfileName = result[ 'Value' ]
    result = self._escapeString( varName )
    if not result[ 'OK' ]:
      return result
    sqlVarName = result[ 'Value' ]
    # addMissing=False: only explicitly supplied attributes are updated
    nPerms = self.__parsePerms( perms, False )
    if not nPerms:
      return S_OK()
    sqlPerms = ",".join( [ "%s='%s'" % ( k, nPerms[k] ) for k in nPerms ] )
    updateSql = "UPDATE `up_ProfilesData` SET %s WHERE %s" % ( sqlPerms,
                                                               self.__webProfileUserDataCond( userIds,
                                                                                              sqlProfileName,
                                                                                              sqlVarName ) )
    return self._update( updateSql )
def retrieveVar( self, userName, userGroup, ownerName, ownerGroup, profileName, varName ):
"""
Get a data entry for a profile
"""
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
result = self.getUserGroupIds( ownerName, ownerGroup )
if not result[ 'OK' ]:
return result
ownerIds = result[ 'Value' ]
return self.retrieveVarById( userIds, ownerIds, profileName, varName )
  def retrieveUserProfiles( self, userName, userGroup ):
    """
    Helper for getting data: resolve ids, then fetch all profiles.
    """
    result = self.getUserGroupIds( userName, userGroup )
    if not result[ 'OK' ]:
      return result
    userIds = result[ 'Value' ]
    return self.retrieveUserProfilesById( userIds )
  def retrieveAllUserVars( self, userName, userGroup, profileName ):
    """
    Helper for getting data: resolve ids, then fetch all vars of a profile.
    """
    result = self.getUserGroupIds( userName, userGroup )
    if not result[ 'OK' ]:
      return result
    userIds = result[ 'Value' ]
    return self.retrieveAllUserVarsById( userIds, profileName )
  def retrieveVarPerms( self, userName, userGroup, ownerName, ownerGroup, profileName, varName ):
    """Resolve requester and owner ids (owner is NOT auto-created), then
    fetch a variable's permission attributes."""
    result = self.getUserGroupIds( userName, userGroup )
    if not result[ 'OK' ]:
      return result
    userIds = result[ 'Value' ]
    # insertIfMissing=False: an unknown owner must not create DB rows
    result = self.getUserGroupIds( ownerName, ownerGroup, False )
    if not result[ 'OK' ]:
      return result
    ownerIds = result[ 'Value' ]
    return self.retrieveVarPermsById( userIds, ownerIds, profileName, varName )
  def setUserVarPerms( self, userName, userGroup, profileName, varName, perms ):
    """Resolve ids, then update a variable's permission attributes."""
    result = self.getUserGroupIds( userName, userGroup )
    if not result[ 'OK' ]:
      return result
    userIds = result[ 'Value' ]
    return self.setUserVarPermsById( userIds, profileName, varName, perms )
def storeVar( self, userName, userGroup, profileName, varName, data, perms = None ):
"""
Helper for setting data
"""
try:
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.storeVarByUserId( userIds, profileName, varName, data, perms = perms )
finally:
pass
def deleteVar( self, userName, userGroup, profileName, varName ):
"""
Helper for deleting data
"""
try:
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.deleteVarByUserId( userIds, profileName, varName )
finally:
pass
def __profilesCondGenerator( self, value, varType, initialValue = False ):
if type( value ) in types.StringTypes:
value = [ value ]
ids = []
if initialValue:
ids.append( initialValue )
for val in value:
if varType == 'user':
result = self.__getUserId( val, insertIfMissing = False )
elif varType == 'group':
result = self.__getGroupId( val, insertIfMissing = False )
else:
result = self.__getVOId( val, insertIfMissing = False )
if not result[ 'OK' ]:
continue
ids.append( result[ 'Value' ] )
if varType == 'user':
fieldName = 'UserId'
elif varType == 'group':
fieldName = 'GroupId'
else:
fieldName = 'VOId'
return "`up_ProfilesData`.%s in ( %s )" % ( fieldName, ", ".join( [ str( iD ) for iD in ids ] ) )
  def listVarsById( self, userIds, profileName, filterDict = None ):
    """List readable variables of a profile, joined with owner metadata.

    Returns rows of ( UserName, UserGroup, VO, VarName ).  filterDict may
    restrict by 'user', 'group' and/or 'vo' names (case-insensitive keys).
    """
    result = self._escapeString( profileName )
    if not result[ 'OK' ]:
      return result
    sqlProfileName = result[ 'Value' ]
    # Join conditions plus the read-access permission filter
    sqlCond = [ "`up_Users`.Id = `up_ProfilesData`.UserId",
                "`up_Groups`.Id = `up_ProfilesData`.GroupId",
                "`up_VOs`.Id = `up_ProfilesData`.VOId",
                self.__webProfileReadAccessDataCond( userIds, userIds, sqlProfileName ) ]
    if filterDict:
      # Normalize filter keys to lower case before matching
      fD = {}
      for k in filterDict:
        fD[ k.lower() ] = filterDict[ k ]
      filterDict = fD
      for k in ( 'user', 'group', 'vo' ):
        if k in filterDict:
          sqlCond.append( self.__profilesCondGenerator( filterDict[ k ], k ) )
    sqlVars2Get = [ "`up_Users`.UserName", "`up_Groups`.UserGroup", "`up_VOs`.VO", "`up_ProfilesData`.VarName" ]
    sqlQuery = "SELECT %s FROM `up_Users`, `up_Groups`, `up_VOs`, `up_ProfilesData` WHERE %s" % ( ", ".join( sqlVars2Get ),
                                                                                                  " AND ".join( sqlCond ) )
    return self._query( sqlQuery )
def listVars( self, userName, userGroup, profileName, filterDict = None ):
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.listVarsById( userIds, profileName, filterDict )
  def storeHashTagById( self, userIds, tagName, hashTag = False ):
    """
    Register a hash tag for ( user, group, vo ).  If no hashTag is given
    an md5 of ( time, ids, tagName ) is generated.  An existing tagName
    gets its hash overwritten.  Returns S_OK( hashTag ).
    """
    if not hashTag:
      hashTag = hashlib.md5()
      hashTag.update( "%s;%s;%s" % ( Time.dateTime(), userIds, tagName ) )
      hashTag = hashTag.hexdigest()
    result = self.insertFields( 'up_HashTags', [ 'UserId', 'GroupId', 'VOId', 'TagName', 'HashTag' ],
                                [ userIds[0], userIds[1], userIds[2], tagName, hashTag ] )
    if result[ 'OK' ]:
      return S_OK( hashTag )
    #If error and not duplicate -> real error
    if result[ 'Message' ].find( "Duplicate entry" ) == -1:
      return result
    # ( UserId, GroupId, TagName ) already present: refresh the hash only
    result = self.updateFields( 'up_HashTags', ['HashTag'], [hashTag], { 'UserId': userIds[0],
                                                                         'GroupId': userIds[1],
                                                                         'VOId': userIds[2],
                                                                         'TagName': tagName } )
    if not result[ 'OK' ]:
      return result
    return S_OK( hashTag )
def retrieveHashTagById( self, userIds, hashTag ):
"""
Get a data entry for a profile
"""
result = self.getFields( 'up_HashTags', ['TagName'], { 'UserId': userIds[0],
'GroupId': userIds[1],
'VOId': userIds[2],
'HashTag': hashTag } )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
if len( data ) > 0:
return S_OK( data[0][0] )
return S_ERROR( "No data for combo userId %s hashTag %s" % ( userIds, hashTag ) )
def retrieveAllHashTagsById( self, userIds ):
"""
Get a data entry for a profile
"""
result = self.getFields( 'up_HashTags', ['HashTag', 'TagName'], { 'UserId': userIds[0],
'GroupId': userIds[1],
'VOId': userIds[2] } )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
return S_OK( dict( data ) )
def storeHashTag( self, userName, userGroup, tagName, hashTag = False ):
"""
Helper for storing HASH
"""
try:
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.storeHashTagById( userIds, tagName, hashTag )
finally:
pass
def retrieveHashTag( self, userName, userGroup, hashTag ):
"""
Helper for retrieving HASH
"""
try:
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.retrieveHashTagById( userIds, hashTag )
finally:
pass
def retrieveAllHashTags( self, userName, userGroup ):
"""
Helper for retrieving HASH
"""
try:
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.retrieveAllHashTagsById( userIds )
finally:
pass
def getUserProfileNames( self, permission ):
"""
it returns the available profile names by not taking account the permission: ReadAccess and PublishAccess
"""
result = None
permissions = self.__parsePerms( permission, False )
if not permissions:
return S_OK()
condition = ",".join( [ "%s='%s'" % ( k, permissions[k] ) for k in permissions ] )
query = "SELECT distinct Profile from `up_ProfilesData` where %s" % condition
retVal = self._query( query )
if retVal['OK']:
result = S_OK( [i[0] for i in retVal['Value']] )
else:
result = retVal
return result
def testUserProfileDB():
  """ Some test cases: exercises table creation, storing/reading profile
      variables and hash tags against a local MySQL instance.
      Exits with status 1 on the first failed assertion.
  """
  # building up some fake CS values
  gConfig.setOptionValue( 'DIRAC/Setup', 'Test' )
  gConfig.setOptionValue( '/DIRAC/Setups/Test/Framework', 'Test' )
  host = '127.0.0.1'
  user = 'Dirac'
  pwd = 'Dirac'
  db = 'AccountingDB'
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/Host', host )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/DBName', db )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/User', user )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/Password', pwd )
  db = UserProfileDB()
  assert db._connect()['OK']
  # Fixture values used throughout the assertions below
  userName = 'testUser'
  userGroup = 'testGroup'
  profileName = 'testProfile'
  varName = 'testVar'
  tagName = 'testTag'
  hashTag = '237cadc4af90277e9524e6386e264630'
  data = 'testData'
  perms = 'USER'
  try:
    # Flip to True to drop and recreate all tables before the run
    if False:
      for tableName in db.tableDict.keys():
        result = db._update( 'DROP TABLE `%s`' % tableName )
        assert result['OK']
    gLogger.info( '\n Creating Table\n' )
    # Make sure it is there and it has been created for this test
    result = db._checkTable()
    assert result == {'OK': True, 'Value': None }
    result = db._checkTable()
    assert result == {'OK': True, 'Value': 0}
    gLogger.info( '\n Adding some data\n' )
    result = db.storeVar( userName, userGroup, profileName, varName, data, perms )
    assert result['OK']
    assert result['Value'] == 1
    gLogger.info( '\n Some queries\n' )
    result = db.getUserGroupIds( userName, userGroup )
    assert result['OK']
    assert result['Value'] == ( 1, 1, 1 )
    result = db.listVars( userName, userGroup, profileName )
    assert result['OK']
    assert result['Value'][0][3] == varName
    result = db.retrieveUserProfiles( userName, userGroup )
    assert result['OK']
    assert result['Value'] == { profileName: { varName: data } }
    result = db.storeHashTag( userName, userGroup, tagName, hashTag )
    assert result['OK']
    assert result['Value'] == hashTag
    result = db.retrieveAllHashTags( userName, userGroup )
    assert result['OK']
    assert result['Value'] == { hashTag: tagName }
    result = db.retrieveHashTag( userName, userGroup, hashTag )
    assert result['OK']
    assert result['Value'] == tagName
    gLogger.info( '\n OK\n' )
  except AssertionError:
    # Report the offending result (Py2 print statements) and bail out
    print 'ERROR ',
    if not result['OK']:
      print result['Message']
    else:
      print result
    sys.exit( 1 )
if __name__ == '__main__':
  from DIRAC.Core.Base import Script
  Script.parseCommandLine()
  gLogger.setLevel( 'VERBOSE' )
  # The test relies on assert statements, which are stripped when python
  # runs with optimization enabled -- refuse to run in that case.
  if 'PYTHONOPTIMIZE' in os.environ and os.environ['PYTHONOPTIMIZE']:
    # Typo fixed in the log message ("pyhthon" -> "python")
    gLogger.info( 'Unset python optimization "PYTHONOPTIMIZE"' )
    sys.exit( 0 )
  testUserProfileDB()
<|file_name|>ServerRequestsTest.java<|end_file_name|><|fim▁begin|>package com.opentrain.app;
import android.test.InstrumentationTestCase;
import com.opentrain.app.network.NetowrkManager;
import java.util.concurrent.CountDownLatch;
/**
* Created by noam on 27/07/15.
*/
public class ServerRequestsTest extends InstrumentationTestCase {
CountDownLatch countDownLatch = new CountDownLatch(1);
public void test1GetMapFromServer() throws Throwable {
NetowrkManager.getInstance().getMapFromServer(new NetowrkManager.RequestListener() {
@Override
public void onResponse(Object response) {
assertNotNull(response);
countDownLatch.countDown();
}
@Override
public void onError() {
fail();
countDownLatch.countDown();
}
});
countDownLatch.await();
}
// public void test2AddMappingToServer() throws Throwable {
//
// Set<String> bssids = new HashSet<>();
// bssids.add("b4:c7:99:0b:aa:c1");<|fim▁hole|>// Station station = new Station(bssids, System.currentTimeMillis());
//
// NetowrkManager.getInstance().addMappingToServer(station.getPostParam(stationName), new NetowrkManager.RequestListener() {
// @Override
// public void onResponse(Object response) {
//
// assertNotNull(response);
// countDownLatch.countDown();
// }
//
// @Override
// public void onError() {
// fail();
// countDownLatch.countDown();
// }
// });
//
// countDownLatch.await();
// }
}<|fim▁end|> | // bssids.add("b4:c7:99:0b:d4:90");
// String stationName = "StationNameTest";
// |
<|file_name|>CharGen.py<|end_file_name|><|fim▁begin|># -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003-2005 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#Character Generation
###################################################
import GemRB
from GUIDefines import *
from ie_stats import *
from ie_spells import LS_MEMO
import GUICommon
import Spellbook
import CommonTables
import LUSkillsSelection
CharGenWindow = 0
CharGenState = 0
TextArea = 0
PortraitButton = 0
AcceptButton = 0
GenderButton = 0
GenderWindow = 0
GenderTextArea = 0
GenderDoneButton = 0
Portrait = 0
PortraitsTable = 0
PortraitPortraitButton = 0
RaceButton = 0
RaceWindow = 0
RaceTextArea = 0
RaceDoneButton = 0
ClassButton = 0
ClassWindow = 0
ClassTextArea = 0
ClassDoneButton = 0
ClassMultiWindow = 0
ClassMultiTextArea = 0
ClassMultiDoneButton = 0
KitTable = 0
KitWindow = 0
KitTextArea = 0
KitDoneButton = 0
AlignmentButton = 0
AlignmentWindow = 0
AlignmentTextArea = 0
AlignmentDoneButton = 0
AbilitiesButton = 0
AbilitiesWindow = 0
AbilitiesTable = 0
AbilitiesRaceAddTable = 0
AbilitiesRaceReqTable = 0
AbilitiesClassReqTable = 0
AbilitiesMinimum = 0
AbilitiesMaximum = 0
AbilitiesModifier = 0
AbilitiesTextArea = 0
AbilitiesRecallButton = 0
AbilitiesDoneButton = 0
SkillsButton = 0
SkillsWindow = 0
SkillsTable = 0
SkillsTextArea = 0
SkillsDoneButton = 0
SkillsPointsLeft = 0
SkillsState = 0
RacialEnemyButton = 0
RacialEnemyWindow = 0
RacialEnemyTable = 0
RacialEnemyTextArea = 0
RacialEnemyDoneButton = 0
ProficienciesWindow = 0
ProficienciesTable = 0
ProfsMaxTable = 0
ProficienciesTextArea = 0
ProficienciesDoneButton = 0
ProficienciesPointsLeft = 0
MageSpellsWindow = 0
MageSpellsTextArea = 0
MageSpellsDoneButton = 0
MageSpellsSelectPointsLeft = 0
MageMemorizeWindow = 0
MageMemorizeTextArea = 0
MageMemorizeDoneButton = 0
MageMemorizePointsLeft = 0
PriestMemorizeWindow = 0
PriestMemorizeTextArea = 0
PriestMemorizeDoneButton = 0
PriestMemorizePointsLeft = 0
AppearanceButton = 0
AppearanceWindow = 0
AppearanceTable = 0
AppearanceAvatarButton = 0
AppearanceHairButton = 0
AppearanceSkinButton = 0
AppearanceMajorButton = 0
AppearanceMinorButton = 0
HairColor = 0
SkinColor = 0
MajorColor = 0
MinorColor = 0
CharSoundWindow = 0
CharSoundTable = 0
CharSoundStrings = 0
BiographyButton = 0
BiographyWindow = 0
BiographyField = 0
NameButton = 0
NameWindow = 0
NameField = 0
NameDoneButton = 0
SoundIndex = 0
VerbalConstants = None
HasStrExtra = 0
MyChar = 0
ImportedChar = 0
def OnLoad():
	"""Entry point of character generation: build the main CG window,
	load the shared 2DA tables and wire up the step buttons.
	Only the Gender step starts enabled; the others unlock in order."""
	global CharGenWindow, CharGenState, TextArea, PortraitButton, AcceptButton
	global GenderButton, RaceButton, ClassButton, AlignmentButton
	global AbilitiesButton, SkillsButton, AppearanceButton, BiographyButton, NameButton
	global KitTable, ProficienciesTable, RacialEnemyTable
	global AbilitiesTable, SkillsTable, PortraitsTable
	global MyChar, ImportedChar
	# Rule tables shared by the later chargen steps
	KitTable = GemRB.LoadTable ("magesch")
	ProficienciesTable = GemRB.LoadTable ("weapprof")
	RacialEnemyTable = GemRB.LoadTable ("haterace")
	AbilitiesTable = GemRB.LoadTable ("ability")
	SkillsTable = GemRB.LoadTable ("skills")
	PortraitsTable = GemRB.LoadTable ("pictures")
	GemRB.LoadWindowPack ("GUICG", 640, 480)
	CharGenWindow = GemRB.LoadWindow (0)
	CharGenWindow.SetFrame ()
	CharGenState = 0
	MyChar = GemRB.GetVar ("Slot")
	ImportedChar = 0
	# Step buttons: only Gender is live at the start, the rest are
	# disabled until the previous step completes
	GenderButton = CharGenWindow.GetControl (0)
	GenderButton.SetState (IE_GUI_BUTTON_ENABLED)
	GenderButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	GenderButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, GenderPress)
	GenderButton.SetText (11956)
	RaceButton = CharGenWindow.GetControl (1)
	RaceButton.SetState (IE_GUI_BUTTON_DISABLED)
	RaceButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, RacePress)
	RaceButton.SetText (11957)
	ClassButton = CharGenWindow.GetControl (2)
	ClassButton.SetState (IE_GUI_BUTTON_DISABLED)
	ClassButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ClassPress)
	ClassButton.SetText (11959)
	AlignmentButton = CharGenWindow.GetControl (3)
	AlignmentButton.SetState (IE_GUI_BUTTON_DISABLED)
	AlignmentButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AlignmentPress)
	AlignmentButton.SetText (11958)
	AbilitiesButton = CharGenWindow.GetControl (4)
	AbilitiesButton.SetState (IE_GUI_BUTTON_DISABLED)
	AbilitiesButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AbilitiesPress)
	AbilitiesButton.SetText (11960)
	SkillsButton = CharGenWindow.GetControl (5)
	SkillsButton.SetState (IE_GUI_BUTTON_DISABLED)
	SkillsButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, SkillsPress)
	SkillsButton.SetText (11983)
	AppearanceButton = CharGenWindow.GetControl (6)
	AppearanceButton.SetState (IE_GUI_BUTTON_DISABLED)
	AppearanceButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AppearancePress)
	AppearanceButton.SetText (11961)
	BiographyButton = CharGenWindow.GetControl (16)
	BiographyButton.SetState (IE_GUI_BUTTON_DISABLED)
	BiographyButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, BiographyPress)
	BiographyButton.SetText (18003)
	NameButton = CharGenWindow.GetControl (7)
	NameButton.SetState (IE_GUI_BUTTON_DISABLED)
	NameButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, NamePress)
	NameButton.SetText (11963)
	# Navigation / meta controls
	BackButton = CharGenWindow.GetControl (11)
	BackButton.SetState (IE_GUI_BUTTON_ENABLED)
	BackButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, BackPress)
	BackButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	PortraitButton = CharGenWindow.GetControl (12)
	PortraitButton.SetFlags (IE_GUI_BUTTON_PICTURE|IE_GUI_BUTTON_NO_IMAGE, OP_SET)
	PortraitButton.SetState (IE_GUI_BUTTON_LOCKED)
	ImportButton = CharGenWindow.GetControl (13)
	ImportButton.SetState (IE_GUI_BUTTON_ENABLED)
	ImportButton.SetText (13955)
	ImportButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ImportPress)
	CancelButton = CharGenWindow.GetControl (15)
	CancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	CancelButton.SetText (13727)
	CancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CancelPress)
	AcceptButton = CharGenWindow.GetControl (8)
	AcceptButton.SetState (IE_GUI_BUTTON_DISABLED)
	AcceptButton.SetText (11962)
	AcceptButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AcceptPress)
	TextArea = CharGenWindow.GetControl (9)
	TextArea.SetText (16575)
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def BackPress():
	# Step one stage back in character generation: disable the current
	# stage's button, re-enable the previous one.  Backing out of stage 0
	# aborts chargen entirely via CancelPress.
	global CharGenWindow, CharGenState, SkillsState
	global GenderButton, RaceButton, ClassButton, AlignmentButton, AbilitiesButton, SkillsButton, AppearanceButton, BiographyButton, NameButton
	if CharGenState > 0:
		CharGenState = CharGenState - 1
	else:
		# already at the first stage: going back cancels chargen
		CancelPress()
		return
	if CharGenState > 6:
		# clamp: the biography/name stages all fall back to stage 6,
		# and any partially entered name is discarded
		CharGenState = 6
		GemRB.SetToken ("CHARNAME","")
	# each branch undoes one stage; DEFAULT flag follows the active button
	if CharGenState == 0:
		RaceButton.SetState (IE_GUI_BUTTON_DISABLED)
		RaceButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
		GenderButton.SetState (IE_GUI_BUTTON_ENABLED)
		GenderButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	elif CharGenState == 1:
		ClassButton.SetState (IE_GUI_BUTTON_DISABLED)
		ClassButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
		RaceButton.SetState (IE_GUI_BUTTON_ENABLED)
		RaceButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	elif CharGenState == 2:
		AlignmentButton.SetState (IE_GUI_BUTTON_DISABLED)
		AlignmentButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
		ClassButton.SetState (IE_GUI_BUTTON_ENABLED)
		ClassButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	elif CharGenState == 3:
		AbilitiesButton.SetState (IE_GUI_BUTTON_DISABLED)
		AbilitiesButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
		AlignmentButton.SetState (IE_GUI_BUTTON_ENABLED)
		AlignmentButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	elif CharGenState == 4:
		SkillsButton.SetState (IE_GUI_BUTTON_DISABLED)
		SkillsButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
		AbilitiesButton.SetState (IE_GUI_BUTTON_ENABLED)
		AbilitiesButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	elif CharGenState == 5:
		AppearanceButton.SetState (IE_GUI_BUTTON_DISABLED)
		SkillsButton.SetState (IE_GUI_BUTTON_ENABLED)
		SkillsButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
		# restart the skills sub-sequence from its first page
		SkillsState = 0
	elif CharGenState == 6:
		NameButton.SetState (IE_GUI_BUTTON_DISABLED)
		BiographyButton.SetState (IE_GUI_BUTTON_DISABLED)
		AppearanceButton.SetState (IE_GUI_BUTTON_ENABLED)
		AcceptButton.SetState (IE_GUI_BUTTON_DISABLED)
	# refresh the summary text for the reduced stage
	SetCharacterDescription()
	return
def CancelPress():
	"""Abort character generation and return to the party formation screen."""
	global CharGenWindow
	if CharGenWindow:
		CharGenWindow.Unload ()
	# recreate an empty player in this slot so the aborted character leaves no state
	GemRB.CreatePlayer ("", MyChar | 0x8000 )
	GemRB.SetNextScript ("PartyFormation")
	return
def AcceptPress():
	# Finalize the generated character: learn/memorize the selected spells,
	# apply class abilities and skills, roll starting reputation and gold,
	# apply colors/portrait, give a default weapon, then leave chargen.
	#mage spells
	Kit = GemRB.GetPlayerStat (MyChar, IE_KIT)
	KitIndex = KitTable.FindValue (3, Kit)
	ClassName = GUICommon.GetClassRowName (MyChar)
	t = GemRB.GetPlayerStat (MyChar, IE_ALIGNMENT)
	TableName = CommonTables.ClassSkills.GetValue (ClassName, "MAGESPELL", GTV_STR)
	if TableName != "*":
		#todo: set up ALL spell levels not just level 1
		Spellbook.SetupSpellLevels (MyChar, TableName, IE_SPELL_TYPE_WIZARD, 1)
		Learnable = Spellbook.GetLearnableMageSpells (KitIndex, t, 1)
		# MageSpellBook / MageMemorized are bitmasks over the Learnable list
		SpellBook = GemRB.GetVar ("MageSpellBook")
		MemoBook = GemRB.GetVar ("MageMemorized")
		j=1
		for i in range (len(Learnable) ):
			if SpellBook & j:
				if MemoBook & j:
					memorize = LS_MEMO
				else:
					memorize = 0
				GemRB.LearnSpell (MyChar, Learnable[i], memorize)
			j=j<<1
	#priest spells
	TableName = CommonTables.ClassSkills.GetValue (ClassName, "CLERICSPELL", GTV_STR)
	# druids and rangers have a column of their own
	if TableName == "*":
		TableName = CommonTables.ClassSkills.GetValue (ClassName, "DRUIDSPELL", GTV_STR)
	if TableName != "*":
		# NOTE(review): this ClassFlag mapping is the opposite of the one in
		# SetCharacterDescription — confirm which one GetLearnablePriestSpells expects
		if TableName == "MXSPLPRS" or TableName == "MXSPLPAL":
			ClassFlag = 0x8000
		elif TableName == "MXSPLDRU":
			#there is no separate druid table, falling back to priest
			TableName = "MXSPLPRS"
			ClassFlag = 0x4000
		elif TableName == "MXSPLRAN":
			ClassFlag = 0x4000
		else:
			ClassFlag = 0
		Spellbook.SetupSpellLevels (MyChar, TableName, IE_SPELL_TYPE_PRIEST, 1)
		Learnable = Spellbook.GetLearnablePriestSpells (ClassFlag, t, 1)
		PriestMemorized = GemRB.GetVar ("PriestMemorized")
		# PriestMemorized holds a single set bit; find its 1-based position
		# (this loops forever if more than one bit is set — assumed impossible)
		j = 1
		while (PriestMemorized and PriestMemorized != 1<<(j-1)):
			j = j + 1
		for i in range (len(Learnable) ):
			GemRB.LearnSpell (MyChar, Learnable[i], 0)
		# memorize the chosen spell; presumably index j corresponds to the set
		# bit found above — TODO confirm the off-by-one against MemorizeSpell
		GemRB.MemorizeSpell (MyChar, IE_SPELL_TYPE_PRIEST, 0, j, 1)
	# apply class/kit abilities
	GUICommon.ResolveClassAbilities (MyChar, ClassName)
	# save all the skills
	LUSkillsSelection.SkillsSave (MyChar)
	# starting reputation comes from repstart.2da, keyed by alignment, scaled by 10
	TmpTable = GemRB.LoadTable ("repstart")
	t = CommonTables.Aligns.FindValue (3, t)
	t = TmpTable.GetValue (t, 0) * 10
	GemRB.SetPlayerStat (MyChar, IE_REPUTATION, t)
	# set the party rep if this in the main char
	if MyChar == 1:
		GemRB.GameSetReputation (t)
	print "Reputation", t
	# starting gold: roll a d(b)'s + c, times d, plus per-level bonus e
	TmpTable = GemRB.LoadTable ("strtgold")
	a = TmpTable.GetValue (ClassName, "ROLLS") #number of dice
	b = TmpTable.GetValue (ClassName, "SIDES") #size
	c = TmpTable.GetValue (ClassName, "MODIFIER") #adjustment
	d = TmpTable.GetValue (ClassName, "MULTIPLIER") #external multiplier
	e = TmpTable.GetValue (ClassName, "BONUS_PER_LEVEL") #level bonus rate (iwd only!)
	t = GemRB.GetPlayerStat (MyChar, IE_LEVEL)
	if t>1:
		e=e*(t-1)
	else:
		e=0
	t = GemRB.Roll (a,b,c)*d+e
	GemRB.SetPlayerStat (MyChar, IE_GOLD, t)
	GemRB.SetPlayerStat (MyChar, IE_EA, 2 )
	GemRB.SetPlayerName (MyChar, GemRB.GetToken ("CHARNAME"), 0)
	GemRB.SetToken ("CHARNAME","")
	# don't reset imported char's xp back to start
	if not ImportedChar:
		GemRB.SetPlayerStat (MyChar, IE_XP, CommonTables.ClassSkills.GetValue (ClassName, "STARTXP"))
	GUICommon.SetColorStat (MyChar, IE_SKIN_COLOR, GemRB.GetVar ("SkinColor") )
	GUICommon.SetColorStat (MyChar, IE_HAIR_COLOR, GemRB.GetVar ("HairColor") )
	GUICommon.SetColorStat (MyChar, IE_MAJOR_COLOR, GemRB.GetVar ("MajorColor") )
	GUICommon.SetColorStat (MyChar, IE_MINOR_COLOR, GemRB.GetVar ("MinorColor") )
	GUICommon.SetColorStat (MyChar, IE_METAL_COLOR, 0x1B )
	GUICommon.SetColorStat (MyChar, IE_LEATHER_COLOR, 0x16 )
	GUICommon.SetColorStat (MyChar, IE_ARMOR_COLOR, 0x17 )
	#does all the rest
	LargePortrait = GemRB.GetToken ("LargePortrait")
	SmallPortrait = GemRB.GetToken ("SmallPortrait")
	GemRB.FillPlayerInfo (MyChar, LargePortrait, SmallPortrait)
	#10 is a weapon slot (see slottype.2da row 10)
	GemRB.CreateItem (MyChar, "staf01", 10, 1, 0, 0)
	GemRB.SetEquippedQuickSlot (MyChar, 0)
	if CharGenWindow:
		CharGenWindow.Unload ()
	GemRB.SetNextScript ("PartyFormation")
	return
def SetCharacterDescription():
	# Rebuild the summary TextArea on the main chargen window.  Each section
	# is gated on CharGenState so only completed stages are shown.
	global CharGenWindow, TextArea, CharGenState, ClassFlag
	global MyChar
	TextArea.Clear()
	if CharGenState > 7:
		# NOTE(review): other gates use 0..5; confirm 7 is really the name stage
		TextArea.Append (1047)
		TextArea.Append (": ")
		TextArea.Append (GemRB.GetToken ("CHARNAME"))
		TextArea.Append ("\n")
	if CharGenState > 0:
		# gender line
		TextArea.Append (12135)
		TextArea.Append (": ")
		if GemRB.GetPlayerStat (MyChar, IE_SEX) == 1:
			TextArea.Append (1050)
		else:
			TextArea.Append (1051)
		TextArea.Append ("\n")
	if CharGenState > 2:
		# class line (printed before race, matching the original layout)
		ClassName = GUICommon.GetClassRowName (MyChar)
		TextArea.Append (12136)
		TextArea.Append (": ")
		#this is only mage school in iwd
		Kit = GemRB.GetPlayerStat (MyChar, IE_KIT)
		KitIndex = KitTable.FindValue (3, Kit)
		if KitIndex <= 0:
			ClassTitle = CommonTables.Classes.GetValue (ClassName, "CAP_REF")
		else:
			ClassTitle = KitTable.GetValue (KitIndex, 2)
		TextArea.Append (ClassTitle)
		TextArea.Append ("\n")
	if CharGenState > 1:
		# race line
		TextArea.Append (1048)
		TextArea.Append (": ")
		# NOTE(review): first assignment is dead — immediately overwritten below
		Race = GemRB.GetPlayerStat (MyChar, IE_RACE)
		Race = CommonTables.Races.FindValue (3, GemRB.GetPlayerStat (MyChar, IE_RACE) )
		TextArea.Append (CommonTables.Races.GetValue (Race, 2) )
		TextArea.Append ("\n")
	if CharGenState > 3:
		# alignment line
		TextArea.Append (1049)
		TextArea.Append (": ")
		Alignment = CommonTables.Aligns.FindValue (3, GemRB.GetPlayerStat(MyChar, IE_ALIGNMENT))
		TextArea.Append (CommonTables.Aligns.GetValue (Alignment, 2))
		TextArea.Append ("\n")
	if CharGenState > 4:
		# the six ability scores; 18/xx strength shown for warrior types
		strextra = GemRB.GetPlayerStat (MyChar, IE_STREXTRA)
		TextArea.Append ("\n")
		for i in range (6):
			TextArea.Append (AbilitiesTable.GetValue (i, 2))
			TextArea.Append (": " )
			StatID = AbilitiesTable.GetValue (i, 3)
			stat = GemRB.GetPlayerStat (MyChar, StatID)
			if (i == 0) and HasStrExtra and (stat==18):
				TextArea.Append (str(stat) + "/" + str(strextra) )
			else:
				TextArea.Append (str(stat) )
			TextArea.Append ("\n")
	if CharGenState > 5:
		# skills section: thieves list all four skills, druids/rangers list
		# nonzero skills plus racial enemy, bards list nonzero skills
		DruidSpell = CommonTables.ClassSkills.GetValue (ClassName, "DRUIDSPELL")
		PriestSpell = CommonTables.ClassSkills.GetValue (ClassName, "CLERICSPELL")
		MageSpell = CommonTables.ClassSkills.GetValue (ClassName, "MAGESPELL")
		IsBard = CommonTables.ClassSkills.GetValue (ClassName, "BARDSKILL")
		IsThief = CommonTables.ClassSkills.GetValue (ClassName, "THIEFSKILL")
		if IsThief!="*":
			TextArea.Append ("\n")
			TextArea.Append (8442)
			TextArea.Append ("\n")
			for i in range (4):
				TextArea.Append (SkillsTable.GetValue (i+2, 2))
				StatID = SkillsTable.GetValue (i+2, 3)
				TextArea.Append (": " )
				TextArea.Append (str(GemRB.GetPlayerStat (MyChar, StatID)) )
				TextArea.Append ("%\n")
		elif DruidSpell!="*":
			TextArea.Append ("\n")
			TextArea.Append (8442)
			TextArea.Append ("\n")
			for i in range (4):
				StatID = SkillsTable.GetValue (i+2, 3)
				Stat = GemRB.GetPlayerStat (MyChar, StatID)
				if Stat>0:
					TextArea.Append (SkillsTable.GetValue (i+2, 2))
					TextArea.Append (": " )
					TextArea.Append (str(Stat) )
					TextArea.Append ("%\n")
			TextArea.Append ("\n")
			TextArea.Append (15982)
			TextArea.Append (": " )
			RacialEnemy = GemRB.GetVar ("RacialEnemyIndex") + GemRB.GetVar ("RacialEnemy") - 1
			TextArea.Append (RacialEnemyTable.GetValue (RacialEnemy, 3) )
			TextArea.Append ("\n")
		elif IsBard!="*":
			TextArea.Append ("\n")
			TextArea.Append (8442)
			TextArea.Append ("\n")
			for i in range (4):
				StatID = SkillsTable.GetValue (i+2, 3)
				Stat = GemRB.GetPlayerStat (MyChar, StatID)
				if Stat>0:
					TextArea.Append (SkillsTable.GetValue (i+2, 2))
					TextArea.Append (": " )
					TextArea.Append (str(Stat) )
					TextArea.Append ("%\n")
		# weapon proficiencies: one '+' per point spent
		TextArea.Append ("\n")
		TextArea.Append (9466)
		TextArea.Append ("\n")
		for i in range (15):
			StatID = ProficienciesTable.GetValue (i, 0)
			ProficiencyValue = GemRB.GetPlayerStat (MyChar, StatID )
			if ProficiencyValue > 0:
				TextArea.Append (ProficienciesTable.GetValue (i, 3))
				TextArea.Append (" ")
				j = 0
				while j < ProficiencyValue:
					TextArea.Append ("+")
					j = j + 1
				TextArea.Append ("\n")
		# mage spells known; '+' marks memorized ones
		if MageSpell !="*":
			TextArea.Append ("\n")
			TextArea.Append (11027)
			TextArea.Append (":\n")
			t = GemRB.GetPlayerStat (MyChar, IE_ALIGNMENT)
			Learnable = Spellbook.GetLearnableMageSpells (GemRB.GetPlayerStat (MyChar, IE_KIT), t,1)
			MageSpellBook = GemRB.GetVar ("MageSpellBook")
			MageMemorized = GemRB.GetVar ("MageMemorized")
			for i in range (len(Learnable)):
				if (1 << i) & MageSpellBook:
					Spell = GemRB.GetSpell (Learnable[i])
					TextArea.Append (Spell["SpellName"])
					if (1 << i) & MageMemorized:
						TextArea.Append (" +")
					TextArea.Append ("\n")
		# priest spells: druids use their own column if the cleric one is empty
		if PriestSpell == "*":
			PriestSpell = DruidSpell
		if PriestSpell!="*":
			TextArea.Append ("\n")
			TextArea.Append (11028)
			TextArea.Append (":\n")
			t = GemRB.GetPlayerStat (MyChar, IE_ALIGNMENT)
			# NOTE(review): this flag mapping is the opposite of AcceptPress's —
			# confirm which one GetLearnablePriestSpells expects
			if PriestSpell == "MXSPLPRS" or PriestSpell == "MXSPLPAL":
				ClassFlag = 0x4000
			elif PriestSpell == "MXSPLDRU" or PriestSpell == "MXSPLRAN":
				ClassFlag = 0x8000
			else:
				ClassFlag = 0
			Learnable = Spellbook.GetLearnablePriestSpells( ClassFlag, t, 1)
			PriestMemorized = GemRB.GetVar ("PriestMemorized")
			for i in range (len(Learnable)):
				if (1 << i) & PriestMemorized:
					Spell = GemRB.GetSpell (Learnable[i])
					TextArea.Append (Spell["SpellName"])
					TextArea.Append (" +\n")
	return
# Gender Selection
def GenderPress():
	"""Open the gender selection window and create the blank player slot."""
	global CharGenWindow, GenderWindow, GenderDoneButton, GenderTextArea
	global MyChar
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	GenderWindow = GemRB.LoadWindow (1)
	GemRB.SetVar ("Gender", 0)
	# the character is created from the base template as soon as chargen starts
	GemRB.CreatePlayer ("charbase", MyChar | 0x8000 )
	BtnMale = GenderWindow.GetControl (2)
	BtnFemale = GenderWindow.GetControl (3)
	# both gender buttons share the same radio-button setup
	for Btn, Handler in ((BtnMale, MalePress), (BtnFemale, FemalePress)):
		Btn.SetState (IE_GUI_BUTTON_ENABLED)
		Btn.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
		Btn.SetEvent (IE_GUI_BUTTON_ON_PRESS, Handler)
	BtnMale.SetVarAssoc ("Gender", 1)
	BtnFemale.SetVarAssoc ("Gender", 2)
	GenderTextArea = GenderWindow.GetControl (5)
	GenderTextArea.SetText (17236)
	# Done stays disabled until a gender is picked
	GenderDoneButton = GenderWindow.GetControl (0)
	GenderDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	GenderDoneButton.SetText (11973)
	GenderDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, GenderDonePress)
	GenderDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	BtnCancel = GenderWindow.GetControl (6)
	BtnCancel.SetState (IE_GUI_BUTTON_ENABLED)
	BtnCancel.SetText (13727)
	BtnCancel.SetEvent (IE_GUI_BUTTON_ON_PRESS, GenderCancelPress)
	BtnCancel.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	GenderWindow.SetVisible (WINDOW_VISIBLE)
	return
def MalePress():
	"""Show the male description text and allow confirmation."""
	global GenderWindow, GenderDoneButton, GenderTextArea
	# a choice was made, so Done becomes usable
	GenderDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	GenderTextArea.SetText (13083)
	return
def FemalePress():
	"""Show the female description text and allow confirmation."""
	global GenderWindow, GenderDoneButton, GenderTextArea
	# a choice was made, so Done becomes usable
	GenderDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	GenderTextArea.SetText (13084)
	return
def GenderDonePress():
	"""Commit the chosen gender to the character and continue to portraits."""
	global CharGenWindow, GenderWindow
	global MyChar
	# read the selection before tearing the window down (it lives in a GemRB var)
	GemRB.SetPlayerStat (MyChar, IE_SEX, GemRB.GetVar ("Gender"))
	if GenderWindow:
		GenderWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	PortraitSelect()
	return
def GenderCancelPress():
	"""Abandon gender selection: reset the choice and return to the main window."""
	global CharGenWindow, GenderWindow
	global MyChar
	if GenderWindow:
		GenderWindow.Unload ()
	# wipe any partial choice
	GemRB.SetVar ("Gender", 0)
	GemRB.SetPlayerStat (MyChar, IE_SEX, 0)
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def PortraitSelect():
	# Open the stock portrait browser, preselecting the first portrait that
	# matches the character's gender.
	global CharGenWindow, PortraitWindow, Portrait, PortraitPortraitButton
	global MyChar
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	PortraitWindow = GemRB.LoadWindow (11)
	# this is not the correct one, but I don't know which is
	Portrait = 0
	PortraitPortraitButton = PortraitWindow.GetControl (1)
	PortraitPortraitButton.SetState (IE_GUI_BUTTON_DISABLED)
	PortraitPortraitButton.SetFlags (IE_GUI_BUTTON_PICTURE|IE_GUI_BUTTON_NO_IMAGE, OP_SET)
	# left/right arrows cycle through portraits of the matching gender
	PortraitLeftButton = PortraitWindow.GetControl (2)
	PortraitLeftButton.SetState (IE_GUI_BUTTON_ENABLED)
	PortraitLeftButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CGPortraitLeftPress)
	PortraitLeftButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
	PortraitRightButton = PortraitWindow.GetControl (3)
	PortraitRightButton.SetState (IE_GUI_BUTTON_ENABLED)
	PortraitRightButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CGPortraitRightPress)
	PortraitRightButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
	PortraitCustomButton = PortraitWindow.GetControl (6)
	PortraitCustomButton.SetState (IE_GUI_BUTTON_ENABLED)
	PortraitCustomButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, PortraitCustomPress)
	PortraitCustomButton.SetText (17545)
	PortraitDoneButton = PortraitWindow.GetControl (0)
	PortraitDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	PortraitDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CGPortraitDonePress)
	PortraitDoneButton.SetText (11973)
	PortraitDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	PortraitCancelButton = PortraitWindow.GetControl (5)
	PortraitCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	PortraitCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CGPortraitCancelPress)
	PortraitCancelButton.SetText (13727)
	PortraitCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	# advance to the first row whose gender column matches the character
	# (assumes at least one such row exists — otherwise this never terminates)
	while PortraitsTable.GetValue (Portrait, 0) != GemRB.GetPlayerStat (MyChar, IE_SEX):
		Portrait = Portrait + 1
	PortraitPortraitButton.SetPicture (PortraitsTable.GetRowName (Portrait) + "G")
	PortraitWindow.SetVisible (WINDOW_VISIBLE)
	return
def CGPortraitLeftPress():
	"""Cycle backwards to the previous portrait matching the character's sex."""
	global PortraitWindow, Portrait, PortraitPortraitButton
	global MyChar
	CharSex = GemRB.GetPlayerStat (MyChar, IE_SEX)
	Total = PortraitsTable.GetRowCount ()
	while True:
		# step back one row, wrapping from the first row to the last
		Portrait = (Portrait - 1) % Total
		if PortraitsTable.GetValue (Portrait, 0) == CharSex:
			break
	PortraitPortraitButton.SetPicture (PortraitsTable.GetRowName (Portrait) + "G")
	return
def CGPortraitRightPress():
	"""Cycle forwards to the next portrait matching the character's sex."""
	global PortraitWindow, Portrait, PortraitPortraitButton
	global MyChar
	CharSex = GemRB.GetPlayerStat (MyChar, IE_SEX)
	Total = PortraitsTable.GetRowCount ()
	while True:
		# step forward one row, wrapping from the last row back to the first
		Portrait = (Portrait + 1) % Total
		if PortraitsTable.GetValue (Portrait, 0) == CharSex:
			break
	PortraitPortraitButton.SetPicture (PortraitsTable.GetRowName (Portrait) + "G")
	return
def CustomDone():
	# Accept the custom portrait pair chosen in the picker, store both
	# resource names in tokens, and advance chargen to the race stage.
	global CharGenWindow, PortraitWindow
	global PortraitButton, GenderButton, RaceButton
	global CharGenState, Portrait
	Window = CustomWindow
	# list 2 holds the small portraits, list 1 the large ones
	PortraitName = PortraitList2.QueryText ()
	GemRB.SetToken ("SmallPortrait", PortraitName)
	PortraitName = PortraitList1.QueryText ()
	GemRB.SetToken ("LargePortrait", PortraitName)
	if Window:
		Window.Unload ()
	if PortraitWindow:
		PortraitWindow.Unload ()
	# show the large portrait on the main window (PortraitName still holds it)
	PortraitButton.SetPicture(PortraitName)
	# gender stage done, race stage next
	GenderButton.SetState (IE_GUI_BUTTON_DISABLED)
	GenderButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	RaceButton.SetState (IE_GUI_BUTTON_ENABLED)
	RaceButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	CharGenState = 1
	# -1 marks that a custom (non-table) portrait is in use
	Portrait = -1
	SetCharacterDescription()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def CustomAbort():
	"""Dismiss the custom portrait picker without saving anything."""
	if CustomWindow:
		CustomWindow.Unload ()
	return
def CGLargeCustomPortrait():
	# Selection handler for the large-portrait list in the custom picker:
	# preview the picture and enable Done once both lists have a selection.
	Window = CustomWindow
	Portrait = PortraitList1.QueryText ()
	#small hack
	# Row1 == RowCount1 is the sentinel for "nothing selected yet"
	if GemRB.GetVar ("Row1") == RowCount1:
		return
	Label = Window.GetControl (0x10000007)
	Label.SetText (Portrait)
	Button = Window.GetControl (6)
	if Portrait=="":
		# fall back to the placeholder image and keep Done locked
		Portrait = "NOPORTMD"
		Button.SetState (IE_GUI_BUTTON_DISABLED)
	else:
		# Done only unlocks when the small portrait is also chosen
		if PortraitList2.QueryText ()!="":
			Button.SetState (IE_GUI_BUTTON_ENABLED)
	Button = Window.GetControl (0)
	Button.SetPicture (Portrait, "NOPORTMD")
	return
def CGSmallCustomPortrait():
	# Selection handler for the small-portrait list in the custom picker:
	# preview the picture and enable Done once both lists have a selection.
	Window = CustomWindow
	Portrait = PortraitList2.QueryText ()
	#small hack
	# Row2 == RowCount2 is the sentinel for "nothing selected yet"
	if GemRB.GetVar ("Row2") == RowCount2:
		return
	Label = Window.GetControl (0x10000008)
	Label.SetText (Portrait)
	Button = Window.GetControl (6)
	if Portrait=="":
		# fall back to the placeholder image and keep Done locked
		Portrait = "NOPORTSM"
		Button.SetState (IE_GUI_BUTTON_DISABLED)
	else:
		# Done only unlocks when the large portrait is also chosen
		if PortraitList1.QueryText ()!="":
			Button.SetState (IE_GUI_BUTTON_ENABLED)
	Button = Window.GetControl (1)
	Button.SetPicture (Portrait, "NOPORTSM")
	return
def PortraitCustomPress():
	# Open the custom portrait picker: two resource lists (large and small
	# portraits) plus previews seeded from the currently browsed stock portrait.
	global PortraitList1, PortraitList2
	global RowCount1, RowCount2
	global CustomWindow
	CustomWindow = Window = GemRB.LoadWindow (18)
	PortraitList1 = Window.GetControl (2)
	RowCount1 = PortraitList1.ListResources (CHR_PORTRAITS, 1)
	PortraitList1.SetEvent (IE_GUI_TEXTAREA_ON_SELECT, CGLargeCustomPortrait)
	# start at the "no selection" sentinel (see CGLargeCustomPortrait)
	GemRB.SetVar ("Row1", RowCount1)
	PortraitList1.SetVarAssoc ("Row1",RowCount1)
	PortraitList2 = Window.GetControl (4)
	RowCount2 = PortraitList2.ListResources (CHR_PORTRAITS, 0)
	PortraitList2.SetEvent (IE_GUI_TEXTAREA_ON_SELECT, CGSmallCustomPortrait)
	GemRB.SetVar ("Row2", RowCount2)
	PortraitList2.SetVarAssoc ("Row2",RowCount2)
	# Done is locked until both lists have a selection
	Button = Window.GetControl (6)
	Button.SetText (11973)
	Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, CustomDone)
	Button.SetState (IE_GUI_BUTTON_DISABLED)
	Button = Window.GetControl (7)
	Button.SetText (13727)
	Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, CustomAbort)
	# seed the previews from the stock portrait currently being browsed
	Button = Window.GetControl (0)
	PortraitName = PortraitsTable.GetRowName (Portrait)+"L"
	Button.SetPicture (PortraitName, "NOPORTMD")
	Button.SetState (IE_GUI_BUTTON_LOCKED)
	Button = Window.GetControl (1)
	PortraitName = PortraitsTable.GetRowName (Portrait)+"S"
	Button.SetPicture (PortraitName, "NOPORTSM")
	Button.SetState (IE_GUI_BUTTON_LOCKED)
	Window.ShowModal (MODAL_SHADOW_NONE)
	return
def CGPortraitDonePress():
	"""Accept the highlighted stock portrait and advance to race selection."""
	global CharGenWindow, PortraitWindow, PortraitButton, GenderButton, RaceButton
	global CharGenState, Portrait
	RowName = PortraitsTable.GetRowName (Portrait)
	# the table row name plus a size suffix gives the two portrait resources
	GemRB.SetToken ("SmallPortrait", RowName + "S")
	GemRB.SetToken ("LargePortrait", RowName + "L")
	PortraitButton.SetPicture (RowName + "L")
	# gender stage done, race stage next
	GenderButton.SetState (IE_GUI_BUTTON_DISABLED)
	GenderButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	RaceButton.SetState (IE_GUI_BUTTON_ENABLED)
	RaceButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	CharGenState = 1
	SetCharacterDescription()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	if PortraitWindow:
		PortraitWindow.Unload ()
	return
def CGPortraitCancelPress():
	"""Leave portrait selection unchanged and return to the main window."""
	global CharGenWindow, PortraitWindow
	if PortraitWindow:
		PortraitWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
# Race Selection
def RacePress():
	"""Open the race selection window.

	Configures the six race radio buttons (controls 2..7), the description
	TextArea, and the Done/Cancel buttons; Done stays disabled until a race
	is picked (see RaceSelectPress).
	"""
	global CharGenWindow, RaceWindow, RaceDoneButton, RaceTextArea
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	RaceWindow = GemRB.LoadWindow (8)
	GemRB.SetVar ("Race", 0)
	# one pass over the race buttons; the original looped twice over the
	# same controls for no reason — each button's setup is independent
	for i in range (2, 8):
		RaceSelectButton = RaceWindow.GetControl (i)
		RaceSelectButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
		RaceSelectButton.SetState (IE_GUI_BUTTON_ENABLED)
		RaceSelectButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, RaceSelectPress)
		RaceSelectButton.SetText (CommonTables.Races.GetValue (i - 2, 0))
		RaceSelectButton.SetVarAssoc ("Race", i - 1)
	RaceTextArea = RaceWindow.GetControl (8)
	RaceTextArea.SetText (17237)
	# Done is locked until a race is selected
	RaceDoneButton = RaceWindow.GetControl (0)
	RaceDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	RaceDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, RaceDonePress)
	RaceDoneButton.SetText (11973)
	RaceDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	RaceCancelButton = RaceWindow.GetControl (10)
	RaceCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	RaceCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, RaceCancelPress)
	RaceCancelButton.SetText (13727)
	RaceCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	RaceWindow.SetVisible (WINDOW_VISIBLE)
	return
def RaceSelectPress():
	"""Display the description of the highlighted race and unlock Done."""
	global RaceWindow, RaceDoneButton, RaceTextArea
	RaceIndex = GemRB.GetVar ("Race") - 1
	RaceTextArea.SetText (CommonTables.Races.GetValue (RaceIndex, 1) )
	RaceDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	return
def RaceDonePress():
	"""Store the chosen race on the character and advance to class selection."""
	global CharGenWindow, CharGenState, RaceWindow, RaceButton, ClassButton
	if RaceWindow:
		RaceWindow.Unload ()
	# swap stage buttons: race is finished, class is next
	RaceButton.SetState (IE_GUI_BUTTON_DISABLED)
	RaceButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	ClassButton.SetState (IE_GUI_BUTTON_ENABLED)
	ClassButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	# translate the 1-based menu choice into the engine race ID (column 3)
	RaceIndex = GemRB.GetVar ("Race") - 1
	RaceID = CommonTables.Races.GetValue (RaceIndex, 3)
	GemRB.SetPlayerStat (MyChar, IE_RACE, RaceID)
	CharGenState = 2
	SetCharacterDescription()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def RaceCancelPress():
	"""Close race selection without committing anything."""
	global CharGenWindow, RaceWindow
	if RaceWindow:
		RaceWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
# Class Selection
def ClassPress():
	# Open the class selection window.  Single classes get one radio button
	# each (controls 2..9, skipping multiclass rows); multiclass combos are
	# reached through a separate button, mage schools through another.
	global CharGenWindow, ClassWindow, ClassTextArea, ClassDoneButton
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	ClassWindow = GemRB.LoadWindow (2)
	ClassCount = CommonTables.Classes.GetRowCount ()
	# the character's race limits which classes are allowed
	RaceRow = CommonTables.Races.FindValue (3, GemRB.GetPlayerStat (MyChar, IE_RACE) )
	RaceName = CommonTables.Races.GetRowName (RaceRow)
	GemRB.SetVar ("Class", 0)
	GemRB.SetVar ("Class Kit", 0)
	GemRB.SetVar ("MAGESCHOOL", 0)
	for i in range (2, 10):
		ClassSelectButton = ClassWindow.GetControl (i)
		ClassSelectButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_SET)
	# j walks the button controls; only non-multiclass rows consume a button
	HasMulti = 0
	j = 2
	for i in range (ClassCount):
		ClassRowName = CommonTables.Classes.GetRowName (i)
		Allowed = CommonTables.Classes.GetValue (ClassRowName, RaceName)
		if CommonTables.Classes.GetValue (ClassRowName, "MULTI"):
			# multiclass rows get no button here; just remember whether
			# any combo is legal for this race
			if Allowed != 0:
				HasMulti = 1
		else:
			ClassSelectButton = ClassWindow.GetControl (j)
			j = j + 1
			if Allowed > 0:
				ClassSelectButton.SetState (IE_GUI_BUTTON_ENABLED)
			else:
				ClassSelectButton.SetState (IE_GUI_BUTTON_DISABLED)
			ClassSelectButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ClassSelectPress)
			ClassSelectButton.SetText (CommonTables.Classes.GetValue (ClassRowName, "NAME_REF"))
			# note: the var is the 1-based row index, not the button index
			ClassSelectButton.SetVarAssoc ("Class", i + 1)
	ClassMultiButton = ClassWindow.GetControl (10)
	if HasMulti == 0:
		ClassMultiButton.SetState (IE_GUI_BUTTON_DISABLED)
	else:
		ClassMultiButton.SetState (IE_GUI_BUTTON_ENABLED)
	ClassMultiButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ClassMultiPress)
	ClassMultiButton.SetText (11993)
	KitButton = ClassWindow.GetControl (11)
	#only the mage class has schools
	Allowed = CommonTables.Classes.GetValue ("MAGE", RaceName)
	if Allowed:
		KitButton.SetState (IE_GUI_BUTTON_ENABLED)
	else:
		KitButton.SetState (IE_GUI_BUTTON_DISABLED)
	KitButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, KitPress)
	KitButton.SetText (11994)
	ClassTextArea = ClassWindow.GetControl (13)
	ClassTextArea.SetText (17242)
	# Done is locked until a class is selected
	ClassDoneButton = ClassWindow.GetControl (0)
	ClassDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	ClassDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ClassDonePress)
	ClassDoneButton.SetText (11973)
	ClassDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	ClassCancelButton = ClassWindow.GetControl (14)
	ClassCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	ClassCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ClassCancelPress)
	ClassCancelButton.SetText (13727)
	ClassCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	ClassWindow.SetVisible (WINDOW_VISIBLE)
	return
def ClassSelectPress():
	"""Display the description of the highlighted class and unlock Done."""
	global ClassWindow, ClassTextArea, ClassDoneButton
	ClassIndex = GemRB.GetVar ("Class") - 1
	RowName = GUICommon.GetClassRowName (ClassIndex, "index")
	ClassTextArea.SetText (CommonTables.Classes.GetValue (RowName, "DESC_REF"))
	ClassDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	return
def ClassMultiPress():
	# Open the multiclass selection window; only rows flagged MULTI in the
	# classes table get a button here, gated by the character's race.
	global ClassWindow, ClassMultiWindow, ClassMultiTextArea, ClassMultiDoneButton
	ClassWindow.SetVisible (WINDOW_INVISIBLE)
	ClassMultiWindow = GemRB.LoadWindow (10)
	ClassCount = CommonTables.Classes.GetRowCount ()
	RaceRow = CommonTables.Races.FindValue (3, GemRB.GetPlayerStat (MyChar, IE_RACE) )
	RaceName = CommonTables.Races.GetRowName (RaceRow)
	print "Multi racename:", RaceName
	for i in range (2, 10):
		ClassMultiSelectButton = ClassMultiWindow.GetControl (i)
		ClassMultiSelectButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_SET)
	# j walks the button controls; only MULTI rows consume a button
	j = 2
	for i in range (ClassCount):
		ClassName = CommonTables.Classes.GetRowName (i)
		if (CommonTables.Classes.GetValue (ClassName, "MULTI") > 0):
			ClassMultiSelectButton = ClassMultiWindow.GetControl (j)
			j = j + 1
			if (CommonTables.Classes.GetValue (ClassName, RaceName) > 0):
				ClassMultiSelectButton.SetState (IE_GUI_BUTTON_ENABLED)
			else:
				ClassMultiSelectButton.SetState (IE_GUI_BUTTON_DISABLED)
			ClassMultiSelectButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ClassMultiSelectPress)
			ClassMultiSelectButton.SetText (CommonTables.Classes.GetValue (ClassName, "NAME_REF"))
			# the var is the 1-based row index into the classes table
			ClassMultiSelectButton.SetVarAssoc ("Class", i + 1)
	ClassMultiTextArea = ClassMultiWindow.GetControl (12)
	ClassMultiTextArea.SetText (17244)
	# Done is locked until a combination is selected
	ClassMultiDoneButton = ClassMultiWindow.GetControl (0)
	ClassMultiDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	ClassMultiDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ClassMultiDonePress)
	ClassMultiDoneButton.SetText (11973)
	ClassMultiDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	ClassMultiCancelButton = ClassMultiWindow.GetControl (14)
	ClassMultiCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	ClassMultiCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ClassMultiCancelPress)
	ClassMultiCancelButton.SetText (13727)
	ClassMultiCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	ClassMultiWindow.SetVisible (WINDOW_VISIBLE)
	return
def ClassMultiSelectPress():
	"""Display the description of the highlighted multiclass and unlock Done."""
	global ClassMultiWindow, ClassMultiTextArea, ClassMultiDoneButton
	ClassIndex = GemRB.GetVar ("Class") - 1
	RowName = GUICommon.GetClassRowName (ClassIndex, "index")
	ClassMultiTextArea.SetText (CommonTables.Classes.GetValue (RowName, "DESC_REF"))
	ClassMultiDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	return
def ClassMultiDonePress():
	"""Close the multiclass window and finish class selection."""
	global ClassMultiWindow
	if ClassMultiWindow:
		ClassMultiWindow.Unload ()
	# the selection is committed by the shared class-done handler
	ClassDonePress()
	return
def ClassMultiCancelPress():
	"""Back out of multiclass selection to the single-class window."""
	global ClassWindow, ClassMultiWindow
	if ClassMultiWindow:
		ClassMultiWindow.Unload ()
	ClassWindow.SetVisible (WINDOW_VISIBLE)
	return
def KitPress():
	"""Open the mage school (specialization) picker."""
	global ClassWindow, KitWindow, KitTextArea, KitDoneButton
	ClassWindow.SetVisible (WINDOW_INVISIBLE)
	KitWindow = GemRB.LoadWindow (12)
	#only mage class (1) has schools. It is the sixth button
	GemRB.SetVar ("Class", 6)
	GemRB.SetVar ("Class Kit",0)
	GemRB.SetVar ("MAGESCHOOL",0)
	# eight school radio buttons, controls 2..9
	for idx in range (8):
		SchoolButton = KitWindow.GetControl (idx + 2)
		SchoolButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
		SchoolButton.SetText (KitTable.GetValue (idx + 1, 0) )
		SchoolButton.SetVarAssoc ("MAGESCHOOL", idx + 1)
		SchoolButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, KitSelectPress)
	KitTextArea = KitWindow.GetControl (11)
	KitTextArea.SetText (17245)
	# Done is locked until a school is picked
	KitDoneButton = KitWindow.GetControl (0)
	KitDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	KitDoneButton.SetText (11973)
	KitDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, KitDonePress)
	KitDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	CancelButton = KitWindow.GetControl (12)
	CancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	CancelButton.SetText (13727)
	CancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, KitCancelPress)
	CancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	KitWindow.SetVisible (WINDOW_VISIBLE)
	return
def KitSelectPress():
	"""Show the description of the selected mage school and unlock Done."""
	global KitWindow, KitTextArea
	School = GemRB.GetVar ("MAGESCHOOL")
	KitTextArea.SetText (KitTable.GetValue (School, 1))
	KitDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	return
def KitDonePress():
	"""Close the school window and finish class selection."""
	global KitWindow
	if KitWindow:
		KitWindow.Unload ()
	# the selection is committed by the shared class-done handler
	ClassDonePress()
	return
def KitCancelPress():
	"""Back out of school selection to the class window."""
	global ClassWindow, KitWindow
	if KitWindow:
		KitWindow.Unload ()
	ClassWindow.SetVisible (WINDOW_VISIBLE)
	return
def ClassDonePress():
	# Commit the selected class (and mage school kit, if any) to the
	# character, then advance chargen to the alignment stage.
	global CharGenWindow, CharGenState, ClassWindow, ClassButton, AlignmentButton
	global MyChar
	if ClassWindow:
		ClassWindow.Unload ()
	ClassButton.SetState (IE_GUI_BUTTON_DISABLED)
	ClassButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	AlignmentButton.SetState (IE_GUI_BUTTON_ENABLED)
	AlignmentButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	# map the 1-based menu choice to the engine class ID
	ClassName = GUICommon.GetClassRowName (GemRB.GetVar ("Class")-1, "index")
	Class = CommonTables.Classes.GetValue (ClassName, "ID")
	GemRB.SetPlayerStat (MyChar, IE_CLASS, Class)
	# kit comes from the chosen mage school (column 3 of the kit table)
	Kit = KitTable.GetValue (GemRB.GetVar ("MAGESCHOOL"), 3 )
	if (Kit == -1 ):
		# no valid school row: fall back to 0x4000 — presumably the
		# generalist/baseclass kit value; TODO confirm against kitlist
		Kit = 0x4000
	GemRB.SetPlayerStat (MyChar, IE_KIT, Kit)
	CharGenState = 3
	SetCharacterDescription()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def ClassCancelPress():
	"""Close class selection without committing anything."""
	global CharGenWindow, ClassWindow
	if ClassWindow:
		ClassWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
# Alignment Selection
def AlignmentPress():
	"""Open the alignment selection window.

	The nine alignment radio buttons (controls 2..10) are enabled only when
	the class's row in alignmnt.2da allows that alignment; Done unlocks once
	a choice is made (see AlignmentSelectPress).
	"""
	global CharGenWindow, AlignmentWindow, AlignmentTextArea, AlignmentDoneButton
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	AlignmentWindow = GemRB.LoadWindow (3)
	ClassAlignmentTable = GemRB.LoadTable ("alignmnt")
	ClassName = GUICommon.GetClassRowName (MyChar)
	GemRB.SetVar ("Alignment", 0)
	# one pass over the nine buttons; the original set the radio flag in a
	# separate loop over the same controls — each button is independent
	for i in range (9):
		AlignmentSelectButton = AlignmentWindow.GetControl (i + 2)
		AlignmentSelectButton.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
		if ClassAlignmentTable.GetValue (ClassName, CommonTables.Aligns.GetValue(i, 4)) == 0:
			AlignmentSelectButton.SetState (IE_GUI_BUTTON_DISABLED)
		else:
			AlignmentSelectButton.SetState (IE_GUI_BUTTON_ENABLED)
		AlignmentSelectButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AlignmentSelectPress)
		AlignmentSelectButton.SetText (CommonTables.Aligns.GetValue (i, 0))
		AlignmentSelectButton.SetVarAssoc ("Alignment", i + 1)
	AlignmentTextArea = AlignmentWindow.GetControl (11)
	AlignmentTextArea.SetText (9602)
	# Done is locked until an alignment is picked
	AlignmentDoneButton = AlignmentWindow.GetControl (0)
	AlignmentDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	AlignmentDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AlignmentDonePress)
	AlignmentDoneButton.SetText (11973)
	AlignmentDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	AlignmentCancelButton = AlignmentWindow.GetControl (13)
	AlignmentCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	AlignmentCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AlignmentCancelPress)
	AlignmentCancelButton.SetText (13727)
	AlignmentCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	AlignmentWindow.SetVisible (WINDOW_VISIBLE)
	return
def AlignmentSelectPress():
	"""Display the description of the highlighted alignment and unlock Done."""
	global AlignmentWindow, AlignmentTextArea, AlignmentDoneButton
	Choice = GemRB.GetVar ("Alignment") - 1
	AlignmentTextArea.SetText (CommonTables.Aligns.GetValue (Choice, 1))
	AlignmentDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	return
def AlignmentDonePress():
	"""Record the chosen alignment and move on to ability scores."""
	global CharGenWindow, CharGenState, AlignmentWindow, AlignmentButton, AbilitiesButton
	global MyChar
	if AlignmentWindow:
		AlignmentWindow.Unload ()
	# swap stage buttons: alignment finished, abilities next
	AlignmentButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	AlignmentButton.SetState (IE_GUI_BUTTON_DISABLED)
	AbilitiesButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	AbilitiesButton.SetState (IE_GUI_BUTTON_ENABLED)
	# translate the 1-based menu choice into the engine alignment value
	Choice = GemRB.GetVar ("Alignment") - 1
	GemRB.SetPlayerStat (MyChar, IE_ALIGNMENT, CommonTables.Aligns.GetValue (Choice, 3))
	CharGenState = 4
	SetCharacterDescription()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def AlignmentCancelPress():
	"""Close the alignment window without saving a choice."""
	global CharGenWindow, AlignmentWindow
	if AlignmentWindow:
		AlignmentWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
# Abilities Selection
def AbilitiesPress():
	"""Open the ability-score window and wire up all of its controls.

	Loads the race/class requirement tables, builds the six rows of
	label/plus/minus controls, and seeds the screen with an initial roll.
	"""
	global CharGenWindow, AbilitiesWindow
	global AbilitiesTextArea, AbilitiesRecallButton, AbilitiesDoneButton
	global AbilitiesRaceAddTable, AbilitiesRaceReqTable, AbilitiesClassReqTable
	global HasStrExtra
	# Allow click-and-hold repeat for the +/- buttons while this window is up.
	GemRB.SetRepeatClickFlags(GEM_RK_DISABLE, OP_NAND)
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	AbilitiesWindow = GemRB.LoadWindow (4)
	AbilitiesRaceAddTable = GemRB.LoadTable ("ABRACEAD")
	AbilitiesRaceReqTable = GemRB.LoadTable ("ABRACERQ")
	AbilitiesClassReqTable = GemRB.LoadTable ("ABCLASRQ")
	PointsLeftLabel = AbilitiesWindow.GetControl (0x10000002)
	PointsLeftLabel.SetUseRGB (1)
	ClassName = GUICommon.GetClassRowName (MyChar)
	# Warrior-type saves ("SAVEWAR") mark classes eligible for 18/xx strength.
	HasStrExtra = CommonTables.Classes.GetValue (ClassName, "SAVE") == "SAVEWAR"
	# One row per ability: name button (30+), plus (16+2i), minus (17+2i), value label.
	for i in range (6):
		AbilitiesLabelButton = AbilitiesWindow.GetControl (30 + i)
		AbilitiesLabelButton.SetState (IE_GUI_BUTTON_ENABLED)
		AbilitiesLabelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AbilitiesLabelPress)
		AbilitiesLabelButton.SetVarAssoc ("AbilityIndex", i + 1)
		AbilitiesPlusButton = AbilitiesWindow.GetControl (16 + i * 2)
		AbilitiesPlusButton.SetState (IE_GUI_BUTTON_ENABLED)
		AbilitiesPlusButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AbilitiesPlusPress)
		AbilitiesPlusButton.SetVarAssoc ("AbilityIndex", i + 1)
		AbilitiesMinusButton = AbilitiesWindow.GetControl (17 + i * 2)
		AbilitiesMinusButton.SetState (IE_GUI_BUTTON_ENABLED)
		AbilitiesMinusButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AbilitiesMinusPress)
		AbilitiesMinusButton.SetVarAssoc ("AbilityIndex", i + 1)
		AbilityLabel = AbilitiesWindow.GetControl (0x10000003 + i)
		AbilityLabel.SetUseRGB (1)
	AbilitiesStoreButton = AbilitiesWindow.GetControl (37)
	AbilitiesStoreButton.SetState (IE_GUI_BUTTON_ENABLED)
	AbilitiesStoreButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AbilitiesStorePress)
	AbilitiesStoreButton.SetText (17373)
	AbilitiesRecallButton = AbilitiesWindow.GetControl (38)
	# Recall stays disabled until a roll has been stored.
	AbilitiesRecallButton.SetState (IE_GUI_BUTTON_DISABLED)
	AbilitiesRecallButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AbilitiesRecallPress)
	AbilitiesRecallButton.SetText (17374)
	AbilitiesRerollButton = AbilitiesWindow.GetControl (2)
	AbilitiesRerollButton.SetState (IE_GUI_BUTTON_ENABLED)
	AbilitiesRerollButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AbilitiesRerollPress)
	AbilitiesRerollButton.SetText (11982)
	AbilitiesTextArea = AbilitiesWindow.GetControl (29)
	AbilitiesTextArea.SetText (17247)
	AbilitiesDoneButton = AbilitiesWindow.GetControl (0)
	AbilitiesDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	AbilitiesDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AbilitiesDonePress)
	AbilitiesDoneButton.SetText (11973)
	AbilitiesDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	AbilitiesCancelButton = AbilitiesWindow.GetControl (36)
	AbilitiesCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	AbilitiesCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AbilitiesCancelPress)
	AbilitiesCancelButton.SetText (13727)
	AbilitiesCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	# Populate the rows with an initial random roll before showing the window.
	AbilitiesRerollPress()
	AbilitiesWindow.SetVisible (WINDOW_VISIBLE)
	return
def AbilitiesCalcLimits(Index):
	"""Compute min/max/modifier for ability *Index* from race and class tables.

	Results are published through the module globals AbilitiesMinimum,
	AbilitiesMaximum and AbilitiesModifier.
	"""
	global AbilitiesRaceReqTable, AbilitiesRaceAddTable, AbilitiesClassReqTable
	global AbilitiesMinimum, AbilitiesMaximum, AbilitiesModifier
	Race = CommonTables.Races.FindValue (3, GemRB.GetPlayerStat (MyChar, IE_RACE))
	RaceName = CommonTables.Races.GetRowName (Race)
	Race = AbilitiesRaceReqTable.GetRowIndex (RaceName)
	# ABRACERQ stores min/max as adjacent column pairs per ability.
	AbilitiesMinimum = AbilitiesRaceReqTable.GetValue (Race, Index * 2)
	AbilitiesMaximum = AbilitiesRaceReqTable.GetValue (Race, Index * 2 + 1)
	AbilitiesModifier = AbilitiesRaceAddTable.GetValue (Race, Index)
	ClassName = GUICommon.GetClassRowName (MyChar)
	ClassIndex = AbilitiesClassReqTable.GetRowIndex (ClassName)
	# The class may impose a higher minimum than the race does.
	Min = AbilitiesClassReqTable.GetValue (ClassIndex, Index)
	if Min > 0 and AbilitiesMinimum < Min:
		AbilitiesMinimum = Min
	# Racial adjustment shifts both bounds.
	AbilitiesMinimum = AbilitiesMinimum + AbilitiesModifier
	AbilitiesMaximum = AbilitiesMaximum + AbilitiesModifier
	return
def AbilitiesLabelPress():
	"""Describe the clicked ability, substituting its min/max into the text."""
	global AbilitiesWindow, AbilitiesTextArea
	which = GemRB.GetVar ("AbilityIndex") - 1
	AbilitiesCalcLimits (which)
	GemRB.SetToken ("MINIMUM", str(AbilitiesMinimum))
	GemRB.SetToken ("MAXIMUM", str(AbilitiesMaximum))
	AbilitiesTextArea.SetText (AbilitiesTable.GetValue (which, 1))
def AbilitiesPlusPress():
	"""Spend one pool point to raise the selected ability, up to its maximum."""
	global AbilitiesWindow, AbilitiesTextArea
	global AbilitiesMinimum, AbilitiesMaximum
	Abidx = GemRB.GetVar ("AbilityIndex") - 1
	AbilitiesCalcLimits(Abidx)
	GemRB.SetToken ("MINIMUM", str(AbilitiesMinimum) )
	GemRB.SetToken ("MAXIMUM", str(AbilitiesMaximum) )
	AbilitiesTextArea.SetText (AbilitiesTable.GetValue (Abidx, 1) )
	# "Ability0" is the unspent point pool; "Ability1".."Ability6" are the scores.
	PointsLeft = GemRB.GetVar ("Ability0")
	Ability = GemRB.GetVar ("Ability" + str(Abidx + 1) )
	if PointsLeft > 0 and Ability < AbilitiesMaximum:
		PointsLeft = PointsLeft - 1
		GemRB.SetVar ("Ability0", PointsLeft)
		PointsLeftLabel = AbilitiesWindow.GetControl (0x10000002)
		PointsLeftLabel.SetText (str(PointsLeft) )
		Ability = Ability + 1
		GemRB.SetVar ("Ability" + str(Abidx + 1), Ability)
		Label = AbilitiesWindow.GetControl (0x10000003 + Abidx)
		StrExtra = GemRB.GetVar("StrExtra")
		# Strength row (Abidx 0) shows exceptional strength as 18/xx for warriors.
		if Abidx==0 and Ability==18 and HasStrExtra:
			Label.SetText("18/"+str(StrExtra) )
		else:
			Label.SetText(str(Ability) )
		# Done becomes available only once every pool point is spent.
		if PointsLeft == 0:
			AbilitiesDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	return
def AbilitiesMinusPress():
	"""Lower the selected ability (never below its minimum), refunding a point."""
	global AbilitiesWindow, AbilitiesTextArea
	global AbilitiesMinimum, AbilitiesMaximum
	Abidx = GemRB.GetVar ("AbilityIndex") - 1
	AbilitiesCalcLimits(Abidx)
	GemRB.SetToken ("MINIMUM", str(AbilitiesMinimum) )
	GemRB.SetToken ("MAXIMUM", str(AbilitiesMaximum) )
	AbilitiesTextArea.SetText (AbilitiesTable.GetValue (Abidx, 1) )
	PointsLeft = GemRB.GetVar ("Ability0")
	Ability = GemRB.GetVar ("Ability" + str(Abidx + 1) )
	if Ability > AbilitiesMinimum:
		Ability = Ability - 1
		GemRB.SetVar ("Ability" + str(Abidx + 1), Ability)
		Label = AbilitiesWindow.GetControl (0x10000003 + Abidx)
		StrExtra = GemRB.GetVar("StrExtra")
		# Strength row shows exceptional strength as 18/xx for warrior classes.
		if Abidx==0 and Ability==18 and HasStrExtra:
			Label.SetText("18/"+str(StrExtra) )
		else:
			Label.SetText(str(Ability) )
		# Return the point to the pool; spending is now incomplete again.
		PointsLeft = PointsLeft + 1
		GemRB.SetVar ("Ability0", PointsLeft)
		PointsLeftLabel = AbilitiesWindow.GetControl (0x10000002)
		PointsLeftLabel.SetText (str(PointsLeft) )
		AbilitiesDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	return
def AbilitiesStorePress():
	"""Snapshot the current roll (and exceptional strength) for later recall."""
	global AbilitiesWindow, AbilitiesRecallButton
	GemRB.SetVar ("StoredStrExtra", GemRB.GetVar ("StrExtra"))
	for slot in range (7):
		GemRB.SetVar ("Stored" + str(slot), GemRB.GetVar ("Ability" + str(slot)))
	AbilitiesRecallButton.SetState (IE_GUI_BUTTON_ENABLED)
def AbilitiesRecallPress():
	"""Restore the ability scores previously saved with the Store button.

	Copies Stored0..Stored6 back into Ability0..Ability6 (slot 0 is the
	unspent point pool), refreshes the on-screen labels and re-evaluates
	whether Done may be enabled.
	"""
	global AbilitiesWindow
	AbilitiesWindow.Invalidate ()
	e = GemRB.GetVar ("StoredStrExtra")
	GemRB.SetVar ("StrExtra", e)
	for i in range (7):
		v = GemRB.GetVar ("Stored" + str(i))
		GemRB.SetVar ("Ability" + str(i), v)
		Label = AbilitiesWindow.GetControl (0x10000002 + i)
		# Strength is "Ability1" (i == 1; see AbilitiesPlusPress) — show 18/xx
		# there.  BUGFIX: this used to test i == 0, which is the points-left
		# counter, so a stored 18 strength lost its exceptional display.
		if i == 1 and v == 18 and HasStrExtra:
			Label.SetText ("18/" + str(e))
		else:
			Label.SetText (str(v))
	# Done is only legal when the restored roll has no unspent points.
	PointsLeft = GemRB.GetVar ("Ability0")
	if PointsLeft == 0:
		AbilitiesDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	else:
		AbilitiesDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	return
def AbilitiesRerollPress():
	"""Roll a fresh set of ability scores, clamped to the race/class limits."""
	global AbilitiesWindow, AbilitiesMinimum, AbilitiesMaximum, AbilitiesModifier
	AbilitiesWindow.Invalidate ()
	# Rerolling leaves no bonus points to distribute.
	GemRB.SetVar ("Ability0", 0)
	PointsLeftLabel = AbilitiesWindow.GetControl (0x10000002)
	PointsLeftLabel.SetText ("0")
	# 3d5+3 yields 6..18, the same range as classic 3d6 with a higher floor.
	Dices = 3
	Sides = 5
	#roll strextra even when the current stat is not 18
	if HasStrExtra:
		e = GemRB.Roll (1,100,0)
	else:
		e = 0
	GemRB.SetVar("StrExtra", e)
	for i in range (6):
		AbilitiesCalcLimits(i)
		Value = GemRB.Roll (Dices, Sides, AbilitiesModifier+3)
		# Clamp the roll into the legal window for this race/class.
		if Value < AbilitiesMinimum:
			Value = AbilitiesMinimum
		if Value > AbilitiesMaximum:
			Value = AbilitiesMaximum
		GemRB.SetVar ("Ability" + str(i + 1), Value)
		Label = AbilitiesWindow.GetControl (0x10000003 + i)
		if i==0 and HasStrExtra and Value==18:
			Label.SetText("18/"+str(e) )
		else:
			Label.SetText(str(Value) )
	AbilitiesDoneButton.SetState(IE_GUI_BUTTON_ENABLED)
	return
def AbilitiesDonePress():
	"""Write the six ability scores to the character and open the skills step."""
	global CharGenWindow, CharGenState, AbilitiesWindow, AbilitiesButton, SkillsButton, SkillsState
	if AbilitiesWindow:
		AbilitiesWindow.Unload ()
	# Lock this step's button and pass the default highlight to Skills.
	AbilitiesButton.SetState (IE_GUI_BUTTON_DISABLED)
	AbilitiesButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	SkillsButton.SetState (IE_GUI_BUTTON_ENABLED)
	SkillsButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	Str = GemRB.GetVar ("Ability1")
	GemRB.SetPlayerStat (MyChar, IE_STR, Str)
	# Exceptional strength (18/xx) only applies at exactly 18 STR.
	if Str == 18:
		GemRB.SetPlayerStat (MyChar, IE_STREXTRA, GemRB.GetVar ("StrExtra"))
	else:
		GemRB.SetPlayerStat (MyChar, IE_STREXTRA, 0)
	GemRB.SetPlayerStat (MyChar, IE_DEX, GemRB.GetVar ("Ability2"))
	GemRB.SetPlayerStat (MyChar, IE_CON, GemRB.GetVar ("Ability3"))
	GemRB.SetPlayerStat (MyChar, IE_INT, GemRB.GetVar ("Ability4"))
	GemRB.SetPlayerStat (MyChar, IE_WIS, GemRB.GetVar ("Ability5"))
	GemRB.SetPlayerStat (MyChar, IE_CHR, GemRB.GetVar ("Ability6"))
	CharGenState = 5
	SkillsState = 0
	SetCharacterDescription()
	# Restore normal (non-repeating) click behaviour outside this window.
	GemRB.SetRepeatClickFlags(GEM_RK_DISABLE, OP_OR)
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def AbilitiesCancelPress():
	"""Abandon ability rolling and return to the main chargen screen."""
	global CharGenWindow, AbilitiesWindow
	if AbilitiesWindow:
		AbilitiesWindow.Unload ()
	# Turn click-repeat back off now that the +/- buttons are gone.
	GemRB.SetRepeatClickFlags(GEM_RK_DISABLE, OP_OR)
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
# Skills Selection
def SkillsPress():
	"""Drive the multi-stage skills step of chargen.

	SkillsState is a resumable state machine: 0 = class skills / racial
	enemy, 1 = weapon proficiencies, 2 = mage spell selection, 3 = mage
	spell memorization, 4 = priest/druid memorization, 5 = commit results.
	Each sub-window's Done handler bumps SkillsState and calls back here.
	"""
	global CharGenWindow, AppearanceButton
	global SkillsState, SkillsButton, CharGenState, ClassFlag
	Level = 1
	SpellLevel = 1
	ClassName = GUICommon.GetClassRowName (MyChar)
	# clskills.2da: "*" means the class has no entry of that kind.
	DruidSpell = CommonTables.ClassSkills.GetValue (ClassName, "DRUIDSPELL")
	PriestSpell = CommonTables.ClassSkills.GetValue (ClassName, "CLERICSPELL")
	MageSpell = CommonTables.ClassSkills.GetValue (ClassName, "MAGESPELL")
	IsBard = CommonTables.ClassSkills.GetValue (ClassName, "BARDSKILL")
	IsThief = CommonTables.ClassSkills.GetValue (ClassName, "THIEFSKILL")
	if SkillsState == 0:
		GemRB.SetVar ("HatedRace", 0)
		if IsThief!="*":
			SkillsSelect()
		elif DruidSpell!="*":
			# Ranger-style classes: fixed stealth value, then pick a racial enemy.
			Skill = GemRB.LoadTable("SKILLRNG").GetValue(str(Level), "STEALTH")
			GemRB.SetPlayerStat (MyChar, IE_STEALTH, Skill)
			RacialEnemySelect()
		elif IsBard!="*":
			# Bards get a fixed pick-pockets value and skip straight ahead.
			Skill = GemRB.LoadTable(IsBard).GetValue(str(Level), "PICK_POCKETS")
			GemRB.SetPlayerStat (MyChar, IE_PICKPOCKET, Skill)
			SkillsState = 1
		else:
			SkillsState = 1
	if SkillsState == 1:
		ProficienciesSelect()
	if SkillsState == 2:
		if MageSpell!="*":
			MageSpellsSelect(MageSpell, Level, SpellLevel)
		else:
			SkillsState = 3
	if SkillsState == 3:
		if MageSpell!="*":
			MageSpellsMemorize(MageSpell, Level, SpellLevel)
		else:
			SkillsState = 4
	if SkillsState == 4:
		# ClassFlag selects cleric (0x4000) vs druid (0x8000) spell lists.
		if PriestSpell=="MXSPLPRS" or PriestSpell =="MXSPLPAL":
			ClassFlag = 0x4000
			PriestSpellsMemorize(PriestSpell, Level, SpellLevel)
		elif DruidSpell=="MXSPLDRU" or DruidSpell =="MXSPLRAN":
			#no separate spell progression
			if DruidSpell == "MXSPLDRU":
				DruidSpell = "MXSPLPRS"
			ClassFlag = 0x8000
			PriestSpellsMemorize(DruidSpell, Level, SpellLevel)
		else:
			SkillsState = 5
	if SkillsState == 5:
		# All sub-steps done: commit stats and unlock the appearance step.
		SkillsButton.SetState (IE_GUI_BUTTON_DISABLED)
		SkillsButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
		AppearanceButton.SetState (IE_GUI_BUTTON_ENABLED)
		AppearanceButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
		Race = GemRB.GetVar ("HatedRace")
		GemRB.SetPlayerStat (MyChar, IE_HATEDRACE, Race)
		ProfCount = ProficienciesTable.GetRowCount ()
		for i in range(ProfCount):
			StatID = ProficienciesTable.GetValue (i, 0)
			Value = GemRB.GetVar ("Proficiency"+str(i) )
			GemRB.SetPlayerStat (MyChar, StatID, Value )
		CharGenState = 6
		SetCharacterDescription()
	return
def SkillsSelect():
	"""Open the thief-skill allocation window (shared LUSkillsSelection UI)."""
	global CharGenWindow, SkillsWindow, SkillsTextArea, SkillsDoneButton, SkillsPointsLeft
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	SkillsWindow = GemRB.LoadWindow (6)
	Levels = [GemRB.GetPlayerStat (MyChar, IE_LEVEL), \
		GemRB.GetPlayerStat (MyChar, IE_LEVEL2), \
		GemRB.GetPlayerStat (MyChar, IE_LEVEL3)]
	LUSkillsSelection.SetupSkillsWindow (MyChar, \
		LUSkillsSelection.LUSKILLS_TYPE_CHARGEN, SkillsWindow, RedrawSkills, [0,0,0], Levels, 0, False)
	SkillsPointsLeft = GemRB.GetVar ("SkillPointsLeft")
	# Nothing to distribute (e.g. all points auto-assigned): skip the window.
	if SkillsPointsLeft<=0:
		SkillsDonePress()
		return
	SkillsDoneButton = SkillsWindow.GetControl (0)
	SkillsDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	SkillsDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, SkillsDonePress)
	SkillsDoneButton.SetText (11973)
	SkillsDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	SkillsCancelButton = SkillsWindow.GetControl (25)
	SkillsCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	SkillsCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, SkillsCancelPress)
	SkillsCancelButton.SetText (13727)
	SkillsCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	# Allow click-and-hold repeat while allocating points.
	GemRB.SetRepeatClickFlags(GEM_RK_DISABLE, OP_NAND)
	RedrawSkills()
	SkillsWindow.SetVisible (WINDOW_VISIBLE)
	return
def RedrawSkills():
	"""Enable Done only once every skill point has been spent."""
	remaining = GemRB.GetVar ("SkillPointsLeft")
	if remaining:
		SkillsDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	else:
		SkillsDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
def SkillsDonePress():
	"""Accept the skill picks, then re-enter SkillsPress at the next stage."""
	global CharGenWindow, SkillsWindow, SkillsState
	SkillsState = 1
	if SkillsWindow:
		SkillsWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	SkillsPress()
def SkillsCancelPress():
	"""Abort skill selection and reset the skills state machine."""
	global CharGenWindow, SkillsWindow, SkillsState
	SkillsState = 0
	if SkillsWindow:
		SkillsWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
# Racial Enemy Selection
def RacialEnemySelect():
	"""Open the racial-enemy picker and wire its six radio buttons.

	The scrollable list shows six entries at a time; "HatedRace" holds the
	chosen race value, "RacialEnemyIndex" the scroll offset.
	"""
	global CharGenWindow, RacialEnemyWindow, RacialEnemyTextArea, RacialEnemyDoneButton
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	RacialEnemyWindow = GemRB.LoadWindow (15)
	enemyRows = RacialEnemyTable.GetRowCount ()
	# Mark all six choice buttons as one radio group first...
	for ctrl in range (2, 8):
		choice = RacialEnemyWindow.GetControl (ctrl)
		choice.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
	# ...then enable them and bind the shared selection variable.
	for ctrl in range (2, 8):
		choice = RacialEnemyWindow.GetControl (ctrl)
		choice.SetState (IE_GUI_BUTTON_ENABLED)
		choice.SetEvent (IE_GUI_BUTTON_ON_PRESS, RacialEnemySelectPress)
		choice.SetVarAssoc ("RacialEnemy", ctrl - 1)
	GemRB.SetVar ("RacialEnemyIndex", 0)
	GemRB.SetVar ("HatedRace", 0)
	scroll = RacialEnemyWindow.GetControl (1)
	scroll.SetVarAssoc ("RacialEnemyIndex", enemyRows - 5)
	scroll.SetEvent (IE_GUI_SCROLLBAR_ON_CHANGE, DisplayRacialEnemies)
	RacialEnemyTextArea = RacialEnemyWindow.GetControl (8)
	RacialEnemyTextArea.SetText (17256)
	RacialEnemyDoneButton = RacialEnemyWindow.GetControl (11)
	RacialEnemyDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	RacialEnemyDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, RacialEnemyDonePress)
	RacialEnemyDoneButton.SetText (11973)
	RacialEnemyDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	cancel = RacialEnemyWindow.GetControl (10)
	cancel.SetState (IE_GUI_BUTTON_ENABLED)
	cancel.SetEvent (IE_GUI_BUTTON_ON_PRESS, RacialEnemyCancelPress)
	cancel.SetText (13727)
	cancel.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	DisplayRacialEnemies()
	RacialEnemyWindow.SetVisible (WINDOW_VISIBLE)
def DisplayRacialEnemies():
	"""Relabel the six race buttons for the current scroll offset."""
	global RacialEnemyWindow
	offset = GemRB.GetVar ("RacialEnemyIndex")
	for slot, ctrl in enumerate (range (2, 8)):
		button = RacialEnemyWindow.GetControl (ctrl)
		button.SetText (RacialEnemyTable.GetValue (offset + slot, 0))
def RacialEnemySelectPress():
	"""Describe the clicked racial enemy and record it as the hated race."""
	global RacialEnemyWindow, RacialEnemyDoneButton, RacialEnemyTextArea
	# Scroll offset plus button ordinal gives the absolute table row.
	RacialEnemy = GemRB.GetVar ("RacialEnemyIndex") + GemRB.GetVar ("RacialEnemy") - 1
	RacialEnemyTextArea.SetText (RacialEnemyTable.GetValue (RacialEnemy, 2) )
	GemRB.SetVar ("HatedRace", RacialEnemyTable.GetValue (RacialEnemy, 1) )
	RacialEnemyDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	return
def RacialEnemyDonePress():
	"""Accept the racial enemy and resume the skills state machine."""
	global CharGenWindow, RacialEnemyWindow, SkillsState
	SkillsState = 1
	if RacialEnemyWindow:
		RacialEnemyWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	SkillsPress()
def RacialEnemyCancelPress():
	"""Abort the racial-enemy choice and reset the skills step."""
	global CharGenWindow, RacialEnemyWindow, SkillsState
	SkillsState = 0
	if RacialEnemyWindow:
		RacialEnemyWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
# Weapon Proficiencies Selection
def ProficienciesSelect():
	"""Open the weapon-proficiency window and wire up all of its controls.

	Available points come from profs.2da (FIRST_LEVEL); clasweap.2da gates
	which weapon groups this class may spend points on.  The window has two
	columns: proficiencies 0-7 (labels 69+, marks 27+, +/- 11/12 + i*2) and
	8-14 (labels 85+, marks 92+, +/- 127/128 + i*2).

	BUGFIX: the first column's plus-button branch for allowed weapons was
	truncated (the enabling ``else:`` was missing); restored to mirror the
	intact second-column loop.
	"""
	global CharGenWindow, ProficienciesWindow, ProficienciesTextArea
	global ProficienciesPointsLeft, ProficienciesDoneButton, ProfsMaxTable
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	ProficienciesWindow = GemRB.LoadWindow (9)
	ProfsTable = GemRB.LoadTable ("profs")
	ProfsMaxTable = GemRB.LoadTable ("profsmax")
	ClassWeaponsTable = GemRB.LoadTable ("clasweap")
	ClassName = GUICommon.GetClassRowName (MyChar)
	ProficienciesPointsLeft = ProfsTable.GetValue (ClassName, "FIRST_LEVEL")
	PointsLeftLabel = ProficienciesWindow.GetControl (0x10000009)
	PointsLeftLabel.SetUseRGB (1)
	PointsLeftLabel.SetText (str(ProficienciesPointsLeft))
	# First column: proficiencies 0-7.
	for i in range (8):
		ProficienciesLabel = ProficienciesWindow.GetControl (69 + i)
		ProficienciesLabel.SetState (IE_GUI_BUTTON_ENABLED)
		ProficienciesLabel.SetEvent (IE_GUI_BUTTON_ON_PRESS, ProficienciesLabelPress)
		ProficienciesLabel.SetVarAssoc ("ProficienciesIndex", i + 1)
		# Five pip marks per row, hidden until points are spent.
		for j in range (5):
			ProficienciesMark = ProficienciesWindow.GetControl (27 + i * 5 + j)
			ProficienciesMark.SetSprites("GUIPFC", 0, 0, 0, 0, 0)
			ProficienciesMark.SetState (IE_GUI_BUTTON_DISABLED)
			ProficienciesMark.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
		Allowed = ClassWeaponsTable.GetValue (ClassName, ProficienciesTable.GetRowName (i))
		ProficienciesPlusButton = ProficienciesWindow.GetControl (11 + i * 2)
		if Allowed == 0:
			ProficienciesPlusButton.SetState (IE_GUI_BUTTON_DISABLED)
			ProficienciesPlusButton.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
		else:
			ProficienciesPlusButton.SetState (IE_GUI_BUTTON_ENABLED)
			ProficienciesPlusButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ProficienciesPlusPress)
		ProficienciesPlusButton.SetVarAssoc ("ProficienciesIndex", i + 1)
		ProficienciesMinusButton = ProficienciesWindow.GetControl (12 + i * 2)
		if Allowed == 0:
			ProficienciesMinusButton.SetState (IE_GUI_BUTTON_DISABLED)
			ProficienciesMinusButton.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
		else:
			ProficienciesMinusButton.SetState (IE_GUI_BUTTON_ENABLED)
			ProficienciesMinusButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ProficienciesMinusPress)
		ProficienciesMinusButton.SetVarAssoc ("ProficienciesIndex", i + 1)
	# Second column: proficiencies 8-14.
	for i in range (7):
		ProficienciesLabel = ProficienciesWindow.GetControl (85 + i)
		ProficienciesLabel.SetState (IE_GUI_BUTTON_ENABLED)
		ProficienciesLabel.SetEvent (IE_GUI_BUTTON_ON_PRESS, ProficienciesLabelPress)
		ProficienciesLabel.SetVarAssoc ("ProficienciesIndex", i + 9)
		for j in range (5):
			ProficienciesMark = ProficienciesWindow.GetControl (92 + i * 5 + j)
			ProficienciesMark.SetSprites("GUIPFC", 0, 0, 0, 0, 0)
			ProficienciesMark.SetState (IE_GUI_BUTTON_DISABLED)
			ProficienciesMark.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
		Allowed = ClassWeaponsTable.GetValue (ClassName, ProficienciesTable.GetRowName (i + 8))
		ProficienciesPlusButton = ProficienciesWindow.GetControl (127 + i * 2)
		if Allowed == 0:
			ProficienciesPlusButton.SetState (IE_GUI_BUTTON_DISABLED)
			ProficienciesPlusButton.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
		else:
			ProficienciesPlusButton.SetState (IE_GUI_BUTTON_ENABLED)
			ProficienciesPlusButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ProficienciesPlusPress)
		ProficienciesPlusButton.SetVarAssoc ("ProficienciesIndex", i + 9)
		ProficienciesMinusButton = ProficienciesWindow.GetControl (128 + i * 2)
		if Allowed == 0:
			ProficienciesMinusButton.SetState (IE_GUI_BUTTON_DISABLED)
			ProficienciesMinusButton.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
		else:
			ProficienciesMinusButton.SetState (IE_GUI_BUTTON_ENABLED)
			ProficienciesMinusButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ProficienciesMinusPress)
		ProficienciesMinusButton.SetVarAssoc ("ProficienciesIndex", i + 9)
	# Reset all spent-point counters.
	for i in range (15):
		GemRB.SetVar ("Proficiency" + str(i), 0)
	GemRB.SetToken ("number", str(ProficienciesPointsLeft) )
	ProficienciesTextArea = ProficienciesWindow.GetControl (68)
	ProficienciesTextArea.SetText (9588)
	ProficienciesDoneButton = ProficienciesWindow.GetControl (0)
	ProficienciesDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	ProficienciesDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ProficienciesDonePress)
	ProficienciesDoneButton.SetText (11973)
	ProficienciesDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	ProficienciesCancelButton = ProficienciesWindow.GetControl (77)
	ProficienciesCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	ProficienciesCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ProficienciesCancelPress)
	ProficienciesCancelButton.SetText (13727)
	ProficienciesCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	ProficienciesWindow.SetVisible (WINDOW_VISIBLE)
	return
def ProficienciesLabelPress():
	"""Show the description text for the clicked proficiency row."""
	global ProficienciesWindow, ProficienciesTextArea
	row = GemRB.GetVar ("ProficienciesIndex") - 1
	ProficienciesTextArea.SetText (ProficienciesTable.GetValue (row, 2))
def ProficienciesPlusPress():
	"""Spend one proficiency point on the selected weapon group."""
	global ProficienciesWindow, ProficienciesTextArea
	global ProficienciesPointsLeft, ProfsMaxTable
	ProficienciesIndex = GemRB.GetVar ("ProficienciesIndex") - 1
	ProficienciesValue = GemRB.GetVar ("Proficiency" + str(ProficienciesIndex) )
	ClassName = GUICommon.GetClassRowName (MyChar)
	# profsmax.2da caps per-group points at first level for this class.
	if ProficienciesPointsLeft > 0 and ProficienciesValue < ProfsMaxTable.GetValue (ClassName, "FIRST_LEVEL"):
		ProficienciesPointsLeft = ProficienciesPointsLeft - 1
		PointsLeftLabel = ProficienciesWindow.GetControl (0x10000009)
		PointsLeftLabel.SetText (str(ProficienciesPointsLeft))
		if ProficienciesPointsLeft == 0:
			ProficienciesDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
		ProficienciesValue = ProficienciesValue + 1
		GemRB.SetVar ("Proficiency" + str(ProficienciesIndex), ProficienciesValue)
		# Reveal the pip for the newly spent point; the two columns use
		# different control-ID bases (marks start at 27 resp. 92).
		if ProficienciesIndex < 8:
			ControlID = 26 + ProficienciesIndex * 5 + ProficienciesValue
		else:
			ControlID = 51 + ProficienciesIndex * 5 + ProficienciesValue
		ProficienciesMark = ProficienciesWindow.GetControl (ControlID)
		ProficienciesMark.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_NAND)
	ProficienciesTextArea.SetText (ProficienciesTable.GetValue (ProficienciesIndex, 2) )
	return
def ProficienciesMinusPress():
	"""Refund one proficiency point from the selected weapon group."""
	global ProficienciesWindow, ProficienciesTextArea, ProficienciesPointsLeft
	ProficienciesIndex = GemRB.GetVar ("ProficienciesIndex") - 1
	ProficienciesValue = GemRB.GetVar ("Proficiency" + str(ProficienciesIndex) )
	if ProficienciesValue > 0:
		ProficienciesValue = ProficienciesValue - 1
		GemRB.SetVar ("Proficiency" + str(ProficienciesIndex), ProficienciesValue)
		# Hide the pip for the removed point (column-dependent ID base).
		if ProficienciesIndex < 8:
			ControlID = 27 + ProficienciesIndex * 5 + ProficienciesValue
		else:
			ControlID = 52 + ProficienciesIndex * 5 + ProficienciesValue
		ProficienciesMark = ProficienciesWindow.GetControl (ControlID)
		ProficienciesMark.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
		ProficienciesPointsLeft = ProficienciesPointsLeft + 1
		PointsLeftLabel = ProficienciesWindow.GetControl (0x10000009)
		PointsLeftLabel.SetText (str(ProficienciesPointsLeft))
		# A point is unspent again, so Done must be withheld.
		ProficienciesDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	ProficienciesTextArea.SetText (ProficienciesTable.GetValue (ProficienciesIndex, 2) )
	return
def ProficienciesDonePress():
	"""Accept the proficiency picks and resume the skills state machine."""
	global CharGenWindow, ProficienciesWindow, SkillsState
	SkillsState = 2
	if ProficienciesWindow:
		ProficienciesWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	SkillsPress()
def ProficienciesCancelPress():
	"""Abort proficiency selection and reset the skills step."""
	global CharGenWindow, ProficienciesWindow, SkillsState
	SkillsState = 0
	if ProficienciesWindow:
		ProficienciesWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
# Spells Selection
def MageSpellsSelect(SpellTable, Level, SpellLevel):
	"""Open the window for picking starting mage spells to learn.

	Builds the list of learnable spells for the character's kit (school)
	and alignment; the picks are accumulated as a bitmask in the
	"MageSpellBook" variable.
	"""
	global CharGenWindow, MageSpellsWindow, MageSpellsTextArea, MageSpellsDoneButton, MageSpellsSelectPointsLeft, Learnable
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	MageSpellsWindow = GemRB.LoadWindow (7)
	#kit (school), alignment, level
	k = GemRB.GetPlayerStat (MyChar, IE_KIT)
	t = GemRB.GetPlayerStat (MyChar, IE_ALIGNMENT)
	Learnable = Spellbook.GetLearnableMageSpells(k, t, SpellLevel)
	GemRB.SetVar ("MageSpellBook", 0)
	GemRB.SetVar ("SpellMask", 0)
	# Nothing learnable at this level: skip the window entirely.
	if len(Learnable)<1:
		MageSpellsDonePress()
		return
	# Specialists (kit set) get one extra pick.
	if k>0:
		MageSpellsSelectPointsLeft = 3
	else:
		MageSpellsSelectPointsLeft = 2
	PointsLeftLabel = MageSpellsWindow.GetControl (0x1000001b)
	PointsLeftLabel.SetUseRGB (1)
	PointsLeftLabel.SetText (str(MageSpellsSelectPointsLeft))
	# 24 checkbox buttons; each represents one bit in "SpellMask".
	for i in range (24):
		SpellButton = MageSpellsWindow.GetControl (i + 2)
		SpellButton.SetFlags (IE_GUI_BUTTON_PICTURE|IE_GUI_BUTTON_CHECKBOX, OP_OR)
		if i < len(Learnable):
			Spell = GemRB.GetSpell (Learnable[i])
			SpellButton.SetSpellIcon(Learnable[i], 1)
			SpellButton.SetState (IE_GUI_BUTTON_ENABLED)
			SpellButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, MageSpellsSelectPress)
			SpellButton.SetVarAssoc ("SpellMask", 1 << i)
			SpellButton.SetTooltip(Spell["SpellName"])
		else:
			SpellButton.SetState (IE_GUI_BUTTON_DISABLED)
	GemRB.SetToken ("number", str(MageSpellsSelectPointsLeft))
	MageSpellsTextArea = MageSpellsWindow.GetControl (27)
	MageSpellsTextArea.SetText (17250)
	MageSpellsDoneButton = MageSpellsWindow.GetControl (0)
	MageSpellsDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	MageSpellsDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, MageSpellsDonePress)
	MageSpellsDoneButton.SetText (11973)
	MageSpellsDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	MageSpellsCancelButton = MageSpellsWindow.GetControl (29)
	MageSpellsCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	MageSpellsCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, MageSpellsCancelPress)
	MageSpellsCancelButton.SetText (13727)
	MageSpellsCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	MageSpellsWindow.SetVisible (WINDOW_VISIBLE)
	return
def MageSpellsSelectPress():
	"""Handle toggling a mage-spell checkbox: track picks and the point budget."""
	global MageSpellsWindow, MageSpellsTextArea, MageSpellsDoneButton, MageSpellsSelectPointsLeft, Learnable
	MageSpellBook = GemRB.GetVar ("MageSpellBook")
	SpellMask = GemRB.GetVar ("SpellMask")
	#getting the bit index
	# The old and new masks differ by exactly the toggled bit; shift it out
	# to find the spell's index in Learnable.
	Spell = abs(MageSpellBook - SpellMask)
	i = -1
	while (Spell > 0):
		i = i + 1
		Spell = Spell >> 1
	Spell = GemRB.GetSpell (Learnable[i])
	MageSpellsTextArea.SetText (Spell["SpellDesc"])
	if SpellMask < MageSpellBook:
		# A bit was cleared: the pick was undone, refund a point.
		MageSpellsSelectPointsLeft = MageSpellsSelectPointsLeft + 1
	else:
		if MageSpellsSelectPointsLeft==0:
			# No points left: revert the toggle.
			SpellMask = MageSpellBook
			GemRB.SetVar ("SpellMask", SpellMask)
		else:
			MageSpellsSelectPointsLeft = MageSpellsSelectPointsLeft - 1
	if MageSpellsSelectPointsLeft == 0:
		MageSpellsDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	else:
		MageSpellsDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	# Unselected buttons are locked so the checkbox count stays consistent.
	for i in range (len(Learnable)):
		SpellButton = MageSpellsWindow.GetControl (i + 2)
		if ((1 << i) & SpellMask) == 0:
			SpellButton.SetState (IE_GUI_BUTTON_LOCKED)
	PointsLeftLabel = MageSpellsWindow.GetControl (0x1000001b)
	PointsLeftLabel.SetText (str(MageSpellsSelectPointsLeft))
	GemRB.SetVar ("MageSpellBook", SpellMask)
	return
def MageSpellsDonePress():
	"""Accept the learned spells and resume the skills state machine."""
	global CharGenWindow, MageSpellsWindow, SkillsState
	SkillsState = 3
	if MageSpellsWindow:
		MageSpellsWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	SkillsPress()
def MageSpellsCancelPress():
	"""Abort mage-spell selection and reset the skills step."""
	global CharGenWindow, MageSpellsWindow, SkillsState
	SkillsState = 0
	if MageSpellsWindow:
		MageSpellsWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
# Mage Spells Memorize
def MageSpellsMemorize(SpellTable, Level, SpellLevel):
	"""Open the window for memorizing spells out of the just-learned book.

	Only spells whose bit is set in "MageSpellBook" are offered; the
	memorization picks accumulate in the "MageMemorized" bitmask.
	"""
	global CharGenWindow, MageMemorizeWindow, MageMemorizeTextArea, MageMemorizeDoneButton, MageMemorizePointsLeft
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	MageMemorizeWindow = GemRB.LoadWindow (16)
	MaxSpellsMageTable = GemRB.LoadTable (SpellTable)
	MageSpellBook = GemRB.GetVar ("MageSpellBook")
	GemRB.SetVar ("MageMemorized", 0)
	GemRB.SetVar ("SpellMask", 0)
	MageMemorizePointsLeft = MaxSpellsMageTable.GetValue (str(Level), str(SpellLevel) )
	# No slots or no known spells: nothing to memorize, skip the window.
	if MageMemorizePointsLeft<1 or len(Learnable)<1:
		MageMemorizeDonePress()
		return
	PointsLeftLabel = MageMemorizeWindow.GetControl (0x1000001b)
	PointsLeftLabel.SetUseRGB (1)
	PointsLeftLabel.SetText (str(MageMemorizePointsLeft))
	# j walks the learnable list, skipping spells not in the book, so the
	# 12 buttons show only known spells in order.
	j = 0
	for i in range (12):
		SpellButton = MageMemorizeWindow.GetControl (i + 2)
		SpellButton.SetFlags (IE_GUI_BUTTON_PICTURE|IE_GUI_BUTTON_CHECKBOX, OP_OR)
		while (j < len(Learnable)) and (((1 << j) & MageSpellBook) == 0):
			j = j + 1
		if j < len(Learnable):
			Spell = GemRB.GetSpell (Learnable[j])
			SpellButton.SetTooltip(Spell["SpellName"])
			SpellButton.SetSpellIcon(Learnable[j], 1)
			SpellButton.SetState (IE_GUI_BUTTON_ENABLED)
			SpellButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, MageMemorizeSelectPress)
			SpellButton.SetVarAssoc ("SpellMask", 1 << j)
			j = j + 1
		else:
			SpellButton.SetState (IE_GUI_BUTTON_DISABLED)
	GemRB.SetToken ("number", str(MageMemorizePointsLeft))
	MageMemorizeTextArea = MageMemorizeWindow.GetControl (27)
	MageMemorizeTextArea.SetText (17253)
	MageMemorizeDoneButton = MageMemorizeWindow.GetControl (0)
	MageMemorizeDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	MageMemorizeDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, MageMemorizeDonePress)
	MageMemorizeDoneButton.SetText (11973)
	MageMemorizeDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	MageMemorizeCancelButton = MageMemorizeWindow.GetControl (29)
	MageMemorizeCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	MageMemorizeCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, MageMemorizeCancelPress)
	MageMemorizeCancelButton.SetText (13727)
	MageMemorizeCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	MageMemorizeWindow.SetVisible (WINDOW_VISIBLE)
	return
def MageMemorizeSelectPress():
	"""Handle toggling a memorization checkbox: track slots and button states."""
	global MageMemorizeWindow, MageMemorizeTextArea, MageMemorizeDoneButton, MageMemorizePointsLeft, Learnable
	MageSpellBook = GemRB.GetVar ("MageSpellBook")
	MageMemorized = GemRB.GetVar ("MageMemorized")
	SpellMask = GemRB.GetVar ("SpellMask")
	# The old and new masks differ by exactly the toggled bit; shift it out
	# to find the spell's index in Learnable.
	Spell = abs(MageMemorized - SpellMask)
	i = -1
	while (Spell > 0):
		i = i + 1
		Spell = Spell >> 1
	Spell = GemRB.GetSpell (Learnable[i])
	MageMemorizeTextArea.SetText (Spell["SpellDesc"])
	if SpellMask < MageMemorized:
		# A pick was undone: refund the slot and re-enable every known spell.
		MageMemorizePointsLeft = MageMemorizePointsLeft + 1
		j = 0
		for i in range (12):
			SpellButton = MageMemorizeWindow.GetControl (i + 2)
			while (j < len(Learnable) ) and (((1 << j) & MageSpellBook) == 0):
				j = j + 1
			if j < len(Learnable):
				SpellButton.SetState (IE_GUI_BUTTON_ENABLED)
				j = j + 1
		MageMemorizeDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	else:
		MageMemorizePointsLeft = MageMemorizePointsLeft - 1
		if MageMemorizePointsLeft == 0:
			# All slots used: lock out the still-unselected spells.
			j = 0
			for i in range (12):
				SpellButton = MageMemorizeWindow.GetControl (i + 2)
				while (j < len(Learnable) ) and (((1 << j) & MageSpellBook) == 0):
					j = j + 1
				if j < len(Learnable):
					if ((1 << j) & SpellMask) == 0:
						SpellButton.SetState (IE_GUI_BUTTON_DISABLED)
					j = j + 1
			MageMemorizeDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	PointsLeftLabel = MageMemorizeWindow.GetControl (0x1000001b)
	PointsLeftLabel.SetText (str(MageMemorizePointsLeft))
	GemRB.SetVar ("MageMemorized", SpellMask)
	return
def MageMemorizeDonePress():
	"""Accept the memorized spells and resume the skills state machine."""
	global CharGenWindow, MageMemorizeWindow, SkillsState
	SkillsState = 4
	if MageMemorizeWindow:
		MageMemorizeWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	SkillsPress()
def MageMemorizeCancelPress():
	"""Abort mage-spell memorization and reset the skills step."""
	global CharGenWindow, MageMemorizeWindow, SkillsState
	SkillsState = 0
	if MageMemorizeWindow:
		MageMemorizeWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
# Priest Spells Memorize
def PriestSpellsMemorize(SpellTable, Level, SpellLevel):
	"""Open the priest spell memorization window.

	SpellTable: name of the 2DA holding memorizable slot counts.
	Level: caster level (row); SpellLevel: spell level (column).
	Skips straight to Done when no slots or no learnable spells exist.
	"""
	global CharGenWindow, PriestMemorizeWindow, Learnable, ClassFlag
	global PriestMemorizeTextArea, PriestMemorizeDoneButton, PriestMemorizePointsLeft
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	PriestMemorizeWindow = GemRB.LoadWindow (17)
	# Alignment column 3 feeds the learnable-priest-spell lookup.
	t = CommonTables.Aligns.GetValue (GemRB.GetVar ("Alignment")-1, 3)
	Learnable = Spellbook.GetLearnablePriestSpells( ClassFlag, t, SpellLevel)
	MaxSpellsPriestTable = GemRB.LoadTable (SpellTable)
	GemRB.SetVar ("PriestMemorized", 0)
	GemRB.SetVar ("SpellMask", 0)
	PriestMemorizePointsLeft = MaxSpellsPriestTable.GetValue (str(Level), str(SpellLevel) )
	if PriestMemorizePointsLeft<1 or len(Learnable)<1:
		# Nothing to memorize at this level; finish immediately.
		PriestMemorizeDonePress()
		return
	PointsLeftLabel = PriestMemorizeWindow.GetControl (0x1000001b)
	PointsLeftLabel.SetUseRGB (1)
	PointsLeftLabel.SetText (str(PriestMemorizePointsLeft))
	# Up to 12 spell buttons (controls 2..13); extras stay disabled.
	for i in range (12):
		SpellButton = PriestMemorizeWindow.GetControl (i + 2)
		SpellButton.SetFlags (IE_GUI_BUTTON_PICTURE|IE_GUI_BUTTON_CHECKBOX, OP_OR)
		if i < len(Learnable):
			Spell = GemRB.GetSpell (Learnable[i])
			SpellButton.SetTooltip(Spell["SpellName"])
			SpellButton.SetSpellIcon(Learnable[i], 1)
			SpellButton.SetState (IE_GUI_BUTTON_ENABLED)
			SpellButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, PriestMemorizeSelectPress)
			SpellButton.SetVarAssoc ("SpellMask", 1 << i)
		else:
			SpellButton.SetState (IE_GUI_BUTTON_DISABLED)
	GemRB.SetToken ("number", str(PriestMemorizePointsLeft))
	PriestMemorizeTextArea = PriestMemorizeWindow.GetControl (27)
	PriestMemorizeTextArea.SetText (17253)
	PriestMemorizeDoneButton = PriestMemorizeWindow.GetControl (0)
	PriestMemorizeDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	PriestMemorizeDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, PriestMemorizeDonePress)
	PriestMemorizeDoneButton.SetText (11973)
	PriestMemorizeDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	PriestMemorizeCancelButton = PriestMemorizeWindow.GetControl (29)
	PriestMemorizeCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	PriestMemorizeCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, PriestMemorizeCancelPress)
	PriestMemorizeCancelButton.SetText (13727)
	PriestMemorizeCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	PriestMemorizeWindow.SetVisible (WINDOW_VISIBLE)
	return
def PriestMemorizeSelectPress():
	"""Handle toggling of a priest spell memorization button.

	Determines the toggled spell from the selection-mask diff, shows
	its description and updates points left plus button/Done states.
	"""
	global PriestMemorizeWindow, Learnable, PriestMemorizeTextArea, PriestMemorizeDoneButton, PriestMemorizePointsLeft
	PriestMemorized = GemRB.GetVar ("PriestMemorized")
	SpellMask = GemRB.GetVar ("SpellMask")
	# The single changed bit gives the index of the toggled spell.
	Spell = abs(PriestMemorized - SpellMask)
	i = -1
	while (Spell > 0):
		i = i + 1
		Spell = Spell >> 1
	Spell=GemRB.GetSpell (Learnable[i])
	PriestMemorizeTextArea.SetText (Spell["SpellDesc"])
	if SpellMask < PriestMemorized:
		# Deselected: refund a point and re-enable unselected buttons.
		PriestMemorizePointsLeft = PriestMemorizePointsLeft + 1
		for i in range (len(Learnable)):
			SpellButton = PriestMemorizeWindow.GetControl (i + 2)
			if (((1 << i) & SpellMask) == 0):
				SpellButton.SetState (IE_GUI_BUTTON_ENABLED)
		PriestMemorizeDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	else:
		# Selected: spend a point; when exhausted, lock the remaining
		# unselected buttons and allow finishing.
		PriestMemorizePointsLeft = PriestMemorizePointsLeft - 1
		if PriestMemorizePointsLeft == 0:
			for i in range (len(Learnable)):
				SpellButton = PriestMemorizeWindow.GetControl (i + 2)
				if ((1 << i) & SpellMask) == 0:
					SpellButton.SetState (IE_GUI_BUTTON_DISABLED)
			PriestMemorizeDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	PointsLeftLabel = PriestMemorizeWindow.GetControl (0x1000001b)
	PointsLeftLabel.SetText (str(PriestMemorizePointsLeft))
	GemRB.SetVar ("PriestMemorized", SpellMask)
	return
def PriestMemorizeDonePress():
	"""Close the priest memorization window and advance to the next skills step."""
	global CharGenWindow, PriestMemorizeWindow, SkillsState
	if PriestMemorizeWindow:
		PriestMemorizeWindow.Unload ()
	SkillsState = 5
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	SkillsPress()
	return
def PriestMemorizeCancelPress():
	"""Abort priest memorization; reset so the skills sequence restarts."""
	global CharGenWindow, PriestMemorizeWindow, SkillsState
	if PriestMemorizeWindow:
		PriestMemorizeWindow.Unload ()
	SkillsState = 0
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
# Appearance Selection
def AppearancePress():
	"""Open the appearance (avatar color) window.

	Seeds hair/skin/major/minor colors from the PORTCOLR defaults of
	the selected portrait, shows the animated paperdoll preview, and
	wires one preview button per color slot.
	"""
	global CharGenWindow, AppearanceWindow, AppearanceTable
	global Portrait, AppearanceAvatarButton, PortraitName
	global AppearanceHairButton, AppearanceSkinButton
	global AppearanceMajorButton, AppearanceMinorButton
	global HairColor, SkinColor, MajorColor, MinorColor
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	AppearanceWindow = GemRB.LoadWindow (13)
	AppearanceTable = GemRB.LoadTable ("PORTCOLR")
	# Portrait < 0 means a custom/imported portrait: fall back to row 0.
	if Portrait<0:
		PortraitIndex = 0
	else:
		PortraitName = PortraitsTable.GetRowName (Portrait)
		PortraitIndex = AppearanceTable.GetRowIndex (PortraitName + "L")
	# PORTCOLR columns: 0 skin, 1 hair, 2 major, 3 minor gradients.
	HairColor = AppearanceTable.GetValue (PortraitIndex, 1)
	GemRB.SetVar ("HairColor", HairColor)
	SkinColor = AppearanceTable.GetValue (PortraitIndex, 0)
	GemRB.SetVar ("SkinColor", SkinColor)
	MajorColor = AppearanceTable.GetValue (PortraitIndex, 2)
	GemRB.SetVar ("MajorColor", MajorColor)
	MinorColor = AppearanceTable.GetValue (PortraitIndex, 3)
	GemRB.SetVar ("MinorColor", MinorColor)
	# Locked, animated paperdoll preview.
	AppearanceAvatarButton = AppearanceWindow.GetControl (1)
	AppearanceAvatarButton.SetState (IE_GUI_BUTTON_LOCKED)
	AppearanceAvatarButton.SetFlags (IE_GUI_BUTTON_PICTURE|IE_GUI_BUTTON_ANIMATED, OP_OR)
	DrawAvatar()
	# Color-slot preview buttons, each opening the color chooser.
	AppearanceHairButton = AppearanceWindow.GetControl (2)
	AppearanceHairButton.SetFlags (IE_GUI_BUTTON_PICTURE, OP_OR)
	AppearanceHairButton.SetState (IE_GUI_BUTTON_ENABLED)
	AppearanceHairButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AppearanceHairPress)
	AppearanceHairButton.SetBAM ("COLGRAD", 0, 0, HairColor)
	AppearanceSkinButton = AppearanceWindow.GetControl (3)
	AppearanceSkinButton.SetFlags (IE_GUI_BUTTON_PICTURE, OP_OR)
	AppearanceSkinButton.SetState (IE_GUI_BUTTON_ENABLED)
	AppearanceSkinButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AppearanceSkinPress)
	AppearanceSkinButton.SetBAM ("COLGRAD", 0, 0, SkinColor)
	AppearanceMajorButton = AppearanceWindow.GetControl (4)
	AppearanceMajorButton.SetFlags (IE_GUI_BUTTON_PICTURE, OP_OR)
	AppearanceMajorButton.SetState (IE_GUI_BUTTON_ENABLED)
	AppearanceMajorButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AppearanceMajorPress)
	AppearanceMajorButton.SetBAM ("COLGRAD", 0, 0, MajorColor)
	AppearanceMinorButton = AppearanceWindow.GetControl (5)
	AppearanceMinorButton.SetFlags (IE_GUI_BUTTON_PICTURE, OP_OR)
	AppearanceMinorButton.SetState (IE_GUI_BUTTON_ENABLED)
	AppearanceMinorButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AppearanceMinorPress)
	AppearanceMinorButton.SetBAM ("COLGRAD", 0, 0, MinorColor)
	AppearanceDoneButton = AppearanceWindow.GetControl (0)
	AppearanceDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	AppearanceDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AppearanceDonePress)
	AppearanceDoneButton.SetText (11973)
	AppearanceDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	AppearanceCancelButton = AppearanceWindow.GetControl (13)
	AppearanceCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	AppearanceCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AppearanceCancelPress)
	AppearanceCancelButton.SetText (13727)
	AppearanceCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	AppearanceWindow.SetVisible (WINDOW_VISIBLE)
	return
def DrawAvatar():
	"""Refresh the paperdoll preview with the current colors.

	Composes the avatar animation id (base 0x6000) from the race,
	class and gender prefix tables, then applies the chosen gradients.
	"""
	global AppearanceAvatarButton
	global MyChar
	AvatarID = 0x6000
	table = GemRB.LoadTable ("avprefr")
	lookup = CommonTables.Races.FindValue (3, GemRB.GetPlayerStat(MyChar, IE_RACE))
	lookup = CommonTables.Races.GetRowName (lookup)
	AvatarID = AvatarID+table.GetValue (lookup, "RACE")
	table = GemRB.LoadTable ("avprefc")
	lookup = GUICommon.GetClassRowName (MyChar)
	AvatarID = AvatarID+table.GetValue (lookup, "PREFIX")
	table = GemRB.LoadTable ("avprefg")
	# NOTE(review): GTV_STR is passed in the second argument slot here,
	# unlike the named-column lookups above — confirm the avprefg call
	# signature is (row, type) as intended.
	AvatarID = AvatarID + table.GetValue (GemRB.GetPlayerStat(MyChar,IE_SEX), GTV_STR)
	# Paperdoll resource is keyed by the hex avatar id.
	AvatarRef = CommonTables.Pdolls.GetValue (hex(AvatarID), "LEVEL1")
	AppearanceAvatarButton.SetPLT(AvatarRef, 0, MinorColor, MajorColor, SkinColor, 0, 0, HairColor, 0)
	return
def _AppearanceColorPick (ColorType, VarName):
	"""Open the color chooser for one color slot.

	ColorType selects the slot (0 hair, 1 skin, 2 major, 3 minor);
	VarName is the GemRB variable holding the slot's current gradient.
	The four public handlers below share this logic; they must stay as
	separate named functions because they are bound as button events.
	"""
	GemRB.SetVar ("ColorType", ColorType)
	AppearanceColorChoice (GemRB.GetVar (VarName))
	return
def AppearanceHairPress():
	"""Hair color preview button handler."""
	_AppearanceColorPick (0, "HairColor")
	return
def AppearanceSkinPress():
	"""Skin color preview button handler."""
	_AppearanceColorPick (1, "SkinColor")
	return
def AppearanceMajorPress():
	"""Major clothing color preview button handler."""
	_AppearanceColorPick (2, "MajorColor")
	return
def AppearanceMinorPress():
	"""Minor clothing color preview button handler."""
	_AppearanceColorPick (3, "MinorColor")
	return
def AppearanceColorChoice (CurrentColor):
	"""Open the color-grid chooser for the slot stored in "ColorType".

	CurrentColor: gradient currently assigned to the slot; used to
	preselect "SelectedColor".
	"""
	global AppearanceWindow, AppearanceColorWindow
	AppearanceWindow.SetVisible (WINDOW_INVISIBLE)
	AppearanceColorWindow = GemRB.LoadWindow (14)
	AppearanceColorTable = GemRB.LoadTable ("clowncol")
	ColorType = GemRB.GetVar ("ColorType")
	GemRB.SetVar ("SelectedColor", CurrentColor)
	# First pass: make all 34 cells visible and clickable.
	for i in range (34):
		ColorButton = AppearanceColorWindow.GetControl (i)
		ColorButton.SetState (IE_GUI_BUTTON_ENABLED)
		ColorButton.SetFlags (IE_GUI_BUTTON_PICTURE, OP_OR)
	# Second pass: wire only the gradients valid for this slot
	# ("*" marks unused cells in clowncol.2da).
	for i in range (34):
		Color = AppearanceColorTable.GetValue (ColorType, i)
		if Color != "*":
			ColorButton = AppearanceColorWindow.GetControl (i)
			ColorButton.SetBAM ("COLGRAD", 2, 0, Color)
			ColorButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AppearanceColorSelected)
			ColorButton.SetVarAssoc ("SelectedColor", Color)
	AppearanceColorWindow.SetVisible (WINDOW_VISIBLE)
	return
def AppearanceColorSelected():
	"""Apply the color picked in the chooser to the matching slot.

	Reads "ColorType" to determine which slot was edited, stores the
	new gradient in both the module global and the GemRB variable,
	updates that slot's preview button and redraws the paperdoll.
	"""
	global HairColor, SkinColor, MajorColor, MinorColor
	global AppearanceWindow, AppearanceColorWindow
	global AppearanceHairButton, AppearanceSkinButton
	global AppearanceMajorButton, AppearanceMinorButton
	if AppearanceColorWindow:
		AppearanceColorWindow.Unload ()
	ColorType = GemRB.GetVar ("ColorType")
	if ColorType == 0:
		HairColor = GemRB.GetVar ("SelectedColor")
		GemRB.SetVar ("HairColor", HairColor)
		AppearanceHairButton.SetBAM ("COLGRAD", 0, 0, HairColor)
	elif ColorType == 1:
		SkinColor = GemRB.GetVar ("SelectedColor")
		GemRB.SetVar ("SkinColor", SkinColor)
		AppearanceSkinButton.SetBAM ("COLGRAD", 0, 0, SkinColor)
	elif ColorType == 2:
		MajorColor = GemRB.GetVar ("SelectedColor")
		GemRB.SetVar ("MajorColor", MajorColor)
		AppearanceMajorButton.SetBAM ("COLGRAD", 0, 0, MajorColor)
	elif ColorType == 3:
		MinorColor = GemRB.GetVar ("SelectedColor")
		GemRB.SetVar ("MinorColor", MinorColor)
		AppearanceMinorButton.SetBAM ("COLGRAD", 0, 0, MinorColor)
	DrawAvatar()
	AppearanceWindow.SetVisible (WINDOW_VISIBLE)
	return
def AppearanceDonePress():
	"""Accept the chosen colors and move on to voice selection."""
	global CharGenWindow, AppearanceWindow
	if AppearanceWindow:
		AppearanceWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	CharSoundSelect()
	return
def AppearanceCancelPress():
	"""Discard the appearance window and return to the main screen."""
	global CharGenWindow, AppearanceWindow
	if AppearanceWindow:
		AppearanceWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def CharSoundSelect():
	"""Open the character voice-set selection window.

	Fills the selection list with the available character sound sets
	and wires the Play/Done/Cancel buttons.
	"""
	global CharGenWindow, CharSoundWindow, CharSoundTable, CharSoundStrings
	global CharSoundVoiceList, VerbalConstants
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	CharSoundWindow = GemRB.LoadWindow (19)
	CharSoundTable = GemRB.LoadTable ("CHARSND")
	CharSoundStrings = GemRB.LoadTable ("CHARSTR")
	# CHARSND row names are the verbal constant ids cycled by Play.
	VerbalConstants = [CharSoundTable.GetRowName(i) for i in range(CharSoundTable.GetRowCount())]
	CharSoundVoiceList = CharSoundWindow.GetControl (45)
	# Return value (row count) is unused; the dead local was dropped.
	CharSoundVoiceList.ListResources(CHR_SOUNDS)
	CharSoundPlayButton = CharSoundWindow.GetControl (47)
	CharSoundPlayButton.SetState (IE_GUI_BUTTON_ENABLED)
	CharSoundPlayButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CharSoundPlayPress)
	CharSoundPlayButton.SetText (17318)
	CharSoundTextArea = CharSoundWindow.GetControl (50)
	CharSoundTextArea.SetText (11315)
	CharSoundDoneButton = CharSoundWindow.GetControl (0)
	CharSoundDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	CharSoundDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CharSoundDonePress)
	CharSoundDoneButton.SetText (11973)
	CharSoundDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	CharSoundCancelButton = CharSoundWindow.GetControl (10)
	CharSoundCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	CharSoundCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CharSoundCancelPress)
	CharSoundCancelButton.SetText (13727)
	CharSoundCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	CharSoundWindow.SetVisible (WINDOW_VISIBLE)
	return
def CharSoundPlayPress():
	"""Assign the selected voice set and play its next verbal constant.

	Each press cycles through VerbalConstants so repeated clicks
	audition different lines of the set.
	"""
	global CharGenWindow, CharSoundWindow, CharSoundTable, CharSoundStrings
	global CharSoundVoiceList, SoundIndex, VerbalConstants
	row = CharSoundVoiceList.QueryText ()
	GemRB.SetPlayerSound (MyChar, row)
	#play sound as sound slot
	GemRB.VerbalConstant (MyChar, int(VerbalConstants[SoundIndex]))
	# Advance and wrap in one step (same behavior as the old
	# increment-then-reset pair).
	SoundIndex = (SoundIndex + 1) % len(VerbalConstants)
	return
def CharSoundDonePress():
	"""Accept the voice set; unlock biography/name and refresh the summary."""
	global CharGenWindow, CharSoundWindow, AppearanceButton, BiographyButton, NameButton, CharGenState
	if CharSoundWindow:
		CharSoundWindow.Unload ()
	# Appearance stage is finished: lock it and clear its default flag.
	AppearanceButton.SetState (IE_GUI_BUTTON_DISABLED)
	AppearanceButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	BiographyButton.SetState (IE_GUI_BUTTON_ENABLED)
	# Name becomes the new default action.
	NameButton.SetState (IE_GUI_BUTTON_ENABLED)
	NameButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	CharGenState = 7
	SetCharacterDescription()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def CharSoundCancelPress():
	"""Close the voice-set window without changing chargen state."""
	global CharGenWindow, CharSoundWindow
	if CharSoundWindow:
		CharSoundWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
# Biography Selection
def BiographyPress():
	"""Open the biography editor window."""
	global CharGenWindow, BiographyWindow, BiographyField
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	BiographyWindow = GemRB.LoadWindow (51)
	BiographyField = BiographyWindow.GetControl (4)
	# Replace the stock control with an editable text area.
	BiographyTextArea = BiographyWindow.CreateTextArea(100, 0, 0, 0, 0, "NORMAL", IE_FONT_ALIGN_CENTER) # ID/position/size dont matter. we will substitute later
	BiographyField = BiographyTextArea.SubstituteForControl(BiographyField)
	BiographyField.SetStatus (IE_GUI_CONTROL_FOCUSED)
	# Restore a previously entered biography, else show the default text.
	BIO = GemRB.GetToken("Biography")
	if BIO:
		BiographyField.SetText (BIO)
	else:
		BiographyField.SetText (19423)
	BiographyClearButton = BiographyWindow.GetControl (5)
	BiographyClearButton.SetState (IE_GUI_BUTTON_ENABLED)
	BiographyClearButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, BiographyClearPress)
	BiographyClearButton.SetText (18622)
	BiographyCancelButton = BiographyWindow.GetControl (2)
	BiographyCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	BiographyCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, BiographyCancelPress)
	BiographyCancelButton.SetText (13727)
	BiographyCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	BiographyDoneButton = BiographyWindow.GetControl (1)
	BiographyDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	BiographyDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, BiographyDonePress)
	BiographyDoneButton.SetText (11973)
	BiographyWindow.SetVisible (WINDOW_VISIBLE)
	return
def BiographyClearPress():
	"""Empty the biography edit field."""
	global BiographyWindow, BiographyField
	BiographyField.SetText ("")
	return
def BiographyCancelPress():
	"""Close the biography window without saving changes."""
	global CharGenWindow, BiographyWindow
	if BiographyWindow:
		BiographyWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def BiographyDonePress():
	"""Persist the edited biography on the character and close the window."""
	global CharGenWindow, BiographyWindow, BiographyField
	BIO = BiographyField.QueryText ()
	GemRB.SetToken ("Biography", BIO) # just for any window reopens
	# Player string slot 63 holds the biography strref.
	BioStrRefSlot = 63
	DefaultBIO = 19423
	if BIO == GemRB.GetString (DefaultBIO):
		# Unedited default: reference the stock string directly.
		GemRB.SetPlayerString (MyChar, BioStrRefSlot, DefaultBIO)
	else:
		# unlike tob, iwd has no marked placeholders (or strings) at 62015; but we have special magic in place ...
		# still, use the returned strref in case anything unexpected happened
		ref = GemRB.CreateString (62015+MyChar, BIO)
		GemRB.SetPlayerString (MyChar, BioStrRefSlot, ref)
	if BiographyWindow:
		BiographyWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
# Name Selection
def NamePress():
	"""Open the character name entry window."""
	global CharGenWindow, NameWindow, NameDoneButton, NameField
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	NameWindow = GemRB.LoadWindow (5)
	NameDoneButton = NameWindow.GetControl (0)
	NameDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	NameDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, NameDonePress)
	NameDoneButton.SetText (11973)
	NameDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	NameCancelButton = NameWindow.GetControl (3)
	NameCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	NameCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, NameCancelPress)
	NameCancelButton.SetText (13727)
	NameCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	NameField = NameWindow.GetControl (2)
	NameField.SetEvent (IE_GUI_EDIT_ON_CHANGE, NameEditChange)
	NameField.SetText (GemRB.GetToken ("CHARNAME") )
	NameField.SetStatus (IE_GUI_CONTROL_FOCUSED)
	NameWindow.SetVisible (WINDOW_VISIBLE)
	# Sync the Done button with any pre-filled name.
	NameEditChange()
	return
def NameEditChange():
	"""Keep the Done button in sync with the name field: enabled only
	when the field is non-empty."""
	if NameField.QueryText () == "":
		new_state = IE_GUI_BUTTON_DISABLED
	else:
		new_state = IE_GUI_BUTTON_ENABLED
	NameDoneButton.SetState (new_state)
	return
def NameDonePress():
	"""Store the chosen name and enable final character acceptance."""
	global CharGenWindow, CharGenState, NameWindow, NameField, AcceptButton
	GemRB.SetToken ("CHARNAME", NameField.QueryText () )
	if NameWindow:
		NameWindow.Unload ()
	CharGenState = 8
	AcceptButton.SetState (IE_GUI_BUTTON_ENABLED)
	AcceptButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	SetCharacterDescription()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
def NameCancelPress():
	"""Close the name window, clearing any previously stored name."""
	global CharGenWindow, NameWindow
	GemRB.SetToken ("CHARNAME", "")
	if NameWindow:
		NameWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
# Import Character
def ImportPress():
	"""Open the character import window listing exported character files."""
	global CharGenWindow, ImportWindow
	global CharImportList
	CharGenWindow.SetVisible (WINDOW_INVISIBLE)
	ImportWindow = GemRB.LoadWindow (20)
	TextAreaControl = ImportWindow.GetControl(4)
	TextAreaControl.SetText(10963)
	GemRB.SetVar ("Selected", 0)
	CharImportList = ImportWindow.GetControl(2)
	CharImportList.SetVarAssoc ("Selected",0)
	CharImportList.ListResources(CHR_EXPORTS)
	ImportDoneButton = ImportWindow.GetControl (0)
	ImportDoneButton.SetState (IE_GUI_BUTTON_ENABLED)
	ImportDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ImportDonePress)
	ImportDoneButton.SetText (11973)
	ImportDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	ImportCancelButton = ImportWindow.GetControl (1)
	ImportCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	ImportCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ImportCancelPress)
	ImportCancelButton.SetText (13727)
	ImportCancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
	ImportWindow.SetVisible (WINDOW_VISIBLE)
	return
def ImportDonePress():
	"""Import the selected exported character and skip the stages it fills.

	Loads the character into the MyChar slot, copies name/portrait
	tokens, then disables every already-determined chargen button and
	leaves only appearance (and later name) to be chosen.
	"""
	global CharGenWindow, ImportWindow, CharImportList
	global CharGenState, SkillsState, Portrait, ImportedChar
	# Import the character from the chosen name
	GemRB.CreatePlayer (CharImportList.QueryText(), MyChar|0x8000, 1)
	GemRB.SetToken ("CHARNAME", GemRB.GetPlayerName (MyChar) )
	GemRB.SetToken ("SmallPortrait", GemRB.GetPlayerPortrait (MyChar, 1) )
	PortraitName = GemRB.GetPlayerPortrait (MyChar, 0)
	GemRB.SetToken ("LargePortrait", PortraitName )
	PortraitButton.SetPicture (PortraitName)
	# Portrait -1 marks a custom (imported) portrait.
	Portrait = -1
	ImportedChar = 1
	CharGenState = 7
	SkillsState = 5
	SetCharacterDescription ()
	GenderButton.SetState (IE_GUI_BUTTON_DISABLED)
	GenderButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	RaceButton.SetState (IE_GUI_BUTTON_DISABLED)
	RaceButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	ClassButton.SetState (IE_GUI_BUTTON_DISABLED)
	ClassButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	AlignmentButton.SetState (IE_GUI_BUTTON_DISABLED)
	AlignmentButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	AbilitiesButton.SetState (IE_GUI_BUTTON_DISABLED)
	AbilitiesButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	SkillsButton.SetState (IE_GUI_BUTTON_DISABLED)
	SkillsButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_NAND)
	AppearanceButton.SetState (IE_GUI_BUTTON_ENABLED)
	AppearanceButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	# NOTE(review): Biography/Name are disabled here yet get
	# IE_GUI_BUTTON_DEFAULT via OP_OR, unlike the OP_NAND used for the
	# other disabled buttons above — confirm this asymmetry is intended.
	BiographyButton.SetState (IE_GUI_BUTTON_DISABLED)
	BiographyButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	NameButton.SetState (IE_GUI_BUTTON_DISABLED)
	NameButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	if ImportWindow:
		ImportWindow.Unload ()
	return
def ImportCancelPress():
	"""Close the import window without importing anything."""
	global CharGenWindow, ImportWindow
	if ImportWindow:
		ImportWindow.Unload ()
	CharGenWindow.SetVisible (WINDOW_VISIBLE)
	return
ProficienciesPlusButton.SetState (IE_GUI_BUTTON_ENABLED) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import os
from datetime import datetime
from .hooks import dispatch_hook, HOOKS
from .structures import CaseInsensitiveDict
from .status_codes import codes
from .auth import HTTPBasicAuth, HTTPProxyAuth
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3 import connectionpool, poolmanager
from .packages.urllib3.filepost import encode_multipart_formdata
from .defaults import SCHEMAS
from .exceptions import (
ConnectionError, HTTPError, RequestException, Timeout, TooManyRedirects,
URLRequired, SSLError, MissingSchema, InvalidSchema)
from .utils import (
get_encoding_from_headers, stream_untransfer, guess_filename, requote_uri,
dict_from_string, stream_decode_response_unicode, get_netrc_auth)
from .compat import (
urlparse, urlunparse, urljoin, urlsplit, urlencode, str, bytes,
SimpleCookie, is_py2)
# Import chardet if it is available.
try:
    import chardet
except ImportError:
    # Optional dependency: response encoding detection degrades gracefully.
    pass

# HTTP status codes that trigger re-issuing the request at the new Location.
REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved)
class Request(object):
"""The :class:`Request <Request>` object. It carries out all functionality of
Requests. Recommended interface is with the Requests functions.
"""
    def __init__(self,
        url=None,
        headers=dict(),
        files=None,
        method=None,
        data=dict(),
        params=dict(),
        auth=None,
        cookies=None,
        timeout=None,
        redirect=False,
        allow_redirects=False,
        proxies=None,
        hooks=None,
        config=None,
        _poolmanager=None,
        verify=None,
        session=None,
        cert=None):
        """Initialize the request.

        All arguments are optional. Dictionary-like arguments (config,
        headers, proxies, cookies) are defensively copied; ``data`` and
        ``params`` are immediately encoded into body/querystring form.
        NOTE(review): the mutable defaults (``headers=dict()`` etc.) are
        only safe because each is copied before use — keep that invariant
        if this initializer is ever refactored.
        """
        #: Dictionary of configurations for this request.
        self.config = dict(config or [])
        #: Float describes the timeout of the request.
        # (Use socket.setdefaulttimeout() as fallback)
        self.timeout = timeout
        #: Request URL.
        self.url = url
        #: Dictionary of HTTP Headers to attach to the :class:`Request <Request>`.
        self.headers = dict(headers or [])
        #: Dictionary of files to multipart upload (``{filename: content}``).
        self.files = files
        #: HTTP Method to use.
        self.method = method
        #: Dictionary or byte of request body data to attach to the
        #: :class:`Request <Request>`.
        self.data = None
        #: Dictionary or byte of querystring data to attach to the
        #: :class:`Request <Request>`.
        self.params = None
        #: True if :class:`Request <Request>` is part of a redirect chain (disables history
        #: and HTTPError storage).
        self.redirect = redirect
        #: Set to True if full redirects are allowed (e.g. re-POST-ing of data at new ``Location``)
        self.allow_redirects = allow_redirects
        # Dictionary mapping protocol to the URL of the proxy (e.g. {'http': 'foo.bar:3128'})
        self.proxies = dict(proxies or [])
        # If no proxies are given, allow configuration by environment variables
        # HTTP_PROXY and HTTPS_PROXY.
        if not self.proxies and self.config.get('trust_env'):
            if 'HTTP_PROXY' in os.environ:
                self.proxies['http'] = os.environ['HTTP_PROXY']
            if 'HTTPS_PROXY' in os.environ:
                self.proxies['https'] = os.environ['HTTPS_PROXY']
        # Encode body/query data now; keeps both the structured and the
        # urlencoded form for later use.
        self.data, self._enc_data = self._encode_params(data)
        self.params, self._enc_params = self._encode_params(params)
        #: :class:`Response <Response>` instance, containing
        #: content and metadata of HTTP Response, once :attr:`sent <send>`.
        self.response = Response()
        #: Authentication tuple or object to attach to :class:`Request <Request>`.
        self.auth = auth
        #: CookieJar to attach to :class:`Request <Request>`.
        self.cookies = dict(cookies or [])
        #: True if Request has been sent.
        self.sent = False
        #: Event-handling hooks.
        self.hooks = {}
        for event in HOOKS:
            self.hooks[event] = []
        hooks = hooks or {}
        # NOTE(review): each hook value is registered as a single
        # callable; passing a list of hooks for one event would append
        # the list object itself — confirm callers never do that.
        for (k, v) in list(hooks.items()):
            self.register_hook(event=k, hook=v)
        #: Session.
        self.session = session
        #: SSL Verification.
        self.verify = verify
        #: SSL Certificate
        self.cert = cert
        # Rebuild headers as a case-insensitive mapping, layering the
        # configured base headers underneath any explicitly given ones.
        if headers:
            headers = CaseInsensitiveDict(self.headers)
        else:
            headers = CaseInsensitiveDict()
        # Add configured base headers.
        for (k, v) in list(self.config.get('base_headers', {}).items()):
            if k not in headers:
                headers[k] = v
        self.headers = headers
        self._poolmanager = _poolmanager
def __repr__(self):
return '<Request [%s]>' % (self.method)
    def _build_response(self, resp):
        """Build internal :class:`Response <Response>` object
        from given response.

        Also follows any redirect chain (unless this request is itself a
        redirect hop), accumulating intermediate responses in
        ``r.history`` and merging cookies along the way.
        """
        def build(resp):
            # Wrap a raw urllib3 response in our Response, copying over
            # status, headers, encoding, cookies and the final URL.
            response = Response()
            # Pass settings over.
            response.config = self.config
            if resp:
                # Fallback to None if there's no status_code, for whatever reason.
                response.status_code = getattr(resp, 'status', None)
                # Make headers case-insensitive.
                response.headers = CaseInsensitiveDict(getattr(resp, 'headers', None))
                # Set encoding.
                response.encoding = get_encoding_from_headers(response.headers)
                # Start off with our local cookies.
                cookies = self.cookies or dict()
                # Add new cookies from the server.
                if 'set-cookie' in response.headers:
                    cookie_header = response.headers['set-cookie']
                    cookies = dict_from_string(cookie_header)
                # Save cookies in Response.
                response.cookies = cookies
                # No exceptions were harmed in the making of this request.
                response.error = getattr(resp, 'error', None)
                # Save original response for later.
                response.raw = resp
            if isinstance(self.full_url, bytes):
                response.url = self.full_url.decode('utf-8')
            else:
                response.url = self.full_url
            return response
        history = []
        r = build(resp)
        self.cookies.update(r.cookies)
        if r.status_code in REDIRECT_STATI and not self.redirect:
            # NOTE(review): status codes are compared with ``is`` below,
            # relying on CPython small-int caching; ``==`` would be the
            # safer spelling (left unchanged in this doc-only pass).
            while (('location' in r.headers) and
                   ((r.status_code is codes.see_other) or (self.allow_redirects))):
                r.content  # Consume socket so it can be released
                if not len(history) < self.config.get('max_redirects'):
                    raise TooManyRedirects()
                # Release the connection back into the pool.
                r.raw.release_conn()
                history.append(r)
                url = r.headers['location']
                data = self.data
                # Handle redirection without scheme (see: RFC 1808 Section 4)
                if url.startswith('//'):
                    parsed_rurl = urlparse(r.url)
                    url = '%s:%s' % (parsed_rurl.scheme, url)
                # Facilitate non-RFC2616-compliant 'location' headers
                # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
                if not urlparse(url).netloc:
                    url = urljoin(r.url,
                                  # Compliant with RFC3986, we percent
                                  # encode the url.
                                  requote_uri(url))
                # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
                if r.status_code is codes.see_other:
                    method = 'GET'
                    data = None
                else:
                    method = self.method
                # Do what the browsers do if strict_mode is off...
                if (not self.config.get('strict_mode')):
                    if r.status_code in (codes.moved, codes.found) and self.method == 'POST':
                        method = 'GET'
                        data = None
                    if (r.status_code == 303) and self.method != 'HEAD':
                        method = 'GET'
                        data = None
                # Remove the cookie headers that were sent.
                headers = self.headers
                try:
                    del headers['Cookie']
                except KeyError:
                    pass
                # Re-issue the request at the new location; redirect=True
                # stops the hop from recursing into its own history.
                request = Request(
                    url=url,
                    headers=headers,
                    files=self.files,
                    method=method,
                    params=self.session.params,
                    auth=self.auth,
                    cookies=self.cookies,
                    redirect=True,
                    data=data,
                    config=self.config,
                    timeout=self.timeout,
                    _poolmanager=self._poolmanager,
                    proxies=self.proxies,
                    verify=self.verify,
                    session=self.session,
                    cert=self.cert
                )
                request.send()
                r = request.response
                self.cookies.update(r.cookies)
        r.history = history
        self.response = r
        self.response.request = self
        self.response.cookies.update(self.cookies)
@staticmethod
def _encode_params(data):
"""Encode parameters in a piece of data.
If the data supplied is a dictionary, encodes each parameter in it, and
returns a list of tuples containing the encoded parameters, and a urlencoded
version of that.
Otherwise, assumes the data is already encoded appropriately, and
returns it twice.
"""
if isinstance(data, bytes):
return data, data
if hasattr(data, '__iter__') and not isinstance(data, str):
data = dict(data)
if hasattr(data, 'items'):
result = []
for k, vs in list(data.items()):
for v in isinstance(vs, list) and vs or [vs]:
result.append((k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
return result, urlencode(result, doseq=True)
else:
return data, data
    @property
    def full_url(self):
        """Build the actual URL to use.

        Joins ``self.url`` with the separately-encoded querystring
        params, IDNA-encodes the host, and percent-encodes the result
        (unless config disables it). Raises URLRequired, MissingSchema
        or InvalidSchema on bad input.
        """
        if not self.url:
            raise URLRequired()
        url = self.url
        # Support for unicode domain names and paths.
        scheme, netloc, path, params, query, fragment = urlparse(url)
        if not scheme:
            raise MissingSchema("Invalid URL %r: No schema supplied" % url)
        if not scheme in SCHEMAS:
            raise InvalidSchema("Invalid scheme %r" % scheme)
        # IDNA-encode the hostname so non-ASCII domains work.
        netloc = netloc.encode('idna').decode('utf-8')
        if not path:
            path = '/'
        if is_py2:
            # Python 2's urlunparse wants byte strings.
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')
            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')
            if isinstance(path, str):
                path = path.encode('utf-8')
            if isinstance(params, str):
                params = params.encode('utf-8')
            if isinstance(query, str):
                query = query.encode('utf-8')
            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')
        url = (urlunparse([scheme, netloc, path, params, query, fragment]))
        # Append the querystring params passed separately to __init__.
        if self._enc_params:
            if urlparse(url).query:
                url = '%s&%s' % (url, self._enc_params)
            else:
                url = '%s?%s' % (url, self._enc_params)
        if self.config.get('encode_uri', True):
            url = requote_uri(url)
        return url
@property
def path_url(self):
"""Build the path URL to use."""
url = []
p = urlsplit(self.full_url)
# Proxies use full URLs.
if p.scheme in self.proxies:
return self.full_url
path = p.path
if not path:
path = '/'
url.append(path)
query = p.query
if query:
url.append('?')
url.append(query)
return ''.join(url)
def register_hook(self, event, hook):
"""Properly register a hook."""
return self.hooks[event].append(hook)
def send(self, anyway=False, prefetch=False):
"""Sends the request. Returns True of successful, False if not.
If there was an HTTPError during transmission,
self.response.status_code will contain the HTTPError code.
Once a request is successfully sent, `sent` will equal True.
:param anyway: If True, request will be sent, even if it has
already been sent.
"""
# Build the URL
url = self.full_url
# Logging
if self.config.get('verbose'):
self.config.get('verbose').write('%s %s %s\n' % (
datetime.now().isoformat(), self.method, url
))
# Nottin' on you.
body = None
content_type = None
# Multi-part file uploads.
if self.files:
if not isinstance(self.data, str):
try:
fields = self.data.copy()
except AttributeError:
fields = dict(self.data)
for (k, v) in list(self.files.items()):
# support for explicit filename
if isinstance(v, (tuple, list)):
fn, fp = v
else:
fn = guess_filename(v) or k
fp = v
fields.update({k: (fn, fp.read())})
(body, content_type) = encode_multipart_formdata(fields)
else:
pass
# TODO: Conflict?
else:
if self.data:
body = self._enc_data
if isinstance(self.data, str):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
# Add content-type if it wasn't explicitly provided.
if (content_type) and (not 'content-type' in self.headers):
self.headers['Content-Type'] = content_type
# Use .netrc auth if none was provided.
if not self.auth and self.config.get('trust_env'):
self.auth = get_netrc_auth(url)
if self.auth:
if isinstance(self.auth, tuple) and len(self.auth) == 2:
# special-case basic HTTP auth
self.auth = HTTPBasicAuth(*self.auth)
# Allow auth to make its changes.
r = self.auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
_p = urlparse(url)
proxy = self.proxies.get(_p.scheme)
if proxy:
conn = poolmanager.proxy_from_url(proxy)
_proxy = urlparse(proxy)
if '@' in _proxy.netloc:
auth, url = _proxy.netloc.split('@', 1)
self.proxy_auth = HTTPProxyAuth(*auth.split(':', 1))
r = self.proxy_auth(self)
self.__dict__.update(r.__dict__)
else:
# Check to see if keep_alive is allowed.
if self.config.get('keep_alive'):
conn = self._poolmanager.connection_from_url(url)
else:
conn = connectionpool.connection_from_url(url)
if url.startswith('https') and self.verify:
cert_loc = None
# Allow self-specified cert location.
if self.verify is not True:
cert_loc = self.verify
# Look for configuration.
if not cert_loc and self.config.get('trust_env'):
cert_loc = os.environ.get('REQUESTS_CA_BUNDLE')
# Curl compatiblity.
if not cert_loc and self.config.get('trust_env'):
cert_loc = os.environ.get('CURL_CA_BUNDLE')
# Use the awesome certifi list.
if not cert_loc:
cert_loc = __import__('certifi').where()
conn.cert_reqs = 'CERT_REQUIRED'
conn.ca_certs = cert_loc
else:
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
if self.cert and self.verify:
if len(self.cert) == 2:
conn.cert_file = self.cert[0]
conn.key_file = self.cert[1]
else:
conn.cert_file = self.cert
if not self.sent or anyway:
if self.cookies:
# Skip if 'cookie' header is explicitly set.
if 'cookie' not in self.headers:
# Simple cookie with our dict.
c = SimpleCookie()
for (k, v) in list(self.cookies.items()):
c[k] = v
# Turn it into a header.
cookie_header = c.output(header='', sep='; ').strip()
# Attach Cookie header to request.
self.headers['Cookie'] = cookie_header
# Pre-request hook.
r = dispatch_hook('pre_request', self.hooks, self)
self.__dict__.update(r.__dict__)
try:
# The inner try .. except re-raises certain exceptions as
# internal exception types; the outer suppresses exceptions
# when safe mode is set.
try:
# Send the request.
r = conn.urlopen(
method=self.method,
url=self.path_url,
body=body,
headers=self.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.config.get('max_retries', 0),
timeout=self.timeout,
)
self.sent = True
except MaxRetryError as e:
raise ConnectionError(e)
except (_SSLError, _HTTPError) as e:
if self.verify and isinstance(e, _SSLError):
raise SSLError(e)
raise Timeout('Request timed out.')
except RequestException as e:
if self.config.get('safe_mode', False):
# In safe mode, catch the exception and attach it to
# a blank urllib3.HTTPResponse object.
r = HTTPResponse()
r.error = e
else:
raise
self._build_response(r)
# Response manipulation hook.
self.response = dispatch_hook('response', self.hooks, self.response)
# Post-request hook.
r = dispatch_hook('post_request', self.hooks, self)<|fim▁hole|> self.__dict__.update(r.__dict__)
# If prefetch is True, mark content as consumed.
if prefetch:
# Save the response.
self.response.content
if self.config.get('danger_mode'):
self.response.raise_for_status()
return self.sent
class Response(object):
"""The core :class:`Response <Response>` object. All
:class:`Request <Request>` objects contain a
:class:`response <Response>` attribute, which is an instance
of this class.
"""
def __init__(self):
self._content = None
self._content_consumed = False
#: Integer Code of responded HTTP Status.
self.status_code = None
#: Case-insensitive Dictionary of Response Headers.
#: For example, ``headers['content-encoding']`` will return the
#: value of a ``'Content-Encoding'`` response header.
self.headers = CaseInsensitiveDict()
#: File-like object representation of response (for advanced usage).
self.raw = None
#: Final URL location of Response.
self.url = None
#: Resulting :class:`HTTPError` of request, if one occurred.
self.error = None
#: Encoding to decode with when accessing r.content.
self.encoding = None
#: A list of :class:`Response <Response>` objects from
#: the history of the Request. Any redirect responses will end
#: up here.
self.history = []
#: The :class:`Request <Request>` that created the Response.
self.request = None
#: A dictionary of Cookies the server sent back.
self.cookies = {}
#: Dictionary of configurations for this request.
self.config = {}
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
def __bool__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
@property
def ok(self):
try:
self.raise_for_status()
except RequestException:
return False
return True
def iter_content(self, chunk_size=10 * 1024, decode_unicode=False):
"""Iterates over the response data. This avoids reading the content
at once into memory for large responses. The chunk size is the number
of bytes it should read into memory. This is not necessarily the
length of each item returned as decoding can take place.
"""
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed'
)
def generate():
while 1:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
gen = stream_untransfer(generate(), self)
if decode_unicode:
gen = stream_decode_response_unicode(gen, self)
return gen
def iter_lines(self, chunk_size=10 * 1024, decode_unicode=None):
"""Iterates over the response data, one line at a time. This
avoids reading the content at once into memory for large
responses.
"""
pending = None
for chunk in self.iter_content(
chunk_size=chunk_size,
decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
lines = chunk.splitlines()
if lines[-1][-1] == chunk[-1]:
pending = lines.pop()
else:
pending = None
for line in lines:
yield line
if pending is not None:
yield pending
@property
def content(self):
"""Content of the response, in bytes."""
if self._content is None:
# Read the contents.
try:
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
if self.status_code is 0:
self._content = None
else:
self._content = bytes().join(self.iter_content()) or bytes()
except AttributeError:
self._content = None
self._content_consumed = True
return self._content
def _detected_encoding(self):
try:
detected = chardet.detect(self.content) or {}
return detected.get('encoding')
# Trust that chardet isn't available or something went terribly wrong.
except Exception:
pass
@property
def text(self):
"""Content of the response, in unicode.
if Response.encoding is None and chardet module is available, encoding
will be guessed.
"""
# Try charset from content-type
content = None
encoding = self.encoding
# Fallback to auto-detected encoding if chardet is available.
if self.encoding is None:
encoding = self._detected_encoding()
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors='replace')
except LookupError:
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# So we try blindly encoding.
content = str(self.content, errors='replace')
except (UnicodeError, TypeError):
pass
return content
def raise_for_status(self, allow_redirects=True):
"""Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred."""
if self.error:
raise self.error
if (self.status_code >= 300) and (self.status_code < 400) and not allow_redirects:
http_error = HTTPError('%s Redirection' % self.status_code)
http_error.response = self
raise http_error
elif (self.status_code >= 400) and (self.status_code < 500):
http_error = HTTPError('%s Client Error' % self.status_code)
http_error.response = self
raise http_error
elif (self.status_code >= 500) and (self.status_code < 600):
http_error = HTTPError('%s Server Error' % self.status_code)
http_error.response = self
raise http_error<|fim▁end|> | |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md>
fn main() {<|fim▁hole|> println!("cargo:rustc-flags=-l cryptdll");
}<|fim▁end|> | |
<|file_name|>test_open_file.py<|end_file_name|><|fim▁begin|>import os, sys; sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
<|fim▁hole|>import pytest
from lasio import read
test_dir = os.path.dirname(__file__)
egfn = lambda fn: os.path.join(os.path.dirname(__file__), "examples", fn)
def test_open_url():
l = read("https://raw.githubusercontent.com/kinverarity1/"
"lasio/master/standards/examples"
"/1.2/sample_curve_api.las")
def test_open_file_object():
with open(egfn("sample.las"), mode="r") as f:
l = read(f)
def test_open_filename():
l = read(egfn("sample.las"))
def test_open_incorrect_filename():
with pytest.raises(OSError):
l = read(egfn("sampleXXXDOES NOT EXIST.las"))
def test_open_string():
l = read("""~VERSION INFORMATION
VERS. 1.2: CWLS LOG ASCII STANDARD -VERSION 1.2
WRAP. NO: ONE LINE PER DEPTH STEP
~WELL INFORMATION BLOCK
#MNEM.UNIT DATA TYPE INFORMATION
#--------- ------------- ------------------------------
STRT.M 1670.000000:
STOP.M 1660.000000:
STEP.M -0.1250:
NULL. -999.2500:
COMP. COMPANY: # ANY OIL COMPANY LTD.
WELL. WELL: ANY ET AL OIL WELL #12
FLD . FIELD: EDAM
LOC . LOCATION: A9-16-49-20W3M
PROV. PROVINCE: SASKATCHEWAN
SRVC. SERVICE COMPANY: ANY LOGGING COMPANY LTD.
DATE. LOG DATE: 25-DEC-1988
UWI . UNIQUE WELL ID: 100091604920W300
~CURVE INFORMATION
#MNEM.UNIT API CODE CURVE DESCRIPTION
#--------- ------------- ------------------------------
DEPT.M : 1 DEPTH
DT .US/M : 2 SONIC TRANSIT TIME
RHOB.K/M3 : 3 BULK DENSITY
NPHI.V/V : 4 NEUTRON POROSITY
SFLU.OHMM : 5 RXO RESISTIVITY
SFLA.OHMM : 6 SHALLOW RESISTIVITY
ILM .OHMM : 7 MEDIUM RESISTIVITY
ILD .OHMM : 8 DEEP RESISTIVITY
~PARAMETER INFORMATION
#MNEM.UNIT VALUE DESCRIPTION
#--------- ------------- ------------------------------
BHT .DEGC 35.5000: BOTTOM HOLE TEMPERATURE
BS .MM 200.0000: BIT SIZE
FD .K/M3 1000.0000: FLUID DENSITY
MATR. 0.0000: NEUTRON MATRIX(0=LIME,1=SAND,2=DOLO)
MDEN. 2710.0000: LOGGING MATRIX DENSITY
RMF .OHMM 0.2160: MUD FILTRATE RESISTIVITY
DFD .K/M3 1525.0000: DRILL FLUID DENSITY
~Other
Note: The logging tools became stuck at 625 meters causing the data
between 625 meters and 615 meters to be invalid.
~A DEPTH DT RHOB NPHI SFLU SFLA ILM ILD
1670.000 123.450 2550.000 0.450 123.450 123.450 110.200 105.600
1669.875 123.450 2550.000 0.450 123.450 123.450 110.200 105.600
1669.750 123.450 2550.000 0.450 123.450 123.450 110.200 105.600
""")<|fim▁end|> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.