code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
package com.nexusplay.containers;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.sql.SQLException;
import org.apache.commons.io.IOUtils;
import com.nexusplay.db.SubtitlesDatabase;
import com.nexusplay.security.RandomContainer;
/**
* Contains a proposed change (to a subtitle).
* @author alex
*
*/
public class Change {
    private String targetID, changedContent, id, originalContent, votes;
    // Derived: number of ';'-separated entries in the votes string.
    private int nrVotes;
    /**
     * Constructor for creating new objects, prior to storing them in the database.
     * @param changedContent The change's new (proposed) data
     * @param originalContent The content as it exists prior to the change
     * @param targetID The ID of the subtitle targeted by the change
     * @param votes Semicolon-separated user IDs that voted for this change
     */
    public Change(String changedContent, String originalContent, String targetID, String votes){
        this.changedContent = changedContent;
        this.originalContent = originalContent;
        this.targetID = targetID;
        this.votes = votes;
        nrVotes = countVotes(votes);
        generateId();
    }
    /**
     * This constructor should only be used for recreating a stored object.
     * @param changedContent The change's new (proposed) data
     * @param originalContent The content as it exists prior to the change
     * @param targetID The ID of the subtitle targeted by the change
     * @param votes Semicolon-separated user IDs that voted for this change
     * @param id The change's unique ID
     */
    public Change(String changedContent, String originalContent, String targetID, String votes, String id){
        this.changedContent = changedContent;
        this.originalContent = originalContent;
        this.targetID = targetID;
        this.votes = votes;
        nrVotes = countVotes(votes);
        this.id = id;
    }
    /**
     * Counts the votes encoded in a semicolon-separated vote string.
     * @param votes Semicolon-separated user IDs
     * @return The number of ';' separators, i.e. the vote count
     */
    private static int countVotes(String votes) {
        return votes.length() - votes.replace(";", "").length();
    }
    /**
     * Commits a change to disk by rewriting the target subtitle file.
     * Both LF and CRLF variants of the original content are replaced.
     * @throws SQLException Thrown if the database is not accessible to us for whatever reason
     * @throws FileNotFoundException Thrown if we're denied access to the subtitle file
     * @throws IOException Thrown if an error appears while writing the file
     */
    public void commitChange() throws SQLException, FileNotFoundException, IOException{
        Subtitle sub = SubtitlesDatabase.getSubtitleByID(targetID);
        File subtitleFile = new File(SettingsContainer.getAbsoluteSubtitlePath() + File.separator + sub.getId() + ".vtt");
        String content;
        // try-with-resources guarantees the streams are closed even if reading/writing throws.
        try (FileInputStream input = new FileInputStream(subtitleFile)) {
            content = IOUtils.toString(input, "UTF-8");
        }
        // Use replace() (literal) instead of replaceAll() (regex): subtitle text may contain
        // regex metacharacters ('.', '?', '[', ...) or '$'/'\' in the replacement, which would
        // corrupt the match or throw at runtime when interpreted as a pattern.
        content = content.replace(originalContent, changedContent);
        content = content.replace(originalContent.replace("\n", "\r\n"), changedContent.replace("\n", "\r\n"));
        try (FileOutputStream output = new FileOutputStream(subtitleFile)) {
            IOUtils.write(content, output, "UTF-8");
        }
    }
    /**
     * Generates a new unique ID for the item
     */
    public void generateId()
    {
        id = (new BigInteger(130, RandomContainer.getRandom())).toString(32);
    }
    /**
     * @return The ID of the Media element associated to this object
     */
    public String getTargetID() {
        return targetID;
    }
    /**
     * @param targetID The new ID of the Media element associated to this object
     */
    public void setTargetID(String targetID) {
        this.targetID = targetID;
    }
    /**
     * @return The change itself
     */
    public String getChangedContent() {
        return changedContent;
    }
    /**
     * @param content The new data to change
     */
    public void setChangedContent(String content) {
        this.changedContent = content;
    }
    /**
     * @return The change's unique ID
     */
    public String getId() {
        return id;
    }
    /**
     * @param id The change's new unique ID
     */
    public void setId(String id) {
        this.id = id;
    }
    /**
     * @return The user IDs who voted for this change
     */
    public String getVotes() {
        return votes;
    }
    /**
     * @param votes The new user IDs who voted for this change; also refreshes the vote count
     */
    public void setVotes(String votes) {
        this.votes = votes;
        nrVotes = countVotes(votes);
    }
    /**
     * @return The original content prior to changing
     */
    public String getOriginalContent() {
        return originalContent;
    }
    /**
     * @param originalContent The new original content prior to changing
     */
    public void setOriginalContent(String originalContent) {
        this.originalContent = originalContent;
    }
    /**
     * @return the nrVotes
     */
    public int getNrVotes() {
        return nrVotes;
    }
    /**
     * @param nrVotes the nrVotes to set
     */
    public void setNrVotes(int nrVotes) {
        this.nrVotes = nrVotes;
    }
}
| AlexCristian/NexusPlay | src/com/nexusplay/containers/Change.java | Java | agpl-3.0 | 4,573 |
from ctypes import *
import ctypes.util
import threading
import os
import sys
from warnings import warn
from functools import partial
import collections
import re
import traceback
# vim: ts=4 sw=4 et
# Load the libmpv shared library and pick the filesystem encoding used when
# passing file names to mpv. On Windows the DLL is expected on the DLL search
# path; elsewhere it is located via ctypes.util.find_library.
if os.name == 'nt':
    backend = CDLL('mpv-1.dll')
    fs_enc = 'utf-8'
else:
    import locale
    lc, enc = locale.getlocale(locale.LC_NUMERIC)
    # libmpv requires LC_NUMERIC to be set to "C". Since messing with global variables everyone else relies upon is
    # still better than segfaulting, we are setting LC_NUMERIC to "C".
    locale.setlocale(locale.LC_NUMERIC, 'C')
    sofile = ctypes.util.find_library('mpv')
    if sofile is None:
        raise OSError("Cannot find libmpv in the usual places. Depending on your distro, you may try installing an "
            "mpv-devel or mpv-libs package. If you have libmpv around but this script can't find it, maybe consult "
            "the documentation for ctypes.util.find_library which this script uses to look up the library "
            "filename.")
    backend = CDLL(sofile)
    fs_enc = sys.getfilesystemencoding()
class MpvHandle(c_void_p):
    # Opaque pointer type for the C mpv_handle* returned by mpv_create.
    pass
class MpvOpenGLCbContext(c_void_p):
    # Opaque pointer type for the C mpv_opengl_cb_context* sub-API handle.
    pass
class PropertyUnavailableError(AttributeError):
    # Raised when mpv reports MPV_ERROR_PROPERTY_UNAVAILABLE (-10) for a
    # property access; subclasses AttributeError so getattr-style code works.
    pass
class ErrorCode(object):
    """ For documentation on these, see mpv's libmpv/client.h """
    SUCCESS = 0
    EVENT_QUEUE_FULL = -1
    NOMEM = -2
    UNINITIALIZED = -3
    INVALID_PARAMETER = -4
    OPTION_NOT_FOUND = -5
    OPTION_FORMAT = -6
    OPTION_ERROR = -7
    PROPERTY_NOT_FOUND = -8
    PROPERTY_FORMAT = -9
    PROPERTY_UNAVAILABLE = -10
    PROPERTY_ERROR = -11
    COMMAND = -12
    # Maps each mpv error code to a factory producing a fitting python
    # exception; 0 (SUCCESS) maps to None so no exception is raised.
    EXCEPTION_DICT = {
             0:     None,
            -1:     lambda *a: MemoryError('mpv event queue full', *a),
            -2:     lambda *a: MemoryError('mpv cannot allocate memory', *a),
            -3:     lambda *a: ValueError('Uninitialized mpv handle used', *a),
            -4:     lambda *a: ValueError('Invalid value for mpv parameter', *a),
            -5:     lambda *a: AttributeError('mpv option does not exist', *a),
            -6:     lambda *a: TypeError('Tried to set mpv option using wrong format', *a),
            -7:     lambda *a: ValueError('Invalid value for mpv option', *a),
            -8:     lambda *a: AttributeError('mpv property does not exist', *a),
            # Currently (mpv 0.18.1) there is a bug causing a PROPERTY_FORMAT error to be returned instead of
            # INVALID_PARAMETER when setting a property-mapped option to an invalid value.
            -9:     lambda *a: TypeError('Tried to get/set mpv property using wrong format, or passed invalid value', *a),
            -10:    lambda *a: PropertyUnavailableError('mpv property is not available', *a),
            -11:    lambda *a: RuntimeError('Generic error getting or setting mpv property', *a),
            -12:    lambda *a: SystemError('Error running mpv command', *a) }
    @staticmethod
    def default_error_handler(ec, *args):
        # Fallback for error codes not in EXCEPTION_DICT: wrap mpv's own
        # human-readable error string in a ValueError.
        return ValueError(_mpv_error_string(ec).decode('utf-8'), ec, *args)
    @classmethod
    def raise_for_ec(kls, ec, func, *args):
        # ctypes errcheck hook: raise the mapped exception for negative return
        # codes; positive codes are success values and are clamped to 0.
        ec = 0 if ec > 0 else ec
        ex = kls.EXCEPTION_DICT.get(ec , kls.default_error_handler)
        if ex:
            raise ex(ec, *args)
class MpvFormat(c_int):
    """Enumeration of mpv data formats (C enum mpv_format)."""
    NONE = 0
    STRING = 1
    OSD_STRING = 2
    FLAG = 3
    INT64 = 4
    DOUBLE = 5
    NODE = 6
    NODE_ARRAY = 7
    NODE_MAP = 8
    BYTE_ARRAY = 9
    def __eq__(self, other):
        # Identical wrapper objects are equal; otherwise compare the wrapped
        # integer against `other` directly and against its int() coercion.
        if self is other:
            return True
        return self.value == other or self.value == int(other)
    def __repr__(self):
        names = ('NONE', 'STRING', 'OSD_STRING', 'FLAG', 'INT64', 'DOUBLE',
                 'NODE', 'NODE_ARRAY', 'NODE_MAP', 'BYTE_ARRAY')
        return names[self.value]
class MpvEventID(c_int):
    # Enumeration of mpv event ids (C enum mpv_event_id).
    NONE = 0
    SHUTDOWN = 1
    LOG_MESSAGE = 2
    GET_PROPERTY_REPLY = 3
    SET_PROPERTY_REPLY = 4
    COMMAND_REPLY = 5
    START_FILE = 6
    END_FILE = 7
    FILE_LOADED = 8
    TRACKS_CHANGED = 9
    TRACK_SWITCHED = 10
    IDLE = 11
    PAUSE = 12
    UNPAUSE = 13
    TICK = 14
    SCRIPT_INPUT_DISPATCH = 15
    CLIENT_MESSAGE = 16
    VIDEO_RECONFIG = 17
    AUDIO_RECONFIG = 18
    METADATA_UPDATE = 19
    SEEK = 20
    PLAYBACK_RESTART = 21
    PROPERTY_CHANGE = 22
    CHAPTER_CHANGE = 23
    # All event ids except NONE, in numeric order; convenient for bulk
    # enabling/disabling via mpv_request_event.
    ANY = ( SHUTDOWN, LOG_MESSAGE, GET_PROPERTY_REPLY, SET_PROPERTY_REPLY, COMMAND_REPLY, START_FILE, END_FILE,
            FILE_LOADED, TRACKS_CHANGED, TRACK_SWITCHED, IDLE, PAUSE, UNPAUSE, TICK, SCRIPT_INPUT_DISPATCH,
            CLIENT_MESSAGE, VIDEO_RECONFIG, AUDIO_RECONFIG, METADATA_UPDATE, SEEK, PLAYBACK_RESTART, PROPERTY_CHANGE,
            CHAPTER_CHANGE )
    def __repr__(self):
        return ['NONE', 'SHUTDOWN', 'LOG_MESSAGE', 'GET_PROPERTY_REPLY', 'SET_PROPERTY_REPLY', 'COMMAND_REPLY',
                'START_FILE', 'END_FILE', 'FILE_LOADED', 'TRACKS_CHANGED', 'TRACK_SWITCHED', 'IDLE', 'PAUSE', 'UNPAUSE',
                'TICK', 'SCRIPT_INPUT_DISPATCH', 'CLIENT_MESSAGE', 'VIDEO_RECONFIG', 'AUDIO_RECONFIG',
                'METADATA_UPDATE', 'SEEK', 'PLAYBACK_RESTART', 'PROPERTY_CHANGE', 'CHAPTER_CHANGE'][self.value]
class MpvNodeList(Structure):
    # C struct mpv_node_list. _fields_ are assigned after MpvNode is defined
    # (see below) because the two structures reference each other.
    def array_value(self, decode_str=False):
        # List-shaped node list: only `values` is meaningful.
        return [ self.values[i].node_value(decode_str) for i in range(self.num) ]
    def dict_value(self, decode_str=False):
        # Map-shaped node list: `keys` holds UTF-8 encoded key strings.
        return { self.keys[i].decode('utf-8'): self.values[i].node_value(decode_str) for i in range(self.num) }
class MpvNode(Structure):
    # C struct mpv_node: a tagged union of a value (`val`) and its format tag.
    _fields_ = [('val', c_longlong),
                ('format', MpvFormat)]
    def node_value(self, decode_str=False):
        # Reinterpret the raw 64-bit payload according to the format tag.
        # byref(c_void_p(...)) produces a pointer-to-payload as expected by
        # node_cast_value.
        return MpvNode.node_cast_value(byref(c_void_p(self.val)), self.format.value, decode_str)
    @staticmethod
    def node_cast_value(v, fmt, decode_str=False):
        # v: pointer to the payload; fmt: an MpvFormat value.
        # decode_str: if True, STRING payloads are decoded to python str
        # instead of being returned as bytes.
        dwrap = lambda s: s.decode('utf-8') if decode_str else s
        return {
            MpvFormat.NONE:         lambda v: None,
            MpvFormat.STRING:       lambda v: dwrap(cast(v, POINTER(c_char_p)).contents.value),
            MpvFormat.OSD_STRING:   lambda v: cast(v, POINTER(c_char_p)).contents.value.decode('utf-8'),
            MpvFormat.FLAG:         lambda v: bool(cast(v, POINTER(c_int)).contents.value),
            MpvFormat.INT64:        lambda v: cast(v, POINTER(c_longlong)).contents.value,
            MpvFormat.DOUBLE:       lambda v: cast(v, POINTER(c_double)).contents.value,
            MpvFormat.NODE:         lambda v: cast(v, POINTER(MpvNode)).contents.node_value(decode_str),
            MpvFormat.NODE_ARRAY:   lambda v: cast(v, POINTER(POINTER(MpvNodeList))).contents.contents.array_value(decode_str),
            MpvFormat.NODE_MAP:     lambda v: cast(v, POINTER(POINTER(MpvNodeList))).contents.contents.dict_value(decode_str),
            MpvFormat.BYTE_ARRAY:   lambda v: cast(v, POINTER(c_char_p)).contents.value,
        }[fmt](v)
# Deferred field assignment: MpvNodeList refers to MpvNode, which is defined
# after it, so the fields can only be filled in here.
MpvNodeList._fields_ = [('num', c_int),
                        ('values', POINTER(MpvNode)),
                        ('keys', POINTER(c_char_p))]
class MpvSubApi(c_int):
    # C enum mpv_sub_api, used with mpv_get_sub_api.
    MPV_SUB_API_OPENGL_CB = 1
class MpvEvent(Structure):
    # C struct mpv_event (libmpv/client.h).
    _fields_ = [('event_id', MpvEventID),
                ('error', c_int),
                ('reply_userdata', c_ulonglong),
                ('data', c_void_p)]
    def as_dict(self):
        # Copy the event out of ctypes-managed memory into plain python
        # objects; `data` is cast to the payload structure matching the event
        # id, or left as None for events without a known payload type.
        dtype = {MpvEventID.END_FILE:               MpvEventEndFile,
                 MpvEventID.PROPERTY_CHANGE:        MpvEventProperty,
                 MpvEventID.GET_PROPERTY_REPLY:     MpvEventProperty,
                 MpvEventID.LOG_MESSAGE:            MpvEventLogMessage,
                 MpvEventID.SCRIPT_INPUT_DISPATCH:  MpvEventScriptInputDispatch,
                 MpvEventID.CLIENT_MESSAGE:         MpvEventClientMessage
            }.get(self.event_id.value, None)
        return {'event_id': self.event_id.value,
                'error': self.error,
                'reply_userdata': self.reply_userdata,
                'event': cast(self.data, POINTER(dtype)).contents.as_dict() if dtype else None}
class MpvEventProperty(Structure):
    """mpv_event_property payload: a property name, its format and raw data."""
    _fields_ = [('name', c_char_p),
                ('format', MpvFormat),
                ('data', c_void_p)]
    def as_dict(self):
        """Convert the event payload to a plain dict.

        For STRING-formatted payloads a 'value' entry is added, coerced to the
        property's declared python type from ALL_PROPERTIES.
        """
        # Decode once; self.name is bytes (c_char_p), but ALL_PROPERTIES is
        # keyed by str, so looking it up with the raw bytes would never match
        # and every property would silently fall back to str.
        name = self.name.decode('utf-8')
        if self.format.value == MpvFormat.STRING:
            proptype, _access = ALL_PROPERTIES.get(name, (str, None))
            raw = cast(self.data, POINTER(c_char_p)).contents.value
            if proptype is bytes:
                # bytes(str) without an encoding raises TypeError; keep the
                # raw bytes for bytes-typed properties (e.g. 'filename').
                value = raw
            else:
                value = proptype(raw.decode('utf-8'))
            return {'name': name,
                    'format': self.format,
                    'data': self.data,
                    'value': value}
        else:
            return {'name': name,
                    'format': self.format,
                    'data': self.data}
class MpvEventLogMessage(Structure):
    """mpv_event_log_message payload: one log line emitted by libmpv."""
    _fields_ = [('prefix', c_char_p),
                ('level', c_char_p),
                ('text', c_char_p)]
    def as_dict(self):
        # Decode all three byte-string fields; the message text additionally
        # has its trailing newline/whitespace stripped.
        decoded = {field: getattr(self, field).decode('utf-8')
                   for field, _ctype in self._fields_}
        decoded['text'] = decoded['text'].rstrip()
        return decoded
class MpvEventEndFile(c_int):
    # mpv_event_end_file payload: the reason playback of the file ended.
    EOF_OR_INIT_FAILURE = 0
    RESTARTED = 1
    ABORTED = 2
    QUIT = 3
    def as_dict(self):
        return {'reason': self.value}
class MpvEventScriptInputDispatch(Structure):
    # mpv_event_script_input_dispatch payload (deprecated upstream).
    _fields_ = [('arg0', c_int),
                ('type', c_char_p)]
    def as_dict(self):
        # Intentionally returns None; this event type is not converted.
        pass # TODO
class MpvEventClientMessage(Structure):
    # mpv_event_client_message payload: argv-style list of message arguments.
    _fields_ = [('num_args', c_int),
                ('args', POINTER(c_char_p))]
    def as_dict(self):
        return { 'args': [ self.args[i].decode('utf-8') for i in range(self.num_args) ] }
# C callback signatures for mpv_set_wakeup_callback and the opengl-cb sub-API.
WakeupCallback = CFUNCTYPE(None, c_void_p)
OpenGlCbUpdateFn = CFUNCTYPE(None, c_void_p)
OpenGlCbGetProcAddrFn = CFUNCTYPE(None, c_void_p, c_char_p)
def _handle_func(name, args, restype, errcheck, ctx=MpvHandle):
    # Configure a libmpv function's ctypes signature and publish it into this
    # module's namespace under a leading underscore (e.g. _mpv_initialize).
    # ctx, when truthy, is prepended as the handle type of the first argument.
    func = getattr(backend, name)
    func.argtypes = [ctx] + args if ctx else args
    if restype is not None:
        func.restype = restype
    if errcheck is not None:
        func.errcheck = errcheck
    globals()['_'+name] = func
def bytes_free_errcheck(res, func, *args):
    # errcheck hook for functions returning a malloc'd string: reject NULL,
    # copy the value out, then free the mpv-allocated buffer.
    notnull_errcheck(res, func, *args)
    rv = cast(res, c_void_p).value
    _mpv_free(res)
    return rv
def notnull_errcheck(res, func, *args):
    # errcheck hook: pass non-NULL results through, raise on NULL.
    if res is not None:
        return res
    raise RuntimeError('Underspecified error in MPV when calling {} with args {!r}: NULL pointer returned.'\
            'Please consult your local debugger.'.format(func.__name__, args))
# Shorthand: errcheck hook that maps negative mpv error codes to exceptions.
ec_errcheck = ErrorCode.raise_for_ec
def _handle_gl_func(name, args=[], restype=None):
    # Like _handle_func, but for the opengl-cb sub-API whose first argument is
    # an MpvOpenGLCbContext. The mutable default `args` is never modified.
    _handle_func(name, args, restype, errcheck=None, ctx=MpvOpenGLCbContext)
# --- Bind the raw libmpv C API. Each _handle_func call publishes a module
# --- global named after the C function with a leading underscore.
backend.mpv_client_api_version.restype = c_ulong
def _mpv_client_api_version():
    # Returns (major, minor) unpacked from the packed 32-bit version word.
    ver = backend.mpv_client_api_version()
    return ver>>16, ver&0xFFFF
backend.mpv_free.argtypes = [c_void_p]
_mpv_free = backend.mpv_free
backend.mpv_free_node_contents.argtypes = [c_void_p]
_mpv_free_node_contents = backend.mpv_free_node_contents
backend.mpv_create.restype = MpvHandle
_mpv_create = backend.mpv_create
# Handle management and configuration.
_handle_func('mpv_create_client', [c_char_p], MpvHandle, notnull_errcheck)
_handle_func('mpv_client_name', [], c_char_p, errcheck=None)
_handle_func('mpv_initialize', [], c_int, ec_errcheck)
_handle_func('mpv_detach_destroy', [], None, errcheck=None)
_handle_func('mpv_terminate_destroy', [], None, errcheck=None)
_handle_func('mpv_load_config_file', [c_char_p], c_int, ec_errcheck)
_handle_func('mpv_suspend', [], None, errcheck=None)
_handle_func('mpv_resume', [], None, errcheck=None)
_handle_func('mpv_get_time_us', [], c_ulonglong, errcheck=None)
# Options, commands and properties.
_handle_func('mpv_set_option', [c_char_p, MpvFormat, c_void_p], c_int, ec_errcheck)
_handle_func('mpv_set_option_string', [c_char_p, c_char_p], c_int, ec_errcheck)
_handle_func('mpv_command', [POINTER(c_char_p)], c_int, ec_errcheck)
_handle_func('mpv_command_string', [c_char_p, c_char_p], c_int, ec_errcheck)
_handle_func('mpv_command_async', [c_ulonglong, POINTER(c_char_p)], c_int, ec_errcheck)
_handle_func('mpv_set_property', [c_char_p, MpvFormat, c_void_p], c_int, ec_errcheck)
_handle_func('mpv_set_property_string', [c_char_p, c_char_p], c_int, ec_errcheck)
_handle_func('mpv_set_property_async', [c_ulonglong, c_char_p, MpvFormat,c_void_p],c_int, ec_errcheck)
_handle_func('mpv_get_property', [c_char_p, MpvFormat, c_void_p], c_int, ec_errcheck)
_handle_func('mpv_get_property_string', [c_char_p], c_void_p, bytes_free_errcheck)
_handle_func('mpv_get_property_osd_string', [c_char_p], c_void_p, bytes_free_errcheck)
_handle_func('mpv_get_property_async', [c_ulonglong, c_char_p, MpvFormat], c_int, ec_errcheck)
_handle_func('mpv_observe_property', [c_ulonglong, c_char_p, MpvFormat], c_int, ec_errcheck)
_handle_func('mpv_unobserve_property', [c_ulonglong], c_int, ec_errcheck)
# Events and wakeup plumbing.
_handle_func('mpv_event_name', [c_int], c_char_p, errcheck=None, ctx=None)
_handle_func('mpv_error_string', [c_int], c_char_p, errcheck=None, ctx=None)
_handle_func('mpv_request_event', [MpvEventID, c_int], c_int, ec_errcheck)
_handle_func('mpv_request_log_messages', [c_char_p], c_int, ec_errcheck)
_handle_func('mpv_wait_event', [c_double], POINTER(MpvEvent), errcheck=None)
_handle_func('mpv_wakeup', [], None, errcheck=None)
_handle_func('mpv_set_wakeup_callback', [WakeupCallback, c_void_p], None, errcheck=None)
_handle_func('mpv_get_wakeup_pipe', [], c_int, errcheck=None)
_handle_func('mpv_get_sub_api', [MpvSubApi], c_void_p, notnull_errcheck)
# opengl-cb sub-API.
_handle_gl_func('mpv_opengl_cb_set_update_callback', [OpenGlCbUpdateFn, c_void_p])
_handle_gl_func('mpv_opengl_cb_init_gl', [c_char_p, OpenGlCbGetProcAddrFn, c_void_p], c_int)
_handle_gl_func('mpv_opengl_cb_draw', [c_int, c_int, c_int], c_int)
_handle_gl_func('mpv_opengl_cb_render', [c_int, c_int], c_int)
_handle_gl_func('mpv_opengl_cb_report_flip', [c_ulonglong], c_int)
_handle_gl_func('mpv_opengl_cb_uninit_gl', [], c_int)
def _ensure_encoding(possibly_bytes):
return possibly_bytes.decode('utf-8') if type(possibly_bytes) is bytes else possibly_bytes
def _event_generator(handle):
    """Yield mpv events for `handle` until the NONE event is received.

    Blocks indefinitely inside _mpv_wait_event between events.
    """
    while True:
        event = _mpv_wait_event(handle, -1).contents
        if event.event_id.value == MpvEventID.NONE:
            # PEP 479 (Python 3.7+): raising StopIteration inside a generator
            # is converted to RuntimeError; a plain return ends iteration.
            return
        yield event
def load_lua():
    """ Use this function if you intend to use mpv's built-in lua interpreter. This is e.g. needed for playback of
    youtube urls. """
    # Loading liblua with RTLD_GLOBAL makes its symbols visible to libmpv.
    CDLL('liblua.so', mode=RTLD_GLOBAL)
def _event_loop(event_handle, playback_cond, event_callbacks, message_handlers, property_handlers, log_handler):
    # Runs on the MPVEventHandlerThread: pulls events from libmpv, converts
    # them to dicts and dispatches them to the registered python handlers.
    # Terminates (detaching the event handle) when SHUTDOWN is received.
    for event in _event_generator(event_handle):
        try:
            devent = event.as_dict() # copy data from ctypes
            eid = devent['event_id']
            # Generic callbacks see every event.
            for callback in event_callbacks:
                callback(devent)
            if eid in (MpvEventID.SHUTDOWN, MpvEventID.END_FILE):
                # Wake up anyone blocked in MPV.wait_for_playback().
                with playback_cond:
                    playback_cond.notify_all()
            if eid == MpvEventID.PROPERTY_CHANGE:
                pc = devent['event']
                name = pc['name']
                if 'value' in pc:
                    # STRING-formatted payload: coerce to the declared type.
                    proptype, _access = ALL_PROPERTIES[name]
                    if proptype is bytes:
                        args = (pc['value'],)
                    else:
                        args = (proptype(_ensure_encoding(pc['value'])),)
                elif pc['format'] == MpvFormat.NONE:
                    # Property became unavailable.
                    args = (None,)
                else:
                    # Unconverted payload: hand over raw pointer and format.
                    args = (pc['data'], pc['format'])
                for handler in property_handlers[name]:
                    handler(*args)
            if eid == MpvEventID.LOG_MESSAGE and log_handler is not None:
                ev = devent['event']
                log_handler(ev['level'], ev['prefix'], ev['text'])
            if eid == MpvEventID.CLIENT_MESSAGE:
                # {'event': {'args': ['key-binding', 'foo', 'u-', 'g']}, 'reply_userdata': 0, 'error': 0, 'event_id': 16}
                target, *args = devent['event']['args']
                if target in message_handlers:
                    message_handlers[target](*args)
            if eid == MpvEventID.SHUTDOWN:
                _mpv_detach_destroy(event_handle)
                return
        except Exception as e:
            # Never let a handler exception kill the event thread.
            traceback.print_exc()
class MPV(object):
    """ See man mpv(1) for the details of the implemented commands. """
    def __init__(self, *extra_mpv_flags, log_handler=None, start_event_thread=True, **extra_mpv_opts):
        """ Create an MPV instance.
        Extra arguments and extra keyword arguments will be passed to mpv as options. """
        self._event_thread = None
        self.handle = _mpv_create()
        _mpv_set_option_string(self.handle, b'audio-display', b'no')
        istr = lambda o: ('yes' if o else 'no') if type(o) is bool else str(o)
        try:
            for flag in extra_mpv_flags:
                _mpv_set_option_string(self.handle, flag.encode('utf-8'), b'')
            for k,v in extra_mpv_opts.items():
                _mpv_set_option_string(self.handle, k.replace('_', '-').encode('utf-8'), istr(v).encode('utf-8'))
        except AttributeError as e:
            # Initialize anyway before re-raising so terminate()/__del__ can
            # still tear the handle down cleanly.
            _mpv_initialize(self.handle)
            raise e
        _mpv_initialize(self.handle)
        self._event_callbacks = []
        self._property_handlers = collections.defaultdict(lambda: [])
        self._message_handlers = {}
        self._key_binding_handlers = {}
        self._playback_cond = threading.Condition()
        # Separate client handle dedicated to the event loop thread.
        self._event_handle = _mpv_create_client(self.handle, b'py_event_handler')
        self._loop = partial(_event_loop, self._event_handle, self._playback_cond, self._event_callbacks,
                self._message_handlers, self._property_handlers, log_handler)
        if start_event_thread:
            self._event_thread = threading.Thread(target=self._loop, name='MPVEventHandlerThread')
            # Attribute assignment instead of setDaemon(), which is deprecated.
            self._event_thread.daemon = True
            self._event_thread.start()
        else:
            self._event_thread = None
        if log_handler is not None:
            self.set_loglevel('terminal-default')
    def wait_for_playback(self):
        """ Waits until playback of the current title is paused or done """
        with self._playback_cond:
            self._playback_cond.wait()
    def wait_for_property(self, name, cond=lambda val: val, level_sensitive=True):
        """ Blocks until property `name` satisfies `cond`. With level_sensitive
        (default) the current value is checked first; otherwise only future
        changes are considered. """
        sema = threading.Semaphore(value=0)
        def observer(val):
            if cond(val):
                sema.release()
        self.observe_property(name, observer)
        if not level_sensitive or not cond(getattr(self, name.replace('-', '_'))):
            sema.acquire()
        self.unobserve_property(name, observer)
    def __del__(self):
        if self.handle:
            self.terminate()
    def terminate(self):
        """ Tear down the mpv core and join the event thread. Safe to call
        from the event thread itself. """
        self.handle, handle = None, self.handle
        if threading.current_thread() is self._event_thread:
            # Handle special case to allow event handle to be detached.
            # This is necessary since otherwise the event thread would deadlock itself.
            grim_reaper = threading.Thread(target=lambda: _mpv_terminate_destroy(handle))
            grim_reaper.start()
        else:
            _mpv_terminate_destroy(handle)
            if self._event_thread:
                self._event_thread.join()
    def set_loglevel(self, level):
        """ Request log messages at the given level (see mpv_request_log_messages). """
        _mpv_request_log_messages(self._event_handle, level.encode('utf-8'))
    def command(self, name, *args):
        """ Execute a raw command """
        # mpv expects a NULL-terminated argv of byte strings; None args are dropped.
        args = [name.encode('utf-8')] + [ (arg if type(arg) is bytes else str(arg).encode('utf-8'))
                for arg in args if arg is not None ] + [None]
        _mpv_command(self.handle, (c_char_p*len(args))(*args))
    def seek(self, amount, reference="relative", precision="default-precise"):
        self.command('seek', amount, reference, precision)
    def revert_seek(self):
        self.command('revert_seek')
    def frame_step(self):
        self.command('frame_step')
    def frame_back_step(self):
        self.command('frame_back_step')
    def _add_property(self, name, value=None):
        self.command('add_property', name, value)
    def _cycle_property(self, name, direction='up'):
        self.command('cycle_property', name, direction)
    def _multiply_property(self, name, factor):
        self.command('multiply_property', name, factor)
    def screenshot(self, includes='subtitles', mode='single'):
        self.command('screenshot', includes, mode)
    def screenshot_to_file(self, filename, includes='subtitles'):
        self.command('screenshot_to_file', filename.encode(fs_enc), includes)
    def playlist_next(self, mode='weak'):
        self.command('playlist_next', mode)
    def playlist_prev(self, mode='weak'):
        self.command('playlist_prev', mode)
    @staticmethod
    def _encode_options(options):
        # Render keyword options as mpv's "key=value,key=value" suffix syntax.
        return ','.join('{}={}'.format(str(key), str(val)) for key, val in options.items())
    def loadfile(self, filename, mode='replace', **options):
        self.command('loadfile', filename.encode(fs_enc), mode, MPV._encode_options(options))
    def loadlist(self, playlist, mode='replace'):
        self.command('loadlist', playlist.encode(fs_enc), mode)
    def playlist_clear(self):
        self.command('playlist_clear')
    def playlist_remove(self, index='current'):
        self.command('playlist_remove', index)
    def playlist_move(self, index1, index2):
        self.command('playlist_move', index1, index2)
    def run(self, command, *args):
        self.command('run', command, *args)
    def quit(self, code=None):
        self.command('quit', code)
    def quit_watch_later(self, code=None):
        self.command('quit_watch_later', code)
    def sub_add(self, filename):
        self.command('sub_add', filename.encode(fs_enc))
    def sub_remove(self, sub_id=None):
        self.command('sub_remove', sub_id)
    def sub_reload(self, sub_id=None):
        self.command('sub_reload', sub_id)
    def sub_step(self, skip):
        self.command('sub_step', skip)
    def sub_seek(self, skip):
        self.command('sub_seek', skip)
    def toggle_osd(self):
        self.command('osd')
    def show_text(self, string, duration='-', level=None):
        self.command('show_text', string, duration, level)
    def show_progress(self):
        self.command('show_progress')
    def discnav(self, command):
        self.command('discnav', command)
    def write_watch_later_config(self):
        self.command('write_watch_later_config')
    def overlay_add(self, overlay_id, x, y, file_or_fd, offset, fmt, w, h, stride):
        self.command('overlay_add', overlay_id, x, y, file_or_fd, offset, fmt, w, h, stride)
    def overlay_remove(self, overlay_id):
        self.command('overlay_remove', overlay_id)
    def script_message(self, *args):
        self.command('script_message', *args)
    def script_message_to(self, target, *args):
        self.command('script_message_to', target, *args)
    def observe_property(self, name, handler):
        """ Register `handler` to be called with the new value whenever
        property `name` changes. """
        self._property_handlers[name].append(handler)
        _mpv_observe_property(self._event_handle, hash(name)&0xffffffffffffffff, name.encode('utf-8'), MpvFormat.STRING)
    def unobserve_property(self, name, handler):
        """ Remove a previously registered property handler; stops observing
        the property once no handlers remain. """
        handlers = self._property_handlers[name]
        handlers.remove(handler)
        if not handlers:
            _mpv_unobserve_property(self._event_handle, hash(name)&0xffffffffffffffff)
    def register_message_handler(self, target, handler):
        self._message_handlers[target] = handler
    def unregister_message_handler(self, target):
        del self._message_handlers[target]
    def register_event_callback(self, callback):
        self._event_callbacks.append(callback)
    def unregister_event_callback(self, callback):
        self._event_callbacks.remove(callback)
    @staticmethod
    def _binding_name(callback_or_cmd):
        # Stable, collision-resistant section name derived from the keydef/command.
        return 'py_kb_{:016x}'.format(hash(callback_or_cmd)&0xffffffffffffffff)
    def register_key_binding(self, keydef, callback_or_cmd, mode='force'):
        """ BIG FAT WARNING: mpv's key binding mechanism is pretty powerful. This means, you essentially get arbitrary
        code exectution through key bindings. This interface makes some limited effort to sanitize the keydef given in
        the first parameter, but YOU SHOULD NOT RELY ON THIS IN FOR SECURITY. If your input comes from config files,
        this is completely fine--but, if you are about to pass untrusted input into this parameter, better double-check
        whether this is secure in your case. """
        # The '+' must be escaped: an unescaped 'Shift+' means "Shif followed
        # by one or more t", not the literal modifier prefix.
        if not re.match(r'(Shift\+)?(Ctrl\+)?(Alt\+)?(Meta\+)?(.|\w+)', keydef):
            raise ValueError('Invalid keydef. Expected format: [Shift+][Ctrl+][Alt+][Meta+]<key>\n'
                    '<key> is either the literal character the key produces (ASCII or Unicode character), or a '
                    'symbolic name (as printed by --input-keylist')
        binding_name = MPV._binding_name(keydef)
        if callable(callback_or_cmd):
            self._key_binding_handlers[binding_name] = callback_or_cmd
            self.register_message_handler('key-binding', self._handle_key_binding_message)
            self.command('define-section',
                    binding_name, '{} script-binding py_event_handler/{}'.format(keydef, binding_name), mode)
        elif isinstance(callback_or_cmd, str):
            self.command('define-section', binding_name, '{} {}'.format(keydef, callback_or_cmd), mode)
        else:
            raise TypeError('register_key_binding expects either an str with an mpv command or a python callable.')
        self.command('enable-section', binding_name)
    def _handle_key_binding_message(self, binding_name, key_state, key_name):
        self._key_binding_handlers[binding_name](key_state, key_name)
    def unregister_key_binding(self, keydef):
        """ Undo register_key_binding for the given keydef. """
        binding_name = MPV._binding_name(keydef)
        self.command('disable-section', binding_name)
        self.command('define-section', binding_name, '')
        # Fixed: the original tested `callable(callback)` where `callback` was
        # never defined (NameError). Check for a stored python handler instead.
        if binding_name in self._key_binding_handlers:
            del self._key_binding_handlers[binding_name]
            if not self._key_binding_handlers:
                self.unregister_message_handler('key-binding')
    # Convenience functions
    def play(self, filename):
        self.loadfile(filename)
    # Property accessors
    def _get_property(self, name, proptype=str, decode_str=False):
        """ Read a property and coerce it to `proptype`. Returns None when mpv
        reports the property as unavailable. """
        fmt = {int:         MpvFormat.INT64,
               float:       MpvFormat.DOUBLE,
               bool:        MpvFormat.FLAG,
               str:         MpvFormat.STRING,
               bytes:       MpvFormat.STRING,
               commalist:   MpvFormat.STRING,
               MpvFormat.NODE: MpvFormat.NODE}[proptype]
        out = cast(create_string_buffer(sizeof(c_void_p)), c_void_p)
        outptr = byref(out)
        try:
            cval = _mpv_get_property(self.handle, name.encode('utf-8'), fmt, outptr)
            rv = MpvNode.node_cast_value(outptr, fmt, decode_str or proptype in (str, commalist))
            if proptype is commalist:
                rv = proptype(rv)
            if proptype is str:
                _mpv_free(out)
            elif proptype is MpvFormat.NODE:
                _mpv_free_node_contents(outptr)
            return rv
        except PropertyUnavailableError as ex:
            return None
    def _set_property(self, name, value, proptype=str):
        """ Write a property, serializing `value` as mpv expects. """
        ename = name.encode('utf-8')
        if type(value) is bytes:
            _mpv_set_property_string(self.handle, ename, value)
        elif type(value) is bool:
            _mpv_set_property_string(self.handle, ename, b'yes' if value else b'no')
        elif proptype in (str, int, float):
            _mpv_set_property_string(self.handle, ename, str(proptype(value)).encode('utf-8'))
        else:
            raise TypeError('Cannot set {} property {} to value of type {}'.format(proptype, name, type(value)))
    # Dict-like option access
    def __getitem__(self, name, file_local=False):
        """ Get an option value """
        prefix = 'file-local-options/' if file_local else 'options/'
        return self._get_property(prefix+name)
    def __setitem__(self, name, value, file_local=False):
        """ Set an option value """
        prefix = 'file-local-options/' if file_local else 'options/'
        return self._set_property(prefix+name, value)
    def __iter__(self):
        """ Iterate over the names of all mpv options """
        return iter(self.options)
    def option_info(self, name):
        return self._get_property('option-info/'+name)
def commalist(propval=''):
    """Split an mpv comma-separated property value into a list of strings."""
    text = propval if isinstance(propval, str) else str(propval)
    return text.split(',')
# Shorthand used as a "property type" marker for NODE-formatted properties.
node = MpvFormat.NODE
# Master table of mpv properties: name -> (python type, access flags
# 'r'/'w'/'rw'[, decode_str for node-typed entries]). Consumed both by the
# property binding loop at the bottom of the file and by the event machinery.
ALL_PROPERTIES = {
        'osd-level':                    (int,    'rw'),
        'osd-scale':                    (float,  'rw'),
        'loop':                         (str,    'rw'),
        'loop-file':                    (str,    'rw'),
        'speed':                        (float,  'rw'),
        'filename':                     (bytes,  'r'),
        'file-size':                    (int,    'r'),
        'path':                         (bytes,  'r'),
        'media-title':                  (bytes,  'r'),
        'stream-pos':                   (int,    'rw'),
        'stream-end':                   (int,    'r'),
        'length':                       (float,  'r'), # deprecated for ages now
        'duration':                     (float,  'r'),
        'avsync':                       (float,  'r'),
        'total-avsync-change':          (float,  'r'),
        'drop-frame-count':             (int,    'r'),
        'percent-pos':                  (float,  'rw'),
#        'ratio-pos':                    (float,  'rw'),
        'time-pos':                     (float,  'rw'),
        'time-start':                   (float,  'r'),
        'time-remaining':               (float,  'r'),
        'playtime-remaining':           (float,  'r'),
        'chapter':                      (int,    'rw'),
        'edition':                      (int,    'rw'),
        'disc-titles':                  (int,    'r'),
        'disc-title':                   (str,    'rw'),
#        'disc-menu-active':             (bool,   'r'),
        'chapters':                     (int,    'r'),
        'editions':                     (int,    'r'),
        'angle':                        (int,    'rw'),
        'pause':                        (bool,   'rw'),
        'core-idle':                    (bool,   'r'),
        'cache':                        (int,    'r'),
        'cache-size':                   (int,    'rw'),
        'cache-free':                   (int,    'r'),
        'cache-used':                   (int,    'r'),
        'cache-speed':                  (int,    'r'),
        'cache-idle':                   (bool,   'r'),
        'cache-buffering-state':        (int,    'r'),
        'paused-for-cache':             (bool,   'r'),
#        'pause-for-cache':              (bool,   'r'),
        'eof-reached':                  (bool,   'r'),
#        'pts-association-mode':         (str,    'rw'),
        'hr-seek':                      (str,    'rw'),
        'volume':                       (float,  'rw'),
        'volume-max':                   (int,    'rw'),
        'ao-volume':                    (float,  'rw'),
        'mute':                         (bool,   'rw'),
        'ao-mute':                      (bool,   'rw'),
        'audio-speed-correction':       (float,  'r'),
        'audio-delay':                  (float,  'rw'),
        'audio-format':                 (str,    'r'),
        'audio-codec':                  (str,    'r'),
        'audio-codec-name':             (str,    'r'),
        'audio-bitrate':                (float,  'r'),
        'packet-audio-bitrate':         (float,  'r'),
        'audio-samplerate':             (int,    'r'),
        'audio-channels':               (str,    'r'),
        'aid':                          (str,    'rw'),
        'audio':                        (str,    'rw'), # alias for aid
        'balance':                      (int,    'rw'),
        'fullscreen':                   (bool,   'rw'),
        'deinterlace':                  (str,    'rw'),
        'colormatrix':                  (str,    'rw'),
        'colormatrix-input-range':      (str,    'rw'),
#        'colormatrix-output-range':     (str,    'rw'),
        'colormatrix-primaries':        (str,    'rw'),
        'ontop':                        (bool,   'rw'),
        'border':                       (bool,   'rw'),
        'framedrop':                    (str,    'rw'),
        'gamma':                        (float,  'rw'),
        'brightness':                   (int,    'rw'),
        'contrast':                     (int,    'rw'),
        'saturation':                   (int,    'rw'),
        'hue':                          (int,    'rw'),
        'hwdec':                        (str,    'rw'),
        'panscan':                      (float,  'rw'),
        'video-format':                 (str,    'r'),
        'video-codec':                  (str,    'r'),
        'video-bitrate':                (float,  'r'),
        'packet-video-bitrate':         (float,  'r'),
        'width':                        (int,    'r'),
        'height':                       (int,    'r'),
        'dwidth':                       (int,    'r'),
        'dheight':                      (int,    'r'),
        'fps':                          (float,  'r'),
        'estimated-vf-fps':             (float,  'r'),
        'window-scale':                 (float,  'rw'),
        'video-aspect':                 (str,    'rw'),
        'osd-width':                    (int,    'r'),
        'osd-height':                   (int,    'r'),
        'osd-par':                      (float,  'r'),
        'vid':                          (str,    'rw'),
        'video':                        (str,    'rw'), # alias for vid
        'video-align-x':                (float,  'rw'),
        'video-align-y':                (float,  'rw'),
        'video-pan-x':                  (float,  'rw'),
        'video-pan-y':                  (float,  'rw'),
        'video-zoom':                   (float,  'rw'),
        'video-unscaled':               (bool,   'w'),
        'video-speed-correction':       (float,  'r'),
        'program':                      (int,    'w'),
        'sid':                          (str,    'rw'),
        'sub':                          (str,    'rw'), # alias for sid
        'secondary-sid':                (str,    'rw'),
        'sub-delay':                    (float,  'rw'),
        'sub-pos':                      (int,    'rw'),
        'sub-visibility':               (bool,   'rw'),
        'sub-forced-only':              (bool,   'rw'),
        'sub-scale':                    (float,  'rw'),
        'sub-bitrate':                  (float,  'r'),
        'packet-sub-bitrate':           (float,  'r'),
#        'ass-use-margins':              (bool,   'rw'),
        'ass-vsfilter-aspect-compat':   (bool,   'rw'),
        'ass-style-override':           (bool,   'rw'),
        'stream-capture':               (str,    'rw'),
        'tv-brightness':                (int,    'rw'),
        'tv-contrast':                  (int,    'rw'),
        'tv-saturation':                (int,    'rw'),
        'tv-hue':                       (int,    'rw'),
        'playlist-pos':                 (int,    'rw'),
        'playlist-pos-1':               (int,    'rw'), # ugh.
        'playlist-count':               (int,    'r'),
#        'quvi-format':                  (str,    'rw'),
        'seekable':                     (bool,   'r'),
        'seeking':                      (bool,   'r'),
        'partially-seekable':           (bool,   'r'),
        'playback-abort':               (bool,   'r'),
        'cursor-autohide':              (str,    'rw'),
        'audio-device':                 (str,    'rw'),
        'current-vo':                   (str,    'r'),
        'current-ao':                   (str,    'r'),
        'audio-out-detected-device':    (str,    'r'),
        'protocol-list':                (str,    'r'),
        'mpv-version':                  (str,    'r'),
        'mpv-configuration':            (str,    'r'),
        'ffmpeg-version':               (str,    'r'),
        'display-sync-active':          (bool,   'r'),
        'stream-open-filename':         (bytes,  'rw'), # Undocumented
        'file-format':                  (commalist,'r'), # Be careful with this one.
        'mistimed-frame-count':         (int,    'r'),
        'vsync-ratio':                  (float,  'r'),
        'vo-drop-frame-count':          (int,    'r'),
        'vo-delayed-frame-count':       (int,    'r'),
        'playback-time':                (float,  'rw'),
        'demuxer-cache-duration':       (float,  'r'),
        'demuxer-cache-time':           (float,  'r'),
        'demuxer-cache-idle':           (bool,   'r'),
        'idle':                         (bool,   'r'),
        'disc-title-list':              (commalist,'r'),
        'field-dominance':              (str,    'rw'),
        'taskbar-progress':             (bool,   'rw'),
        'on-all-workspaces':            (bool,   'rw'),
        'video-output-levels':          (str,    'r'),
        'vo-configured':                (bool,   'r'),
        'hwdec-current':                (str,    'r'),
        'hwdec-interop':                (str,    'r'),
        'estimated-frame-count':        (int,    'r'),
        'estimated-frame-number':       (int,    'r'),
        'sub-use-margins':              (bool,   'rw'),
        'ass-force-margins':            (bool,   'rw'),
        'video-rotate':                 (str,    'rw'),
        'video-stereo-mode':            (str,    'rw'),
        'ab-loop-a':                    (str,    'r'), # What a mess...
        'ab-loop-b':                    (str,    'r'),
        'dvb-channel':                  (str,    'w'),
        'dvb-channel-name':             (str,    'rw'),
        'window-minimized':             (bool,   'r'),
        'display-names':                (commalist, 'r'),
        'display-fps':                  (float,  'r'), # access apparently misdocumented in the manpage
        'estimated-display-fps':        (float,  'r'),
        'vsync-jitter':                 (float,  'r'),
        'video-params':                 (node,   'r', True),
        'video-out-params':             (node,   'r', True),
        'track-list':                   (node,   'r', False),
        'playlist':                     (node,   'r', False),
        'chapter-list':                 (node,   'r', False),
        'vo-performance':               (node,   'r', True),
        'filtered-metadata':            (node,   'r', False),
        'metadata':                     (node,   'r', False),
        'chapter-metadata':             (node,   'r', False),
        'vf-metadata':                  (node,   'r', False),
        'af-metadata':                  (node,   'r', False),
        'edition-list':                 (node,   'r', False),
        'disc-titles':                  (node,   'r', False),
        'audio-params':                 (node,   'r', True),
        'audio-out-params':             (node,   'r', True),
        'audio-device-list':            (node,   'r', True),
        'video-frame-info':             (node,   'r', True),
        'decoder-list':                 (node,   'r', True),
        'encoder-list':                 (node,   'r', True),
        'vf':                           (node,   'r', True),
        'af':                           (node,   'r', True),
        'options':                      (node,   'r', True),
        'file-local-options':           (node,   'r', True),
        'property-list':                (commalist,'r')}
def bindproperty(MPV, name, proptype, access, decode_str=False):
    """Attach the mpv property *name* as a Python property on the MPV class.

    Dashes in the mpv property name are converted to underscores to form a
    valid attribute name.  Reading when 'r' is not in *access*, or writing
    when 'w' is not, raises NotImplementedError.
    """
    def denied(*_args):
        raise NotImplementedError('Access denied')

    def read(self):
        return self._get_property(name, proptype, decode_str)

    def write(self, value):
        self._set_property(name, value, proptype)

    attr_name = name.replace('-', '_')
    fget = read if 'r' in access else denied
    fset = write if 'w' in access else denied
    setattr(MPV, attr_name, property(fget, fset))
# Generate one Python property on the MPV class for every known mpv property.
# ``args`` carries the optional third tuple element (the decode_str flag)
# present on some ALL_PROPERTIES entries.
for name, (proptype, access, *args) in ALL_PROPERTIES.items():
    bindproperty(MPV, name, proptype, access, *args)
| Frechdachs/python-mpv | mpv.py | Python | agpl-3.0 | 42,232 |
/*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2017 Kuali, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.module.purap.document.validation.impl;
import org.kuali.kfs.coreservice.framework.parameter.ParameterService;
import org.kuali.kfs.module.purap.PurapConstants;
import org.kuali.kfs.module.purap.PurapParameterConstants;
import org.kuali.kfs.module.purap.PurapRuleConstants;
import org.kuali.kfs.module.purap.document.RequisitionDocument;
import org.kuali.kfs.sys.context.SpringContext;
import org.kuali.kfs.sys.document.validation.event.AttributedDocumentEvent;
public class RequisitionNewIndividualItemValidation extends PurchasingNewIndividualItemValidation {

    /**
     * Delegates requisition item validation to the shared purchasing rules.
     */
    public boolean validate(AttributedDocumentEvent event) {
        return super.validate(event);
    }

    /**
     * A commodity code is required only when the feature is enabled globally
     * (ENABLE_COMMODITY_CODE_IND) and requisition documents are configured to
     * demand it (ITEMS_REQUIRE_COMMODITY_CODE_IND).  When the global switch is
     * off, the per-document parameter is never consulted.
     */
    @Override
    protected boolean commodityCodeIsRequired() {
        ParameterService parameterService = SpringContext.getBean(ParameterService.class);
        boolean commodityCodeEnabled = parameterService.getParameterValueAsBoolean(
                PurapConstants.PURAP_NAMESPACE, "Document", PurapParameterConstants.ENABLE_COMMODITY_CODE_IND);
        return commodityCodeEnabled
                && super.getParameterService().getParameterValueAsBoolean(
                        RequisitionDocument.class, PurapRuleConstants.ITEMS_REQUIRE_COMMODITY_CODE_IND);
    }
}
| quikkian-ua-devops/will-financials | kfs-purap/src/main/java/org/kuali/kfs/module/purap/document/validation/impl/RequisitionNewIndividualItemValidation.java | Java | agpl-3.0 | 2,205 |
<?php
/**
* plentymarkets shopware connector
* Copyright © 2013 plentymarkets GmbH
*
* According to our dual licensing model, this program can be used either
* under the terms of the GNU Affero General Public License, version 3,
* or under a proprietary license.
*
* The texts of the GNU Affero General Public License, supplemented by an additional
* permission, and of our proprietary license can be found
* in the LICENSE file you have received along with this program.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* "plentymarkets" is a registered trademark of plentymarkets GmbH.
* "shopware" is a registered trademark of shopware AG.
* The licensing of the program under the AGPLv3 does not imply a
* trademark license. Therefore any rights, titles and interests in the
* above trademarks remain entirely with the trademark owners.
*
* @copyright Copyright (c) 2013, plentymarkets GmbH (http://www.plentymarkets.com)
* @author Daniel Bächtle <daniel.baechtle@plentymarkets.com>
*/
/**
* I am a generated class and am required for communicating with plentymarkets.
*/
/**
 * I am a generated class and am required for communicating with plentymarkets.
 *
 * Plain SOAP data-transfer object describing a plentymarkets item ("ItemBase").
 * All members are public and are (de)serialized directly by the SOAP layer.
 * Do not edit by hand — this class is generated; regenerate it instead.
 */
class PlentySoapObject_ItemBase
{
	/**
	 * @var ArrayOfPlentysoapobject_itemattributevalueset
	 */
	public $AttributeValueSets;
	/**
	 * @var PlentySoapObject_ItemAvailability
	 */
	public $Availability;
	/**
	 * @var string
	 */
	public $BundleType;
	/**
	 * @var ArrayOfPlentysoapobject_itemcategory
	 */
	public $Categories;
	/**
	 * @var int
	 */
	public $Condition;
	/**
	 * @var string
	 */
	public $CustomsTariffNumber;
	/**
	 * @var string
	 */
	public $DeepLink;
	/**
	 * @var string
	 */
	public $EAN1;
	/**
	 * @var string
	 */
	public $EAN2;
	/**
	 * @var string
	 */
	public $EAN3;
	/**
	 * @var string
	 */
	public $EAN4;
	/**
	 * @var string
	 */
	public $ExternalItemID;
	/**
	 * @var int
	 */
	public $FSK;
	/**
	 * @var PlentySoapObject_ItemFreeTextFields
	 */
	public $FreeTextFields;
	/**
	 * @var int
	 */
	public $HasAttributes;
	/**
	 * @var string
	 */
	public $ISBN;
	/**
	 * @var int
	 */
	public $Inserted;
	/**
	 * @var ArrayOfPlentysoapobject_itemattributemarkup
	 */
	public $ItemAttributeMarkup;
	/**
	 * @var int
	 */
	public $ItemID;
	/**
	 * @var string
	 */
	public $ItemNo;
	/**
	 * @var ArrayOfPlentysoapobject_itemproperty
	 */
	public $ItemProperties;
	/**
	 * @var ArrayOfPlentysoapobject_itemsupplier
	 */
	public $ItemSuppliers;
	/**
	 * @var string
	 */
	public $ItemURL;
	/**
	 * @var int
	 */
	public $LastUpdate;
	/**
	 * @var int
	 */
	public $Marking1ID;
	/**
	 * @var int
	 */
	public $Marking2ID;
	/**
	 * @var string
	 */
	public $Model;
	/**
	 * @var PlentySoapObject_ItemOthers
	 */
	public $Others;
	/**
	 * @var ArrayOfPlentysoapobject_integer
	 */
	public $ParcelServicePresetIDs;
	/**
	 * @var string
	 */
	public $Position;
	/**
	 * @var PlentySoapObject_ItemPriceSet
	 */
	public $PriceSet;
	/**
	 * @var int
	 */
	public $ProducerID;
	/**
	 * @var string
	 */
	public $ProducerName;
	/**
	 * @var int
	 */
	public $ProducingCountryID;
	/**
	 * @var int
	 */
	public $Published;
	/**
	 * @var PlentySoapObject_ItemStock
	 */
	public $Stock;
	/**
	 * @var int
	 */
	public $StorageLocation;
	/**
	 * @var PlentySoapObject_ItemTexts
	 */
	public $Texts;
	/**
	 * @var int
	 */
	public $Type;
	/**
	 * @var int
	 */
	public $VATInternalID;
	/**
	 * @var string
	 */
	public $WebShopSpecial;
}
| k-30/plentymarkets-shopware-connector | Components/Soap/Models/PlentySoapObject/ItemBase.php | PHP | agpl-3.0 | 3,646 |
# -*- coding: utf-8 -*-
"""
2020-09-07 Cornelius Kölbel <cornelius.koelbel@netknights.it>
Add exception
2017-04-26 Friedrich Weber <friedrich.weber@netknights.it>
Make it possible to check for correct LDAPS/STARTTLS settings
2017-01-08 Cornelius Kölbel <cornelius.koelbel@netknights.it>
Remove objectGUID. Since we stick with ldap3 version 2.1,
the objectGUID is returned in a human readable format.
2016-12-05 Martin Wheldon <martin.wheldon@greenhills-it.co.uk>
Fixed issue creating ldap entries with objectClasses defined
Fix problem when searching for attribute values containing the
space character.
2016-05-26 Martin Wheldon <martin.wheldon@greenhills-it.co.uk>
Rewrite of search functionality to add recursive parsing
of ldap search filters
Fixed issue searching for attributes with multiple values
Added ability to use ~= in searches
Created unittests for mock
2016-02-19 Cornelius Kölbel <cornelius.koelbel@netknights.it>
Add the possibility to check objectGUID
2015-01-31 Change responses.py to be able to run with SMTP
Cornelius Kölbel <cornelius@privacyidea.org>
Original responses.py is:
Copyright 2013 Dropbox, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import (
absolute_import, division, unicode_literals
)
from passlib.hash import ldap_salted_sha1
from ast import literal_eval
import uuid
from ldap3.utils.conv import escape_bytes
import ldap3
import re
import pyparsing
from .smtpmock import get_wrapped
from collections import namedtuple, Sequence, Sized
from privacyidea.lib.utils import to_bytes, to_unicode
DIRECTORY = "tests/testdata/tmp_directory"
Call = namedtuple('Call', ['request', 'response'])
_wrapper_template = """\
def wrapper%(signature)s:
with ldap3mock:
return func%(funcargs)s
"""
def _convert_objectGUID(item):
    """Render a textual objectGUID the way an LDAP server would encode it.

    The GUID string is parsed as a UUID, converted to its little-endian
    byte representation, and escaped for use inside an LDAP filter value.
    """
    guid_bytes = uuid.UUID("{{{0!s}}}".format(item)).bytes_le
    return escape_bytes(guid_bytes)
class CallList(Sequence, Sized):
    """Ordered, indexable record of the request/response pairs seen by the mock."""

    def __init__(self):
        self._calls = list()

    def __iter__(self):
        yield from self._calls

    def __len__(self):
        return len(self._calls)

    def __getitem__(self, idx):
        return self._calls[idx]

    def setdata(self, request, response):
        """Record one request/response pair as a Call tuple."""
        self._calls.append(Call(request, response))

    def reset(self):
        """Forget every recorded call."""
        self._calls = list()
class Connection(object):
    """In-memory stand-in for ``ldap3.Connection`` backed by a list of entries.

    Each directory entry is a dict with keys ``"dn"`` and ``"attributes"``.
    Search filters are parsed with pyparsing and evaluated recursively via
    the operator dispatch table built in ``__init__``.  Mutating operations
    (add/delete/modify) write the whole directory back to the DIRECTORY file.
    """

    class Extend(object):
        # Mirrors ldap3's ``connection.extend.standard`` namespace.
        class Standard(object):
            def __init__(self, connection):
                self.connection = connection

            def paged_search(self, **kwargs):
                # Delegates to Connection.search().  Paging cookies are
                # ignored, so every result is returned in a single "page".
                self.connection.search(search_base=kwargs.get("search_base"),
                                       search_scope=kwargs.get("search_scope"),
                                       search_filter=kwargs.get(
                                           "search_filter"),
                                       attributes=kwargs.get("attributes"),
                                       paged_size=kwargs.get("page_size"),
                                       size_limit=kwargs.get("size_limit"),
                                       paged_cookie=None)
                result = self.connection.response
                if kwargs.get("generator", False):
                    # If ``generator=True`` is passed, ``paged_search`` should return an iterator.
                    result = iter(result)
                return result

        def __init__(self, connection):
            self.standard = self.Standard(connection)

    def __init__(self, directory=None):
        if directory is None:
            directory = []
        import copy
        # Deep copy so that later mutations do not leak into the caller's list.
        self.directory = copy.deepcopy(directory)
        self.bound = False
        self.start_tls_called = False
        self.extend = self.Extend(self)
        # Dispatch table mapping an LDAP boolean operator to its evaluator.
        self.operation = {
            "!" : self._search_not,
            "&" : self._search_and,
            "|" : self._search_or,
        }

    def set_directory(self, directory):
        # Replace the directory contents wholesale (no copy is taken here).
        self.directory = directory

    def _find_user(self, dn):
        # Return the index of the entry with exactly this dn.
        # Raises StopIteration when the dn is not present.
        return next(i for (i, d) in enumerate(self.directory) if d["dn"] == dn)

    @staticmethod
    def open(read_server_info=True):
        # There is no real network connection to open.
        return

    def bind(self, read_server_info=True):
        # The bound flag was decided at construction time by Ldap3Mock.
        return self.bound

    def start_tls(self, read_server_info=True):
        # Only records that STARTTLS was requested, for test assertions.
        self.start_tls_called = True

    def add(self, dn, object_class=None, attributes=None):
        """Add a new entry; populates an ldap3-style ``result`` dict.

        Returns True on success, False when the dn already exists.
        """
        self.result = { 'dn' : '',
                        'referrals' : None,
                        'description' : 'success',
                        'result' : 0,
                        'message' : '',
                        'type' : 'addResponse'}
        # Check to see if the user exists in the directory
        try:
            index = self._find_user(dn)
        except StopIteration:
            # If we get here the user doesn't exist so continue
            # Create a entry object for the new user
            entry = {}
            entry['dn'] = dn
            entry['attributes'] = attributes
            if object_class != None:
                entry['attributes'].update( {'objectClass': object_class} )
        else:
            # User already exists
            self.result["description"] = "failure"
            self.result["result"] = 68
            self.result["message"] = \
                "Error entryAlreadyExists for {0}".format(dn)
            return False
        # Add the user entry to the directory
        self.directory.append(entry)
        # Attempt to write changes to disk
        with open(DIRECTORY, 'w+') as f:
            f.write(str(self.directory))
        return True

    def delete(self, dn, controls=None):
        """Delete the entry with this dn; returns False when it is missing."""
        self.result = { 'dn' : '',
                        'referrals' : None,
                        'description' : 'success',
                        'result' : 0,
                        'message' : '',
                        'type' : 'addResponse'}
        # Check to see if the user exists in the directory
        try:
            index = self._find_user(dn)
        except StopIteration:
            # If we get here the user doesn't exist so continue
            self.result["description"] = "failure"
            self.result["result"] = 32
            self.result["message"] = "Error no such object: {0}".format(dn)
            return False
        # Delete the entry object for the user
        self.directory.pop(index)
        # Attempt to write changes to disk
        with open(DIRECTORY, 'w+') as f:
            f.write(str(self.directory))
        return True

    def modify(self, dn, changes, controls=None):
        """Apply MODIFY_DELETE / MODIFY_REPLACE / MODIFY_ADD changes to dn.

        ``changes`` maps attribute name -> (operation, [values]).
        Returns False when the dn does not exist.
        """
        self.result = { 'dn' : '',
                        'referrals' : None,
                        'description' : 'success',
                        'result' : 0,
                        'message' : '',
                        'type' : 'modifyResponse'}
        # Check to see if the user exists in the directory
        try:
            index = self._find_user(dn)
        except StopIteration:
            # If we get here the user doesn't exist so continue
            self.result["description"] = "failure"
            self.result["result"] = 32
            self.result["message"] = "Error no such object: {0!s}".format(dn)
            return False
        # extract the hash we are interested in
        entry = self.directory[index].get("attributes")
        # Loop over the changes hash and apply them
        for k, v in changes.items():
            if v[0] == "MODIFY_DELETE":
                entry.pop(k)
            elif v[0] == "MODIFY_REPLACE" or v[0] == "MODIFY_ADD":
                entry[k] = v[1][0]
            else:
                # NOTE(review): the message below interpolates ``k[1]`` (the
                # second character of the attribute name) and is missing a
                # space between "implemented" and "modify" — looks like it was
                # meant to be ``v[0]``; confirm before relying on this text.
                self.result["result"] = 2
                self.result["message"] = "Error bad/missing/not implemented" \
                    "modify operation: %s" % k[1]
        # Place the attributes back into the directory hash
        self.directory[index]["attributes"] = entry
        # Attempt to write changes to disk
        with open(DIRECTORY, 'w+') as f:
            f.write(str(self.directory))
        return True

    @staticmethod
    def _match_greater_than_or_equal(search_base, attribute, value, candidates):
        # Keep candidates under search_base whose attribute is >= value
        # (string comparison, as in the other relational matchers).
        matches = list()
        for entry in candidates:
            dn = entry.get("dn")
            if not dn.endswith(search_base):
                continue
            value_from_directory = entry.get("attributes").get(attribute)
            if str(value_from_directory) >= str(value):
                entry["type"] = "searchResEntry"
                matches.append(entry)
        return matches

    @staticmethod
    def _match_greater_than(search_base, attribute, value, candidates):
        # Keep candidates under search_base whose attribute is > value.
        matches = list()
        for entry in candidates:
            dn = entry.get("dn")
            if not dn.endswith(search_base):
                continue
            value_from_directory = entry.get("attributes").get(attribute)
            if str(value_from_directory) > str(value):
                entry["type"] = "searchResEntry"
                matches.append(entry)
        return matches

    @staticmethod
    def _match_less_than_or_equal(search_base, attribute, value, candidates):
        # Keep candidates under search_base whose attribute is <= value.
        matches = list()
        for entry in candidates:
            dn = entry.get("dn")
            if not dn.endswith(search_base):
                continue
            value_from_directory = entry.get("attributes").get(attribute)
            if str(value_from_directory) <= str(value):
                entry["type"] = "searchResEntry"
                matches.append(entry)
        return matches

    @staticmethod
    def _match_less_than(search_base, attribute, value, candidates):
        # Keep candidates under search_base whose attribute is < value.
        matches = list()
        for entry in candidates:
            dn = entry.get("dn")
            if not dn.endswith(search_base):
                continue
            value_from_directory = entry.get("attributes").get(attribute)
            if str(value_from_directory) < str(value):
                entry["type"] = "searchResEntry"
                matches.append(entry)
        return matches

    @staticmethod
    def _match_equal_to(search_base, attribute, value, candidates):
        # Keep candidates whose attribute equals value.  A "*" in value
        # switches to case-insensitive regex matching; objectGUID values are
        # converted to their escaped little-endian byte form first.
        matches = list()
        match_using_regex = False
        if "*" in value:
            match_using_regex = True
            #regex = check_escape(value)
            regex = value.replace('*', '.*')
            regex = "^{0}$".format(regex)
        for entry in candidates:
            dn = to_unicode(entry.get("dn"))
            if attribute not in entry.get("attributes") or not dn.endswith(search_base):
                continue
            values_from_directory = entry.get("attributes").get(attribute)
            if isinstance(values_from_directory, list):
                # Multi-valued attribute: a match on any value counts.
                for item in values_from_directory:
                    if attribute == "objectGUID":
                        item = _convert_objectGUID(item)
                    if match_using_regex:
                        m = re.match(regex, str(item), re.I)
                        if m:
                            entry["type"] = "searchResEntry"
                            matches.append(entry)
                    else:
                        if item == value:
                            entry["type"] = "searchResEntry"
                            matches.append(entry)
            else:
                if attribute == "objectGUID":
                    values_from_directory = _convert_objectGUID(values_from_directory)
                if match_using_regex:
                    m = re.match(regex, str(values_from_directory), re.I)
                    if m:
                        entry["type"] = "searchResEntry"
                        matches.append(entry)
                else:
                    # The value, which we compare is unicode, so we convert
                    # the values_from_directory to unicode rather than str.
                    if isinstance(values_from_directory, bytes):
                        values_from_directory = values_from_directory.decode(
                            "utf-8")
                    elif type(values_from_directory) == int:
                        values_from_directory = u"{0!s}".format(values_from_directory)
                    if value == values_from_directory:
                        entry["type"] = "searchResEntry"
                        matches.append(entry)
        return matches

    @staticmethod
    def _match_notequal_to(search_base, attribute, value, candidates):
        # Inverse of _match_equal_to: keep candidates whose attribute does
        # NOT equal value (wildcards handled with the same regex scheme).
        matches = list()
        match_using_regex = False
        if "*" in value:
            match_using_regex = True
            #regex = check_escape(value)
            regex = value.replace('*', '.*')
            regex = "^{0}$".format(regex)
        for entry in candidates:
            found = False
            dn = entry.get("dn")
            if not dn.endswith(search_base):
                continue
            values_from_directory = entry.get("attributes").get(attribute)
            if isinstance(values_from_directory, list):
                # Multi-valued: the entry matches only if NO value equals.
                for item in values_from_directory:
                    if attribute == "objectGUID":
                        item = _convert_objectGUID(item)
                    if match_using_regex:
                        m = re.match(regex, str(item), re.I)
                        if m:
                            found = True
                    else:
                        if item == value:
                            found = True
                if found is False:
                    entry["type"] = "searchResEntry"
                    matches.append(entry)
            else:
                if attribute == "objectGUID":
                    values_from_directory = _convert_objectGUID(values_from_directory)
                if match_using_regex:
                    m = re.match(regex, str(values_from_directory), re.I)
                    if not m:
                        entry["type"] = "searchResEntry"
                        matches.append(entry)
                else:
                    if str(value) != str(values_from_directory):
                        entry["type"] = "searchResEntry"
                        matches.append(entry)
        return matches

    @staticmethod
    def _parse_filter():
        # Build a pyparsing grammar for (a useful subset of) LDAP filters:
        # nested (!...), (&...), (|...) groups around (attr REL value) atoms.
        op = pyparsing.oneOf('! & |')
        lpar = pyparsing.Literal('(').suppress()
        rpar = pyparsing.Literal(')').suppress()
        k = pyparsing.Word(pyparsing.alphanums)
        # NOTE: We may need to expand on this list, but as this is not a real
        # LDAP server we should be OK.
        # Value to contain:
        # numbers, upper/lower case letters, astrisk, at symbol, minus, full
        # stop, backslash or a space
        v = pyparsing.Word(pyparsing.alphanums + "-*@.\\ äöü")
        rel = pyparsing.oneOf("= ~= >= <=")
        expr = pyparsing.Forward()
        atom = pyparsing.Group(lpar + op + expr + rpar) \
            | pyparsing.Combine(lpar + k + rel + v + rpar)
        expr << atom + pyparsing.ZeroOrMore( expr )
        return expr

    @staticmethod
    def _deDuplicate(results):
        # Remove duplicate entries (same dn), keeping first occurrence order.
        found = dict()
        deDuped = list()
        for entry in results:
            dn = entry.get("dn")
            if not dn in found:
                found[dn] = 1
                deDuped.append(entry)
        return deDuped

    def _invert_results(self, candidates):
        # Complement of candidates with respect to the whole directory.
        inverted_candidates = list(self.directory)
        for candidate in candidates:
            try:
                inverted_candidates.remove(candidate)
            except ValueError:
                pass
        return inverted_candidates

    def _search_not(self, base, search_filter, candidates=None):
        """Evaluate a "!" group: nested groups are evaluated then inverted,
        leaf conditions are evaluated with the inverse comparison directly."""
        # Create empty candidates list as we need to use self.directory for
        # each search
        candidates = list()
        this_filter = list()
        index = 0
        search_filter.remove("!")
        for condition in search_filter:
            if not isinstance(condition, list):
                this_filter.append(condition)
            index +=1
        # Remove this_filter items from search_filter list
        for condition in this_filter:
            search_filter.remove(condition)
        try:
            search_filter = list(search_filter[0])
            for sub_filter in search_filter:
                if not isinstance(sub_filter, list):
                    candidates = self.operation.get(sub_filter)(base,
                                                                search_filter,
                                                                candidates)
                else:
                    candidates = self.operation.get(sub_filter[0])(base,
                                                                   sub_filter,
                                                                   candidates)
        except IndexError:
            pass
        candidates = self._invert_results(candidates)
        # Leaf conditions: note the inverted comparators (>= uses less-than,
        # <= uses greater-than, = uses not-equal).
        for item in this_filter:
            if ">=" in item:
                k, v = item.split(">=")
                candidates = Connection._match_less_than(base, k, v,
                                                         self.directory)
            elif "<=" in item:
                k, v = item.split("<=")
                candidates = Connection._match_greater_than(base, k, v,
                                                            self.directory)
            # Emulate AD functionality, same as "="
            elif "~=" in item:
                k, v = item.split("~=")
                candidates = Connection._match_notequal_to(base, k, v,
                                                           self.directory)
            elif "=" in item:
                k, v = item.split("=")
                candidates = Connection._match_notequal_to(base, k, v,
                                                           self.directory)
        return candidates

    def _search_and(self, base, search_filter, candidates=None):
        """Evaluate an "&" group: each condition narrows the candidate set."""
        # Load the data from the directory, if we aren't passed any
        if candidates == [] or candidates is None:
            candidates = self.directory
        this_filter = list()
        index = 0
        search_filter.remove("&")
        for condition in search_filter:
            if not isinstance(condition, list):
                this_filter.append(condition)
            index +=1
        # Remove this_filter items from search_filter list
        for condition in this_filter:
            search_filter.remove(condition)
        try:
            search_filter = list(search_filter[0])
            for sub_filter in search_filter:
                if not isinstance(sub_filter, list):
                    candidates = self.operation.get(sub_filter)(base,
                                                                search_filter,
                                                                candidates)
                else:
                    candidates = self.operation.get(sub_filter[0])(base,
                                                                   sub_filter,
                                                                   candidates)
        except IndexError:
            pass
        for item in this_filter:
            if ">=" in item:
                k, v = item.split(">=")
                candidates = Connection._match_greater_than_or_equal(base, k, v,
                                                                     candidates)
            elif "<=" in item:
                k, v = item.split("<=")
                candidates = Connection._match_less_than_or_equal(base, k, v,
                                                                  candidates)
            # Emulate AD functionality, same as "="
            elif "~=" in item:
                k, v = item.split("~=")
                candidates = Connection._match_equal_to(base, k, v,
                                                        candidates)
            elif "=" in item:
                k, v = item.split("=")
                candidates = Connection._match_equal_to(base, k, v,
                                                        candidates)
        return candidates

    def _search_or(self, base, search_filter, candidates=None):
        """Evaluate a "|" group: results of each condition are accumulated
        (duplicates are removed later by _deDuplicate)."""
        # Create empty candidates list as we need to use self.directory for
        # each search
        candidates = list()
        this_filter = list()
        index = 0
        search_filter.remove("|")
        for condition in search_filter:
            if not isinstance(condition, list):
                this_filter.append(condition)
            index +=1
        # Remove this_filter items from search_filter list
        for condition in this_filter:
            search_filter.remove(condition)
        try:
            search_filter = list(search_filter[0])
            for sub_filter in search_filter:
                if not isinstance(sub_filter, list):
                    candidates += self.operation.get(sub_filter)(base,
                                                                 search_filter,
                                                                 candidates)
                else:
                    candidates += self.operation.get(sub_filter[0])(base,
                                                                    sub_filter,
                                                                    candidates)
        except IndexError:
            pass
        for item in this_filter:
            if ">=" in item:
                k, v = item.split(">=")
                candidates += Connection._match_greater_than_or_equal(base, k, v,
                                                                      self.directory)
            elif "<=" in item:
                k, v = item.split("<=")
                candidates += Connection._match_less_than_or_equal(base, k, v,
                                                                   self.directory)
            # Emulate AD functionality, same as "="
            elif "~=" in item:
                k, v = item.split("~=")
                candidates += Connection._match_equal_to(base, k, v,
                                                         self.directory)
            elif "=" in item:
                k, v = item.split("=")
                candidates += Connection._match_equal_to(base, k, v,
                                                         self.directory)
        return candidates

    def search(self, search_base=None, search_scope=None,
               search_filter=None, attributes=None, paged_size=5,
               size_limit=0, paged_cookie=None):
        """Evaluate search_filter below search_base; fills self.response.

        ``search_scope``, ``attributes``, ``paged_size``, ``size_limit`` and
        ``paged_cookie`` are accepted for API compatibility but not honored.
        A filter that fails to parse silently yields an empty response.
        """
        s_filter = list()
        candidates = list()
        self.response = list()
        self.result = dict()
        try:
            if isinstance(search_filter, bytes):
                # We need to convert to unicode otherwise pyparsing will not
                # find the u"ö"
                search_filter = to_unicode(search_filter)
            expr = Connection._parse_filter()
            s_filter = expr.parseString(search_filter).asList()[0]
        except pyparsing.ParseBaseException as exx:
            # Just for debugging purposes
            s = "{!s}".format(exx)
        for item in s_filter:
            if item[0] in self.operation:
                candidates = self.operation.get(item[0])(search_base,
                                                         s_filter)
        self.response = Connection._deDuplicate(candidates)
        return True

    def unbind(self):
        # Nothing to release in the fake connection.
        return True
class Ldap3Mock(object):
    """Patches ``ldap3.Server`` and ``ldap3.Connection`` with in-memory fakes.

    The fake directory is persisted to the DIRECTORY file so that changes
    made through one Connection are visible to connections created later.
    Usable as a context manager or, via :meth:`activate`, as a decorator.
    """

    def __init__(self):
        self._calls = CallList()
        self._server_mock = None
        self.directory = []
        # When truthy, the next Connection creation raises an exception,
        # letting tests exercise LDAP failure paths.
        self.exception = None
        self.reset()

    def reset(self):
        """Clear the list of recorded calls."""
        self._calls.reset()

    def setLDAPDirectory(self, directory=None):
        """Install *directory* as the fake LDAP content and persist it.

        An OSError while writing the backing file propagates to the caller
        and leaves ``self.directory`` unchanged.
        """
        if directory is None:
            self.directory = []
        else:
            # Write-through first so _load_data() always sees a snapshot
            # consistent with what was accepted here.
            with open(DIRECTORY, 'w+') as f:
                f.write(str(directory))
            self.directory = directory

    def set_exception(self, exc=True):
        """Make the next connection attempt raise (truthy) or succeed (falsy)."""
        self.exception = exc

    def _load_data(self, directory):
        """Read the persisted directory file back into Python structures."""
        with open(directory, 'r') as f:
            return literal_eval(f.read())

    @property
    def calls(self):
        """Recorded request/response pairs (a CallList)."""
        return self._calls

    def __enter__(self):
        self.start()
        # Return self so "with Ldap3Mock() as m:" exposes the mock instance.
        return self

    def __exit__(self, *args):
        self.stop()
        self.reset()

    def activate(self, func):
        """Decorator: run *func* with the patches installed."""
        evaldict = {'ldap3mock': self, 'func': func}
        return get_wrapped(func, _wrapper_template, evaldict)

    def _on_Server(self, host, port, use_ssl, connect_timeout, get_info=None,
                   tls=None):
        # The server object is never inspected by the fake Connection.
        return "FakeServerObject"

    def _on_Connection(self, server, user, password,
                       auto_bind=None, client_strategy=None,
                       authentication=None, check_names=None,
                       auto_referrals=None, receive_timeout=None):
        """Create a fake Connection, authenticating against the directory.

        Supports anonymous binds, plaintext userPassword comparison and
        salted-SHA1 ({SSHA}) hashes.  The resulting Connection's ``bound``
        flag reflects whether authentication succeeded.
        """
        if self.exception:
            raise Exception("LDAP request failed")
        correct_password = False
        # Reload the directory just in case a change has been made to
        # user credentials.
        self.directory = self._load_data(DIRECTORY)
        # Anonymous bind
        if authentication == ldap3.ANONYMOUS and user == "":
            correct_password = True
        for entry in self.directory:
            if to_unicode(entry.get("dn")) == user:
                pw = entry.get("attributes").get("userPassword")
                # password can be unicode
                if to_bytes(pw) == to_bytes(password):
                    correct_password = True
                elif pw.startswith('{SSHA}'):
                    correct_password = ldap_salted_sha1.verify(password, pw)
                else:
                    correct_password = False
        self.con_obj = Connection(self.directory)
        self.con_obj.bound = correct_password
        return self.con_obj

    def start(self):
        """Install the ldap3.Server and ldap3.Connection patches."""
        import mock

        def unbound_on_Server(host, port,
                              use_ssl,
                              connect_timeout, *a, **kwargs):
            return self._on_Server(host, port,
                                   use_ssl,
                                   connect_timeout, *a, **kwargs)

        self._server_mock = mock.MagicMock()
        self._server_mock.side_effect = unbound_on_Server
        self._patcher = mock.patch('ldap3.Server',
                                   self._server_mock)
        self._patcher.start()

        def unbound_on_Connection(server, user,
                                  password,
                                  auto_bind,
                                  client_strategy,
                                  authentication,
                                  check_names,
                                  auto_referrals, *a, **kwargs):
            return self._on_Connection(server, user,
                                       password,
                                       auto_bind,
                                       client_strategy,
                                       authentication,
                                       check_names,
                                       auto_referrals, *a,
                                       **kwargs)

        self._patcher2 = mock.patch('ldap3.Connection',
                                    unbound_on_Connection)
        self._patcher2.start()

    def stop(self):
        """Undo the patches installed by start()."""
        self._patcher.stop()
        self._patcher2.stop()
        self._server_mock = None

    def get_server_mock(self):
        """The MagicMock standing in for ldap3.Server (None when stopped)."""
        return self._server_mock
# expose default mock namespace
mock = _default_mock = Ldap3Mock()
__all__ = []
# Re-export every public attribute of the default instance at module level so
# that "import ldap3mock; ldap3mock.activate(...)" works like the instance.
for __attr in (a for a in dir(_default_mock) if not a.startswith('_')):
    __all__.append(__attr)
    globals()[__attr] = getattr(_default_mock, __attr)
| privacyidea/privacyidea | tests/ldap3mock.py | Python | agpl-3.0 | 28,972 |
package storage
import (
"fmt"
"strings"
"time"
mgo "github.com/ilius/mgo"
"github.com/ilius/mgo/bson"
)
// ModifyIndexTTL updates the expireAfterSeconds value of an existing TTL
// index on the given collection via the MongoDB "collMod" command, avoiding
// a drop-and-recreate of the index.
func ModifyIndexTTL(db mgo.Database, collection string, index mgo.Index) error {
	keyInfo, err := mgo.ParseIndexKey(index.Key)
	if err != nil {
		return err
	}
	ttlSeconds := int(index.ExpireAfter / time.Second)
	fmt.Printf(
		"Updating TTL on collection %s to expireAfterSeconds=%d\n",
		collection,
		ttlSeconds,
	)
	cmd := bson.D{
		{"collMod", collection},
		{"index", bson.M{
			"keyPattern":         keyInfo.Key,
			"expireAfterSeconds": ttlSeconds,
		}},
	}
	return db.Run(cmd, nil)
}
// EnsureIndexWithTTL creates the index when needed. If the index already
// exists with a different expireAfterSeconds, the TTL is adjusted in place
// via collMod (see https://jira.mongodb.org/browse/SERVER-6700) instead of
// dropping and re-creating the index.
func EnsureIndexWithTTL(db mgo.Database, collection string, index mgo.Index) error {
	err := db.C(collection).EnsureIndex(index)
	if err == nil {
		return nil
	}
	// Only a TTL mismatch is recoverable; everything else is a real error.
	if strings.Contains(err.Error(), "already exists with different options") {
		return ModifyIndexTTL(db, collection, index)
	}
	return err
}
| ilius/starcal-server | pkg/scal/storage/index_ttl.go | GO | agpl-3.0 | 1,140 |
/**
* Copyright (C) 2000 - 2012 Silverpeas
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* As a special exception to the terms and conditions of version 3.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* Open Source Software ("FLOSS") applications as described in Silverpeas's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* "http://www.silverpeas.org/docs/core/legal/floss_exception.html"
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.stratelia.silverpeas.peasCore;
import com.silverpeas.util.ArrayUtil;
import com.stratelia.silverpeas.silvertrace.SilverTrace;
import com.stratelia.webactiv.beans.admin.ComponentInstLight;
import com.stratelia.webactiv.beans.admin.OrganizationController;
import javax.servlet.http.HttpServletRequest;
/**
* @author ehugonnet
*/
public class SilverpeasWebUtil {

    private OrganizationController organizationController = new OrganizationController();

    public SilverpeasWebUtil() {
    }

    /**
     * Constructor allowing injection of an OrganizationController (e.g. a mock in tests).
     */
    public SilverpeasWebUtil(OrganizationController controller) {
        organizationController = controller;
    }

    public OrganizationController getOrganizationController() {
        return organizationController;
    }

    /**
     * Accessing the MainSessionController
     * @param request the HttpServletRequest
     * @return the current MainSessionController.
     */
    public MainSessionController getMainSessionController(HttpServletRequest request) {
        return (MainSessionController) request.getSession().getAttribute(
            MainSessionController.MAIN_SESSION_CONTROLLER_ATT);
    }

    /**
     * Extract the space id, the component id and the invoked function from the request path.
     * The path may follow the legacy form "WA<spaceId>_<componentId>/<function>" or the
     * newer "<componentId>/<function>" form; for some functions (Main, searchResult,
     * portlet, GoToFilesTab) the space id is resolved from the component instance.
     * @param request the incoming request
     * @return a 3-element array: { spaceId, componentId, function }; each element is
     *         "-1"/"-1"/"Error" when the request carries no path info.
     */
    public String[] getComponentId(HttpServletRequest request) {
        String spaceId;
        String componentId;
        String function;
        String pathInfo = request.getPathInfo();
        SilverTrace.info("peasCore", "ComponentRequestRouter.getComponentId",
            "root.MSG_GEN_PARAM_VALUE", "pathInfo=" + pathInfo);
        if (pathInfo != null) {
            spaceId = null;
            pathInfo = pathInfo.substring(1); // remove first '/'
            function = pathInfo.substring(pathInfo.indexOf('/') + 1, pathInfo.length());
            if (pathInfo.startsWith("jsp")) {
                // Pour les feuilles de styles, icones, ... + Pour les composants de
                // l'espace personnel (non instanciables)
                componentId = null;
            } else {
                // Get the space and component Ids
                // componentId extracted from the URL
                // Old url (with WA..)
                if (pathInfo.contains("WA")) {
                    String sAndCId = pathInfo.substring(0, pathInfo.indexOf('/'));
                    // spaceId looks like WA17
                    spaceId = sAndCId.substring(0, sAndCId.indexOf('_'));
                    // componentId looks like kmelia123
                    componentId = sAndCId.substring(spaceId.length() + 1, sAndCId.length());
                } else {
                    componentId = pathInfo.substring(0, pathInfo.indexOf('/'));
                }
                if (function.startsWith("Main") || function.startsWith("searchResult")
                    || function.equalsIgnoreCase("searchresult")
                    || function.startsWith("portlet")
                    || function.equals("GoToFilesTab")) {
                    // NOTE(review): getComponentInstLight may return null for an unknown
                    // componentId — confirm; that would NPE on the next line.
                    ComponentInstLight component = organizationController.getComponentInstLight(componentId);
                    spaceId = component.getDomainFatherId();
                }
                SilverTrace.info("peasCore", "ComponentRequestRouter.getComponentId",
                    "root.MSG_GEN_PARAM_VALUE", "componentId=" + componentId
                    + "spaceId=" + spaceId + " pathInfo=" + pathInfo);
            }
        } else {
            // No path info at all: return sentinel values.
            spaceId = "-1";
            componentId = "-1";
            function = "Error";
        }
        String[] context = new String[] { spaceId, componentId, function };
        SilverTrace.info("peasCore", "ComponentRequestRouter.getComponentId",
            "root.MSG_GEN_PARAM_VALUE", "spaceId=" + spaceId + " | componentId="
            + componentId + " | function=" + function);
        return context;
    }

    /**
     * Profiles (roles) of the current user on the component targeted by the request,
     * or an empty array when no session controller is attached to the request.
     */
    public String[] getRoles(HttpServletRequest request) {
        MainSessionController controller = getMainSessionController(request);
        if (controller != null) {
            return organizationController.getUserProfiles(controller.getUserId(),
                getComponentId(request)[1]);
        }
        return ArrayUtil.EMPTY_STRING_ARRAY;
    }
}
| NicolasEYSSERIC/Silverpeas-Core | web-core/src/main/java/com/stratelia/silverpeas/peasCore/SilverpeasWebUtil.java | Java | agpl-3.0 | 5,021 |
/*
* Copyright (c) 2012 - 2020 Splice Machine, Inc.
*
* This file is part of Splice Machine.
* Splice Machine is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either
* version 3, or (at your option) any later version.
* Splice Machine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License along with Splice Machine.
* If not, see <http://www.gnu.org/licenses/>.
*/
package com.splicemachine.si.impl;
import splice.com.google.common.base.Function;
import com.splicemachine.si.api.txn.TransactionStatus;
/**
* Provides hooks for tests to provide callbacks. Mainly used to provide thread coordination in tests. It allows tests
* to "trace" the internals of the SI execution.
*/
public class Tracer {
    // NOTE: these fields were originally declared "transient", a modifier that has no effect
    // on static fields (it only matters for instance-field serialization), so it was dropped.
    // Each callback is an optional test hook: a trace* method is a no-op until the matching
    // register* method installs a callback. The registry is JVM-wide; concurrently running
    // tests sharing a JVM must coordinate their use of it.
    private static Function<byte[],byte[]> fRowRollForward = null;
    private static Function<Long, Object> fTransactionRollForward = null;
    private static Function<Object[], Object> fStatus = null;
    private static Runnable fCompact = null;
    private static Function<Long, Object> fCommitting = null;
    private static Function<Long, Object> fWaiting = null;
    private static Function<Object[], Object> fRegion = null;
    private static Function<Object, String> bestAccess = null;

    /** Test override of the roll-forward delay; {@code null} means "use the default delay". */
    public static Integer rollForwardDelayOverride = null;

    /** Installs the hook invoked when a row is rolled forward. */
    public static void registerRowRollForward(Function<byte[],byte[]> f) {
        Tracer.fRowRollForward = f;
    }

    /** @return true when a row roll-forward hook is installed */
    public static boolean isTracingRowRollForward() {
        return Tracer.fRowRollForward != null;
    }

    /** Installs the hook invoked when a transaction is rolled forward. */
    public static void registerTransactionRollForward(Function<Long, Object> f) {
        Tracer.fTransactionRollForward = f;
    }

    /** @return true when a transaction roll-forward hook is installed */
    public static boolean isTracingTransactionRollForward() {
        return Tracer.fTransactionRollForward != null;
    }

    /** Installs the hook invoked on transaction status changes. */
    public static void registerStatus(Function<Object[], Object> f) {
        Tracer.fStatus = f;
    }

    /** Installs the hook invoked on compaction. */
    public static void registerCompact(Runnable f) {
        Tracer.fCompact = f;
    }

    /** Installs the hook invoked when a transaction starts committing. */
    public static void registerCommitting(Function<Long, Object> f) {
        Tracer.fCommitting = f;
    }

    /** Installs the hook invoked when a "best access" decision is traced. */
    public static void registerBestAccess(Function<Object, String> f) {
        Tracer.bestAccess = f;
    }

    /** Installs the hook invoked when a transaction waits on another. */
    public static void registerWaiting(Function<Long, Object> f) {
        Tracer.fWaiting = f;
    }

    /** Installs the hook invoked when a region is traced. */
    public static void registerRegion(Function<Object[], Object> f) {
        Tracer.fRegion = f;
    }

    /** Fires the row roll-forward hook with the row key, if installed. */
    public static void traceRowRollForward(byte[] key) {
        if (fRowRollForward != null) {
            fRowRollForward.apply(key);
        }
    }

    /** Fires the transaction roll-forward hook, if installed. */
    public static void traceTransactionRollForward(long transactionId) {
        if (fTransactionRollForward != null) {
            fTransactionRollForward.apply(transactionId);
        }
    }

    /**
     * Fires the status hook, if installed.
     * @param beforeChange true when fired before the status change is applied
     */
    public static void traceStatus(long transactionId, TransactionStatus newStatus, boolean beforeChange) {
        if (fStatus != null) {
            fStatus.apply(new Object[] {transactionId, newStatus, beforeChange});
        }
    }

    /** Fires the compaction hook, if installed. */
    public static void compact() {
        if (fCompact != null) {
            fCompact.run();
        }
    }

    /** Fires the committing hook, if installed. */
    public static void traceCommitting(long transactionId) {
        if (fCommitting != null) {
            fCommitting.apply(transactionId);
        }
    }

    /** Fires the waiting hook, if installed. */
    public static void traceWaiting(long transactionId) {
        if (fWaiting != null) {
            fWaiting.apply(transactionId);
        }
    }

    /** Fires the region hook with the table name and the region object, if installed. */
    public static void traceRegion(String tableName, Object region) {
        if (fRegion != null) {
            fRegion.apply(new Object[] {tableName, region});
        }
    }

    /** Fires the best-access hook, if installed. */
    public static void traceBestAccess(Object objectParam) {
        if (bestAccess != null) {
            bestAccess.apply(objectParam);
        }
    }
}
| splicemachine/spliceengine | splice_si_api/src/main/java/com/splicemachine/si/impl/Tracer.java | Java | agpl-3.0 | 4,164 |
/*
* JBILLING CONFIDENTIAL
* _____________________
*
* [2003] - [2012] Enterprise jBilling Software Ltd.
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of Enterprise jBilling Software.
* The intellectual and technical concepts contained
* herein are proprietary to Enterprise jBilling Software
* and are protected by trade secret or copyright law.
* Dissemination of this information or reproduction of this material
* is strictly forbidden.
*/
package com.sapienter.jbilling.server.user;
import com.sapienter.jbilling.server.user.contact.db.ContactDTO;
import com.sapienter.jbilling.server.user.db.CompanyDAS;
import com.sapienter.jbilling.server.user.db.CompanyDTO;
import com.sapienter.jbilling.server.util.db.CurrencyDAS;
import com.sapienter.jbilling.server.util.db.CurrencyDTO;
import com.sapienter.jbilling.server.util.db.LanguageDAS;
import javax.validation.Valid;
import javax.validation.constraints.Size;
public class CompanyWS implements java.io.Serializable {
private int id;
private Integer currencyId;
private Integer languageId;
@Size(min = 0, max = 100, message = "validation.error.size,0,100")
private String description;
@Valid
private ContactWS contact;
    /** Default constructor, required for web-service (de)serialization. */
    public CompanyWS() {
    }
    /**
     * Builds a bean carrying only the company identifier.
     * @param i the company id
     */
    public CompanyWS(int i) {
        id = i;
    }
    /**
     * Builds a bean mirroring the given persisted company.
     * Copies the id, currency, language and description, then loads the company's contact via
     * {@link EntityBL}; the contact field stays null when the entity has no contact.
     * @param companyDto the persisted company to copy from
     */
    public CompanyWS(CompanyDTO companyDto) {
        this.id = companyDto.getId();
        this.currencyId= companyDto.getCurrencyId();
        this.languageId = companyDto.getLanguageId();
        this.description = companyDto.getDescription();
        ContactDTO contact = new EntityBL(Integer.valueOf(this.id)).getContact();
        if (contact != null) {
            this.contact = new ContactWS(contact.getId(),
                    contact.getAddress1(),
                    contact.getAddress2(),
                    contact.getCity(),
                    contact.getStateProvince(),
                    contact.getPostalCode(),
                    contact.getCountryCode(),
                    contact.getDeleted());
        }
    }
    /**
     * Maps this web-service bean back onto its persisted entity.
     * Loads the CompanyDTO matching this bean's id and overwrites its currency, language and
     * description with this bean's values.
     * NOTE(review): CompanyDAS.find presumably returns null for an unknown id, which would make
     * the setters below throw a NullPointerException — confirm callers always pass a valid id.
     * @return the entity refreshed from this bean's values
     */
    public CompanyDTO getDTO(){
        CompanyDTO dto = new CompanyDAS().find(Integer.valueOf(this.id));
        dto.setCurrency(new CurrencyDAS().find(this.currencyId));
        dto.setLanguage(new LanguageDAS().find(this.languageId));
        dto.setDescription(this.description);
        return dto;
    }
    /** @return the company identifier */
    public int getId() {
        return id;
    }
    /** @param id the company identifier */
    public void setId(int id) {
        this.id = id;
    }
    /** @return the id of the company's currency */
    public Integer getCurrencyId() {
        return currencyId;
    }
    /** @param currencyId the id of the company's currency */
    public void setCurrencyId(Integer currencyId) {
        this.currencyId = currencyId;
    }
    /** @return the id of the company's language */
    public Integer getLanguageId() {
        return languageId;
    }
    /** @param languageId the id of the company's language */
    public void setLanguageId(Integer languageId) {
        this.languageId = languageId;
    }
    /** @return the company description (at most 100 characters, see the field constraint) */
    public String getDescription() {
        return description;
    }
    /** @param description the company description */
    public void setDescription(String description) {
        this.description = description;
    }
    /** @return the company's contact details, possibly null */
    public ContactWS getContact() {
        return contact;
    }
    /** @param contact the company's contact details */
    public void setContact(ContactWS contact) {
        this.contact = contact;
    }
public String toString() {
return "CompanyWS [id=" + id + ", currencyId=" + currencyId
+ ", languageId=" + languageId + ", description=" + description
+ ", contact=" + contact + "]";
}
} | rahith/ComtalkA-S | src/java/com/sapienter/jbilling/server/user/CompanyWS.java | Java | agpl-3.0 | 3,574 |
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/
*/
package org.phenotips.variantStoreIntegration;
import org.phenotips.data.similarity.internal.AbstractVariant;
import org.phenotips.variantstore.shared.GACallInfoFields;
import org.phenotips.variantstore.shared.GAVariantInfoFields;
import org.phenotips.variantstore.shared.VariantUtils;
import java.text.DecimalFormat;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.ga4gh.GACall;
import org.ga4gh.GAVariant;
/**
* A variant from the variant store. Annotated by Exomiser.
*
* @version $Id$
*/
public class VariantStoreVariant extends AbstractVariant
{
    // NOTE(review): DecimalFormat is not thread-safe; sharing this static instance is only
    // safe while variants are constructed from a single thread at a time — confirm.
    private static DecimalFormat df = new DecimalFormat("#.####");

    /**
     * Create a {@link Variant} from a {@link GAVariant} returned by a {@link
     * org.phenotips.variantstore.VariantStoreInterface}.
     *
     * @param gaVariant a {@link GAVariant}
     * @param totIndividuals number of individuals stored in the variant store, used to compute
     *            the internal allele frequency; when null that annotation is skipped
     */
    public VariantStoreVariant(GAVariant gaVariant, Integer totIndividuals) {
        setChrom(gaVariant.getReferenceName());
        // GA4GH start positions are 0-based; this variant model is 1-based.
        setPosition((int) (gaVariant.getStart() + 1));

        GACall call = gaVariant.getCalls().get(0);
        List<Integer> genotype = call.getGenotype();
        setGenotype(gaVariant.getReferenceBases(),
            StringUtils.join(gaVariant.getAlternateBases(), ','),
            StringUtils.join(genotype, '/'));

        setEffect(VariantUtils.getInfo(gaVariant, GAVariantInfoFields.GENE_EFFECT));

        // The Exomiser score may be missing or serialized as the literal string "null".
        String value = VariantUtils.getInfo(call, GACallInfoFields.EXOMISER_VARIANT_SCORE);
        if (value == null || "null".equals(value)) {
            setScore(null);
        } else {
            setScore(Double.valueOf(value));
        }
        setAnnotation("geneScore", VariantUtils.getInfo(call, GACallInfoFields.EXOMISER_GENE_COMBINED_SCORE));
        setAnnotation("geneSymbol", VariantUtils.getInfo(gaVariant, GAVariantInfoFields.GENE));
        setAnnotation("hgvs", VariantUtils.getInfo(gaVariant, GAVariantInfoFields.GENE_HGVS));

        // BUG FIX: the ExAC allele frequency used to be formatted unconditionally; an absent
        // or literal "null" value made Double.valueOf throw. Apply the same guard as the
        // Exomiser score above and skip the annotation instead.
        value = VariantUtils.getInfo(gaVariant, GAVariantInfoFields.EXAC_AF);
        if (value != null && !"null".equals(value)) {
            setAnnotation("exacAF", df.format(Double.valueOf(value)));
        }
        setAnnotation("gtHet", VariantUtils.getInfo(gaVariant, GAVariantInfoFields.GT_HET));
        setAnnotation("gtHom", VariantUtils.getInfo(gaVariant, GAVariantInfoFields.GT_HOM));

        if (totIndividuals != null) {
            value = VariantUtils.getInfo(gaVariant, GAVariantInfoFields.AC_TOT);
            // Same guard: the total allele count may be absent from the variant info.
            if (value != null && !"null".equals(value)) {
                Double pcAF = Double.valueOf(value) / (totIndividuals * 2);
                setAnnotation("pcAF", df.format(pcAF));
            }
        }
    }
}
| phenotips/variant-store | integration/api/src/main/java/org/phenotips/variantStoreIntegration/VariantStoreVariant.java | Java | agpl-3.0 | 3,411 |
/*
* Created on 20/giu/2010
*
* Copyright 2010 by Andrea Vacondio (andrea.vacondio@gmail.com).
*
* This file is part of the Sejda source code
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.sejda.model.exception;
/**
* Exception thrown when a wrong password has been set and it's not possible to open the pdf document (and execute the task)
*
* @author Andrea Vacondio
*
*/
public class TaskWrongPasswordException extends TaskIOException {

    private static final long serialVersionUID = -5517166148313118559L;

    /**
     * Builds the exception with a detail message and the originating cause.
     * @param message the detail message describing the document that could not be opened
     * @param cause the underlying error raised while opening the document
     */
    public TaskWrongPasswordException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Builds the exception with a detail message only.
     * @param message the detail message describing the document that could not be opened
     */
    public TaskWrongPasswordException(String message) {
        super(message);
    }

    /**
     * Builds the exception wrapping the originating cause.
     * @param cause the underlying error raised while opening the document
     */
    public TaskWrongPasswordException(Throwable cause) {
        super(cause);
    }
}
| torakiki/sejda | sejda-model/src/main/java/org/sejda/model/exception/TaskWrongPasswordException.java | Java | agpl-3.0 | 1,584 |
import { module, test } from 'qunit';
import { setupTest } from 'ember-qunit';

module('Unit | Model | partner partnerclient plan', function (hooks) {
  setupTest(hooks);

  // Smoke test: the model can be instantiated through the store without blowing up.
  test('it exists', function (assert) {
    const store = this.owner.lookup('service:store');
    const record = store.createRecord('partner/partnerclient-plan', {});
    assert.ok(record);
  });
});
/*
* Copyright (C) 2000 - 2016 Silverpeas
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* As a special exception to the terms and conditions of version 3.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* Open Source Software ("FLOSS") applications as described in Silverpeas's
* FLOSS exception. You should have recieved a copy of the text describing
* the FLOSS exception, and it is also available here:
* "http://www.silverpeas.org/docs/core/legal/floss_exception.html"
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.silverpeas.core.personalorganizer.service;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import org.silverpeas.core.personalorganizer.model.JournalHeader;
import org.silverpeas.core.personalorganizer.model.ParticipationStatus;
import org.silverpeas.core.personalorganizer.model.SchedulableCount;
import org.silverpeas.core.personalorganizer.socialnetwork.SocialInformationEvent;
import org.silverpeas.core.util.StringUtil;
import org.silverpeas.core.silvertrace.SilverTrace;
import org.silverpeas.core.persistence.jdbc.DBUtil;
import org.silverpeas.core.util.DateUtil;
import org.silverpeas.core.exception.SilverpeasException;
import org.silverpeas.core.exception.UtilException;
public class JournalDAO {
public static final String COLUMNNAMES =
"id, name, delegatorId, description, priority, classification, startDay, startHour, endDay, endHour, externalId";
private static final String JOURNALCOLUMNNAMES =
"CalendarJournal.id, CalendarJournal.name, CalendarJournal.delegatorId, CalendarJournal.description, CalendarJournal.priority, "
+ " CalendarJournal.classification, CalendarJournal.startDay, CalendarJournal.startHour, CalendarJournal.endDay, CalendarJournal.endHour, CalendarJournal.externalId";
private static final String INSERT_JOURNAL = "INSERT INTO CalendarJournal ("
+ COLUMNNAMES + ") values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
private static final String UPDATE_JOURNAL = "UPDATE CalendarJournal SET name = ?, "
+ "delegatorId = ?, description = ?, priority = ?, classification = ?, "
+ "startDay = ?, startHour = ?, endDay = ?, endHour = ?, externalId = ? WHERE id = ?";
private static final String DELETE_JOURNAL = "DELETE FROM CalendarJournal WHERE id = ?";
public String addJournal(Connection con, JournalHeader journal)
throws SQLException, UtilException, CalendarException {
PreparedStatement prepStmt = null;
int id = 0;
try {
prepStmt = con.prepareStatement(INSERT_JOURNAL);
id = DBUtil.getNextId("CalendarJournal", "id");
prepStmt.setInt(1, id);
prepStmt.setString(2, journal.getName());
prepStmt.setString(3, journal.getDelegatorId());
prepStmt.setString(4, journal.getDescription());
prepStmt.setInt(5, journal.getPriority().getValue());
prepStmt.setString(6, journal.getClassification().getString());
prepStmt.setString(7, journal.getStartDay());
prepStmt.setString(8, journal.getStartHour());
prepStmt.setString(9, journal.getEndDay());
prepStmt.setString(10, journal.getEndHour());
prepStmt.setString(11, journal.getExternalId());
if (prepStmt.executeUpdate() == 0) {
throw new CalendarException(
"JournalDAO.Connection con, addJournal(Connection con, JournalHeader journal)",
SilverpeasException.ERROR, "calendar.EX_EXCUTE_INSERT_EMPTY");
}
} finally {
DBUtil.close(prepStmt);
}
return String.valueOf(id);
}
public void updateJournal(Connection con, JournalHeader journal)
throws SQLException, CalendarException {
PreparedStatement prepStmt = null;
try {
prepStmt = con.prepareStatement(UPDATE_JOURNAL);
prepStmt.setString(1, journal.getName());
prepStmt.setString(2, journal.getDelegatorId());
prepStmt.setString(3, journal.getDescription());
prepStmt.setInt(4, journal.getPriority().getValue());
prepStmt.setString(5, journal.getClassification().getString());
prepStmt.setString(6, journal.getStartDay());
prepStmt.setString(7, journal.getStartHour());
prepStmt.setString(8, journal.getEndDay());
prepStmt.setString(9, journal.getEndHour());
prepStmt.setString(10, journal.getExternalId());
prepStmt.setInt(11, Integer.parseInt(journal.getId()));
if (prepStmt.executeUpdate() == 0) {
throw new CalendarException(
"JournalDAO.Connection con, updateJournal(Connection con, JournalHeader journal)",
SilverpeasException.ERROR, "calendar.EX_EXCUTE_UPDATE_EMPTY");
}
} finally {
DBUtil.close(prepStmt);
}
}
public void removeJournal(Connection con, String id)
throws SQLException, CalendarException {
PreparedStatement prepStmt = null;
try {
prepStmt = con.prepareStatement(DELETE_JOURNAL);
prepStmt.setInt(1, Integer.parseInt(id));
if (prepStmt.executeUpdate() == 0) {
throw new CalendarException(
"JournalDAO.Connection con, removeJournal(Connection con, JournalHeader journal)",
SilverpeasException.ERROR, "calendar.EX_EXCUTE_DELETE_EMPTY");
}
} finally {
DBUtil.close(prepStmt);
}
}
public boolean hasTentativeJournalsForUser(Connection con,
String userId) throws SQLException, java.text.ParseException {
PreparedStatement prepStmt = null;
ResultSet rs = null;
try {
prepStmt = getTentativePreparedStatement(con, userId);
rs = prepStmt.executeQuery();
return rs.next();
} finally {
DBUtil.close(rs, prepStmt);
}
}
public Collection<JournalHeader> getTentativeJournalHeadersForUser(Connection con,
String userId) throws SQLException, java.text.ParseException {
PreparedStatement prepStmt = null;
ResultSet rs = null;
List<JournalHeader> list = new ArrayList<JournalHeader>();
try {
prepStmt = getTentativePreparedStatement(con, userId);
rs = prepStmt.executeQuery();
while (rs.next()) {
JournalHeader journal = getJournalHeaderFromResultSet(rs);
list.add(journal);
}
} finally {
DBUtil.close(rs, prepStmt);
}
return list;
}
  /**
   * Builds the prepared statement selecting the journals in which the given user is an attendee
   * with a TENTATIVE participation status, ordered by start day then start hour.
   * The caller owns the returned statement and must close it.
   * @param con an opened connection to the database
   * @param userId the id of the attendee
   * @return the ready-to-execute statement, with both parameters already bound
   */
  private PreparedStatement getTentativePreparedStatement(
      Connection con, String userId) throws SQLException {
    String selectStatement = "select distinct " + JournalDAO.JOURNALCOLUMNNAMES
        + " from CalendarJournal, CalendarJournalAttendee "
        + " WHERE (CalendarJournal.id = CalendarJournalAttendee.journalId) "
        + " and (CalendarJournalAttendee.participationStatus = ?) "
        + " and (userId = ?) " + " order by startDay, startHour";
    PreparedStatement prepStmt = con.prepareStatement(selectStatement);
    prepStmt.setString(1, ParticipationStatus.TENTATIVE);
    prepStmt.setString(2, userId);
    return prepStmt;
  }
  /**
   * Gets the headers of the journals in which the given user is an attendee with the given
   * participation status, filtered on the given day via the given comparator and optionally on
   * a category. When the participation is ACCEPTED, the journals the user delegates are
   * unioned in as well.
   * SECURITY NOTE(review): day, userId, categoryId and comparator are concatenated directly
   * into the SQL text instead of being bound as parameters; this is safe only while every one
   * of these values is system-generated. Consider migrating to bound parameters.
   * @param con an opened connection to the database
   * @param day the reference day, in the same textual format as the startDay/endDay columns
   * @param userId the id of the user
   * @param categoryId an optional category filter; null disables the filter
   * @param participation the required participation status
   * @param comparator the SQL operator applied to startDay (e.g. "=" or ">=")
   * @return the matching journal headers, ordered by start day then start hour
   */
  private Collection<JournalHeader> getJournalHeadersForUser(Connection con,
      String day, String userId, String categoryId, String participation,
      String comparator) throws SQLException, java.text.ParseException {
    StringBuilder selectStatement = new StringBuilder();
    selectStatement.append("select distinct ").append(
        JournalDAO.JOURNALCOLUMNNAMES).append(
        " from CalendarJournal, CalendarJournalAttendee ");
    if (categoryId != null) {
      selectStatement.append(", CalendarJournalCategory ");
    }
    selectStatement.append(" where (CalendarJournal.id = CalendarJournalAttendee.journalId) ");
    selectStatement.append(" and (userId = '").append(userId).append("'");
    selectStatement.append(" and participationStatus = '").append(participation).append("') ");
    if (categoryId != null) {
      selectStatement.append(" and (CalendarJournal.id = CalendarJournalCategory.journalId) ");
      selectStatement.append(" and (CalendarJournalCategory.categoryId = '").append(categoryId).
          append("') ");
    }
    // Matches journals starting on the selected day(s) or spanning the reference day.
    selectStatement.append(" and ((startDay ").append(comparator).append(" '").append(day).append(
        "') or (startDay <= '").append(day).append(
        "' and endDay >= '").append(day).append("')) ");
    if (participation.equals(ParticipationStatus.ACCEPTED)) {
      // Accepted participation also includes the journals the user himself delegates.
      selectStatement.append("union ").append("select distinct ").append(
          JournalDAO.JOURNALCOLUMNNAMES).append(" from CalendarJournal ");
      if (categoryId != null) {
        selectStatement.append(", CalendarJournalCategory ");
      }
      selectStatement.append(" where (delegatorId = '").append(userId).append(
          "') ");
      if (categoryId != null) {
        selectStatement.append(" and (CalendarJournal.id = CalendarJournalCategory.journalId) ");
        selectStatement.append(" and (CalendarJournalCategory.categoryId = '").append(categoryId).
            append("') ");
      }
      selectStatement.append(" and ((startDay ").append(comparator).append(" '").append(day)
          .append(
          "') or (startDay <= '").append(day).append("' and endDay >= '").append(day).append(
          "')) ");
    }
    // Positional ordering (columns 7 and 8 = startDay, startHour), required with the UNION.
    selectStatement.append(" order by 7 , 8 "); // Modif PHiL -> Interbase
    PreparedStatement prepStmt = null;
    ResultSet rs = null;
    List<JournalHeader> list = null;
    try {
      prepStmt = con.prepareStatement(selectStatement.toString());
      rs = prepStmt.executeQuery();
      list = new ArrayList<JournalHeader>();
      while (rs.next()) {
        JournalHeader journal = getJournalHeaderFromResultSet(rs);
        list.add(journal);
      }
    } finally {
      DBUtil.close(rs, prepStmt);
    }
    return list;
  }
  /**
   * Gets the journal headers of the given user for exactly the given day (journals starting
   * that day or spanning it).
   * Delegates to getJournalHeadersForUser with the "=" comparator.
   */
  public Collection<JournalHeader> getDayJournalHeadersForUser(Connection con,
      String day, String userId, String categoryId, String participation)
      throws SQLException, java.text.ParseException {
    return getJournalHeadersForUser(con, day, userId, categoryId,
        participation, "=");
  }
  /**
   * Gets the journal headers of the given user starting on or after the given day (or spanning
   * it).
   * Delegates to getJournalHeadersForUser with the ">=" comparator.
   */
  public Collection<JournalHeader> getNextJournalHeadersForUser(Connection con,
      String day, String userId, String categoryId, String participation)
      throws SQLException, java.text.ParseException {
    return getJournalHeadersForUser(con, day, userId, categoryId,
        participation, ">=");
  }
  /**
   * Gets the next journal entries delegated by the given user, whose end day is on or after
   * {@code day} and whose start day lies within [begin, end], ordered by start day then start
   * hour, optionally filtered on a classification.
   * @param con an opened connection to the database
   * @param day the lower bound (inclusive) applied to the endDay column
   * @param userId the id of the delegator
   * @param classification an optional classification filter; blank or null disables it
   * @param begin lower bound (inclusive) of the start-day window
   * @param end upper bound (inclusive) of the start-day window
   * @return the matching journal headers
   * @throws SQLException
   * @throws java.text.ParseException
   */
  public List<JournalHeader> getNextEventsForUser(Connection con,
      String day, String userId, String classification, Date begin, Date end)
      throws SQLException, java.text.ParseException {
    String selectNextEvents =
        "select distinct " + JournalDAO.JOURNALCOLUMNNAMES + " from CalendarJournal "
        + " where delegatorId = ? and endDay >= ? ";
    // Parameter indexes shift by one when the optional classification placeholder is present:
    // classificationIndex points at the "classification = ?" slot, limitIndex at the first of
    // the two start-day window slots.
    int classificationIndex = 2;
    int limitIndex = 3;
    if (StringUtil.isDefined(classification)) {
      selectNextEvents += " and classification = ? ";
      classificationIndex++;
      limitIndex++;
    }
    selectNextEvents += " and CalendarJournal.startDay >= ? and CalendarJournal.startDay <= ?";
    selectNextEvents += " order by CalendarJournal.startDay, CalendarJournal.startHour ";
    PreparedStatement prepStmt = null;
    ResultSet rs = null;
    List<JournalHeader> list = null;
    try {
      prepStmt = con.prepareStatement(selectNextEvents);
      prepStmt.setString(1, userId);
      prepStmt.setString(2, day);
      if (classificationIndex == 3)// Classification param not null
      {
        prepStmt.setString(classificationIndex, classification);
      }
      prepStmt.setString(limitIndex, DateUtil.date2SQLDate(begin));
      prepStmt.setString(limitIndex + 1, DateUtil.date2SQLDate(end));
      rs = prepStmt.executeQuery();
      list = new ArrayList<JournalHeader>();
      while (rs.next()) {
        JournalHeader journal = getJournalHeaderFromResultSet(rs);
        list.add(journal);
      }
    } finally {
      DBUtil.close(rs, prepStmt);
    }
    return list;
  }
public Collection<SchedulableCount> countMonthJournalsForUser(Connection con,
String month, String userId, String categoryId, String participation)
throws SQLException {
StringBuilder selectStatement = new StringBuilder(200);
String theDay = "";
selectStatement
.append(
"select count(distinct CalendarJournal.id), ? from CalendarJournal, CalendarJournalAttendee ");
if (categoryId != null) {
selectStatement.append(", CalendarJournalCategory ");
}
selectStatement.append("where (CalendarJournal.id = CalendarJournalAttendee.journalId) ");
selectStatement.append("and (userId = ").append(userId);
selectStatement.append(" and participationStatus = '").append(participation).append("')");
selectStatement.append(" and ((startDay = ?) or ((startDay <= ?) and (endDay >= ?))) ");
if (categoryId != null) {
selectStatement.append(" and (CalendarJournal.id = CalendarJournalCategory.journalId)");
selectStatement.append(" and (CalendarJournalCategory.categoryId = '").append(categoryId).
append("') ");
}
selectStatement.append("group by ?");
if (participation.equals(ParticipationStatus.ACCEPTED)) {
selectStatement.append(
"union select count(distinct CalendarJournal.id), ? from CalendarJournal ");
if (categoryId != null) {
selectStatement.append(", CalendarJournalCategory ");
}
selectStatement.append("where (delegatorId = '").append(userId).append(
"')");
selectStatement.append(" and ((startDay = ?) or ((startDay <= ?) and (endDay >= ?)))");
if (categoryId != null) {
selectStatement.append(" and (CalendarJournal.id = CalendarJournalCategory.journalId)");
selectStatement.append(" and (CalendarJournalCategory.categoryId = '").append(categoryId).
append("') ");
}
selectStatement.append("group by ?");
}
List<SchedulableCount> list = new ArrayList<SchedulableCount>();
int number;
String date = "";
PreparedStatement prepStmt = null;
try {
ResultSet rs = null;
prepStmt = con.prepareStatement(selectStatement.toString());
for (int day = 1; day == 31; day++) {
if (day < 10) {
theDay = month + "0" + String.valueOf(day);
} else {
theDay = month + String.valueOf(day);
}
prepStmt.setString(1, theDay);
prepStmt.setString(2, theDay);
prepStmt.setString(3, theDay);
prepStmt.setString(4, theDay);
prepStmt.setString(5, theDay);
prepStmt.setString(6, theDay);
prepStmt.setString(7, theDay);
prepStmt.setString(8, theDay);
prepStmt.setString(9, theDay);
prepStmt.setString(10, theDay);
rs = prepStmt.executeQuery();
while (rs.next()) {
number = rs.getInt(1);
date = rs.getString(2);
SchedulableCount count = new SchedulableCount(number, date);
list.add(count);
}
DBUtil.close(rs);
}
} finally {
DBUtil.close(prepStmt);
}
return list;
}
  /**
   * Gets the headers of the journals overlapping the [begin, end] period in which the given
   * user is an attendee with the given participation status; for ACCEPTED participation, the
   * journals the user delegates are unioned in as well.
   * The overlap test covers the four cases: starts in the period, ends in the period, or spans
   * either bound.
   * SECURITY NOTE(review): begin, end, userId and categoryId are concatenated directly into
   * the SQL text; safe only while these values are system-generated.
   * NOTE(review): this method selects COLUMNNAMES (unqualified column names) while joining a
   * second table, unlike the sibling methods that use the qualified JOURNALCOLUMNNAMES —
   * confirm no column-name ambiguity arises on the target database.
   * @param con an opened connection to the database
   * @param begin the period's first day, in the stored day format
   * @param end the period's last day, in the stored day format
   * @param userId the id of the user
   * @param categoryId an optional category filter; null disables it
   * @param participation the required participation status
   * @return the matching journal headers, ordered by start day then start hour
   */
  public Collection<JournalHeader> getPeriodJournalHeadersForUser(Connection con,
      String begin, String end, String userId, String categoryId,
      String participation) throws SQLException, java.text.ParseException {
    StringBuilder selectStatement = new StringBuilder(200);
    selectStatement.append("select distinct ").append(JournalDAO.COLUMNNAMES).append(
        " from CalendarJournal, CalendarJournalAttendee ");
    if (categoryId != null) {
      selectStatement.append(", CalendarJournalCategory ");
    }
    selectStatement.append(" where (CalendarJournal.id = CalendarJournalAttendee.journalId) ");
    selectStatement.append(" and (userId = '").append(userId).append("' ");
    selectStatement.append(" and participationStatus = '").append(participation).append("') ");
    if (categoryId != null) {
      selectStatement.append(" and (CalendarJournal.id = CalendarJournalCategory.journalId) ");
      selectStatement.append(" and (categoryId = '").append(categoryId).append(
          "') ");
    }
    selectStatement.append(" and ( (startDay >= '").append(begin).append(
        "' and startDay <= '").append(end).append("')");
    selectStatement.append(" or (endDay >= '").append(begin).append(
        "' and endDay <= '").append(end).append("')");
    selectStatement.append(" or ('").append(begin).append("' >= startDay and '").append(begin).
        append("' <= endDay) ");
    selectStatement.append(" or ('").append(end).append("' >= startDay and '").append(end).append(
        "' <= endDay) ) ");
    if (participation.equals(ParticipationStatus.ACCEPTED)) {
      selectStatement.append(" union select distinct ").append(
          JournalDAO.COLUMNNAMES).append(" from CalendarJournal ");
      if (categoryId != null) {
        selectStatement.append(", CalendarJournalCategory ");
      }
      selectStatement.append("where (delegatorId = '").append(userId).append(
          "') ");
      if (categoryId != null) {
        selectStatement.append(" and (CalendarJournal.id = CalendarJournalCategory.journalId) ");
        selectStatement.append(" and (categoryId = '").append(categoryId).append("') ");
      }
      selectStatement.append(" and ( (startDay >= '").append(begin).append(
          "' and startDay <= '").append(end).append("')");
      selectStatement.append(" or (endDay >= '").append(begin).append(
          "' and endDay <= '").append(end).append("')");
      selectStatement.append(" or ('").append(begin).append(
          "' >= startDay and '").append(begin).append("' <= endDay) ");
      selectStatement.append(" or ('").append(end).append("' >= startDay and '").append(end)
          .append(
          "' <= endDay) ) ");
    }
    // Positional ordering (columns 7 and 8 = startDay, startHour), required with the UNION.
    selectStatement.append(" order by 7 , 8 ");
    PreparedStatement prepStmt = null;
    ResultSet rs = null;
    List<JournalHeader> list = null;
    try {
      prepStmt = con.prepareStatement(selectStatement.toString());
      rs = prepStmt.executeQuery();
      list = new ArrayList<JournalHeader>();
      while (rs.next()) {
        JournalHeader journal = getJournalHeaderFromResultSet(rs);
        list.add(journal);
      }
    } finally {
      DBUtil.close(rs, prepStmt);
    }
    return list;
  }
public JournalHeader getJournalHeaderFromResultSet(ResultSet rs) throws SQLException,
java.text.ParseException {
String id = String.valueOf(rs.getInt(1));
String name = rs.getString(2);
String delegatorId = rs.getString(3);
JournalHeader journal = new JournalHeader(id, name, delegatorId);
journal.setDescription(rs.getString(4));
try {
journal.getPriority().setValue(rs.getInt(5));
} catch (Exception e) {
SilverTrace.warn("calendar",
"JournalDAO.getJournalHeaderFromResultSet(ResultSet rs)",
"calendar_MSG_NOT_GET_PRIORITY");
}
journal.getClassification().setString(rs.getString(6));
journal.setStartDay(rs.getString(7));
journal.setStartHour(rs.getString(8));
journal.setEndDay(rs.getString(9));
journal.setEndHour(rs.getString(10));
journal.setExternalId(rs.getString(11));
return journal;
}
public JournalHeader getJournalHeader(Connection con, String journalId)
throws SQLException, CalendarException, java.text.ParseException {
String selectStatement = "select " + JournalDAO.COLUMNNAMES
+ " from CalendarJournal " + "where id = ?";
PreparedStatement prepStmt = null;
ResultSet rs = null;
JournalHeader journal;
try {
prepStmt = con.prepareStatement(selectStatement);
prepStmt.setInt(1, Integer.parseInt(journalId));
rs = prepStmt.executeQuery();
if (rs.next()) {
journal = getJournalHeaderFromResultSet(rs);
} else {
throw new CalendarException(
"JournalDAO.Connection con, String journalId",
SilverpeasException.ERROR, "calendar.EX_RS_EMPTY", "journalId="
+ journalId);
}
return journal;
} finally {
DBUtil.close(rs, prepStmt);
}
}
public Collection<JournalHeader> getOutlookJournalHeadersForUser(Connection con,
String userId) throws SQLException, CalendarException,
java.text.ParseException {
String selectStatement = "select " + JournalDAO.COLUMNNAMES
+ " from CalendarJournal "
+ "where delegatorId = ? and externalId is not null";
PreparedStatement prepStmt = null;
ResultSet rs = null;
try {
prepStmt = con.prepareStatement(selectStatement);
prepStmt.setString(1, userId);
rs = prepStmt.executeQuery();
Collection<JournalHeader> list = new ArrayList<JournalHeader>();
while (rs.next()) {
JournalHeader journal = getJournalHeaderFromResultSet(rs);
list.add(journal);
}
return list;
} finally {
DBUtil.close(rs, prepStmt);
}
}
public Collection<JournalHeader> getOutlookJournalHeadersForUserAfterDate(
Connection con, String userId, java.util.Date startDate)
throws SQLException, CalendarException, java.text.ParseException {
String selectStatement = "select " + JournalDAO.COLUMNNAMES
+ " from CalendarJournal "
+ "where delegatorId = ? and startDay >= ? and externalId is not null";
PreparedStatement prepStmt = null;
ResultSet rs = null;
Collection<JournalHeader> list = null;
try {
prepStmt = con.prepareStatement(selectStatement);
prepStmt.setString(1, userId);
prepStmt.setString(2, DateUtil.date2SQLDate(startDate));
rs = prepStmt.executeQuery();
list = new ArrayList<JournalHeader>();
while (rs.next()) {
JournalHeader journal = getJournalHeaderFromResultSet(rs);
list.add(journal);
}
return list;
} finally {
DBUtil.close(rs, prepStmt);
}
}
/**
 * Gets the headers of the journals of the given user that either start at the given date
 * or later, or are already ongoing at that date.
 * @param con an opened connection to the database.
 * @param userId the unique identifier of the journals' delegator.
 * @param startDate the reference date.
 * @param nbReturned the maximum number of headers to fetch; a negative value disables
 * the limit (the loop counter can then never reach it).
 * @return a collection of the matching journal headers, ordered by start day and hour.
 * @throws SQLException if the query fails.
 * @throws CalendarException if a journal header cannot be built from a fetched row.
 * @throws java.text.ParseException if a date within a fetched row cannot be parsed.
 */
public Collection<JournalHeader> getJournalHeadersForUserAfterDate(Connection con,
    String userId, java.util.Date startDate, int nbReturned)
    throws SQLException, CalendarException, java.text.ParseException {
  String query = "select " + JournalDAO.COLUMNNAMES
      + " from CalendarJournal " + "where delegatorId = ? "
      + "and ((startDay >= ?) or (startDay <= ? and endDay >= ?))"
      + " order by startDay, startHour";
  PreparedStatement statement = null;
  ResultSet resultSet = null;
  String sqlDate = DateUtil.date2SQLDate(startDate);
  try {
    statement = con.prepareStatement(query);
    statement.setString(1, userId);
    statement.setString(2, sqlDate);
    statement.setString(3, sqlDate);
    statement.setString(4, sqlDate);
    resultSet = statement.executeQuery();
    Collection<JournalHeader> headers = new ArrayList<JournalHeader>();
    int fetched = 0;
    // Stop as soon as the requested number of headers has been fetched.
    while (resultSet.next() && fetched != nbReturned) {
      headers.add(getJournalHeaderFromResultSet(resultSet));
      fetched++;
    }
    return headers;
  } finally {
    // Always release the JDBC resources, even on failure.
    DBUtil.close(resultSet, statement);
  }
}
/**
 * Gets the next social events of a given list of contacts, whatever the underlying
 * database (PostgreSQL, Oracle, MSSQL). This includes all kinds of events.
 * @param con an opened connection to the database.
 * @param day the day (as a SQL date string) at or after which the events must end.
 * @param myId the unique identifier of the requesting user.
 * @param myContactsIds the identifiers of the contacts whose events are looked for.
 * @param begin the lower bound of the period in which the events must start.
 * @param end the upper bound of the period in which the events must start.
 * @return the matching events, ordered by ascending start day and hour.
 * @throws SQLException if the query fails.
 * @throws ParseException if a date within a fetched row cannot be parsed.
 */
public List<SocialInformationEvent> getNextEventsForMyContacts(Connection con, String day,
    String myId, List<String> myContactsIds, Date begin, Date end) throws SQLException,
    ParseException {
  String selectNextEvents =
      "select distinct " + JournalDAO.JOURNALCOLUMNNAMES + " from CalendarJournal "
      + " where endDay >= ? and delegatorId in(" + toSqlString(myContactsIds) + ") "
      + " and startDay >= ? and startDay <= ? "
      + " order by startDay ASC, startHour ASC";
  PreparedStatement stmt = null;
  ResultSet results = null;
  try {
    stmt = con.prepareStatement(selectNextEvents);
    stmt.setString(1, day);
    stmt.setString(2, DateUtil.date2SQLDate(begin));
    stmt.setString(3, DateUtil.date2SQLDate(end));
    results = stmt.executeQuery();
    List<SocialInformationEvent> events = new ArrayList<SocialInformationEvent>();
    while (results.next()) {
      JournalHeader journal = getJournalHeaderFromResultSet(results);
      // NOTE(review): the event is flagged as "mine" when the journal id equals myId;
      // confirm the journal id (and not the delegator id) is the intended field here.
      events.add(new SocialInformationEvent(journal, journal.getId().equals(myId)));
    }
    return events;
  } finally {
    // Always release the JDBC resources, even on failure.
    DBUtil.close(results, stmt);
  }
}
/**
 * Serializes a list of values into a comma-separated sequence of quoted SQL string
 * literals, suitable for insertion into an IN (...) clause.
 * Any single quote embedded in a value is doubled, as mandated by the SQL standard,
 * so that such a value can neither break the statement nor be used to inject SQL.
 * @param list the values to serialize; may be null or empty.
 * @return "''" when the list is null or empty, otherwise the quoted values separated
 * by commas (for example "'a','b'").
 */
private static String toSqlString(List<String> list) {
  StringBuilder result = new StringBuilder(100);
  if (list == null || list.isEmpty()) {
    // An empty literal keeps the enclosing IN (...) clause syntactically valid.
    return "''";
  }
  int i = 0;
  for (String var : list) {
    if (i != 0) {
      result.append(",");
    }
    // Double the embedded quotes: standard SQL escaping of string literals.
    result.append("'").append(var.replace("'", "''")).append("'");
    i++;
  }
  return result.toString();
}
/**
 * Gets the last social events of a given list of contacts, whatever the underlying
 * database (PostgreSQL, Oracle, MSSQL). This includes all kinds of events.
 * @param con an opened connection to the database.
 * @param day the day (as a SQL date string) before which the events must have ended.
 * @param myId the unique identifier of the requesting user.
 * @param myContactsIds the identifiers of the contacts whose events are looked for.
 * @param begin the lower bound of the period in which the events must start.
 * @param end the upper bound of the period in which the events must start.
 * @return the matching events, ordered by descending start day and hour.
 * @throws SQLException if the query fails.
 * @throws ParseException if a date within a fetched row cannot be parsed.
 */
public List<SocialInformationEvent> getLastEventsForMyContacts(Connection con, String day,
    String myId, List<String> myContactsIds, Date begin, Date end) throws SQLException,
    ParseException {
  String selectNextEvents =
      "select distinct " + JournalDAO.JOURNALCOLUMNNAMES + " from CalendarJournal "
      + " where endDay < ? and delegatorId in(" + toSqlString(myContactsIds) + ") "
      + " and startDay >= ? and startDay <= ? "
      + " order by startDay desc, startHour desc";
  PreparedStatement stmt = null;
  ResultSet results = null;
  List<SocialInformationEvent> events = null;
  try {
    stmt = con.prepareStatement(selectNextEvents);
    stmt.setString(1, day);
    stmt.setString(2, DateUtil.date2SQLDate(begin));
    stmt.setString(3, DateUtil.date2SQLDate(end));
    results = stmt.executeQuery();
    events = new ArrayList<SocialInformationEvent>();
    while (results.next()) {
      JournalHeader journal = getJournalHeaderFromResultSet(results);
      // NOTE(review): the event is flagged as "mine" when the journal id equals myId;
      // confirm the journal id (and not the delegator id) is the intended field here.
      events.add(new SocialInformationEvent(journal, journal.getId().equals(myId)));
    }
  } finally {
    // Always release the JDBC resources, even on failure.
    DBUtil.close(results, stmt);
  }
  return events;
}
/**
 * Gets the last social events of the requesting user himself, whatever the underlying
 * database (PostgreSQL, Oracle, MSSQL). This includes all kinds of events.
 * @param con an opened connection to the database.
 * @param day the day (as a SQL date string) before which the events must have ended.
 * @param myId the unique identifier of the requesting user (the events' delegator).
 * @param begin the lower bound of the period in which the events must start.
 * @param end the upper bound of the period in which the events must start.
 * @return the matching events, ordered by descending start day and hour.
 * @throws SQLException if the query fails.
 * @throws ParseException if a date within a fetched row cannot be parsed.
 */
public List<SocialInformationEvent> getMyLastEvents(Connection con, String day, String myId,
    Date begin, Date end) throws SQLException,
    ParseException {
  String selectNextEvents =
      "select distinct " + JournalDAO.JOURNALCOLUMNNAMES
      + " from CalendarJournal " + " where endDay < ? and delegatorId = ? "
      + " and startDay >= ? and startDay <= ? "
      + " order by startDay desc, startHour desc ";
  PreparedStatement stmt = null;
  ResultSet results = null;
  List<SocialInformationEvent> events = null;
  try {
    stmt = con.prepareStatement(selectNextEvents);
    stmt.setString(1, day);
    stmt.setString(2, myId);
    stmt.setString(3, DateUtil.date2SQLDate(begin));
    stmt.setString(4, DateUtil.date2SQLDate(end));
    results = stmt.executeQuery();
    events = new ArrayList<SocialInformationEvent>();
    while (results.next()) {
      events.add(new SocialInformationEvent(getJournalHeaderFromResultSet(results)));
    }
  } finally {
    // Always release the JDBC resources, even on failure.
    DBUtil.close(results, stmt);
  }
  return events;
}
/**
 * Gets the last social events of the requesting user himself when the database is MSSQL.
 * This includes all kinds of events.
 * @param con an opened connection to the database.
 * @param day the day (as a SQL date string) before which the events must have ended.
 * @param myId the unique identifier of the requesting user (the events' delegator).
 * @param begin the lower bound of the period in which the events must start.
 * @param end the upper bound of the period in which the events must start.
 * @return the matching events, ordered by descending start day and hour.
 * @throws SQLException if the query fails.
 * @throws java.text.ParseException if a date within a fetched row cannot be parsed.
 */
public List<SocialInformationEvent> getMyLastEvents_MSS(Connection con,
    String day, String myId, Date begin, Date end) throws
    SQLException, java.text.ParseException {
  String selectNextEvents =
      "select distinct " + JournalDAO.JOURNALCOLUMNNAMES
      + " from CalendarJournal "
      + " where endDay < ? and delegatorId = ? "
      + " and startDay >= ? and startDay <= ? "
      + " order by CalendarJournal.startDay desc, CalendarJournal.startHour desc";
  PreparedStatement stmt = null;
  ResultSet results = null;
  List<SocialInformationEvent> events = null;
  try {
    stmt = con.prepareStatement(selectNextEvents);
    stmt.setString(1, day);
    stmt.setString(2, myId);
    stmt.setString(3, DateUtil.date2SQLDate(begin));
    stmt.setString(4, DateUtil.date2SQLDate(end));
    results = stmt.executeQuery();
    events = new ArrayList<SocialInformationEvent>();
    while (results.next()) {
      events.add(new SocialInformationEvent(getJournalHeaderFromResultSet(results)));
    }
  } finally {
    // Always release the JDBC resources, even on failure.
    DBUtil.close(results, stmt);
  }
  return events;
}
}
| auroreallibe/Silverpeas-Core | core-services/personalOrganizer/src/main/java/org/silverpeas/core/personalorganizer/service/JournalDAO.java | Java | agpl-3.0 | 30,469 |
/**
* Copyright (C) 2000 - 2013 Silverpeas
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* As a special exception to the terms and conditions of version 3.0 of the GPL, you may
* redistribute this Program in connection with Free/Libre Open Source Software ("FLOSS")
* applications as described in Silverpeas's FLOSS exception. You should have received a copy of the
* text describing the FLOSS exception, and it is also available here:
* "http://www.silverpeas.org/docs/core/legal/floss_exception.html"
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see <http://www.gnu.org/licenses/>.
*/
package com.stratelia.webactiv.beans.admin;
import java.util.ArrayList;
import java.util.List;
import com.silverpeas.util.StringUtil;
import com.stratelia.webactiv.organization.AdminPersistenceException;
import com.stratelia.webactiv.organization.SpaceRow;
import com.stratelia.webactiv.organization.SpaceUserRoleRow;
import com.stratelia.webactiv.util.exception.SilverpeasException;
/**
 * Manages the persistence of the space profiles, that is to say the roles played by users
 * and groups of users within a given collaborative space.
 */
public class SpaceProfileInstManager {

  /**
   * Constructor
   */
  public SpaceProfileInstManager() {
  }

  /**
   * Create a new space profile instance in database
   * @param spaceProfileInst the space profile to persist.
   * @param domainManager gives access to the organization schema.
   * @param parentSpaceId the identifier of the space the profile is about.
   * @return the identifier of the newly persisted space profile.
   * @throws AdminException if the persistence fails.
   */
  public String createSpaceProfileInst(SpaceProfileInst spaceProfileInst,
      DomainDriverManager domainManager, String parentSpaceId) throws AdminException {
    try {
      // Create the spaceProfile node
      SpaceUserRoleRow newRole = makeSpaceUserRoleRow(spaceProfileInst);
      newRole.spaceId = idAsInt(parentSpaceId);
      domainManager.getOrganization().spaceUserRole.createSpaceUserRole(newRole);
      String spaceProfileNodeId = idAsString(newRole.id);

      // Update the CSpace with the links TSpaceProfile-TGroup
      for (String groupId : spaceProfileInst.getAllGroups()) {
        domainManager.getOrganization().spaceUserRole.addGroupInSpaceUserRole(idAsInt(groupId),
            idAsInt(spaceProfileNodeId));
      }

      // Update the CSpace with the links TSpaceProfile-TUser
      for (String userId : spaceProfileInst.getAllUsers()) {
        domainManager.getOrganization().spaceUserRole.addUserInSpaceUserRole(idAsInt(userId),
            idAsInt(spaceProfileNodeId));
      }
      return spaceProfileNodeId;
    } catch (Exception e) {
      throw new AdminException("SpaceProfileInstManager.addSpaceProfileInst",
          SilverpeasException.ERROR, "admin.EX_ERR_ADD_SPACE_PROFILE",
          "space profile name: '" + spaceProfileInst.getName() + "'", e);
    }
  }

  /**
   * Get Space profile information with given id and creates a new SpaceProfileInst
   * @param ddManager gives access to the organization schema.
   * @param spaceProfileId the identifier of the space profile to load.
   * @param parentSpaceId the identifier of the space the profile is about; when undefined
   * it is resolved from the profile itself.
   * @return the matching space profile, or null if no such profile exists.
   * @throws AdminException if the profile cannot be loaded.
   */
  public SpaceProfileInst getSpaceProfileInst(DomainDriverManager ddManager,
      String spaceProfileId, String parentSpaceId) throws AdminException {
    if (!StringUtil.isDefined(parentSpaceId)) {
      // The caller did not provide the space: resolve it from the profile itself.
      try {
        ddManager.getOrganizationSchema();
        SpaceRow space = ddManager.getOrganization().space.getSpaceOfSpaceUserRole(idAsInt(
            spaceProfileId));
        if (space == null) {
          space = new SpaceRow();
        }
        parentSpaceId = idAsString(space.id);
      } catch (Exception e) {
        throw new AdminException("SpaceProfileInstManager.getSpaceProfileInst",
            SilverpeasException.ERROR, "admin.EX_ERR_GET_SPACE_PROFILE",
            "space profile Id: '" + spaceProfileId + "', space Id: '"
            + parentSpaceId + "'", e);
      } finally {
        ddManager.releaseOrganizationSchema();
      }
    }
    try {
      ddManager.getOrganizationSchema();
      // Load the profile detail
      SpaceUserRoleRow spaceUserRole = ddManager.getOrganization().spaceUserRole.
          getSpaceUserRole(idAsInt(spaceProfileId));
      SpaceProfileInst spaceProfileInst = null;
      if (spaceUserRole != null) {
        // Set the attributes of the space profile Inst
        spaceProfileInst = spaceUserRoleRow2SpaceProfileInst(spaceUserRole);
        setUsersAndGroups(ddManager, spaceProfileInst);
      }
      return spaceProfileInst;
    } catch (Exception e) {
      throw new AdminException("SpaceProfileInstManager.getSpaceProfileInst",
          SilverpeasException.ERROR, "admin.EX_ERR_SET_SPACE_PROFILE",
          "space profile Id: '" + spaceProfileId + "', space Id: '"
          + parentSpaceId + "'", e);
    } finally {
      ddManager.releaseOrganizationSchema();
    }
  }

  /**
   * Gets the inherited space profile with the given role name for the given space.
   * @param ddManager gives access to the organization schema.
   * @param spaceId the identifier of the space.
   * @param roleName the name of the role.
   * @return the matching space profile, or null if no such profile exists.
   * @throws AdminException if the profile cannot be loaded.
   */
  public SpaceProfileInst getInheritedSpaceProfileInstByName(DomainDriverManager ddManager,
      String spaceId, String roleName) throws AdminException {
    return getSpaceProfileInst(ddManager, spaceId, roleName, true);
  }

  /**
   * Gets the non-inherited space profile with the given role name for the given space.
   * @param ddManager gives access to the organization schema.
   * @param spaceId the identifier of the space.
   * @param roleName the name of the role.
   * @return the matching space profile, or null if no such profile exists.
   * @throws AdminException if the profile cannot be loaded.
   */
  public SpaceProfileInst getSpaceProfileInstByName(DomainDriverManager ddManager,
      String spaceId, String roleName) throws AdminException {
    return getSpaceProfileInst(ddManager, spaceId, roleName, false);
  }

  /**
   * Loads the space profile with the given role name for the given space.
   * @param isInherited true to fetch the inherited profile, false for the direct one.
   */
  private SpaceProfileInst getSpaceProfileInst(DomainDriverManager ddManager,
      String spaceId, String roleName, boolean isInherited) throws AdminException {
    try {
      ddManager.getOrganizationSchema();
      // The schema stores the inheritance flag as an int column.
      int inherited = 0;
      if (isInherited) {
        inherited = 1;
      }
      // Load the profile detail
      SpaceUserRoleRow spaceUserRole = ddManager.getOrganization().spaceUserRole.
          getSpaceUserRole(idAsInt(spaceId), roleName, inherited);

      SpaceProfileInst spaceProfileInst = null;
      if (spaceUserRole != null) {
        // Set the attributes of the space profile Inst
        spaceProfileInst = spaceUserRoleRow2SpaceProfileInst(spaceUserRole);
        setUsersAndGroups(ddManager, spaceProfileInst);
      }
      return spaceProfileInst;
    } catch (Exception e) {
      throw new AdminException("SpaceProfileInstManager.getInheritedSpaceProfileInst",
          SilverpeasException.ERROR, "admin.EX_ERR_GET_SPACE_PROFILE",
          "spaceId = " + spaceId + ", role = " + roleName, e);
    } finally {
      ddManager.releaseOrganizationSchema();
    }
  }

  /**
   * Fills the given space profile with the groups and users directly playing its role.
   */
  private void setUsersAndGroups(DomainDriverManager ddManager, SpaceProfileInst spaceProfileInst)
      throws AdminPersistenceException {
    // Get the groups
    String[] asGroupIds = ddManager.getOrganization().group.
        getDirectGroupIdsInSpaceUserRole(idAsInt(spaceProfileInst.getId()));
    // Set the groups to the space profile
    if (asGroupIds != null) {
      for (String groupId : asGroupIds) {
        spaceProfileInst.addGroup(groupId);
      }
    }
    // Get the Users
    String[] asUsersIds = ddManager.getOrganization().user.getDirectUserIdsOfSpaceUserRole(idAsInt(
        spaceProfileInst.getId()));
    // Set the Users to the space profile
    if (asUsersIds != null) {
      for (String userId : asUsersIds) {
        spaceProfileInst.addUser(userId);
      }
    }
  }

  /**
   * Converts a SpaceUserRoleRow into a SpaceProfileInst.
   */
  private SpaceProfileInst spaceUserRoleRow2SpaceProfileInst(SpaceUserRoleRow spaceUserRole) {
    // Set the attributes of the space profile Inst
    SpaceProfileInst spaceProfileInst = new SpaceProfileInst();
    spaceProfileInst.setId(Integer.toString(spaceUserRole.id));
    spaceProfileInst.setName(spaceUserRole.roleName);
    spaceProfileInst.setLabel(spaceUserRole.name);
    spaceProfileInst.setDescription(spaceUserRole.description);
    spaceProfileInst.setSpaceFatherId(Integer.toString(spaceUserRole.spaceId));
    if (spaceUserRole.isInherited == 1) {
      spaceProfileInst.setInherited(true);
    }
    return spaceProfileInst;
  }

  /**
   * Deletes space profile instance from Silverpeas
   * @param spaceProfileInst the space profile to delete.
   * @param ddManager gives access to the organization schema.
   * @throws AdminException if the deletion fails.
   */
  public void deleteSpaceProfileInst(SpaceProfileInst spaceProfileInst,
      DomainDriverManager ddManager) throws AdminException {
    try {
      // delete the spaceProfile node
      ddManager.getOrganization().spaceUserRole.removeSpaceUserRole(idAsInt(spaceProfileInst
          .getId()));
    } catch (Exception e) {
      throw new AdminException("SpaceProfileInstManager.deleteSpaceProfileInst",
          SilverpeasException.ERROR, "admin.EX_ERR_DELETE_SPACEPROFILE", "space profile Id: '"
          + spaceProfileInst.getId() + "'", e);
    }
  }

  /**
   * Updates space profile instance: synchronizes the links between the profile and its
   * groups and users so that they match the new profile, then updates the profile row
   * itself.
   * @param spaceProfileInst the space profile as currently persisted.
   * @param ddManager gives access to the organization schema.
   * @param spaceProfileInstNew the new state of the space profile.
   * @return the identifier of the updated space profile.
   * @throws AdminException if the update fails.
   */
  public String updateSpaceProfileInst(SpaceProfileInst spaceProfileInst,
      DomainDriverManager ddManager, SpaceProfileInst spaceProfileInstNew)
      throws AdminException {
    try {
      List<String> oldGroups = spaceProfileInst.getAllGroups();
      List<String> newGroups = spaceProfileInstNew.getAllGroups();

      // Add the groups that are in the new profile but not yet linked
      for (String groupId : newGroups) {
        if (!oldGroups.contains(groupId)) {
          // Create the links between the spaceProfile and the group
          ddManager.getOrganization().spaceUserRole.addGroupInSpaceUserRole(
              idAsInt(groupId), idAsInt(spaceProfileInst.getId()));
        }
      }

      // Remove the groups that are no longer in the new profile
      for (String groupId : oldGroups) {
        if (!newGroups.contains(groupId)) {
          // delete the node link SpaceProfile_Group
          ddManager.getOrganization().spaceUserRole.removeGroupFromSpaceUserRole(
              idAsInt(groupId), idAsInt(spaceProfileInst.getId()));
        }
      }

      List<String> oldUsers = spaceProfileInst.getAllUsers();
      List<String> newUsers = spaceProfileInstNew.getAllUsers();

      // Add the users that are in the new profile but not yet linked
      for (String userId : newUsers) {
        if (!oldUsers.contains(userId)) {
          // Create the links between the spaceProfile and the User
          ddManager.getOrganization().spaceUserRole.addUserInSpaceUserRole(
              idAsInt(userId), idAsInt(spaceProfileInst.getId()));
        }
      }

      // Remove the users that are no longer in the new profile
      for (String userId : oldUsers) {
        if (!newUsers.contains(userId)) {
          // delete the node link SpaceProfile_User
          ddManager.getOrganization().spaceUserRole.removeUserFromSpaceUserRole(
              idAsInt(userId), idAsInt(spaceProfileInst.getId()));
        }
      }

      // update the spaceProfile node
      SpaceUserRoleRow changedSpaceUserRole = makeSpaceUserRoleRow(spaceProfileInstNew);
      changedSpaceUserRole.id = idAsInt(spaceProfileInstNew.getId());
      ddManager.getOrganization().spaceUserRole.updateSpaceUserRole(changedSpaceUserRole);
      return idAsString(changedSpaceUserRole.id);
    } catch (Exception e) {
      throw new AdminException("SpaceProfileInstManager.updateSpaceProfileInst",
          SilverpeasException.ERROR, "admin.EX_ERR_UPDATE_SPACEPROFILE",
          "space profile Id: '" + spaceProfileInst.getId() + "'", e);
    }
  }

  /**
   * Converts SpaceProfileInst to SpaceUserRoleRow
   */
  private SpaceUserRoleRow makeSpaceUserRoleRow(SpaceProfileInst spaceProfileInst) {
    SpaceUserRoleRow spaceUserRole = new SpaceUserRoleRow();

    spaceUserRole.id = idAsInt(spaceProfileInst.getId());
    spaceUserRole.roleName = spaceProfileInst.getName();
    spaceUserRole.name = spaceProfileInst.getLabel();
    spaceUserRole.description = spaceProfileInst.getDescription();
    if (spaceProfileInst.isInherited()) {
      spaceUserRole.isInherited = 1;
    }
    return spaceUserRole;
  }

  /**
   * Convert String Id to int Id
   */
  private int idAsInt(String id) {
    if (id == null || id.length() == 0) {
      return -1; // the null id.
    }
    try {
      return Integer.parseInt(id);
    } catch (NumberFormatException e) {
      return -1; // the null id.
    }
  }

  /**
   * Convert int Id to String Id
   */
  static private String idAsString(int id) {
    return Integer.toString(id);
  }
}
| CecileBONIN/Silverpeas-Core | lib-core/src/main/java/com/stratelia/webactiv/beans/admin/SpaceProfileInstManager.java | Java | agpl-3.0 | 14,175 |
package org.bimserver.database.query.literals;
/******************************************************************************
* Copyright (C) 2009-2019 BIMserver.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see {@literal<http://www.gnu.org/licenses/>}.
*****************************************************************************/
import org.bimserver.database.query.conditions.LiteralCondition;
public class StringLiteral extends LiteralCondition {
private final String value;
public StringLiteral(String value) {
this.value = value;
}
public Object getValue() {
return value;
}
} | opensourceBIM/BIMserver | BimServer/src/org/bimserver/database/query/literals/StringLiteral.java | Java | agpl-3.0 | 1,207 |
<?php
/*********************************************************************************
* The contents of this file are subject to the SugarCRM Master Subscription
* Agreement ("License") which can be viewed at
* http://www.sugarcrm.com/crm/en/msa/master_subscription_agreement_11_April_2011.pdf
* By installing or using this file, You have unconditionally agreed to the
* terms and conditions of the License, and You may not use this file except in
* compliance with the License. Under the terms of the license, You shall not,
* among other things: 1) sublicense, resell, rent, lease, redistribute, assign
* or otherwise transfer Your rights to the Software, and 2) use the Software
* for timesharing or service bureau purposes such as hosting the Software for
* commercial gain and/or for the benefit of a third party. Use of the Software
* may be subject to applicable fees and any use of the Software without first
* paying applicable fees is strictly prohibited. You do not have the right to
* remove SugarCRM copyrights from the source code or user interface.
*
* All copies of the Covered Code must include on each user interface screen:
* (i) the "Powered by SugarCRM" logo and
* (ii) the SugarCRM copyright notice
* in the same form as they appear in the distribution. See full license for
* requirements.
*
* Your Warranty, Limitations of liability and Indemnity are expressly stated
* in the License. Please refer to the License for the specific language
* governing these rights and limitations under the License. Portions created
* by SugarCRM are Copyright (C) 2004-2011 SugarCRM, Inc.; All Rights Reserved.
********************************************************************************/
// Prevent direct access: this file must only be included from a valid Sugar entry point.
if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point');

// Norwegian (nb_NO) translations for the Roles module.
// Keys are referenced by the module's views; values are the labels displayed to users.
$mod_strings= array (
	'LBL_ROLE' => 'Rolle:',
	'LBL_LANGUAGE' => 'Språk:',
	'LBL_MODULE_NAME' => 'Roller',
	'LBL_MODULE_TITLE' => 'Roller: Hjem',
	'LBL_SEARCH_FORM_TITLE' => 'Rollesøk',
	'LBL_LIST_FORM_TITLE' => 'Rolleliste',
	'LNK_NEW_ROLE' => 'Opprett rolle',
	'LNK_ROLES' => 'Roller',
	'LBL_NAME' => 'Navn:',
	'LBL_DESCRIPTION' => 'Beskrivelse:',
	'LBL_ALLOWED_MODULES' => 'Tillatte moduler:',
	'LBL_DISALLOWED_MODULES' => 'Ikke-tillatte moduler:',
	'LBL_ASSIGN_MODULES' => 'Endre moduler:',
	'LBL_DEFAULT_SUBPANEL_TITLE' => 'Roller',
	'LBL_USERS' => 'Brukere',
	'LBL_USERS_SUBPANEL_TITLE' => 'Brukere',
);?>
| harish-patel/ecrm | modules/Roles/language/nb_NO.lang.php | PHP | agpl-3.0 | 2,968 |
<?php
/**
* Manager for Plus
*
* !! Most of Plus is handled via Discovery or Wire\Paywall !!
*/
namespace Minds\Core\Plus;
use Minds\Core\Di\Di;
use Minds\Core\Config;
use Minds\Core\Data\ElasticSearch;
use Minds\Core\Data\Cassandra;
use Minds\Core\Wire\Paywall\PaywallEntityInterface;
use Minds\Core\Rewards\Contributions\ContributionValues;
class Manager
{
    /** @var Config site configuration (provides the 'plus' and 'upgrades' settings) */
    protected $config;

    /** @var ElasticSearch\Client used to aggregate engagement metrics for scores */
    protected $es;

    /** @var Cassandra\Client used to read the wire (payment) records */
    protected $db;

    /** @var int length, in days, of a monthly subscription period */
    const SUBSCRIPTION_PERIOD_MONTH = 30;

    /** @var int length, in days, of a yearly subscription period */
    const SUBSCRIPTION_PERIOD_YEAR = 365;

    /** @var int percentage of the revenue that is shared */
    const REVENUE_SHARE_PCT = 25;

    /**
     * @param Config|null $config falls back to the DI container when null
     * @param ElasticSearch\Client|null $es falls back to the DI container when null
     * @param Cassandra\Client|null $db falls back to the DI container when null
     */
    public function __construct($config = null, $es = null, $db = null)
    {
        $this->config = $config ?? Di::_()->get('Config');
        $this->es = $es ?? Di::_()->get('Database\ElasticSearch');
        $this->db = $db ?? Di::_()->get('Database\Cassandra\Cql');
    }

    /**
     * Returns the plus guid (the guid of the Plus handler channel configured for the site)
     * @return string
     */
    public function getPlusGuid(): string
    {
        return $this->config->get('plus')['handler'];
    }

    /**
     * Returns the plus support tier urn
     * @return string
     */
    public function getPlusSupportTierUrn(): string
    {
        return $this->config->get('plus')['support_tier_urn'];
    }

    /**
     * Returns the subscription price
     * @param string $period 'month' or 'year'; any other value raises
     * @return int (cents)
     * @throws \Exception when $period is neither 'month' nor 'year'
     */
    public function getSubscriptionPrice(string $period): int
    {
        /** @var string */
        $key = '';
        switch ($period) {
            case 'month':
                $key = 'monthly';
                break;
            case 'year':
                $key = 'yearly';
                break;
            default:
                throw new \Exception("Subscription can only be month or year");
        }
        // Configured price is in dollars; convert to cents.
        return $this->config->get('upgrades')['plus'][$key]['usd'] * 100;
    }

    /**
     * Return sum of revenue for the previous subscriptions period (30 days)
     * Will return in USD (dollars, rounded to cents)
     * @param int $asOfTs reference timestamp; defaults to now
     * @return float
     */
    public function getActiveRevenue($asOfTs = null): float
    {
        $revenue = 0;
        $from = strtotime(self::SUBSCRIPTION_PERIOD_MONTH . " days ago", $asOfTs ?? time());
        $to = strtotime("+" . self::SUBSCRIPTION_PERIOD_MONTH . " days", $from);

        //
        // Sum the wire takings for the previous 30 days where monthly subscription
        //
        $revenue += $this->getRevenue($from, $to, $this->getSubscriptionPrice('month'));

        //
        // Sum the wire takings for the previous 365 days where yearly subscription
        // (amortized to a month by dividing by 12)
        //
        $from = strtotime(self::SUBSCRIPTION_PERIOD_YEAR . " days ago", $asOfTs ?? time());
        $to = strtotime("+" . self::SUBSCRIPTION_PERIOD_YEAR . " days", $from);
        $revenue += $this->getRevenue($from, $to, $this->getSubscriptionPrice('year')) / 12;

        // getRevenue works in cents; convert to dollars.
        return round($revenue / 100, 2);
    }

    /**
     * Returns the daily revenue for Plus
     * - Assumptions:
     * - Subscription is 30 days
     * - Amoritize the revenue by dividing the revenue
     * for previous 30 days by 30
     * - eg: ($300 per month) / 30 = $10 per day
     * Will return in USD
     * @param int $asOfTs reference timestamp; defaults to now
     * @return float
     */
    public function getDailyRevenue($asOfTs = null): float
    {
        return round($this->getActiveRevenue($asOfTs) / self::SUBSCRIPTION_PERIOD_MONTH, 2);
    }

    /**
     * Sums the USD wire payments of the given exact amount received by the Plus
     * channel within [$from, $to). Returns 0 (and logs) on query failure.
     * @var int $from start of the period (unix timestamp)
     * @var int $to end of the period (unix timestamp)
     * @var int $amount exact wire amount (cents) to match
     * @return int sum in cents
     */
    protected function getRevenue(int $from, int $to, int $amount): int
    {
        $query = new Cassandra\Prepared\Custom();

        // ALLOW FILTERING is used to filter by amount. As subscription volume is small
        // and paritioned by receiver_guid, it should not be an issue
        $query->query("SELECT SUM(wei) as wei_sum
            FROM wire
            WHERE receiver_guid=?
            AND method='usd'
            AND timestamp >= ?
            AND timestamp < ?
            AND wei=?
            ALLOW FILTERING
            ", [
                new \Cassandra\Varint($this->getPlusGuid()),
                new \Cassandra\Timestamp($from, 0),
                new \Cassandra\Timestamp($to, 0),
                new \Cassandra\Varint($amount)
            ]);

        try {
            $result = $this->db->request($query);
        } catch (\Exception $e) {
            // Best effort: a failed query counts as zero revenue.
            error_log(print_r($e, true));
            return 0;
        }

        return (int) $result[0]['wei_sum'];
    }

    /**
     * Return unlocks (deprecated)
     * @param int $asOfTs
     * @return iterable always empty
     */
    public function getUnlocks(int $asOfTs): array
    {
        return [];
    }

    /**
     * Return the scores of users: for each owner of Plus content, the share of all
     * weighted unique engagement actions recorded on the given day.
     * @param int $asOfTs start of the day (unix timestamp) to aggregate
     * @return iterable yields ['user_guid', 'total', 'count', 'sharePct'] arrays
     */
    public function getScores(int $asOfTs): iterable
    {
        /** @var array */
        $must = [];

        // Only metrics recorded against the Plus support tier.
        $must[] = [
            'term' => [
                'support_tier_urn' => $this->getPlusSupportTierUrn(),
            ],
        ];

        // Restrict to the single day starting at $asOfTs.
        $must[] = [
            'range' => [
                '@timestamp' => [
                    'gte' => $asOfTs * 1000,
                    'lt' => strtotime('midnight tomorrow', $asOfTs) * 1000,
                ]
            ]
        ];

        // Bucket by content owner, then by action type, counting unique acting users.
        $body = [
            'query' => [
                'bool' => [
                    'must' => $must,
                ],
            ],
            'aggs' => [
                'owners' => [
                    'terms' => [
                        'field' => 'entity_owner_guid.keyword',
                        'size' => 5000,
                    ],
                    'aggs' => [
                        'actions' => [
                            'terms' => [
                                'field' => 'action.keyword',
                                'size' => 5000,
                            ],
                            'aggs' => [
                                'unique_user_actions' => [
                                    'cardinality' => [
                                        'field' => 'user_guid.keyword',
                                    ],
                                ]
                            ]
                        ]
                    ]
                ],
            ]
        ];

        $query = [
            'index' => 'minds-metrics-*',
            'body' => $body,
            'size' => 0,
        ];

        $prepared = new ElasticSearch\Prepared\Search();
        $prepared->query($query);

        $response = $this->es->request($prepared);

        // Grand total of weighted scores over all owners (denominator of sharePct).
        $total = array_sum(array_map(function ($bucket) {
            return $this->sumInteractionScores($bucket);
        }, $response['aggregations']['owners']['buckets']));

        foreach ($response['aggregations']['owners']['buckets'] as $bucket) {
            $count = $this->sumInteractionScores($bucket);

            // Owners with a zero score are skipped (also guards the division below).
            if (!$count) {
                continue;
            }

            $score = [
                'user_guid' => $bucket['key'],
                'total' => $total,
                'count' => $count,
                'sharePct' => $count / $total,
            ];
            yield $score;
        }
    }

    /**
     * Returns if a post is Minds+ paywalled or not
     * @param PaywallEntityInterface $entity
     * @return bool
     */
    public function isPlusEntity(PaywallEntityInterface $entity): bool
    {
        if (!$entity->isPayWall()) {
            return false;
        }
        $threshold = $entity->getWireThreshold();
        return $threshold['support_tier']['urn'] === $this->getPlusSupportTierUrn();
    }

    /**
     * Returns the score of owner bucket interactions: each action's unique-user count
     * weighted by the contribution multiplier for that action type.
     * @param array $bucket an ES owner bucket containing an 'actions' sub-aggregation
     * @return int
     */
    private function sumInteractionScores(array $bucket): int
    {
        return array_sum(array_map(function ($bucket) {
            return $bucket['unique_user_actions']['value'] * ContributionValues::metricKeyToMultiplier($bucket['key']);
        }, $bucket['actions']['buckets']));
    }
}
| Minds/engine | Core/Plus/Manager.php | PHP | agpl-3.0 | 8,138 |
# -*- coding: utf-8 -*-
"""
Models for Student Identity Verification
This is where we put any models relating to establishing the real-life identity
of a student over a period of time. Right now, the only models are the abstract
`PhotoVerification`, and its one concrete implementation
`SoftwareSecurePhotoVerification`. The hope is to keep as much of the
photo verification process as generic as possible.
"""
import functools
import json
import logging
import os.path
import uuid
from datetime import timedelta
from email.utils import formatdate
import requests
import six
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.files.base import ContentFile
from django.urls import reverse
from django.db import models
from django.dispatch import receiver
from django.utils.functional import cached_property
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy
from model_utils import Choices
from model_utils.models import StatusModel, TimeStampedModel
from opaque_keys.edx.django.models import CourseKeyField
from lms.djangoapps.verify_student.ssencrypt import (
encrypt_and_encode,
generate_signed_message,
random_aes_key,
rsa_encrypt
)
from openedx.core.djangoapps.signals.signals import LEARNER_NOW_VERIFIED
from openedx.core.storage import get_storage
from .utils import earliest_allowed_verification_date
log = logging.getLogger(__name__)
def generateUUID():  # pylint: disable=invalid-name
    """Utility function; returns a freshly generated random (version 4) UUID as a string."""
    return '{0}'.format(uuid.uuid4())
class VerificationException(Exception):
    """Raised when a verification attempt is in an unexpected status for the requested operation."""
    pass
def status_before_must_be(*valid_start_statuses):
    """
    Helper decorator with arguments to make sure that an object with a `status`
    attribute is in one of a list of acceptable status states before a method
    is called. You could use it in a class definition like:

        @status_before_must_be("submitted", "approved", "denied")
        def refund_user(self, user_id):
            # Do logic here...

    If the object has a status that is not listed when the `refund_user` method
    is invoked, it will throw a `VerificationException`. This is just to avoid
    distracting boilerplate when looking at a Model that needs to go through a
    workflow process.
    """
    def _decorate(func):
        """Wrap `func` with the status precondition check."""
        @functools.wraps(func)
        def _checked(obj, *args, **kwargs):
            # Guard clause: delegate straight through when the status is acceptable.
            if obj.status in valid_start_statuses:
                return func(obj, *args, **kwargs)
            exception_msg = (
                u"Error calling {} {}: status is '{}', must be one of: {}"
            ).format(func, obj, obj.status, valid_start_statuses)
            raise VerificationException(exception_msg)
        return _checked
    return _decorate
class IDVerificationAttempt(StatusModel):
    """
    Each IDVerificationAttempt represents a Student's attempt to establish
    their identity through one of several methods that inherit from this Model,
    including PhotoVerification and SSOVerification.

    .. pii: The User's name is stored in this and sub-models
    .. pii_types: name
    .. pii_retirement: retained
    """
    # Workflow states an attempt can be in; StatusModel derives the `status`
    # field from this Choices object.
    STATUS = Choices('created', 'ready', 'submitted', 'must_retry', 'approved', 'denied')
    user = models.ForeignKey(User, db_index=True, on_delete=models.CASCADE)

    # They can change their name later on, so we want to copy the value here so
    # we always preserve what it was at the time they requested. We only copy
    # this value during the mark_ready() step. Prior to that, you should be
    # displaying the user's name from their user.profile.name.
    name = models.CharField(blank=True, max_length=255)

    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True, db_index=True)

    class Meta(object):
        app_label = "verify_student"
        abstract = True
        ordering = ['-created_at']

    @property
    def expiration_datetime(self):
        """Datetime that the verification will expire. """
        # An attempt is valid for a site-configured number of days past creation.
        days_good_for = settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
        return self.created_at + timedelta(days=days_good_for)

    def should_display_status_to_user(self):
        """Whether or not the status from this attempt should be displayed to the user."""
        # Subclasses must decide; there is no sensible default.
        raise NotImplementedError

    def active_at_datetime(self, deadline):
        """Check whether the verification was active at a particular datetime.

        Arguments:
            deadline (datetime): The date at which the verification was active
                (created before and expiration datetime is after today).

        Returns:
            bool
        """
        # Note: the expiration is compared against *now*, not against the given
        # deadline -- an already-expired verification is never considered active.
        return (
            self.created_at < deadline and
            self.expiration_datetime > now()
        )
class ManualVerification(IDVerificationAttempt):
    """
    Each ManualVerification represents a user's verification that bypasses the need for
    any other verification.
    .. pii: The User's name is stored in the parent model
    .. pii_types: name
    .. pii_retirement: retained
    """
    # Free-text audit trail explaining why this user was manually verified.
    reason = models.CharField(
        max_length=255,
        blank=True,
        help_text=(
            'Specifies the reason for manual verification of the user.'
        )
    )
    class Meta(object):
        app_label = 'verify_student'
    def __unicode__(self):
        # Human-readable representation used in the Django admin and logs.
        return 'ManualIDVerification for {name}, status: {status}'.format(
            name=self.name,
            status=self.status,
        )
    def should_display_status_to_user(self):
        """
        Whether or not the status should be displayed to the user.
        """
        # Manual verifications are an administrative action; never surfaced.
        return False
class SSOVerification(IDVerificationAttempt):
    """
    Each SSOVerification represents a Student's attempt to establish their identity
    by signing in with SSO. ID verification through SSO bypasses the need for
    photo verification.
    .. no_pii:
    """
    # Dotted paths of the third_party_auth provider-config models; stored as
    # the value of `identity_provider_type` below.
    OAUTH2 = 'third_party_auth.models.OAuth2ProviderConfig'
    SAML = 'third_party_auth.models.SAMLProviderConfig'
    LTI = 'third_party_auth.models.LTIProviderConfig'
    IDENTITY_PROVIDER_TYPE_CHOICES = (
        (OAUTH2, 'OAuth2 Provider'),
        (SAML, 'SAML Provider'),
        (LTI, 'LTI Provider'),
    )
    identity_provider_type = models.CharField(
        max_length=100,
        blank=False,
        choices=IDENTITY_PROVIDER_TYPE_CHOICES,
        default=SAML,
        help_text=(
            'Specifies which type of Identity Provider this verification originated from.'
        )
    )
    identity_provider_slug = models.SlugField(
        max_length=30, db_index=True, default='default',
        help_text=(
            'The slug uniquely identifying the Identity Provider this verification originated from.'
        ))
    class Meta(object):
        app_label = "verify_student"
    def __unicode__(self):
        # Human-readable representation used in the Django admin and logs.
        return 'SSOIDVerification for {name}, status: {status}'.format(
            name=self.name,
            status=self.status,
        )
    def should_display_status_to_user(self):
        """Whether or not the status from this attempt should be displayed to the user."""
        # SSO verification happens implicitly at sign-in; nothing to show.
        return False
class PhotoVerification(IDVerificationAttempt):
    """
    Each PhotoVerification represents a Student's attempt to establish
    their identity by uploading a photo of themselves and a picture ID. An
    attempt actually has a number of fields that need to be filled out at
    different steps of the approval process. While it's useful as a Django Model
    for the querying facilities, **you should only edit a `PhotoVerification`
    object through the methods provided**. Initialize them with a user:
    attempt = PhotoVerification(user=user)
    We track this attempt through various states:
    `created`
        Initial creation and state we're in after uploading the images.
    `ready`
        The user has uploaded their images and checked that they can read the
        images. There's a separate state here because it may be the case that we
        don't actually submit this attempt for review until payment is made.
    `submitted`
        Submitted for review. The review may be done by a staff member or an
        external service. The user cannot make changes once in this state.
    `must_retry`
        We submitted this, but there was an error on submission (i.e. we did not
        get a 200 when we POSTed to Software Secure)
    `approved`
        An admin or an external service has confirmed that the user's photo and
        photo ID match up, and that the photo ID's name matches the user's.
    `denied`
        The request has been denied. See `error_msg` for details on why. An
        admin might later override this and change to `approved`, but the
        student cannot re-open this attempt -- they have to create another
        attempt and submit it instead.
    Because this Model inherits from IDVerificationAttempt, which inherits
    from StatusModel, we can also do things like:
    attempt.status == PhotoVerification.STATUS.created
    attempt.status == "created"
    pending_requests = PhotoVerification.submitted.all()
    .. pii: The User's name is stored in the parent model, this one stores links to face and photo ID images
    .. pii_types: name, image
    .. pii_retirement: retained
    """
    ######################## Fields Set During Creation ########################
    # See class docstring for description of status states
    # Where we place the uploaded image files (e.g. S3 URLs)
    face_image_url = models.URLField(blank=True, max_length=255)
    photo_id_image_url = models.URLField(blank=True, max_length=255)
    # Randomly generated UUID so that external services can post back the
    # results of checking a user's photo submission without us exposing actual
    # user IDs or something too easily guessable.
    receipt_id = models.CharField(
        db_index=True,
        default=generateUUID,
        max_length=255,
    )
    # Indicates whether or not a user wants to see the verification status
    # displayed on their dash. Right now, only relevant for allowing students
    # to "dismiss" a failed midcourse reverification message
    # TODO: This field is deprecated.
    display = models.BooleanField(db_index=True, default=True)
    ######################## Fields Set When Submitting ########################
    submitted_at = models.DateTimeField(null=True, db_index=True)
    #################### Fields Set During Approval/Denial #####################
    # If the review was done by an internal staff member, mark who it was.
    reviewing_user = models.ForeignKey(
        User,
        db_index=True,
        default=None,
        null=True,
        related_name="photo_verifications_reviewed",
        on_delete=models.CASCADE,
    )
    # Mark the name of the service used to evaluate this attempt (e.g
    # Software Secure).
    reviewing_service = models.CharField(blank=True, max_length=255)
    # If status is "denied", this should contain text explaining why.
    error_msg = models.TextField(blank=True)
    # Non-required field. External services can add any arbitrary codes as time
    # goes on. We don't try to define an exhaustive list -- this is just
    # capturing it so that we can later query for the common problems.
    error_code = models.CharField(blank=True, max_length=50)
    class Meta(object):
        app_label = "verify_student"
        abstract = True
        ordering = ['-created_at']
    def parsed_error_msg(self):
        """
        Sometimes, the error message we've received needs to be parsed into
        something more human readable
        The default behavior is to return the current error message as is.
        """
        return self.error_msg
    @status_before_must_be("created")
    def upload_face_image(self, img):
        # Subclasses define how/where the face image is stored.
        raise NotImplementedError
    @status_before_must_be("created")
    def upload_photo_id_image(self, img):
        # Subclasses define how/where the photo ID image is stored.
        raise NotImplementedError
    @status_before_must_be("created")
    def mark_ready(self):
        """
        Mark that the user data in this attempt is correct. In order to
        succeed, the user must have uploaded the necessary images
        (`face_image_url`, `photo_id_image_url`). This method will also copy
        their name from their user profile. Prior to marking it ready, we read
        this value directly from their profile, since they're free to change it.
        This often happens because people put in less formal versions of their
        name on signup, but realize they want something different to go on a
        formal document.
        Valid attempt statuses when calling this method:
            `created`
        Status after method completes: `ready`
        Other fields that will be set by this method:
            `name`
        State Transitions:
        `created` → `ready`
            This is what happens when the user confirms to us that the pictures
            they uploaded are good. Note that we don't actually do a submission
            anywhere yet.
        """
        # At any point prior to this, they can change their names via their
        # student dashboard. But at this point, we lock the value into the
        # attempt.
        self.name = self.user.profile.name
        self.status = "ready"
        self.save()
    @status_before_must_be("must_retry", "submitted", "approved", "denied")
    def approve(self, user_id=None, service=""):
        """
        Approve this attempt. `user_id`
        Valid attempt statuses when calling this method:
            `must_retry`, `submitted`, `approved`, `denied`
        Status after method completes: `approved`
        Other fields that will be set by this method:
            `reviewed_by_user_id`, `reviewed_by_service`, `error_msg`
        State Transitions:
        `submitted` → `approved`
            This is the usual flow, whether initiated by a staff user or an
            external validation service.
        `approved` → `approved`
            No-op. First one to approve it wins.
        `denied` → `approved`
            This might happen if a staff member wants to override a decision
            made by an external service or another staff member (say, in
            response to a support request). In this case, the previous values
            of `reviewed_by_user_id` and `reviewed_by_service` will be changed
            to whoever is doing the approving, and `error_msg` will be reset.
            The only record that this record was ever denied would be in our
            logs. This should be a relatively rare occurrence.
        """
        # If someone approves an outdated version of this, the first one wins
        if self.status == "approved":
            return
        log.info(u"Verification for user '{user_id}' approved by '{reviewer}'.".format(
            user_id=self.user, reviewer=user_id
        ))
        self.error_msg = ""  # reset, in case this attempt was denied before
        self.error_code = ""  # reset, in case this attempt was denied before
        self.reviewing_user = user_id
        self.reviewing_service = service
        self.status = "approved"
        self.save()
        # Emit signal to find and generate eligible certificates
        LEARNER_NOW_VERIFIED.send_robust(
            sender=PhotoVerification,
            user=self.user
        )
    @status_before_must_be("must_retry", "submitted", "approved", "denied")
    def deny(self,
             error_msg,
             error_code="",
             reviewing_user=None,
             reviewing_service=""):
        """
        Deny this attempt.
        Valid attempt statuses when calling this method:
            `must_retry`, `submitted`, `approved`, `denied`
        Status after method completes: `denied`
        Other fields that will be set by this method:
            `reviewed_by_user_id`, `reviewed_by_service`, `error_msg`,
            `error_code`
        State Transitions:
        `submitted` → `denied`
            This is the usual flow, whether initiated by a staff user or an
            external validation service.
        `approved` → `denied`
            This might happen if a staff member wants to override a decision
            made by an external service or another staff member, or just correct
            a mistake made during the approval process. In this case, the
            previous values of `reviewed_by_user_id` and `reviewed_by_service`
            will be changed to whoever is doing the denying. The only record
            that this record was ever approved would be in our logs. This should
            be a relatively rare occurrence.
        `denied` → `denied`
            Update the error message and reviewing_user/reviewing_service. Just
            lets you amend the error message in case there were additional
            details to be made.
        """
        log.info(u"Verification for user '{user_id}' denied by '{reviewer}'.".format(
            user_id=self.user, reviewer=reviewing_user
        ))
        self.error_msg = error_msg
        self.error_code = error_code
        self.reviewing_user = reviewing_user
        self.reviewing_service = reviewing_service
        self.status = "denied"
        self.save()
    @status_before_must_be("must_retry", "submitted", "approved", "denied")
    def system_error(self,
                     error_msg,
                     error_code="",
                     reviewing_user=None,
                     reviewing_service=""):
        """
        Mark that this attempt could not be completed because of a system error.
        Status should be moved to `must_retry`. For example, if Software Secure
        reported to us that they couldn't process our submission because they
        couldn't decrypt the image we sent.
        """
        # A terminal decision (approved/denied) always outranks a system error.
        if self.status in ["approved", "denied"]:
            return  # If we were already approved or denied, just leave it.
        self.error_msg = error_msg
        self.error_code = error_code
        self.reviewing_user = reviewing_user
        self.reviewing_service = reviewing_service
        self.status = "must_retry"
        self.save()
    @classmethod
    def retire_user(cls, user_id):
        """
        Retire user as part of GDPR Phase I
        Returns 'True' if records found
        :param user_id: int
        :return: bool
        """
        try:
            user_obj = User.objects.get(id=user_id)
        except User.DoesNotExist:
            return False
        # Blank out all PII fields in bulk; `update` returns the row count.
        # NOTE(review): `photo_id_key` is declared on
        # SoftwareSecurePhotoVerification, not on this abstract base -- confirm
        # this classmethod is only invoked on subclasses that define it.
        photo_objects = cls.objects.filter(
            user=user_obj
        ).update(
            name='',
            face_image_url='',
            photo_id_image_url='',
            photo_id_key=''
        )
        return photo_objects > 0
class SoftwareSecurePhotoVerification(PhotoVerification):
    """
    Model to verify identity using a service provided by Software Secure. Much
    of the logic is inherited from `PhotoVerification`, but this class
    encrypts the photos.
    Software Secure (http://www.softwaresecure.com/) is a remote proctoring
    service that also does identity verification. A student uses their webcam
    to upload two images: one of their face, one of a photo ID. Due to the
    sensitive nature of the data, the following security precautions are taken:
    1. The snapshot of their face is encrypted using AES-256 in CBC mode. All
       face photos are encrypted with the same key, and this key is known to
       both Software Secure and edx-platform.
    2. The snapshot of a user's photo ID is also encrypted using AES-256, but
       the key is randomly generated using os.urandom. Every verification
       attempt has a new key. The AES key is then encrypted using a public key
       provided by Software Secure. We store only the RSA-encrypted AES key.
       Since edx-platform does not have Software Secure's private RSA key, it
       means that we can no longer even read photo ID.
    3. The encrypted photos are base64 encoded and stored in an S3 bucket that
       edx-platform does not have read access to.
    Note: this model handles *initial* verifications (which you must perform
    at the time you register for a verified cert).
    .. pii: The User's name is stored in the parent model, this one stores links to face and photo ID images
    .. pii_types: name, image
    .. pii_retirement: retained
    """
    # This is a base64.urlsafe_encode(rsa_encrypt(photo_id_aes_key), ss_pub_key)
    # So first we generate a random AES-256 key to encrypt our photo ID with.
    # Then we RSA encrypt it with Software Secure's public key. Then we base64
    # encode that. The result is saved here. Actual expected length is 344.
    photo_id_key = models.TextField(max_length=1024)
    IMAGE_LINK_DURATION = 5 * 60 * 60 * 24  # 5 days in seconds
    copy_id_photo_from = models.ForeignKey("self", null=True, blank=True, on_delete=models.CASCADE)
    # Fields for functionality of sending email when verification expires
    # expiry_date: The date when the SoftwareSecurePhotoVerification will expire
    # expiry_email_date: This field is used to maintain a check for learners to which email
    # to notify for expired verification is already sent.
    expiry_date = models.DateTimeField(null=True, blank=True, db_index=True)
    expiry_email_date = models.DateTimeField(null=True, blank=True, db_index=True)
    @status_before_must_be("must_retry", "submitted", "approved", "denied")
    def approve(self, user_id=None, service=""):
        """
        Approve the verification attempt for user
        Valid attempt statuses when calling this method:
            `submitted`, `approved`, `denied`
        After method completes:
            status is set to `approved`
            expiry_date is set to one year from now
        """
        # Set expiry before delegating; the parent handles status/logging.
        self.expiry_date = now() + timedelta(
            days=settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
        )
        super(SoftwareSecurePhotoVerification, self).approve(user_id, service)
    @classmethod
    def get_initial_verification(cls, user, earliest_allowed_date=None):
        """Get initial verification for a user with the 'photo_id_key'.
        Arguments:
            user(User): user object
            earliest_allowed_date(datetime): override expiration date for initial verification
        Return:
            SoftwareSecurePhotoVerification (object) or None
        """
        init_verification = cls.objects.filter(
            user=user,
            status__in=["submitted", "approved"],
            created_at__gte=(
                earliest_allowed_date or earliest_allowed_verification_date()
            )
        ).exclude(photo_id_key='')
        return init_verification.latest('created_at') if init_verification.exists() else None
    @status_before_must_be("created")
    def upload_face_image(self, img_data):
        """
        Upload an image of the user's face. `img_data` should be a raw
        bytestream of a PNG image. This method will take the data, encrypt it
        using our FACE_IMAGE_AES_KEY, encode it with base64 and save it to the
        storage backend.
        Yes, encoding it to base64 adds compute and disk usage without much real
        benefit, but that's what the other end of this API is expecting to get.
        """
        # Skip this whole thing if we're running acceptance tests or if we're
        # developing and aren't interested in working on student identity
        # verification functionality. If you do want to work on it, you have to
        # explicitly enable these in your private settings.
        if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
            return
        aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
        # NOTE(review): str.decode("hex") is Python 2 only; under Python 3 this
        # would need codecs.decode(aes_key_str, "hex") or bytes.fromhex().
        aes_key = aes_key_str.decode("hex")
        path = self._get_path("face")
        buff = ContentFile(encrypt_and_encode(img_data, aes_key))
        self._storage.save(path, buff)
    @status_before_must_be("created")
    def upload_photo_id_image(self, img_data):
        """
        Upload an the user's photo ID image. `img_data` should be a raw
        bytestream of a PNG image. This method will take the data, encrypt it
        using a randomly generated AES key, encode it with base64 and save it
        to the storage backend. The random key is also encrypted using Software
        Secure's public RSA key and stored in our `photo_id_key` field.
        Yes, encoding it to base64 adds compute and disk usage without much real
        benefit, but that's what the other end of this API is expecting to get.
        """
        # Skip this whole thing if we're running acceptance tests or if we're
        # developing and aren't interested in working on student identity
        # verification functionality. If you do want to work on it, you have to
        # explicitly enable these in your private settings.
        if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
            # fake photo id key is set only for initial verification
            self.photo_id_key = 'fake-photo-id-key'
            self.save()
            return
        aes_key = random_aes_key()
        rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
        rsa_encrypted_aes_key = rsa_encrypt(aes_key, rsa_key_str)
        # Save this to the storage backend
        path = self._get_path("photo_id")
        buff = ContentFile(encrypt_and_encode(img_data, aes_key))
        self._storage.save(path, buff)
        # Update our record fields
        # NOTE(review): .encode('base64') is Python 2 only; Python 3 would need
        # base64.b64encode().
        self.photo_id_key = rsa_encrypted_aes_key.encode('base64')
        self.save()
    @status_before_must_be("must_retry", "ready", "submitted")
    def submit(self, copy_id_photo_from=None):
        """
        Submit our verification attempt to Software Secure for validation. This
        will set our status to "submitted" if the post is successful, and
        "must_retry" if the post fails.
        Keyword Arguments:
            copy_id_photo_from (SoftwareSecurePhotoVerification): If provided, re-send the ID photo
                data from this attempt. This is used for reverification, in which new face photos
                are sent with previously-submitted ID photos.
        """
        try:
            response = self.send_request(copy_id_photo_from=copy_id_photo_from)
            if response.ok:
                self.submitted_at = now()
                self.status = "submitted"
                self.save()
            else:
                # Non-2xx response: keep the body for later diagnosis.
                self.status = "must_retry"
                self.error_msg = response.text
                self.save()
        except Exception:  # pylint: disable=broad-except
            # Network/encryption failures are recoverable; mark for retry
            # rather than propagating.
            log.exception(
                u'Software Secure submission failed for user %s, setting status to must_retry',
                self.user.username
            )
            self.status = "must_retry"
            self.save()
    def parsed_error_msg(self):
        """
        Parse the error messages we receive from SoftwareSecure
        Error messages are written in the form:
            `[{"photoIdReasons": ["Not provided"]}]`
        Returns:
            str[]: List of error messages.
        """
        parsed_errors = []
        # Maps Software Secure's human-readable reasons to our stable codes.
        error_map = {
            'EdX name not provided': 'name_mismatch',
            'Name mismatch': 'name_mismatch',
            'Photo/ID Photo mismatch': 'photos_mismatched',
            'ID name not provided': 'id_image_missing_name',
            'Invalid Id': 'id_invalid',
            'No text': 'id_invalid',
            'Not provided': 'id_image_missing',
            'Photo hidden/No photo': 'id_image_not_clear',
            'Text not clear': 'id_image_not_clear',
            'Face out of view': 'user_image_not_clear',
            'Image not clear': 'user_image_not_clear',
            'Photo not provided': 'user_image_missing',
        }
        try:
            messages = set()
            message_groups = json.loads(self.error_msg)
            for message_group in message_groups:
                # NOTE(review): set(*values) raises TypeError when a group has
                # more than one key (set() takes a single iterable); the broad
                # except below swallows that case. Confirm groups are
                # single-key before relying on multi-key payloads.
                messages = messages.union(set(*six.itervalues(message_group)))
            for message in messages:
                parsed_error = error_map.get(message)
                if parsed_error:
                    parsed_errors.append(parsed_error)
                else:
                    log.debug(u'Ignoring photo verification error message: %s', message)
        except Exception:  # pylint: disable=broad-except
            log.exception(u'Failed to parse error message for SoftwareSecurePhotoVerification %d', self.pk)
        return parsed_errors
    def image_url(self, name, override_receipt_id=None):
        """
        We dynamically generate this, since we want the expiration clock to
        start when the message is created, not when the record is created.
        Arguments:
            name (str): Name of the image (e.g. "photo_id" or "face")
        Keyword Arguments:
            override_receipt_id (str): If provided, use this receipt ID instead
                of the ID for this attempt. This is useful for reverification
                where we need to construct a URL to a previously-submitted
                photo ID image.
        Returns:
            string: The expiring URL for the image.
        """
        path = self._get_path(name, override_receipt_id=override_receipt_id)
        return self._storage.url(path)
    @cached_property
    def _storage(self):
        """
        Return the configured django storage backend.
        """
        config = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]
        # Default to the S3 backend for backward compatibility
        storage_class = config.get("STORAGE_CLASS", "storages.backends.s3boto.S3BotoStorage")
        storage_kwargs = config.get("STORAGE_KWARGS", {})
        # Map old settings to the parameters expected by the storage backend
        if "AWS_ACCESS_KEY" in config:
            storage_kwargs["access_key"] = config["AWS_ACCESS_KEY"]
        if "AWS_SECRET_KEY" in config:
            storage_kwargs["secret_key"] = config["AWS_SECRET_KEY"]
        if "S3_BUCKET" in config:
            storage_kwargs["bucket"] = config["S3_BUCKET"]
            storage_kwargs["querystring_expire"] = self.IMAGE_LINK_DURATION
        return get_storage(storage_class, **storage_kwargs)
    def _get_path(self, prefix, override_receipt_id=None):
        """
        Returns the path to a resource with this instance's `receipt_id`.
        If `override_receipt_id` is given, the path to that resource will be
        retrieved instead. This allows us to retrieve images submitted in
        previous attempts (used for reverification, where we send a new face
        photo with the same photo ID from a previous attempt).
        """
        receipt_id = self.receipt_id if override_receipt_id is None else override_receipt_id
        return os.path.join(prefix, receipt_id)
    def _encrypted_user_photo_key_str(self):
        """
        Software Secure needs to have both UserPhoto and PhotoID decrypted in
        the same manner. So even though this is going to be the same for every
        request, we're also using RSA encryption to encrypt the AES key for
        faces.
        """
        face_aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
        # Python 2 hex/base64 codecs, as in upload_face_image above.
        face_aes_key = face_aes_key_str.decode("hex")
        rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
        rsa_encrypted_face_aes_key = rsa_encrypt(face_aes_key, rsa_key_str)
        return rsa_encrypted_face_aes_key.encode("base64")
    def create_request(self, copy_id_photo_from=None):
        """
        Construct the HTTP request to the photo verification service.
        Keyword Arguments:
            copy_id_photo_from (SoftwareSecurePhotoVerification): If provided, re-send the ID photo
                data from this attempt. This is used for reverification, in which new face photos
                are sent with previously-submitted ID photos.
        Returns:
            tuple of (header, body), where both `header` and `body` are dictionaries.
        """
        access_key = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"]
        secret_key = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_SECRET_KEY"]
        scheme = "https" if settings.HTTPS == "on" else "http"
        callback_url = "{}://{}{}".format(
            scheme, settings.SITE_NAME, reverse('verify_student_results_callback')
        )
        # If we're copying the photo ID image from a previous verification attempt,
        # then we need to send the old image data with the correct image key.
        photo_id_url = (
            self.image_url("photo_id")
            if copy_id_photo_from is None
            else self.image_url("photo_id", override_receipt_id=copy_id_photo_from.receipt_id)
        )
        photo_id_key = (
            self.photo_id_key
            if copy_id_photo_from is None else
            copy_id_photo_from.photo_id_key
        )
        body = {
            "EdX-ID": str(self.receipt_id),
            "ExpectedName": self.name,
            "PhotoID": photo_id_url,
            "PhotoIDKey": photo_id_key,
            "SendResponseTo": callback_url,
            "UserPhoto": self.image_url("face"),
            "UserPhotoKey": self._encrypted_user_photo_key_str(),
        }
        headers = {
            "Content-Type": "application/json",
            "Date": formatdate(timeval=None, localtime=False, usegmt=True)
        }
        # HMAC-style signing of the request; the Authorization header proves
        # to Software Secure that we hold the shared secret.
        _message, _sig, authorization = generate_signed_message(
            "POST", headers, body, access_key, secret_key
        )
        headers['Authorization'] = authorization
        return headers, body
    def request_message_txt(self):
        """
        This is the body of the request we send across. This is never actually
        used in the code, but exists for debugging purposes -- you can call
        `print attempt.request_message_txt()` on the console and get a readable
        rendering of the request that would be sent across, without actually
        sending anything.
        """
        headers, body = self.create_request()
        header_txt = "\n".join(
            u"{}: {}".format(h, v) for h, v in sorted(headers.items())
        )
        body_txt = json.dumps(body, indent=2, sort_keys=True, ensure_ascii=False).encode('utf-8')
        return header_txt + "\n\n" + body_txt
    def send_request(self, copy_id_photo_from=None):
        """
        Assembles a submission to Software Secure and sends it via HTTPS.
        Keyword Arguments:
            copy_id_photo_from (SoftwareSecurePhotoVerification): If provided, re-send the ID photo
                data from this attempt. This is used for reverification, in which new face photos
                are sent with previously-submitted ID photos.
        Returns:
            request.Response
        """
        # If AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING is True, we want to
        # skip posting anything to Software Secure. We actually don't even
        # create the message because that would require encryption and message
        # signing that rely on settings.VERIFY_STUDENT values that aren't set
        # in dev. So we just pretend like we successfully posted
        if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
            fake_response = requests.Response()
            fake_response.status_code = 200
            return fake_response
        headers, body = self.create_request(copy_id_photo_from=copy_id_photo_from)
        # NOTE(review): verify=False disables TLS certificate validation for
        # this POST -- a security risk if the endpoint is reachable over an
        # untrusted network. Confirm whether a CA bundle can be supplied.
        response = requests.post(
            settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_URL"],
            headers=headers,
            data=json.dumps(body, indent=2, sort_keys=True, ensure_ascii=False).encode('utf-8'),
            verify=False
        )
        log.info(u"Sent request to Software Secure for receipt ID %s.", self.receipt_id)
        if copy_id_photo_from is not None:
            log.info(
                (
                    u"Software Secure attempt with receipt ID %s used the same photo ID "
                    u"data as the receipt with ID %s"
                ),
                self.receipt_id, copy_id_photo_from.receipt_id
            )
        log.debug("Headers:\n{}\n\n".format(headers))
        log.debug("Body:\n{}\n\n".format(body))
        log.debug(u"Return code: {}".format(response.status_code))
        log.debug(u"Return message:\n\n{}\n\n".format(response.text))
        return response
    def should_display_status_to_user(self):
        """Whether or not the status from this attempt should be displayed to the user."""
        return True
class VerificationDeadline(TimeStampedModel):
    """
    Represent a verification deadline for a particular course.
    The verification deadline is the datetime after which
    users are no longer allowed to submit photos for initial verification
    in a course.
    Note that this is NOT the same as the "upgrade" deadline, after
    which a user is no longer allowed to upgrade to a verified enrollment.
    If no verification deadline record exists for a course,
    then that course does not have a deadline. This means that users
    can submit photos at any time.
    .. no_pii:
    """
    class Meta(object):
        app_label = "verify_student"
    course_key = CourseKeyField(
        max_length=255,
        db_index=True,
        unique=True,
        help_text=ugettext_lazy(u"The course for which this deadline applies"),
    )
    deadline = models.DateTimeField(
        help_text=ugettext_lazy(
            u"The datetime after which users are no longer allowed "
            "to submit photos for verification."
        )
    )
    # The system prefers to set this automatically based on default settings. But
    # if the field is set manually we want a way to indicate that so we don't
    # overwrite the manual setting of the field.
    deadline_is_explicit = models.BooleanField(default=False)
    # Cache key holding a {course_key: deadline} map for ALL courses; it is
    # invalidated by the post_save/post_delete receivers below this class.
    ALL_DEADLINES_CACHE_KEY = "verify_student.all_verification_deadlines"
    @classmethod
    def set_deadline(cls, course_key, deadline, is_explicit=False):
        """
        Configure the verification deadline for a course.
        If `deadline` is `None`, then the course will have no verification
        deadline. In this case, users will be able to verify for the course
        at any time.
        Arguments:
            course_key (CourseKey): Identifier for the course.
            deadline (datetime or None): The verification deadline.
        """
        if deadline is None:
            # No deadline means "verify any time": remove any existing record.
            VerificationDeadline.objects.filter(course_key=course_key).delete()
        else:
            record, created = VerificationDeadline.objects.get_or_create(
                course_key=course_key,
                defaults={"deadline": deadline, "deadline_is_explicit": is_explicit}
            )
            if not created:
                record.deadline = deadline
                record.deadline_is_explicit = is_explicit
                record.save()
    @classmethod
    def deadlines_for_courses(cls, course_keys):
        """
        Retrieve verification deadlines for particular courses.
        Arguments:
            course_keys (list): List of `CourseKey`s.
        Returns:
            dict: Map of course keys to datetimes (verification deadlines)
        """
        all_deadlines = cache.get(cls.ALL_DEADLINES_CACHE_KEY)
        if all_deadlines is None:
            # Cache miss: load every deadline once and cache the whole map.
            all_deadlines = {
                deadline.course_key: deadline.deadline
                for deadline in VerificationDeadline.objects.all()
            }
            cache.set(cls.ALL_DEADLINES_CACHE_KEY, all_deadlines)
        # Courses without a deadline are simply absent from the result.
        return {
            course_key: all_deadlines[course_key]
            for course_key in course_keys
            if course_key in all_deadlines
        }
    @classmethod
    def deadline_for_course(cls, course_key):
        """
        Retrieve the verification deadline for a particular course.
        Arguments:
            course_key (CourseKey): The identifier for the course.
        Returns:
            datetime or None
        """
        try:
            deadline = cls.objects.get(course_key=course_key)
            return deadline.deadline
        except cls.DoesNotExist:
            return None
@receiver(models.signals.post_save, sender=VerificationDeadline)
@receiver(models.signals.post_delete, sender=VerificationDeadline)
def invalidate_deadline_caches(sender, **kwargs):  # pylint: disable=unused-argument
    """Invalidate the cached verification deadline information. """
    # Any create/update/delete of a deadline makes the all-deadlines map stale.
    cache.delete(VerificationDeadline.ALL_DEADLINES_CACHE_KEY)
| jolyonb/edx-platform | lms/djangoapps/verify_student/models.py | Python | agpl-3.0 | 40,982 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime
from dateutil.relativedelta import relativedelta
import time
from operator import itemgetter
from itertools import groupby
from openerp.osv import fields, osv, orm
from openerp.tools.translate import _
from openerp import netsvc
from openerp import tools
from openerp.tools import float_compare, DEFAULT_SERVER_DATETIME_FORMAT
import openerp.addons.decimal_precision as dp
import logging
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# Incoterms
#----------------------------------------------------------
class stock_incoterms(osv.osv):
    """Declarative model for Incoterms (international commercial terms).

    Pure data model: no business methods, only column definitions and
    defaults. Records can be archived (not deleted) via ``active``.
    """
    _name = "stock.incoterms"
    _description = "Incoterms"
    _columns = {
        'name': fields.char('Name', size=64, required=True, help="Incoterms are series of sales terms.They are used to divide transaction costs and responsibilities between buyer and seller and reflect state-of-the-art transportation practices."),
        # 3-letter code, e.g. the standard Incoterm abbreviations.
        'code': fields.char('Code', size=3, required=True, help="Code for Incoterms"),
        'active': fields.boolean('Active', help="By unchecking the active field, you may hide an INCOTERM without deleting it."),
    }
    _defaults = {
        'active': True,  # new incoterms are visible by default
    }
stock_incoterms()
class stock_journal(osv.osv):
    """Stock journal: a named grouping for stock operations, with an
    optional responsible user. Purely declarative, no methods.
    """
    _name = "stock.journal"
    _description = "Stock Journal"
    _columns = {
        'name': fields.char('Stock Journal', size=32, required=True),
        'user_id': fields.many2one('res.users', 'Responsible'),
    }
    _defaults = {
        # Legacy default signature is (self, cr, uid, context); the journal's
        # responsible defaults to the user creating it.
        'user_id': lambda s, c, u, ctx: u
    }
stock_journal()
#----------------------------------------------------------
# Stock Location
#----------------------------------------------------------
class stock_location(osv.osv):
    """Stock location (physical or virtual).

    Locations form a tree through ``location_id``; the hierarchy is
    materialized with ``parent_left``/``parent_right`` (``_parent_store``)
    so ``child_of`` searches are cheap. A location also carries the
    "chaining" configuration used to generate follow-up stock moves
    (see ``chained_location_get``).
    """
    _name = "stock.location"
    _description = "Location"
    _parent_name = "location_id"
    _parent_store = True
    _parent_order = 'posz,name'
    _order = 'parent_left'
    # TODO: implement name_search() in a way that matches the results of name_get!
    def name_get(self, cr, uid, ids, context=None):
        """Display each location by its full hierarchical path (``A / B / C``)."""
        # always return the full hierarchical name
        res = self._complete_name(cr, uid, ids, 'complete_name', None, context=context)
        return res.items()
    def _complete_name(self, cr, uid, ids, name, args, context=None):
        """ Forms complete name of location from parent location to child location.
        Walks ``location_id`` links up to the root and joins the names with
        ``' / '`` from root to leaf.
        @return: Dictionary of values  {location_id: complete name}
        """
        res = {}
        for m in self.browse(cr, uid, ids, context=context):
            names = [m.name]
            parent = m.location_id
            while parent:
                names.append(parent.name)
                parent = parent.location_id
            # names were collected leaf-first; reverse to get root-first order
            res[m.id] = ' / '.join(reversed(names))
        return res
    def _get_sublocations(self, cr, uid, ids, context=None):
        """ return all sublocations of the given stock locations (included) """
        return self.search(cr, uid, [('id', 'child_of', ids)], context=context)
    def _product_value(self, cr, uid, ids, field_names, arg, context=None):
        """Computes stock value (real and virtual) for a product, as well as stock qty (real and virtual).

        Function-field backend for ``stock_real``, ``stock_virtual``,
        ``stock_real_value`` and ``stock_virtual_value``. Only meaningful
        when ``context['product_id']`` is set (e.g. when viewed from a
        product form); otherwise every requested field is returned as 0.0.
        @param field_names: Name of field
        @return: Dictionary of values  {location_id: {field_name: value}}
        """
        prod_id = context and context.get('product_id', False)
        if not prod_id:
            # No product in context: nothing to value, return zeros for all ids.
            return dict([(i, {}.fromkeys(field_names, 0.0)) for i in ids])
        product_product_obj = self.pool.get('product.product')
        # Restrict the computation to (product, location) pairs that have ever
        # had a stock move in (dict2) or out (dict1) of the given locations.
        cr.execute('select distinct product_id, location_id from stock_move where location_id in %s', (tuple(ids), ))
        dict1 = cr.dictfetchall()
        cr.execute('select distinct product_id, location_dest_id as location_id from stock_move where location_dest_id in %s', (tuple(ids), ))
        dict2 = cr.dictfetchall()
        # groupby() needs its input sorted by the grouping key
        res_products_by_location = sorted(dict1+dict2, key=itemgetter('location_id'))
        products_by_location = dict((k, [v['product_id'] for v in itr]) for k, itr in groupby(res_products_by_location, itemgetter('location_id')))
        result = dict([(i, {}.fromkeys(field_names, 0.0)) for i in ids])
        result.update(dict([(i, {}.fromkeys(field_names, 0.0)) for i in list(set([aaa['location_id'] for aaa in res_products_by_location]))]))
        # Monetary values are rounded in the current user's company currency.
        currency_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.currency_id.id
        currency_obj = self.pool.get('res.currency')
        currency = currency_obj.browse(cr, uid, currency_id, context=context)
        for loc_id, product_ids in products_by_location.items():
            if prod_id:
                # context carried a specific product: ignore the per-location list
                product_ids = [prod_id]
            c = (context or {}).copy()
            # qty_available / virtual_available are location-sensitive via context
            c['location'] = loc_id
            for prod in product_product_obj.browse(cr, uid, product_ids, context=c):
                for f in field_names:
                    if f == 'stock_real':
                        if loc_id not in result:
                            result[loc_id] = {}
                        result[loc_id][f] += prod.qty_available
                    elif f == 'stock_virtual':
                        result[loc_id][f] += prod.virtual_available
                    elif f == 'stock_real_value':
                        # on-hand qty valued at the product's standard (cost) price
                        amount = prod.qty_available * prod.standard_price
                        amount = currency_obj.round(cr, uid, currency, amount)
                        result[loc_id][f] += amount
                    elif f == 'stock_virtual_value':
                        # forecast qty valued at the product's standard (cost) price
                        amount = prod.virtual_available * prod.standard_price
                        amount = currency_obj.round(cr, uid, currency, amount)
                        result[loc_id][f] += amount
        return result
    _columns = {
        'name': fields.char('Location Name', size=64, required=True, translate=True),
        'active': fields.boolean('Active', help="By unchecking the active field, you may hide a location without deleting it."),
        'usage': fields.selection([('supplier', 'Supplier Location'), ('view', 'View'), ('internal', 'Internal Location'), ('customer', 'Customer Location'), ('inventory', 'Inventory'), ('procurement', 'Procurement'), ('production', 'Production'), ('transit', 'Transit Location for Inter-Companies Transfers')], 'Location Type', required=True,
                 help="""* Supplier Location: Virtual location representing the source location for products coming from your suppliers
                       \n* View: Virtual location used to create a hierarchical structures for your warehouse, aggregating its child locations ; can't directly contain products
                       \n* Internal Location: Physical locations inside your own warehouses,
                       \n* Customer Location: Virtual location representing the destination location for products sent to your customers
                       \n* Inventory: Virtual location serving as counterpart for inventory operations used to correct stock levels (Physical inventories)
                       \n* Procurement: Virtual location serving as temporary counterpart for procurement operations when the source (supplier or production) is not known yet. This location should be empty when the procurement scheduler has finished running.
                       \n* Production: Virtual counterpart location for production operations: this location consumes the raw material and produces finished products
                      """, select = True),
        # temporarily removed, as it's unused: 'allocation_method': fields.selection([('fifo', 'FIFO'), ('lifo', 'LIFO'), ('nearest', 'Nearest')], 'Allocation Method', required=True),
        'complete_name': fields.function(_complete_name, type='char', size=256, string="Location Name",
                            store={'stock.location': (_get_sublocations, ['name', 'location_id'], 10)}),
        'stock_real': fields.function(_product_value, type='float', string='Real Stock', multi="stock"),
        'stock_virtual': fields.function(_product_value, type='float', string='Virtual Stock', multi="stock"),
        'location_id': fields.many2one('stock.location', 'Parent Location', select=True, ondelete='cascade'),
        'child_ids': fields.one2many('stock.location', 'location_id', 'Contains'),
        'chained_journal_id': fields.many2one('stock.journal', 'Chaining Journal',help="Inventory Journal in which the chained move will be written, if the Chaining Type is not Transparent (no journal is used if left empty)"),
        'chained_location_id': fields.many2one('stock.location', 'Chained Location If Fixed'),
        'chained_location_type': fields.selection([('none', 'None'), ('customer', 'Customer'), ('fixed', 'Fixed Location')],
            'Chained Location Type', required=True,
            help="Determines whether this location is chained to another location, i.e. any incoming product in this location \n" \
                "should next go to the chained location. The chained location is determined according to the type :"\
                "\n* None: No chaining at all"\
                "\n* Customer: The chained location will be taken from the Customer Location field on the Partner form of the Partner that is specified in the Picking list of the incoming products." \
                "\n* Fixed Location: The chained location is taken from the next field: Chained Location if Fixed." \
                ),
        'chained_auto_packing': fields.selection(
            [('auto', 'Automatic Move'), ('manual', 'Manual Operation'), ('transparent', 'Automatic No Step Added')],
            'Chaining Type',
            required=True,
            help="This is used only if you select a chained location type.\n" \
                "The 'Automatic Move' value will create a stock move after the current one that will be "\
                "validated automatically. With 'Manual Operation', the stock move has to be validated "\
                "by a worker. With 'Automatic No Step Added', the location is replaced in the original move."
            ),
        'chained_picking_type': fields.selection([('out', 'Sending Goods'), ('in', 'Getting Goods'), ('internal', 'Internal')], 'Shipping Type', help="Shipping Type of the Picking List that will contain the chained move (leave empty to automatically detect the type based on the source and destination locations)."),
        'chained_company_id': fields.many2one('res.company', 'Chained Company', help='The company the Picking List containing the chained move will belong to (leave empty to use the default company determination rules'),
        'chained_delay': fields.integer('Chaining Lead Time',help="Delay between original move and chained move in days"),
        'partner_id': fields.many2one('res.partner', 'Location Address',help="Address of customer or supplier."),
        'icon': fields.selection(tools.icons, 'Icon', size=64,help="Icon show in hierarchical tree view"),
        'comment': fields.text('Additional Information'),
        # posx/posy/posz are informational coordinates; posz also drives _parent_order
        'posx': fields.integer('Corridor (X)',help="Optional localization details, for information purpose only"),
        'posy': fields.integer('Shelves (Y)', help="Optional localization details, for information purpose only"),
        'posz': fields.integer('Height (Z)', help="Optional localization details, for information purpose only"),
        # parent_left/parent_right: nested-set columns maintained by _parent_store
        'parent_left': fields.integer('Left Parent', select=1),
        'parent_right': fields.integer('Right Parent', select=1),
        'stock_real_value': fields.function(_product_value, type='float', string='Real Stock Value', multi="stock", digits_compute=dp.get_precision('Account')),
        'stock_virtual_value': fields.function(_product_value, type='float', string='Virtual Stock Value', multi="stock", digits_compute=dp.get_precision('Account')),
        'company_id': fields.many2one('res.company', 'Company', select=1, help='Let this field empty if this location is shared between all companies'),
        'scrap_location': fields.boolean('Scrap Location', help='Check this box to allow using this location to put scrapped/damaged goods.'),
        'valuation_in_account_id': fields.many2one('account.account', 'Stock Valuation Account (Incoming)', domain = [('type','=','other')],
                                                   help="Used for real-time inventory valuation. When set on a virtual location (non internal type), "
                                                        "this account will be used to hold the value of products being moved from an internal location "
                                                        "into this location, instead of the generic Stock Output Account set on the product. "
                                                        "This has no effect for internal locations."),
        'valuation_out_account_id': fields.many2one('account.account', 'Stock Valuation Account (Outgoing)', domain = [('type','=','other')],
                                                    help="Used for real-time inventory valuation. When set on a virtual location (non internal type), "
                                                         "this account will be used to hold the value of products being moved out of this location "
                                                         "and into an internal location, instead of the generic Stock Output Account set on the product. "
                                                         "This has no effect for internal locations."),
    }
    _defaults = {
        'active': True,
        'usage': 'internal',
        'chained_location_type': 'none',
        'chained_auto_packing': 'manual',
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.location', context=c),
        'posx': 0,
        'posy': 0,
        'posz': 0,
        'icon': False,
        'scrap_location': False,
    }
    def chained_location_get(self, cr, uid, location, partner=None, product=None, context=None):
        """ Finds chained location
        @param location: Location id (actually a browse record, see usage below)
        @param partner: Partner id (browse record; used for 'customer' chaining)
        @param product: Product id (unused here; kept for override hooks)
        @return: List of values
            7-tuple (chained location browse record, auto_packing, delay,
            journal id or False, company id or False, picking type, False)
            when a chained location applies, otherwise None.
        """
        result = None
        if location.chained_location_type == 'customer':
            if partner:
                result = partner.property_stock_customer
            else:
                # No partner given: fall back to the default customer location
                # from res.partner's property defaults.
                loc_id = self.pool['res.partner'].default_get(cr, uid, ['property_stock_customer'], context=context)['property_stock_customer']
                result = self.pool['stock.location'].browse(cr, uid, loc_id, context=context)
        elif location.chained_location_type == 'fixed':
            result = location.chained_location_id
        if result:
            return result, location.chained_auto_packing, location.chained_delay, location.chained_journal_id and location.chained_journal_id.id or False, location.chained_company_id and location.chained_company_id.id or False, location.chained_picking_type, False
        return result
    def picking_type_get(self, cr, uid, from_location, to_location, context=None):
        """ Gets type of picking.
        Classifies a move by its endpoints: internal -> customer/supplier is
        'out', supplier/customer -> internal is 'in', anything else 'internal'.
        @param from_location: Source location (browse record)
        @param to_location: Destination location (browse record)
        @return: Location type ('out', 'in' or 'internal')
        """
        result = 'internal'
        if (from_location.usage=='internal') and (to_location and to_location.usage in ('customer', 'supplier')):
            result = 'out'
        elif (from_location.usage in ('supplier', 'customer')) and (to_location.usage == 'internal'):
            result = 'in'
        return result
    def _product_get_all_report(self, cr, uid, ids, product_ids=False, context=None):
        # Recursive variant of _product_get_report (includes child locations).
        return self._product_get_report(cr, uid, ids, product_ids, context, recursive=True)
    def _product_get_report(self, cr, uid, ids, product_ids=False,
            context=None, recursive=False):
        """ Finds the product quantity and price for particular location.
        @param product_ids: Ids of product (False = all products, including inactive)
        @param recursive: True or False (include child locations or not)
        @return: Dictionary of values with keys 'product' (list of per-product
            dicts), 'total' (sum of quantities) and 'total_price'
        """
        if context is None:
            context = {}
        product_obj = self.pool.get('product.product')
        # Take the user company and pricetype
        context['currency_id'] = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
        # To be able to offer recursive or non-recursive reports we need to prevent recursive quantities by default
        context['compute_child'] = False
        # NOTE(review): the caller-supplied context dict is mutated in place here.
        if not product_ids:
            product_ids = product_obj.search(cr, uid, [], context={'active_test': False})
        products = product_obj.browse(cr, uid, product_ids, context=context)
        products_by_uom = {}
        products_by_id = {}
        for product in products:
            products_by_uom.setdefault(product.uom_id.id, [])
            products_by_uom[product.uom_id.id].append(product)
            products_by_id.setdefault(product.id, [])
            products_by_id[product.id] = product
        result = {}
        result['product'] = []
        for id in ids:
            quantity_total = 0.0
            total_price = 0.0
            # Quantities are fetched one UoM at a time so they can be summed
            # consistently (the 'uom' context key drives the conversion).
            for uom_id in products_by_uom.keys():
                fnc = self._product_get
                if recursive:
                    fnc = self._product_all_get
                ctx = context.copy()
                ctx['uom'] = uom_id
                qty = fnc(cr, uid, id, [x.id for x in products_by_uom[uom_id]],
                        context=ctx)
                for product_id in qty.keys():
                    if not qty[product_id]:
                        # skip products with zero quantity in this location
                        continue
                    product = products_by_id[product_id]
                    quantity_total += qty[product_id]
                    # Compute based on pricetype
                    # Choose the right filed standard_price to read
                    amount_unit = product.price_get('standard_price', context=context)[product.id]
                    price = qty[product_id] * amount_unit
                    total_price += price
                    result['product'].append({
                        'price': amount_unit,
                        'prod_name': product.name,
                        'code': product.default_code, # used by lot_overview_all report!
                        'variants': product.variants or '',
                        'uom': product.uom_id.name,
                        'prod_qty': qty[product_id],
                        'price_value': price,
                    })
        result['total'] = quantity_total
        result['total_price'] = total_price
        return result
    def _product_get_multi_location(self, cr, uid, ids, product_ids=False, context=None,
                                    states=['done'], what=('in', 'out')):
        """
        Delegates to product.product.get_product_available() with the given
        locations, states and direction injected via context.
        NOTE(review): mutable default argument ``states=['done']`` — safe only
        as long as no caller mutates it; consider ``states=None`` + guard.
        @param product_ids: Ids of product
        @param states: List of states (of the stock moves to consider)
        @param what: Tuple of directions, subset of ('in', 'out')
        @return: dict {product_id: available qty}
        """
        product_obj = self.pool.get('product.product')
        if context is None:
            context = {}
        context.update({
            'states': states,
            'what': what,
            'location': ids
        })
        return product_obj.get_product_available(cr, uid, product_ids, context=context)
    def _product_get(self, cr, uid, id, product_ids=False, context=None, states=None):
        """
        Quantities for one location only (no children).
        @param product_ids: Ids of product
        @param states: move states to consider (defaults to ['done'])
        @return: dict {product_id: qty}
        """
        if states is None:
            states = ['done']
        ids = id and [id] or []
        return self._product_get_multi_location(cr, uid, ids, product_ids, context=context, states=states)
    def _product_all_get(self, cr, uid, id, product_ids=False, context=None, states=None):
        # Same as _product_get but including all child locations of ``id``.
        if states is None:
            states = ['done']
        # build the list of ids of children of the location given by id
        ids = id and [id] or []
        location_ids = self.search(cr, uid, [('location_id', 'child_of', ids)])
        return self._product_get_multi_location(cr, uid, location_ids, product_ids, context, states)
    def _product_virtual_get(self, cr, uid, id, product_ids=False, context=None, states=None):
        # Virtual (forecast) quantity: counts moves that are not done yet too.
        # NOTE(review): the ``states`` argument is effectively ignored — a
        # hard-coded state list is always passed down.
        if states is None:
            states = ['done']
        return self._product_all_get(cr, uid, id, product_ids, context, ['confirmed', 'waiting', 'assigned', 'done'])
    def _product_reserve(self, cr, uid, ids, product_id, product_qty, context=None, lock=False):
        """
        Attempt to find a quantity ``product_qty`` (in the product's default uom or the uom passed in ``context``) of product ``product_id``
        in locations with id ``ids`` and their child locations. If ``lock`` is True, the stock.move lines
        of product with id ``product_id`` in the searched location will be write-locked using Postgres's
        "FOR UPDATE NOWAIT" option until the transaction is committed or rolled back, to prevent reserving
        twice the same products.
        If ``lock`` is True and the lock cannot be obtained (because another transaction has locked some of
        the same stock.move lines), a log line will be output and False will be returned, as if there was
        not enough stock.

        :param product_id: Id of product to reserve
        :param product_qty: Quantity of product to reserve (in the product's default uom or the uom passed in ``context``)
        :param lock: if True, the stock.move lines of product with id ``product_id`` in all locations (and children locations) with ``ids`` will
                     be write-locked using postgres's "FOR UPDATE NOWAIT" option until the transaction is committed or rolled back. This is
                     to prevent reserving twice the same products.
        :param context: optional context dictionary: if a 'uom' key is present it will be used instead of the default product uom to
                        compute the ``product_qty`` and in the return value.
        :return: List of tuples in the form (qty, location_id) with the (partial) quantities that can be taken in each location to
                 reach the requested product_qty (``qty`` is expressed in the default uom of the product), of False if enough
                 products could not be found, or the lock could not be obtained (and ``lock`` was True).
        """
        result = []
        amount = 0.0
        if context is None:
            context = {}
        uom_obj = self.pool.get('product.uom')
        # Rounding precision used to decide whether a location holds a
        # meaningful (non-zero) quantity.
        uom_rounding = self.pool.get('product.product').browse(cr, uid, product_id, context=context).uom_id.rounding
        if context.get('uom'):
            uom_rounding = uom_obj.browse(cr, uid, context.get('uom'), context=context).rounding
        locations_ids = self.search(cr, uid, [('location_id', 'child_of', ids)])
        if locations_ids:
            # Fetch only the locations in which this product has ever been processed (in or out)
            cr.execute("""SELECT l.id FROM stock_location l WHERE l.id in %s AND
                        EXISTS (SELECT 1 FROM stock_move m WHERE m.product_id = %s
                                AND ((state = 'done' AND m.location_dest_id = l.id)
                                     OR (state in ('done','assigned') AND m.location_id = l.id)))
                       """, (tuple(locations_ids), product_id,))
            locations_ids = [i for (i,) in cr.fetchall()]
        for id in locations_ids:
            if lock:
                try:
                    # Must lock with a separate select query because FOR UPDATE can't be used with
                    # aggregation/group by's (when individual rows aren't identifiable).
                    # We use a SAVEPOINT to be able to rollback this part of the transaction without
                    # failing the whole transaction in case the LOCK cannot be acquired.
                    cr.execute("SAVEPOINT stock_location_product_reserve")
                    cr.execute("""SELECT id FROM stock_move
                                  WHERE product_id=%s AND
                                          (
                                            (location_dest_id=%s AND
                                             location_id<>%s AND
                                             state='done')
                                            OR
                                            (location_id=%s AND
                                             location_dest_id<>%s AND
                                             state in ('done', 'assigned'))
                                          )
                                  FOR UPDATE of stock_move NOWAIT""", (product_id, id, id, id, id), log_exceptions=False)
                except Exception:
                    # Here it's likely that the FOR UPDATE NOWAIT failed to get the LOCK,
                    # so we ROLLBACK to the SAVEPOINT to restore the transaction to its earlier
                    # state, we return False as if the products were not available, and log it:
                    cr.execute("ROLLBACK TO stock_location_product_reserve")
                    _logger.warning("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id)
                    _logger.debug("Trace of the failed product reservation attempt: ", exc_info=True)
                    return False
            # XXX TODO: rewrite this with one single query, possibly even the quantity conversion
            # Incoming done quantities, per UoM...
            cr.execute("""SELECT product_uom, sum(product_qty) AS product_qty
                          FROM stock_move
                          WHERE location_dest_id=%s AND
                                location_id<>%s AND
                                product_id=%s AND
                                state='done'
                          GROUP BY product_uom
                       """,
                       (id, id, product_id))
            results = cr.dictfetchall()
            # ...minus outgoing done/assigned quantities (note the minus sign).
            cr.execute("""SELECT product_uom,-sum(product_qty) AS product_qty
                          FROM stock_move
                          WHERE location_id=%s AND
                                location_dest_id<>%s AND
                                product_id=%s AND
                                state in ('done', 'assigned')
                          GROUP BY product_uom
                       """,
                       (id, id, product_id))
            results += cr.dictfetchall()
            total = 0.0
            results2 = 0.0
            for r in results:
                # convert every per-UoM balance to the target UoM and accumulate
                amount = uom_obj._compute_qty(cr, uid, r['product_uom'], r['product_qty'], context.get('uom', False))
                results2 += amount
                total += amount
            if total <= 0.0:
                continue
            amount = results2
            compare_qty = float_compare(amount, 0, precision_rounding=uom_rounding)
            if compare_qty == 1:
                # take as much as possible from this location, capped at what
                # is still needed and what the location holds
                if amount > min(total, product_qty):
                    amount = min(product_qty, total)
                result.append((amount, id))
                product_qty -= amount
                total -= amount
                if product_qty <= 0.0:
                    # fully reserved
                    return result
                if total <= 0.0:
                    continue
        # not enough stock across all searched locations
        return False
stock_location()
class stock_tracking(osv.osv):
    """Logistic pack (tracking unit), identified by an SSCC-style reference.

    Packs are never deleted (``unlink`` always raises); they can only be
    archived via ``active``.
    """
    _name = "stock.tracking"
    _description = "Packs"
    def checksum(sscc):
        """Compute the modulo-10 check digit of an SSCC serial.

        Digits are weighted alternately 3,1,3,1,... (the salt string
        '31313131313131313'), summed, and the check digit is whatever
        brings the total up to the next multiple of 10.
        """
        salt = '31' * 8 + '3'
        sum = 0
        for sscc_part, salt_part in zip(sscc, salt):
            sum += int(sscc_part) * int(salt_part)
        return (10 - (sum % 10)) % 10
    # Registered the pre-2.4 way so it is usable before the class is built
    # (see the 'name' default below, evaluated at class-definition time).
    checksum = staticmethod(checksum)
    def make_sscc(self, cr, uid, context=None):
        """Draw the next tracking serial from ir.sequence and append its
        check digit. Falls back to the bare sequence if the checksum cannot
        be computed (e.g. non-numeric characters in the sequence)."""
        sequence = self.pool.get('ir.sequence').get(cr, uid, 'stock.lot.tracking')
        try:
            return sequence + str(self.checksum(sequence))
        except Exception:
            return sequence
    _columns = {
        'name': fields.char('Pack Reference', size=64, required=True, select=True, help="By default, the pack reference is generated following the sscc standard. (Serial number + 1 check digit)"),
        'active': fields.boolean('Active', help="By unchecking the active field, you may hide a pack without deleting it."),
        'serial': fields.char('Additional Reference', size=64, select=True, help="Other reference or serial number"),
        'move_ids': fields.one2many('stock.move', 'tracking_id', 'Moves for this pack', readonly=True),
        'date': fields.datetime('Creation Date', required=True),
    }
    _defaults = {
        'active': 1,
        'name': make_sscc,  # auto-generate the SSCC reference on creation
        'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
    }
    def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
        """Search by exact serial first, then by name with the requested operator."""
        if not args:
            args = []
        ids = self.search(cr, user, [('serial', '=', name)]+ args, limit=limit, context=context)
        ids += self.search(cr, user, [('name', operator, name)]+ args, limit=limit, context=context)
        return self.name_get(cr, user, ids, context)
    def name_get(self, cr, uid, ids, context=None):
        """Append the serial to the name, as ``name [serial]`` when a serial is set."""
        if not len(ids):
            return []
        res = [ (r['id'], r['serial'] and '%s [%s]' % (r['name'], r['serial'])
                                     or r['name'] )
                for r in self.read(cr, uid, ids, ['name', 'serial'],
                                   context=context) ]
        return res
    def unlink(self, cr, uid, ids, context=None):
        # Packs must never be removed: deleting would break move traceability.
        raise osv.except_osv(_('Error!'), _('You cannot remove a lot line.'))
    def action_traceability(self, cr, uid, ids, context=None):
        """ It traces the information of a product
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        @return: A dictionary of values (an act_window from action.traceability)
        """
        return self.pool.get('action.traceability').action_traceability(cr,uid,ids,context)
stock_tracking()
#----------------------------------------------------------
# Stock Picking
#----------------------------------------------------------
class stock_picking(osv.osv):
_name = "stock.picking"
_inherit = ['mail.thread']
_description = "Picking List"
_order = "id desc"
def _set_maximum_date(self, cr, uid, ids, name, value, arg, context=None):
""" Calculates planned date if it is greater than 'value'.
@param name: Name of field
@param value: Value of field
@param arg: User defined argument
@return: True or False
"""
if not value:
return False
if isinstance(ids, (int, long)):
ids = [ids]
for pick in self.browse(cr, uid, ids, context=context):
sql_str = """update stock_move set
date_expected='%s'
where
picking_id=%d """ % (value, pick.id)
if pick.max_date:
sql_str += " and (date_expected='" + pick.max_date + "')"
cr.execute(sql_str)
return True
def _set_minimum_date(self, cr, uid, ids, name, value, arg, context=None):
""" Calculates planned date if it is less than 'value'.
@param name: Name of field
@param value: Value of field
@param arg: User defined argument
@return: True or False
"""
if not value:
return False
if isinstance(ids, (int, long)):
ids = [ids]
for pick in self.browse(cr, uid, ids, context=context):
sql_str = """update stock_move set
date_expected='%s'
where
picking_id=%s """ % (value, pick.id)
if pick.min_date:
sql_str += " and (date_expected='" + pick.min_date + "')"
cr.execute(sql_str)
return True
def get_min_max_date(self, cr, uid, ids, field_name, arg, context=None):
""" Finds minimum and maximum dates for picking.
@return: Dictionary of values
"""
res = {}
for id in ids:
res[id] = {'min_date': False, 'max_date': False}
if not ids:
return res
cr.execute("""select
picking_id,
min(date_expected),
max(date_expected)
from
stock_move
where
picking_id IN %s
group by
picking_id""",(tuple(ids),))
for pick, dt1, dt2 in cr.fetchall():
res[pick]['min_date'] = dt1
res[pick]['max_date'] = dt2
return res
def create(self, cr, user, vals, context=None):
if ('name' not in vals) or (vals.get('name')=='/'):
seq_obj_name = self._name
vals['name'] = self.pool.get('ir.sequence').get(cr, user, seq_obj_name)
new_id = super(stock_picking, self).create(cr, user, vals, context)
return new_id
_columns = {
'name': fields.char('Reference', size=64, select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'origin': fields.char('Source Document', size=64, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="Reference of the document", select=True),
'backorder_id': fields.many2one('stock.picking', 'Back Order of', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True),
'type': fields.selection([('out', 'Sending Goods'), ('in', 'Getting Goods'), ('internal', 'Internal')], 'Shipping Type', required=True, select=True, help="Shipping type specify, goods coming in or going out."),
'note': fields.text('Notes', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'stock_journal_id': fields.many2one('stock.journal','Stock Journal', select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'location_id': fields.many2one('stock.location', 'Location', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="Keep empty if you produce at the location where the finished products are needed." \
"Set a location if you produce at a fixed location. This can be a partner location " \
"if you subcontract the manufacturing operations.", select=True),
'location_dest_id': fields.many2one('stock.location', 'Dest. Location', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="Location where the system will stock the finished products.", select=True),
'move_type': fields.selection([('direct', 'Partial'), ('one', 'All at once')], 'Delivery Method', required=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="It specifies goods to be deliver partially or all at once"),
'state': fields.selection([
('draft', 'Draft'),
('cancel', 'Cancelled'),
('auto', 'Waiting Another Operation'),
('confirmed', 'Waiting Availability'),
('assigned', 'Ready to Transfer'),
('done', 'Transferred'),
], 'Status', readonly=True, select=True, track_visibility='onchange', help="""
* Draft: not confirmed yet and will not be scheduled until confirmed\n
* Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
* Waiting Availability: still waiting for the availability of products\n
* Ready to Transfer: products reserved, simply waiting for confirmation.\n
* Transferred: has been processed, can't be modified or cancelled anymore\n
* Cancelled: has been cancelled, can't be confirmed anymore"""
),
'min_date': fields.function(get_min_max_date, fnct_inv=_set_minimum_date, multi="min_max_date",
store=True, type='datetime', string='Scheduled Time', select=1, help="Scheduled time for the shipment to be processed"),
'date': fields.datetime('Creation Date', help="Creation date, usually the time of the order.", select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'date_done': fields.datetime('Date of Transfer', help="Date of Completion", states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'max_date': fields.function(get_min_max_date, fnct_inv=_set_maximum_date, multi="min_max_date",
store=True, type='datetime', string='Max. Expected Date', select=2),
'move_lines': fields.one2many('stock.move', 'picking_id', 'Internal Moves', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}),
'product_id': fields.related('move_lines', 'product_id', type='many2one', relation='product.product', string='Product'),
'auto_picking': fields.boolean('Auto-Picking', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'partner_id': fields.many2one('res.partner', 'Partner', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'invoice_state': fields.selection([
("invoiced", "Invoiced"),
("2binvoiced", "To Be Invoiced"),
("none", "Not Applicable")], "Invoice Control",
select=True, required=True, readonly=True, track_visibility='onchange', states={'draft': [('readonly', False)]}),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
}
_defaults = {
'name': lambda self, cr, uid, context: '/',
'state': 'draft',
'move_type': 'direct',
'type': 'internal',
'invoice_state': 'none',
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.picking', context=c)
}
_sql_constraints = [
('name_uniq', 'unique(name, company_id)', 'Reference must be unique per Company!'),
]
def action_process(self, cr, uid, ids, context=None):
if context is None:
context = {}
"""Open the partial picking wizard"""
context.update({
'active_model': self._name,
'active_ids': ids,
'active_id': len(ids) and ids[0] or False
})
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'stock.partial.picking',
'type': 'ir.actions.act_window',
'target': 'new',
'context': context,
'nodestroy': True,
}
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
default = default.copy()
picking_obj = self.browse(cr, uid, id, context=context)
move_obj = self.pool.get('stock.move')
if ('name' not in default) or (picking_obj.name == '/'):
seq_obj_name = 'stock.picking.' + picking_obj.type
default['name'] = self.pool.get('ir.sequence').get(cr, uid, seq_obj_name)
default['origin'] = ''
default['backorder_id'] = False
if 'invoice_state' not in default and picking_obj.invoice_state == 'invoiced':
default['invoice_state'] = '2binvoiced'
res = super(stock_picking, self).copy(cr, uid, id, default, context)
if res:
picking_obj = self.browse(cr, uid, res, context=context)
for move in picking_obj.move_lines:
move_obj.write(cr, uid, [move.id], {'tracking_id': False, 'prodlot_id': False, 'move_history_ids2': [(6, 0, [])], 'move_history_ids': [(6, 0, [])]})
return res
def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False):
if view_type == 'form' and not view_id:
mod_obj = self.pool.get('ir.model.data')
if self._name == "stock.picking.in":
model, view_id = mod_obj.get_object_reference(cr, uid, 'stock', 'view_picking_in_form')
if self._name == "stock.picking.out":
model, view_id = mod_obj.get_object_reference(cr, uid, 'stock', 'view_picking_out_form')
return super(stock_picking, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
    def onchange_partner_in(self, cr, uid, ids, partner_id=None, context=None):
        """Partner onchange hook; a no-op here (empty onchange result).
        Presumably extended by other modules -- TODO confirm against callers."""
        return {}
    def action_explode(self, cr, uid, moves, context=None):
        """Hook to allow other modules to split the moves of a picking.

        @param moves: list of stock.move ids
        @return: the (possibly expanded) list of move ids; unchanged here
        """
        return moves
def action_confirm(self, cr, uid, ids, context=None):
""" Confirms picking.
@return: True
"""
pickings = self.browse(cr, uid, ids, context=context)
self.write(cr, uid, ids, {'state': 'confirmed'})
todo = []
for picking in pickings:
for r in picking.move_lines:
if r.state == 'draft':
todo.append(r.id)
todo = self.action_explode(cr, uid, todo, context)
if len(todo):
self.pool.get('stock.move').action_confirm(cr, uid, todo, context=context)
return True
    def test_auto_picking(self, cr, uid, ids):
        """Workflow test hook for automatic pickings; always passes here."""
        # TODO: Check locations to see if in the same location ?
        return True
    def action_assign(self, cr, uid, ids, *args):
        """ Changes state of picking to available if all moves are confirmed.

        Draft pickings are first confirmed through the workflow; then all
        'confirmed' moves are reserved.
        @raise osv.except_osv: when a picking has no confirmed move to reserve
        @return: True
        """
        wf_service = netsvc.LocalService("workflow")
        for pick in self.browse(cr, uid, ids):
            if pick.state == 'draft':
                wf_service.trg_validate(uid, 'stock.picking', pick.id, 'button_confirm', cr)
            move_ids = [x.id for x in pick.move_lines if x.state == 'confirmed']
            if not move_ids:
                raise osv.except_osv(_('Warning!'),_('Not enough stock, unable to reserve the products.'))
            self.pool.get('stock.move').action_assign(cr, uid, move_ids)
        return True
def force_assign(self, cr, uid, ids, *args):
""" Changes state of picking to available if moves are confirmed or waiting.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
move_ids = [x.id for x in pick.move_lines if x.state in ['confirmed','waiting']]
self.pool.get('stock.move').force_assign(cr, uid, move_ids)
wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
return True
def draft_force_assign(self, cr, uid, ids, *args):
""" Confirms picking directly from draft state.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
if not pick.move_lines:
raise osv.except_osv(_('Error!'),_('You cannot process picking without stock moves.'))
wf_service.trg_validate(uid, 'stock.picking', pick.id,
'button_confirm', cr)
return True
    def draft_validate(self, cr, uid, ids, context=None):
        """ Validates picking directly from draft state.

        Confirms the pickings, force-assigns every move, then opens the
        partial picking wizard.
        @return: the act_window dict returned by action_process()
                 (the original docstring claimed True, which was wrong)
        """
        wf_service = netsvc.LocalService("workflow")
        self.draft_force_assign(cr, uid, ids)
        for pick in self.browse(cr, uid, ids, context=context):
            move_ids = [x.id for x in pick.move_lines]
            self.pool.get('stock.move').force_assign(cr, uid, move_ids)
            wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
        return self.action_process(
            cr, uid, ids, context=context)
def cancel_assign(self, cr, uid, ids, *args):
""" Cancels picking and moves.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
move_ids = [x.id for x in pick.move_lines]
self.pool.get('stock.move').cancel_assign(cr, uid, move_ids)
wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
return True
    def action_assign_wkf(self, cr, uid, ids, context=None):
        """ Changes picking state to assigned.

        Workflow activity callback; only writes the state field.
        @return: True
        """
        self.write(cr, uid, ids, {'state': 'assigned'})
        return True
    def test_finished(self, cr, uid, ids):
        """ Tests whether the move is in done or cancel state or not.

        Open moves with a non-zero quantity make the test fail; open moves
        with zero quantity are force-marked 'done' as a side effect.
        @return: True or False
        """
        move_ids = self.pool.get('stock.move').search(cr, uid, [('picking_id', 'in', ids)])
        for move in self.pool.get('stock.move').browse(cr, uid, move_ids):
            if move.state not in ('done', 'cancel'):
                if move.product_qty != 0.0:
                    return False
                else:
                    # NOTE(review): write() called on a browse record -- confirm
                    # this ORM version supports browse-record write.
                    move.write({'state': 'done'})
        return True
    def test_assigned(self, cr, uid, ids):
        """ Tests whether the move is in assigned state or not.

        For 'in' pickings with no waiting move the answer is immediately True.
        With move_type 'one' any confirmed/draft move fails the test; with
        'direct' a single assigned move with quantity is enough.
        @return: True or False
        """
        #TOFIX: assignment of move lines should be call before testing assigment otherwise picking never gone in assign state
        ok = True
        for pick in self.browse(cr, uid, ids):
            mt = pick.move_type
            # incomming shipments are always set as available if they aren't chained
            if pick.type == 'in':
                if all([x.state != 'waiting' for x in pick.move_lines]):
                    return True
            for move in pick.move_lines:
                if (move.state in ('confirmed', 'draft')) and (mt == 'one'):
                    return False
                if (mt == 'direct') and (move.state == 'assigned') and (move.product_qty):
                    return True
                # Otherwise every move must be cancel/done/assigned.
                ok = ok and (move.state in ('cancel', 'done', 'assigned'))
        return ok
def action_cancel(self, cr, uid, ids, context=None):
""" Changes picking state to cancel.
@return: True
"""
for pick in self.browse(cr, uid, ids, context=context):
ids2 = [move.id for move in pick.move_lines]
self.pool.get('stock.move').action_cancel(cr, uid, ids2, context)
self.write(cr, uid, ids, {'state': 'cancel', 'invoice_state': 'none'})
return True
    #
    # TODO: rework this -- a move should probably be created when the picking has no parent moves
    #
    def action_done(self, cr, uid, ids, context=None):
        """Changes picking state to done.

        This method is called at the end of the workflow by the activity "done".
        Also stamps date_done with the current server time.
        @return: True
        """
        self.write(cr, uid, ids, {'state': 'done', 'date_done': time.strftime('%Y-%m-%d %H:%M:%S')})
        return True
    def action_move(self, cr, uid, ids, context=None):
        """Process the Stock Moves of the Picking

        This method is called by the workflow by the activity "move".
        Normally that happens when the signal button_done is received (button
        "Done" pressed on a Picking view).
        Draft moves are confirmed first, then all draft/assigned/confirmed
        moves are marked done.
        @return: True
        """
        for pick in self.browse(cr, uid, ids, context=context):
            todo = []
            for move in pick.move_lines:
                if move.state == 'draft':
                    self.pool.get('stock.move').action_confirm(cr, uid, [move.id],
                        context=context)
                    todo.append(move.id)
                elif move.state in ('assigned','confirmed'):
                    todo.append(move.id)
            if len(todo):
                self.pool.get('stock.move').action_done(cr, uid, todo,
                        context=context)
        return True
    def get_currency_id(self, cr, uid, picking):
        """Currency to put on the invoice; False here, meaning the caller's
        default is used. Hook for inheriting modules -- TODO confirm."""
        return False
def _get_partner_to_invoice(self, cr, uid, picking, context=None):
""" Gets the partner that will be invoiced
Note that this function is inherited in the sale and purchase modules
@param picking: object of the picking for which we are selecting the partner to invoice
@return: object of the partner to invoice
"""
return picking.partner_id and picking.partner_id.id
def _get_comment_invoice(self, cr, uid, picking):
"""
@return: comment string for invoice
"""
return picking.note or ''
    def _get_price_unit_invoice(self, cr, uid, move_line, type, context=None):
        """ Gets price unit for invoice
        @param move_line: Stock move lines
        @param type: Type of invoice
        @return: The price unit for the move line
        """
        if context is None:
            context = {}
        if type in ('in_invoice', 'in_refund'):
            # Take the user company and pricetype
            # NOTE: mutates the caller's context dict with the company currency.
            context['currency_id'] = move_line.company_id.currency_id.id
            amount_unit = move_line.product_id.price_get('standard_price', context=context)[move_line.product_id.id]
            return amount_unit
        else:
            # Customer invoices/refunds use the product sale price.
            return move_line.product_id.list_price
    def _get_discount_invoice(self, cr, uid, move_line):
        '''Return the discount for the move line (always 0.0 here; hook for
        inheriting modules).'''
        return 0.0
    def _get_taxes_invoice(self, cr, uid, move_line, type):
        """ Gets taxes on invoice
        @param move_line: Stock move lines
        @param type: Type of invoice
        @return: Taxes Ids for the move line
        """
        if type in ('in_invoice', 'in_refund'):
            # Supplier side: supplier taxes of the product.
            taxes = move_line.product_id.supplier_taxes_id
        else:
            taxes = move_line.product_id.taxes_id
        if move_line.picking_id and move_line.picking_id.partner_id and move_line.picking_id.partner_id.id:
            # Remap the taxes through the partner's fiscal position.
            return self.pool.get('account.fiscal.position').map_tax(
                cr,
                uid,
                move_line.picking_id.partner_id.property_account_position,
                taxes
            )
        else:
            return map(lambda x: x.id, taxes)
    def _get_account_analytic_invoice(self, cr, uid, picking, move_line):
        """Analytic account for the invoice line; none by default (hook)."""
        return False
    def _invoice_line_hook(self, cr, uid, move_line, invoice_line_id):
        '''Call after the creation of the invoice line (no-op hook).'''
        return
    def _invoice_hook(self, cr, uid, picking, invoice_id):
        '''Call after the creation of the invoice (no-op hook).'''
        return
def _get_invoice_type(self, pick):
src_usage = dest_usage = None
inv_type = None
if pick.invoice_state == '2binvoiced':
if pick.move_lines:
src_usage = pick.move_lines[0].location_id.usage
dest_usage = pick.move_lines[0].location_dest_id.usage
if pick.type == 'out' and dest_usage == 'supplier':
inv_type = 'in_refund'
elif pick.type == 'out' and dest_usage == 'customer':
inv_type = 'out_invoice'
elif pick.type == 'in' and src_usage == 'supplier':
inv_type = 'in_invoice'
elif pick.type == 'in' and src_usage == 'customer':
inv_type = 'out_refund'
else:
inv_type = 'out_invoice'
return inv_type
def _prepare_invoice_group(self, cr, uid, picking, partner, invoice, context=None):
""" Builds the dict for grouped invoices
@param picking: picking object
@param partner: object of the partner to invoice (not used here, but may be usefull if this function is inherited)
@param invoice: object of the invoice that we are updating
@return: dict that will be used to update the invoice
"""
comment = self._get_comment_invoice(cr, uid, picking)
return {
'name': (invoice.name or '') + ', ' + (picking.name or ''),
'origin': (invoice.origin or '') + ', ' + (picking.name or '') + (picking.origin and (':' + picking.origin) or ''),
'comment': (comment and (invoice.comment and invoice.comment + "\n" + comment or comment)) or (invoice.comment and invoice.comment or ''),
'date_invoice': context.get('date_inv', False),
'user_id': uid,
}
def _prepare_invoice(self, cr, uid, picking, partner, inv_type, journal_id, context=None):
""" Builds the dict containing the values for the invoice
@param picking: picking object
@param partner: object of the partner to invoice
@param inv_type: type of the invoice ('out_invoice', 'in_invoice', ...)
@param journal_id: ID of the accounting journal
@return: dict that will be used to create the invoice object
"""
if isinstance(partner, int):
partner = self.pool.get('res.partner').browse(cr, uid, partner, context=context)
if inv_type in ('out_invoice', 'out_refund'):
account_id = partner.property_account_receivable.id
payment_term = partner.property_payment_term.id or False
else:
account_id = partner.property_account_payable.id
payment_term = partner.property_supplier_payment_term.id or False
comment = self._get_comment_invoice(cr, uid, picking)
invoice_vals = {
'name': picking.name,
'origin': (picking.name or '') + (picking.origin and (':' + picking.origin) or ''),
'type': inv_type,
'account_id': account_id,
'partner_id': partner.id,
'comment': comment,
'payment_term': payment_term,
'fiscal_position': partner.property_account_position.id,
'date_invoice': context.get('date_inv', False),
'company_id': picking.company_id.id,
'user_id': uid,
}
cur_id = self.get_currency_id(cr, uid, picking)
if cur_id:
invoice_vals['currency_id'] = cur_id
if journal_id:
invoice_vals['journal_id'] = journal_id
return invoice_vals
    def _prepare_invoice_line(self, cr, uid, group, picking, move_line, invoice_id,
        invoice_vals, context=None):
        """ Builds the dict containing the values for the invoice line
        @param group: True or False
        @param picking: picking object
        @param: move_line: move_line object
        @param: invoice_id: ID of the related invoice
        @param: invoice_vals: dict used to created the invoice
        @return: dict that will be used to create the invoice line
        """
        if group:
            name = (picking.name or '') + '-' + move_line.name
        else:
            name = move_line.name
        origin = move_line.picking_id.name or ''
        if move_line.picking_id.origin:
            origin += ':' + move_line.picking_id.origin
        # Income account for customer invoices, expense account otherwise;
        # fall back to the product category account when the product has none.
        if invoice_vals['type'] in ('out_invoice', 'out_refund'):
            account_id = move_line.product_id.property_account_income.id
            if not account_id:
                account_id = move_line.product_id.categ_id.\
                        property_account_income_categ.id
        else:
            account_id = move_line.product_id.property_account_expense.id
            if not account_id:
                account_id = move_line.product_id.categ_id.\
                        property_account_expense_categ.id
        if invoice_vals['fiscal_position']:
            # Remap the account through the invoice's fiscal position.
            fp_obj = self.pool.get('account.fiscal.position')
            fiscal_position = fp_obj.browse(cr, uid, invoice_vals['fiscal_position'], context=context)
            account_id = fp_obj.map_account(cr, uid, fiscal_position, account_id)
        # set UoS if it's a sale and the picking doesn't have one
        uos_id = move_line.product_uos and move_line.product_uos.id or False
        if not uos_id and invoice_vals['type'] in ('out_invoice', 'out_refund'):
            uos_id = move_line.product_uom.id
        return {
            'name': name,
            'origin': origin,
            'invoice_id': invoice_id,
            'uos_id': uos_id,
            'product_id': move_line.product_id.id,
            'account_id': account_id,
            'price_unit': self._get_price_unit_invoice(cr, uid, move_line, invoice_vals['type']),
            'discount': self._get_discount_invoice(cr, uid, move_line),
            'quantity': move_line.product_uos_qty or move_line.product_qty,
            'invoice_line_tax_id': [(6, 0, self._get_taxes_invoice(cr, uid, move_line, invoice_vals['type']))],
            'account_analytic_id': self._get_account_analytic_invoice(cr, uid, picking, move_line),
        }
    def action_invoice_create(self, cr, uid, ids, journal_id=False,
            group=False, type='out_invoice', context=None):
        """ Creates invoice based on the invoice state selected for picking.
        @param journal_id: Id of journal
        @param group: Whether to create a group invoice or not
        @param type: Type invoice to be created
        @return: Ids of created invoices for the pickings
        """
        if context is None:
            context = {}

        invoice_obj = self.pool.get('account.invoice')
        invoice_line_obj = self.pool.get('account.invoice.line')
        partner_obj = self.pool.get('res.partner')
        invoices_group = {}
        res = {}
        inv_type = type
        for picking in self.browse(cr, uid, ids, context=context):
            # Only pickings flagged "to be invoiced" are processed.
            if picking.invoice_state != '2binvoiced':
                continue
            partner = self._get_partner_to_invoice(cr, uid, picking, context=context)
            if isinstance(partner, int):
                partner = partner_obj.browse(cr, uid, [partner], context=context)[0]
            if not partner:
                raise osv.except_osv(_('Error, no partner!'),
                    _('Please put a partner on the picking list if you want to generate invoice.'))

            if not inv_type:
                inv_type = self._get_invoice_type(picking)

            if group and partner.id in invoices_group:
                # Grouped mode: append this picking to the partner's invoice.
                invoice_id = invoices_group[partner.id]
                invoice = invoice_obj.browse(cr, uid, invoice_id)
                invoice_vals_group = self._prepare_invoice_group(cr, uid, picking, partner, invoice, context=context)
                invoice_obj.write(cr, uid, [invoice_id], invoice_vals_group, context=context)
            else:
                invoice_vals = self._prepare_invoice(cr, uid, picking, partner, inv_type, journal_id, context=context)
                invoice_id = invoice_obj.create(cr, uid, invoice_vals, context=context)
                invoices_group[partner.id] = invoice_id
            res[picking.id] = invoice_id
            for move_line in picking.move_lines:
                if move_line.state == 'cancel':
                    continue
                if move_line.scrapped:
                    # do no invoice scrapped products
                    continue
                vals = self._prepare_invoice_line(cr, uid, group, picking, move_line,
                                invoice_id, invoice_vals, context=context)
                if vals:
                    invoice_line_id = invoice_line_obj.create(cr, uid, vals, context=context)
                    self._invoice_line_hook(cr, uid, move_line, invoice_line_id)

            invoice_obj.button_compute(cr, uid, [invoice_id], context=context,
                    set_total=(inv_type in ('in_invoice', 'in_refund')))
            # NOTE(review): this per-picking write is redundant with the bulk
            # write on res.keys() below -- same value written twice.
            self.write(cr, uid, [picking.id], {
                'invoice_state': 'invoiced',
                }, context=context)
            self._invoice_hook(cr, uid, picking, invoice_id)
        self.write(cr, uid, res.keys(), {
            'invoice_state': 'invoiced',
            }, context=context)
        return res
    def test_done(self, cr, uid, ids, context=None):
        """ Test whether the move lines are done or not.

        Pickings without moves count as done; any open (non cancel/done)
        move makes the result False; otherwise at least one move must be
        'done' (all-cancelled pickings return False).
        @return: True or False
        """
        ok = False
        for pick in self.browse(cr, uid, ids, context=context):
            if not pick.move_lines:
                return True
            for move in pick.move_lines:
                if move.state not in ('cancel','done'):
                    return False
                if move.state=='done':
                    ok = True
        return ok
def test_cancel(self, cr, uid, ids, context=None):
""" Test whether the move lines are canceled or not.
@return: True or False
"""
for pick in self.browse(cr, uid, ids, context=context):
for move in pick.move_lines:
if move.state not in ('cancel',):
return False
return True
    def allow_cancel(self, cr, uid, ids, context=None):
        """Check whether the pickings may be cancelled.

        @raise osv.except_osv: if any move is already done
        @return: True when cancellation is allowed
        """
        for pick in self.browse(cr, uid, ids, context=context):
            if not pick.move_lines:
                return True
            for move in pick.move_lines:
                if move.state == 'done':
                    raise osv.except_osv(_('Error!'), _('You cannot cancel the picking as some moves have been done. You should cancel the picking lines.'))
        return True
    def unlink(self, cr, uid, ids, context=None):
        """Delete pickings after cancelling/removing their moves.

        @raise osv.except_osv: if a picking is in the done or cancel state
        """
        move_obj = self.pool.get('stock.move')
        if context is None:
            context = {}
        for pick in self.browse(cr, uid, ids, context=context):
            if pick.state in ['done','cancel']:
                raise osv.except_osv(_('Error!'), _('You cannot remove the picking which is in %s state!')%(pick.state,))
            else:
                ids2 = [move.id for move in pick.move_lines]
                ctx = context.copy()
                # Flag consumed by stock.move.unlink to allow the deletion.
                ctx.update({'call_unlink':True})
                if pick.state != 'draft':
                    #Cancelling the move in order to affect Virtual stock of product
                    move_obj.action_cancel(cr, uid, ids2, ctx)
                #Removing the move
                move_obj.unlink(cr, uid, ids2, ctx)
        return super(stock_picking, self).unlink(cr, uid, ids, context=context)
    # FIXME: needs refactoring, this code is partially duplicated in stock_move.do_partial()!
    def do_partial(self, cr, uid, ids, partial_datas, context=None):
        """ Makes partial picking and moves done.

        For each picking, moves are classified against the wizard quantities
        into complete / too_few / too_many; too_few moves are split onto a
        backorder, average cost is recomputed for incoming moves, and the
        processed picking is pushed through the workflow to done.
        @param partial_datas : Dictionary containing details of partial picking
                          like partner_id, partner_id, delivery_date,
                          delivery moves with product_id, product_qty, uom
        @return: Dictionary of values
        """
        if context is None:
            context = {}
        else:
            context = dict(context)
        res = {}
        move_obj = self.pool.get('stock.move')
        product_obj = self.pool.get('product.product')
        currency_obj = self.pool.get('res.currency')
        uom_obj = self.pool.get('product.uom')
        sequence_obj = self.pool.get('ir.sequence')
        wf_service = netsvc.LocalService("workflow")
        for pick in self.browse(cr, uid, ids, context=context):
            new_picking = None
            complete, too_many, too_few = [], [], []
            move_product_qty, prodlot_ids, product_avail, partial_qty, product_uoms = {}, {}, {}, {}, {}
            for move in pick.move_lines:
                if move.state in ('done', 'cancel'):
                    continue
                # Wizard data for this move is keyed 'move<id>'.
                partial_data = partial_datas.get('move%s'%(move.id), {})
                product_qty = partial_data.get('product_qty',0.0)
                move_product_qty[move.id] = product_qty
                product_uom = partial_data.get('product_uom',False)
                product_price = partial_data.get('product_price',0.0)
                product_currency = partial_data.get('product_currency',False)
                prodlot_id = partial_data.get('prodlot_id')
                prodlot_ids[move.id] = prodlot_id
                product_uoms[move.id] = product_uom
                partial_qty[move.id] = uom_obj._compute_qty(cr, uid, product_uoms[move.id], product_qty, move.product_uom.id)
                if move.product_qty == partial_qty[move.id]:
                    complete.append(move)
                elif move.product_qty > partial_qty[move.id]:
                    too_few.append(move)
                else:
                    too_many.append(move)

                # Average price computation
                if (pick.type == 'in') and (move.product_id.cost_method == 'average'):
                    product = product_obj.browse(cr, uid, move.product_id.id)
                    move_currency_id = move.company_id.currency_id.id
                    context['currency_id'] = move_currency_id
                    qty = uom_obj._compute_qty(cr, uid, product_uom, product_qty, product.uom_id.id)

                    if product.id not in product_avail:
                        # keep track of stock on hand including processed lines not yet marked as done
                        product_avail[product.id] = product.qty_available

                    if qty > 0:
                        new_price = currency_obj.compute(cr, uid, product_currency,
                                move_currency_id, product_price, round=False)
                        new_price = uom_obj._compute_price(cr, uid, product_uom, new_price,
                                product.uom_id.id)
                        if product_avail[product.id] <= 0:
                            product_avail[product.id] = 0
                            new_std_price = new_price
                        else:
                            # Get the standard price
                            amount_unit = product.price_get('standard_price', context=context)[product.id]
                            # Weighted average of on-hand stock and incoming quantity.
                            new_std_price = ((amount_unit * product_avail[product.id])\
                                + (new_price * qty))/(product_avail[product.id] + qty)
                        # Write the field according to price type field
                        product_obj.write(cr, uid, [product.id], {'standard_price': new_std_price})

                        # Record the values that were chosen in the wizard, so they can be
                        # used for inventory valuation if real-time valuation is enabled.
                        move_obj.write(cr, uid, [move.id],
                                {'price_unit': product_price,
                                 'price_currency_id': product_currency})

                        product_avail[product.id] += qty

            # Partially processed moves: split onto a backorder picking.
            for move in too_few:
                product_qty = move_product_qty[move.id]
                if not new_picking:
                    # The processed picking keeps its name; the remainder gets a new one.
                    new_picking_name = pick.name
                    self.write(cr, uid, [pick.id],
                               {'name': sequence_obj.get(cr, uid,
                                            'stock.picking.%s'%(pick.type)),
                               })
                    new_picking = self.copy(cr, uid, pick.id,
                            {
                                'name': new_picking_name,
                                'move_lines' : [],
                                'state':'draft',
                            })
                if product_qty != 0:
                    defaults = {
                            'product_qty' : product_qty,
                            'product_uos_qty': product_qty, #TODO: put correct uos_qty
                            'picking_id' : new_picking,
                            'state': 'assigned',
                            'move_dest_id': False,
                            'price_unit': move.price_unit,
                            'product_uom': product_uoms[move.id]
                    }
                    prodlot_id = prodlot_ids[move.id]
                    if prodlot_id:
                        defaults.update(prodlot_id=prodlot_id)
                    move_obj.copy(cr, uid, move.id, defaults)
                move_obj.write(cr, uid, [move.id],
                        {
                            'product_qty': move.product_qty - partial_qty[move.id],
                            'product_uos_qty': move.product_qty - partial_qty[move.id], #TODO: put correct uos_qty
                            'prodlot_id': False,
                            'tracking_id': False,
                        })

            if new_picking:
                move_obj.write(cr, uid, [c.id for c in complete], {'picking_id': new_picking})
            for move in complete:
                defaults = {'product_uom': product_uoms[move.id], 'product_qty': move_product_qty[move.id]}
                if prodlot_ids.get(move.id):
                    defaults.update({'prodlot_id': prodlot_ids[move.id]})
                move_obj.write(cr, uid, [move.id], defaults)
            # Over-processed moves: just bump the quantities in place.
            for move in too_many:
                product_qty = move_product_qty[move.id]
                defaults = {
                    'product_qty' : product_qty,
                    'product_uos_qty': product_qty, #TODO: put correct uos_qty
                    'product_uom': product_uoms[move.id]
                }
                prodlot_id = prodlot_ids.get(move.id)
                if prodlot_ids.get(move.id):
                    defaults.update(prodlot_id=prodlot_id)
                if new_picking:
                    defaults.update(picking_id=new_picking)
                move_obj.write(cr, uid, [move.id], defaults)

            # At first we confirm the new picking (if necessary)
            if new_picking:
                wf_service.trg_validate(uid, 'stock.picking', new_picking, 'button_confirm', cr)
                # Then we finish the good picking
                self.write(cr, uid, [pick.id], {'backorder_id': new_picking})
                self.action_move(cr, uid, [new_picking], context=context)
                wf_service.trg_validate(uid, 'stock.picking', new_picking, 'button_done', cr)
                wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
                delivered_pack_id = pick.id
                back_order_name = self.browse(cr, uid, delivered_pack_id, context=context).name
                self.message_post(cr, uid, new_picking, body=_("Back order <em>%s</em> has been <b>created</b>.") % (back_order_name), context=context)
            else:
                self.action_move(cr, uid, [pick.id], context=context)
                wf_service.trg_validate(uid, 'stock.picking', pick.id, 'button_done', cr)
                delivered_pack_id = pick.id

            delivered_pack = self.browse(cr, uid, delivered_pack_id, context=context)
            res[pick.id] = {'delivered_picking': delivered_pack.id or False}

        return res
    # views associated to each picking type
    _VIEW_LIST = {
        'out': 'view_picking_out_form',
        'in': 'view_picking_in_form',
        'internal': 'view_picking_form',
    }
    def _get_view_id(self, cr, uid, type):
        """Get the view id suiting the given type
        @param type: the picking type as a string
        @return: view id, or False if no view found
        """
        res = self.pool.get('ir.model.data').get_object_reference(cr, uid,
            'stock', self._VIEW_LIST.get(type, 'view_picking_form'))
        return res and res[1] or False
class stock_production_lot(osv.osv):

    def name_get(self, cr, uid, ids, context=None):
        """Display name: 'PREFIX/SERIAL [INT_REF]' (prefix/ref only if set)."""
        if not ids:
            return []
        reads = self.read(cr, uid, ids, ['name', 'prefix', 'ref'], context)
        res = []
        for record in reads:
            name = record['name']
            prefix = record['prefix']
            if prefix:
                name = prefix + '/' + name
            if record['ref']:
                name = '%s [%s]' % (name, record['ref'])
            res.append((record['id'], name))
        return res

    def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100):
        """Search serial numbers: first an exact prefix match, then fall back
        to the requested operator on the name field."""
        args = args or []
        ids = []
        if name:
            ids = self.search(cr, uid, [('prefix', '=', name)] + args, limit=limit, context=context)
            if not ids:
                ids = self.search(cr, uid, [('name', operator, name)] + args, limit=limit, context=context)
        else:
            ids = self.search(cr, uid, args, limit=limit, context=context)
        return self.name_get(cr, uid, ids, context)

    _name = 'stock.production.lot'
    _description = 'Serial Number'

    def _get_stock(self, cr, uid, ids, field_name, arg, context=None):
        """ Gets stock of products for locations
        Sums qty per lot from stock_report_prodlots, restricted to the
        context location (or all internal locations).
        @return: Dictionary of values
        """
        if context is None:
            context = {}
        if 'location_id' not in context:
            locations = self.pool.get('stock.location').search(cr, uid, [('usage', '=', 'internal')], context=context)
        else:
            locations = context['location_id'] and [context['location_id']] or []

        if isinstance(ids, (int, long)):
            ids = [ids]

        res = {}.fromkeys(ids, 0.0)
        if locations:
            # Parameterized query -- safe against injection.
            cr.execute('''select
                prodlot_id,
                sum(qty)
            from
                stock_report_prodlots
            where
                location_id IN %s and prodlot_id IN %s group by prodlot_id''',(tuple(locations),tuple(ids),))
            res.update(dict(cr.fetchall()))

        return res

    def _stock_search(self, cr, uid, obj, name, args, context=None):
        """ Searches Ids of products
        @return: Ids of locations
        """
        locations = self.pool.get('stock.location').search(cr, uid, [('usage', '=', 'internal')])
        # NOTE(review/security): the HAVING clause is built by string
        # concatenation from args[0][1] (operator) and args[0][2] (value).
        # If these can carry user-controlled text this is SQL injection;
        # should be validated/parameterized.
        cr.execute('''select
            prodlot_id,
            sum(qty)
        from
            stock_report_prodlots
        where
            location_id IN %s group by prodlot_id
            having  sum(qty) '''+ str(args[0][1]) + str(args[0][2]),(tuple(locations),))
        res = cr.fetchall()
        ids = [('id', 'in', map(lambda x: x[0], res))]
        return ids

    _columns = {
        'name': fields.char('Serial Number', size=64, required=True, help="Unique Serial Number, will be displayed as: PREFIX/SERIAL [INT_REF]"),
        'ref': fields.char('Internal Reference', size=256, help="Internal reference number in case it differs from the manufacturer's serial number"),
        'prefix': fields.char('Prefix', size=64, help="Optional prefix to prepend when displaying this serial number: PREFIX/SERIAL [INT_REF]"),
        'product_id': fields.many2one('product.product', 'Product', required=True, domain=[('type', '<>', 'service')]),
        'date': fields.datetime('Creation Date', required=True),
        'stock_available': fields.function(_get_stock, fnct_search=_stock_search, type="float", string="Available", select=True,
            help="Current quantity of products with this Serial Number available in company warehouses",
            digits_compute=dp.get_precision('Product Unit of Measure')),
        'revisions': fields.one2many('stock.production.lot.revision', 'lot_id', 'Revisions'),
        'company_id': fields.many2one('res.company', 'Company', select=True),
        'move_ids': fields.one2many('stock.move', 'prodlot_id', 'Moves for this serial number', readonly=True),
    }
    _defaults = {
        'date':  lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
        'name': lambda x, y, z, c: x.pool.get('ir.sequence').get(y, z, 'stock.lot.serial'),
        'product_id': lambda x, y, z, c: c.get('product_id', False),
    }
    _sql_constraints = [
        ('name_ref_uniq', 'unique (name, ref)', 'The combination of Serial Number and internal reference must be unique !'),
    ]

    def action_traceability(self, cr, uid, ids, context=None):
        """ It traces the information of a product
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        @return: A dictionary of values
        """
        value=self.pool.get('action.traceability').action_traceability(cr,uid,ids,context)
        return value

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a lot with a fresh creation date and no moves attached."""
        context = context or {}
        default = default and default.copy() or {}
        default.update(date=time.strftime('%Y-%m-%d %H:%M:%S'), move_ids=[])
        return super(stock_production_lot, self).copy(cr, uid, id, default=default, context=context)

stock_production_lot()
class stock_production_lot_revision(osv.osv):
    # Revision history attached to a serial number (stock.production.lot).
    _name = 'stock.production.lot.revision'
    _description = 'Serial Number Revision'

    _columns = {
        'name': fields.char('Revision Name', size=64, required=True),
        'description': fields.text('Description'),
        'date': fields.date('Revision Date'),
        'indice': fields.char('Revision Number', size=16),
        'author_id': fields.many2one('res.users', 'Author'),
        'lot_id': fields.many2one('stock.production.lot', 'Serial Number', select=True, ondelete='cascade'),
        'company_id': fields.related('lot_id','company_id',type='many2one',relation='res.company',string='Company', store=True, readonly=True),
    }

    _defaults = {
        # Default author is the current user (third positional arg is uid).
        'author_id': lambda x, y, z, c: z,
        'date': fields.date.context_today,
    }

stock_production_lot_revision()
# ----------------------------------------------------
# Move
# ----------------------------------------------------
#
# Fields:
#   location_dest_id is only used for predicting future stocks
#
class stock_move(osv.osv):
    def _getSSCC(self, cr, uid, context=None):
        """Default pack: id of the most recent stock.tracking record created
        by the current user, or False when none exists."""
        cr.execute('select id from stock_tracking where create_uid=%s order by id desc limit 1', (uid,))
        res = cr.fetchone()
        return (res and res[0]) or False
_name = "stock.move"
_description = "Stock Move"
_order = 'date_expected desc, id'
_log_create = False
    def action_partial_move(self, cr, uid, ids, context=None):
        """Open the 'stock.partial.move' wizard on the given moves.

        @return: act_window dict showing the wizard form in a new window
        """
        if context is None: context = {}
        if context.get('active_model') != self._name:
            # Make the selection visible to the wizard's default_get.
            context.update(active_ids=ids, active_model=self._name)
        partial_id = self.pool.get("stock.partial.move").create(
            cr, uid, {}, context=context)
        return {
            'name':_("Products to Process"),
            'view_mode': 'form',
            'view_id': False,
            'view_type': 'form',
            'res_model': 'stock.partial.move',
            'res_id': partial_id,
            'type': 'ir.actions.act_window',
            'nodestroy': True,
            'target': 'new',
            'domain': '[]',
            'context': context
        }
    def name_get(self, cr, uid, ids, context=None):
        """Display name: '[product_code: ][picking_origin/ ]src_loc > dest_loc'."""
        res = []
        for line in self.browse(cr, uid, ids, context=context):
            name = line.location_id.name+' > '+line.location_dest_id.name
            # optional prefixes
            if line.product_id.code:
                name = line.product_id.code + ': ' + name
            if line.picking_id.origin:
                name = line.picking_id.origin + '/ ' + name
            res.append((line.id, name))
        return res
    def _check_tracking(self, cr, uid, ids, context=None):
        """ Checks if serial number is assigned to stock move or not.

        A done move fails the check when its product requires tracking for
        the move's source/destination usage but no serial number is set.
        @return: True or False
        """
        for move in self.browse(cr, uid, ids, context=context):
            if not move.prodlot_id and \
               (move.state == 'done' and \
               ( \
                   (move.product_id.track_production and move.location_id.usage == 'production') or \
                   (move.product_id.track_production and move.location_dest_id.usage == 'production') or \
                   (move.product_id.track_incoming and move.location_id.usage == 'supplier') or \
                   (move.product_id.track_outgoing and move.location_dest_id.usage == 'customer') or \
                   (move.product_id.track_incoming and move.location_id.usage == 'inventory') \
               )):
                return False
        return True
def _check_product_lot(self, cr, uid, ids, context=None):
    """ Checks whether move is done or not and production lot is assigned to that move.
    @return: True or False
    """
    for move in self.browse(cr, uid, ids, context=context):
        # A done move carrying a lot must carry a lot of its own product.
        if not (move.prodlot_id and move.state == 'done'):
            continue
        if move.prodlot_id.product_id.id != move.product_id.id:
            return False
    return True
# Field definitions for stock.move. Most user-editable fields become
# read-only once the move reaches the 'done' state.
_columns = {
    'name': fields.char('Description', required=True, select=True),
    'priority': fields.selection([('0', 'Not urgent'), ('1', 'Urgent')], 'Priority'),
    'create_date': fields.datetime('Creation Date', readonly=True, select=True),
    'date': fields.datetime('Date', required=True, select=True, help="Move date: scheduled date until move is done, then date of actual move processing", states={'done': [('readonly', True)]}),
    'date_expected': fields.datetime('Scheduled Date', states={'done': [('readonly', True)]},required=True, select=True, help="Scheduled date for the processing of this move"),
    'product_id': fields.many2one('product.product', 'Product', required=True, select=True, domain=[('type','<>','service')],states={'done': [('readonly', True)]}),
    'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'),
        required=True,states={'done': [('readonly', True)]},
        help="This is the quantity of products from an inventory "
            "point of view. For moves in the state 'done', this is the "
            "quantity of products that were actually moved. For other "
            "moves, this is the quantity of product that is planned to "
            "be moved. Lowering this quantity does not generate a "
            "backorder. Changing this quantity on assigned moves affects "
            "the product reservation, and should be done with care."
    ),
    'product_uom': fields.many2one('product.uom', 'Unit of Measure', required=True,states={'done': [('readonly', True)]}),
    'product_uos_qty': fields.float('Quantity (UOS)', digits_compute=dp.get_precision('Product Unit of Measure'), states={'done': [('readonly', True)]}),
    'product_uos': fields.many2one('product.uom', 'Product UOS', states={'done': [('readonly', True)]}),
    'product_packaging': fields.many2one('product.packaging', 'Packaging', help="It specifies attributes of packaging like type, quantity of packaging,etc."),
    'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True,states={'done': [('readonly', True)]}, help="Sets a location if you produce at a fixed location. This can be a partner location if you subcontract the manufacturing operations."),
    'location_dest_id': fields.many2one('stock.location', 'Destination Location', required=True,states={'done': [('readonly', True)]}, select=True, help="Location where the system will stock the finished products."),
    'partner_id': fields.many2one('res.partner', 'Destination Address ', states={'done': [('readonly', True)]}, help="Optional address where goods are to be delivered, specifically used for allotment"),
    # Traceability: production lot (serial number) and logistic pack.
    'prodlot_id': fields.many2one('stock.production.lot', 'Serial Number', states={'done': [('readonly', True)]}, help="Serial number is used to put a serial number on the production", select=True),
    'tracking_id': fields.many2one('stock.tracking', 'Pack', select=True, states={'done': [('readonly', True)]}, help="Logistical shipping unit: pallet, box, pack ..."),
    'auto_validate': fields.boolean('Auto Validate'),
    # Chained-move links: move_dest_id is the next move in the chain,
    # move_history_ids/-2 record child/parent relations.
    'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Optional: next stock move when chaining them", select=True),
    'move_history_ids': fields.many2many('stock.move', 'stock_move_history_ids', 'parent_id', 'child_id', 'Move History (child moves)'),
    'move_history_ids2': fields.many2many('stock.move', 'stock_move_history_ids', 'child_id', 'parent_id', 'Move History (parent moves)'),
    'picking_id': fields.many2one('stock.picking', 'Reference', select=True,states={'done': [('readonly', True)]}),
    'note': fields.text('Notes'),
    'state': fields.selection([('draft', 'New'),
                               ('cancel', 'Cancelled'),
                               ('waiting', 'Waiting Another Move'),
                               ('confirmed', 'Waiting Availability'),
                               ('assigned', 'Available'),
                               ('done', 'Done'),
                               ], 'Status', readonly=True, select=True,
             help= "* New: When the stock move is created and not yet confirmed.\n"\
                   "* Waiting Another Move: This state can be seen when a move is waiting for another one, for example in a chained flow.\n"\
                   "* Waiting Availability: This state is reached when the procurement resolution is not straight forward. It may need the scheduler to run, a component to me manufactured...\n"\
                   "* Available: When products are reserved, it is set to \'Available\'.\n"\
                   "* Done: When the shipment is processed, the state is \'Done\'."),
    # Technical fields recorded at picking confirmation for average-cost
    # valuation.
    'price_unit': fields.float('Unit Price', digits_compute= dp.get_precision('Product Price'), help="Technical field used to record the product cost set by the user during a picking confirmation (when average price costing method is used)"),
    'price_currency_id': fields.many2one('res.currency', 'Currency for average price', help="Technical field used to record the currency chosen by the user during a picking confirmation (when average price costing method is used)"),
    'company_id': fields.many2one('res.company', 'Company', required=True, select=True),
    'backorder_id': fields.related('picking_id','backorder_id',type='many2one', relation="stock.picking", string="Back Order of", select=True),
    'origin': fields.related('picking_id','origin',type='char', size=64, relation="stock.picking", string="Source", store=True),
    # used for colors in tree views:
    'scrapped': fields.related('location_dest_id','scrap_location',type='boolean',relation='stock.location',string='Scrapped', readonly=True),
    'type': fields.related('picking_id', 'type', type='selection', selection=[('out', 'Sending Goods'), ('in', 'Getting Goods'), ('internal', 'Internal')], string='Shipping Type'),
}
def _check_location(self, cr, uid, ids, context=None):
    """Reject done moves whose source or destination is a 'view' location."""
    for move in self.browse(cr, uid, ids, context=context):
        if move.state != 'done':
            continue
        if move.location_id.usage == 'view':
            raise osv.except_osv(_('Error'), _('You cannot move product %s from a location of type view %s.')% (move.product_id.name, move.location_id.name))
        if move.location_dest_id.usage == 'view':
            raise osv.except_osv(_('Error'), _('You cannot move product %s to a location of type view %s.')% (move.product_id.name, move.location_dest_id.name))
    return True
# Model-level constraints: (checker method, error message, fields that
# trigger re-validation when written).
_constraints = [
    (_check_tracking,
        'You must assign a serial number for this product.',
        ['prodlot_id']),
    (_check_location, 'You cannot move products from or to a location of the type view.',
        ['location_id','location_dest_id']),
    (_check_product_lot,
        'You try to assign a lot which is not from the same product.',
        ['prodlot_id'])]
def _default_location_destination(self, cr, uid, context=None):
    """ Gets default address of partner for destination location
    @return: Address id or False
    """
    # Fix: guard against a missing context BEFORE reading from it.
    # The previous code called context.get('picking_type') first and
    # crashed with AttributeError when context was None.
    if context is None:
        context = {}
    mod_obj = self.pool.get('ir.model.data')
    picking_type = context.get('picking_type')
    location_id = False
    if context.get('move_line', []):
        if context['move_line'][0]:
            if isinstance(context['move_line'][0], (tuple, list)):
                # one2many command tuple (op, id, values): read the values dict
                location_id = context['move_line'][0][2] and context['move_line'][0][2].get('location_dest_id',False)
            else:
                # plain id of an existing move: read its destination location
                move_list = self.pool.get('stock.move').read(cr, uid, context['move_line'][0], ['location_dest_id'])
                location_id = move_list and move_list['location_dest_id'][0] or False
    elif context.get('address_out_id', False):
        # Delivery address supplied: default to that partner's customer location.
        property_out = self.pool.get('res.partner').browse(cr, uid, context['address_out_id'], context).property_stock_customer
        location_id = property_out and property_out.id or False
    else:
        # Fall back on the module's standard locations, if the user may read them.
        location_xml_id = False
        if picking_type in ('in', 'internal'):
            location_xml_id = 'stock_location_stock'
        elif picking_type == 'out':
            location_xml_id = 'stock_location_customers'
        if location_xml_id:
            try:
                location_model, location_id = mod_obj.get_object_reference(cr, uid, 'stock', location_xml_id)
                with tools.mute_logger('openerp.osv.orm'):
                    self.pool.get('stock.location').check_access_rule(cr, uid, [location_id], 'read', context=context)
            except (orm.except_orm, ValueError):
                location_id = False
    return location_id
def _default_location_source(self, cr, uid, context=None):
    """ Gets default address of partner for source location
    @return: Address id or False
    """
    # Fix: guard against a missing context BEFORE reading from it; the
    # previous code dereferenced context (context.get('picking_type'))
    # ahead of the None check and crashed with AttributeError.
    if context is None:
        context = {}
    mod_obj = self.pool.get('ir.model.data')
    picking_type = context.get('picking_type')
    location_id = False
    if context.get('move_line', []):
        try:
            location_id = context['move_line'][0][2]['location_id']
        # Narrowed from a bare `except:`: move_line may hold a bare id
        # instead of a command tuple, or the values dict may lack
        # 'location_id' -- fall through to the other defaults.
        except (KeyError, IndexError, TypeError):
            pass
    elif context.get('address_in_id', False):
        # Supplier address supplied: default to that partner's supplier location.
        part_obj_add = self.pool.get('res.partner').browse(cr, uid, context['address_in_id'], context=context)
        if part_obj_add:
            location_id = part_obj_add.property_stock_supplier.id
    else:
        # Fall back on the module's standard locations, if the user may read them.
        location_xml_id = False
        if picking_type == 'in':
            location_xml_id = 'stock_location_suppliers'
        elif picking_type in ('out', 'internal'):
            location_xml_id = 'stock_location_stock'
        if location_xml_id:
            try:
                location_model, location_id = mod_obj.get_object_reference(cr, uid, 'stock', location_xml_id)
                with tools.mute_logger('openerp.osv.orm'):
                    self.pool.get('stock.location').check_access_rule(cr, uid, [location_id], 'read', context=context)
            except (orm.except_orm, ValueError):
                location_id = False
    return location_id
def _default_destination_address(self, cr, uid, context=None):
    """Default destination partner: the current user's company partner."""
    users_obj = self.pool.get('res.users')
    current_user = users_obj.browse(cr, uid, uid, context=context)
    return current_user.company_id.partner_id.id
def _default_move_type(self, cr, uid, context=None):
""" Gets default type of move
@return: type
"""
if context is None:
context = {}
picking_type = context.get('picking_type')
type = 'internal'
if picking_type == 'in':
type = 'in'
elif picking_type == 'out':
type = 'out'
return type
# Default values for new stock moves. Location/type defaults are
# context-sensitive (see the _default_* methods above).
_defaults = {
    'location_id': _default_location_source,
    'location_dest_id': _default_location_destination,
    'partner_id': _default_destination_address,
    'type': _default_move_type,
    'state': 'draft',
    'priority': '1',
    'product_qty': 1.0,
    'scrapped' : False,
    # Both dates default to "now" at record creation time.
    'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
    'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.move', context=c),
    'date_expected': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
}
def write(self, cr, uid, ids, vals, context=None):
    """Forbid non-admin users (uid != 1) from changing quantity, UoM,
    product or location fields on moves already in the 'done' state."""
    if isinstance(ids, (int, long)):
        ids = [ids]
    if uid != 1:
        protected = set(['product_qty', 'product_uom', 'product_uos_qty', 'product_uos', 'location_id', 'location_dest_id', 'product_id'])
        for move in self.browse(cr, uid, ids, context=context):
            if move.state == 'done' and protected.intersection(vals):
                raise osv.except_osv(_('Operation Forbidden!'),
                    _('Quantities, Units of Measure, Products and Locations cannot be modified on stock moves that have already been processed (except by the Administrator).'))
    return super(stock_move, self).write(cr, uid, ids, vals, context=context)
def copy(self, cr, uid, id, default=None, context=None):
    """Duplicate a move; the copy never inherits parent/child history links."""
    # Work on a copy so the caller's dict is never mutated.
    new_defaults = dict(default or {})
    new_defaults.update({'move_history_ids2': [], 'move_history_ids': []})
    return super(stock_move, self).copy(cr, uid, id, new_defaults, context=context)
def _auto_init(self, cursor, context=None):
    # After the standard table/column setup, make sure the composite
    # index used by availability lookups exists. Created manually
    # because the ORM only builds single-column indexes via select=True.
    res = super(stock_move, self)._auto_init(cursor, context=context)
    cursor.execute('SELECT indexname \
            FROM pg_indexes \
            WHERE indexname = \'stock_move_location_id_location_dest_id_product_id_state\'')
    if not cursor.fetchone():
        # Index does not exist yet: create it (idempotent across restarts).
        cursor.execute('CREATE INDEX stock_move_location_id_location_dest_id_product_id_state \
                ON stock_move (product_id, state, location_id, location_dest_id)')
    return res
def onchange_lot_id(self, cr, uid, ids, prodlot_id=False, product_qty=False,
                    loc_id=False, product_id=False, uom_id=False, context=None):
    """ On change of production lot gives a warning message.
    @param prodlot_id: Changed production lot id
    @param product_qty: Quantity of product
    @param loc_id: Location id
    @param product_id: Product id
    @return: Warning message
    """
    # Without both a lot and a location there is nothing to check.
    if not prodlot_id or not loc_id:
        return {}
    ctx = context and context.copy() or {}
    # stock_available on the lot is computed for ctx['location_id'].
    ctx['location_id'] = loc_id
    ctx.update({'raise-exception': True})
    uom_obj = self.pool.get('product.uom')
    product_obj = self.pool.get('product.product')
    product_uom = product_obj.browse(cr, uid, product_id, context=ctx).uom_id
    prodlot = self.pool.get('stock.production.lot').browse(cr, uid, prodlot_id, context=ctx)
    location = self.pool.get('stock.location').browse(cr, uid, loc_id, context=ctx)
    uom = uom_obj.browse(cr, uid, uom_id, context=ctx)
    # Convert the lot's available stock (expressed in the product's
    # default UoM) into the UoM chosen on the move line.
    amount_actual = uom_obj._compute_qty_obj(cr, uid, product_uom, prodlot.stock_available, uom, context=ctx)
    warning = {}
    # Only internal locations carry real stock; warn when the requested
    # quantity exceeds what this serial number has available there.
    if (location.usage == 'internal') and (product_qty > (amount_actual or 0.0)):
        warning = {
            'title': _('Insufficient Stock for Serial Number !'),
            'message': _('You are moving %.2f %s but only %.2f %s available for this serial number.') % (product_qty, uom.name, amount_actual, uom.name)
        }
    return {'warning': warning}
def onchange_quantity(self, cr, uid, ids, product_id, product_qty,
                      product_uom, product_uos):
    """ On change of product quantity finds UoM and UoS quantities
    @param product_id: Product id
    @param product_qty: Changed Quantity of product
    @param product_uom: Unit of measure of product
    @param product_uos: Unit of sale of product
    @return: Dictionary of values
    """
    result = {
        'product_uos_qty': 0.00
    }
    warning = {}
    # Non-positive quantities are reset to zero instead of propagated.
    if (not product_id) or (product_qty <=0.0):
        result['product_qty'] = 0.0
        return {'value': result}
    product_obj = self.pool.get('product.product')
    uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])
    # Warn if the quantity was decreased: OpenERP will treat the lowered
    # quantity as complete and will NOT create a back order.
    if ids:
        for move in self.read(cr, uid, ids, ['product_qty']):
            if product_qty < move['product_qty']:
                warning.update({
                    'title': _('Information'),
                    'message': _("By changing this quantity here, you accept the "
                            "new quantity as complete: OpenERP will not "
                            "automatically generate a back order.") })
                break
    # UoS quantity follows the UoM quantity through uos_coeff only when
    # the two units actually differ.
    if product_uos and product_uom and (product_uom != product_uos):
        result['product_uos_qty'] = product_qty * uos_coeff['uos_coeff']
    else:
        result['product_uos_qty'] = product_qty
    return {'value': result, 'warning': warning}
def onchange_uos_quantity(self, cr, uid, ids, product_id, product_uos_qty,
                          product_uos, product_uom):
    """ On change of product quantity finds UoM and UoS quantities
    @param product_id: Product id
    @param product_uos_qty: Changed UoS Quantity of product
    @param product_uom: Unit of measure of product
    @param product_uos: Unit of sale of product
    @return: Dictionary of values
    """
    result = {
        'product_qty': 0.00
    }
    warning = {}
    # Non-positive quantities are reset to zero instead of propagated.
    if (not product_id) or (product_uos_qty <=0.0):
        result['product_uos_qty'] = 0.0
        return {'value': result}
    product_obj = self.pool.get('product.product')
    uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])
    # Warn if the quantity was decreased (no back order will be created).
    # With an empty `ids` the read returns [] and the loop is a no-op.
    for move in self.read(cr, uid, ids, ['product_uos_qty']):
        if product_uos_qty < move['product_uos_qty']:
            warning.update({
                'title': _('Warning: No Back Order'),
                'message': _("By changing the quantity here, you accept the "
                        "new quantity as complete: OpenERP will not "
                        "automatically generate a Back Order.") })
            break
    # Inverse of onchange_quantity: UoM qty = UoS qty / uos_coeff.
    # NOTE(review): a zero uos_coeff would raise ZeroDivisionError here --
    # presumably prevented by product configuration; confirm upstream.
    if product_uos and product_uom and (product_uom != product_uos):
        result['product_qty'] = product_uos_qty / uos_coeff['uos_coeff']
    else:
        result['product_qty'] = product_uos_qty
    return {'value': result, 'warning': warning}
def onchange_product_id(self, cr, uid, ids, prod_id=False, loc_id=False,
                        loc_dest_id=False, partner_id=False):
    """ On change of product id, if finds UoM, UoS, quantity and UoS quantity.
    @param prod_id: Changed Product id
    @param loc_id: Source location id
    @param loc_dest_id: Destination location id
    @param partner_id: Address id of partner
    @return: Dictionary of values
    """
    if not prod_id:
        return {}
    # Read the product in the partner's language when one is given,
    # otherwise in the current user's language.
    user = self.pool.get('res.users').browse(cr, uid, uid)
    lang = user and user.lang or False
    if partner_id:
        addr_rec = self.pool.get('res.partner').browse(cr, uid, partner_id)
        if addr_rec:
            lang = addr_rec and addr_rec.lang or False
    ctx = {'lang': lang}
    product = self.pool.get('product.product').browse(cr, uid, [prod_id], context=ctx)[0]
    uos_id = product.uos_id and product.uos_id.id or False
    result = {
        'product_uom': product.uom_id.id,
        'product_uos': uos_id,
        'product_qty': 1.00,
        # Derive the UoS quantity for qty=1 via the regular onchange.
        'product_uos_qty' : self.pool.get('stock.move').onchange_quantity(cr, uid, ids, prod_id, 1.00, product.uom_id.id, uos_id)['value']['product_uos_qty'],
        # Changing product invalidates any previously selected lot.
        'prodlot_id' : False,
    }
    # Only pre-fill the description for new (unsaved) moves.
    if not ids:
        result['name'] = product.partner_ref
    if loc_id:
        result['location_id'] = loc_id
    if loc_dest_id:
        result['location_dest_id'] = loc_dest_id
    return {'value': result}
def onchange_move_type(self, cr, uid, ids, type, context=None):
    """ On change of move type gives sorce and destination location.
    @param type: Move Type
    @return: Dictionary of values
    """
    mod_obj = self.pool.get('ir.model.data')
    # Internal moves default to stock -> stock; 'in' and 'out' swap one
    # side for the supplier/customer virtual location.
    location_source_id = 'stock_location_stock'
    location_dest_id = 'stock_location_stock'
    if type == 'in':
        location_source_id = 'stock_location_suppliers'
        location_dest_id = 'stock_location_stock'
    elif type == 'out':
        location_source_id = 'stock_location_stock'
        location_dest_id = 'stock_location_customers'
    # Resolve each XML id and drop it silently if the record is missing
    # or the user has no read access (mute_logger hides the ACL warning).
    try:
        source_location = mod_obj.get_object_reference(cr, uid, 'stock', location_source_id)
        with tools.mute_logger('openerp.osv.orm'):
            self.pool.get('stock.location').check_access_rule(cr, uid, [source_location[1]], 'read', context=context)
    except (orm.except_orm, ValueError):
        source_location = False
    try:
        dest_location = mod_obj.get_object_reference(cr, uid, 'stock', location_dest_id)
        with tools.mute_logger('openerp.osv.orm'):
            self.pool.get('stock.location').check_access_rule(cr, uid, [dest_location[1]], 'read', context=context)
    except (orm.except_orm, ValueError):
        dest_location = False
    return {'value':{'location_id': source_location and source_location[1] or False, 'location_dest_id': dest_location and dest_location[1] or False}}
def onchange_date(self, cr, uid, ids, date, date_expected, context=None):
    """ On change of Scheduled Date gives a Move date.
    @param date_expected: Scheduled Date
    @param date: Move Date
    @return: Move Date
    """
    # The move date mirrors the scheduled date; an empty scheduled date
    # falls back to "now".
    effective_date = date_expected or time.strftime('%Y-%m-%d %H:%M:%S')
    return {'value': {'date': effective_date}}
def _chain_compute(self, cr, uid, moves, context=None):
    """ Finds whether the location has chained location type or not.
    @param moves: Stock moves
    @return: Dictionary containing destination location with chained location type.

    Returns {picking: [(move, chaining_info), ...]} for non-transparent
    chains. 'transparent' chains are applied immediately (the move's
    destination and date are rewritten in place) and then re-examined
    recursively, so they never appear in the result.
    """
    result = {}
    for m in moves:
        dest = self.pool.get('stock.location').chained_location_get(
            cr,
            uid,
            m.location_dest_id,
            m.picking_id and m.picking_id.partner_id and m.picking_id.partner_id,
            m.product_id,
            context
        )
        if dest:
            if dest[1] == 'transparent':
                # Transparent chain: push the move straight to the chained
                # location, shifting the date by the chain's lead time.
                newdate = (datetime.strptime(m.date, '%Y-%m-%d %H:%M:%S') + relativedelta(days=dest[2] or 0)).strftime('%Y-%m-%d')
                self.write(cr, uid, [m.id], {
                    'date': newdate,
                    'location_dest_id': dest[0].id})
                # Propagate the chain's journal/type to the picking if set.
                if m.picking_id and (dest[3] or dest[5]):
                    self.pool.get('stock.picking').write(cr, uid, [m.picking_id.id], {
                        'stock_journal_id': dest[3] or m.picking_id.stock_journal_id.id,
                        'type': dest[5] or m.picking_id.type
                    }, context=context)
                # Update the in-memory record and recurse: the new
                # destination may itself be chained.
                m.location_dest_id = dest[0]
                res2 = self._chain_compute(cr, uid, [m], context=context)
                for pick_id in res2.keys():
                    result.setdefault(pick_id, [])
                    result[pick_id] += res2[pick_id]
            else:
                # Non-transparent chain: collected per picking, handled
                # later by create_chained_picking().
                result.setdefault(m.picking_id, [])
                result[m.picking_id].append( (m, dest) )
    return result
def _prepare_chained_picking(self, cr, uid, picking_name, picking, picking_type, moves_todo, context=None):
    """Prepare the definition (values) to create a new chained picking.
    :param str picking_name: desired new picking name
    :param browse_record picking: source picking (being chained to)
    :param str picking_type: desired new picking type
    :param list moves_todo: specification of the stock moves to be later included in this
        picking, in the form::
            [[move, (dest_location, auto_packing, chained_delay, chained_journal,
                     chained_company_id, chained_picking_type)],
             ...
            ]
        See also :meth:`stock_location.chained_location_get`.
    """
    res_company = self.pool.get('res.company')
    return {
        'name': picking_name,
        'origin': tools.ustr(picking.origin or ''),
        'type': picking_type,
        'note': picking.note,
        'move_type': picking.move_type,
        # Chaining info of the first move decides picking-level settings.
        'auto_picking': moves_todo[0][1][1] == 'auto',
        'stock_journal_id': moves_todo[0][1][3],
        'company_id': moves_todo[0][1][4] or res_company._company_default_get(cr, uid, 'stock.company', context=context),
        'partner_id': picking.partner_id.id,
        # Chained pickings are internal follow-ups: never invoiced here.
        'invoice_state': 'none',
        'date': picking.date,
    }
def _create_chained_picking(self, cr, uid, picking_name, picking, picking_type, moves_todo, context=None):
    """Create and return the id of the chained picking, using the values
    built by :meth:`_prepare_chained_picking` (kept separate so other
    modules can override the prepared values)."""
    values = self._prepare_chained_picking(cr, uid, picking_name, picking, picking_type, moves_todo, context=context)
    return self.pool.get('stock.picking').create(cr, uid, values)
def create_chained_picking(self, cr, uid, moves, context=None):
    """For every chained destination found by _chain_compute(), create the
    follow-up moves (state 'waiting') grouped into new pickings, link them
    via move_dest_id/move_history_ids, confirm the new pickings, and
    recurse until no more chaining applies. Returns the list of all
    newly created moves (browse records)."""
    res_obj = self.pool.get('res.company')
    location_obj = self.pool.get('stock.location')
    move_obj = self.pool.get('stock.move')
    wf_service = netsvc.LocalService("workflow")
    new_moves = []
    if context is None:
        context = {}
    seq_obj = self.pool.get('ir.sequence')
    for picking, todo in self._chain_compute(cr, uid, moves, context=context).items():
        # Picking type: from the chaining info if set, else derived from
        # the locations of the first chained move.
        ptype = todo[0][1][5] and todo[0][1][5] or location_obj.picking_type_get(cr, uid, todo[0][0].location_dest_id, todo[0][1][0])
        if picking:
            # name of new picking according to its type
            if ptype == 'internal':
                new_pick_name = seq_obj.get(cr, uid,'stock.picking')
            else :
                new_pick_name = seq_obj.get(cr, uid, 'stock.picking.' + ptype)
            pickid = self._create_chained_picking(cr, uid, new_pick_name, picking, ptype, todo, context=context)
            # Need to check name of old picking because it always considers picking as "OUT" when created from Sales Order
            old_ptype = location_obj.picking_type_get(cr, uid, picking.move_lines[0].location_id, picking.move_lines[0].location_dest_id)
            if old_ptype != picking.type:
                old_pick_name = seq_obj.get(cr, uid, 'stock.picking.' + old_ptype)
                self.pool.get('stock.picking').write(cr, uid, [picking.id], {'name': old_pick_name, 'type': old_ptype}, context=context)
        else:
            # Moves without a picking chain into picking-less moves.
            pickid = False
        for move, (loc, dummy, delay, dummy, company_id, ptype, invoice_state) in todo:
            # The follow-up move starts where the original one ends, and
            # waits for it ('waiting' + move_dest_id back-link).
            new_id = move_obj.copy(cr, uid, move.id, {
                'location_id': move.location_dest_id.id,
                'location_dest_id': loc.id,
                'date': time.strftime('%Y-%m-%d'),
                'picking_id': pickid,
                'state': 'waiting',
                'company_id': company_id or res_obj._company_default_get(cr, uid, 'stock.company', context=context) ,
                'move_history_ids': [],
                # Expected date shifted by the chain's lead time (days).
                'date_expected': (datetime.strptime(move.date, '%Y-%m-%d %H:%M:%S') + relativedelta(days=delay or 0)).strftime('%Y-%m-%d'),
                'move_history_ids2': []}
            )
            move_obj.write(cr, uid, [move.id], {
                'move_dest_id': new_id,
                'move_history_ids': [(4, new_id)]
            })
            new_moves.append(self.browse(cr, uid, [new_id])[0])
        if pickid:
            wf_service.trg_validate(uid, 'stock.picking', pickid, 'button_confirm', cr)
    if new_moves:
        # The new moves may themselves be chained: recurse.
        new_moves += self.create_chained_picking(cr, uid, new_moves, context)
    return new_moves
def action_confirm(self, cr, uid, ids, context=None):
    """ Confirms stock move.
    @return: List of ids.
    """
    # Browse before the state change, then create any chained follow-ups.
    move_records = self.browse(cr, uid, ids, context=context)
    self.write(cr, uid, ids, {'state': 'confirmed'})
    self.create_chained_picking(cr, uid, move_records, context)
    return []
def action_assign(self, cr, uid, ids, *args):
    """Try to reserve stock for the moves that are waiting for it.
    Only moves in state 'confirmed' or 'waiting' are considered.
    @return: result of check_assign (number of moves processed)
    """
    pending_ids = [move.id for move in self.browse(cr, uid, ids)
                   if move.state in ('confirmed', 'waiting')]
    return self.check_assign(cr, uid, pending_ids)
def force_assign(self, cr, uid, ids, context=None):
    """Mark the moves as available regardless of actual stock.
    @return: True
    """
    self.write(cr, uid, ids, {'state': 'assigned'})
    # Poke the picking workflows so their state follows the moves.
    workflow = netsvc.LocalService('workflow')
    for move in self.browse(cr, uid, ids, context):
        picking = move.picking_id
        if picking:
            workflow.trg_write(uid, 'stock.picking', picking.id, cr)
    return True
def cancel_assign(self, cr, uid, ids, context=None):
    """Release the reservation: put the moves back to 'confirmed'.
    @return: True
    """
    self.write(cr, uid, ids, {'state': 'confirmed'})
    # fix for bug lp:707031
    # called write of related picking because changing move availability does
    # not trigger workflow of picking in order to change the state of picking
    workflow = netsvc.LocalService('workflow')
    for move in self.browse(cr, uid, ids, context):
        picking = move.picking_id
        if picking:
            workflow.trg_write(uid, 'stock.picking', picking.id, cr)
    return True
#
# Duplicate stock.move
#
def check_assign(self, cr, uid, ids, context=None):
    """ Checks the product type and accordingly writes the state.

    Tries to reserve stock for each move: consumables and supplier-sourced
    moves are assigned unconditionally; stockable products go through
    stock.location._product_reserve(). A reservation spread over several
    source locations splits the move (one copy per location).
    @return: No. of moves done
    """
    done = []
    count = 0
    pickings = {}
    if context is None:
        context = {}
    for move in self.browse(cr, uid, ids, context=context):
        # Consumables and incoming goods need no availability check.
        if move.product_id.type == 'consu' or move.location_id.usage == 'supplier':
            if move.state in ('confirmed', 'waiting'):
                done.append(move.id)
            pickings[move.picking_id.id] = 1
            continue
        if move.state in ('confirmed', 'waiting'):
            # Important: we must pass lock=True to _product_reserve() to avoid race conditions and double reservations
            res = self.pool.get('stock.location')._product_reserve(cr, uid, [move.location_id.id], move.product_id.id, move.product_qty, {'uom': move.product_uom.id}, lock=True)
            if res:
                #_product_available_test depends on the next status for correct functioning
                #the test does not work correctly if the same product occurs multiple times
                #in the same order. This is e.g. the case when using the button 'split in two' of
                #the stock outgoing form
                self.write(cr, uid, [move.id], {'state':'assigned'})
                done.append(move.id)
                pickings[move.picking_id.id] = 1
                # First reservation chunk updates the move in place
                # (direct SQL to bypass the done-state write protection).
                r = res.pop(0)
                product_uos_qty = self.pool.get('stock.move').onchange_quantity(cr, uid, ids, move.product_id.id, r[0], move.product_id.uom_id.id, move.product_id.uos_id.id)['value']['product_uos_qty']
                cr.execute('update stock_move set location_id=%s, product_qty=%s, product_uos_qty=%s where id=%s', (r[1], r[0],product_uos_qty, move.id))
                # Remaining chunks become copies of the move, one per
                # source location.
                while res:
                    r = res.pop(0)
                    product_uos_qty = self.pool.get('stock.move').onchange_quantity(cr, uid, ids, move.product_id.id, r[0], move.product_id.uom_id.id, move.product_id.uos_id.id)['value']['product_uos_qty']
                    move_id = self.copy(cr, uid, move.id, {'product_uos_qty': product_uos_qty, 'product_qty': r[0], 'location_id': r[1]})
                    done.append(move_id)
    if done:
        count += len(done)
        self.write(cr, uid, done, {'state': 'assigned'})
    if count:
        # Let each impacted picking's workflow re-evaluate its state.
        for pick_id in pickings:
            wf_service = netsvc.LocalService("workflow")
            wf_service.trg_write(uid, 'stock.picking', pick_id, cr)
    return count
def setlast_tracking(self, cr, uid, ids, context=None):
    """Assign the picking's most recent pack (highest tracking id) to the
    moves, creating a fresh pack when the picking has none yet."""
    tracking_obj = self.pool.get('stock.tracking')
    first_move = self.browse(cr, uid, ids, context=context)[0]
    picking = first_move.picking_id
    if picking:
        existing_ids = [line.tracking_id.id for line in picking.move_lines if line.tracking_id]
        if existing_ids:
            # Highest id == most recently created pack.
            pack_id = max(existing_ids)
        else:
            pack_id = tracking_obj.create(cr, uid, {}, context=context)
        self.write(cr, uid, ids, {'tracking_id': pack_id})
    return True
#
# Cancel move => cancel others move and pickings
#
def action_cancel(self, cr, uid, ids, context=None):
    """ Cancels the moves and if all moves are cancelled it cancels the picking.
    @return: True
    """
    if not len(ids):
        return True
    if context is None:
        context = {}
    pickings = set()
    for move in self.browse(cr, uid, ids, context=context):
        # Only moves not yet done can be cancelled.
        if move.state in ('confirmed', 'waiting', 'assigned', 'draft'):
            if move.picking_id:
                pickings.add(move.picking_id.id)
        # A chained follow-up that was waiting on this move must stop
        # waiting: put it back to 'confirmed' and wake its picking.
        if move.move_dest_id and move.move_dest_id.state == 'waiting':
            self.write(cr, uid, [move.move_dest_id.id], {'state': 'confirmed'}, context=context)
            if context.get('call_unlink',False) and move.move_dest_id.picking_id:
                wf_service = netsvc.LocalService("workflow")
                wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr)
    self.write(cr, uid, ids, {'state': 'cancel', 'move_dest_id': False}, context=context)
    # When not called from unlink(), cascade: a picking whose moves are
    # now all cancelled is cancelled too.
    if not context.get('call_unlink',False):
        for pick in self.pool.get('stock.picking').browse(cr, uid, list(pickings), context=context):
            if all(move.state == 'cancel' for move in pick.move_lines):
                self.pool.get('stock.picking').write(cr, uid, [pick.id], {'state': 'cancel'}, context=context)
    wf_service = netsvc.LocalService("workflow")
    for id in ids:
        wf_service.trg_trigger(uid, 'stock.move', id, cr)
    return True
def _get_accounting_data_for_valuation(self, cr, uid, move, context=None):
    """
    Return the accounts and journal to use to post Journal Entries for the real-time
    valuation of the move.
    :param context: context dictionary that can explicitly mention the company to consider via the 'force_company' key
    :raise: osv.except_osv() is any mandatory account or journal is not defined.
    :return: (journal_id, acc_src, acc_dest, acc_valuation) tuple of ids
    """
    product_obj=self.pool.get('product.product')
    accounts = product_obj.get_product_accounts(cr, uid, move.product_id.id, context)
    # Location-level valuation accounts override the product's accounts.
    if move.location_id.valuation_out_account_id:
        acc_src = move.location_id.valuation_out_account_id.id
    else:
        acc_src = accounts['stock_account_input']
    if move.location_dest_id.valuation_in_account_id:
        acc_dest = move.location_dest_id.valuation_in_account_id.id
    else:
        acc_dest = accounts['stock_account_output']
    acc_valuation = accounts.get('property_stock_valuation_account_id', False)
    journal_id = accounts['stock_journal']
    # Input/output accounts must differ from the valuation account, or
    # the journal entry would net to a no-op on the same account.
    if acc_dest == acc_valuation:
        raise osv.except_osv(_('Error!'), _('Cannot create Journal Entry, Output Account of this product and Valuation account on category of this product are same.'))
    if acc_src == acc_valuation:
        raise osv.except_osv(_('Error!'), _('Cannot create Journal Entry, Input Account of this product and Valuation account on category of this product are same.'))
    if not acc_src:
        raise osv.except_osv(_('Error!'), _('Please define stock input account for this product or its category: "%s" (id: %d)') % \
                (move.product_id.name, move.product_id.id,))
    if not acc_dest:
        raise osv.except_osv(_('Error!'), _('Please define stock output account for this product or its category: "%s" (id: %d)') % \
                (move.product_id.name, move.product_id.id,))
    if not journal_id:
        raise osv.except_osv(_('Error!'), _('Please define journal on the product category: "%s" (id: %d)') % \
                (move.product_id.categ_id.name, move.product_id.categ_id.id,))
    if not acc_valuation:
        raise osv.except_osv(_('Error!'), _('Please define inventory valuation account on the product category: "%s" (id: %d)') % \
                (move.product_id.categ_id.name, move.product_id.categ_id.id,))
    return journal_id, acc_src, acc_dest, acc_valuation
def _get_reference_accounting_values_for_valuation(self, cr, uid, move, context=None):
    """
    Return the reference amount and reference currency representing the inventory valuation for this move.
    These reference values should possibly be converted before being posted in Journals to adapt to the primary
    and secondary currencies of the relevant accounts.
    :return: (reference_amount, reference_currency_id)
    """
    product_uom_obj = self.pool.get('product.uom')
    # by default the reference currency is that of the move's company
    reference_currency_id = move.company_id.currency_id.id
    # Valuation prices are per default UoM: convert the moved quantity.
    default_uom = move.product_id.uom_id.id
    qty = product_uom_obj._compute_qty(cr, uid, move.product_uom.id, move.product_qty, default_uom)
    # if product is set to average price and a specific value was entered in the picking wizard,
    # we use it
    if move.product_id.cost_method == 'average' and move.price_unit:
        reference_amount = qty * move.price_unit
        reference_currency_id = move.price_currency_id.id or reference_currency_id
    # Otherwise we default to the company's valuation price type, considering that the values of the
    # valuation field are expressed in the default currency of the move's company.
    else:
        if context is None:
            context = {}
        currency_ctx = dict(context, currency_id = move.company_id.currency_id.id)
        amount_unit = move.product_id.price_get('standard_price', context=currency_ctx)[move.product_id.id]
        reference_amount = amount_unit * qty
    return reference_amount, reference_currency_id
def _create_product_valuation_moves(self, cr, uid, move, context=None):
    """
    Generate the appropriate accounting moves if the product being moves is subject
    to real_time valuation tracking, and the source or destination location is
    a transit location or is outside of the company.
    """
    if move.product_id.valuation == 'real_time': # FIXME: product valuation should perhaps be a property?
        if context is None:
            context = {}
        # Accounts are resolved in the company owning each side of the move.
        src_company_ctx = dict(context,force_company=move.location_id.company_id.id)
        dest_company_ctx = dict(context,force_company=move.location_dest_id.company_id.id)
        account_moves = []
        # Outgoing moves (or cross-company output part)
        if move.location_id.company_id \
            and (move.location_id.usage == 'internal' and move.location_dest_id.usage != 'internal'\
                 or move.location_id.company_id != move.location_dest_id.company_id):
            journal_id, acc_src, acc_dest, acc_valuation = self._get_accounting_data_for_valuation(cr, uid, move, src_company_ctx)
            reference_amount, reference_currency_id = self._get_reference_accounting_values_for_valuation(cr, uid, move, src_company_ctx)
            #returning goods to supplier
            if move.location_dest_id.usage == 'supplier':
                account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_valuation, acc_src, reference_amount, reference_currency_id, context))]
            else:
                account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_valuation, acc_dest, reference_amount, reference_currency_id, context))]
        # Incoming moves (or cross-company input part)
        if move.location_dest_id.company_id \
            and (move.location_id.usage != 'internal' and move.location_dest_id.usage == 'internal'\
                 or move.location_id.company_id != move.location_dest_id.company_id):
            journal_id, acc_src, acc_dest, acc_valuation = self._get_accounting_data_for_valuation(cr, uid, move, dest_company_ctx)
            # NOTE(review): this branch resolves accounts with
            # dest_company_ctx but amounts with src_company_ctx --
            # possibly intentional (keep the source valuation as
            # reference), possibly an oversight; confirm before changing.
            reference_amount, reference_currency_id = self._get_reference_accounting_values_for_valuation(cr, uid, move, src_company_ctx)
            #goods return from customer
            if move.location_id.usage == 'customer':
                account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_dest, acc_valuation, reference_amount, reference_currency_id, context))]
            else:
                account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_src, acc_valuation, reference_amount, reference_currency_id, context))]
        move_obj = self.pool.get('account.move')
        # One account.move per journal, referencing the picking.
        for j_id, move_lines in account_moves:
            move_obj.create(cr, uid,
                    {
                     'journal_id': j_id,
                     'line_id': move_lines,
                     'ref': move.picking_id and move.picking_id.name}, context=context)
def action_done(self, cr, uid, ids, context=None):
    """ Makes the moves done and, if all moves of a picking are done, finishes the picking.

    Side effects: confirms draft moves first, chains downstream moves
    (move_dest_id), creates real-time valuation entries, and triggers the
    stock.move / stock.picking workflows.
    @return: True
    """
    picking_ids = []
    move_ids = []
    wf_service = netsvc.LocalService("workflow")
    if context is None:
        context = {}
    todo = []
    # First pass: draft moves must be confirmed before they can be done.
    for move in self.browse(cr, uid, ids, context=context):
        if move.state=="draft":
            todo.append(move.id)
    if todo:
        self.action_confirm(cr, uid, todo, context=context)
        todo = []
    for move in self.browse(cr, uid, ids, context=context):
        if move.state in ['done','cancel']:
            continue
        move_ids.append(move.id)
        if move.picking_id:
            picking_ids.append(move.picking_id.id)
        if move.move_dest_id.id and (move.state != 'done'):
            # Downstream move should only be triggered if this move is the last pending upstream move
            other_upstream_move_ids = self.search(cr, uid, [('id','!=',move.id),('state','not in',['done','cancel']),
                                ('move_dest_id','=',move.move_dest_id.id)], context=context)
            if not other_upstream_move_ids:
                # Link this move into the destination move's history.
                self.write(cr, uid, [move.id], {'move_history_ids': [(4, move.move_dest_id.id)]})
                if move.move_dest_id.state in ('waiting', 'confirmed'):
                    self.force_assign(cr, uid, [move.move_dest_id.id], context=context)
                    if move.move_dest_id.picking_id:
                        wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr)
                    if move.move_dest_id.auto_validate:
                        # Recursive call: auto-validated chained moves are done immediately.
                        self.action_done(cr, uid, [move.move_dest_id.id], context=context)
        # Post accounting entries if the product uses real-time valuation.
        self._create_product_valuation_moves(cr, uid, move, context=context)
        if move.state not in ('confirmed','done','assigned'):
            todo.append(move.id)
    if todo:
        self.action_confirm(cr, uid, todo, context=context)
    # Mark everything done with the current timestamp, then fire the workflows.
    self.write(cr, uid, move_ids, {'state': 'done', 'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context)
    for id in move_ids:
        wf_service.trg_trigger(uid, 'stock.move', id, cr)
    for pick_id in picking_ids:
        wf_service.trg_write(uid, 'stock.picking', pick_id, cr)
    return True
def _create_account_move_line(self, cr, uid, move, src_account_id, dest_account_id, reference_amount, reference_currency_id, context=None):
    """
    Generate the account.move.line values to post to track the stock valuation difference due to the
    processing of the given stock move.

    @param src_account_id: account credited by the valuation entry
    @param dest_account_id: account debited by the valuation entry
    @param reference_amount: amount expressed in reference_currency_id
    @return: list of (0, 0, vals) tuples (debit line first, then credit line)
        suitable for an account.move 'line_id' one2many field
    """
    # prepare default values considering that the destination accounts have the reference_currency_id as their main currency
    partner_id = (move.picking_id.partner_id and self.pool.get('res.partner')._find_accounting_partner(move.picking_id.partner_id).id) or False
    debit_line_vals = {
                'name': move.name,
                'product_id': move.product_id and move.product_id.id or False,
                'quantity': move.product_qty,
                'ref': move.picking_id and move.picking_id.name or False,
                'date': time.strftime('%Y-%m-%d'),
                'partner_id': partner_id,
                'debit': reference_amount,
                'account_id': dest_account_id,
    }
    credit_line_vals = {
                'name': move.name,
                'product_id': move.product_id and move.product_id.id or False,
                'quantity': move.product_qty,
                'ref': move.picking_id and move.picking_id.name or False,
                'date': time.strftime('%Y-%m-%d'),
                'partner_id': partner_id,
                'credit': reference_amount,
                'account_id': src_account_id,
    }
    # if we are posting to accounts in a different currency, provide correct values in both currencies correctly
    # when compatible with the optional secondary currency on the account.
    # Financial Accounts only accept amounts in secondary currencies if there's no secondary currency on the account
    # or if it's the same as that of the secondary amount being posted.
    account_obj = self.pool.get('account.account')
    src_acct, dest_acct = account_obj.browse(cr, uid, [src_account_id, dest_account_id], context=context)
    src_main_currency_id = src_acct.company_id.currency_id.id
    dest_main_currency_id = dest_acct.company_id.currency_id.id
    cur_obj = self.pool.get('res.currency')
    if reference_currency_id != src_main_currency_id:
        # fix credit line: convert into the account's company currency
        credit_line_vals['credit'] = cur_obj.compute(cr, uid, reference_currency_id, src_main_currency_id, reference_amount, context=context)
        if (not src_acct.currency_id) or src_acct.currency_id.id == reference_currency_id:
            # credit amount in secondary currency is negative by accounting convention
            credit_line_vals.update(currency_id=reference_currency_id, amount_currency=-reference_amount)
    if reference_currency_id != dest_main_currency_id:
        # fix debit line: convert into the account's company currency
        debit_line_vals['debit'] = cur_obj.compute(cr, uid, reference_currency_id, dest_main_currency_id, reference_amount, context=context)
        if (not dest_acct.currency_id) or dest_acct.currency_id.id == reference_currency_id:
            debit_line_vals.update(currency_id=reference_currency_id, amount_currency=reference_amount)
    return [(0, 0, debit_line_vals), (0, 0, credit_line_vals)]
def unlink(self, cr, uid, ids, context=None):
    """Delete the given stock moves.

    Only draft moves may be deleted, unless the caller sets the internal
    'call_unlink' context flag to bypass the state check.
    """
    if context is None:
        context = {}
    ctx = context.copy()
    force = ctx.get('call_unlink', False)
    for move in self.browse(cr, uid, ids, context=context):
        if not force and move.state != 'draft':
            raise osv.except_osv(_('User Error!'), _('You can only delete draft moves.'))
    return super(stock_move, self).unlink(cr, uid, ids, context=ctx)
# _create_lot function is not used anywhere
def _create_lot(self, cr, uid, ids, product_id, prefix=False):
    """Create a production lot for the given product.

    @param product_id: id of the product the lot is created for
    @param prefix: optional prefix for the lot name
    @return: id of the newly created stock.production.lot record
    """
    values = {'prefix': prefix, 'product_id': product_id}
    return self.pool.get('stock.production.lot').create(cr, uid, values)
def action_scrap(self, cr, uid, ids, quantity, location_id, context=None):
    """ Move the scrap/damaged product into scrap location
    @param cr: the database cursor
    @param uid: the user id
    @param ids: ids of stock move object to be scrapped
    @param quantity : scrap quantity, expressed in the move's UoM
    @param location_id : destination scrap location
    @param context: context arguments
    @return: list of ids of the newly created scrap moves
    """
    #quantity should in MOVE UOM
    if quantity <= 0:
        raise osv.except_osv(_('Warning!'), _('Please provide a positive quantity to scrap.'))
    res = []
    for move in self.browse(cr, uid, ids, context=context):
        # Scrap from the destination if the move is already done, else from the source.
        source_location = move.location_id
        if move.state == 'done':
            source_location = move.location_dest_id
        if source_location.usage != 'internal':
            #restrict to scrap from a virtual location because it's meaningless and it may introduce errors in stock ('creating' new products from nowhere)
            raise osv.except_osv(_('Error!'), _('Forbidden operation: it is not allowed to scrap products from a virtual location.'))
        move_qty = move.product_qty
        # Scale the UoS quantity proportionally to the scrapped fraction of the move.
        uos_qty = quantity / move_qty * move.product_uos_qty
        default_val = {
            'location_id': source_location.id,
            'product_qty': quantity,
            'product_uos_qty': uos_qty,
            'state': move.state,
            'scrapped': True,
            'location_dest_id': location_id,
            'tracking_id': move.tracking_id.id,
            'prodlot_id': move.prodlot_id.id,
        }
        # The scrap is a copy of the original move redirected to the scrap location.
        new_move = self.copy(cr, uid, move.id, default_val)
        res += [new_move]
        product_obj = self.pool.get('product.product')
        for product in product_obj.browse(cr, uid, [move.product_id.id], context=context):
            if move.picking_id:
                # Log the scrap on the picking's chatter.
                uom = product.uom_id.name if product.uom_id else ''
                message = _("%s %s %s has been <b>moved to</b> scrap.") % (quantity, uom, product.name)
                move.picking_id.message_post(body=message)
    # Process the scrap moves immediately.
    self.action_done(cr, uid, res, context=context)
    return res
# action_split function is not used anywhere
# FIXME: deprecate this method
def action_split(self, cr, uid, ids, quantity, split_by_qty=1, prefix=False, with_lot=True, context=None):
    """ Split stock move lines into chunks of split_by_qty, optionally assigning
    a fresh production lot to each chunk.
    @param cr: the database cursor
    @param uid: the user id
    @param ids: ids of stock move object to be split
    @param quantity: total quantity to split, expressed in the move's UoM
    @param split_by_qty : quantity of each split chunk
    @param prefix : prefix of the production lot (unused here; _create_lot is called without it)
    @param with_lot : if true, a production lot will be assigned to each split line
    @param context: context arguments
    @return: ids of the split move lines
    """
    if context is None:
        context = {}
    if quantity <= 0:
        raise osv.except_osv(_('Warning!'), _('Please provide proper quantity.'))
    res = []
    for move in self.browse(cr, uid, ids, context=context):
        if split_by_qty <= 0 or quantity == 0:
            return res
        uos_qty = split_by_qty / move.product_qty * move.product_uos_qty
        quantity_rest = quantity % split_by_qty
        # NOTE(review): uos_qty_rest is computed from split_by_qty, not from
        # quantity_rest, so the remainder line gets the UoS quantity of a full
        # chunk. Looks like a latent bug, but the method is unused/deprecated —
        # confirm before changing.
        uos_qty_rest = split_by_qty / move.product_qty * move.product_uos_qty
        update_val = {
            'product_qty': split_by_qty,
            'product_uos_qty': uos_qty,
        }
        # One chunk per full multiple of split_by_qty; the first chunk reuses
        # the original move when it would otherwise be fully consumed.
        for idx in range(int(quantity//split_by_qty)):
            if not idx and move.product_qty<=quantity:
                current_move = move.id
            else:
                current_move = self.copy(cr, uid, move.id, {'state': move.state})
            res.append(current_move)
            if with_lot:
                update_val['prodlot_id'] = self._create_lot(cr, uid, [current_move], move.product_id.id)
            self.write(cr, uid, [current_move], update_val)
        # Handle the remainder (quantity not evenly divisible by split_by_qty).
        if quantity_rest > 0:
            idx = int(quantity//split_by_qty)
            update_val['product_qty'] = quantity_rest
            update_val['product_uos_qty'] = uos_qty_rest
            if not idx and move.product_qty<=quantity:
                current_move = move.id
            else:
                current_move = self.copy(cr, uid, move.id, {'state': move.state})
            res.append(current_move)
            if with_lot:
                update_val['prodlot_id'] = self._create_lot(cr, uid, [current_move], move.product_id.id)
            self.write(cr, uid, [current_move], update_val)
    return res
def action_consume(self, cr, uid, ids, quantity, location_id=False, context=None):
    """ Consume products with a specific quantity from a specific source location.
    @param cr: the database cursor
    @param uid: the user id
    @param ids: ids of stock move object to be consumed
    @param quantity : quantity to consume, expressed in the move's UoM
    @param location_id : optional source location override
    @param context: context arguments
    @return: ids of the consumed move lines
    """
    #quantity should in MOVE UOM
    if context is None:
        context = {}
    if quantity <= 0:
        raise osv.except_osv(_('Warning!'), _('Please provide proper quantity.'))
    res = []
    for move in self.browse(cr, uid, ids, context=context):
        move_qty = move.product_qty
        if move_qty <= 0:
            raise osv.except_osv(_('Error!'), _('Cannot consume a move with negative or zero quantity.'))
        # Quantity left on the move after consuming; UoS scaled proportionally.
        quantity_rest = move.product_qty
        quantity_rest -= quantity
        uos_qty_rest = quantity_rest / move_qty * move.product_uos_qty
        if quantity_rest <= 0:
            # Consuming more than available: cap at the move's full quantity.
            quantity_rest = 0
            uos_qty_rest = 0
            quantity = move.product_qty
        uos_qty = quantity / move_qty * move.product_uos_qty
        # Non-zero remainder (within UoM rounding): split the move in two —
        # a consumed copy and the original holding the remainder.
        if float_compare(quantity_rest, 0, precision_rounding=move.product_id.uom_id.rounding):
            default_val = {
                'product_qty': quantity,
                'product_uos_qty': uos_qty,
                'state': move.state,
                'location_id': location_id or move.location_id.id,
            }
            current_move = self.copy(cr, uid, move.id, default_val)
            res += [current_move]
            update_val = {}
            update_val['product_qty'] = quantity_rest
            update_val['product_uos_qty'] = uos_qty_rest
            self.write(cr, uid, [move.id], update_val)
        else:
            # Whole move consumed: update it in place.
            quantity_rest = quantity
            uos_qty_rest = uos_qty
            res += [move.id]
            update_val = {
                    'product_qty' : quantity_rest,
                    'product_uos_qty' : uos_qty_rest,
                    'location_id': location_id or move.location_id.id,
            }
            self.write(cr, uid, [move.id], update_val)
    # Process the consumed moves immediately.
    self.action_done(cr, uid, res, context=context)
    return res
# FIXME: needs refactoring, this code is partially duplicated in stock_picking.do_partial()!
def do_partial(self, cr, uid, ids, partial_datas, context=None):
    """ Makes partial pickings and moves done.
    @param partial_datas: Dictionary containing details of partial picking
                      like partner_id, delivery_date, delivery
                      moves with product_id, product_qty, uom,
                      keyed as 'move<id>' per processed move
    @return: ids of the fully processed ('complete') moves
    """
    res = {}
    picking_obj = self.pool.get('stock.picking')
    product_obj = self.pool.get('product.product')
    currency_obj = self.pool.get('res.currency')
    uom_obj = self.pool.get('product.uom')
    wf_service = netsvc.LocalService("workflow")
    if context is None:
        context = {}
    # Classify each move against the quantity actually processed.
    complete, too_many, too_few = [], [], []
    move_product_qty = {}
    prodlot_ids = {}
    for move in self.browse(cr, uid, ids, context=context):
        if move.state in ('done', 'cancel'):
            continue
        partial_data = partial_datas.get('move%s'%(move.id), False)
        assert partial_data, _('Missing partial picking data for move #%s.') % (move.id)
        product_qty = partial_data.get('product_qty',0.0)
        move_product_qty[move.id] = product_qty
        product_uom = partial_data.get('product_uom',False)
        product_price = partial_data.get('product_price',0.0)
        product_currency = partial_data.get('product_currency',False)
        prodlot_ids[move.id] = partial_data.get('prodlot_id')
        if move.product_qty == product_qty:
            complete.append(move)
        elif move.product_qty > product_qty:
            too_few.append(move)
        else:
            too_many.append(move)
        # Average price computation
        if (move.picking_id.type == 'in') and (move.product_id.cost_method == 'average'):
            product = product_obj.browse(cr, uid, move.product_id.id)
            move_currency_id = move.company_id.currency_id.id
            context['currency_id'] = move_currency_id
            qty = uom_obj._compute_qty(cr, uid, product_uom, product_qty, product.uom_id.id)
            if qty > 0:
                # Convert the wizard price into company currency and product UoM.
                new_price = currency_obj.compute(cr, uid, product_currency,
                        move_currency_id, product_price, round=False)
                new_price = uom_obj._compute_price(cr, uid, product_uom, new_price,
                        product.uom_id.id)
                if product.qty_available <= 0:
                    new_std_price = new_price
                else:
                    # Get the standard price
                    amount_unit = product.price_get('standard_price', context=context)[product.id]
                    # Weighted average between on-hand stock and the incoming quantity.
                    new_std_price = ((amount_unit * product.qty_available)\
                        + (new_price * qty))/(product.qty_available + qty)
                product_obj.write(cr, uid, [product.id],{'standard_price': new_std_price})
                # Record the values that were chosen in the wizard, so they can be
                # used for inventory valuation if real-time valuation is enabled.
                self.write(cr, uid, [move.id],
                        {'price_unit': product_price,
                         'price_currency_id': product_currency,
                        })
    # Partially processed moves: split into a done part and a backorder remainder.
    for move in too_few:
        product_qty = move_product_qty[move.id]
        if product_qty != 0:
            defaults = {
                'product_qty' : product_qty,
                'product_uos_qty': product_qty,
                'picking_id' : move.picking_id.id,
                'state': 'assigned',
                'move_dest_id': False,
                'price_unit': move.price_unit,
            }
            prodlot_id = prodlot_ids[move.id]
            if prodlot_id:
                defaults.update(prodlot_id=prodlot_id)
            new_move = self.copy(cr, uid, move.id, defaults)
            complete.append(self.browse(cr, uid, new_move))
        self.write(cr, uid, [move.id],
                {
                    'product_qty': move.product_qty - product_qty,
                    'product_uos_qty': move.product_qty - product_qty,
                    'prodlot_id': False,
                    'tracking_id': False,
                })
    # Over-processed moves are done at their original quantity.
    for move in too_many:
        self.write(cr, uid, [move.id],
                {
                    'product_qty': move.product_qty,
                    'product_uos_qty': move.product_qty,
                })
        complete.append(move)
    for move in complete:
        if prodlot_ids.get(move.id):
            self.write(cr, uid, [move.id],{'prodlot_id': prodlot_ids.get(move.id)})
        self.action_done(cr, uid, [move.id], context=context)
        if move.picking_id.id :
            # TOCHECK : Done picking if all moves are done
            cr.execute("""
                SELECT move.id FROM stock_picking pick
                RIGHT JOIN stock_move move ON move.picking_id = pick.id AND move.state = %s
                WHERE pick.id = %s""",
                        ('done', move.picking_id.id))
            res = cr.fetchall()
            if len(res) == len(move.picking_id.move_lines):
                picking_obj.action_move(cr, uid, [move.picking_id.id])
                wf_service.trg_validate(uid, 'stock.picking', move.picking_id.id, 'button_done', cr)
    return [move.id for move in complete]
stock_move()  # instantiate to register the model in the ORM registry (old-style OpenERP API)
class stock_inventory(osv.osv):
    """Physical inventory: a snapshot of counted quantities per location.

    Confirming an inventory creates corrective stock moves between the
    counted locations and the product's virtual inventory-loss location.
    """
    _name = "stock.inventory"
    _description = "Inventory"
    _columns = {
        'name': fields.char('Inventory Reference', size=64, required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'date': fields.datetime('Creation Date', required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'date_done': fields.datetime('Date done'),
        'inventory_line_id': fields.one2many('stock.inventory.line', 'inventory_id', 'Inventories', readonly=True, states={'draft': [('readonly', False)]}),
        'move_ids': fields.many2many('stock.move', 'stock_inventory_move_rel', 'inventory_id', 'move_id', 'Created Moves'),
        'state': fields.selection( (('draft', 'Draft'), ('cancel','Cancelled'), ('confirm','Confirmed'), ('done', 'Done')), 'Status', readonly=True, select=True),
        'company_id': fields.many2one('res.company', 'Company', required=True, select=True, readonly=True, states={'draft':[('readonly',False)]}),
    }
    _defaults = {
        'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
        'state': 'draft',
        'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c)
    }

    def copy(self, cr, uid, id, default=None, context=None):
        # A duplicated inventory must not share corrective moves nor keep the
        # completion date of the original.
        if default is None:
            default = {}
        default = default.copy()
        default.update({'move_ids': [], 'date_done': False})
        return super(stock_inventory, self).copy(cr, uid, id, default, context=context)

    def _inventory_line_hook(self, cr, uid, inventory_line, move_vals):
        """ Creates a stock move from an inventory line. Hook point for modules
        that need to post-process the generated corrective moves.
        @param inventory_line: browse record of the stock.inventory.line
        @param move_vals: values for the stock.move to create
        @return: id of the created stock.move
        """
        return self.pool.get('stock.move').create(cr, uid, move_vals)

    def action_done(self, cr, uid, ids, context=None):
        """ Finish the inventory: process its corrective moves and stamp date_done.
        @return: True
        """
        if context is None:
            context = {}
        move_obj = self.pool.get('stock.move')
        for inv in self.browse(cr, uid, ids, context=context):
            move_obj.action_done(cr, uid, [x.id for x in inv.move_ids], context=context)
            self.write(cr, uid, [inv.id], {'state':'done', 'date_done': time.strftime('%Y-%m-%d %H:%M:%S')}, context=context)
        return True

    def action_confirm(self, cr, uid, ids, context=None):
        """ Confirm the inventory: generate one corrective stock move per line
        whose counted quantity differs from the theoretical quantity.
        @return: True
        """
        if context is None:
            context = {}
        # to perform the correct inventory corrections we need analyze stock location by
        # location, never recursively, so we use a special context
        product_context = dict(context, compute_child=False)

        location_obj = self.pool.get('stock.location')
        for inv in self.browse(cr, uid, ids, context=context):
            move_ids = []
            for line in inv.inventory_line_id:
                pid = line.product_id.id
                product_context.update(uom=line.product_uom.id, to_date=inv.date, date=inv.date, prodlot_id=line.prod_lot_id.id)
                # Theoretical quantity at the inventory date for this location/lot.
                amount = location_obj._product_get(cr, uid, line.location_id.id, [pid], product_context)[pid]
                change = line.product_qty - amount
                lot_id = line.prod_lot_id.id
                if change:
                    location_id = line.product_id.property_stock_inventory.id
                    value = {
                        'name': _('INV:') + (line.inventory_id.name or ''),
                        'product_id': line.product_id.id,
                        'product_uom': line.product_uom.id,
                        'prodlot_id': lot_id,
                        'date': inv.date,
                    }
                    # Surplus flows from the inventory-loss location into stock;
                    # shortage flows the other way.
                    if change > 0:
                        value.update( {
                            'product_qty': change,
                            'location_id': location_id,
                            'location_dest_id': line.location_id.id,
                        })
                    else:
                        value.update( {
                            'product_qty': -change,
                            'location_id': line.location_id.id,
                            'location_dest_id': location_id,
                        })
                    move_ids.append(self._inventory_line_hook(cr, uid, line, value))
            self.write(cr, uid, [inv.id], {'state': 'confirm', 'move_ids': [(6, 0, move_ids)]})
            self.pool.get('stock.move').action_confirm(cr, uid, move_ids, context=context)
        return True

    def action_cancel_draft(self, cr, uid, ids, context=None):
        """ Cancels the corrective stock moves and resets the inventory to draft.
        @return: True
        """
        for inv in self.browse(cr, uid, ids, context=context):
            self.pool.get('stock.move').action_cancel(cr, uid, [x.id for x in inv.move_ids], context=context)
            self.write(cr, uid, [inv.id], {'state':'draft'}, context=context)
        return True

    def action_cancel_inventory(self, cr, uid, ids, context=None):
        """ Cancels both the corrective stock moves and the inventory itself,
        removing any related (unposted) journal entries.
        @return: True
        """
        move_obj = self.pool.get('stock.move')
        account_move_obj = self.pool.get('account.move')
        for inv in self.browse(cr, uid, ids, context=context):
            move_obj.action_cancel(cr, uid, [x.id for x in inv.move_ids], context=context)
            for move in inv.move_ids:
                # Journal entries created by real-time valuation share the move's name.
                account_move_ids = account_move_obj.search(cr, uid, [('name', '=', move.name)])
                if account_move_ids:
                    account_move_data_l = account_move_obj.read(cr, uid, account_move_ids, ['state'], context=context)
                    for account_move in account_move_data_l:
                        if account_move['state'] == 'posted':
                            # Posted entries cannot be silently removed.
                            raise osv.except_osv(_('User Error!'),
                                                 _('In order to cancel this inventory, you must first unpost related journal entries.'))
                        account_move_obj.unlink(cr, uid, [account_move['id']], context=context)
            self.write(cr, uid, [inv.id], {'state': 'cancel'}, context=context)
        return True

stock_inventory()  # register the model (old-style OpenERP API)
class stock_inventory_line(osv.osv):
    """One counted product/location/lot line of a physical inventory."""
    _name = "stock.inventory.line"
    _description = "Inventory Line"
    _rec_name = "inventory_id"
    _columns = {
        'inventory_id': fields.many2one('stock.inventory', 'Inventory', ondelete='cascade', select=True),
        'location_id': fields.many2one('stock.location', 'Location', required=True),
        'product_id': fields.many2one('product.product', 'Product', required=True, select=True),
        'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
        'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure')),
        'company_id': fields.related('inventory_id','company_id',type='many2one',relation='res.company',string='Company',store=True, select=True, readonly=True),
        'prod_lot_id': fields.many2one('stock.production.lot', 'Serial Number', domain="[('product_id','=',product_id)]"),
        'state': fields.related('inventory_id','state',type='char',string='Status',readonly=True),
    }

    def _default_stock_location(self, cr, uid, context=None):
        # Default to the demo/data 'Stock' location, but only if the current
        # user can actually read it; otherwise leave the field empty.
        try:
            location_model, location_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'stock_location_stock')
            with tools.mute_logger('openerp.osv.orm'):
                self.pool.get('stock.location').check_access_rule(cr, uid, [location_id], 'read', context=context)
        except (orm.except_orm, ValueError):
            location_id = False
        return location_id

    _defaults = {
        'location_id': _default_stock_location
    }

    def on_change_product_id(self, cr, uid, ids, location_id, product, uom=False, to_date=False):
        """ Changes UoM and quantity if product_id changes.
        @param location_id: Location id
        @param product: Changed product_id
        @param uom: UoM of the product (defaults to the product's own UoM)
        @param to_date: date at which the theoretical quantity is computed
        @return: Dictionary of changed values
        """
        if not product:
            return {'value': {'product_qty': 0.0, 'product_uom': False, 'prod_lot_id': False}}
        obj_product = self.pool.get('product.product').browse(cr, uid, product)
        uom = uom or obj_product.uom_id.id
        # Pre-fill the counted quantity with the theoretical quantity on hand.
        amount = self.pool.get('stock.location')._product_get(cr, uid, location_id, [product], {'uom': uom, 'to_date': to_date, 'compute_child': False})[product]
        result = {'product_qty': amount, 'product_uom': uom, 'prod_lot_id': False}
        return {'value': result}

stock_inventory_line()  # register the model (old-style OpenERP API)
#----------------------------------------------------------
# Stock Warehouse
#----------------------------------------------------------
class stock_warehouse(osv.osv):
    """Warehouse: a named triple of input, stock and output locations."""
    _name = "stock.warehouse"
    _description = "Warehouse"
    _columns = {
        'name': fields.char('Name', size=128, required=True, select=True),
        'company_id': fields.many2one('res.company', 'Company', required=True, select=True),
        'partner_id': fields.many2one('res.partner', 'Owner Address'),
        'lot_input_id': fields.many2one('stock.location', 'Location Input', required=True, domain=[('usage','<>','view')]),
        'lot_stock_id': fields.many2one('stock.location', 'Location Stock', required=True, domain=[('usage','=','internal')]),
        'lot_output_id': fields.many2one('stock.location', 'Location Output', required=True, domain=[('usage','<>','view')]),
    }

    def _default_lot_input_stock_id(self, cr, uid, context=None):
        # Default to the data-defined 'Stock' location if readable by the user.
        try:
            lot_input_stock_model, lot_input_stock_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'stock_location_stock')
            with tools.mute_logger('openerp.osv.orm'):
                self.pool.get('stock.location').check_access_rule(cr, uid, [lot_input_stock_id], 'read', context=context)
        except (ValueError, orm.except_orm):
            # the user does not have read access on the location or it does not exist
            lot_input_stock_id = False
        return lot_input_stock_id

    def _default_lot_output_id(self, cr, uid, context=None):
        # Default to the data-defined 'Output' location if readable by the user.
        try:
            lot_output_model, lot_output_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'stock_location_output')
            with tools.mute_logger('openerp.osv.orm'):
                self.pool.get('stock.location').check_access_rule(cr, uid, [lot_output_id], 'read', context=context)
        except (ValueError, orm.except_orm):
            # the user does not have read access on the location or it does not exist
            lot_output_id = False
        return lot_output_id

    _defaults = {
        # NOTE(review): the model name passed here is 'stock.inventory', not
        # 'stock.warehouse' — looks like a copy-paste slip; in practice
        # _company_default_get usually falls back to the user's company anyway.
        # Confirm before changing.
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c),
        'lot_input_id': _default_lot_input_stock_id,
        'lot_stock_id': _default_lot_input_stock_id,
        'lot_output_id': _default_lot_output_id,
    }

stock_warehouse()  # register the model (old-style OpenERP API)
#----------------------------------------------------------
# "Empty" classes layered over the original stock.picking (which remains dedicated to internal pickings)
# in order to offer different usability: dedicated views, labels, and available reports/wizards...
#----------------------------------------------------------
class stock_picking_in(osv.osv):
    """Incoming-shipment view over stock.picking.

    Shares the stock_picking table; every ORM/workflow/messaging call is
    delegated to the real 'stock.picking' model so both models stay in sync.
    """
    _name = "stock.picking.in"
    _inherit = "stock.picking"
    _table = "stock_picking"
    _description = "Incoming Shipments"

    def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
        # delegate to stock.picking so domain evaluation and security are shared
        return self.pool.get('stock.picking').search(cr, user, args, offset, limit, order, context, count)

    def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
        return self.pool.get('stock.picking').read(cr, uid, ids, fields=fields, context=context, load=load)

    def check_access_rights(self, cr, uid, operation, raise_exception=True):
        #override in order to redirect the check of access rights on the stock.picking object
        return self.pool.get('stock.picking').check_access_rights(cr, uid, operation, raise_exception=raise_exception)

    def check_access_rule(self, cr, uid, ids, operation, context=None):
        #override in order to redirect the check of access rules on the stock.picking object
        return self.pool.get('stock.picking').check_access_rule(cr, uid, ids, operation, context=context)

    def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
        #override in order to trigger the workflow of stock.picking at the end of create, write and unlink operation
        #instead of its own workflow (which does not exist)
        return self.pool.get('stock.picking')._workflow_trigger(cr, uid, ids, trigger, context=context)

    def _workflow_signal(self, cr, uid, ids, signal, context=None):
        #override in order to fire the workflow signal on given stock.picking workflow instance
        #instead of its own workflow (which does not exist)
        return self.pool.get('stock.picking')._workflow_signal(cr, uid, ids, signal, context=context)

    def message_post(self, *args, **kwargs):
        """Post the message on stock.picking to be able to see it in the form view when using the chatter"""
        return self.pool.get('stock.picking').message_post(*args, **kwargs)

    def message_subscribe(self, *args, **kwargs):
        """Send the subscribe action on stock.picking model as it uses _name in request"""
        return self.pool.get('stock.picking').message_subscribe(*args, **kwargs)

    def message_unsubscribe(self, *args, **kwargs):
        """Send the unsubscribe action on stock.picking model to match with subscribe"""
        return self.pool.get('stock.picking').message_unsubscribe(*args, **kwargs)

    def default_get(self, cr, uid, fields_list, context=None):
        # merge defaults from stock.picking with possible defaults defined on stock.picking.in
        defaults = self.pool['stock.picking'].default_get(cr, uid, fields_list, context=context)
        in_defaults = super(stock_picking_in, self).default_get(cr, uid, fields_list, context=context)
        defaults.update(in_defaults)
        return defaults

    _columns = {
        'backorder_id': fields.many2one('stock.picking.in', 'Back Order of', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True),
        'state': fields.selection(
            [('draft', 'Draft'),
             ('auto', 'Waiting Another Operation'),
             ('confirmed', 'Waiting Availability'),
             ('assigned', 'Ready to Receive'),
             ('done', 'Received'),
             ('cancel', 'Cancelled'),],
            'Status', readonly=True, select=True,
            help="""* Draft: not confirmed yet and will not be scheduled until confirmed\n
                 * Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
                 * Waiting Availability: still waiting for the availability of products\n
                 * Ready to Receive: products reserved, simply waiting for confirmation.\n
                 * Received: has been processed, can't be modified or cancelled anymore\n
                 * Cancelled: has been cancelled, can't be confirmed anymore"""),
    }
    _defaults = {
        'type': 'in',
    }
class stock_picking_out(osv.osv):
    """Delivery-order view over stock.picking.

    Shares the stock_picking table; every ORM/workflow/messaging call is
    delegated to the real 'stock.picking' model so both models stay in sync.
    """
    _name = "stock.picking.out"
    _inherit = "stock.picking"
    _table = "stock_picking"
    _description = "Delivery Orders"

    def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
        # delegate to stock.picking so domain evaluation and security are shared
        return self.pool.get('stock.picking').search(cr, user, args, offset, limit, order, context, count)

    def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
        return self.pool.get('stock.picking').read(cr, uid, ids, fields=fields, context=context, load=load)

    def check_access_rights(self, cr, uid, operation, raise_exception=True):
        #override in order to redirect the check of access rights on the stock.picking object
        return self.pool.get('stock.picking').check_access_rights(cr, uid, operation, raise_exception=raise_exception)

    def check_access_rule(self, cr, uid, ids, operation, context=None):
        #override in order to redirect the check of access rules on the stock.picking object
        return self.pool.get('stock.picking').check_access_rule(cr, uid, ids, operation, context=context)

    def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
        #override in order to trigger the workflow of stock.picking at the end of create, write and unlink operation
        #instead of its own workflow (which does not exist)
        return self.pool.get('stock.picking')._workflow_trigger(cr, uid, ids, trigger, context=context)

    def _workflow_signal(self, cr, uid, ids, signal, context=None):
        #override in order to fire the workflow signal on given stock.picking workflow instance
        #instead of its own workflow (which does not exist)
        return self.pool.get('stock.picking')._workflow_signal(cr, uid, ids, signal, context=context)

    def message_post(self, *args, **kwargs):
        """Post the message on stock.picking to be able to see it in the form view when using the chatter"""
        return self.pool.get('stock.picking').message_post(*args, **kwargs)

    def message_subscribe(self, *args, **kwargs):
        """Send the subscribe action on stock.picking model as it uses _name in request"""
        return self.pool.get('stock.picking').message_subscribe(*args, **kwargs)

    def message_unsubscribe(self, *args, **kwargs):
        """Send the unsubscribe action on stock.picking model to match with subscribe"""
        return self.pool.get('stock.picking').message_unsubscribe(*args, **kwargs)

    def default_get(self, cr, uid, fields_list, context=None):
        # merge defaults from stock.picking with possible defaults defined on stock.picking.out
        defaults = self.pool['stock.picking'].default_get(cr, uid, fields_list, context=context)
        out_defaults = super(stock_picking_out, self).default_get(cr, uid, fields_list, context=context)
        defaults.update(out_defaults)
        return defaults

    _columns = {
        'backorder_id': fields.many2one('stock.picking.out', 'Back Order of', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True),
        'state': fields.selection(
            [('draft', 'Draft'),
             ('auto', 'Waiting Another Operation'),
             ('confirmed', 'Waiting Availability'),
             ('assigned', 'Ready to Deliver'),
             ('done', 'Delivered'),
             ('cancel', 'Cancelled'),],
            'Status', readonly=True, select=True,
            help="""* Draft: not confirmed yet and will not be scheduled until confirmed\n
                 * Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
                 * Waiting Availability: still waiting for the availability of products\n
                 * Ready to Deliver: products reserved, simply waiting for confirmation.\n
                 * Delivered: has been processed, can't be modified or cancelled anymore\n
                 * Cancelled: has been cancelled, can't be confirmed anymore"""),
    }
    _defaults = {
        'type': 'out',
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| alanjw/GreenOpenERP-Win-X86 | openerp/addons/stock/stock.py | Python | agpl-3.0 | 163,768 |
/********
* This file is part of Ext.NET.
*
* Ext.NET is free software: you can redistribute it and/or modify
* it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Ext.NET is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU AFFERO GENERAL PUBLIC LICENSE for more details.
*
* You should have received a copy of the GNU AFFERO GENERAL PUBLIC LICENSE
* along with Ext.NET. If not, see <http://www.gnu.org/licenses/>.
*
*
* @version : 2.0.0.beta - Community Edition (AGPLv3 License)
* @author : Ext.NET, Inc. http://www.ext.net/
* @date : 2012-03-07
* @copyright : Copyright (c) 2007-2012, Ext.NET, Inc. (http://www.ext.net/). All rights reserved.
* @license : GNU AFFERO GENERAL PUBLIC LICENSE (AGPL) 3.0.
* See license.txt and http://www.ext.net/license/.
* See AGPL License at http://www.gnu.org/licenses/agpl-3.0.txt
********/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Web.UI;
using System.Web.UI.WebControls;
namespace Ext.Net
{
    /// <summary>
    /// Partial definition of RadialAxis providing config-object support.
    /// </summary>
    public partial class RadialAxis
    {
        /// <summary>
        /// Creates a RadialAxis initialized from the supplied config object.
        /// </summary>
        public RadialAxis(Config config)
        {
            this.Apply(config);
        }

        /// <summary>
        /// Allows a RadialAxis.Config to be used wherever a RadialAxis is expected.
        /// </summary>
        public static implicit operator RadialAxis(RadialAxis.Config config)
        {
            return new RadialAxis(config);
        }

        /// <summary>
        /// Configuration options for RadialAxis.
        /// </summary>
        public new partial class Config : AbstractAxis.Config
        {
            /// <summary>
            /// Allows a RadialAxis.Config to be used wherever a RadialAxis.Builder is expected.
            /// </summary>
            public static implicit operator RadialAxis.Builder(RadialAxis.Config config)
            {
                return new RadialAxis.Builder(config);
            }

            private int steps = 0;

            /// <summary>
            /// Number of steps on the axis. Defaults to 0.
            /// </summary>
            [DefaultValue(0)]
            public virtual int Steps
            {
                get { return this.steps; }
                set { this.steps = value; }
            }

            private int maximum = 0;

            /// <summary>
            /// Maximum value of the axis. Defaults to 0.
            /// </summary>
            [DefaultValue(0)]
            public virtual int Maximum
            {
                get { return this.maximum; }
                set { this.maximum = value; }
            }
        }
    }
}
package uploads
import (
"context"
"errors"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"sync"
"time"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus"
"github.com/grafana/loki/pkg/storage/chunk"
"github.com/grafana/loki/pkg/storage/chunk/local"
chunk_util "github.com/grafana/loki/pkg/storage/chunk/util"
"github.com/grafana/loki/pkg/storage/stores/shipper/util"
util_log "github.com/grafana/loki/pkg/util/log"
)
// Config holds the settings for the uploads TableManager.
type Config struct {
	Uploader             string        // name identifying this uploader instance; forwarded to each Table
	IndexDir             string        // local directory containing one sub-directory (or legacy file) per table
	UploadInterval       time.Duration // how often the background loop uploads tables
	DBRetainPeriod       time.Duration // retention period passed to Table.Cleanup after a successful upload
	MakePerTenantBuckets bool          // forwarded to NewTable/LoadTable
}
// TableManager keeps track of the local boltdb index tables and periodically
// uploads them to object storage via the configured StorageClient.
type TableManager struct {
	cfg             Config
	boltIndexClient BoltDBIndexClient
	storageClient   StorageClient
	metrics         *metrics
	tables          map[string]*Table // keyed by table name; guarded by tablesMtx
	tablesMtx       sync.RWMutex
	ctx             context.Context    // cancelled by Stop to terminate the background loop
	cancel          context.CancelFunc
	wg              sync.WaitGroup // tracks the background loop goroutine
}
// NewTableManager loads the tables already present under cfg.IndexDir and
// starts a background loop that periodically uploads them to object storage.
// The returned manager must be shut down with Stop.
func NewTableManager(cfg Config, boltIndexClient BoltDBIndexClient, storageClient StorageClient, registerer prometheus.Registerer) (*TableManager, error) {
	ctx, cancel := context.WithCancel(context.Background())
	tm := TableManager{
		cfg:             cfg,
		boltIndexClient: boltIndexClient,
		storageClient:   storageClient,
		metrics:         newMetrics(registerer),
		ctx:             ctx,
		cancel:          cancel,
	}

	tables, err := tm.loadTables()
	if err != nil {
		// Release the context before bailing out; the original code leaked
		// the cancel func on this path (go vet: lostcancel).
		cancel()
		return nil, err
	}
	tm.tables = tables

	go tm.loop()
	return &tm, nil
}
// loop uploads all tables once immediately and then on every UploadInterval
// tick, until the manager context is cancelled (via Stop).
//
// NOTE(review): wg.Add(1) executes inside this goroutine, so a Stop() racing
// a just-created manager could reach wg.Wait() before Add runs. Consider
// moving Add(1) to the caller, before `go tm.loop()`.
func (tm *TableManager) loop() {
	tm.wg.Add(1)
	defer tm.wg.Done()

	tm.uploadTables(context.Background(), false)

	syncTicker := time.NewTicker(tm.cfg.UploadInterval)
	defer syncTicker.Stop()

	for {
		select {
		case <-syncTicker.C:
			tm.uploadTables(context.Background(), false)
		case <-tm.ctx.Done():
			return
		}
	}
}
// Stop cancels the background loop, waits for it to exit and then performs
// one final upload pass with force=true so pending local writes get shipped.
func (tm *TableManager) Stop() {
	level.Info(util_log.Logger).Log("msg", "stopping table manager")

	tm.cancel()
	tm.wg.Wait()

	tm.uploadTables(context.Background(), true)
}
// QueryPages groups the queries by table and runs each group against the
// matching local table, stopping at the first error.
func (tm *TableManager) QueryPages(ctx context.Context, queries []chunk.IndexQuery, callback chunk.QueryPagesCallback) error {
	for tableName, tableQueries := range util.QueriesByTable(queries) {
		if err := tm.query(ctx, tableName, tableQueries, callback); err != nil {
			return err
		}
	}

	return nil
}
// query runs the given queries against a single table; unknown tables are
// treated as empty (no error).
func (tm *TableManager) query(ctx context.Context, tableName string, queries []chunk.IndexQuery, callback chunk.QueryPagesCallback) error {
	tm.tablesMtx.RLock()
	defer tm.tablesMtx.RUnlock()

	if table, ok := tm.tables[tableName]; ok {
		return util.DoParallelQueries(ctx, table, queries, callback)
	}

	return nil
}
// BatchWrite applies a boltdb write batch, routing each table's writes to its
// Table (creating the table locally on first use).
func (tm *TableManager) BatchWrite(ctx context.Context, batch chunk.WriteBatch) error {
	boltWriteBatch, ok := batch.(*local.BoltWriteBatch)
	if !ok {
		return errors.New("invalid write batch")
	}

	for tableName, tableWrites := range boltWriteBatch.Writes {
		table, err := tm.getOrCreateTable(tableName)
		if err != nil {
			return err
		}

		if err := table.Write(ctx, tableWrites); err != nil {
			return err
		}
	}

	return nil
}
// getOrCreateTable returns the Table for tableName, lazily creating it on
// first use. It uses double-checked locking: the common case takes only the
// read lock, and creation re-checks the map under the write lock so two
// concurrent callers cannot create the same table twice.
func (tm *TableManager) getOrCreateTable(tableName string) (*Table, error) {
	tm.tablesMtx.RLock()
	table, ok := tm.tables[tableName]
	tm.tablesMtx.RUnlock()

	if !ok {
		tm.tablesMtx.Lock()
		defer tm.tablesMtx.Unlock()

		// Re-check: another goroutine may have created the table while we
		// were waiting for the write lock.
		table, ok = tm.tables[tableName]
		if !ok {
			var err error
			table, err = NewTable(filepath.Join(tm.cfg.IndexDir, tableName), tm.cfg.Uploader, tm.storageClient,
				tm.boltIndexClient, tm.cfg.MakePerTenantBuckets)
			if err != nil {
				return nil, err
			}

			tm.tables[tableName] = table
		}
	}

	return table, nil
}
// uploadTables snapshots every table (so reads can continue against a
// consistent view), uploads its dbs and cleans up dbs past the retention
// period. force is forwarded to Table.Upload (presumably bypassing the usual
// upload conditions — used by Stop; confirm against Table.Upload).
// Failures are logged per-table and do not stop the other tables.
func (tm *TableManager) uploadTables(ctx context.Context, force bool) {
	tm.tablesMtx.RLock()
	defer tm.tablesMtx.RUnlock()

	level.Info(util_log.Logger).Log("msg", "uploading tables")

	status := statusSuccess
	for _, table := range tm.tables {
		err := table.Snapshot()
		if err != nil {
			// we do not want to stop uploading of dbs due to failures in snapshotting them so logging just the error here.
			level.Error(util_log.Logger).Log("msg", "failed to snapshot table for reads", "table", table.name, "err", err)
		}

		err = table.Upload(ctx, force)
		if err != nil {
			// continue uploading other tables while skipping cleanup for a failed one.
			status = statusFailure
			level.Error(util_log.Logger).Log("msg", "failed to upload dbs", "table", table.name, "err", err)
			continue
		}

		// cleanup unwanted dbs from the table
		err = table.Cleanup(tm.cfg.DBRetainPeriod)
		if err != nil {
			// we do not want to stop uploading of dbs due to failures in cleaning them up so logging just the error here.
			level.Error(util_log.Logger).Log("msg", "failed to cleanup uploaded dbs past their retention period", "table", table.name, "err", err)
		}
	}

	tm.metrics.tablesUploadOperationTotal.WithLabelValues(status).Inc()
}
// loadTables scans cfg.IndexDir and loads every table found there. Legacy
// single-file tables are first migrated into a directory of the same name.
// Empty tables are removed from disk, and every loaded table is snapshotted
// immediately so it is ready to serve queries.
func (tm *TableManager) loadTables() (map[string]*Table, error) {
	localTables := make(map[string]*Table)
	filesInfo, err := ioutil.ReadDir(tm.cfg.IndexDir)
	if err != nil {
		return nil, err
	}

	// regex matching table name patterns, i.e prefix+period_number
	re, err := regexp.Compile(`.+[0-9]+$`)
	if err != nil {
		return nil, err
	}

	for _, fileInfo := range filesInfo {
		if !re.MatchString(fileInfo.Name()) {
			continue
		}

		// since we are moving to keeping files for same table in a folder, if current element is a file we need to move it inside a directory with the same name
		// i.e file index_123 would be moved to path index_123/index_123.
		if !fileInfo.IsDir() {
			level.Info(util_log.Logger).Log("msg", fmt.Sprintf("found a legacy file %s, moving it to folder with same name", fileInfo.Name()))
			filePath := filepath.Join(tm.cfg.IndexDir, fileInfo.Name())

			// create a folder with .temp suffix since we can't create a directory with same name as file.
			tempDirPath := filePath + ".temp"
			if err := chunk_util.EnsureDirectory(tempDirPath); err != nil {
				return nil, err
			}

			// move the file to temp dir.
			if err := os.Rename(filePath, filepath.Join(tempDirPath, fileInfo.Name())); err != nil {
				return nil, err
			}

			// rename the directory to name of the file
			if err := os.Rename(tempDirPath, filePath); err != nil {
				return nil, err
			}
		}

		level.Info(util_log.Logger).Log("msg", fmt.Sprintf("loading table %s", fileInfo.Name()))
		table, err := LoadTable(filepath.Join(tm.cfg.IndexDir, fileInfo.Name()), tm.cfg.Uploader, tm.storageClient,
			tm.boltIndexClient, tm.cfg.MakePerTenantBuckets, tm.metrics)
		if err != nil {
			return nil, err
		}

		if table == nil {
			// if table is nil it means it has no files in it so remove the folder for that table.
			err := os.Remove(filepath.Join(tm.cfg.IndexDir, fileInfo.Name()))
			if err != nil {
				level.Error(util_log.Logger).Log("msg", "failed to remove empty table folder", "table", fileInfo.Name(), "err", err)
			}
			continue
		}

		// Queries are only done against table snapshots so it's important we snapshot as soon as the table is loaded.
		err = table.Snapshot()
		if err != nil {
			return nil, err
		}

		localTables[fileInfo.Name()] = table
	}

	return localTables, nil
}
| grafana/loki | pkg/storage/stores/shipper/uploads/table_manager.go | GO | agpl-3.0 | 7,136 |
/*
* SessionShiny.cpp
*
* Copyright (C) 2009-12 by RStudio, Inc.
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
#include "SessionShiny.hpp"
#include <boost/algorithm/string/predicate.hpp>
#include <core/Error.hpp>
#include <core/Exec.hpp>
#include <r/RExec.hpp>
#include <session/SessionOptions.hpp>
#include <session/SessionModuleContext.hpp>
using namespace core;
namespace session {
namespace modules {
namespace shiny {
namespace {
// Warn when an outdated shiny package is loaded in server mode; we need an
// up-to-date version to get the websocket protocol/path and port randomizing
// changes.
void onPackageLoaded(const std::string& pkgname)
{
   if (session::options().programMode() != kSessionProgramModeServer)
      return;

   if (pkgname != "shiny")
      return;

   if (!module_context::isPackageVersionInstalled("shiny", "0.8"))
   {
      module_context::consoleWriteError("\nWARNING: To run Shiny "
           "applications with RStudio you need to install the "
           "latest version of the Shiny package from CRAN (version 0.8 "
           "or higher is required).\n\n");
   }
}
// A directory is considered a Shiny app dir when it contains a server.R
// alongside either a ui.R or a www directory.
bool isShinyAppDir(const FilePath& filePath)
{
   bool hasServer = filePath.childPath("server.R").exists() ||
                    filePath.childPath("server.r").exists();
   if (!hasServer)
      return false;

   return filePath.childPath("ui.R").exists() ||
          filePath.childPath("ui.r").exists() ||
          filePath.childPath("www").exists();
}
// Classify a source document as "shiny" when it is a ui.R/server.R with the
// corresponding shinyUI/shinyServer call, or any .R file living inside a
// Shiny app directory. Returns an empty string otherwise.
std::string onDetectShinySourceType(
      boost::shared_ptr<source_database::SourceDocument> pDoc)
{
   const char * const kShinyType = "shiny";

   if (pDoc->path().empty())
      return std::string();

   FilePath filePath = module_context::resolveAliasedPath(pDoc->path());
   std::string filename = filePath.filename();

   bool isShinyUi = boost::algorithm::iequals(filename, "ui.r") &&
                    boost::algorithm::icontains(pDoc->contents(), "shinyUI");
   bool isShinyServer = boost::algorithm::iequals(filename, "server.r") &&
                        boost::algorithm::icontains(pDoc->contents(), "shinyServer");

   if (isShinyUi || isShinyServer)
      return kShinyType;

   if (filePath.extensionLowerCase() == ".r" && isShinyAppDir(filePath.parent()))
      return kShinyType;

   return std::string();
}
// RPC handler: reports whether the shiny package is installed, as a JSON
// object of the form {"installed": <bool>}.
Error getShinyCapabilities(const json::JsonRpcRequest& request,
                           json::JsonRpcResponse* pResponse)
{
   json::Object capsJson;
   capsJson["installed"] = module_context::isPackageInstalled("shiny");
   pResponse->setResult(capsJson);

   return Success();
}
} // anonymous namespace
// Module entry point: wires up package-load warnings, source-type detection
// (only when a new-enough shiny is available) and the capabilities RPC.
Error initialize()
{
   using namespace module_context;
   events().onPackageLoaded.connect(onPackageLoaded);

   // run app features require shiny v0.8 (the version where the
   // shiny.launch.browser option can be a function)
   if (module_context::isPackageVersionInstalled("shiny", "0.8"))
      events().onDetectSourceExtendedType.connect(onDetectShinySourceType);

   ExecBlock initBlock;
   initBlock.addFunctions()
      (boost::bind(registerRpcMethod, "get_shiny_capabilities", getShinyCapabilities));

   return initBlock.execute();
}
} // namespace shiny
} // namespace modules
} // namespace session
| nvoron23/rstudio | src/cpp/session/modules/shiny/SessionShiny.cpp | C++ | agpl-3.0 | 3,808 |
from django import forms
# future use | DemocracyFoundation/Epitome | Agora/forms.py | Python | agpl-3.0 | 40 |
/**
* Copyright (C) 2009-2014 BIMserver.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package cn.dlb.bim.models.ifc4;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.eclipse.emf.common.util.Enumerator;
/**
* <!-- begin-user-doc -->
* A representation of the literals of the enumeration '<em><b>Ifc Sub Contract Resource Type Enum</b></em>',
* and utility methods for working with them.
* <!-- end-user-doc -->
* @see cn.dlb.bim.models.ifc4.Ifc4Package#getIfcSubContractResourceTypeEnum()
* @model
* @generated
*/
public enum IfcSubContractResourceTypeEnum implements Enumerator {
	/**
	 * The '<em><b>NULL</b></em>' literal object.
	 * @see #NULL_VALUE
	 * @generated
	 * @ordered
	 */
	NULL(0, "NULL", "NULL"),

	/**
	 * The '<em><b>NOTDEFINED</b></em>' literal object.
	 * @see #NOTDEFINED_VALUE
	 * @generated
	 * @ordered
	 */
	NOTDEFINED(1, "NOTDEFINED", "NOTDEFINED"),

	/**
	 * The '<em><b>WORK</b></em>' literal object.
	 * @see #WORK_VALUE
	 * @generated
	 * @ordered
	 */
	WORK(2, "WORK", "WORK"),

	/**
	 * The '<em><b>USERDEFINED</b></em>' literal object.
	 * @see #USERDEFINED_VALUE
	 * @generated
	 * @ordered
	 */
	USERDEFINED(3, "USERDEFINED", "USERDEFINED"),

	/**
	 * The '<em><b>PURCHASE</b></em>' literal object.
	 * @see #PURCHASE_VALUE
	 * @generated
	 * @ordered
	 */
	PURCHASE(4, "PURCHASE", "PURCHASE");

	/**
	 * The '<em><b>NULL</b></em>' literal value.
	 * @see #NULL
	 * @model
	 * @generated
	 * @ordered
	 */
	public static final int NULL_VALUE = 0;

	/**
	 * The '<em><b>NOTDEFINED</b></em>' literal value.
	 * @see #NOTDEFINED
	 * @model
	 * @generated
	 * @ordered
	 */
	public static final int NOTDEFINED_VALUE = 1;

	/**
	 * The '<em><b>WORK</b></em>' literal value.
	 * @see #WORK
	 * @model
	 * @generated
	 * @ordered
	 */
	public static final int WORK_VALUE = 2;

	/**
	 * The '<em><b>USERDEFINED</b></em>' literal value.
	 * @see #USERDEFINED
	 * @model
	 * @generated
	 * @ordered
	 */
	public static final int USERDEFINED_VALUE = 3;

	/**
	 * The '<em><b>PURCHASE</b></em>' literal value.
	 * @see #PURCHASE
	 * @model
	 * @generated
	 * @ordered
	 */
	public static final int PURCHASE_VALUE = 4;

	/**
	 * An array of all the '<em><b>Ifc Sub Contract Resource Type Enum</b></em>' enumerators.
	 * @generated
	 */
	private static final IfcSubContractResourceTypeEnum[] VALUES_ARRAY =
		new IfcSubContractResourceTypeEnum[] { NULL, NOTDEFINED, WORK, USERDEFINED, PURCHASE, };

	/**
	 * A public read-only list of all the '<em><b>Ifc Sub Contract Resource Type Enum</b></em>' enumerators.
	 * @generated
	 */
	public static final List<IfcSubContractResourceTypeEnum> VALUES =
		Collections.unmodifiableList(Arrays.asList(VALUES_ARRAY));

	/**
	 * Returns the '<em><b>Ifc Sub Contract Resource Type Enum</b></em>' literal with the specified literal value.
	 * @param literal the literal.
	 * @return the matching enumerator or <code>null</code>.
	 * @generated
	 */
	public static IfcSubContractResourceTypeEnum get(String literal) {
		for (IfcSubContractResourceTypeEnum value : VALUES_ARRAY) {
			if (value.toString().equals(literal)) {
				return value;
			}
		}
		return null;
	}

	/**
	 * Returns the '<em><b>Ifc Sub Contract Resource Type Enum</b></em>' literal with the specified name.
	 * @param name the name.
	 * @return the matching enumerator or <code>null</code>.
	 * @generated
	 */
	public static IfcSubContractResourceTypeEnum getByName(String name) {
		for (IfcSubContractResourceTypeEnum value : VALUES_ARRAY) {
			if (value.getName().equals(name)) {
				return value;
			}
		}
		return null;
	}

	/**
	 * Returns the '<em><b>Ifc Sub Contract Resource Type Enum</b></em>' literal with the specified integer value.
	 * @param value the integer value.
	 * @return the matching enumerator or <code>null</code>.
	 * @generated
	 */
	public static IfcSubContractResourceTypeEnum get(int value) {
		switch (value) {
			case NULL_VALUE:
				return NULL;
			case NOTDEFINED_VALUE:
				return NOTDEFINED;
			case WORK_VALUE:
				return WORK;
			case USERDEFINED_VALUE:
				return USERDEFINED;
			case PURCHASE_VALUE:
				return PURCHASE;
		}
		return null;
	}

	// Numeric value, name and literal backing each enumerator.
	private final int value;
	private final String name;
	private final String literal;

	/**
	 * Only this class can construct instances.
	 * @generated
	 */
	private IfcSubContractResourceTypeEnum(int value, String name, String literal) {
		this.value = value;
		this.name = name;
		this.literal = literal;
	}

	/**
	 * @generated
	 */
	public int getValue() {
		return value;
	}

	/**
	 * @generated
	 */
	public String getName() {
		return name;
	}

	/**
	 * @generated
	 */
	public String getLiteral() {
		return literal;
	}

	/**
	 * Returns the literal value of the enumerator, which is its string representation.
	 * @generated
	 */
	@Override
	public String toString() {
		return literal;
	}

} //IfcSubContractResourceTypeEnum
| shenan4321/BIMplatform | generated/cn/dlb/bim/models/ifc4/IfcSubContractResourceTypeEnum.java | Java | agpl-3.0 | 7,689 |
# frozen_string_literal: true
require 'rails_helper'
# Request specs for Api::LessonsController: reads are public, writes require
# an authenticated user with rights on the owning organization.
# NOTE(review): `organization`, `owner` and `set_login_header_as` appear to be
# provided by RequestSpecUserSetup — confirm in that module.
describe Api::LessonsController, type: :request do
  include RequestSpecUserSetup

  # Anonymous users: reads succeed (200), writes are rejected (401).
  context 'is not logged in' do
    it 'can not create' do
      post '/api/lessons', {}
      expect(response.status).to eq 401
    end

    context 'data exists' do
      let(:lesson) { create(:lesson, host: organization) }

      it 'can read all' do
        get "/api/lessons?organization_id=#{organization.id}"
        expect(response.status).to eq 200
      end

      it 'can read' do
        get "/api/lessons/#{lesson.id}"
        expect(response.status).to eq 200
      end

      it 'can not delete' do
        delete "/api/lessons/#{lesson.id}"
        expect(response.status).to eq 401
      end

      it 'can not update' do
        put "/api/lessons/#{lesson.id}", {}
        expect(response.status).to eq 401
      end
    end
  end

  context 'is logged in' do
    # for each permission set (owner, collaborator, admin)
    context 'for each permission set' do
      # users = [owner, collaborator, admin]
      # Owners: full CRUD access.
      context 'is owner' do
        before(:each) do
          set_login_header_as.call(owner)
        end

        context 'creating' do
          it 'can create' do
            create_params = jsonapi_params('lessons',
                                           attributes: { name: 'hi', price: '2' },
                                           relationships: { host: organization })
            post '/api/lessons', create_params, @headers
            expect(response.status).to eq 201
          end

          it 'creates a lesson' do
            create_params = jsonapi_params('lessons',
                                           attributes: { name: 'hi', price: '2' },
                                           relationships: { host: organization })
            expect do
              post '/api/lessons', create_params, @headers
            end.to change(LineItem::Lesson, :count).by 1
          end
        end

        context 'on existing' do
          let!(:lesson) { create(:lesson, host: organization) }

          it 'can update' do
            put "/api/lessons/#{lesson.id}",
                jsonapi_params('lessons', id: lesson.id, attributes: { name: 'hi' }),
                @headers
            expect(response.status).to eq 200
          end

          it 'can destroy' do
            delete "/api/lessons/#{lesson.id}", {}, @headers
            expect(response.status).to eq 200
          end

          it 'destroys' do
            expect do
              delete "/api/lessons/#{lesson.id}", {}, @headers
            end.to change(LineItem::Lesson, :count).by(-1)
          end

          it 'can read all' do
            get "/api/lessons?organization_id=#{organization.id}", {}, @headers
            expect(response.status).to eq 200
          end
        end
      end
    end

    # Authenticated but unrelated users: reads allowed, writes forbidden (403).
    context 'is non collaborator' do
      before(:each) do
        set_login_header_as.call(create_confirmed_user)
      end

      it 'can not create' do
        create_params = {
          data: {
            type: 'lessons',
            attributes: {
              name: 'Yoga',
              price: '10',
              host_type: Organization.name,
              host_id: organization.id
            }
          }
        }

        post '/api/lessons', create_params, @headers
        expect(response.status).to eq 403
      end

      context 'data exists' do
        let(:lesson) { create(:lesson, host: organization) }
        let(:fake_json_api) do
          {
            data: {
              type: 'lessons',
              id: lesson.id,
              attributes: {}
            }
          }
        end

        it 'can read all' do
          get "/api/lessons?organization_id=#{organization.id}", {}, @headers
          expect(response.status).to eq 200
        end

        it 'can read' do
          get "/api/lessons/#{lesson.id}", {}, @headers
          expect(response.status).to eq 200
        end

        it 'can not delete' do
          delete "/api/lessons/#{lesson.id}", {}, @headers
          expect(response.status).to eq 403
        end

        it 'can not update' do
          put "/api/lessons/#{lesson.id}", fake_json_api, @headers
          expect(response.status).to eq 403
        end
      end
    end
  end
end
| NullVoxPopuli/aeonvera | app/resources/api/lessons/request_spec.rb | Ruby | agpl-3.0 | 4,322 |
<?php
// Guard against direct script access outside of Sugar's entry points.
if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point');
global $mod_strings;
// Module menu for the Teams module. Each entry is an array of
// [url, display label, third value] — the third value presumably keys the
// menu icon/ACL action ("CreateTeams"/"Teams"); confirm against the menu
// rendering code.
$module_menu = Array(
		Array("index.php?module=Teams&action=EditView&return_module=Teams&return_action=DetailView", $mod_strings['LNK_NEW_TEAM'], "CreateTeams"),
		Array("index.php?module=Teams&action=index", $mod_strings['LNK_LIST_TEAM'], "Teams"),
		Array("index.php?module=TeamNotices&action=index", $mod_strings['LNK_LIST_TEAMNOTICE'], "Teams"),
		Array("index.php?module=TeamNotices&action=EditView", translate('LNK_NEW_TEAM_NOTICE','TeamNotices'), "Teams")
	);
///*
// * Tanaguru - Automated webpage assessment
// * Copyright (C) 2008-2017 Tanaguru.org
// *
// * This program is free software: you can redistribute it and/or modify
// * it under the terms of the GNU Affero General Public License as
// * published by the Free Software Foundation, either version 3 of the
// * License, or (at your option) any later version.
// *
// * This program is distributed in the hope that it will be useful,
// * but WITHOUT ANY WARRANTY; without even the implied warranty of
// * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// * GNU Affero General Public License for more details.
// *
// * You should have received a copy of the GNU Affero General Public License
// * along with this program. If not, see <http://www.gnu.org/licenses/>.
// *
// * Contact us by mail: tanaguru AT tanaguru DOT org
// */
//package org.tanaguru.rules.rgaa32017;
//
//import org.apache.commons.lang3.tuple.ImmutablePair;
//import org.tanaguru.entity.audit.ProcessResult;
//import org.tanaguru.entity.audit.TestSolution;
//import org.tanaguru.rules.keystore.HtmlElementStore;
//import org.tanaguru.rules.keystore.RemarkMessageStore;
//import org.tanaguru.rules.rgaa32017.test.Rgaa32017RuleImplementationTestCase;
//
///**
// * Unit test class for the implementation of the rule 10-9-1 of the referential Rgaa 3-2017.
// *
// * @author
// */
//public class Rgaa32017Rule100901Test extends Rgaa32017RuleImplementationTestCase {
//
// /**
// * Default constructor
// * @param testName
// */
// public Rgaa32017Rule100901Test (String testName){
// super(testName);
// }
//
// @Override
// protected void setUpRuleImplementationClassName() {
// setRuleImplementationClassName(
// "org.tanaguru.rules.rgaa32017.Rgaa32017Rule100901");
// }
//
// @Override
// protected void setUpWebResourceMap() {
// addWebResource("Rgaa32017.Test.10.9.1-1Passed-01");
// addWebResource("Rgaa32017.Test.10.9.1-2Failed-01");
// addWebResource("Rgaa32017.Test.10.9.1-2Failed-02");
// // addWebResource("Rgaa32017.Test.10.9.1-3NMI-01");
//// addWebResource("Rgaa32017.Test.10.9.1-4NA-01");
// }
//
// @Override
// protected void setProcess() {
// //----------------------------------------------------------------------
// //------------------------------1Passed-01------------------------------
// //----------------------------------------------------------------------
// // checkResultIsPassed(processPageTest("Rgaa32017.Test.10.9.1-1Passed-01"), 0);
//
// //----------------------------------------------------------------------
// //------------------------------2Failed-01------------------------------
// //----------------------------------------------------------------------
// ProcessResult processResult = processPageTest("Rgaa32017.Test.10.9.1-2Failed-01");
// checkResultIsFailed(processResult, 1, 1);
//// checkRemarkIsPresent(
//// processResult,
//// TestSolution.FAILED,
//// CHECK_IF_USER_HAVE_MECHANISM_TO_DELETE_JUSTIFY_TEXT_ALIGN_MSG,
//// "h1",
//// 1,
//// new ImmutablePair("#ExtractedAttributeAsEvidence", "#ExtractedAttributeValue"));
// //----------------------------------------------------------------------
// //------------------------------2Failed-02------------------------------
// //----------------------------------------------------------------------
// processResult = processPageTest("Rgaa32017.Test.10.9.1-2Failed-02");
// checkResultIsFailed(processResult, 1, 1);
//// checkRemarkIsPresent(
//// processResult,
//// TestSolution.FAILED,
//// RemarkMessageStore.CHECK_IF_USER_HAVE_MECHANISM_TO_DELETE_JUSTIFY_TEXT_ALIGN_MSG,
//// HtmlElementStore.P_ELEMENT,
//// 1,
//// new ImmutablePair("#ExtractedAttributeAsEvidence", "#ExtractedAttributeValue"));
//
// //----------------------------------------------------------------------
// //------------------------------3NMI-01---------------------------------
// //----------------------------------------------------------------------
//// ProcessResult processResult = processPageTest("Rgaa32017.Test.10.9.1-3NMI-01");
//// checkResultIsNotTested(processResult); // temporary result to make the result buildable before implementation
//// checkResultIsPreQualified(processResult, 1, 1);
//// checkRemarkIsPresent(
//// processResult,
//// TestSolution.NEED_MORE_INFO,
//// CHECK_IF_USER_HAVE_MECHANISM_TO_DELETE_JUSTIFY_TEXT_ALIGN_MSG,
//// "p",
//// 1);
//
//
// //----------------------------------------------------------------------
// //------------------------------4NA-01------------------------------
// //----------------------------------------------------------------------
//// checkResultIsNotApplicable(processPageTest("Rgaa32017.Test.10.9.1-4NA-01"));
// }
//
// @Override
// protected void setConsolidate() {
//
// // The consolidate method can be removed when real implementation is done.
// // The assertions are automatically tested regarding the file names by
// // the abstract parent class
//// assertEquals(TestSolution.NOT_TESTED,
//// consolidate("Rgaa32017.Test.10.9.1-3NMI-01").getValue());
// }
//
//}
| Tanaguru/Tanaguru | rules/rgaa3-2017/src/test/java/org/tanaguru/rules/rgaa32017/Rgaa32017Rule100901Test.java | Java | agpl-3.0 | 5,642 |
// Re-export the NPS component as this directory's default export.
export { NPS as default } from './NPS'
| botpress/botpress | packages/ui-admin/src/app/NetPromoterScore/NetPromotingScore/index.tsx | TypeScript | agpl-3.0 | 47 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Media
* @subpackage ID3
* @copyright Copyright (c) 2005-2009 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id: Tpos.php 177 2010-03-09 13:13:34Z svollbehr $
*/
/**#@+ @ignore */
require_once DEDALO_ROOT . '/lib/Zend/Media/Id3/TextFrame.php';
/**#@-*/
/**
* The <i>Number of a set</i> frame is a numeric string that describes which part
* of a set the audio came from. This frame is used if the source described in
* the {@link Zend_Media_Id3_Frame_Talb TALB} frame is divided into several
* mediums, e.g. a double CD. The value may be extended with a '/' character and
* a numeric string containing the total number of parts in the set. E.g. '1/2'.
*
* @category Zend
* @package Zend_Media
* @subpackage ID3
* @author Sven Vollbehr <sven@vollbehr.eu>
* @copyright Copyright (c) 2005-2009 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id: Tpos.php 177 2010-03-09 13:13:34Z svollbehr $
*/
final class Zend_Media_Id3_Frame_Tpos extends Zend_Media_Id3_TextFrame
{
    // Part number within the set, parsed from text of the form "N" or "N/T".
    private $_number;

    // Total number of parts in the set (optional "T" component), if present.
    private $_total;

    /**
     * Constructs the class with given parameters and parses object related
     * data.
     *
     * @param Zend_Io_Reader $reader The reader object.
     * @param Array $options The options array.
     */
    public function __construct($reader = null, &$options = array())
    {
        Zend_Media_Id3_Frame::__construct($reader, $options);

        $this->setEncoding(Zend_Media_Id3_Encoding::ISO88591);

        if ($this->_reader === null) {
            return;
        }

        $this->_reader->skip(1);
        $this->setText($this->_reader->readString8($this->_reader->getSize()));
        // Split "N/T" into number and total; the @ suppresses the notice when
        // no "/" is present and only the number is set.
        @list ($this->_number, $this->_total) = explode("/", $this->getText());
    }

    /**
     * Returns the number.
     *
     * @return integer
     */
    public function getNumber()
    {
        return intval($this->_number);
    }

    /**
     * Sets the number, preserving any previously set total ("N/T" form).
     *
     * @param integer $part The part number.
     */
    public function setNumber($part)
    {
        $this->setText
            ($this->_number = strval($part) .
                 ($this->_total ? '/' . $this->_total : ''),
             Zend_Media_Id3_Encoding::ISO88591);
    }

    /**
     * Returns the total number.
     *
     * @return integer
     */
    public function getTotal()
    {
        return intval($this->_total);
    }

    /**
     * Sets the total number; an unknown part number is rendered as '?'.
     *
     * @param integer $total The total number.
     */
    public function setTotal($total)
    {
        $this->setText
            (($this->_number ? $this->_number : '?') . "/" .
             ($this->_total = strval($total)),
             Zend_Media_Id3_Encoding::ISO88591);
    }
}
| renderpci/dedalo-4 | lib/Zend/Media/Id3/Frame/Tpos.php | PHP | agpl-3.0 | 3,380 |
<?php
/* @var $this AenderungsantraegeController */
/* @var $model Aenderungsantrag */
// Admin view: "create" form for an amendment (Aenderungsantrag).
// Breadcrumb trail: Administration -> object list -> Create.
$this->breadcrumbs = array(
    Yii::t('app', 'Administration') => $this->createUrl('/admin/index'),
    $model->label(2) => array('index'),
    Yii::t('app', 'Create'),
);
// Side menu: back to the list view and to the searchable admin grid.
$this->menu = array(
    array('label' => $model->label(2), 'url' => array('index'), "icon" => "home"),
    array('label' => "Durchsuchen", 'url' => array('admin'), "icon" => "th-list"),
);
?>
<h1><?php echo GxHtml::encode($model->label()) . ' ' . Yii::t('app', 'Create'); ?></h1>
<?php
// Delegate the actual form markup to the shared _form partial in "create" mode.
$this->renderPartial('_form', array(
    'model' => $model,
    'buttons' => 'create'));
?>
| joriki/antragsgruen | protected/views/admin/aenderungsantraege/create.php | PHP | agpl-3.0 | 639 |
'use strict';
/**
* @ngdoc directive
* @name GO.Core.CustomFields.goCustomFieldsEdit
*
* @description
* Prints custom fields form fieldsets.
*
*
* @param {string} ngModel The customFields model property of the model the customFields belong to
* @param {string} serverModel The custom fields server model.
*
* @example
* <go-custom-fields-edit ng-model="contact.customFields" server-model="GO\Modules\GroupOffice\Contacts\Model\ContactCustomFields"></go-custom-fields-edit>
*/
angular.module('GO.Core').directive('goCustomFieldsEdit', [
	'$templateCache',
	'$compile',
	'GO.Core.Directives.CustomFields',
	function ($templateCache, $compile, CustomFields) {
		// Builds the complete markup: one <fieldset> per custom field set,
		// with each field rendered by its type-specific builder below.
		var buildTemplate = function (customFieldSetStore) {
			var tpl = '';
			for (var i = 0, l = customFieldSetStore.items.length; i < l; i++) {
				var fieldSet = customFieldSetStore.items[i];
				tpl += '<fieldset><h3>{{::"' + fieldSet.name + '" | goT}}</h3>';
				for (var n = 0, cl = fieldSet.fields.length; n < cl; n++) {
					var field = fieldSet.fields[n];
					// Dispatch on field.type ("text", "select", "date", ...).
					tpl += buildFunctions[field.type](field);
				}
				tpl += '</fieldset>';
			}
			return tpl;
		};
		// Map of field type -> function returning the HTML for one field.
		// Each builder binds the input to goModel[databaseName] and honours
		// the field's "required" flag and hint text.
		var buildFunctions = {
			formName: null,
			text: function (field) {
				return '<md-input-container class="md-block">\
						<md-icon>star</md-icon>\
						<label>{{::"' + field.name + '" | goT}}</label>\
						<input name="' + field.databaseName + '" type="text" maxlength="' + field.data.maxLength + '" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '" />\
						<md-hint>{{::"'+field.hintText+'" | goT}}</md-hint>\
						<div ng-messages="formController.' + field.databaseName + '.$error" role="alert">\
							<div ng-message="required">\
								{{::"This field is required" | goT}}\
							</div>\
						</div>\
					</md-input-container>';
			},
			textarea: function (field) {
				return '<md-input-container class="md-block">\
						<md-icon>star</md-icon>\
						<label>{{::"' + field.name + '" | goT}}</label>\
						<textarea id="' + field.databaseName + '" name="' + field.databaseName + '" maxlength="' + field.data.maxLength + '" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '"></textarea>\
						<md-hint>{{::"'+field.hintText+'" | goT}}</md-hint>\
						<div ng-messages="formController.' + field.databaseName + '.$error" role="alert">\
							<div ng-message="required">\
								{{::"This field is required" | goT}}\
							</div>\
						</div>\
					</md-input-container>';
			},
			select: function (field) {
				// Options come from field.data.options and are rendered inline.
				var tpl = '<md-input-container class="md-block">\
						<md-icon>star</md-icon>\
						<label>{{::"' + field.name + '" | goT}}</label>\
						<md-select name="' + field.databaseName + '" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '">';
				for (var i = 0, l = field.data.options.length; i < l; i++) {
					tpl += '<md-option value="' + field.data.options[i] + '">{{::"' + field.data.options[i] + '" | goT}}</md-option>';
				}
				tpl += '</md-select>\
						<md-hint>{{::"'+field.hintText+'" | goT}}</md-hint>\
						<div class="md-errors-spacer"></div>\
						<div ng-messages="formController.' + field.databaseName + '.$error" role="alert">\
							<div ng-message="required">\
								{{::"This field is required" | goT}}\
							</div>\
						</div>';
				tpl += '</md-input-container>';
				return tpl;
			},
			checkbox: function (field) {
				return '<md-input-container class="md-block">\
						<md-checkbox id="cf_{{field.id}}" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '"> {{::"' + field.name + '" | goT}}</md-checkbox>\
						<md-hint>{{::"'+field.hintText+'" | goT}}</md-hint>\
					</md-input-container>';
			},
			date: function (field) {
				return '<go-date-picker id="cf_{{field.id}}" name="dateOfBirth" hint="{{::\''+field.hintText+'\' | goT }}" label="' + field.name + '" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '"></go-date-picker>';
			},
			number: function (field) {
				return '<md-input-container class="md-block">\
						<md-icon>star</md-icon>\
						<label>{{::"' + field.name + '" | goT}}</label>\
						<input go-number id="cf_{{field.id}}" name="' + field.databaseName + '" type="text" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '" />\
						<md-hint>{{::"'+field.hintText+'" | goT}}</md-hint>\
						<div ng-messages="formController.' + field.databaseName + '.$error" role="alert">\
							<div ng-message="required">\
								{{::"This field is required" | goT}}\
							</div>\
						</div>\
					</md-input-container>';
			}
		};
		return {
			restrict: 'E',
			scope: {
				goModel: '=ngModel',
				serverModel: '@',
				formController: '='
			},
			link: function (scope, element, attrs) {
				// Field set definitions are fetched (and cached) per server model.
				var customFieldSetStore = CustomFields.getFieldSetStore(attrs.serverModel);
				//TODO load is called twice now
				customFieldSetStore.promise.then(function () {
					// Build the markup once the store is loaded, then compile it
					// against this directive's isolate scope.
					var tpl = buildTemplate(customFieldSetStore);
					element.html(tpl);
					$compile(element.contents())(scope);
				});
			}
		};
	}]);
/*
Copyright (C) 2017 Cloudbase Solutions SRL
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import React from 'react'
import { observer } from 'mobx-react'
import styled from 'styled-components'
import autobind from 'autobind-decorator'
import SearchInput from '../SearchInput'
import Palette from '../../styleUtils/Palette'
import filterImage from './images/filter'
// Shared border used by both the dropdown panel and its pointer tip.
const border = '1px solid rgba(216, 219, 226, 0.4)'
// Positioning context for the absolutely-positioned dropdown list.
const Wrapper = styled.div<any>`
  position: relative;
  margin-top: -1px;
`
// The 16x16 clickable filter icon that toggles the dropdown.
const Button = styled.div<any>`
  width: 16px;
  height: 16px;
  cursor: pointer;
  display: flex;
  justify-content: center;
  align-items: center;
`
// Floating panel anchored below the button, holding the search input.
const List = styled.div<any>`
  position: absolute;
  top: 24px;
  right: -7px;
  z-index: 9999;
  padding: 8px;
  background: ${Palette.grayscale[1]};
  border-radius: 4px;
  border: ${border};
  box-shadow: 0 0 4px 0 rgba(32, 34, 52, 0.13);
`
// Small rotated square rendered as the panel's pointer arrow.
const Tip = styled.div<any>`
  position: absolute;
  top: -6px;
  right: 8px;
  width: 10px;
  height: 10px;
  background: ${Palette.grayscale[1]};
  border-top: ${border};
  border-left: ${border};
  border-bottom: 1px solid transparent;
  border-right: 1px solid transparent;
  transform: rotate(45deg);
`
// Fixed-size container for the embedded SearchInput.
const ListItems = styled.div<any>`
  width: 199px;
  height: 32px;
`
type Props = {
  // Placeholder shown in the search input (defaults to 'Filter').
  searchPlaceholder?: string,
  // Current filter value; a non-empty value also tints the icon.
  searchValue?: string,
  // Called with the new value on every search-input change.
  onSearchChange?: (value: string) => void,
}
type State = {
  // Whether the floating filter panel is currently visible.
  showDropdownList: boolean
}
@observer
@observer
class DropdownFilter extends React.Component<Props, State> {
  static defaultProps = {
    searchPlaceholder: 'Filter',
  }

  state: State = {
    showDropdownList: false,
  }

  // True while a mousedown started inside the dropdown or on the button,
  // so the window-level handler knows not to treat it as an outside click.
  itemMouseDown: boolean | undefined

  componentDidMount() {
    window.addEventListener('mousedown', this.handlePageClick, false)
  }

  componentWillUnmount() {
    window.removeEventListener('mousedown', this.handlePageClick, false)
  }

  // Window-level mousedown handler: any press outside the component closes
  // the list.  @autobind keeps a stable reference so the exact same
  // function can be removed on unmount.
  @autobind
  handlePageClick() {
    if (!this.itemMouseDown) {
      this.setState({ showDropdownList: false })
    }
  }

  // Toggles the dropdown when the filter icon is clicked.
  handleButtonClick() {
    this.setState(prevState => ({ showDropdownList: !prevState.showDropdownList }))
  }

  handleCloseClick() {
    this.setState({ showDropdownList: false })
  }

  // Renders the floating panel with the embedded search input, or nothing
  // while the dropdown is closed.
  renderList() {
    if (!this.state.showDropdownList) {
      return null
    }

    return (
      <List
        onMouseDown={() => { this.itemMouseDown = true }}
        onMouseUp={() => { this.itemMouseDown = false }}
        data-test-id="dropdownFilter-list"
      >
        <Tip />
        <ListItems>
          <SearchInput
            width="100%"
            alwaysOpen
            placeholder={this.props.searchPlaceholder}
            value={this.props.searchValue}
            onChange={this.props.onSearchChange}
            useFilterIcon
            focusOnMount
            disablePrimary
            onCloseClick={() => { this.handleCloseClick() }}
          />
        </ListItems>
      </List>
    )
  }

  // Renders the filter icon; tinted with the primary colour while a filter
  // value is active, grey otherwise.
  renderButton() {
    return (
      <Button
        data-test-id="dropdownFilter-button"
        onMouseDown={() => { this.itemMouseDown = true }}
        onMouseUp={() => { this.itemMouseDown = false }}
        onClick={() => { this.handleButtonClick() }}
        dangerouslySetInnerHTML={{
          __html:
          filterImage(this.props.searchValue ? Palette.primary : Palette.grayscale[5]),
        }}
      />
    )
  }

  render() {
    return (
      <Wrapper>
        {this.renderButton()}
        {this.renderList()}
      </Wrapper>
    )
  }
}
export default DropdownFilter
| aznashwan/coriolis-web | src/components/molecules/DropdownFilter/DropdownFilter.tsx | TypeScript | agpl-3.0 | 4,086 |
"""
Block Depth Transformer
"""
from __future__ import absolute_import
from openedx.core.djangoapps.content.block_structure.transformer import BlockStructureTransformer
class BlockDepthTransformer(BlockStructureTransformer):
    """
    Annotates every block in the structure with its depth: the number of
    edges on the shortest path from the root.  In a DAG where several paths
    reach the same block, the shallowest one wins.
    """
    WRITE_VERSION = 1
    READ_VERSION = 1
    BLOCK_DEPTH = 'block_depth'

    def __init__(self, requested_depth=None):
        # When not None, blocks deeper than this are pruned from the structure.
        self.requested_depth = requested_depth

    @classmethod
    def name(cls):
        return "blocks_api:block_depth"

    @classmethod
    def get_block_depth(cls, block_structure, block_key):
        """
        Return the precalculated depth of a block within the block_structure.

        Arguments:
            block_structure: a BlockStructure instance
            block_key: the key of the block whose depth we want to know

        Returns:
            int
        """
        return block_structure.get_transformer_block_field(
            block_key, cls, cls.BLOCK_DEPTH,
        )

    def transform(self, usage_info, block_structure):
        """
        Mutates block_structure based on the given usage_info.
        """
        # Topological order guarantees every parent is annotated before any
        # of its children, so a single pass suffices.
        for block_key in block_structure.topological_traversal():
            parent_keys = block_structure.get_parents(block_key)
            if parent_keys:
                shallowest = min(
                    self.get_block_depth(block_structure, parent_key)
                    for parent_key in parent_keys
                )
                depth = shallowest + 1
            else:
                # No parents: this is a root block.
                depth = 0
            block_structure.set_transformer_block_field(
                block_key, self, self.BLOCK_DEPTH, depth,
            )

        if self.requested_depth is not None:
            block_structure.remove_block_traversal(
                lambda block_key: self.get_block_depth(block_structure, block_key) > self.requested_depth
            )
| ESOedX/edx-platform | lms/djangoapps/course_api/blocks/transformers/block_depth.py | Python | agpl-3.0 | 2,059 |
#!/usr/bin/env ruby
# Exit codes:
# 0 Test run successful (even with reruns)
# 1 Unspecified error
# 2 Linting failed
# 4 No profile given
# 8 Gettext isn't installed
# 16 Gettext files did not validate
# 32 Cucumber failed
# 64 Rspec failed
# TODO: Use Open4 to continuously flush STDOUT output from the cucumber
# processes.
require 'rubygems'
require 'English'
require 'fileutils'
require 'open4'
require 'pry'
PROFILES = ['default']
# Print an error message to stdout and terminate with the given exit code.
def die(exit_code, error)
  puts(format('Error: %s', error))
  exit(exit_code)
end
# Run +cmd+ (stderr folded into stdout), stream its output to our stdout in
# 1 KiB chunks, and return the command's exit status.
def run_command(cmd)
  child_pid, child_in, child_out, _child_err = Open4.open4("#{cmd} 2>&1")
  child_in.close
  puts child_out.read(1024) until child_out.eof?
  Process.waitpid2(child_pid).last.exitstatus
end
# Whether the gettext tooling is available, i.e. msgcat is on the PATH.
# Relies on $CHILD_STATUS from the stdlib 'English' library (required at
# the top of this script); without that require the global is nil and the
# original code crashed with NoMethodError.
def gettext_installed?
  `which msgcat >> /dev/null`
  # `which` exits with status 0 when the binary is found.
  $CHILD_STATUS.exitstatus == 0
end
# Whether +file+ parses as a valid gettext file according to msgcat.
# NOTE(review): +file+ is interpolated into a shell command unescaped; the
# callers only pass repo-local paths, but Shellwords.escape would be safer.
def gettext_file_valid?(file)
  `msgcat #{file} >> /dev/null`
  # msgcat exits with status 0 when the file validates.
  $CHILD_STATUS.exitstatus == 0
end
# Validate the .pot template plus every leihs.po translation file; false as
# soon as any file fails msgcat, true otherwise.
def gettext_files_valid?
  files = ['locale/vinylla.pot'] + Dir.glob('locale/**/leihs.po')
  files.all? { |file| gettext_file_valid?(file) }
end
# Re-run failed cucumber scenarios (recorded in tmp/rerun.txt) until they
# pass or the retry budget is spent.  Returns true when +maximum+ runs have
# been used up, false when there is nothing left to rerun, and exits the
# whole process (via die with code 0) as soon as a rerun succeeds.
def rerun_cucumber(maximum = 3, run_count = 0)
  return true if run_count >= maximum
  # A previous rerun wrote its own failures to tmp/rererun.txt; promote them
  # so the next `cucumber -p rerun` invocation picks them up.
  if File.exist?('tmp/rererun.txt')
    FileUtils.mv('tmp/rererun.txt', 'tmp/rerun.txt')
  end
  # Nothing to do unless failures were actually recorded.
  return false unless File.exist?('tmp/rerun.txt') && File.size('tmp/rerun.txt') > 0
  puts 'Rerun necessary.'
  exitstatus = run_command('bundle exec cucumber -p rerun')
  run_count += 1
  if exitstatus != 0
    # Still failing: recurse with the incremented attempt counter.
    rerun_cucumber(maximum, run_count)
  else
    die(0, 'All went well after rerunning.')
  end
end
# Do we know what we're doing?
profile = ARGV[0]
if PROFILES.include?(profile) == false
  die(4, "Please specify a valid profile, one of #{PROFILES.join(', ')}.")
end
# 1. Prerequisites for testing
# We're not actually using gettext yet in Vinylla and it's undecided
# whether we will.  The checks below stay disabled until that decision.
# if not gettext_installed?
#   die(8, "Gettext isn't installed. Make sure you have gettext and \
#   msgcat and msgmerge are in your PATH.")
# end
# if not gettext_files_valid?
#   die(16, 'The gettext files did not validate.')
# end
# 2. Linting (exit code 2 on failure, see header)
exitstatus = run_command('bundle exec rubocop --lint')
die(2, 'Rubocop is disgusted. Clean up that filthy code!') if exitstatus != 0
# 3. Testing proper (exit code 64 on failure, see header)
exitstatus = run_command('bundle exec rspec')
die(64, 'Rspec failed') if exitstatus != 0
# Cucumber stage, currently disabled:
# puts 'Prerequisites for running the tests are met'
# puts 'Starting Cucumber...'
# FileUtils.rm_f(['tmp/rerun.txt', 'tmp/rererun.txt'])
# exitstatus = run_command("bundle exec cucumber -p #{profile}")
# Rerun for failures, up to n times
# if exitstatus != 0
#   rerun_cucumber(4)
# else
#   die(0, 'All went well on the very first run. The planets must be in alignment.')
# end
| psy-q/vinylla | bin/run_tests.rb | Ruby | agpl-3.0 | 2,817 |
# Copyright 2018 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import ast
from odoo import api, exceptions, models, _
class MailComposeMessage(models.TransientModel):
    _inherit = 'mail.compose.message'

    @api.model
    def _get_priorities(self):
        """
        Load priorities from parameters.
        :return: dict
        """
        key = 'mail.sending.job.priorities'
        param_env = self.env['ir.config_parameter'].sudo()
        try:
            priorities = ast.literal_eval(
                param_env.get_param(key, default='{}'))
        # Catch exception to have a understandable error message
        except (ValueError, SyntaxError):
            raise exceptions.UserError(
                _("Error to load the system parameter (%s) "
                  "of priorities") % key)
        # As literal_eval can transform str into any format, check if we
        # have a real dict
        if not isinstance(priorities, dict):
            raise exceptions.UserError(
                _("Error to load the system parameter (%s) of priorities.\n"
                  "Invalid dictionary") % key)
        return priorities

    @api.multi
    def send_mail(self, auto_commit=False):
        """
        Set a priority on subsequent generated mail.mail, using priorities
        set into the configuration.
        :return: dict/action
        """
        context = self.env.context
        active_ids = context.get('active_ids')
        if active_ids and not context.get('default_mail_job_priority'):
            priorities = self._get_priorities()
            batch_size = len(active_ids)
            # Thresholds not exceeding the batch size are candidates; the
            # largest one decides the priority.
            thresholds = [t for t in priorities if t <= batch_size]
            if thresholds:
                priority = priorities.get(max(thresholds))
                self = self.with_context(default_mail_job_priority=priority)
        return super().send_mail(auto_commit=auto_commit)
| mozaik-association/mozaik | mail_job_priority/wizards/mail_compose_message.py | Python | agpl-3.0 | 1,920 |
<?php
/**
* @package Billing
* @copyright Copyright (C) 2012-2016 BillRun Technologies Ltd. All rights reserved.
* @license GNU Affero General Public License Version 3; see LICENSE.txt
*/
/**
* This is a prototype for a services action.
*
*/
abstract class Billrun_ActionManagers_Services_Action implements Billrun_ActionManagers_IAPIAction {
	use Billrun_ActionManagers_ErrorReporter;
	// Handle to the services collection, set once in the constructor.
	// NOTE(review): presumably a Mongo collection wrapper returned by
	// Billrun_Factory::db() — confirm against the factory implementation.
	protected $collection = null;
	/**
	 * Create an instance of the ServiceAction type.
	 *
	 * @param array $params Optional construction parameters (currently unused).
	 */
	public function __construct($params = array()) {
		$this->collection = Billrun_Factory::db()->servicesCollection();
		// Error codes reported by this action family start at 1500;
		// $baseCode is presumably declared by the ErrorReporter trait — confirm.
		$this->baseCode = 1500;
	}
	/**
	 * Get the array of fields to be set in the query record from the user input.
	 * @return array - Array of fields to set.
	 */
	protected function getQueryFields() {
		return Billrun_Factory::config()->getConfigValue('services.fields');
	}
}
| BillRun/system | library/Billrun/ActionManagers/Services/Action.php | PHP | agpl-3.0 | 905 |
/*
* _ _ _
* | | | | | |
* | | __ _| |__ ___ ___ __ _| |_ Labcoat (R)
* | |/ _` | '_ \ / __/ _ \ / _` | __| Powerful development environment for Quirrel.
* | | (_| | |_) | (_| (_) | (_| | |_ Copyright (C) 2010 - 2013 SlamData, Inc.
* |_|\__,_|_.__/ \___\___/ \__,_|\__| All Rights Reserved.
*
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version
* 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this
* program. If not, see <http://www.gnu.org/licenses/>.
*
*/
define(function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var TextMode = require("./text").Mode;
var Tokenizer = require("../tokenizer").Tokenizer;
var CSharpHighlightRules = require("./csharp_highlight_rules").CSharpHighlightRules;
var MatchingBraceOutdent = require("./matching_brace_outdent").MatchingBraceOutdent;
var CstyleBehaviour = require("./behaviour/cstyle").CstyleBehaviour;
var CStyleFoldMode = require("./folding/cstyle").FoldMode;
// Ace editor mode for C#: wires together the C# highlighter, C-style brace
// outdenting, bracket-insertion behaviour and C-style code folding.
var Mode = function() {
    this.$tokenizer = new Tokenizer(new CSharpHighlightRules().getRules());
    this.$outdent = new MatchingBraceOutdent();
    this.$behaviour = new CstyleBehaviour();
    this.foldingRules = new CStyleFoldMode();
};
oop.inherits(Mode, TextMode);
(function() {
    this.lineCommentStart = "//";
    this.blockComment = {start: "/*", end: "*/"};
    // Indent for a fresh line: keep the current indent, and add one level
    // when the previous line opens a brace/paren/bracket — unless the line
    // ends inside a comment token.
    this.getNextLineIndent = function(state, line, tab) {
        var indent = this.$getIndent(line);
        var tokenizedLine = this.$tokenizer.getLineTokens(line, state);
        var tokens = tokenizedLine.tokens;
        if (tokens.length && tokens[tokens.length-1].type == "comment") {
            return indent;
        }
        if (state == "start") {
            var match = line.match(/^.*[\{\(\[]\s*$/);
            if (match) {
                indent += tab;
            }
        }
        return indent;
    };
    // Delegate outdent decisions to the shared matching-brace helper.
    this.checkOutdent = function(state, line, input) {
        return this.$outdent.checkOutdent(line, input);
    };
    this.autoOutdent = function(state, doc, row) {
        this.$outdent.autoOutdent(doc, row);
    };
    // No background syntax-checking worker is provided for C# in this build.
    this.createWorker = function(session) {
        return null;
    };
}).call(Mode.prototype);
exports.Mode = Mode;
});
/*
* Copyright 2013 Anton Tananaev (anton.tananaev@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pl.datamatica.traccar.model;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.gwt.core.shared.GwtIncompatible;
import com.google.gwt.user.client.rpc.*;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.persistence.*;
import org.hibernate.annotations.Filter;
import org.hibernate.annotations.FilterDef;
import org.hibernate.annotations.SQLDelete;
import com.google.gwt.user.datepicker.client.CalendarUtil;
import java.util.HashSet;
import org.hibernate.annotations.Fetch;
import org.hibernate.annotations.FetchMode;
@Entity
@Table(name = "devices",
indexes = { @Index(name = "devices_pkey", columnList = "id") },
uniqueConstraints = { @UniqueConstraint(name = "devices_ukey_uniqueid", columnNames = "uniqueid") })
@SQLDelete(sql="UPDATE devices d SET d.deleted = 1 WHERE d.id = ?")
@FilterDef(name="softDelete", defaultCondition="deleted = 0")
@Filter(name="softDelete")
public class Device extends TimestampedEntity implements IsSerializable, GroupedDevice {
private static final long serialVersionUID = 1;
public static final short DEFAULT_TIMEOUT = 5 * 60;
public static final short DEFAULT_MIN_IDLE_TIME = 1 * 60;
public static final String DEFAULT_MOVING_ARROW_COLOR = "00017A";
public static final String DEFAULT_PAUSED_ARROW_COLOR = "B12222";
public static final String DEFAULT_STOPPED_ARROW_COLOR = "016400";
public static final String DEFAULT_OFFLINE_ARROW_COLOR = "778899";
public static final String DEFAULT_COLOR = "0000FF";
public static final double DEFAULT_ARROW_RADIUS = 5;
public static final int NEAR_EXPIRATION_THRESHOLD_DAYS = 7;
    /**
     * Creates a device with default presentation settings: classic icon
     * rendering, the stock arrow colour palette, and the name/protocol/
     * odometer map labels switched on.
     */
    public Device() {
        iconType = DeviceIconType.DEFAULT;
        iconMode = DeviceIconMode.ICON;
        iconArrowMovingColor = DEFAULT_MOVING_ARROW_COLOR;
        iconArrowPausedColor = DEFAULT_PAUSED_ARROW_COLOR;
        iconArrowStoppedColor = DEFAULT_STOPPED_ARROW_COLOR;
        iconArrowOfflineColor = DEFAULT_OFFLINE_ARROW_COLOR;
        iconArrowRadius = DEFAULT_ARROW_RADIUS;
        color = DEFAULT_COLOR;
        // -1 presumably marks "no device model assigned" — TODO confirm.
        deviceModelId = -1;
        showName = true;
        showProtocol = true;
        showOdometer = true;
    }
    /**
     * Copy constructor.  Deep-copies maintenances, registrations, sensors,
     * the latest position and the group; shallow-copies the user set; copies
     * all scalar fields (including the entity id) verbatim.
     *
     * @param device the device to copy
     */
    public Device(Device device) {
        id = device.id;
        uniqueId = device.uniqueId;
        name = device.name;
        description = device.description;
        phoneNumber = device.phoneNumber;
        plateNumber = device.plateNumber;
        vehicleInfo = device.vehicleInfo;
        timeout = device.timeout;
        idleSpeedThreshold = device.idleSpeedThreshold;
        minIdleTime = device.minIdleTime;
        speedLimit = device.speedLimit;
        fuelCapacity = device.fuelCapacity;
        iconType = device.iconType;
        icon = device.getIcon();
        photo = device.getPhoto();
        odometer = device.odometer;
        autoUpdateOdometer = device.autoUpdateOdometer;
        // Element-wise copies so the new device does not share mutable
        // maintenance/registration/sensor instances with the source.
        if (device.maintenances != null) {
            maintenances = new ArrayList<>(device.maintenances.size());
            for (Maintenance maintenance : device.maintenances) {
                maintenances.add(new Maintenance(maintenance));
            }
        }
        if (device.registrations != null) {
            registrations = new ArrayList<>(device.registrations.size());
            for(RegistrationMaintenance registration : device.registrations)
                registrations.add(new RegistrationMaintenance(registration));
        }
        if (device.sensors != null) {
            sensors = new ArrayList<>(device.sensors.size());
            for (Sensor sensor : device.sensors) {
                sensors.add(new Sensor(sensor));
            }
        }
        if (device.latestPosition != null)
            latestPosition = new Position(device.latestPosition);
        // Group is cloned via its copyFrom helper to keep the same id.
        group = device.group == null ? null : new Group(device.group.getId()).copyFrom(device.group);
        deviceModelId = device.deviceModelId;
        iconId = device.iconId;
        customIconId = device.customIconId;
        iconMode = device.iconMode;
        iconArrowMovingColor = device.iconArrowMovingColor;
        iconArrowPausedColor = device.iconArrowPausedColor;
        iconArrowStoppedColor = device.iconArrowStoppedColor;
        iconArrowOfflineColor = device.iconArrowOfflineColor;
        iconArrowRadius = device.iconArrowRadius;
        showName = device.showName;
        showProtocol = device.showProtocol;
        showOdometer = device.showOdometer;
        timezoneOffset = device.timezoneOffset;
        commandPassword = device.commandPassword;
        protocol = device.protocol;
        historyLength = device.historyLength;
        validTo = device.validTo;
        color = device.color;
        // Shallow copy: User instances themselves are shared, the set is not.
        users = new HashSet<>(device.users);
        owner = device.owner;
        ignition = device.ignition;
        ignTime = device.ignTime;
        setLastUpdate(device.getLastUpdate());
    }
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "id", nullable = false, updatable = false, unique = true)
private long id;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
@GwtTransient
@OneToOne(fetch = FetchType.EAGER)
@JoinColumn(foreignKey = @ForeignKey(name = "devices_fkey_position_id"))
@JsonIgnore
private Position latestPosition;
public void setLatestPosition(Position latestPosition) {
this.latestPosition = latestPosition;
}
public Position getLatestPosition() {
return latestPosition;
}
private String uniqueId;
public void setUniqueId(String uniqueId) {
this.uniqueId = uniqueId;
}
public String getUniqueId() {
return uniqueId;
}
private String name;
public void setName(String name) {
this.name = name;
}
public String getName() {
return name;
}
private String description;
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
/**
* Consider device offline after 'timeout' seconds spent from last position
*/
@Column(nullable = true)
private int timeout = DEFAULT_TIMEOUT;
public int getTimeout() {
return timeout;
}
public void setTimeout(int timeout) {
this.timeout = timeout;
}
@Column(nullable = true)
private double idleSpeedThreshold;
public double getIdleSpeedThreshold() {
return idleSpeedThreshold;
}
public void setIdleSpeedThreshold(double idleSpeedThreshold) {
this.idleSpeedThreshold = idleSpeedThreshold;
}
@Column(nullable = true)
private int minIdleTime = DEFAULT_MIN_IDLE_TIME;
public int getMinIdleTime() {
return minIdleTime;
}
public void setMinIdleTime(int minIdleTime) {
this.minIdleTime = minIdleTime;
}
@Column(nullable = true)
private Double speedLimit;
public Double getSpeedLimit() {
return speedLimit;
}
public void setSpeedLimit(Double speedLimit) {
this.speedLimit = speedLimit;
}
@Column(nullable = true)
private Double fuelCapacity;
public Double getFuelCapacity() {
return fuelCapacity;
}
public void setFuelCapacity(Double fuelCapacity) {
this.fuelCapacity = fuelCapacity;
}
// Hibernate bug HHH-8783: (http://hibernate.atlassian.net/browse/HHH-8783)
// ForeignKey(name) has no effect in JoinTable (and others). It is
// reported as closed but the comments indicate it is still not fixed
// for @JoinTable() and targeted to be fixed in 5.x :-(.
//
@GwtTransient
@ManyToMany(fetch = FetchType.LAZY)
@Fetch(FetchMode.SUBSELECT)
@JoinTable(name = "users_devices",
foreignKey = @ForeignKey(name = "users_devices_fkey_devices_id"),
joinColumns = { @JoinColumn(name = "devices_id", table = "devices", referencedColumnName = "id") },
inverseJoinColumns = { @JoinColumn(name = "users_id", table = "users", referencedColumnName = "id") })
@JsonIgnore
private Set<User> users;
public Set<User> getUsers() {
return users;
}
public void setUsers(Set<User> users) {
this.users = users;
}
@GwtTransient
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(foreignKey = @ForeignKey(name = "devices_fkey_owner_id"))
@JsonIgnore
private User owner;
public User getOwner() {
return owner;
}
public void setOwner(User owner) {
this.owner = owner;
}
@Enumerated(EnumType.STRING)
private DeviceIconType iconType;
public DeviceIconType getIconType() {
return iconType;
}
public void setIconType(DeviceIconType iconType) {
this.iconType = iconType;
}
@ManyToOne
@JoinColumn(foreignKey = @ForeignKey(name = "devices_fkey_icon_id"))
private DeviceIcon icon;
public DeviceIcon getIcon() {
return icon;
}
public void setIcon(DeviceIcon icon) {
this.icon = icon;
}
@ManyToOne
@JoinColumn(foreignKey = @ForeignKey(name = "devices_fkey_photo_id"))
@JsonIgnore
private Picture photo;
public Picture getPhoto() {
return photo;
}
public void setPhoto(Picture photo) {
this.photo = photo;
}
@JsonIgnore
private String phoneNumber;
public String getPhoneNumber() {
return phoneNumber;
}
public void setPhoneNumber(String phoneNumber) {
this.phoneNumber = phoneNumber;
}
@JsonIgnore
private String plateNumber;
public String getPlateNumber() {
return plateNumber;
}
public void setPlateNumber(String plateNumber) {
this.plateNumber = plateNumber;
}
@JsonIgnore
private String vehicleInfo;
public String getVehicleInfo() {
return vehicleInfo;
}
public void setVehicleInfo(String vehicleInfo) {
this.vehicleInfo = vehicleInfo;
}
// contains current odometer value in kilometers
@Column(nullable = true)
@JsonIgnore
private double odometer;
public double getOdometer() {
return odometer;
}
public void setOdometer(double odometer) {
this.odometer = odometer;
}
// indicates that odometer must be updated automatically by positions history
@Column(nullable = true)
@JsonIgnore
private boolean autoUpdateOdometer;
public boolean isAutoUpdateOdometer() {
return autoUpdateOdometer;
}
public void setAutoUpdateOdometer(boolean autoUpdateOdometer) {
this.autoUpdateOdometer = autoUpdateOdometer;
}
@Transient
private List<Maintenance> maintenances;
public List<Maintenance> getMaintenances() {
return maintenances;
}
public void setMaintenances(List<Maintenance> maintenances) {
this.maintenances = maintenances;
}
@Transient
private List<RegistrationMaintenance> registrations;
public List<RegistrationMaintenance> getRegistrations() {
return registrations;
}
public void setRegistrations(List<RegistrationMaintenance> registrations) {
this.registrations = registrations;
}
@Transient
private List<Sensor> sensors;
public List<Sensor> getSensors() {
return sensors;
}
public void setSensors(List<Sensor> sensors) {
this.sensors = sensors;
}
// Owning group (many devices per group); explicit FK name keeps the schema stable.
@ManyToOne
@JoinColumn(foreignKey = @ForeignKey(name = "devices_fkey_group_id"))
private Group group;

public Group getGroup() {
    return group;
}

public void setGroup(Group group) {
    this.group = group;
}

// Optional free-form status string, at most 128 characters.
@Column(nullable = true, length = 128)
private String status;

public String getStatus() {
    return status;
}

public void setStatus(String status) {
    this.status = status;
}

// How the device icon is rendered; persisted as the enum constant name.
@Enumerated(EnumType.STRING)
private DeviceIconMode iconMode;

public DeviceIconMode getIconMode() {
    return iconMode;
}

public void setIconMode(DeviceIconMode iconMode) {
    this.iconMode = iconMode;
}

// Arrow-icon colors for the four device states (moving/paused/stopped/offline).
private String iconArrowMovingColor;
private String iconArrowPausedColor;
private String iconArrowStoppedColor;
private String iconArrowOfflineColor;

public String getIconArrowMovingColor() {
    return iconArrowMovingColor;
}

public void setIconArrowMovingColor(String iconArrowMovingColor) {
    this.iconArrowMovingColor = iconArrowMovingColor;
}

public String getIconArrowPausedColor() {
    return iconArrowPausedColor;
}

public void setIconArrowPausedColor(String iconArrowPausedColor) {
    this.iconArrowPausedColor = iconArrowPausedColor;
}

public String getIconArrowStoppedColor() {
    return iconArrowStoppedColor;
}

public void setIconArrowStoppedColor(String iconArrowStoppedColor) {
    this.iconArrowStoppedColor = iconArrowStoppedColor;
}

public String getIconArrowOfflineColor() {
    return iconArrowOfflineColor;
}

public void setIconArrowOfflineColor(String iconArrowOfflineColor) {
    this.iconArrowOfflineColor = iconArrowOfflineColor;
}
// Whether the icon rotates with the heading. No accessors visible in this
// section — they may be defined elsewhere in the class.
@Column(nullable = true)
private boolean iconRotation;

// Radius of the arrow icon.
@Column(nullable = true)
private double iconArrowRadius;

public double getIconArrowRadius() {
    return iconArrowRadius;
}

public void setIconArrowRadius(double iconArrowRadius) {
    this.iconArrowRadius = iconArrowRadius;
}

// Display toggles for the device label on the map.
@Column(nullable = true)
private boolean showName;

public boolean isShowName() {
    return showName;
}

public void setShowName(boolean showName) {
    this.showName = showName;
}

@Column(nullable = true)
private boolean showProtocol;

@Column(nullable = true)
private boolean showOdometer;

public boolean isShowProtocol() {
    return showProtocol;
}

public void setShowProtocol(boolean showProtocol) {
    this.showProtocol = showProtocol;
}

public boolean isShowOdometer() {
    return showOdometer;
}

public void setShowOdometer(boolean showOdometer) {
    this.showOdometer = showOdometer;
}

// Timezone offset; the getter unboxes a missing (null) value to 0.
@Column(nullable = true)
private Integer timezoneOffset;

public int getTimezoneOffset() {
    return timezoneOffset == null ? 0 : timezoneOffset;
}

public void setTimezoneOffset(Integer timezoneOffset) {
    this.timezoneOffset = timezoneOffset;
}

// Communication protocol name; transient — derived at runtime, not persisted.
@Transient
private String protocol;

public String getProtocol() {
    return protocol;
}

public void setProtocol(String protocol) {
    this.protocol = protocol;
}

// Optional password sent along with device commands.
@Column(nullable = true)
private String commandPassword;

public String getCommandPassword() {
    return commandPassword;
}

public void setCommandPassword(String commandPassword) {
    this.commandPassword = commandPassword;
}
// Runtime-only alarm flags; neither is persisted (@Transient).
@Transient
private boolean isAlarmEnabled;

public boolean isAlarmEnabled() {
    return isAlarmEnabled;
}

public void setAlarmEnabled(boolean isEnabled) {
    isAlarmEnabled = isEnabled;
}

@Transient
private boolean unreadAlarms;

public boolean hasUnreadAlarms() {
    return unreadAlarms;
}

public void setUnreadAlarms(boolean unreadAlarms) {
    this.unreadAlarms = unreadAlarms;
}

// Timestamp of the last alarm check; transient and excluded from JSON output.
@Transient
@JsonIgnore
private Date lastAlarmsCheck;

public Date getLastAlarmsCheck() {
    return lastAlarmsCheck;
}

public void setLastAlarmsCheck(Date date) {
    lastAlarmsCheck = date;
}

// Soft-delete flag; defaults to false at the database level.
@Column(nullable=false, columnDefinition = "boolean default false")
private boolean deleted;

public boolean isDeleted() {
    return deleted;
}

public void setDeleted(boolean deleted) {
    this.deleted = deleted;
}

// Device color as a 6-digit hex string; database default is blue (0000FF).
@Column(nullable=false, columnDefinition = "CHAR(6) default '0000FF'")
private String color;

public String getColor() {
    return color;
}

public void setColor(String color) {
    this.color = color;
}

// Reference to the device model; -1 at the database level means "not set".
@Column(nullable=false, columnDefinition = "BIGINT default -1")
private long deviceModelId;

public long getDeviceModelId() {
    return deviceModelId;
}

public void setDeviceModelId(long id) {
    this.deviceModelId = id;
}

// Position history, lazily loaded; excluded from GWT serialization and JSON.
@GwtTransient
@JsonIgnore
@OneToMany(fetch = FetchType.LAZY, mappedBy="device")
private List<Position> positions = new ArrayList<>();

public List<Position> getPositions() {
    return positions;
}

// Id of the built-in icon assigned to the device.
private Long iconId;

public Long getIconId() {
    return iconId;
}

public void setIconId(Long iconId) {
    this.iconId = iconId;
}

// Id of a user-uploaded custom icon, if any.
private Long customIconId;

public Long getCustomIconId() {
    return customIconId;
}

public void setCustomIconId(Long value) {
    this.customIconId = value;
}
// Subscription expiration date. Persisted as a date (no time component) and
// serialized to JSON as an ISO-8601 string in GMT.
@Temporal(TemporalType.DATE)
@JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ssZ",
timezone="GMT")
private Date validTo;

public Date getValidTo() {
    return validTo;
}

public void setValidTo(Date validTo) {
    this.validTo = validTo;
}
// True when the subscription is still valid on the given day.
// @GwtIncompatible: relies on SimpleDateFormat, unavailable client-side.
@GwtIncompatible
public boolean isValid(Date today) {
    if (getValidTo() == null)
        return false;
    try {
        // The format/parse round trip truncates the time-of-day portion so
        // the comparison below operates on whole days.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        today = sdf.parse(sdf.format(today));
        // validTo is inclusive: the subscription is still valid on the expiry day.
        return today.compareTo(getValidTo()) <= 0;
    } catch (ParseException ex) {
        // Practically unreachable (we parse what we just formatted), but be safe.
        Logger.getLogger(Device.class.getName()).log(Level.SEVERE, null, ex);
        return false;
    }
}
// Earliest date for which positions are available to the user: `historyLength`
// days back when the subscription is valid, otherwise `freeHistoryDays` back.
@GwtIncompatible
public Date getLastAvailablePositionDate(Date today, int freeHistoryDays) {
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
    try {
        // Truncate the time-of-day portion (format/parse round trip).
        today = sdf.parse(sdf.format(today));
    } catch (ParseException e) {
        Logger.getLogger(Device.class.getName()).log(Level.SEVERE, null, e);
    }
    int availableHistoryLength = freeHistoryDays;
    if (isValid(today)) {
        availableHistoryLength = getHistoryLength();
    }
    // Step back the allowed number of days from (truncated) today.
    Calendar cal = Calendar.getInstance();
    cal.setTime(today);
    cal.add(Calendar.DATE, -availableHistoryLength);
    return cal.getTime();
}
/**
 * Number of subscription days remaining from {@code from}, counting the
 * expiration day itself; never negative.
 */
public int getSubscriptionDaysLeft(Date from) {
    // No expiration date recorded means no subscription time left.
    if (validTo == null) {
        return 0;
    }
    // +1 makes the expiry day itself count; clamp already-expired
    // subscriptions to zero.
    int remaining = CalendarUtil.getDaysBetween(from, validTo) + 1;
    return remaining < 0 ? 0 : remaining;
}
// True when the subscription expires within NEAR_EXPIRATION_THRESHOLD_DAYS
// of `from` but has not expired yet.
public boolean isCloseToExpire(Date from) {
    int daysLeft = getSubscriptionDaysLeft(from);
    return (daysLeft <= NEAR_EXPIRATION_THRESHOLD_DAYS && daysLeft > 0);
}
// Number of days of history included with the subscription.
@Column(nullable = false, columnDefinition = "integer")
private int historyLength;

public int getHistoryLength() {
    return historyLength;
}

public void setHistoryLength(int historyLength) {
    this.historyLength = historyLength;
}

// History window (in days) used for alerts: the paid history length when the
// subscription is currently valid, otherwise the free allowance; capped at 7.
@GwtIncompatible
public int getAlertsHistoryLength(ApplicationSettings settings) {
    int historyLength = settings.getFreeHistory();
    if (isValid(new Date()))
        historyLength = getHistoryLength();
    return Math.min(historyLength, 7);
}
// Whether the device is administratively blocked; defaults to false.
@Column(nullable = false, columnDefinition="bit default false")
private boolean isBlocked;

public boolean isBlocked() {
    return isBlocked;
}

public void setBlocked(boolean isBlocked) {
    this.isBlocked = isBlocked;
}

// Last reported battery level; excluded from JSON output.
@JsonIgnore
private Integer battery;

@JsonIgnore
public Integer getBatteryLevel() {
    return battery;
}

@JsonIgnore
public void setBatteryLevel(Integer level) {
    this.battery = level;
}

// Timestamp of the last battery report.
@Temporal(javax.persistence.TemporalType.TIMESTAMP)
@JsonIgnore
private Date battTime;

@JsonIgnore
public Date getBatteryTime() {
    return battTime;
}

@JsonIgnore
public void setBatteryTime(Date time) {
    this.battTime = time;
}

// Fixed staleness window for battery readings, in seconds (1 hour).
@JsonIgnore
public int getBatteryTimeout() {
    return 3600;
}
// Last known ignition state; null when never reported.
private Boolean ignition;

public Boolean getIgnition() {
    return ignition;
}

public void setIgnition(Boolean ignition) {
    this.ignition = ignition;
}

// Timestamp of the last ignition state change.
@Temporal(javax.persistence.TemporalType.TIMESTAMP)
private Date ignTime;

public Date getIgnitionTime() {
    return ignTime;
}

public void setIgnitionTime(Date ignitionTime) {
    ignTime = ignitionTime;
}

// Position reporting frequency; read-only here, excluded from JSON.
@JsonIgnore
private Integer positionFreq;

public Integer getPositionFreq() {
    return positionFreq;
}

// Auto-arming flag; read-only here.
private Boolean autoArm;

public Boolean isAutoArmed() {
    return autoArm;
}

// Last reported fuel level and cumulative fuel used.
private Double fuelLevel;

public Double getFuelLevel() {
    return fuelLevel;
}

public void setFuelLevel(Double lvl) {
    this.fuelLevel = lvl;
}

private Double fuelUsed;

public Double getFuelUsed() {
    return fuelUsed;
}

public void setFuelUsed(Double used) {
    this.fuelUsed = used;
}
/**
 * Devices are equal when their unique hardware identifiers match.
 * {@code instanceof} already rejects {@code null}, so the original separate
 * null check was redundant; the id comparison itself is null-safe.
 */
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof Device)) return false;
    Device device = (Device) o;
    String uniqueId = getUniqueId();
    return uniqueId == null ? device.getUniqueId() == null : uniqueId.equals(device.getUniqueId());
}
// Consistent with equals(): based solely on the unique hardware identifier.
@Override
public int hashCode() {
    return getUniqueId() != null ? getUniqueId().hashCode() : 0;
}
}
| datamatica-pl/traccar-orm | src/main/java/pl/datamatica/traccar/model/Device.java | Java | agpl-3.0 | 23,210 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# res_partner
# Copyright (c) 2013 Codeback Software S.L. (http://codeback.es)
# @author: Miguel García <miguel@codeback.es>
# @author: Javier Fuentes <javier@codeback.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
from datetime import datetime, timedelta
from openerp.tools.translate import _
class res_company(osv.osv):
    """Add the new fields to res.company."""
    _name = "res.company"
    _inherit = "res.company"
    _columns = {
        # Company-wide web discount percentage (the label is user-facing Spanish text).
        'web_discount': fields.float('Descuento web (%)'),
    }
| codeback/openerp-cbk_company_web_discount | res_company.py | Python | agpl-3.0 | 1,385 |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Ajax.org Code Editor (ACE).
*
* The Initial Developer of the Original Code is
* Ajax.org B.V.
* Portions created by the Initial Developer are Copyright (C) 2010
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Fabian Jakobs <fabian AT ajax DOT org>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
define(function(require, exports, module) {
var lang = require("pilot/lang");
var oop = require("pilot/oop");
var Range = require("ace/range").Range;
// Holds the configuration of a text search over an edit session.  Options:
//   needle        - string (or regex source when regExp is set) to look for
//   backwards     - search direction
//   wrap          - wrap around the document/selection boundary
//   caseSensitive - case-sensitive matching
//   wholeWord     - match whole words only
//   scope         - Search.ALL or Search.SELECTION
//   regExp        - treat the needle as a regular expression
var Search = function() {
    this.$options = {
        needle: "",
        backwards: false,
        wrap: false,
        caseSensitive: false,
        wholeWord: false,
        scope: Search.ALL,
        regExp: false
    };
};

// Scope constants: search the whole document or only the current selection.
Search.ALL = 1;
Search.SELECTION = 2;
(function() {
// Merge the given options into the current ones; returns this for chaining.
this.set = function(options) {
    oop.mixin(this.$options, options);
    return this;
};

// Return a copy of the options so callers cannot mutate internal state.
this.getOptions = function() {
    return lang.copyObject(this.$options);
};
// Return the first matching Range in the session, honouring the configured
// search direction; null when there is no needle or no match.
this.find = function(session) {
    if (!this.$options.needle)
        return null;

    var iterator = this.$options.backwards
        ? this.$backwardMatchIterator(session)
        : this.$forwardMatchIterator(session);

    // Returning true from the callback stops the iteration, so only the
    // first reported range is captured.
    var firstRange = null;
    iterator.forEach(function(range) {
        firstRange = range;
        return true;
    });
    return firstRange;
};
// Collect every matching Range in the session, in search order.
this.findAll = function(session) {
    var options = this.$options;
    if (!options.needle)
        return [];

    var iterator = options.backwards
        ? this.$backwardMatchIterator(session)
        : this.$forwardMatchIterator(session);

    var ranges = [];
    iterator.forEach(function(range) {
        ranges.push(range);
    });
    return ranges;
};
// Compute the replacement for `input` when the needle matches it *entirely*;
// returns null otherwise.  In regExp mode capture-group references in
// `replacement` are honoured.
this.replace = function(input, replacement) {
    var re = this.$assembleRegExp();
    var match = re.exec(input);
    // A match whose length equals the input length necessarily starts at
    // index 0, i.e. the whole string was matched.
    if (!match || match[0].length != input.length)
        return null;
    return this.$options.regExp
        ? input.replace(re, replacement)
        : replacement;
};
// Build an iterator yielding match Ranges from the cursor forward.
this.$forwardMatchIterator = function(session) {
    var re = this.$assembleRegExp();
    var self = this;
    return {
        forEach: function(callback) {
            self.$forwardLineIterator(session).forEach(function(line, startIndex, row) {
                if (startIndex) {
                    line = line.substring(startIndex);
                }
                var matches = [];
                // replace() is used only to iterate matches — the string is
                // returned unchanged.  When the pattern contains capture
                // groups the replacer receives extra arguments, so the match
                // offset is always the second-to-last argument.
                line.replace(re, function(str) {
                    var offset = arguments[arguments.length-2];
                    matches.push({
                        str: str,
                        offset: startIndex + offset
                    });
                    return str;
                });
                for (var i=0; i<matches.length; i++) {
                    var match = matches[i];
                    var range = self.$rangeFromMatch(row, match.offset, match.str.length);
                    // A truthy callback result terminates the whole iteration.
                    if (callback(range))
                        return true;
                }
            });
        }
    };
};
// Build an iterator yielding match Ranges from the cursor backwards.
this.$backwardMatchIterator = function(session) {
    var re = this.$assembleRegExp();
    var self = this;
    return {
        forEach: function(callback) {
            self.$backwardLineIterator(session).forEach(function(line, startIndex, row) {
                if (startIndex) {
                    line = line.substring(startIndex);
                }
                var matches = [];
                line.replace(re, function(str) {
                    // Fix: mirror $forwardMatchIterator.  When the regExp
                    // needle contains capture groups the replacer receives
                    // extra arguments, so the second parameter is a capture
                    // group, not the offset — the offset is always the
                    // second-to-last argument.
                    var offset = arguments[arguments.length - 2];
                    matches.push({
                        str: str,
                        offset: startIndex + offset
                    });
                    return str;
                });
                // Iterate in reverse so ranges are reported nearest-first
                // when searching backwards.
                for (var i=matches.length-1; i>= 0; i--) {
                    var match = matches[i];
                    var range = self.$rangeFromMatch(row, match.offset, match.str.length);
                    if (callback(range))
                        return true;
                }
            });
        }
    };
};
// Convert a single-line match (row, start column, length) into a Range.
this.$rangeFromMatch = function(row, column, length) {
    return new Range(row, column, row, column+length);
};
// Build the RegExp for the current options: escape literal needles, add word
// boundaries for whole-word search, and add the "i" flag when the search is
// case-insensitive.  Always global ("g") so all matches are found.
this.$assembleRegExp = function() {
    var options = this.$options;
    var needle = options.regExp
        ? options.needle
        : lang.escapeRegExp(options.needle);
    if (options.wholeWord) {
        needle = "\\b" + needle + "\\b";
    }
    var flags = options.caseSensitive ? "g" : "gi";
    return new RegExp(needle, flags);
};
// Iterate lines from the cursor towards the end of the scope, optionally
// wrapping back to the start.  The callback receives (line, startIndex, row);
// a truthy return value stops the iteration.
this.$forwardLineIterator = function(session) {
    var searchSelection = this.$options.scope == Search.SELECTION;

    var range = session.getSelection().getRange();
    var start = session.getSelection().getCursor();

    var firstRow = searchSelection ? range.start.row : 0;
    var firstColumn = searchSelection ? range.start.column : 0;
    var lastRow = searchSelection ? range.end.row : session.getLength() - 1;

    var wrap = this.$options.wrap;

    // Clip the final line to the selection end when searching a selection.
    function getLine(row) {
        var line = session.getLine(row);
        if (searchSelection && row == range.end.row) {
            line = line.substring(0, range.end.column);
        }
        return line;
    }

    return {
        forEach: function(callback) {
            var row = start.row;

            var line = getLine(row);
            var startIndex = start.column;

            // `stop` is set once the scan wraps back to the starting row so
            // that row is revisited exactly once before terminating.
            var stop = false;
            while (!callback(line, startIndex, row)) {

                if (stop) {
                    return;
                }

                row++;
                startIndex = 0;

                if (row > lastRow) {
                    if (wrap) {
                        row = firstRow;
                        startIndex = firstColumn;
                    } else {
                        return;
                    }
                }

                if (row == start.row)
                    stop = true;

                line = getLine(row);
            }
        }
    };
};
// Iterate lines from the cursor towards the start of the scope, optionally
// wrapping around to the end.  Same callback contract as the forward variant.
this.$backwardLineIterator = function(session) {
    var searchSelection = this.$options.scope == Search.SELECTION;

    var range = session.getSelection().getRange();
    // When searching a selection, start from its end (we move backwards).
    var start = searchSelection ? range.end : range.start;

    var firstRow = searchSelection ? range.start.row : 0;
    var firstColumn = searchSelection ? range.start.column : 0;
    var lastRow = searchSelection ? range.end.row : session.getLength() - 1;

    var wrap = this.$options.wrap;

    return {
        forEach : function(callback) {
            var row = start.row;

            // The first visited line is clipped at the start position.
            var line = session.getLine(row).substring(0, start.column);
            var startIndex = 0;
            // `stop` is set once the scan wraps back to the starting row so
            // that row is revisited exactly once before terminating.
            var stop = false;
            while (!callback(line, startIndex, row)) {

                if (stop)
                    return;

                row--;
                startIndex = 0;

                if (row < firstRow) {
                    if (wrap) {
                        row = lastRow;
                    } else {
                        return;
                    }
                }

                if (row == start.row)
                    stop = true;

                line = session.getLine(row);
                if (searchSelection) {
                    // Clip to the selection bounds on its edge rows.
                    if (row == firstRow)
                        startIndex = firstColumn;
                    else if (row == lastRow)
                        line = line.substring(0, range.end.column);
                }
            }
        }
    };
};
}).call(Search.prototype);
exports.Search = Search;
});
| reidab/icecondor-server-rails | public/javascripts/ace/ace/search.js | JavaScript | agpl-3.0 | 9,844 |
/**
* Copyright (C) 2000 - 2013 Silverpeas
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* As a special exception to the terms and conditions of version 3.0 of the GPL, you may
* redistribute this Program in connection with Free/Libre Open Source Software ("FLOSS")
* applications as described in Silverpeas's FLOSS exception. You should have received a copy of the
* text describing the FLOSS exception, and it is also available here:
* "http://www.silverpeas.org/docs/core/legal/floss_exception.html"
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see <http://www.gnu.org/licenses/>.
*/
package com.stratelia.webactiv.util.node.model;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.io.Serializable;
import java.text.ParseException;
import org.silverpeas.search.indexEngine.model.IndexEntry;
import com.silverpeas.util.clipboard.ClipboardSelection;
import com.silverpeas.util.clipboard.SilverpeasKeyData;
import com.stratelia.silverpeas.silvertrace.SilverTrace;
import com.stratelia.webactiv.util.DateUtil;
/**
 * Clipboard selection wrapping a {@link NodeDetail} so that nodes can be
 * copied and pasted through the Silverpeas clipboard mechanism.
 */
public class NodeSelection extends ClipboardSelection implements Serializable {

    private static final long serialVersionUID = -6462797069972573255L;

    // Flavor identifying NodeDetail payloads on the clipboard.
    // NOTE(review): public, static and non-final — could in principle be
    // reassigned by other code; confirm before tightening to final.
    public static DataFlavor NodeDetailFlavor;

    static {
        NodeDetailFlavor = new DataFlavor(NodeDetail.class, "Node");
    }

    // The node carried by this selection.
    private NodeDetail nodeDetail;

    /**
     * @param node the node to put on the clipboard
     */
    public NodeSelection(NodeDetail node) {
        super();
        nodeDetail = node;
        super.addFlavor(NodeDetailFlavor);
    }

    /**
     * Returns the transfer data for the requested flavor, falling back to the
     * wrapped {@link NodeDetail} when the superclass does not handle the flavor.
     *
     * @throws UnsupportedFlavorException if the flavor is neither supported by
     *         the superclass nor equal to {@link #NodeDetailFlavor}
     */
    @Override
    public synchronized Object getTransferData(DataFlavor parFlavor)
        throws UnsupportedFlavorException {
        Object transferedData;
        try {
            transferedData = super.getTransferData(parFlavor);
        } catch (UnsupportedFlavorException e) {
            if (parFlavor.equals(NodeDetailFlavor)) {
                transferedData = nodeDetail;
            } else {
                throw e;
            }
        }
        return transferedData;
    }

    // Index entry used by the search engine for the node carried here.
    @Override
    public IndexEntry getIndexEntry() {
        NodePK pk = nodeDetail.getNodePK();
        IndexEntry indexEntry = new IndexEntry(pk.getInstanceId(), "Node", pk.getId());
        indexEntry.setTitle(nodeDetail.getName());
        return indexEntry;
    }

    // Key data (title, author, creation date, description) describing the node.
    // A creation-date parse failure is logged and simply leaves the date unset.
    @Override
    public SilverpeasKeyData getKeyData() {
        SilverpeasKeyData keyData = new SilverpeasKeyData();
        keyData.setTitle(nodeDetail.getName());
        keyData.setAuthor(nodeDetail.getCreatorId());
        try {
            keyData.setCreationDate(DateUtil.parse(nodeDetail.getCreationDate()));
        } catch (ParseException e) {
            SilverTrace.error("node", "NodeSelection.getKeyData()", "root.EX_NO_MESSAGE", e);
        }
        keyData.setDesc(nodeDetail.getDescription());
        return keyData;
    }
}
| CecileBONIN/Silverpeas-Core | ejb-core/node/src/main/java/com/stratelia/webactiv/util/node/model/NodeSelection.java | Java | agpl-3.0 | 3,220 |
import React, { useState } from 'react';
import { connect, ConnectedProps } from 'react-redux';
import { TimeZone } from '@grafana/data';
import { CollapsableSection, Field, Input, RadioButtonGroup, TagsInput } from '@grafana/ui';
import { selectors } from '@grafana/e2e-selectors';
import { FolderPicker } from 'app/core/components/Select/FolderPicker';
import { DashboardModel } from '../../state/DashboardModel';
import { DeleteDashboardButton } from '../DeleteDashboard/DeleteDashboardButton';
import { TimePickerSettings } from './TimePickerSettings';
import { updateTimeZoneDashboard, updateWeekStartDashboard } from 'app/features/dashboard/state/actions';
import { PreviewSettings } from './PreviewSettings';
import { config } from '@grafana/runtime';
interface OwnProps {
  dashboard: DashboardModel;
}

// Own props plus the dispatch props injected by react-redux below.
export type Props = OwnProps & ConnectedProps<typeof connector>;

// Values correspond to DashboardModel.graphTooltip.
const GRAPH_TOOLTIP_OPTIONS = [
  { value: 0, label: 'Default' },
  { value: 1, label: 'Shared crosshair' },
  { value: 2, label: 'Shared Tooltip' },
];
// Dashboard "General" settings tab.  The handlers mutate the dashboard model
// in place; `renderCounter` exists only to force a re-render after those
// mutations, since the model itself is not React state.
export function GeneralSettingsUnconnected({ dashboard, updateTimeZone, updateWeekStart }: Props): JSX.Element {
  const [renderCounter, setRenderCounter] = useState(0);

  // Moving the dashboard to another folder is only persisted on save;
  // hasUnsavedFolderChange flags it for the save logic.
  const onFolderChange = (folder: { id: number; title: string }) => {
    dashboard.meta.folderId = folder.id;
    dashboard.meta.folderTitle = folder.title;
    dashboard.meta.hasUnsavedFolderChange = true;
  };

  // Shared blur handler for the title/description inputs; the input's `name`
  // attribute selects which dashboard field gets written.
  const onBlur = (event: React.FocusEvent<HTMLInputElement>) => {
    dashboard[event.currentTarget.name as 'title' | 'description'] = event.currentTarget.value;
  };

  const onTooltipChange = (graphTooltip: number) => {
    dashboard.graphTooltip = graphTooltip;
    setRenderCounter(renderCounter + 1);
  };

  // Drop blank interval entries before storing.
  const onRefreshIntervalChange = (intervals: string[]) => {
    dashboard.timepicker.refresh_intervals = intervals.filter((i) => i.trim() !== '');
  };

  const onNowDelayChange = (nowDelay: string) => {
    dashboard.timepicker.nowDelay = nowDelay;
  };

  const onHideTimePickerChange = (hide: boolean) => {
    dashboard.timepicker.hidden = hide;
    setRenderCounter(renderCounter + 1);
  };

  const onLiveNowChange = (v: boolean) => {
    dashboard.liveNow = v;
    setRenderCounter(renderCounter + 1);
  };

  // Time zone / week start also dispatch to the store so queries re-run.
  const onTimeZoneChange = (timeZone: TimeZone) => {
    dashboard.timezone = timeZone;
    setRenderCounter(renderCounter + 1);
    updateTimeZone(timeZone);
  };

  const onWeekStartChange = (weekStart: string) => {
    dashboard.weekStart = weekStart;
    setRenderCounter(renderCounter + 1);
    updateWeekStart(weekStart);
  };

  const onTagsChange = (tags: string[]) => {
    dashboard.tags = tags;
    setRenderCounter(renderCounter + 1);
  };

  const onEditableChange = (value: boolean) => {
    dashboard.editable = value;
    setRenderCounter(renderCounter + 1);
  };

  const editableOptions = [
    { label: 'Editable', value: true },
    { label: 'Read-only', value: false },
  ];

  return (
    <div style={{ maxWidth: '600px' }}>
      <h3 className="dashboard-settings__header" aria-label={selectors.pages.Dashboard.Settings.General.title}>
        General
      </h3>
      <div className="gf-form-group">
        <Field label="Name">
          <Input id="title-input" name="title" onBlur={onBlur} defaultValue={dashboard.title} />
        </Field>
        <Field label="Description">
          <Input id="description-input" name="description" onBlur={onBlur} defaultValue={dashboard.description} />
        </Field>
        <Field label="Tags">
          <TagsInput id="tags-input" tags={dashboard.tags} onChange={onTagsChange} />
        </Field>
        <Field label="Folder">
          <FolderPicker
            inputId="dashboard-folder-input"
            initialTitle={dashboard.meta.folderTitle}
            initialFolderId={dashboard.meta.folderId}
            onChange={onFolderChange}
            enableCreateNew={true}
            dashboardId={dashboard.id}
            skipInitialLoad={true}
          />
        </Field>
        <Field
          label="Editable"
          description="Set to read-only to disable all editing. Reload the dashboard for changes to take effect"
        >
          <RadioButtonGroup value={dashboard.editable} options={editableOptions} onChange={onEditableChange} />
        </Field>
      </div>
      {config.featureToggles.dashboardPreviews && config.featureToggles.dashboardPreviewsAdmin && (
        <PreviewSettings uid={dashboard.uid} />
      )}
      <TimePickerSettings
        onTimeZoneChange={onTimeZoneChange}
        onWeekStartChange={onWeekStartChange}
        onRefreshIntervalChange={onRefreshIntervalChange}
        onNowDelayChange={onNowDelayChange}
        onHideTimePickerChange={onHideTimePickerChange}
        onLiveNowChange={onLiveNowChange}
        refreshIntervals={dashboard.timepicker.refresh_intervals}
        timePickerHidden={dashboard.timepicker.hidden}
        nowDelay={dashboard.timepicker.nowDelay}
        timezone={dashboard.timezone}
        weekStart={dashboard.weekStart}
        liveNow={dashboard.liveNow}
      />
      <CollapsableSection label="Panel options" isOpen={true}>
        <Field
          label="Graph tooltip"
          description="Controls tooltip and hover highlight behavior across different panels"
        >
          <RadioButtonGroup onChange={onTooltipChange} options={GRAPH_TOOLTIP_OPTIONS} value={dashboard.graphTooltip} />
        </Field>
      </CollapsableSection>
      <div className="gf-form-button-row">
        {dashboard.meta.canDelete && <DeleteDashboardButton dashboard={dashboard} />}
      </div>
    </div>
  );
}
// Wire the time-zone / week-start thunks into the component's props.
const mapDispatchToProps = {
  updateTimeZone: updateTimeZoneDashboard,
  updateWeekStart: updateWeekStartDashboard,
};

const connector = connect(null, mapDispatchToProps);

export const GeneralSettings = connector(GeneralSettingsUnconnected);
| grafana/grafana | public/app/features/dashboard/components/DashboardSettings/GeneralSettings.tsx | TypeScript | agpl-3.0 | 5,865 |
# Copyright (c) 2016 Sebastian Kanis
# This file is part of pi-led-control.
# pi-led-control is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# pi-led-control is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with pi-led-control. If not, see <http://www.gnu.org/licenses/>.
import datetime
import logging
from server.programs.abstractprogram import AbstractProgram
class ScheduledProgram(AbstractProgram):
    """Wrap another program and delay its start until a given time of day.

    ``timeOfDay`` is expressed in seconds since midnight.  ``run`` sleeps
    until the next occurrence of that time, then runs the wrapped program.
    """

    def __init__(self, program, timeOfDay):
        super().__init__()
        self._program = program
        self._timeOfDay = timeOfDay

    def run(self):
        now = datetime.datetime.now()
        elapsed_today = now.hour * 3600 + now.minute * 60 + now.second
        if elapsed_today < self._timeOfDay:
            # Target time is still ahead of us today.
            sleepDuration = self._timeOfDay - elapsed_today
        else:
            # Target time already passed; wait for the same time tomorrow.
            sleepDuration = self._timeOfDay + 3600 * 24 - elapsed_today
        logging.getLogger("main").info("sleeping for " + str(sleepDuration) + " seconds")
        self._waitIfNotStopped(sleepDuration)
        self._program.run()

    def setThreadStopEvent(self, threadStopEvent):
        # Propagate the stop event so the wrapped program is interruptible too.
        self.threadStopEvent = threadStopEvent
        self._program.setThreadStopEvent(threadStopEvent)

    def setColorSetter(self, colorSetter):
        self._colorSetter = colorSetter
        self._program.setColorSetter(colorSetter)

    def getCurrentColor(self):
        # The wrapped program owns the actual color state; delegate.
        return self._program.getCurrentColor()

    def setLastColor(self, lastColor):
        self._program.setLastColor(lastColor)
| s0riak/pi-led-control | src/server/programs/scheduledprogram.py | Python | agpl-3.0 | 1,953 |
/**
* Based conceptually on the _.extend() function in underscore.js ( see http://documentcloud.github.com/underscore/#extend for more details )
* Copyright (C) 2012 Kurt Milam - http://xioup.com | Source: https://gist.github.com/1868955
*
* This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with this program. If not, see http://www.gnu.org/licenses/.
**/
var _ = require('underscore');
/**
 * Deep-merges every source object (arguments 2..n) into `obj`; conceptually
 * based on _.extend() — see the header block for licensing.
 *
 * Behaviour (unchanged from the original):
 *  - function/undefined targets and null source values overwrite wholesale;
 *  - a string source containing #{ _ } has the target's string value
 *    substituted into the placeholder;
 *  - arrays merge only with arrays (null elements are dropped afterwards),
 *    objects only with objects; mixing either with anything else throws;
 *  - everything else is simply copied over.
 *
 * NOTE(review): nested merges recurse through `_.deepExtend`, so this function
 * must be mixed into underscore (e.g. _.mixin({deepExtend: deepExtend}))
 * elsewhere — confirm the mixin is registered before relying on nested merges.
 */
var deepExtend = function(obj) {  // `var` added: the original leaked `deepExtend` into the global scope
  var parentRE = /#{\s*?_\s*?}/,
      slice = Array.prototype.slice,
      hasOwnProperty = Object.prototype.hasOwnProperty;

  _.each(slice.call(arguments, 1), function(source) {
    for (var prop in source) {
      if (hasOwnProperty.call(source, prop)) {
        if (_.isUndefined(obj[prop]) || _.isFunction(obj[prop]) || _.isNull(source[prop])) {
          obj[prop] = source[prop];
        }
        else if (_.isString(source[prop]) && parentRE.test(source[prop])) {
          // Template string: splice the existing string value into #{ _ }.
          if (_.isString(obj[prop])) {
            obj[prop] = source[prop].replace(parentRE, obj[prop]);
          }
        }
        else if (_.isArray(obj[prop]) || _.isArray(source[prop])){
          if (!_.isArray(obj[prop]) || !_.isArray(source[prop])){
            throw 'Error: Trying to combine an array with a non-array (' + prop + ')';
          } else {
            // Merge element-wise, then drop elements explicitly nulled out.
            obj[prop] = _.reject(_.deepExtend(obj[prop], source[prop]), function (item) { return _.isNull(item);});
          }
        }
        else if (_.isObject(obj[prop]) || _.isObject(source[prop])){
          if (!_.isObject(obj[prop]) || !_.isObject(source[prop])){
            throw 'Error: Trying to combine an object with a non-object (' + prop + ')';
          } else {
            obj[prop] = _.deepExtend(obj[prop], source[prop]);
          }
        } else {
          obj[prop] = source[prop];
        }
      }
    }
  });

  return obj;
};

exports.deepExtend = deepExtend;
| 4poc/anpaste | app/lib/deep_extend_underscore_mixin.js | JavaScript | agpl-3.0 | 2,385 |
package output;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import javax.swing.JOptionPane;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.w3c.dom.Node;
/**
 * Parses the XML report produced by a JHOVE examination and writes an
 * aggregated summary ("JhoveExaminationSummary.xml") containing per-file
 * status information and a histogram of the distinct JHOVE messages.
 */
public class XmlParserJhove {

    /** Asks the user to pick the JHOVE XML report, then summarises it. */
    public static void main(String args[]) throws Exception {
        JOptionPane.showMessageDialog(null, "Please choose the XML File to analyse", "XmlParsing", JOptionPane.QUESTION_MESSAGE);
        String xmlfile = utilities.BrowserDialogs.chooseFile();
        parseXmlFile(xmlfile);
    }

    /**
     * Reads {@code xmlfile} (one {@code <item>} element per examined file) and
     * writes the summary XML next to the examination results.  Any failure is
     * printed and reported in an error dialog.
     *
     * @param xmlfile path of the JHOVE XML report to summarise
     */
    public static void parseXmlFile(String xmlfile) {
        try {
            DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
            DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
            Document doc = dBuilder.parse(xmlfile);
            // try-with-resources: the original leaked the writer whenever an
            // exception occurred before close().
            try (PrintWriter xmlsummary = new PrintWriter(new FileWriter(
                    jhoveValidations.JhoveGuiStarterDialog.jhoveExaminationFolder + "//" + "JhoveExaminationSummary" + ".xml"))) {
                String xmlVersion = "xml version='1.0'";
                String xmlEncoding = "encoding='ISO-8859-1'";
                String xmlxslStyleSheet = "<?xml-stylesheet type=\"text/xsl\" href=\"JhoveCustomized.xsl\"?>";
                xmlsummary.println("<?" + xmlVersion + " " + xmlEncoding + "?>");
                xmlsummary.println(xmlxslStyleSheet);
                xmlsummary.println("<JhoveFindingsSummary>");
                // Emit the stylesheet referenced above alongside the summary.
                output.XslStyleSheetsJhove.JhoveCustomizedXsl();
                ArrayList<String> errormessages = new ArrayList<String>();
                doc.getDocumentElement().normalize();
                NodeList nList = doc.getElementsByTagName("item");
                for (int temp = 0; temp < nList.getLength(); temp++) {
                    Node nNode = nList.item(temp);
                    if (nNode.getNodeType() == Node.ELEMENT_NODE) {
                        Element eElement = (Element) nNode;
                        xmlsummary.println("<File>");
                        String testutf8 = eElement.getElementsByTagName("filename").item(0).getTextContent();
                        if (testutf8.contains("&")) {
                            // File names containing '&' must be normalised
                            // before they can be embedded in XML.
                            String sub = utilities.GenericUtilities.normaliseToUtf8(testutf8);
                            xmlsummary.println("<FileName>" + sub + "</FileName>");
                        } else {
                            xmlsummary.println("<FileName>" + testutf8 + "</FileName>");
                        }
                        // The following elements are optional in the JHOVE report.
                        if (eElement.getElementsByTagName("creationyear").item(0) != null) {
                            xmlsummary.println("<CreationYear>" + eElement.getElementsByTagName("creationyear").item(0).getTextContent() + "</CreationYear>");
                        }
                        if (eElement.getElementsByTagName("creationsoftware").item(0) != null) {
                            xmlsummary.println("<CreationSoftware>" + eElement.getElementsByTagName("creationsoftware").item(0).getTextContent() + "</CreationSoftware>");
                        }
                        if (eElement.getElementsByTagName("encryption").item(0) != null) {
                            xmlsummary.println("<Encryption>" + eElement.getElementsByTagName("encryption").item(0).getTextContent() + "</Encryption>");
                        }
                        if (eElement.getElementsByTagName("PdfType").item(0) != null) {
                            xmlsummary.println("<PdfType>" + eElement.getElementsByTagName("PdfType").item(0).getTextContent() + "</PdfType>");
                        }
                        xmlsummary.println("<Module>" + eElement.getElementsByTagName("reportingModule").item(0).getTextContent() + "</Module>");
                        String status = eElement.getElementsByTagName("status").item(0).getTextContent();
                        xmlsummary.println("<Status>" + status + "</Status>");
                        // Files that are not well-formed / not valid carry one
                        // or more explanatory messages.
                        if ((status.contains("Not")) || (status.contains("not"))) {
                            System.out.println(testutf8);
                            int lenmessages = eElement.getElementsByTagName("message").getLength();
                            xmlsummary.println("<JhoveMessages>" + lenmessages + "</JhoveMessages>");
                            for (int temp3 = 0; temp3 < lenmessages; temp3++) {
                                // Escape the message so the summary stays
                                // well-formed XML (see escapeXml below).
                                String error = escapeXml(eElement.getElementsByTagName("message").item(temp3).getTextContent());
                                int writtenmessage = temp3 + 1;
                                xmlsummary.println("<Message" + writtenmessage + ">" + error + "</Message" + writtenmessage + ">");
                                errormessages.add(error);
                            }
                        }
                        xmlsummary.println("</File>");
                    }
                }
                Collections.sort(errormessages);
                // Keep a full (sorted) copy so occurrences can still be
                // counted after the list is deduplicated below.
                ArrayList<String> originerrors = new ArrayList<String>(errormessages);
                // Collapse adjacent duplicates (the list is sorted).
                int i = 0;
                while (i < errormessages.size() - 1) {
                    if (errormessages.get(i).equals(errormessages.get(i + 1))) {
                        errormessages.remove(i);
                    } else {
                        i++;
                    }
                }
                xmlsummary.println("<SampleSummary>");
                xmlsummary.println("<ExaminedPdfFiles>" + nList.getLength() + "</ExaminedPdfFiles>");
                xmlsummary.println("<DifferentJhoveMessages>" + errormessages.size() + "</DifferentJhoveMessages>");
                // For every distinct message, count how often it occurred.
                for (i = 0; i < errormessages.size(); i++) {
                    int occurrences = 0;
                    for (int j = 0; j < originerrors.size(); j++) {
                        if (errormessages.get(i).equals(originerrors.get(j))) {
                            occurrences++;
                        }
                    }
                    xmlsummary.println("<JhoveMessage>");
                    xmlsummary.println("<MessageText>" + errormessages.get(i) + "</MessageText>");
                    xmlsummary.println("<Occurance>" + occurrences + "</Occurance>");
                    xmlsummary.println("</JhoveMessage>");
                }
                xmlsummary.println("</SampleSummary>");
                xmlsummary.println("</JhoveFindingsSummary>");
            }
        }
        catch (Exception e) {
            e.printStackTrace();
            JOptionPane.showMessageDialog(null, e, "error message", JOptionPane.ERROR_MESSAGE);
        }
    }

    /**
     * Escapes the five predefined XML entities in a JHOVE message.  The
     * ampersand must be replaced first so the '&' characters introduced by
     * the other replacements are not escaped a second time.  (The original
     * code escaped '&' last — and only prefixed it with a space — which left
     * bare ampersands in the output and produced invalid XML.)
     */
    private static String escapeXml(String text) {
        return text.replace("&", "&")
                .replace("<", "<")
                .replace(">", ">")
                .replace("\"", """)
                .replace("'", "'");
    }
}
| YvonneTunnat/File-Format-Utilities | master/pdf-tools/src/main/java/output/XmlParserJhove.java | Java | agpl-3.0 | 6,325 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import django_pgjson.fields
import django.utils.timezone
import django.db.models.deletion
import djorm_pgarray.fields
import taiga.projects.history.models
class Migration(migrations.Migration):
    # Initial migration for the "projects" app: creates the Membership and
    # Project models, links Project.members through Membership, and adds the
    # owner / user / project / role relations.

    dependencies = [
        # The user model is swappable, so depend on whatever AUTH_USER_MODEL
        # points at, plus the users migration that provides the Role model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('users', '0002_auto_20140903_0916'),
    ]

    operations = [
        migrations.CreateModel(
            name='Membership',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('is_owner', models.BooleanField(default=False)),
                # email/token support inviting people who have no account yet.
                ('email', models.EmailField(max_length=255, null=True, default=None, verbose_name='email', blank=True)),
                # NOTE(review): verbose_name 'creado el' is a runtime string and
                # is kept verbatim (translating it would change behavior).
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, verbose_name='creado el')),
                ('token', models.CharField(max_length=60, null=True, default=None, verbose_name='token', blank=True)),
                ('invited_by_id', models.IntegerField(null=True, blank=True)),
            ],
            options={
                'ordering': ['project', 'user__full_name', 'user__username', 'user__email', 'email'],
                # NOTE(review): 'membershipss' looks like a typo for
                # 'memberships', but an applied migration must stay verbatim;
                # fix it via a follow-up model change + new migration instead.
                'verbose_name_plural': 'membershipss',
                'permissions': (('view_membership', 'Can view membership'),),
                'verbose_name': 'membership',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Project',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('tags', djorm_pgarray.fields.TextArrayField(dbtype='text', verbose_name='tags')),
                ('name', models.CharField(max_length=250, unique=True, verbose_name='name')),
                ('slug', models.SlugField(max_length=250, unique=True, verbose_name='slug', blank=True)),
                ('description', models.TextField(verbose_name='description')),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='created date')),
                ('modified_date', models.DateTimeField(verbose_name='modified date')),
                ('total_milestones', models.IntegerField(null=True, default=0, verbose_name='total of milestones', blank=True)),
                ('total_story_points', models.FloatField(default=0, verbose_name='total story points')),
                # Feature toggles for the project's panels.
                ('is_backlog_activated', models.BooleanField(default=True, verbose_name='active backlog panel')),
                ('is_kanban_activated', models.BooleanField(default=False, verbose_name='active kanban panel')),
                ('is_wiki_activated', models.BooleanField(default=True, verbose_name='active wiki panel')),
                ('is_issues_activated', models.BooleanField(default=True, verbose_name='active issues panel')),
                ('videoconferences', models.CharField(max_length=250, null=True, choices=[('appear-in', 'AppearIn'), ('talky', 'Talky'), ('jitsi', 'Jitsi')], verbose_name='videoconference system', blank=True)),
                ('videoconferences_salt', models.CharField(max_length=250, null=True, verbose_name='videoconference room salt', blank=True)),
                # Permission lists stored as PostgreSQL text arrays.
                ('anon_permissions', djorm_pgarray.fields.TextArrayField(choices=[('view_project', 'View project'), ('view_milestones', 'View milestones'), ('view_us', 'View user stories'), ('view_tasks', 'View tasks'), ('view_issues', 'View issues'), ('view_wiki_pages', 'View wiki pages'), ('view_wiki_links', 'View wiki links')], dbtype='text', default=[], verbose_name='anonymous permissions')),
                ('public_permissions', djorm_pgarray.fields.TextArrayField(choices=[('view_project', 'View project'), ('view_milestones', 'View milestones'), ('view_us', 'View user stories'), ('view_issues', 'View issues'), ('vote_issues', 'Vote issues'), ('view_tasks', 'View tasks'), ('view_wiki_pages', 'View wiki pages'), ('view_wiki_links', 'View wiki links'), ('request_membership', 'Request membership'), ('add_us_to_project', 'Add user story to project'), ('add_comments_to_us', 'Add comments to user stories'), ('add_comments_to_task', 'Add comments to tasks'), ('add_issue', 'Add issues'), ('add_comments_issue', 'Add comments to issues'), ('add_wiki_page', 'Add wiki page'), ('modify_wiki_page', 'Modify wiki page'), ('add_wiki_link', 'Add wiki link'), ('modify_wiki_link', 'Modify wiki link')], dbtype='text', default=[], verbose_name='user permissions')),
                ('is_private', models.BooleanField(default=False, verbose_name='is private')),
                ('tags_colors', djorm_pgarray.fields.TextArrayField(dbtype='text', dimension=2, default=[], null=False, verbose_name='tags colors')),
            ],
            options={
                'ordering': ['name'],
                'verbose_name_plural': 'projects',
                'permissions': (('view_project', 'Can view project'),),
                'verbose_name': 'project',
            },
            bases=(models.Model,),
        ),
        # The M2M is declared after both models exist because it goes
        # through the intermediate Membership model.
        migrations.AddField(
            model_name='project',
            name='members',
            field=models.ManyToManyField(to=settings.AUTH_USER_MODEL, related_name='projects', verbose_name='members', through='projects.Membership'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='project',
            name='owner',
            field=models.ForeignKey(to=settings.AUTH_USER_MODEL, related_name='owned_projects', verbose_name='owner'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='membership',
            name='user',
            field=models.ForeignKey(blank=True, default=None, to=settings.AUTH_USER_MODEL, null=True, related_name='memberships'),
            preserve_default=True,
        ),
        # default=1 is a one-off backfill value for pre-existing rows;
        # preserve_default=False drops it from the final field definition.
        migrations.AddField(
            model_name='membership',
            name='project',
            field=models.ForeignKey(default=1, to='projects.Project', related_name='memberships'),
            preserve_default=False,
        ),
        migrations.AlterUniqueTogether(
            name='membership',
            unique_together=set([('user', 'project')]),
        ),
        migrations.AddField(
            model_name='membership',
            name='role',
            field=models.ForeignKey(related_name='memberships', to='users.Role', default=1),
            preserve_default=False,
        ),
    ]
| 19kestier/taiga-back | taiga/projects/migrations/0001_initial.py | Python | agpl-3.0 | 6,634 |
<?php
/*-------------------------------------------------------+
| PHP-Fusion Content Management System
| Copyright (C) PHP-Fusion Inc
| https://www.php-fusion.co.uk/
+--------------------------------------------------------+
| Filename: gallery_settings.php
| Author: PHP-Fusion Development Team
+--------------------------------------------------------+
| This program is released as free software under the
| Affero GPL license. You can redistribute it and/or
| modify it under the terms of this license which you
| can read by viewing the included agpl.txt or online
| at www.gnu.org/licenses/agpl.html. Removal of this
| copyright header is strictly prohibited without
| written permission from the original author(s).
+--------------------------------------------------------*/
// Block direct access before doing anything else: outside the PHP-Fusion
// bootstrap none of the framework APIs (including pageAccess) exist yet, so
// the guard must come first or direct hits fail with a fatal error instead
// of a clean denial.
if (!defined("IN_FUSION")) {
    die("Access Denied");
}
// Restrict this admin page to users holding the "PH" page right.
pageAccess("PH");
// Shared admin/settings locale strings ($locale['900'], $locale['901'], ...).
include LOCALE.LOCALESET."admin/settings.php";
// Handle the two admin actions submitted by the form below:
//  - delete_watermarks: purge all cached watermark image variants, or
//  - savesettings: validate and persist the gallery settings.
if (isset($_POST['delete_watermarks'])) {
    // Every photo may have two generated watermark copies next to it:
    // <name>_w1.<ext> and <name>_w2.<ext>. Remove any that exist.
    $result = dbquery("SELECT album_id,photo_filename FROM ".DB_PHOTOS." ORDER BY album_id, photo_id");
    if (dbrows($result)) {
        while ($data = dbarray($result)) {
            $parts = explode(".", $data['photo_filename']);
            $watermark1 = $parts[0]."_w1.".$parts[1];
            $watermark2 = $parts[0]."_w2.".$parts[1];
            $photodir = IMAGES_G;
            if (file_exists($photodir.$watermark1)) unlink($photodir.$watermark1);
            if (file_exists($photodir.$watermark2)) unlink($photodir.$watermark2);
        }
    }
    // Reload the page either way so the form reflects the current state.
    redirect(FUSION_REQUEST);
} else if (isset($_POST['savesettings'])) {
    // FIX: removed a leftover debug dump (print_p($_POST)) that echoed the
    // raw request payload into the admin page output.
    // Sanitize every setting, falling back to the documented defaults.
    // photo_max_b is entered as value + unit and stored in plain bytes.
    $inputArray = array(
        "thumb_w" => form_sanitizer($_POST['thumb_w'], 200, "thumb_w"),
        "thumb_h" => form_sanitizer($_POST['thumb_h'], 200, "thumb_h"),
        "photo_w" => form_sanitizer($_POST['photo_w'], 800, "photo_w"),
        "photo_h" => form_sanitizer($_POST['photo_h'], 800, "photo_h"),
        "photo_max_w" => form_sanitizer($_POST['photo_max_w'], 2400, "photo_max_w"),
        "photo_max_h" => form_sanitizer($_POST['photo_max_h'], 1800, "photo_max_h"),
        "photo_max_b" => form_sanitizer($_POST['calc_b'] * $_POST['calc_c'], 2000000, ""),
        "gallery_pagination" => form_sanitizer($_POST['gallery_pagination'], 24, "gallery_pagination"),
        "photo_watermark" => form_sanitizer($_POST['photo_watermark'], 0, "photo_watermark"),
        "photo_watermark_save" => isset($_POST['photo_watermark_save']) ? 1 : 0,
        "photo_watermark_image" => isset($_POST['photo_watermark_image']) ? form_sanitizer($_POST['photo_watermark_image'], "", "photo_watermark_image") : IMAGES_G."watermark.png",
        "photo_watermark_text" => isset($_POST['photo_watermark_text']) ? 1 : 0,
        "photo_watermark_text_color1" => isset($_POST['photo_watermark_text_color1']) ? form_sanitizer($_POST['photo_watermark_text_color1'], "#000000", "photo_watermark_text_color1") : "#000000",
        "photo_watermark_text_color2" => isset($_POST['photo_watermark_text_color2']) ? form_sanitizer($_POST['photo_watermark_text_color2'], "#000000", "photo_watermark_text_color2") : "#000000",
        "photo_watermark_text_color3" => isset($_POST['photo_watermark_text_color3']) ? form_sanitizer($_POST['photo_watermark_text_color3'], "#000000", "photo_watermark_text_color3") : "#000000",
        "gallery_allow_submission" => isset($_POST['gallery_allow_submission']) ? 1 : 0,
        "gallery_extended_required" => isset($_POST['gallery_extended_required']) ? 1 : 0,
    );
    if (defender::safe()) {
        // Upsert each name/value pair into the infusion settings table.
        foreach ($inputArray as $settings_name => $settings_value) {
            $inputSettings = array(
                "settings_name" => $settings_name,
                "settings_value" => $settings_value,
                "settings_inf" => "gallery",
            );
            dbquery_insert(DB_SETTINGS_INF, $inputSettings, "update", array("primary_key" => "settings_name"));
        }
        addNotice("success", $locale['900']);
        redirect(FUSION_REQUEST);
    } else {
        // Validation failed; show an error and fall through to re-render.
        addNotice('danger', $locale['901']);
    }
}
// ---------------------------------------------------------------------------
// Render the settings form: sizing/pagination controls on the left,
// watermark options on the right, plus the jQuery enable/disable wiring.
// ---------------------------------------------------------------------------
echo openform('settingsform', 'post', FUSION_REQUEST, array("class" => "m-t-20"));
echo "<div class='well'>".$locale['gallery_0022']."</div>";
// NOTE(review): $choice_opts is never read in this file — possibly consumed
// by an include or leftover dead code; confirm before removing.
$choice_opts = array('1' => $locale['518'], '0' => $locale['519']);
$calc_opts = array(1 => 'Bytes (bytes)', 1000 => 'KB (Kilobytes)', 1000000 => 'MB (Megabytes)');
// Split the stored byte limit into a display value + unit for the inputs.
$calc_c = calculate_byte($gll_settings['photo_max_b']);
$calc_b = $gll_settings['photo_max_b']/$calc_c;
echo "<div class='row'><div class='col-xs-12 col-sm-8'>\n";
openside('');
// Photos per page.
echo form_text('gallery_pagination', $locale['gallery_0202'], $gll_settings['gallery_pagination'], array(
'max_length' => 2,
'inline' => 1,
'width' => '100px',
"type" => "number",
));
// Thumbnail dimensions (width x height).
echo "
<div class='row m-0'>\n
<label class='label-control col-xs-12 col-sm-3 p-l-0' for='thumb_w'>".$locale['gallery_0203']."</label>\n
<div class='col-xs-12 col-sm-9 p-l-0'>\n
".form_text('thumb_w', '', $gll_settings['thumb_w'], array(
'class' => 'pull-left m-r-10',
'max_length' => 4,
"type" => "number",
'width' => '150px'
))."
<i class='entypo icancel pull-left m-r-10 m-l-0 m-t-10'></i>\n
".form_text('thumb_h', '', $gll_settings['thumb_h'], array(
'class' => 'pull-left',
'max_length' => 4,
"type" => "number",
'width' => '150px'
))."
<small class='m-l-10 mid-opacity text-uppercase pull-left m-t-10'>( ".$locale['gallery_0204']." )</small>\n
</div>\n
</div>\n
";
// Display-size dimensions.
echo "
<div class='row m-0'>\n
<label class='label-control col-xs-12 col-sm-3 p-l-0' for='photo_max_w'>".$locale['gallery_0205']."</label>\n
<div class='col-xs-12 col-sm-9 p-l-0'>\n
".form_text('photo_w', '', $gll_settings['photo_w'], array(
'class' => 'pull-left m-r-10',
'max_length' => 4,
"type" => "number",
'width' => '150px'
))."
<i class='entypo icancel pull-left m-r-10 m-l-0 m-t-10'></i>\n
".form_text('photo_h', '', $gll_settings['photo_h'], array(
'class' => 'pull-left',
'max_length' => 4,
"type" => "number",
'width' => '150px'
))."
<small class='m-l-10 mid-opacity text-uppercase pull-left m-t-10'>( ".$locale['gallery_0204']." )</small>\n
</div>\n
</div>\n";
// Maximum upload dimensions.
echo "
<div class='row m-0'>\n
<label class='label-control col-xs-12 col-sm-3 p-l-0' for='photo_w'>".$locale['gallery_0206']."</label>\n
<div class='col-xs-12 col-sm-9 p-l-0'>\n
".form_text('photo_max_w', '', $gll_settings['photo_max_w'], array(
'class' => 'pull-left m-r-10',
'max_length' => 4,
"type" => "number",
'width' => '150px'
))."
<i class='entypo icancel pull-left m-r-10 m-l-0 m-t-10'></i>\n
".form_text('photo_max_h', '', $gll_settings['photo_max_h'], array(
'class' => 'pull-left',
'max_length' => 4,
"type" => "number",
'width' => '150px'
))."
<small class='m-l-10 mid-opacity text-uppercase pull-left m-t-10'>( ".$locale['gallery_0204']." )</small>\n
</div>\n
</div>\n";
// Maximum upload size: numeric value plus a bytes/KB/MB unit select.
echo "
<div class='row m-0'>\n
<label class='col-xs-12 col-sm-3 p-l-0' for='calc_b'>".$locale['gallery_0207']."</label>\n
<div class='col-xs-12 col-sm-9 p-l-0'>\n
".form_text('calc_b', '', $calc_b, array(
'required' => 1,
"type" => "number",
'error_text' => $locale['error_rate'],
'width' => '150px',
'max_length' => 4,
'class' => 'pull-left m-r-10'
))."
".form_select('calc_c', '', $calc_c, array('options' => $calc_opts, 'class' => 'pull-left', 'width' => '180px'))."
</div>\n
</div>\n
";
closeside();
openside('');
// Submission-related toggles.
echo form_checkbox("gallery_allow_submission", $locale['gallery_0200'], $gll_settings['gallery_allow_submission']);
echo form_checkbox("gallery_extended_required", $locale['gallery_0201'], $gll_settings['gallery_extended_required']);
closeside();
echo "</div><div class='col-xs-12 col-sm-4'>\n";
openside("");
// Watermark column: the color/image inputs start disabled when watermarking
// is off; the jQuery below toggles them live.
echo form_select('photo_watermark', $locale['gallery_0214'], $gll_settings['photo_watermark'], array(
"options" => array("0"=>$locale['disable'], "1"=>$locale['enable']),
"width" => "100%",
));
echo form_checkbox('photo_watermark_text', $locale['gallery_0213'], $gll_settings['photo_watermark_text']);
echo form_checkbox('photo_watermark_save', $locale['gallery_0215'], $gll_settings['photo_watermark_save']);
echo form_text('photo_watermark_image', $locale['gallery_0212'], $gll_settings['photo_watermark_image'], array(
'deactivate' => !$gll_settings['photo_watermark'] ? 1 : 0,
));
echo form_colorpicker('photo_watermark_text_color1', $locale['gallery_0208'], $gll_settings['photo_watermark_text_color1'], array(
'deactivate' => !$gll_settings['photo_watermark'] ? 1 : 0,
//"format"=>"rgb",
));
echo form_colorpicker('photo_watermark_text_color2', $locale['gallery_0209'], $gll_settings['photo_watermark_text_color2'], array(
'deactivate' => !$gll_settings['photo_watermark'] ? 1 : 0,
//"format"=>"rgb",
));
echo form_colorpicker('photo_watermark_text_color3', $locale['gallery_0210'], $gll_settings['photo_watermark_text_color3'], array(
'deactivate' => !$gll_settings['photo_watermark'] ? 1 : 0,
//"format"=>"rgb",
));
echo form_button('savesettings', $locale['gallery_0216'], $locale['gallery_0216'], array('class' => 'btn-success m-r-10'));
echo form_button('delete_watermarks', $locale['gallery_0211'], $locale['gallery_0211'], array(
'deactivate' => !$gll_settings['photo_watermark'] ? 1 : 0,
'class' => 'btn-default',
));
closeside();
echo "</div>\n</div>\n";
echo form_button('savesettings', $locale['gallery_0216'], $locale['gallery_0216'], array('class' => 'btn-success'));
echo closeform();
// Enable/disable the watermark-dependent widgets when the select changes.
add_to_jquery("
$('#photo_watermark').bind('change', function(){
var vals = $(this).select2().val();
if (vals == 1) {
$('#photo_watermark_save').select2('enable');
$('#delete_watermarks').removeAttr('disabled');
$('#photo_watermark_image').removeAttr('disabled');
$('#photo_watermark_text').select2('enable');
$('#photo_watermark_text_color1').colorpicker('enable');
$('#photo_watermark_text_color2').colorpicker('enable');
$('#photo_watermark_text_color3').colorpicker('enable');
} else {
$('#photo_watermark_save').select2('disable');
$('#delete_watermarks').attr('disabled', 'disabled');
$('#photo_watermark_image').attr('disabled', 'disabled');
$('#photo_watermark_text').select2('disable');
$('#photo_watermark_text_color1').colorpicker('disable');
$('#photo_watermark_text_color2').colorpicker('disable');
$('#photo_watermark_text_color3').colorpicker('disable');
}
});
");
/**
 * Pick the display unit (1 = bytes, 1000 = KB, 1000000 = MB) for a byte
 * count so that the value shown to the admin stays at most three digits.
 * Falls back to MB for anything larger than 999 MB.
 *
 * @param int $download_max_b size in bytes
 * @return int divisor to apply before display
 */
function calculate_byte($download_max_b) {
    $units = array(1, 1000, 1000000);
    foreach ($units as $unit) {
        if ($download_max_b / $unit <= 999) {
            return $unit;
        }
    }
    return 1000000;
}
/**
 * Build an HTML <select> of web-safe colors (the 216-color palette made of
 * hex components 00/33/66/99/CC/FF) for the given form field.
 *
 * The onchange handler live-updates a preview element with the DOM id
 * "preview_<field>". The select is rendered disabled while watermarking is
 * turned off in $gll_settings.
 *
 * @param string $field name of the select element
 * @param string $value currently selected hex color (without '#')
 * @return string the rendered <select> markup
 */
function color_mapper($field, $value) {
	global $gll_settings;
	// The six legal values for each of the R/G/B components.
	$cvalue[] = "00";
	$cvalue[] = "33";
	$cvalue[] = "66";
	$cvalue[] = "99";
	$cvalue[] = "CC";
	$cvalue[] = "FF";
	$select = "";
	$select = "<select name='".$field."' class='textbox' onchange=\"document.getElementById('preview_".$field."').style.background = '#' + this.options[this.selectedIndex].value;\" ".(!$gll_settings['photo_watermark'] ? "disabled='disabled'" : "").">\n";
	// Enumerate all 6^3 component combinations; each option is painted with
	// its own color as a swatch.
	for ($ca = 0; $ca < count($cvalue); $ca++) {
		for ($cb = 0; $cb < count($cvalue); $cb++) {
			for ($cc = 0; $cc < count($cvalue); $cc++) {
				$hcolor = $cvalue[$ca].$cvalue[$cb].$cvalue[$cc];
				$select .= "<option value='".$hcolor."'".($value == $hcolor ? " selected='selected' " : " ")."style='background-color:#".$hcolor.";'>#".$hcolor."</option>\n";
			}
		}
	}
	$select .= "</select>\n";
	return $select;
}
# A money-transfer ledger entry. State values are the integer codes
# persisted in the DB (open => 400, closed => 430); the :transfer namespace
# makes state_machine generate helpers like close_transfer! and
# can_close_transfer?.
class Transfer < Entry
  state_machine :initial => :open, :namespace => :transfer do
    # Only a paid transfer may be closed.
    # NOTE(review): :paid is referenced but not declared in this machine —
    # presumably defined on Entry or another module; confirm.
    event :close do
      transition :paid => :closed
    end

    state :open, :value => 400
    state :closed, :value => 430
  end

  # Account types that are allowed to participate in a transfer.
  def required_account_types
    [BankAccount, CreditCard]
  end
end
| MHMDhub/regdel | app/models/entries/transfer.rb | Ruby | agpl-3.0 | 291 |
// Barrel file: forward the CodeClipboard component's default export so
// consumers can import from the directory itself.
export { default } from './CodeClipboard';
/*
* $Id: CardCollectionDao.java 475 2005-12-08 23:44:08 -0800 (Thu, 08 Dec 2005) ivaynberg $
* $Revision: 475 $
* $Date: 2005-12-08 23:44:08 -0800 (Thu, 08 Dec 2005) $
*
* ==============================================================================
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.alienlabs.hatchetharry.persistence.dao;
import java.io.Serializable;
import java.util.List;
import org.alienlabs.hatchetharry.model.CardCollection;
import org.hibernate.Session;
/**
 * The implementation-independent DAO interface for {@link CardCollection}
 * persistence. Defines the operations required to be supported by an
 * implementation.
 *
 * @author igor
 */
public interface CardCollectionDao extends Serializable
{
    /**
     * @return the Hibernate {@link Session} this DAO operates on
     */
    Session getSession();

    /**
     * Load a {@link CardCollection} from the DB, given its <tt>id</tt>.
     *
     * @param id
     *            The id of the CardCollection to load.
     * @return CardCollection
     */
    CardCollection load(long id);

    /**
     * Save the CardCollection to the DB.
     *
     * @param contact
     *            the CardCollection to persist
     * @return persistent instance of the CardCollection
     */
    CardCollection save(CardCollection contact);

    /**
     * Delete a {@link CardCollection} from the DB, given its <tt>id</tt>.
     *
     * @param id
     *            The id of the CardCollection to delete.
     */
    void delete(long id);

    /**
     * Return the number of CardCollections in the DB.
     *
     * @return count
     */
    int count();

    /**
     * Returns the list of all unique last names in the database.
     * <p>
     * NOTE(review): this method looks copied from a Contact-style DAO
     * template — "last names" has no obvious meaning for card collections;
     * confirm against the implementation before relying on it.
     *
     * @return the list of all unique last names in the database
     */
    List<String> getUniqueLastNames();
}
| AlienQueen/HatchetHarry | src/main/java/org/alienlabs/hatchetharry/persistence/dao/CardCollectionDao.java | Java | agpl-3.0 | 2,144 |
/*
* Copyright (C) 2011-2013 The Animo Project
* http://animotron.org
*
* This file is part of Animotron.
*
* Animotron is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* Animotron is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of
* the GNU Affero General Public License along with Animotron.
* If not, see <http://www.gnu.org/licenses/>.
*/
package org.animotron.cache;
import java.io.IOException;
import java.io.OutputStream;
/**
 * Abstraction over a key-addressed cache whose entries can be read as a
 * stream, a string, or appended to a builder.
 *
 * @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a>
 * @author <a href="mailto:gazdovsky@gmail.com">Evgeny Gazdovsky</a>
 *
 */
public interface Cache {

    /**
     * @param key cache key
     * @return whether an entry for {@code key} is present
     * @throws IOException on storage access failure
     */
    public boolean available(String key) throws IOException;

    /** Copies the cached entry for {@code key} into {@code out}. */
    public void get(String key, OutputStream out) throws IOException;

    /** Appends the cached entry for {@code key} to {@code out}. */
    public void get(String key, StringBuilder out) throws IOException;

    /** @return the cached entry for {@code key} as a string */
    public String get(String key) throws IOException;

    /** @return a stream that caches under {@code key} while writing through to {@code out} */
    public OutputStream stream(String key, OutputStream out) throws IOException;

    /** @return a stream that caches under {@code key} while appending to {@code out} */
    public OutputStream stream(String key, StringBuilder out) throws IOException;

    /** Removes the entry for {@code key}, if any. */
    public void drop(String key) throws IOException;
}
<?php
/*
* This file is part of Kelinux-php.
* Copyright (C) 2012 Carlos Garcia Gomez neorazorx@gmail.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
require_once 'ke_db.php';
require_once 'ke_tools.php';
/**
 * Base class for persistent models: owns the database connection, the
 * backing table name and a public error buffer. Concrete models implement
 * exists(), save() and delete().
 * (Original Spanish comments translated to English.)
 */
abstract class ke_model extends ke_tools
{
    protected $db;         // ke_db connection wrapper
    protected $table_name; // name of the table backing this model
    public $errors;        // accumulated human-readable error messages

    /**
     * @param string $name name of the table backing this model
     */
    public function __construct($name)
    {
        $this->db = new ke_db();
        $this->table_name = $name;
        $this->errors = '';
    }

    // Append a message to the public error buffer.
    protected function new_error_msg($msg)
    {
        $this->errors .= $msg;
    }

    /*
     * Returns TRUE if this object's data is already present in the
     * database.
     */
    abstract public function exists();

    /*
     * Inserts or updates this object's data in the database.
     */
    abstract public function save();

    /// Deletes this object's data from the database.
    abstract public function delete();

    /// Returns the number of rows in this model's table.
    public function total()
    {
        $num = 0;
        $aux = $this->db->select("SELECT COUNT(*) as num FROM ".$this->table_name.";");
        if($aux)
            $num = intval($aux[0]['num']);
        return $num;
    }
}
?>
| NeoRazorX/kelinux-php | core/ke_model.php | PHP | agpl-3.0 | 1,890 |
// ----------> GENERATED FILE - DON'T TOUCH! <----------
// generator: ilarkesto.mda.legacy.generator.EntityGenerator
package scrum.server.collaboration;
import java.util.*;
import ilarkesto.persistence.*;
import ilarkesto.core.logging.Log;
import ilarkesto.base.*;
import ilarkesto.base.time.*;
import ilarkesto.auth.*;
public abstract class GComment
extends AEntity
implements ilarkesto.auth.ViewProtected<scrum.server.admin.User>, ilarkesto.search.Searchable, java.lang.Comparable<Comment> {
// --- AEntity ---
public final CommentDao getDao() {
return commentDao;
}
protected void repairDeadDatob(ADatob datob) {
}
@Override
public void storeProperties(Map properties) {
super.storeProperties(properties);
properties.put("parentId", this.parentId);
properties.put("authorId", this.authorId);
properties.put("published", this.published);
properties.put("authorName", this.authorName);
properties.put("authorEmail", this.authorEmail);
properties.put("authorNameVisible", this.authorNameVisible);
properties.put("text", this.text);
properties.put("dateAndTime", this.dateAndTime == null ? null : this.dateAndTime.toString());
}
public int compareTo(Comment other) {
return toString().toLowerCase().compareTo(other.toString().toLowerCase());
}
private static final ilarkesto.core.logging.Log LOG = ilarkesto.core.logging.Log.get(GComment.class);
public static final String TYPE = "comment";
// -----------------------------------------------------------
// - Searchable
// -----------------------------------------------------------
public boolean matchesKey(String key) {
if (super.matchesKey(key)) return true;
if (matchesKey(getText(), key)) return true;
return false;
}
// -----------------------------------------------------------
// - parent
// -----------------------------------------------------------
private String parentId;
private transient ilarkesto.persistence.AEntity parentCache;
private void updateParentCache() {
parentCache = this.parentId == null ? null : (ilarkesto.persistence.AEntity)getDaoService().getById(this.parentId);
}
public final String getParentId() {
return this.parentId;
}
public final ilarkesto.persistence.AEntity getParent() {
if (parentCache == null) updateParentCache();
return parentCache;
}
public final void setParent(ilarkesto.persistence.AEntity parent) {
parent = prepareParent(parent);
if (isParent(parent)) return;
this.parentId = parent == null ? null : parent.getId();
parentCache = parent;
fireModified("parent="+parent);
}
protected ilarkesto.persistence.AEntity prepareParent(ilarkesto.persistence.AEntity parent) {
return parent;
}
protected void repairDeadParentReference(String entityId) {
if (this.parentId == null || entityId.equals(this.parentId)) {
repairMissingMaster();
}
}
public final boolean isParentSet() {
return this.parentId != null;
}
public final boolean isParent(ilarkesto.persistence.AEntity parent) {
if (this.parentId == null && parent == null) return true;
return parent != null && parent.getId().equals(this.parentId);
}
protected final void updateParent(Object value) {
setParent(value == null ? null : (ilarkesto.persistence.AEntity)getDaoService().getById((String)value));
}
// -----------------------------------------------------------
// - author
// -----------------------------------------------------------
private String authorId;
private transient scrum.server.admin.User authorCache;
private void updateAuthorCache() {
authorCache = this.authorId == null ? null : (scrum.server.admin.User)userDao.getById(this.authorId);
}
public final String getAuthorId() {
return this.authorId;
}
public final scrum.server.admin.User getAuthor() {
if (authorCache == null) updateAuthorCache();
return authorCache;
}
public final void setAuthor(scrum.server.admin.User author) {
author = prepareAuthor(author);
if (isAuthor(author)) return;
this.authorId = author == null ? null : author.getId();
authorCache = author;
fireModified("author="+author);
}
protected scrum.server.admin.User prepareAuthor(scrum.server.admin.User author) {
return author;
}
protected void repairDeadAuthorReference(String entityId) {
if (this.authorId == null || entityId.equals(this.authorId)) {
setAuthor(null);
}
}
public final boolean isAuthorSet() {
return this.authorId != null;
}
public final boolean isAuthor(scrum.server.admin.User author) {
if (this.authorId == null && author == null) return true;
return author != null && author.getId().equals(this.authorId);
}
protected final void updateAuthor(Object value) {
setAuthor(value == null ? null : (scrum.server.admin.User)userDao.getById((String)value));
}
// -----------------------------------------------------------
// - published
// -----------------------------------------------------------
private boolean published;
public final boolean isPublished() {
return published;
}
public final void setPublished(boolean published) {
published = preparePublished(published);
if (isPublished(published)) return;
this.published = published;
fireModified("published="+published);
}
protected boolean preparePublished(boolean published) {
return published;
}
public final boolean isPublished(boolean published) {
return this.published == published;
}
protected final void updatePublished(Object value) {
setPublished((Boolean)value);
}
// -----------------------------------------------------------
// - authorName
// -----------------------------------------------------------
private java.lang.String authorName;
public final java.lang.String getAuthorName() {
return authorName;
}
public final void setAuthorName(java.lang.String authorName) {
authorName = prepareAuthorName(authorName);
if (isAuthorName(authorName)) return;
this.authorName = authorName;
fireModified("authorName="+authorName);
}
protected java.lang.String prepareAuthorName(java.lang.String authorName) {
authorName = Str.removeUnreadableChars(authorName);
return authorName;
}
public final boolean isAuthorNameSet() {
return this.authorName != null;
}
public final boolean isAuthorName(java.lang.String authorName) {
if (this.authorName == null && authorName == null) return true;
return this.authorName != null && this.authorName.equals(authorName);
}
protected final void updateAuthorName(Object value) {
setAuthorName((java.lang.String)value);
}
// -----------------------------------------------------------
// - authorEmail
// -----------------------------------------------------------
private java.lang.String authorEmail;
public final java.lang.String getAuthorEmail() {
return authorEmail;
}
public final void setAuthorEmail(java.lang.String authorEmail) {
authorEmail = prepareAuthorEmail(authorEmail);
if (isAuthorEmail(authorEmail)) return;
this.authorEmail = authorEmail;
fireModified("authorEmail="+authorEmail);
}
protected java.lang.String prepareAuthorEmail(java.lang.String authorEmail) {
authorEmail = Str.removeUnreadableChars(authorEmail);
return authorEmail;
}
public final boolean isAuthorEmailSet() {
return this.authorEmail != null;
}
public final boolean isAuthorEmail(java.lang.String authorEmail) {
if (this.authorEmail == null && authorEmail == null) return true;
return this.authorEmail != null && this.authorEmail.equals(authorEmail);
}
protected final void updateAuthorEmail(Object value) {
setAuthorEmail((java.lang.String)value);
}
// -----------------------------------------------------------
// - authorNameVisible
// -----------------------------------------------------------
// Whether the author's name may be displayed publicly.
private boolean authorNameVisible;
public final boolean isAuthorNameVisible() {
return authorNameVisible;
}
// Stores the flag; fires a modification event only when it changes.
public final void setAuthorNameVisible(boolean authorNameVisible) {
authorNameVisible = prepareAuthorNameVisible(authorNameVisible);
if (isAuthorNameVisible(authorNameVisible)) return;
this.authorNameVisible = authorNameVisible;
fireModified("authorNameVisible="+authorNameVisible);
}
// Hook for subclasses; default passes the value through unchanged.
protected boolean prepareAuthorNameVisible(boolean authorNameVisible) {
return authorNameVisible;
}
// True when the given flag equals the stored one.
public final boolean isAuthorNameVisible(boolean authorNameVisible) {
return this.authorNameVisible == authorNameVisible;
}
// Entity-store callback: unboxes and applies a raw property value.
protected final void updateAuthorNameVisible(Object value) {
setAuthorNameVisible((Boolean)value);
}
// -----------------------------------------------------------
// - text
// -----------------------------------------------------------
// The comment body (null = never set).
private java.lang.String text;
// Returns the comment text, or null if unset.
public final java.lang.String getText() {
return text;
}
// Normalizes then stores the text; fires a modification event only on change.
public final void setText(java.lang.String text) {
text = prepareText(text);
if (isText(text)) return;
this.text = text;
fireModified("text="+text);
}
// Hook for subclasses: sanitize the incoming value (strips unreadable chars).
protected java.lang.String prepareText(java.lang.String text) {
text = Str.removeUnreadableChars(text);
return text;
}
// True when a text has been set.
public final boolean isTextSet() {
return this.text != null;
}
// Null-safe equality check against the stored text.
public final boolean isText(java.lang.String text) {
if (this.text == null && text == null) return true;
return this.text != null && this.text.equals(text);
}
// Entity-store callback: applies a raw property value via the setter.
protected final void updateText(Object value) {
setText((java.lang.String)value);
}
// -----------------------------------------------------------
// - dateAndTime
// -----------------------------------------------------------
// Timestamp of the comment (null = never set).
private ilarkesto.base.time.DateAndTime dateAndTime;
// Returns the comment timestamp, or null if unset.
public final ilarkesto.base.time.DateAndTime getDateAndTime() {
return dateAndTime;
}
// Stores the timestamp; fires a modification event only on change.
public final void setDateAndTime(ilarkesto.base.time.DateAndTime dateAndTime) {
dateAndTime = prepareDateAndTime(dateAndTime);
if (isDateAndTime(dateAndTime)) return;
this.dateAndTime = dateAndTime;
fireModified("dateAndTime="+dateAndTime);
}
// Hook for subclasses; default passes the value through unchanged.
protected ilarkesto.base.time.DateAndTime prepareDateAndTime(ilarkesto.base.time.DateAndTime dateAndTime) {
return dateAndTime;
}
// True when a timestamp has been set.
public final boolean isDateAndTimeSet() {
return this.dateAndTime != null;
}
// Null-safe equality check against the stored timestamp.
public final boolean isDateAndTime(ilarkesto.base.time.DateAndTime dateAndTime) {
if (this.dateAndTime == null && dateAndTime == null) return true;
return this.dateAndTime != null && this.dateAndTime.equals(dateAndTime);
}
// Entity-store callback: the stored value is the String form produced by
// DateAndTime.toString(); parse it back before applying.
protected final void updateDateAndTime(Object value) {
value = value == null ? null : new ilarkesto.base.time.DateAndTime((String)value);
setDateAndTime((ilarkesto.base.time.DateAndTime)value);
}
/**
 * Applies a map of raw property values (as read from the entity store)
 * to this entity. The "id" key is skipped because the primary key is
 * immutable. Property names are mutually exclusive, so an else-if chain
 * is used: once a name matches, the remaining comparisons are skipped
 * (the original chained independent ifs tested every name per entry).
 */
public void updateProperties(Map<?, ?> properties) {
for (Map.Entry entry : properties.entrySet()) {
String property = (String) entry.getKey();
if (property.equals("id")) continue;
Object value = entry.getValue();
if (property.equals("parentId")) updateParent(value);
else if (property.equals("authorId")) updateAuthor(value);
else if (property.equals("published")) updatePublished(value);
else if (property.equals("authorName")) updateAuthorName(value);
else if (property.equals("authorEmail")) updateAuthorEmail(value);
else if (property.equals("authorNameVisible")) updateAuthorNameVisible(value);
else if (property.equals("text")) updateText(value);
else if (property.equals("dateAndTime")) updateDateAndTime(value);
}
}
// Drops any reference this comment holds to the given deleted entity id
// (parent and author), after letting the superclass repair its own fields.
protected void repairDeadReferences(String entityId) {
super.repairDeadReferences(entityId);
repairDeadParentReference(entityId);
repairDeadAuthorReference(entityId);
}
// --- ensure integrity ---
// Validates referential integrity after load: a comment without a parent
// is handed to repairMissingMaster(); dead parent/author references
// (target entity no longer exists) are repaired in place.
public void ensureIntegrity() {
super.ensureIntegrity();
if (!isParentSet()) {
repairMissingMaster();
return;
}
try {
getParent();
} catch (EntityDoesNotExistException ex) {
LOG.info("Repairing dead parent reference");
repairDeadParentReference(this.parentId);
}
try {
getAuthor();
} catch (EntityDoesNotExistException ex) {
LOG.info("Repairing dead author reference");
repairDeadAuthorReference(this.authorId);
}
}
// DAO singleton, injected once at application startup via setCommentDao().
static CommentDao commentDao;
public static final void setCommentDao(CommentDao commentDao) {
GComment.commentDao = commentDao;
}
}
// source: hogi/kunagi src/generated/java/scrum/server/collaboration/GComment.java (Java, agpl-3.0)
// NOTE: this deliberately shadows the global Array — it is the tag
// framework's Array base class, not the JS built-in.
const { Array } = require.main.require('./Tag/Classes');

// {get;array;index} — returns the element of `array` at `index`.
class GetTag extends Array {
    constructor(client) {
        super(client, {
            name: 'get',
            args: [
                {
                    name: 'array'
                }, {
                    name: 'index'
                }
            ],
            minArgs: 2, maxArgs: 2
        });
    }

    // Resolves the named/stored array, parses the index argument, and
    // returns the element (undefined indices yield empty output).
    async execute(ctx, args) {
        const res = await super.execute(ctx, args, true);
        args = args.parsedArgs;
        let arr = await this.loadArray(ctx, args.array);
        let index = this.parseInt(args.index, 'index');
        return res.setContent(arr[index]);
    }

    // Not an implicit tag: must be invoked explicitly.
    get implicit() { return false; }
}
module.exports = GetTag;
// source: Ratismal/blargbot Production/Tags/Array/Get.js (JavaScript, agpl-3.0)
/*
This file is part of the Juju GUI, which lets users view and manage Juju
environments within a graphical interface (https://launchpad.net/juju-gui).
Copyright (C) 2012-2013 Canonical Ltd.
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License version 3, as published by
the Free Software Foundation.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
General Public License for more details.
You should have received a copy of the GNU Affero General Public License along
with this program. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
(function() {
describe('Browser charm view', function() {
var container, CharmView, cleanIconHelper, factory, models, node, utils,
view, views, Y, testContainer;
before(function(done) {
Y = YUI(GlobalConfig).use(
'datatype-date',
'datatype-date-format',
'charmstore-api',
'json-stringify',
'juju-charm-models',
'juju-tests-utils',
'juju-tests-factory',
'node',
'node-event-simulate',
'subapp-browser-charmview',
function(Y) {
views = Y.namespace('juju.browser.views');
models = Y.namespace('juju.models');
utils = Y.namespace('juju-tests.utils');
factory = Y.namespace('juju-tests.factory');
CharmView = views.BrowserCharmView;
cleanIconHelper = utils.stubCharmIconPath();
done();
});
});
beforeEach(function() {
window.flags = {};
container = utils.makeContainer(this, 'container');
var testcontent = [
'<div id=testcontent><div class="bws-view-data">',
'</div></div>'
].join();
Y.Node.create(testcontent).appendTo(container);
// Mock out a dummy location for the Store used in view instances.
window.juju_config = {
charmworldURL: 'http://localhost'
};
node = Y.one('#testcontent');
});
afterEach(function() {
window.flags = {};
if (view) {
view.destroy();
}
if (testContainer) {
testContainer.remove(true);
}
node.remove(true);
delete window.juju_config;
container.remove(true);
});
after(function() {
cleanIconHelper();
});
it('renders for inspector mode correctly', function() {
var data = utils.loadFixture('data/browsercharm.json', true);
// We don't want any files so we don't have to mock/load them.
data.files = [];
view = new CharmView({
entity: new models.Charm(data),
container: utils.makeContainer(this),
forInspector: true
});
view.render();
assert.isNull(view.get('container').one('.heading'));
// There is no 'related charms' tab to display.
assert.equal(view.get('container').all('.related-charms').size(), 0);
});
// Return the charm heading node included in the charm detail view.
var makeHeading = function(context, is_subordinate) {
var data = utils.loadFixture('data/browsercharm.json', true);
// We don't want any files so we don't have to mock/load them.
data.files = [];
data.is_subordinate = is_subordinate;
utils.makeContainer(context);
view = new CharmView({
entity: new models.Charm(data),
container: utils.makeContainer(context)
});
view.render();
var heading = view.get('container').one('.header');
assert.isNotNull(heading);
return heading;
};
it('avoids showing the subordinate message for non-subordinate charms',
function() {
var heading = makeHeading(this, false);
assert.notInclude(heading.getContent(), 'Subordinate charm');
});
it('shows the subordinate message if the charm is a subordinate',
function() {
var heading = makeHeading(this, true);
assert.include(heading.getContent(), 'Subordinate charm');
});
it('renders local charms for inspector mode correctly', function() {
var data = utils.loadFixture('data/browsercharm.json', true);
// We don't want any files so we don't have to mock/load them.
data.files = [];
data.url = 'local:precise/apache2-10';
var charm = new models.Charm(data);
charm.set('scheme', 'local');
view = new CharmView({
entity: charm,
container: utils.makeContainer(this),
forInspector: true
});
view.render();
assert.isNull(view.get('container').one('.heading'));
assert.isNull(view.get('container').one('#readme'));
assert.isNull(view.get('container').one('#configuration'));
assert.isNull(view.get('container').one('#code'));
assert.isNull(view.get('container').one('#features'));
});
it('has sharing links', function() {
view = new CharmView({
entity: new models.Charm({
files: [
'hooks/install',
'readme.rst'
],
id: 'precise/wordpress',
code_source: { location: 'lp:~foo'}
}),
container: utils.makeContainer(this),
charmstore: factory.makeFakeCharmstore()
});
view.render();
var links = container.all('#sharing a');
assert.equal(links.size(), 3);
});
it('should be able to locate a readme file', function() {
view = new CharmView({
entity: new models.Charm({
files: [
'hooks/install',
'readme.rst'
],
id: 'precise/ceph-9',
code_source: { location: 'lp:~foo' }
})
});
view._locateReadme().should.eql('readme.rst');
// Matches for caps as well.
view.get('entity').set('files', [
'hooks/install',
'README.md'
]);
view._locateReadme().should.eql('README.md');
});
it('can generate source, bug, and revno links from its charm', function() {
view = new CharmView({
entity: new models.Charm({
files: [
'hooks/install',
'readme.rst'
],
id: 'precise/ceph-9',
name: 'ceph',
code_source: { location: 'lp:~foo'}
})
});
var url = view._getSourceLink(
view.get('entity').get('code_source').location);
assert.equal('http://bazaar.launchpad.net/~foo/files', url);
assert.equal(
'http://bazaar.launchpad.net/~foo/revision/1',
view._getRevnoLink(url, 1));
assert.equal(
'https://bugs.launchpad.net/charms/+source/ceph',
view._getBugLink(view.get('entity').get('name')));
});
it('excludes source svg files from the source tab', function() {
view = new CharmView({
entity: new models.Charm({
files: [
'hooks/install',
'icon.svg',
'readme.rst'
],
id: 'precise/ceph-9',
code_source: { location: 'lp:~foo'}
}),
container: utils.makeContainer(this)
});
view.render();
var options = Y.one('#code').all('select option');
assert.equal(options.size(), 3);
assert.deepEqual(
options.get('text'),
['Select --', 'readme.rst', 'hooks/install']);
});
it('can generate useful display data for commits', function() {
view = new CharmView({
entity: new models.Charm({
files: [
'hooks/install',
'readme.rst'
],
id: 'precise/ceph-9',
code_source: {
location: 'lp:~foo'
}
})
});
var revisions = [
{
authors: [{
email: 'jdoe@example.com',
name: 'John Doe'
}],
date: '2013-05-02T10:05:32Z',
message: 'The fnord had too much fleem.',
revno: 1
},
{
authors: [{
email: 'jdoe@example.com',
name: 'John Doe'
}],
date: '2013-05-02T10:05:45Z',
message: 'Fnord needed more fleem.',
revno: 2
}
];
var url = view._getSourceLink(
view.get('entity').get('code_source').location);
var commits = view._formatCommitsForHtml(revisions, url);
assert.equal(
'http://bazaar.launchpad.net/~foo/revision/1',
commits.first.revnoLink);
assert.equal(
'http://bazaar.launchpad.net/~foo/revision/2',
commits.remaining[0].revnoLink);
});
it('should be able to display the readme content', function() {
view = new CharmView({
activeTab: '#readme',
entity: new models.Charm({
files: [
'hooks/install',
'readme.rst'
],
id: 'precise/ceph-9',
code_source: { location: 'lp:~foo'}
}),
container: utils.makeContainer(this),
charmstore: {
getFile: function(url, filename, success, failure) {
success({
target: {
responseText: 'README content.'
}
});
}
}
});
view.render();
Y.one('#readme').get('text').should.eql('README content.');
});
// EVENTS
it('should catch when the add control is clicked', function(done) {
view = new CharmView({
activeTab: '#readme',
entity: new models.Charm({
files: [
'hooks/install'
],
id: 'precise/ceph-9',
code_source: { location: 'lp:~foo' }
}),
container: utils.makeContainer(this)
});
// Hook up to the callback for the click event.
view._addCharmEnvironment = function(ev) {
ev.halt();
Y.one('#readme h3').get('text').should.eql('Charm has no README');
done();
};
view.render();
node.one('.charm .add').simulate('click');
});
it('_addCharmEnvironment displays the config panel', function(done) {
var fakeStore = new Y.juju.charmstore.APIv4({
charmstoreURL: 'localhost/'
});
view = new CharmView({
entity: new models.Charm({
files: [
'hooks/install'
],
id: 'precise/ceph-9',
url: 'cs:precise/ceph-9',
code_source: { location: 'lp:~foo' },
options: {
configName: 'test'
}
}),
container: utils.makeContainer(this),
charmstore: fakeStore
});
var fireStub = utils.makeStubMethod(view, 'fire');
this._cleanups.push(fireStub.reset);
view.set('deployService', function(charm, serviceAttrs) {
var serviceCharm = view.get('entity');
assert.deepEqual(charm, serviceCharm);
assert.equal(charm.get('id'), 'cs:precise/ceph-9');
assert.equal(serviceAttrs.icon, 'localhost/v4/precise/ceph-9/icon.svg');
assert.equal(fireStub.calledOnce(), true);
var fireArgs = fireStub.lastArguments();
assert.equal(fireArgs[0], 'changeState');
assert.deepEqual(fireArgs[1], {
sectionA: {
component: 'charmbrowser',
metadata: {
id: null }}});
done();
});
view._addCharmEnvironment({halt: function() {}});
});
it('should load a file when a hook is selected', function() {
view = new CharmView({
entity: new models.Charm({
files: [
'hooks/install',
'readme.rst'
],
id: 'precise/ceph-9',
code_source: { location: 'lp:~foo' }
}),
container: utils.makeContainer(this),
charmstore: {
getFile: function(url, filename, success, failure) {
success({
target: {
responseText: '<install hook content>'
}
});
}
}
});
view.render();
Y.one('#code').all('select option').size().should.equal(3);
// Select the hooks install and the content should update.
Y.one('#code').all('select option').item(2).set(
'selected', 'selected');
Y.one('#code').one('select').simulate('change');
var content = Y.one('#code').one('div.filecontent');
// Content is escaped, so we read it out as text, not tags.
content.get('text').should.eql('<install hook content>');
});
it('should be able to render markdown as html', function() {
view = new CharmView({
activeTab: '#readme',
entity: new models.Charm({
files: [
'readme.md'
],
id: 'precise/wordpress-9',
code_source: { location: 'lp:~foo' }
}),
container: utils.makeContainer(this),
charmstore: {
getFile: function(url, filename, success, failure) {
success({
target: {
responseText: 'README Header\n============='
}
});
}
}
});
view.render();
Y.one('#readme').get('innerHTML').should.eql(
'<h1>README Header</h1>');
});
it('should display the config data in the config tab', function() {
view = new CharmView({
entity: new models.Charm({
files: [],
id: 'precise/ceph-9',
code_source: { location: 'lp:~foo' },
options: {
'client-port': {
'default': 9160,
'description': 'Port for client communcation',
'type': 'int'
}
}
}),
container: utils.makeContainer(this)
});
view.render();
Y.one('#configuration dd div').get('text').should.eql(
'Default: 9160');
Y.one('#configuration dd p').get('text').should.eql(
'Port for client communcation');
});
it('should catch when the open log is clicked', function(done) {
var data = utils.loadFixture('data/browsercharm.json', true);
// We don't want any files so we don't have to mock/load them.
data.files = [];
view = new CharmView({
entity: new models.Charm(data),
container: utils.makeContainer(this)
});
// Hook up to the callback for the click event.
view._toggleLog = function(ev) {
ev.halt();
done();
};
view.render();
node.one('.changelog .expand').simulate('click');
});
it('changelog is reformatted and displayed', function() {
var data = utils.loadFixture('data/browsercharm.json', true);
// We don't want any files so we don't have to mock/load them.
data.files = [];
view = new CharmView({
entity: new models.Charm(data),
container: utils.makeContainer(this)
});
view.render();
// Basics that we have the right number of nodes.
node.all('.remaining li').size().should.eql(9);
node.all('.first p').size().should.eql(1);
// The reminaing starts out hidden.
assert(node.one('.changelog .remaining').hasClass('hidden'));
});
it('_getInterfaceIntroFlag sets the flag for no requires, no provides',
function() {
var charm = new models.Charm({
files: [],
id: 'precise/ceph-9',
relations: {
'provides': {
},
'requires': {
}
}
});
view = new CharmView({
entity: charm
});
var interfaceIntro = view._getInterfaceIntroFlag(
charm.get('requires'), charm.get('provides'));
assert(Y.Object.hasKey(interfaceIntro, 'noRequiresNoProvides'));
});
it('_getInterfaceIntroFlag sets the flag for no requires, 1 provides',
function() {
var charm = new models.Charm({
files: [],
id: 'precise/ceph-9',
relations: {
'provides': {
'foo': {}
},
'requires': {
}
}
});
view = new CharmView({
entity: charm
});
var interfaceIntro = view._getInterfaceIntroFlag(
charm.get('requires'), charm.get('provides'));
assert(Y.Object.hasKey(interfaceIntro, 'noRequiresOneProvides'));
});
it('_getInterfaceIntroFlag sets the flag for no requires, many provides',
function() {
var charm = new models.Charm({
files: [],
id: 'precise/ceph-9',
relations: {
'provides': {
'foo': {},
'two': {}
},
'requires': {
}
}
});
view = new CharmView({
entity: charm
});
var interfaceIntro = view._getInterfaceIntroFlag(
charm.get('requires'), charm.get('provides'));
assert(Y.Object.hasKey(interfaceIntro, 'noRequiresManyProvides'));
});
it('_getInterfaceIntroFlag sets the flag for 1 requires, no provides',
function() {
var charm = new models.Charm({
files: [],
id: 'precise/ceph-9',
relations: {
'provides': {
},
'requires': {
'foo': {}
}
}
});
view = new CharmView({
entity: charm
});
var interfaceIntro = view._getInterfaceIntroFlag(
charm.get('requires'), charm.get('provides'));
assert(Y.Object.hasKey(interfaceIntro, 'oneRequiresNoProvides'));
});
it('_getInterfaceIntroFlag sets the flag for 1 requires, 1 provides',
function() {
var charm = new models.Charm({
files: [],
id: 'precise/ceph-9',
relations: {
'provides': {
'foo': {}
},
'requires': {
'foo': {}
}
}
});
view = new CharmView({
entity: charm
});
var interfaceIntro = view._getInterfaceIntroFlag(
charm.get('requires'), charm.get('provides'));
assert(Y.Object.hasKey(interfaceIntro, 'oneRequiresOneProvides'));
});
it('_getInterfaceIntroFlag sets the flag for 1 requires, many provides',
function() {
var charm = new models.Charm({
files: [],
id: 'precise/ceph-9',
relations: {
'provides': {
'foo': {},
'two': {}
},
'requires': {
'foo': {}
}
}
});
view = new CharmView({
entity: charm
});
var interfaceIntro = view._getInterfaceIntroFlag(
charm.get('requires'), charm.get('provides'));
assert(Y.Object.hasKey(interfaceIntro, 'oneRequiresManyProvides'));
});
it('_getInterfaceIntroFlag sets the flag for many requires, no provides',
function() {
var charm = new models.Charm({
files: [],
id: 'precise/ceph-9',
relations: {
'provides': {
},
'requires': {
'foo': {},
'two': {}
}
}
});
view = new CharmView({
entity: charm
});
var interfaceIntro = view._getInterfaceIntroFlag(
charm.get('requires'), charm.get('provides'));
assert(Y.Object.hasKey(interfaceIntro, 'manyRequiresNoProvides'));
});
it('_getInterfaceIntroFlag sets the flag for many requires, 1 provides',
function() {
var charm = new models.Charm({
files: [],
id: 'precise/ceph-9',
relations: {
'provides': {
'foo': {}
},
'requires': {
'foo': {},
'two': {}
}
}
});
view = new CharmView({
entity: charm
});
var interfaceIntro = view._getInterfaceIntroFlag(
charm.get('requires'), charm.get('provides'));
assert(Y.Object.hasKey(interfaceIntro, 'manyRequiresOneProvides'));
});
it('_getInterfaceIntroFlag sets the flag for many requires, many provides',
function() {
var charm = new models.Charm({
files: [],
id: 'precise/ceph-9',
relations: {
'provides': {
'foo': {},
'two': {}
},
'requires': {
'foo': {},
'two': {}
}
}
});
view = new CharmView({
entity: charm
});
var interfaceIntro = view._getInterfaceIntroFlag(
charm.get('requires'), charm.get('provides'));
assert(Y.Object.hasKey(interfaceIntro, 'manyRequiresManyProvides'));
});
it('shows and hides an indicator', function(done) {
var hit = 0;
var data = utils.loadFixture('data/browsercharm.json', true);
// We don't want any files so we don't have to mock/load them.
data.files = [];
view = new CharmView({
entity: new models.Charm(data),
container: utils.makeContainer(this)
});
view.showIndicator = function() {
hit += 1;
};
view.hideIndicator = function() {
hit += 1;
hit.should.equal(2);
done();
};
view.render();
});
it('selects the proper tab when given one', function() {
var data = utils.loadFixture('data/browsercharm.json', true);
// We don't want any files so we don't have to mock/load them.
data.files = [];
view = new CharmView({
activeTab: '#configuration',
entity: new models.Charm(data),
container: utils.makeContainer(this)
});
view.render();
// We've selected the activeTab specified.
var selected = view.get('container').one('nav .active');
assert.equal(selected.getAttribute('href'), '#configuration');
});
it('sets the proper change request when closed', function(done) {
var data = utils.loadFixture('data/browsercharm.json', true);
// We don't want any files so we don't have to mock/load them.
data.files = [];
view = new CharmView({
activeTab: '#configuration',
entity: new models.Charm(data),
container: utils.makeContainer(this)
});
view.on('changeState', function(ev) {
assert.equal(ev.details[0].sectionA.metadata.id, null,
'The charm id is not set to null.');
assert.equal(ev.details[0].sectionA.metadata.hash, null,
'The charm details hash is not set to null.');
done();
});
view.render();
view.get('container').one('.charm .back').simulate('click');
});
it('renders related charms when interface tab selected', function() {
var data = utils.loadFixture('data/browsercharm.json', true);
testContainer = utils.makeContainer(this);
// We don't want any files so we don't have to mock/load them.
data.files = [];
view = new CharmView({
activeTab: '#related-charms',
entity: new models.Charm(data),
renderTo: testContainer
});
view.render();
assert.equal(
testContainer.all('#related-charms .token').size(),
18);
assert.equal(view.get('entity').get('id'), 'cs:precise/apache2-27');
assert.isTrue(view.loadedRelatedInterfaceCharms);
});
it('ignore invalid tab selections', function() {
var data = utils.loadFixture('data/browsercharm.json', true);
testContainer = utils.makeContainer(this);
// We don't want any files so we don't have to mock/load them.
data.files = [];
var fakeStore = factory.makeFakeCharmstore();
view = new CharmView({
activeTab: '#bws-does-not-exist',
entity: new models.Charm(data),
renderTo: testContainer,
charmstore: fakeStore
});
view.render();
assert.equal(
testContainer.one('nav .active').getAttribute('href'),
'#summary');
});
it('should open header links in a new tab', function() {
var data = utils.loadFixture('data/browsercharm.json', true);
// We don't want any files so we don't have to mock/load them.
data.files = [];
view = new CharmView({
entity: new models.Charm(data),
container: utils.makeContainer(this)
});
view.render();
var links = view.get('container').all('.header .details li a');
// Check that we've found the links, otherwise the assert in .each will
// succeed when there are no links.
assert.equal(links.size() > 0, true);
links.each(function(link) {
assert.equal(link.getAttribute('target'), '_blank');
});
});
});
})();
// source: jrwren/juju-gui test/test_browser_charm_details.js (JavaScript, agpl-3.0)
import React, { useMemo } from 'react';
import { cx, css } from '@emotion/css';
import { MenuItem, WithContextMenu, MenuGroup, useTheme2 } from '@grafana/ui';
import { SelectableValue, GrafanaTheme2 } from '@grafana/data';
import { Seg } from './Seg';
import { unwrap } from './unwrap';
import { toSelectableValue } from './toSelectableValue';
import { AddButton } from './AddButton';
// One entry per part parameter: its current value plus an optional async
// loader of autocomplete options (null = free text only, no dropdown).
export type PartParams = Array<{
  value: string;
  options: (() => Promise<string[]>) | null;
}>;

// Props for the part-list section: the parts to render plus callbacks for
// editing a part's params, removing a part, and appending a new one.
type Props = {
  parts: Array<{
    name: string;
    params: PartParams;
  }>;
  getNewPartOptions: () => Promise<SelectableValue[]>;
  onChange: (partIndex: number, paramValues: string[]) => void;
  onRemovePart: (index: number) => void;
  onAddNewPart: (type: string) => void;
};
const renderRemovableNameMenuItems = (onClick: () => void) => {
return (
<MenuGroup label="">
<MenuItem label="remove" onClick={onClick} />
</MenuGroup>
);
};
// Removes trailing spacing so the "(" hugs the part-name button.
const noRightMarginPaddingClass = css({
  paddingRight: '0',
  marginRight: '0',
});
// Part name rendered as a button; clicking opens a context menu whose
// single "remove" item triggers onRemove.
const RemovableName = ({ name, onRemove }: { name: string; onRemove: () => void }) => {
  return (
    <WithContextMenu renderMenuItems={() => renderRemovableNameMenuItems(onRemove)}>
      {({ openMenu }) => (
        <button className={cx('gf-form-label', noRightMarginPaddingClass)} onClick={openMenu}>
          {name}
        </button>
      )}
    </WithContextMenu>
  );
};
// Props for a single rendered part: its name, parameter list, and
// callbacks for removal and parameter edits.
type PartProps = {
  name: string;
  params: PartParams;
  onRemove: () => void;
  onChange: (paramValues: string[]) => void;
};

// Removes horizontal spacing so param segments sit flush inside "( )".
const noHorizMarginPaddingClass = css({
  paddingLeft: '0',
  paddingRight: '0',
  marginLeft: '0',
  marginRight: '0',
});
// Computes the wrapper class for a part, restoring the line-height and
// font-size that gf-form-label would otherwise break for the dropdown.
const getPartClass = (theme: GrafanaTheme2) => {
  return cx(
    'gf-form-label',
    css({
      paddingLeft: '0',
      // gf-form-label class makes certain css attributes incorrect
      // for the selectbox-dropdown, so we have to "reset" them back
      lineHeight: theme.typography.body.lineHeight,
      fontSize: theme.typography.body.fontSize,
    })
  );
};
// Renders one part as `name(param1, param2, ...)`: the name is removable
// via a context menu, and each param is an editable segment.
const Part = ({ name, params, onChange, onRemove }: PartProps): JSX.Element => {
  const theme = useTheme2();
  const partClass = useMemo(() => getPartClass(theme), [theme]);
  // Replace a single param value, then report the full value list upward.
  const onParamChange = (par: string, i: number) => {
    const newParams = params.map((p) => p.value);
    newParams[i] = par;
    onChange(newParams);
  };
  return (
    <div className={partClass}>
      <RemovableName name={name} onRemove={onRemove} />(
      {params.map((p, i) => {
        const { value, options } = p;
        const isLast = i === params.length - 1;
        // Adapt the string loader (if any) to Seg's SelectableValue shape.
        const loadOptions =
          options !== null ? () => options().then((items) => items.map(toSelectableValue)) : undefined;
        return (
          <React.Fragment key={i}>
            <Seg
              allowCustomValue
              value={value}
              buttonClassName={noHorizMarginPaddingClass}
              loadOptions={loadOptions}
              onChange={(v) => {
                onParamChange(unwrap(v.value), i);
              }}
            />
            {!isLast && ','}
          </React.Fragment>
        );
      })}
      )
    </div>
  );
};
// Renders the list of query parts followed by an add button whose
// options are loaded lazily via getNewPartOptions.
export const PartListSection = ({
  parts,
  getNewPartOptions,
  onAddNewPart,
  onRemovePart,
  onChange,
}: Props): JSX.Element => {
  return (
    <>
      {parts.map((part, index) => (
        <Part
          key={index}
          name={part.name}
          params={part.params}
          onRemove={() => {
            onRemovePart(index);
          }}
          onChange={(pars) => {
            onChange(index, pars);
          }}
        />
      ))}
      <AddButton loadOptions={getNewPartOptions} onAdd={onAddNewPart} />
    </>
  );
};
// source: grafana/grafana public/app/plugins/datasource/influxdb/components/VisualInfluxQLEditor/PartListSection.tsx (TypeScript, agpl-3.0)
// Maps legacy numeric avatar IDs (and a few special string keys) to
// sprite names. Duplicate values (e.g. 162/163 'red', 167/168 'unknown')
// mirror the original client data — presumably intentional; verify
// against the upstream avatar table before "deduplicating".
export const BattleAvatarNumbers = {
	1: 'lucas',
	2: 'dawn',
	3: 'youngster-gen4',
	4: 'lass-gen4dp',
	5: 'camper',
	6: 'picnicker',
	7: 'bugcatcher',
	8: 'aromalady',
	9: 'twins-gen4dp',
	10: 'hiker-gen4',
	11: 'battlegirl-gen4',
	12: 'fisherman-gen4',
	13: 'cyclist-gen4',
	14: 'cyclistf-gen4',
	15: 'blackbelt-gen4dp',
	16: 'artist-gen4',
	17: 'pokemonbreeder-gen4',
	18: 'pokemonbreederf-gen4',
	19: 'cowgirl',
	20: 'jogger',
	21: 'pokefan-gen4',
	22: 'pokefanf-gen4',
	23: 'pokekid',
	24: 'youngcouple-gen4dp',
	25: 'acetrainer-gen4dp',
	26: 'acetrainerf-gen4dp',
	27: 'waitress-gen4',
	28: 'veteran-gen4',
	29: 'ninjaboy',
	30: 'dragontamer',
	31: 'birdkeeper-gen4dp',
	32: 'doubleteam',
	33: 'richboy-gen4',
	34: 'lady-gen4',
	35: 'gentleman-gen4dp',
	36: 'madame-gen4dp',
	37: 'beauty-gen4dp',
	38: 'collector',
	39: 'policeman-gen4',
	40: 'pokemonranger-gen4',
	41: 'pokemonrangerf-gen4',
	42: 'scientist-gen4dp',
	43: 'swimmer-gen4dp',
	44: 'swimmerf-gen4dp',
	45: 'tuber',
	46: 'tuberf',
	47: 'sailor',
	48: 'sisandbro',
	49: 'ruinmaniac',
	50: 'psychic-gen4',
	51: 'psychicf-gen4',
	52: 'gambler',
	53: 'guitarist-gen4',
	54: 'acetrainersnow',
	55: 'acetrainersnowf',
	56: 'skier',
	57: 'skierf-gen4dp',
	58: 'roughneck-gen4',
	59: 'clown',
	60: 'worker-gen4',
	61: 'schoolkid-gen4dp',
	62: 'schoolkidf-gen4',
	63: 'roark',
	64: 'barry',
	65: 'byron',
	66: 'aaron',
	67: 'bertha',
	68: 'flint',
	69: 'lucian',
	70: 'cynthia-gen4',
	71: 'bellepa',
	72: 'rancher',
	73: 'mars',
	74: 'galacticgrunt',
	75: 'gardenia',
	76: 'crasherwake',
	77: 'maylene',
	78: 'fantina',
	79: 'candice',
	80: 'volkner',
	81: 'parasollady-gen4',
	82: 'waiter-gen4dp',
	83: 'interviewers',
	84: 'cameraman',
	85: 'reporter',
	86: 'idol',
	87: 'cyrus',
	88: 'jupiter',
	89: 'saturn',
	90: 'galacticgruntf',
	91: 'argenta',
	92: 'palmer',
	93: 'thorton',
	94: 'buck',
	95: 'darach',
	96: 'marley',
	97: 'mira',
	98: 'cheryl',
	99: 'riley',
	100: 'dahlia',
	101: 'ethan',
	102: 'lyra',
	103: 'twins-gen4',
	104: 'lass-gen4',
	105: 'acetrainer-gen4',
	106: 'acetrainerf-gen4',
	107: 'juggler',
	108: 'sage',
	109: 'li',
	110: 'gentleman-gen4',
	111: 'teacher',
	112: 'beauty',
	113: 'birdkeeper',
	114: 'swimmer-gen4',
	115: 'swimmerf-gen4',
	116: 'kimonogirl',
	117: 'scientist-gen4',
	118: 'acetrainercouple',
	119: 'youngcouple',
	120: 'supernerd',
	121: 'medium',
	122: 'schoolkid-gen4',
	123: 'blackbelt-gen4',
	124: 'pokemaniac',
	125: 'firebreather',
	126: 'burglar',
	127: 'biker-gen4',
	128: 'skierf',
	129: 'boarder',
	130: 'rocketgrunt',
	131: 'rocketgruntf',
	132: 'archer',
	133: 'ariana',
	134: 'proton',
	135: 'petrel',
	136: 'eusine',
	137: 'lucas-gen4pt',
	138: 'dawn-gen4pt',
	139: 'madame-gen4',
	140: 'waiter-gen4',
	141: 'falkner',
	142: 'bugsy',
	143: 'whitney',
	144: 'morty',
	145: 'chuck',
	146: 'jasmine',
	147: 'pryce',
	148: 'clair',
	149: 'will',
	150: 'koga',
	151: 'bruno',
	152: 'karen',
	153: 'lance',
	154: 'brock',
	155: 'misty',
	156: 'ltsurge',
	157: 'erika',
	158: 'janine',
	159: 'sabrina',
	160: 'blaine',
	161: 'blue',
	162: 'red',
	163: 'red',
	164: 'silver',
	165: 'giovanni',
	166: 'unknownf',
	167: 'unknown',
	168: 'unknown',
	169: 'hilbert',
	170: 'hilda',
	171: 'youngster',
	172: 'lass',
	173: 'schoolkid',
	174: 'schoolkidf',
	175: 'smasher',
	176: 'linebacker',
	177: 'waiter',
	178: 'waitress',
	179: 'chili',
	180: 'cilan',
	181: 'cress',
	182: 'nurseryaide',
	183: 'preschoolerf',
	184: 'preschooler',
	185: 'twins',
	186: 'pokemonbreeder',
	187: 'pokemonbreederf',
	188: 'lenora',
	189: 'burgh',
	190: 'elesa',
	191: 'clay',
	192: 'skyla',
	193: 'pokemonranger',
	194: 'pokemonrangerf',
	195: 'worker',
	196: 'backpacker',
	197: 'backpackerf',
	198: 'fisherman',
	199: 'musician',
	200: 'dancer',
	201: 'harlequin',
	202: 'artist',
	203: 'baker',
	204: 'psychic',
	205: 'psychicf',
	206: 'cheren',
	207: 'bianca',
	208: 'plasmagrunt-gen5bw',
	209: 'n',
	210: 'richboy',
	211: 'lady',
	212: 'pilot',
	213: 'workerice',
	214: 'hoopster',
	215: 'scientistf',
	216: 'clerkf',
	217: 'acetrainerf',
	218: 'acetrainer',
	219: 'blackbelt',
	220: 'scientist',
	221: 'striker',
	222: 'brycen',
	223: 'iris',
	224: 'drayden',
	225: 'roughneck',
	226: 'janitor',
	227: 'pokefan',
	228: 'pokefanf',
	229: 'doctor',
	230: 'nurse',
	231: 'hooligans',
	232: 'battlegirl',
	233: 'parasollady',
	234: 'clerk',
	235: 'clerk-boss',
	236: 'backers',
	237: 'backersf',
	238: 'veteran',
	239: 'veteranf',
	240: 'biker',
	241: 'infielder',
	242: 'hiker',
	243: 'madame',
	244: 'gentleman',
	245: 'plasmagruntf-gen5bw',
	246: 'shauntal',
	247: 'marshal',
	248: 'grimsley',
	249: 'caitlin',
	250: 'ghetsis-gen5bw',
	251: 'depotagent',
	252: 'swimmer',
	253: 'swimmerf',
	254: 'policeman',
	255: 'maid',
	256: 'ingo',
	257: 'alder',
	258: 'cyclist',
	259: 'cyclistf',
	260: 'cynthia',
	261: 'emmet',
	262: 'hilbert-dueldisk',
	263: 'hilda-dueldisk',
	264: 'hugh',
	265: 'rosa',
	266: 'nate',
	267: 'colress',
	268: 'beauty-gen5bw2',
	269: 'ghetsis',
	270: 'plasmagrunt',
	271: 'plasmagruntf',
	272: 'iris-gen5bw2',
	273: 'brycenman',
	274: 'shadowtriad',
	275: 'rood',
	276: 'zinzolin',
	277: 'cheren-gen5bw2',
	278: 'marlon',
	279: 'roxie',
	280: 'roxanne',
	281: 'brawly',
	282: 'wattson',
	283: 'flannery',
	284: 'norman',
	285: 'winona',
	286: 'tate',
	287: 'liza',
	288: 'juan',
	289: 'guitarist',
	290: 'steven',
	291: 'wallace',
	292: 'bellelba',
	293: 'benga',
	294: 'ash',
	// Special string-keyed avatars (hash-prefixed = custom sprites).
	'#bw2elesa': 'elesa-gen5bw2',
	'#teamrocket': 'teamrocket',
	'#yellow': 'yellow',
	'#zinnia': 'zinnia',
	'#clemont': 'clemont',
	'#wally': 'wally',
	// Aliases for legacy avatar names.
	breeder: 'pokemonbreeder',
	breederf: 'pokemonbreederf',
	1001: '#1001',
	1002: '#1002',
	1003: '#1003',
	1005: '#1005',
	1010: '#1010',
};
// source: shoedrip-unbound/dogars src/Shoedrip/dexdata.ts (TypeScript, agpl-3.0)
# A stop on a trip: ties an open capacity to a concrete location.
# The location is validated on save and destroyed together with the waypoint.
class Waypoint < ActiveRecord::Base
  belongs_to :open_capacity
  belongs_to :location, validate: true, dependent: :destroy
end
| rideconnection/clearinghouse | app/models/waypoint.rb | Ruby | agpl-3.0 | 134 |
# frozen_string_literal: true
# == Schema Information
#
# Table name: packages
#
# id :integer not null, primary key
# name :string(255)
# initial_price :decimal(, )
# at_the_door_price :decimal(, )
# attendee_limit :integer
# expires_at :datetime
# requires_track :boolean
# event_id :integer
# created_at :datetime
# updated_at :datetime
# deleted_at :datetime
# ignore_pricing_tiers :boolean default(FALSE), not null
# description :text
#
# Indexes
#
# index_packages_on_event_id (event_id)
#
require 'spec_helper'
# Specs for Package pricing:
#  - #current_price = initial_price plus every eligible pricing-tier increase
#  - #price_at_tier delegates the calculation to the given tier
describe Package do
  let(:user) { create(:user) }
  let(:event) { create(:event, user: user) }
  let(:package) { create(:package, event: event, initial_price: 10, at_the_door_price: 50) }
  context '#current_price' do
    before(:each) do
      # set opening tier before any tiers created here
      o = event.opening_tier
      o.date = 1.week.ago
      o.increase_by_dollars = 0
      o.save
    end
    it 'is the initial price' do
      expect(package.current_price).to eq package.initial_price
    end
    it 'changes based on the date' do
      tier = create(:pricing_tier, date: 2.day.ago, event: event)
      expected = package.initial_price + tier.increase_by_dollars
      expect(package.current_price).to eq expected
    end
    it 'changes based on the number of registrants for this event' do
      tier = create(:pricing_tier, registrants: 10, event: event)
      # 20 registrations pushes the event past the tier's 10-registrant threshold
      20.times do
        create(:registration, event: event)
      end
      expected = package.initial_price + tier.increase_by_dollars
      expect(package.current_price).to eq expected
    end
    it 'does not change if the tiers are not yet eligible' do
      event.registrations.destroy_all
      # Delorean time travel reproduces a long-standing issue that Travis discovered
      Delorean.time_travel_to(10.days.from_now) do
        tier = create(:pricing_tier, date: 19.days.from_now, event: event)
        tier2 = create(:pricing_tier, registrants: 10, event: event, date: nil)
        expect(event.current_tier).to eq event.opening_tier
        expect(package.current_price).to eq package.initial_price
      end
    end
    it 'changes base on two tiers' do
      tier = create(:pricing_tier, registrants: 10, event: event, date: nil)
      tier2 = create(:pricing_tier, date: 2.day.ago, event: event)
      11.times do
        create(:registration, event: event)
      end
      expect(event.current_tier).to eq tier2
      # both tiers have passed, so both increases apply on top of the base price
      expected = package.initial_price + tier.increase_by_dollars + tier2.increase_by_dollars
      expect(package.current_price).to eq expected
    end
    context 'optionally does not change based on passing tiers' do
      before(:each) do
        # packages can opt out of tier-based price increases entirely
        package.ignore_pricing_tiers = true
      end
      it 'tier by date passes' do
        tier = create(:pricing_tier, date: Date.today, event: event)
        expected = package.initial_price
        expect(package.current_price).to eq expected
      end
    end
  end
  context '#price_at_tier' do
    it 'redirects functionality to the tier' do
      tier = create(:pricing_tier, date: Date.today, event: event)
      expect(tier).to receive(:price_of)
      package.price_at_tier(tier)
    end
    it 'correctly calculates the value' do
      tier = create(:pricing_tier, registrants: 2, event: event)
      allow(tier).to receive(:should_apply_amount?) { true }
      allow(event).to receive(:pricing_tiers) { [tier] }
      expected = package.initial_price + tier.amount
      expect(package.price_at_tier(tier)).to eq expected
    end
    context 'a new tier is current' do
      before(:each) do
        @new_tier = create(:pricing_tier,
                           event: event,
                           date: 1.day.from_now,
                           increase_by_dollars: 3)
        Delorean.jump 4.days
        event.reload
        expect(event.pricing_tiers.count).to eq 2
        expect(event.current_tier).to_not eq event.opening_tier
        expect(event.current_tier).to eq @new_tier
      end
      after(:each) do
        # undo the Delorean.jump above so later examples see the real clock
        Delorean.back_to_the_present
      end
      it 'reflects the price of a previous tier' do
        expected = package.initial_price
        expect(package.price_at_tier(event.opening_tier)).to eq expected
      end
    end
  end
end
| NullVoxPopuli/aeonvera | app/models/specs/package_spec.rb | Ruby | agpl-3.0 | 4,412 |
# -*- coding: utf-8 -*-
# Copyright 2016 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
    # Odoo addon manifest: metadata only, no executable code.
    'name': 'OAuth2 Disable Login with Odoo.com',
    'version': '10.0.1.0.0',  # Odoo series 10.0, module version 1.0.0
    'category': 'Tools',
    'author': 'Onestein',
    'license': 'AGPL-3',
    'depends': ['auth_oauth'],  # requires the core OAuth login module
    'data': [
        # XML data records loaded on install/update
        'data/auth_oauth_data.xml',
    ],
}
| VitalPet/addons-onestein | auth_oauth_disable_login_with_odoo/__manifest__.py | Python | agpl-3.0 | 394 |
/**
 * Demo entry point: exercises the generic {@code Car} container with
 * {@code String} and {@code Integer} type arguments, plus its generic
 * array-printing helper.
 */
public class Generic{
    public static void main(String[] args){
        // Car parameterized with String: both name and price are strings.
        Car <String> car1 = new Car <String> ();
        car1.setName("Buick");
        car1.setPrice("100");
        System.out.printf("name=%s,price=%s\n",car1.getName(),car1.getPrice());
        // Car parameterized with Integer; note 001 is an octal int literal (value 1).
        Car <Integer> car2 = new Car <Integer> ();
        car2.setName(001);
        car2.setPrice(100);
        System.out.printf("name=%d,price=%d\n",car2.getName(),car2.getPrice());
        // The generic print method accepts any reference array type.
        Integer[] array = {1,2,3,4,5,6,7,8,9};
        car2.print(array);
    }
}
/*generic class*/
/**
 * Minimal generic container pairing a name and a price of the same type.
 * Also exposes a generic helper that prints every element of an array
 * via {@code printf("%s", ...)} with no separator.
 *
 * @param <E> the type used for both the name and the price
 */
class Car<E> {
    private E name;
    private E price;

    /** Creates a car with neither name nor price set. */
    public Car() {
        this(null, null);
    }

    /**
     * Creates a car pre-populated with a name and a price.
     *
     * @param name  the car's name
     * @param price the car's price
     */
    public Car(E name, E price) {
        this.name = name;
        this.price = price;
    }

    /** @return the stored name (may be {@code null}) */
    public E getName() {
        return name;
    }

    /** @param name the name to store */
    public void setName(E name) {
        this.name = name;
    }

    /** @return the stored price (may be {@code null}) */
    public E getPrice() {
        return price;
    }

    /** @param price the price to store */
    public void setPrice(E price) {
        this.price = price;
    }

    /**
     * Prints each array element in order, with no separator or newline.
     *
     * @param <U>   element type of the array
     * @param items elements to print
     */
    public <U> void print(U[] items) {
        for (U item : items) {
            System.out.printf("%s", item);
        }
    }
}
| 314942468GitHub/JavaLearnning | src/Generic.java | Java | agpl-3.0 | 963 |
<?php
/**
* Created by iluxovi4
* Protected by SugarTalk.ru
*/
// SugarCRM vardef extension: declares the link field that exposes the
// RealtyTemplates <-> Contracts relationship on the RealtyTemplates module.
$dictionary['RealtyTemplates']['fields']['realtytemplates_contracts'] =
array (
  'name' => 'realtytemplates_contracts',
  'type' => 'link',  // link-type field: a relationship handle, not a scalar value
  'relationship' => 'realtytemplates_contracts',  // relationship id — presumably defined in a companion metadata file; confirm
  'source'=>'non-db',  // not backed by a table column
  'vname'=>'LBL_REALTYTEMPLATES_CONTRACTS',  // display-label key
);
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-11-22 07:11
from __future__ import unicode_literals
from django.core.management.sql import emit_post_migrate_signal
from django.db import migrations
def add_executive_group(apps, schema_editor):
    """Create the 'executive' auth group and grant it learning-unit access.

    Models are fetched through ``apps.get_model`` so the historical model
    state of this migration is used instead of the current code's models.
    Only grants the permission when the group is newly created.
    """
    # create group
    db_alias = schema_editor.connection.alias
    # NOTE(review): emitting post_migrate here presumably forces Permission
    # rows to exist before the get() below — confirm against the Django
    # version in use (the signal's signature differs across versions).
    emit_post_migrate_signal(1, False, db_alias)
    Group = apps.get_model('auth', 'Group')
    Permission = apps.get_model('auth', 'Permission')
    executive_group, created = Group.objects.get_or_create(name='executive')
    if created:
        # Learning unit
        can_access_learningunit = Permission.objects.get(codename='can_access_learningunit')
        executive_group.permissions.add(can_access_learningunit)
class Migration(migrations.Migration):
    # Pure data migration: no schema changes, only the RunPython step below.
    dependencies = [
        ('base', '0207_auto_20171220_1035'),
    ]
    # elidable=True lets Django drop this operation when squashing migrations.
    operations = [
        migrations.RunPython(add_executive_group, elidable=True),
    ]
| uclouvain/OSIS-Louvain | base/migrations/0208_create_role_executive.py | Python | agpl-3.0 | 951 |
import * as React from 'react';
import renderOrder from '../renderOrder';
import { RenderLayer } from '../browser_utils/Utils';
import { SharedState } from '../citybound';
import { shadersForLandUses } from './stripedShaders';
import colors from '../colors';
/** Land-use categories the zone renderer understands; each name must have a matching entry in `colors`. */
export const LAND_USES = [
    "Residential",
    "Commercial",
    "Industrial",
    "Agricultural",
    "Recreational",
    "Administrative",
];
// One shared instance record per land use: five leading floats (instance
// transform/flags — assumed layout, confirm against the RenderLayer shader)
// followed by that land use's RGB color from `colors`.
const landUseInstances = new Map(LAND_USES.map(landUse => [landUse, new Float32Array([0.0, 0.0, 0.0, 1.0, 0.0, ...colors[landUse]])]));
// Same record layout, but with the building-outline color.
const buildingOutlinesInstance = new Float32Array([0.0, 0.0, 0.0, 1.0, 0.0, ...colors.buildingOutlines]);
/**
 * Renders the current planning preview's zone geometry as four layer groups:
 * solid zone fills, a striped overlay per land use (drawn in reverse order),
 * zone outlines, and building outlines.
 *
 * Fix: every <RenderLayer> produced inside a .map() now carries a stable
 * `key` (the land use name) so React can reconcile the lists instead of
 * warning and remounting layers on each re-render.
 */
export function ZonePlanningLayers({ state }: {
    state: SharedState;
}) {
    const { zoneGroups, zoneOutlineGroups, buildingOutlinesGroup } = state.planning.rendering.currentPreview;
    return <>
        {/* solid zone fills */}
        {[...zoneGroups.entries()].map(([landUse, groups]) => <RenderLayer key={landUse} renderOrder={renderOrder.addedGesturesZones} decal={true} batches={[...groups.values()].map(groupMesh => ({
            mesh: groupMesh,
            instances: landUseInstances.get(landUse)
        }))} />)}
        {/* striped overlay, iterated in reverse land-use order */}
        {[...zoneGroups.entries()].reverse().map(([landUse, groups]) => <RenderLayer key={landUse} renderOrder={renderOrder.addedGesturesZonesStipple} decal={true} shader={shadersForLandUses[landUse]} batches={[...groups.values()].map(groupMesh => ({
            mesh: groupMesh,
            instances: landUseInstances.get(landUse)
        }))} />)}
        {/* zone outlines */}
        {[...zoneOutlineGroups.entries()].map(([landUse, groups]) => <RenderLayer key={landUse} renderOrder={renderOrder.addedGesturesZonesOutlines} decal={true} batches={[...groups.values()].map(groupMesh => ({
            mesh: groupMesh,
            instances: landUseInstances.get(landUse)
        }))} />)}
        {/* building outlines: single layer, no list, so no key needed */}
        <RenderLayer renderOrder={renderOrder.buildingOutlines} decal={true} batches={[...buildingOutlinesGroup.values()].map(groupMesh => ({
            mesh: groupMesh,
            instances: buildingOutlinesInstance
        }))} />
    </>;
}
| citybound/citybound | cb_browser_ui/src/planning_browser/ZonePlanningLayers.tsx | TypeScript | agpl-3.0 | 2,016 |
<?php
App::uses('AppController', 'Controller');
App::uses('Folder', 'Utility');
App::uses('File', 'Utility');
/**
* Attributes Controller
*
* @property Attribute $Attribute
*/
class AttributesController extends AppController {
public $components = array('Security', 'RequestHandler', 'Cidr');
public $paginate = array(
'limit' => 60,
'maxLimit' => 9999, // LATER we will bump here on a problem once we have more than 9999 events
);
public $helpers = array('Js' => array('Jquery'));
public function beforeFilter() {
parent::beforeFilter();
$this->Auth->allow('restSearch');
$this->Auth->allow('returnAttributes');
$this->Auth->allow('downloadAttachment');
$this->Auth->allow('text');
// permit reuse of CSRF tokens on the search page.
if ('search' == $this->request->params['action']) {
$this->Security->csrfUseOnce = false;
}
$this->Security->validatePost = true;
// convert uuid to id if present in the url, and overwrite id field
if (isset($this->params->query['uuid'])) {
$params = array(
'conditions' => array('Attribute.uuid' => $this->params->query['uuid']),
'recursive' => 0,
'fields' => 'Attribute.id'
);
$result = $this->Attribute->find('first', $params);
if (isset($result['Attribute']) && isset($result['Attribute']['id'])) {
$id = $result['Attribute']['id'];
$this->params->addParams(array('pass' => array($id))); // FIXME find better way to change id variable if uuid is found. params->url and params->here is not modified accordingly now
}
}
// do not show private to other orgs
// if not admin or own org, check private as well..
if (!$this->_isSiteAdmin()) {
$this->paginate = Set::merge($this->paginate,array(
'conditions' =>
array('OR' =>
array(
'Event.org =' => $this->Auth->user('org'),
'AND' => array(
'Attribute.distribution >' => 0,
'Event.distribution >' => 0,
)))));
}
/* We want to show this outside now as discussed with Christophe. Still not pushable, but anything should be pullable that's visible
// do not show cluster outside server
if ($this->_isRest()) {
$this->paginate = Set::merge($this->paginate,array(
'conditions' =>
array("AND" => array('Event.cluster !=' => true),array('Attribute.cluster !=' => true)),
//array("AND" => array(array('Event.private !=' => 2))),
));
}
*/
}
	/**
	 * Lists attributes (paginated) with minimal event context for each row.
	 * Org/distribution visibility filtering is applied to the paginator in
	 * beforeFilter(), so this action only selects fields and renders.
	 */
	public function index() {
		$this->Attribute->recursive = 0;
		$this->Attribute->contain = array('Event.id', 'Event.orgc', 'Event.org', 'Event.info');
		$this->set('isSearch', 0);
		$this->set('attributes', $this->paginate());
		// lookup tables used by the view to render type/category help texts
		$this->set('attrDescriptions', $this->Attribute->fieldDescriptions);
		$this->set('typeDefinitions', $this->Attribute->typeDefinitions);
		$this->set('categoryDefinitions', $this->Attribute->categoryDefinitions);
	}
/**
* add method
*
* @return void
*
* @throws NotFoundException // TODO Exception
*/
public function add($eventId = null) {
if (!$this->userRole['perm_add']) {
throw new MethodNotAllowedException('You don\'t have permissions to create attributes');
}
if ($this->request->is('ajax')) {
$this->set('ajax', true);
$this->layout = 'ajax';
}
else $this->set('ajax', false);
if ($this->request->is('post')) {
if ($this->request->is('ajax')) $this->autoRender = false;
$this->loadModel('Event');
$date = new DateTime();
// Give error if someone tried to submit a attribute with attachment or malware-sample type.
// TODO change behavior attachment options - this is bad ... it should rather by a messagebox or should be filtered out on the view level
if (isset($this->request->data['Attribute']['type']) && $this->Attribute->typeIsAttachment($this->request->data['Attribute']['type'])) {
$this->Session->setFlash(__('Attribute has not been added: attachments are added by "Add attachment" button', true), 'default', array(), 'error');
$this->redirect(array('controller' => 'events', 'action' => 'view', $this->request->data['Attribute']['event_id']));
}
// remove the published flag from the event
$this->Event->recursive = -1;
$this->Event->read(null, $this->request->data['Attribute']['event_id']);
if (!$this->_isSiteAdmin() && ($this->Event->data['Event']['orgc'] != $this->_checkOrg() || !$this->userRole['perm_modify'])) {
throw new UnauthorizedException('You do not have permission to do that.');
}
$this->Event->set('timestamp', $date->getTimestamp());
$this->Event->set('published', 0);
$this->Event->save($this->Event->data, array('fieldList' => array('published', 'timestamp', 'info')));
//
// multiple attributes in batch import
//
if ((isset($this->request->data['Attribute']['batch_import']) && $this->request->data['Attribute']['batch_import'] == 1)) {
// make array from value field
$attributes = explode("\n", $this->request->data['Attribute']['value']);
$fails = ""; // will be used to keep a list of the lines that failed or succeeded
$successes = "";
$failCount = 0;
$successCount = 0;
// TODO loop-holes,
// the value null value thing
foreach ($attributes as $key => $attribute) {
$attribute = trim($attribute);
if (strlen($attribute) == 0)
continue; // don't do anything for empty lines
$this->Attribute->create();
$this->request->data['Attribute']['value'] = $attribute; // set the value as the content of the single line
// TODO loop-holes,
// there seems to be a loop-hole in misp here
// be it an create and not an update
$this->Attribute->id = null;
if ($this->Attribute->save($this->request->data)) {
$successes .= " " . ($key + 1);
$successCount++;
} else {
$fails .= " " . ($key + 1);
$failCount++;
}
}
if ($this->request->is('ajax')) {
$this->autoRender = false;
if ($fails) {
$error_message = 'The lines' . $fails . ' could not be saved. Please, try again.';
return new CakeResponse(array('body'=> json_encode(array('saved' => true, 'errors' => $error_message)), 'status' => 200));
} else {
return new CakeResponse(array('body'=> json_encode(array('saved' => true, 'success' => $successCount . ' Attributes added')), 'status' => 200));
}
} else {
// we added all the attributes,
if ($fails) {
// list the ones that failed
if (!CakeSession::read('Message.flash')) {
$this->Session->setFlash(__('The lines' . $fails . ' could not be saved. Please, try again.', true), 'default', array(), 'error');
} else {
$existingFlash = CakeSession::read('Message.flash');
$this->Session->setFlash(__('The lines' . $fails . ' could not be saved. ' . $existingFlash['message'], true), 'default', array(), 'error');
}
}
if ($successes) {
// list the ones that succeeded
$this->Session->setFlash(__('The lines' . $successes . ' have been saved', true));
}
$this->redirect(array('controller' => 'events', 'action' => 'view', $this->request->data['Attribute']['event_id']));
}
} else {
if (isset($this->request->data['Attribute']['uuid'])) { // TODO here we should start RESTful dialog
// check if the uuid already exists and also save the existing attribute for further checks
$existingAttribute = null;
$existingAttribute = $this->Attribute->find('first', array('conditions' => array('Attribute.uuid' => $this->request->data['Attribute']['uuid'])));
//$existingAttributeCount = $this->Attribute->find('count', array('conditions' => array('Attribute.uuid' => $this->request->data['Attribute']['uuid'])));
if ($existingAttribute) {
// TODO RESTfull, set responce location header..so client can find right URL to edit
$this->response->header('Location', Configure::read('MISP.baseurl') . '/attributes/' . $existingAttribute['Attribute']['id']);
$this->response->send();
$this->view($this->Attribute->getId());
$this->render('view');
return false;
} else {
// if the attribute doesn't exist yet, check whether it has a timestamp - if yes, it's from a push, keep the timestamp we had, if no create a timestamp
if (!isset($this->request->data['Attribute']['timestamp'])) {
$this->request->data['Attribute']['timestamp'] = $date->getTimestamp();
}
}
} else {
if (!isset($this->request->data['Attribute']['timestamp'])) {
$this->request->data['Attribute']['timestamp'] = $date->getTimestamp();
}
}
//
// single attribute
//
// create the attribute
$this->Attribute->create();
$savedId = $this->Attribute->getId();
if ($this->Attribute->save($this->request->data)) {
if ($this->_isRest()) {
// REST users want to see the newly created attribute
$this->view($this->Attribute->getId());
$this->render('view');
} elseif ($this->request->is('ajax')) {
$this->autoRender = false;
return new CakeResponse(array('body'=> json_encode(array('saved' => true, 'success' => 'Attribute added.')),'status'=>200));
} else {
// inform the user and redirect
$this->Session->setFlash(__('The attribute has been saved'));
$this->redirect(array('controller' => 'events', 'action' => 'view', $this->request->data['Attribute']['event_id']));
}
} else {
if ($this->_isRest()) { // TODO return error if REST
// REST users want to see the failed attribute
$this->view($savedId);
$this->render('view');
} elseif ($this->request->is('ajax')) {
$this->autoRender = false;
return new CakeResponse(array('body'=> json_encode(array('saved' => false, 'errors' => $this->Attribute->validationErrors)),'status'=>200));
} else {
if (!CakeSession::read('Message.flash')) {
$this->Session->setFlash(__('The attribute could not be saved. Please, try again.'));
}
}
}
}
} else {
// set the event_id in the form
$this->request->data['Attribute']['event_id'] = $eventId;
}
// combobox for types
$types = array_keys($this->Attribute->typeDefinitions);
$types = $this->_arrayToValuesIndexArray($types);
$this->set('types', $types);
// combobos for categories
$categories = $this->Attribute->validate['category']['rule'][1];
array_pop($categories);
$categories = $this->_arrayToValuesIndexArray($categories);
$this->set('categories', compact('categories'));
$this->loadModel('Event');
$events = $this->Event->findById($eventId);
$this->set('event_id', $events['Event']['id']);
// combobox for distribution
$this->set('distributionLevels', $this->Attribute->distributionLevels);
$this->set('currentDist', $events['Event']['distribution']); // TODO default distribution
// tooltip for distribution
$this->set('distributionDescriptions', $this->Attribute->distributionDescriptions);
$this->set('attrDescriptions', $this->Attribute->fieldDescriptions);
$this->set('typeDefinitions', $this->Attribute->typeDefinitions);
$this->set('categoryDefinitions', $this->Attribute->categoryDefinitions);
$this->set('published', $events['Event']['published']);
}
	/**
	 * Streams an attribute's attachment to the client.
	 *
	 * @param int $id attribute id (a uuid in the URL is mapped to an id in beforeFilter())
	 * @throws NotFoundException when the attribute does not exist
	 * @throws UnauthorizedException when the requester's org may not see the event/attribute
	 */
	public function download($id = null) {
		$this->Attribute->id = $id;
		if (!$this->Attribute->exists()) {
			throw new NotFoundException(__('Invalid attribute'));
		}
		$this->Attribute->read();
		// Non-admins from a foreign org may only download when both the event
		// and the attribute are shared (distribution > 0).
		if (!$this->_isSiteAdmin() &&
			$this->Auth->user('org') !=
			$this->Attribute->data['Event']['org'] &&
			($this->Attribute->data['Event']['distribution'] == 0 ||
				$this->Attribute->data['Attribute']['distribution'] == 0
		)) {
			throw new UnauthorizedException('You do not have the permission to view this event.');
		}
		$this->__downloadAttachment($this->Attribute->data['Attribute']);
	}
	/**
	 * Sends the file stored at files/<event_id>/<attribute_id> for an
	 * 'attachment' or 'malware-sample' attribute. Malware samples are served
	 * as .zip (they are stored zipped — see add_attachment()).
	 *
	 * @param array $attribute the Attribute row whose file should be served
	 * @throws NotFoundException when the attribute type carries no file payload
	 */
	private function __downloadAttachment($attribute) {
		$path = "files" . DS . $attribute['event_id'] . DS;
		$file = $attribute['id'];
		$filename = '';
		if ('attachment' == $attribute['type']) {
			$filename = $attribute['value'];
			$fileExt = pathinfo($filename, PATHINFO_EXTENSION);
			// strip the extension; it is re-appended when the response is built
			$filename = substr($filename, 0, strlen($filename) - strlen($fileExt) - 1);
		} elseif ('malware-sample' == $attribute['type']) {
			// value is "original-filename|md5"; serve under the original name
			$filenameHash = explode('|', $attribute['value']);
			$filename = $filenameHash[0];
			$filename = substr($filenameHash[0], strrpos($filenameHash[0], '\\'));
			$fileExt = "zip";
		} else {
			throw new NotFoundException(__('Attribute not an attachment or malware-sample'));
		}
		$this->autoRender = false;
		$this->response->type($fileExt);
		$this->response->file($path . $file, array('download' => true, 'name' => $filename . '.' . $fileExt));
	}
/**
* add_attachment method
*
* @return void
* @throws InternalErrorException
*/
public function add_attachment($eventId = null) {
$sha256 = null;
$sha1 = null;
//$ssdeep = null;
if ($this->request->is('post')) {
$this->loadModel('Event');
$this->Event->id = $this->request->data['Attribute']['event_id'];
$this->Event->recursive = -1;
$this->Event->read();
if (!$this->_isSiteAdmin() && ($this->Event->data['Event']['orgc'] != $this->_checkOrg() || !$this->userRole['perm_modify'])) {
throw new UnauthorizedException('You do not have permission to do that.');
}
// Check if there were problems with the file upload
// only keep the last part of the filename, this should prevent directory attacks
$filename = basename($this->request->data['Attribute']['value']['name']);
$tmpfile = new File($this->request->data['Attribute']['value']['tmp_name']);
if ((isset($this->request->data['Attribute']['value']['error']) && $this->request->data['Attribute']['value']['error'] == 0) ||
(!empty( $this->request->data['Attribute']['value']['tmp_name']) && $this->request->data['Attribute']['value']['tmp_name'] != 'none')
) {
if (!is_uploaded_file($tmpfile->path))
throw new InternalErrorException('PHP says file was not uploaded. Are you attacking me?');
} else {
$this->Session->setFlash(__('There was a problem to upload the file.', true), 'default', array(), 'error');
$this->redirect(array('controller' => 'events', 'action' => 'view', $this->request->data['Attribute']['event_id']));
}
// save the file-info in the database
$this->Attribute->create();
if ($this->request->data['Attribute']['malware']) {
$this->request->data['Attribute']['type'] = "malware-sample";
// Validate filename
if (!preg_match('@^[\w\-. ]+$@', $filename)) throw new Exception ('Filename not allowed');
$this->request->data['Attribute']['value'] = $filename . '|' . hash_file('md5', $tmpfile->path); // TODO gives problems with bigger files
$sha256 = (hash_file('sha256', $tmpfile->path));
$sha1 = (hash_file('sha1', $tmpfile->path));
$this->request->data['Attribute']['to_ids'] = 1; // LATER let user choose to send this to IDS
} else {
$this->request->data['Attribute']['type'] = "attachment";
// Validate filename
if (!preg_match('@^[\w\-. ]+$@', $filename)) throw new Exception ('Filename not allowed');
$this->request->data['Attribute']['value'] = $filename;
$this->request->data['Attribute']['to_ids'] = 0;
}
$this->request->data['Attribute']['uuid'] = String::uuid();
$this->request->data['Attribute']['batch_import'] = 0;
if ($this->Attribute->save($this->request->data)) {
// attribute saved correctly in the db
// remove the published flag from the event
$this->Event->id = $this->request->data['Attribute']['event_id'];
$this->Event->saveField('published', 0);
} else {
$this->Session->setFlash(__('The attribute could not be saved. Did you already upload this file?'));
$this->redirect(array('controller' => 'events', 'action' => 'view', $this->request->data['Attribute']['event_id']));
}
// no errors in file upload, entry already in db, now move the file where needed and zip it if required.
// no sanitization is required on the filename, path or type as we save
// create directory structure
if (PHP_OS == 'WINNT') {
$rootDir = APP . "files" . DS . $this->request->data['Attribute']['event_id'];
} else {
$rootDir = APP . DS . "files" . DS . $this->request->data['Attribute']['event_id'];
}
$dir = new Folder($rootDir, true);
// move the file to the correct location
$destpath = $rootDir . DS . $this->Attribute->id; // id of the new attribute in the database
$file = new File ($destpath);
$zipfile = new File ($destpath . '.zip');
$fileInZip = new File($rootDir . DS . $filename); // FIXME do sanitization of the filename
if ($file->exists() || $zipfile->exists() || $fileInZip->exists()) {
// this should never happen as the attribute id should be unique
$this->Session->setFlash(__('Attachment with this name already exist in this event.', true), 'default', array(), 'error');
// remove the entry from the database
$this->Attribute->delete();
$this->redirect(array('controller' => 'events', 'action' => 'view', $this->request->data['Attribute']['event_id']));
}
if (!move_uploaded_file($tmpfile->path, $file->path)) {
$this->Session->setFlash(__('Problem with uploading attachment. Cannot move it to its final location.', true), 'default', array(), 'error');
// remove the entry from the database
$this->Attribute->delete();
$this->redirect(array('controller' => 'events', 'action' => 'view', $this->request->data['Attribute']['event_id']));
}
// zip and password protect the malware files
if ($this->request->data['Attribute']['malware']) {
// TODO check if CakePHP has no easy/safe wrapper to execute commands
$execRetval = '';
$execOutput = array();
rename($file->path, $fileInZip->path); // TODO check if no workaround exists for the current filtering mechanisms
if (PHP_OS == 'WINNT') {
exec("zip -j -P infected " . $zipfile->path . ' "' . $fileInZip->path . '"', $execOutput, $execRetval);
} else {
exec("zip -j -P infected " . $zipfile->path . ' "' . addslashes($fileInZip->path) . '"', $execOutput, $execRetval);
}
if ($execRetval != 0) { // not EXIT_SUCCESS
$this->Session->setFlash(__('Problem with zipping the attachment. Please report to administrator. ' . $execOutput, true), 'default', array(), 'error');
// remove the entry from the database
$this->Attribute->delete();
$fileInZip->delete();
$file->delete();
$this->redirect(array('controller' => 'events', 'action' => 'view', $this->request->data['Attribute']['event_id']));
};
$fileInZip->delete(); // delete the original not-zipped-file
rename($zipfile->path, $file->path); // rename the .zip to .nothing
}
if ($this->request->data['Attribute']['malware']) {
$temp = $this->request->data;
$this->Attribute->create();
$temp['Attribute']['type'] = 'filename|sha256';
$temp['Attribute']['value'] = $filename . '|' .$sha256;
$temp['Attribute']['uuid'] = String::uuid();
$this->Attribute->save($temp, array('fieldlist' => array('value', 'type', 'category', 'event_id', 'distribution', 'to_ids', 'comment')));
$this->Attribute->create();
$temp['Attribute']['type'] = 'filename|sha1';
$temp['Attribute']['value'] = $filename . '|' .$sha1;
$temp['Attribute']['uuid'] = String::uuid();
$this->Attribute->save($temp, array('fieldlist' => array('value', 'type', 'category', 'event_id', 'distribution', 'to_ids', 'comment')));
}
// everything is done, now redirect to event view
$this->Session->setFlash(__('The attachment has been uploaded'));
$this->redirect(array('controller' => 'events', 'action' => 'view', $this->request->data['Attribute']['event_id']));
} else {
// set the event_id in the form
$this->request->data['Attribute']['event_id'] = $eventId;
}
// combobos for categories
$categories = $this->Attribute->validate['category']['rule'][1];
// just get them with attachments..
$selectedCategories = array();
foreach ($categories as $category) {
if (isset($this->Attribute->categoryDefinitions[$category])) {
$types = $this->Attribute->categoryDefinitions[$category]['types'];
$alreadySet = false;
foreach ($types as $type) {
if ($this->Attribute->typeIsAttachment($type) && !$alreadySet) {
// add to the whole..
$selectedCategories[] = $category;
$alreadySet = true;
continue;
}
}
}
};
$categories = $this->_arrayToValuesIndexArray($selectedCategories);
$this->set('categories',$categories);
$this->set('attrDescriptions', $this->Attribute->fieldDescriptions);
$this->set('typeDefinitions', $this->Attribute->typeDefinitions);
$this->set('categoryDefinitions', $this->Attribute->categoryDefinitions);
$this->set('zippedDefinitions', $this->Attribute->zippedDefinitions);
$this->set('uploadDefinitions', $this->Attribute->uploadDefinitions);
// combobox for distribution
$this->loadModel('Event');
$this->set('distributionDescriptions', $this->Attribute->distributionDescriptions);
$this->set('distributionLevels', $this->Event->distributionLevels);
$events = $this->Event->findById($eventId);
$this->set('currentDist', $events['Event']['distribution']);
$this->set('published', $events['Event']['published']);
}
/**
* Imports the CSV threatConnect file to multiple attributes
* @param int $id The id of the event
*/
public function add_threatconnect($eventId = null) {
if ($this->request->is('post')) {
$this->loadModel('Event');
$this->Event->id = $eventId;
$this->Event->recursive = -1;
$this->Event->read();
if (!$this->_isSiteAdmin() && ($this->Event->data['Event']['orgc'] != $this->_checkOrg() || !$this->userRole['perm_modify'])) {
throw new UnauthorizedException('You do not have permission to do that.');
}
//
// File upload
//
// Check if there were problems with the file upload
$tmpfile = new File($this->request->data['Attribute']['value']['tmp_name']);
if ((isset($this->request->data['Attribute']['value']['error']) && $this->request->data['Attribute']['value']['error'] == 0) ||
(!empty( $this->request->data['Attribute']['value']['tmp_name']) && $this->request->data['Attribute']['value']['tmp_name'] != 'none')
) {
if (!is_uploaded_file($tmpfile->path))
throw new InternalErrorException('PHP says file was not uploaded. Are you attacking me?');
} else {
$this->Session->setFlash(__('There was a problem to upload the file.', true), 'default', array(), 'error');
$this->redirect(array('controller' => 'attributes', 'action' => 'add_threatconnect', $this->request->data['Attribute']['event_id']));
}
// verify mime type
$file_info = $tmpfile->info();
if ($file_info['mime'] != 'text/plain') {
$this->Session->setFlash('File not in CSV format.', 'default', array(), 'error');
$this->redirect(array('controller' => 'attributes', 'action' => 'add_threatconnect', $this->request->data['Attribute']['event_id']));
}
// parse uploaded csv file
$filename = $tmpfile->path;
$header = NULL;
$entries = array();
if (($handle = fopen($filename, 'r')) !== FALSE) {
while (($row = fgetcsv($handle, 0, ',', '"')) !== FALSE) {
if(!$header)
$header = $row;
else
$entries[] = array_combine($header, $row);
}
fclose($handle);
}
// verify header of the file (first row)
$required_headers = array('Type', 'Value', 'Confidence', 'Description', 'Source');
if (count(array_intersect($header, $required_headers)) != count($required_headers)) {
$this->Session->setFlash('Incorrect ThreatConnect headers. The minimum required headers are: '.implode(',', $required_headers), 'default', array(), 'error');
$this->redirect(array('controller' => 'attributes', 'action' => 'add_threatconnect', $this->request->data['Attribute']['event_id']));
}
//
// import attributes
//
$attributes = array(); // array with all the attributes we're going to save
foreach($entries as $entry) {
$attribute = array();
$attribute['event_id'] = $this->request->data['Attribute']['event_id'];
$attribute['value'] = $entry['Value'];
$attribute['to_ids'] = ($entry['Confidence'] > 51) ? 1 : 0; // To IDS if high confidence
$attribute['comment'] = 'ThreatConnect: ' . $entry['Description'];
$attribute['distribution'] = '3'; // 'All communities'
if (Configure::read('MISP.default_attribute_distribution') != null) {
if (Configure::read('MISP.default_attribute_distribution') === 'event') {
$attribute['distribution'] = $this->Event->data['Event']['distribution'];
} else {
$attribute['distribution'] = Configure::read('MISP.default_attribute_distribution');
}
}
switch($entry['Type']) {
case 'Address':
$attribute['category'] = 'Network activity';
$attribute['type'] = 'ip-dst';
break;
case 'Host':
$attribute['category'] = 'Network activity';
$attribute['type'] = 'domain';
break;
case 'EmailAddress':
$attribute['category'] = 'Payload delivery';
$attribute['type'] = 'email-src';
break;
case 'File':
$attribute['category'] = 'Artifacts dropped';
$attribute['value'] = strtolower($attribute['value']);
if (preg_match("#^[0-9a-f]{32}$#", $attribute['value']))
$attribute['type'] = 'md5';
else if (preg_match("#^[0-9a-f]{40}$#", $attribute['value']))
$attribute['type'] = 'sha1';
else if (preg_match("#^[0-9a-f]{64}$#", $attribute['value']))
$attribute['type'] = 'sha256';
else
// do not keep attributes that do not have a match
$attribute=NULL;
break;
case 'URL':
$attribute['category'] = 'Network activity';
$attribute['type'] = 'url';
break;
default:
// do not keep attributes that do not have a match
$attribute=NULL;
}
// add attribute to the array that will be saved
if ($attribute) $attributes[] = $attribute;
}
//
// import source info:
//
// 1/ iterate over all the sources, unique
// 2/ add uniques as 'Internal reference'
// 3/ if url format -> 'link'
// else 'comment'
$references = array();
foreach($entries as $entry) {
$references[$entry['Source']] = true;
}
$references = array_keys($references);
// generate the Attributes
foreach($references as $reference) {
$attribute = array();
$attribute['event_id'] = $this->request->data['Attribute']['event_id'];
$attribute['category'] = 'Internal reference';
if (preg_match('#^(http|ftp)(s)?\:\/\/((([a-z|0-9|\-]{1,25})(\.)?){2,7})($|/.*$)#i', $reference))
$attribute['type'] = 'link';
else
$attribute['type'] = 'comment';
$attribute['value'] = $reference;
$attribute['distribution'] = 3; // 'All communities'
// add attribute to the array that will be saved
$attributes[] = $attribute;
}
//
// finally save all the attributes at once, and continue if there are validation errors
//
$this->Attribute->saveMany($attributes, array('validate' => true));
// data imported (with or without errors)
// remove the published flag from the event
$this->loadModel('Event');
$this->Event->id = $this->request->data['Attribute']['event_id'];
$this->Event->saveField('published', 0);
// everything is done, now redirect to event view
$this->Session->setFlash(__('The ThreatConnect data has been imported'));
$this->redirect(array('controller' => 'events', 'action' => 'view', $this->request->data['Attribute']['event_id']));
} else {
// set the event_id in the form
$this->request->data['Attribute']['event_id'] = $eventId;
}
// form not submitted, show page
$this->loadModel('Event');
$events = $this->Event->findById($eventId);
$this->set('published', $events['Event']['published']);
}
/**
* edit method
*
* @param string $id
* @return void
* @throws NotFoundException
*/
public function edit($id = null) {
$this->Attribute->id = $id;
// captured once so the attribute and event get the same edit timestamp
$date = new DateTime();
if (!$this->Attribute->exists()) {
throw new NotFoundException(__('Invalid attribute'));
}
// populates $this->Attribute->data (Attribute + associated Event)
$this->Attribute->read();
//set stuff to fix undefined index: uuid
// Non-REST forms do not post the uuid back, so remember it from the read above.
if (!$this->_isRest()) {
$uuid = $this->Attribute->data['Attribute']['uuid'];
}
// Permission gate for non-site-admins: the user's org must be the event's
// creator org AND the user needs either org-wide modify rights, or plain
// modify rights on an event that is not their own.
// NOTE(review): the perm_modify branch requires user_id != current user,
// which looks inverted ("modify others' events but not your own") — confirm
// against the role model before relying on it.
if (!$this->_isSiteAdmin()) {
//
if ($this->Attribute->data['Event']['orgc'] == $this->Auth->user('org')
&& (($this->userRole['perm_modify'] && $this->Attribute->data['Event']['user_id'] != $this->Auth->user('id'))
|| $this->userRole['perm_modify_org'])) {
// Allow the edit
} else {
$this->Session->setFlash(__('Invalid attribute.'));
$this->redirect(array('controller' => 'events', 'action' => 'index'));
}
}
$eventId = $this->Attribute->data['Attribute']['event_id'];
// Attachment-type attributes keep their value read-only in the view layer
// (the value is tied to the stored file).
if ('attachment' == $this->Attribute->data['Attribute']['type'] ||
'malware-sample' == $this->Attribute->data['Attribute']['type'] ) {
$this->set('attachment', true);
// TODO we should ensure 'value' cannot be changed here and not only on a view level (because of the associated file)
// $this->Session->setFlash(__('You cannot edit attachment attributes.', true), 'default', array(), 'error');
// $this->redirect(array('controller' => 'events', 'action' => 'view', $old_attribute['Event']['id']));
} else {
$this->set('attachment', false);
}
if ($this->request->is('post') || $this->request->is('put')) {
// reposition to get the attribute.id with given uuid
// Notice (8): Undefined index: uuid [APP/Controller/AttributesController.php, line 502]
// Fixed - uuid was not passed back from the form since it's not a field. Set the uuid in a variable for non rest users, rest should have uuid.
// Generally all of this should be _isRest() only, but that's something for later to think about
if ($this->_isRest()) {
$existingAttribute = $this->Attribute->findByUuid($this->request->data['Attribute']['uuid']);
} else {
$existingAttribute = $this->Attribute->findByUuid($uuid);
}
if (count($existingAttribute)) {
$this->request->data['Attribute']['id'] = $existingAttribute['Attribute']['id'];
}
// check if the attribute has a timestamp already set (from a previous instance that is trying to edit via synchronisation)
// Synchronisation conflict resolution: keep whichever version has the
// newer timestamp; an incoming edit without a timestamp is stamped now.
if (isset($this->request->data['Attribute']['timestamp'])) {
// check which attribute is newer
if ($this->request->data['Attribute']['timestamp'] > $existingAttribute['Attribute']['timestamp']) {
// carry on with adding this attribute - Don't forget! if orgc!=user org, create shadow attribute, not attribute!
} else {
// the old one is newer or the same, replace the request's attribute with the old one
$this->request->data['Attribute'] = $existingAttribute['Attribute'];
}
} else {
$this->request->data['Attribute']['timestamp'] = $date->getTimestamp();
}
$fieldList = array('category', 'type', 'value1', 'value2', 'to_ids', 'distribution', 'value', 'timestamp', 'comment');
$this->loadModel('Event');
$this->Event->id = $eventId;
// enabling / disabling the distribution field in the edit view based on whether user's org == orgc in the event
$this->Event->read();
if ($this->Attribute->save($this->request->data)) {
$this->Session->setFlash(__('The attribute has been saved'));
// remove the published flag from the event
// An edited attribute un-publishes its event and bumps the event timestamp.
$this->Event->set('timestamp', $date->getTimestamp());
$this->Event->set('published', 0);
$this->Event->save($this->Event->data, array('fieldList' => array('published', 'timestamp', 'info')));
if ($this->_isRest()) {
// REST users want to see the newly created event
$this->view($this->Attribute->getId());
$this->render('view');
} else {
$this->redirect(array('controller' => 'events', 'action' => 'view', $eventId));
}
} else {
// Save failed: show a generic flash unless validation already set one,
// and reload the stored attribute so the form shows persisted values.
if (!CakeSession::read('Message.flash')) {
$this->Session->setFlash(__('The attribute could not be saved. Please, try again.'));
} else {
$this->request->data = $this->Attribute->read(null, $id);
}
}
} else {
// GET: pre-populate the form from the database.
$this->request->data = $this->Attribute->read(null, $id);
}
$this->set('attribute', $this->request->data);
// enabling / disabling the distribution field in the edit view based on whether user's org == orgc in the event
$this->loadModel('Event');
$this->Event->id = $eventId;
$this->Event->read();
$this->set('published', $this->Event->data['Event']['published']);
// needed for RBAC
// combobox for types
$types = array_keys($this->Attribute->typeDefinitions);
$types = $this->_arrayToValuesIndexArray($types);
$this->set('types', $types);
// combobox for categories
$categories = $this->Attribute->validate['category']['rule'][1];
array_pop($categories); // remove that last empty/space option
$categories = $this->_arrayToValuesIndexArray($categories);
$this->set('categories', $categories);
$this->set('currentDist', $this->Event->data['Event']['distribution']);
// combobox for distribution
$this->set('distributionLevels', $this->Attribute->distributionLevels);
// tooltip for distribution
$this->set('distributionDescriptions', $this->Attribute->distributionDescriptions);
$this->set('attrDescriptions', $this->Attribute->fieldDescriptions);
$this->set('typeDefinitions', $this->Attribute->typeDefinitions);
$this->set('categoryDefinitions', $this->Attribute->categoryDefinitions);
}
// ajax edit - post a single edited field and this method will attempt to save it and return a json with the validation errors if they occur.
// ajax edit - post a single edited field and this method will attempt to save it and return a json with the validation errors if they occur.
// Always answers HTTP 200; success/failure is encoded in the JSON body.
public function editField($id) {
if ((!$this->request->is('post') && !$this->request->is('put')) || !$this->request->is('ajax')) throw new MethodNotAllowedException();
$this->Attribute->id = $id;
if (!$this->Attribute->exists()) {
// NOTE(review): 'fail' => false on an error path looks odd — presumably the
// client only inspects the 'errors' key; confirm against the JS caller.
return new CakeResponse(array('body'=> json_encode(array('fail' => false, 'errors' => 'Invalid attribute')),'status'=>200));
}
$this->Attribute->recursive = -1;
$this->Attribute->contain('Event');
$attribute = $this->Attribute->read();
// Same permission gate as edit(): creator-org membership plus modify rights.
if (!$this->_isSiteAdmin()) {
//
if ($this->Attribute->data['Event']['orgc'] == $this->Auth->user('org')
&& (($this->userRole['perm_modify'] && $this->Attribute->data['Event']['user_id'] != $this->Auth->user('id'))
|| $this->userRole['perm_modify_org'])) {
// Allow the edit
} else {
return new CakeResponse(array('body'=> json_encode(array('fail' => false, 'errors' => 'Invalid attribute')),'status'=>200));
}
}
// Apply the posted field changes; bail out early with 'nochange' if the
// first changed field carries the value already stored.
foreach ($this->request->data['Attribute'] as $changedKey => $changedField) {
if ($attribute['Attribute'][$changedKey] == $changedField) {
$this->autoRender = false;
return new CakeResponse(array('body'=> json_encode('nochange'),'status'=>200));
}
$attribute['Attribute'][$changedKey] = $changedField;
}
$date = new DateTime();
$attribute['Attribute']['timestamp'] = $date->getTimestamp();
if ($this->Attribute->save($attribute)) {
// A successful field edit un-publishes the event and bumps its timestamp.
$event = $this->Attribute->Event->find('first', array(
'recursive' => -1,
'fields' => array('id', 'published', 'timestamp', 'info'),
'conditions' => array(
'id' => $attribute['Attribute']['event_id'],
)));
$event['Event']['timestamp'] = $date->getTimestamp();
$event['Event']['published'] = 0;
$this->Attribute->Event->save($event, array('fieldList' => array('published', 'timestamp', 'info')));
$this->autoRender = false;
return new CakeResponse(array('body'=> json_encode(array('saved' => true, 'success' => 'Field updated.')),'status'=>200));
} else {
// Validation failed: hand the model's validation errors back to the client.
$this->autoRender = false;
return new CakeResponse(array('body'=> json_encode(array('saved' => false, 'errors' => $this->Attribute->validationErrors)),'status'=>200));
}
}
/**
 * View a single attribute (used by the event view's per-attribute row).
 *
 * @param string $id attribute ID
 * @param int $hasChildren whether the attribute has shadow-attribute children (passed through to the view)
 * @throws NotFoundException if the attribute does not exist
 * @throws MethodNotAllowedException if the user's org may not see the attribute
 */
public function view($id, $hasChildren = 0) {
	$this->Attribute->id = $id;
	if (!$this->Attribute->exists()) {
		throw new NotFoundException('Invalid attribute');
	}
	$this->Attribute->recursive = -1;
	$this->Attribute->contain('Event');
	$attribute = $this->Attribute->read();
	if (!$this->_isSiteAdmin()) {
		// Deny access unless the event belongs to the user's org, or both the
		// event and the attribute are shared (distribution > 0).
		// Fixed: the previous check was inverted (it threw exactly for users
		// who WERE allowed to see the attribute) and threw the non-existent
		// class MethodNotAllowed instead of CakePHP's MethodNotAllowedException.
		if ($this->Attribute->data['Event']['org'] != $this->Auth->user('org')
				&& ($this->Attribute->data['Event']['distribution'] == 0 || $this->Attribute->data['Attribute']['distribution'] == 0)) {
			throw new MethodNotAllowedException('Invalid attribute');
		}
	}
	// Collect correlations of this attribute within its event for display.
	$eventRelations = $this->Attribute->Event->getRelatedAttributes($this->Auth->user(), $this->_isSiteAdmin(), $attribute['Attribute']['event_id']);
	$attribute['Attribute']['relations'] = array();
	if (isset($eventRelations[$id])) {
		foreach ($eventRelations[$id] as $relations) {
			$attribute['Attribute']['relations'][] = array($relations['id'], $relations['info'], $relations['org']);
		}
	}
	$object = $attribute['Attribute'];
	$object['objectType'] = 0;
	$object['hasChildren'] = $hasChildren;
	$this->set('object', $object);
	$this->set('distributionLevels', $this->Attribute->Event->distributionLevels);
	/*
	$this->autoRender = false;
	$responseObject = array();
	return new CakeResponse(array('body'=> json_encode($attribute['Attribute']),'status'=>200));
	*/
}
/**
 * delete method
 *
 * @param string $id attribute ID
 * @return void
 * @throws MethodNotAllowedException
 * @throws NotFoundException
 *
 * REST and AJAX POST callers delete without a confirmation prompt.
 */
/**
 * Delete an attribute, via AJAX (JSON response / confirmation form) or via
 * a classic POST / REST request (flash message + redirect).
 *
 * @param string $id attribute ID
 * @throws MethodNotAllowedException on a non-AJAX request that is neither POST nor REST
 */
public function delete($id = null) {
	if ($this->request->is('ajax')) {
		if ($this->request->is('post')) {
			// AJAX POST: perform the delete and report the outcome as JSON.
			$body = $this->__delete($id)
				? array('saved' => true, 'success' => 'Attribute deleted.')
				: array('saved' => false, 'errors' => 'Attribute was not deleted.');
			return new CakeResponse(array('body'=> json_encode($body),'status'=>200));
		}
		// AJAX GET: render the confirmation dialog for this attribute.
		$this->set('id', $id);
		$attribute = $this->Attribute->find('first', array(
			'conditions' => array('id' => $id),
			'recursive' => -1,
			'fields' => array('id', 'event_id'),
		));
		$this->set('event_id', $attribute['Attribute']['event_id']);
		$this->render('ajax/attributeConfirmationForm');
		return;
	}
	// Classic flow: only POST or REST may delete.
	if (!$this->request->is('post') && !$this->_isRest()) {
		throw new MethodNotAllowedException();
	}
	if ($this->__delete($id)) {
		$this->Session->setFlash(__('Attribute deleted'));
	} else {
		$this->Session->setFlash(__('Attribute was not deleted'));
	}
	if (!$this->_isRest()) $this->redirect($this->referer()); // TODO check
	else $this->redirect(array('action' => 'index'));
}
/**
 * Unified delete used by both the single-delete action and the multi-select delete.
 *
 * @param int|string $id attribute ID
 * @throws NotFoundException
 * @throws MethodNotAllowedException if the user's org may not delete this attribute
 * @return boolean true if the attribute (and any attached shadow attributes) was deleted, false otherwise
 */
/**
 * Delete a single attribute after checking org permissions.
 *
 * For locked (synchronised) events only sync users of the owning org may
 * delete; for unlocked events only the creator org may.
 *
 * @param int|string $id attribute ID
 * @throws MethodNotAllowedException if the permission check fails
 * @return boolean true on successful deletion, false otherwise
 */
private function __delete($id) {
	$this->Attribute->id = $id;
	if (!$this->Attribute->exists()) {
		return false;
	}
	$result = $this->Attribute->find('first', array(
		'conditions' => array('Attribute.id' => $id),
		// Fixed: 'Attribute.id, Attribute.event_id' was a single string holding
		// two column names, producing a malformed field list.
		'fields' => array('Attribute.id', 'Attribute.event_id', 'Attribute.uuid'),
		'contain' => array('Event' => array(
			'fields' => array('Event.id', 'Event.orgc', 'Event.org', 'Event.locked')
		)),
	));
	// find the uuid (kept for the remote-server delete below, currently disabled)
	$uuid = $result['Attribute']['uuid'];
	// check for permissions
	if (!$this->_isSiteAdmin()) {
		if ($result['Event']['locked']) {
			if ($this->_checkOrg() != $result['Event']['org'] || !$this->userRole['perm_sync']) {
				throw new MethodNotAllowedException();
			}
		} else {
			if ($this->_checkOrg() != $result['Event']['orgc']) {
				throw new MethodNotAllowedException();
			}
		}
	}
	// attachment will be deleted with the beforeDelete() function in the Model
	if ($this->Attribute->delete()) {
		// delete the attribute from remote servers
		//$this->__deleteAttributeFromServers($uuid);
		// We have just deleted the attribute, let's also check if there are any shadow attributes that were attached to it and delete them
		$this->loadModel('ShadowAttribute');
		$this->ShadowAttribute->deleteAll(array('ShadowAttribute.old_id' => $id), false);
		return true;
	} else {
		return false;
	}
}
/**
 * Delete a posted list of attribute IDs belonging to one event and answer
 * with a JSON summary of successes and failures.
 *
 * @param int|string $id event ID the attributes must belong to
 * @throws MethodNotAllowedException on non-POST/non-AJAX requests or insufficient rights
 */
public function deleteSelected($id) {
	if (!$this->request->is('post') && !$this->request->is('ajax')) {
		//if (!$this->request->is('post')) {
		throw new MethodNotAllowedException();
	}
	// get a json object with a list of attribute IDs to be deleted
	// check each of them and return a json object with the successful deletes and the failed ones.
	$ids = json_decode($this->request->data['Attribute']['ids']);
	if (!$this->_isSiteAdmin()) {
		// Non-admins need creator-org membership plus modify rights on the event.
		$event = $this->Attribute->Event->find('first', array(
			'conditions' => array('id' => $id),
			'recursive' => -1,
			'fields' => array('id', 'orgc', 'user_id')
		));
		if ($event['Event']['orgc'] != $this->Auth->user('org') || (!$this->userRole['perm_modify_org'] && !($this->userRole['perm_modify'] && $event['Event']['user_id'] == $this->Auth->user('id')))) {
			throw new MethodNotAllowedException('Invalid Event.');
		}
	}
	// find all attributes from the ID list that also match the provided event ID.
	$attributes = $this->Attribute->find('all', array(
		'recursive' => -1,
		'conditions' => array('id' => $ids, 'event_id' => $id),
		'fields' => array('id', 'event_id')
	));
	// Attempt each delete individually; __delete() enforces per-attribute permissions.
	$successes = array();
	foreach ($attributes as $a) {
		$attributeId = $a['Attribute']['id'];
		if ($this->__delete($attributeId)) {
			$successes[] = $attributeId;
		}
	}
	$fails = array_diff($ids, $successes);
	$this->autoRender = false;
	$successCount = count($successes);
	$failCount = count($fails);
	if ($failCount == 0 && $successCount > 0) {
		$body = array('saved' => true, 'success' => $successCount . ' attribute' . ($successCount != 1 ? 's' : '') . ' deleted.');
	} else {
		$body = array('saved' => false, 'errors' => $successCount . ' attribute' . ($successCount != 1 ? 's' : '') . ' deleted, but ' . $failCount . ' attribute' . ($failCount != 1 ? 's' : '') . ' could not be deleted.');
	}
	return new CakeResponse(array('body'=> json_encode($body),'status'=>200));
}
/**
 * Mass-edit a posted list of attributes of one event (to_ids, distribution,
 * comment). Sentinel values (to_ids == 2, distribution == 4, empty comment)
 * mean "leave that field untouched". GET renders the mass-edit form.
 *
 * @param int|string $id event ID the attributes must belong to
 * @throws MethodNotAllowedException on non-AJAX access, missing event ID, or insufficient rights
 */
public function editSelected($id) {
	if (!$this->request->is('ajax')) throw new MethodNotAllowedException('This method can only be accessed via AJAX.');
	if ($this->request->is('post')) {
		$event = $this->Attribute->Event->find('first', array(
			'conditions' => array('id' => $id),
			'recursive' => -1,
			'fields' => array('id', 'orgc', 'user_id')
		));
		if (!$this->_isSiteAdmin()) {
			// Fixed: $event['user_id'] was read at the wrong nesting level
			// (CakePHP returns $event['Event']['user_id']), so the per-user
			// perm_modify branch could never grant access.
			if ($event['Event']['orgc'] != $this->Auth->user('org') || (!$this->userRole['perm_modify_org'] && !($this->userRole['perm_modify'] && $event['Event']['user_id'] == $this->Auth->user('id')))) {
				throw new MethodNotAllowedException('You are not authorized to edit this event.');
			}
		}
		$attribute_ids = json_decode($this->request->data['Attribute']['attribute_ids']);
		$attributes = $this->Attribute->find('all', array(
			'conditions' => array(
				'id' => $attribute_ids,
				'event_id' => $id,
			),
			//to_ids = true/false, distribution = [0,1,2,3]
			//'fields' => array('id', 'event_id', 'comment', 'to_ids', 'timestamp', 'distribution'),
			'recursive' => -1,
		));
		// All three fields left at their sentinels: nothing to change.
		if ($this->request->data['Attribute']['to_ids'] == 2 && $this->request->data['Attribute']['distribution'] == 4 && $this->request->data['Attribute']['comment'] == null) {
			$this->autoRender = false;
			return new CakeResponse(array('body'=> json_encode(array('saved' => true)),'status' => 200));
		}
		if ($this->request->data['Attribute']['to_ids'] != 2) {
			foreach ($attributes as &$attribute) $attribute['Attribute']['to_ids'] = ($this->request->data['Attribute']['to_ids'] == 0 ? false : true);
		}
		if ($this->request->data['Attribute']['distribution'] != 4) {
			foreach ($attributes as &$attribute) $attribute['Attribute']['distribution'] = $this->request->data['Attribute']['distribution'];
		}
		if ($this->request->data['Attribute']['comment'] != null) {
			foreach ($attributes as &$attribute) $attribute['Attribute']['comment'] = $this->request->data['Attribute']['comment'];
		}
		// Stamp every touched attribute with the same edit time.
		$date = new DateTime();
		$timestamp = $date->getTimestamp();
		foreach ($attributes as &$attribute) $attribute['Attribute']['timestamp'] = $timestamp;
		if($this->Attribute->saveMany($attributes)) {
			$this->autoRender = false;
			return new CakeResponse(array('body'=> json_encode(array('saved' => true)),'status' => 200));
		} else {
			$this->autoRender = false;
			return new CakeResponse(array('body'=> json_encode(array('saved' => false)),'status' => 200));
		}
	} else {
		// GET: render the mass-edit form.
		if (!isset($id)) throw new MethodNotAllowedException('No event ID provided.');
		$this->layout = 'ajax';
		$this->set('id', $id);
		$this->set('distributionLevels', $this->Attribute->distributionLevels);
		$this->set('distributionDescriptions', $this->Attribute->distributionDescriptions);
		$this->set('attrDescriptions', $this->Attribute->fieldDescriptions);
		$this->render('ajax/attributeEditMassForm');
	}
}
/**
* Deletes this specific attribute from all remote servers
* TODO move this to a component(?)
*/
/**
 * Deletes this specific attribute from all remote servers
 * TODO move this to a component(?)
 *
 * @param string $uuid UUID of the attribute to delete remotely
 */
private function __deleteAttributeFromServers($uuid) {
	// get a list of the servers with push active
	$this->loadModel('Server');
	$servers = $this->Server->find('all', array('conditions' => array('push' => 1)));
	// nothing to do without push-enabled servers
	if (empty($servers)) {
		return;
	}
	App::uses('SyncTool', 'Tools');
	// propagate the delete to every push-enabled server over its own socket
	foreach ($servers as &$server) {
		$syncTool = new SyncTool();
		$socket = $syncTool->setupHttpSocket($server);
		$this->Attribute->deleteAttributeFromServer($uuid, $server, $socket);
	}
}
/**
 * Attribute search. Three entry modes:
 *  - GET on /attributes/search: show the search form.
 *  - POST on /attributes/search: build conditions from the posted keywords
 *    (value, event IDs, tags, org, type, category, IOC flag) and paginate.
 *  - any other URL (pagination links): re-run the conditions stored in the session.
 * Keyword syntax: one term per line, '!' prefix negates, CIDR notation is
 * expanded, 'value1|value2' composites are matched per component.
 */
public function search() {
	$fullAddress = '/attributes/search';
	if ($this->request->here == $fullAddress) {
		$this->set('attrDescriptions', $this->Attribute->fieldDescriptions);
		$this->set('typeDefinitions', $this->Attribute->typeDefinitions);
		$this->set('categoryDefinitions', $this->Attribute->categoryDefinitions);
		// reset the paginate_conditions
		$this->Session->write('paginate_conditions', array());
		if ($this->request->is('post') && ($this->request->here == $fullAddress)) {
			$keyword = $this->request->data['Attribute']['keyword'];
			$keyword2 = $this->request->data['Attribute']['keyword2'];
			$tags = $this->request->data['Attribute']['tags'];
			$org = $this->request->data['Attribute']['org'];
			$type = $this->request->data['Attribute']['type'];
			$ioc = $this->request->data['Attribute']['ioc'];
			$this->set('ioc', $ioc);
			$category = $this->request->data['Attribute']['category'];
			$this->set('keywordSearch', $keyword);
			$this->set('tags', $tags);
			$keyWordText = null;
			$keyWordText2 = null;
			$keyWordText3 = null;
			$this->set('typeSearch', $type);
			$this->set('isSearch', 1);
			$this->set('categorySearch', $category);
			// search the db
			$conditions = array();
			// IOC-only search: restrict to IDS-flagged attributes of published events
			if ($ioc) {
				$conditions['AND'][] = array('Attribute.to_ids =' => 1);
				$conditions['AND'][] = array('Event.published =' => 1);
			}
			// search on the value field
			if (isset($keyword)) {
				$keywordArray = explode("\n", $keyword);
				$this->set('keywordArray', $keywordArray);
				$i = 1;
				$temp = array();	// positive matches, ORed together
				$temp2 = array();	// negated matches, ANDed together
				foreach ($keywordArray as $keywordArrayElement) {
					$saveWord = trim(strtolower($keywordArrayElement));
					if ($saveWord != '') {
						$toInclude = true;
						if ($saveWord[0] == '!') {
							$toInclude = false;
							$saveWord = substr($saveWord, 1);
						}
						if (preg_match('@^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/(\d|[1-2]\d|3[0-2]))$@', $saveWord)) {
							// CIDR term: expand into the individual values/ranges it covers
							$cidrresults = $this->Cidr->CIDR($saveWord);
							foreach ($cidrresults as $result) {
								$result = strtolower($result);
								if (strpos($result, '|')) {
									// Fixed: '!toInclude' (missing $) referenced an undefined
									// constant, making the negated-CIDR branch unreachable.
									if (!$toInclude) {
										$temp2[] = array(
											'AND' => array(
												'LOWER(Attribute.value1) NOT LIKE' => $resultParts[0],
												'LOWER(Attribute.value2) NOT LIKE' => $resultParts[1],
										));
									} else {
										$temp[] = array(
											'AND' => array(
												'LOWER(Attribute.value1)' => $resultParts[0],
												'LOWER(Attribute.value2)' => $resultParts[1],
										));
									}
									// NOTE(review): $resultParts is not derived from $result in
									// this branch (the explode('|', $result) is missing upstream)
									// — confirm against the Cidr component's output format.
								} else {
									if (!$toInclude) {
										array_push($temp2, array('LOWER(Attribute.value1) NOT LIKE' => $result));
										array_push($temp2, array('LOWER(Attribute.value2) NOT LIKE' => $result));
									} else {
										array_push($temp, array('LOWER(Attribute.value1) LIKE' => $result));
										array_push($temp, array('LOWER(Attribute.value2) LIKE' => $result));
									}
								}
							}
						} else {
							if (strpos($saveWord, '|')) {
								// composite term: match value1 suffix and value2 prefix
								$resultParts = explode('|', $saveWord);
								if (!$toInclude) {
									$temp2[] = array(
										'AND' => array(
											'LOWER(Attribute.value1) NOT LIKE' => '%' . $resultParts[0],
											'LOWER(Attribute.value2) NOT LIKE' => $resultParts[1] . '%',
									));
								} else {
									// Fixed: the positive composite branch pushed into the
									// negation list ($temp2) and omitted the LIKE operator,
									// so positive 'a|b' searches could never match.
									$temp[] = array(
										'AND' => array(
											'LOWER(Attribute.value1) LIKE' => '%' . $resultParts[0],
											'LOWER(Attribute.value2) LIKE' => $resultParts[1] . '%',
									));
								}
							} else {
								if (!$toInclude) {
									array_push($temp2, array('LOWER(Attribute.value1) NOT LIKE' => '%' . $saveWord . '%'));
									array_push($temp2, array('LOWER(Attribute.value2) NOT LIKE' => '%' . $saveWord . '%'));
								} else {
									array_push($temp, array('LOWER(Attribute.value1) LIKE' => '%' . $saveWord . '%'));
									array_push($temp, array('LOWER(Attribute.value2) LIKE' => '%' . $saveWord . '%'));
								}
							}
						}
						// the comment field is always searched alongside the values
						if ($toInclude) {
							array_push($temp, array('LOWER(Attribute.comment) LIKE' => '%' . $saveWord . '%'));
						} else {
							array_push($temp2, array('LOWER(Attribute.comment) NOT LIKE' => '%' . $saveWord . '%'));
						}
					}
					// build the human-readable summary of the first nine terms
					if ($i == 1 && $saveWord != '') $keyWordText = $saveWord;
					else if (($i > 1 && $i < 10) && $saveWord != '') $keyWordText = $keyWordText . ', ' . $saveWord;
					else if ($i == 10 && $saveWord != '') $keyWordText = $keyWordText . ' and several other keywords';
					$i++;
				}
				$this->set('keywordSearch', $keyWordText);
				if (!empty($temp)) {
					$conditions['AND']['OR'] = $temp;
				}
				if (!empty($temp2)) {
					$conditions['AND'][] = $temp2;
				}
			}
			// event IDs to be excluded
			if (isset($keyword2)) {
				$keywordArray2 = explode("\n", $keyword2);
				$i = 1;
				$temp = array();
				foreach ($keywordArray2 as $keywordArrayElement) {
					$saveWord = trim($keywordArrayElement);
					if (empty($saveWord)) continue;
					if ($saveWord[0] == '!') {
						$temp[] = array('Attribute.event_id !=' => substr($saveWord, 1));
					} else {
						$temp['OR'][] = array('Attribute.event_id =' => $saveWord);
					}
					if ($i == 1 && $saveWord != '') $keyWordText2 = $saveWord;
					else if (($i > 1 && $i < 10) && $saveWord != '') $keyWordText2 = $keyWordText2 . ', ' . $saveWord;
					else if ($i == 10 && $saveWord != '') $keyWordText2 = $keyWordText2 . ' and several other events';
					$i++;
				}
				$this->set('keywordSearch2', $keyWordText2);
				if (!empty($temp)) {
					$conditions['AND'][] = $temp;
				}
			}
			// tag filter: '!' prefix excludes, everything else includes
			if (!empty($tags)) {
				$include = array();
				$exclude = array();
				$keywordArray = explode("\n", $tags);
				foreach ($keywordArray as $tagname) {
					$tagname = trim($tagname);
					if (substr($tagname, 0, 1) === '!') $exclude[] = substr($tagname, 1);
					else $include[] = $tagname;
				}
				$this->loadModel('Tag');
				if (!empty($include)) $conditions['AND'][] = array('OR' => array('Attribute.event_id' => $this->Tag->findTags($include)));
				if (!empty($exclude)) $conditions['AND'][] = array('Attribute.event_id !=' => $this->Tag->findTags($exclude));
			}
			if ($type != 'ALL') {
				$conditions['Attribute.type ='] = $type;
			}
			if ($category != 'ALL') {
				$conditions['Attribute.category ='] = $category;
			}
			// organisation search field
			$i = 1;
			$temp = array();
			if (isset($org)) {
				$orgArray = explode("\n", $org);
				foreach ($orgArray as $orgArrayElement) {
					$saveWord = trim($orgArrayElement);
					if (empty($saveWord)) continue;
					if ($saveWord[0] == '!') {
						$temp[] = array('Event.orgc NOT LIKE ' => '%' . substr($saveWord, 1) . '%');
					} else {
						$temp['OR'][] = array('Event.orgc LIKE ' => '%' . $saveWord . '%');
					}
					// Fixed: this summary accumulation sat outside the foreach, so
					// only the last org term was ever reported (mirrors keyword2).
					if ($i == 1 && $saveWord != '') $keyWordText3 = $saveWord;
					else if (($i > 1 && $i < 10) && $saveWord != '') $keyWordText3 = $keyWordText3 . ', ' . $saveWord;
					else if ($i == 10 && $saveWord != '') $keyWordText3 = $keyWordText3 . ' and several other organisations';
					$i++;
				}
				$this->set('orgSearch', $keyWordText3);
				if (!empty($temp)) {
					$conditions['AND'][] = $temp;
				}
			}
			if ($this->request->data['Attribute']['alternate']) {
				// alternate view: events ranked by share of IDS-flagged matches
				$events = $this->searchAlternate($conditions);
				$this->set('events', $events);
				$this->render('alternate_search_result');
			} else {
				$this->Attribute->recursive = 0;
				$this->paginate = array(
					'limit' => 60,
					'maxLimit' => 9999, // LATER we will bump here on a problem once we have more than 9999 attributes?
					'conditions' => $conditions,
					'contain' => array('Event.orgc', 'Event.id', 'Event.org', 'Event.user_id', 'Event.info')
				);
				if (!$this->_isSiteAdmin()) {
					// merge in private conditions
					$this->paginate = Set::merge($this->paginate, array(
						'conditions' =>
							array("OR" => array(
								array('Event.org =' => $this->Auth->user('org')),
								array("AND" => array('Event.org !=' => $this->Auth->user('org')), array('Event.distribution !=' => 0), array('Attribute.distribution !=' => 0)))),
						)
					);
				}
				$idList = array();
				$attributeIdList = array();
				$attributes = $this->paginate();
				// if we searched for IOCs only, apply the whitelist to the search result!
				if ($ioc) {
					$this->loadModel('Whitelist');
					$attributes = $this->Whitelist->removeWhitelistedFromArray($attributes, true);
				}
				foreach ($attributes as &$attribute) {
					$attributeIdList[] = $attribute['Attribute']['id'];
					if (!in_array($attribute['Attribute']['event_id'], $idList)) {
						$idList[] = $attribute['Attribute']['event_id'];
					}
				}
				$this->set('attributes', $attributes);
				// and store into session so pagination links can re-run the search
				$this->Session->write('paginate_conditions', $this->paginate);
				$this->Session->write('paginate_conditions_keyword', $keyword);
				$this->Session->write('paginate_conditions_keyword2', $keyword2);
				$this->Session->write('paginate_conditions_org', $org);
				$this->Session->write('paginate_conditions_type', $type);
				$this->Session->write('paginate_conditions_ioc', $ioc);
				$this->Session->write('paginate_conditions_tags', $tags);
				$this->Session->write('paginate_conditions_category', $category);
				$this->Session->write('search_find_idlist', $idList);
				$this->Session->write('search_find_attributeidlist', $attributeIdList);
				// set the same view as the index page
				$this->render('index');
			}
		} else {
			// no search keyword is given, show the search form
			// adding filtering by category and type
			// combobox for types
			$types = array('' => array('ALL' => 'ALL'), 'types' => array());
			$types['types'] = array_merge($types['types'], $this->_arrayToValuesIndexArray(array_keys($this->Attribute->typeDefinitions)));
			$this->set('types', $types);
			// combobox for categories
			$categories = array('' => array('ALL' => 'ALL', '' => ''), 'categories' => array());
			array_pop($this->Attribute->validate['category']['rule'][1]); // remove that last 'empty' item
			$categories['categories'] = array_merge($categories['categories'], $this->_arrayToValuesIndexArray($this->Attribute->validate['category']['rule'][1]));
			$this->set('categories', $categories);
		}
	} else {
		// pagination link: re-run the stored search
		$this->set('attrDescriptions', $this->Attribute->fieldDescriptions);
		$this->set('typeDefinitions', $this->Attribute->typeDefinitions);
		$this->set('categoryDefinitions', $this->Attribute->categoryDefinitions);
		// get from Session
		$keyword = $this->Session->read('paginate_conditions_keyword');
		$keyword2 = $this->Session->read('paginate_conditions_keyword2');
		$org = $this->Session->read('paginate_conditions_org');
		$type = $this->Session->read('paginate_conditions_type');
		$category = $this->Session->read('paginate_conditions_category');
		$tags = $this->Session->read('paginate_conditions_tags');
		$this->set('keywordSearch', $keyword);
		$this->set('keywordSearch2', $keyword2);
		$this->set('orgSearch', $org);
		$this->set('typeSearch', $type);
		$this->set('tags', $tags);
		$this->set('isSearch', 1);
		$this->set('categorySearch', $category);
		// re-get pagination
		$this->Attribute->recursive = 0;
		$this->paginate = $this->Session->read('paginate_conditions');
		$this->set('attributes', $this->paginate());
		// set the same view as the index page
		$this->render('index');
	}
}
// If the checkbox for the alternate search is ticked, then this method is called to return the data to be represented
// This alternate view will show a list of events with matching search results and the percentage of those matched attributes being marked as to_ids
// events are sorted based on relevance (as in the percentage of matches being flagged as indicators for IDS)
// If the checkbox for the alternate search is ticked, then this method is called to return the data to be represented
// This alternate view will show a list of events with matching search results and the percentage of those matched attributes being marked as to_ids
// events are sorted based on relevance (as in the percentage of matches being flagged as indicators for IDS)
public function searchAlternate($data) {
	// restrict to the user's own org or fully shared events/attributes
	$data['AND'][] = array(
		"OR" => array(
			array('Event.org =' => $this->Auth->user('org')),
			array("AND" => array('Event.org !=' => $this->Auth->user('org')), array('Event.distribution !=' => 0), array('Attribute.distribution !=' => 0))));
	$attributes = $this->Attribute->find('all', array(
		'conditions' => $data,
		'fields' => array(
			'Attribute.id', 'Attribute.event_id', 'Attribute.type', 'Attribute.category', 'Attribute.to_ids', 'Attribute.value', 'Attribute.distribution',
			'Event.id', 'Event.org', 'Event.orgc', 'Event.info', 'Event.distribution', 'Event.attribute_count'
		)));
	// group matches per event and count IDS vs non-IDS attributes
	$events = array();
	foreach ($attributes as $attribute) {
		$eventId = $attribute['Event']['id'];
		if (!isset($events[$eventId])) {
			$events[$eventId] = array(
				'Event' => $attribute['Event'],
				'to_ids' => 0,
				'no_ids' => 0,
			);
		}
		$bucket = $attribute['Attribute']['to_ids'] ? 'to_ids' : 'no_ids';
		$events[$eventId][$bucket]++;
	}
	// relevance = percentage of matched attributes flagged for IDS export
	foreach ($events as &$event) {
		$event['relevance'] = 100 * $event['to_ids'] / ($event['no_ids'] + $event['to_ids']);
	}
	if (!empty($events)) $events = $this->__subval_sort($events, 'relevance');
	return $events;
}
// Sort the array of arrays based on a value of a sub-array
// Sort the array of arrays based on a value of a sub-array (descending).
// NOTE(review): values are compared after strtolower(), i.e. as strings —
// this matches the original behavior but gives lexicographic, not numeric,
// ordering for the 'relevance' key; confirm that is intended.
private function __subval_sort($a, $subkey) {
	// Fixed: $b and $c were never initialized, raising notices and returning
	// an undefined variable when $a is empty.
	$b = array();
	foreach ($a as $k => $v) {
		$b[$k] = strtolower($v[$subkey]);
	}
	arsort($b);
	$c = array();
	foreach ($b as $key => $val) {
		$c[] = $a[$key];
	}
	return $c;
}
/**
 * Export the attribute IDs remembered from the last search as an XML download.
 * Non-admins are restricted to shared attributes (distribution > 0) or
 * attributes whose event belongs to their own organisation.
 */
public function downloadAttributes() {
	$idList = $this->Session->read('search_find_idlist');
	$this->response->type('xml'); // set the content type
	$this->header('Content-Disposition: download; filename="misp.attribute.search.xml"');
	$this->layout = 'xml/default';
	$this->loadModel('Attribute');
	if (!isset($idList)) {
		print "No results found to export\n";
	} else {
		foreach ($idList as $listElement) {
			$put['OR'][] = array('Attribute.id' => $listElement);
		}
		$conditions['AND'][] = $put;
		// restricting to non-private or same org if the user is not a site-admin.
		// Fixed: the previous code pushed an undefined $distribution variable into
		// the conditions and wrapped the alternatives so they were ANDed instead
		// of ORed. The intended rule is: distribution > 0 OR event owned by the
		// user's org.
		if (!$this->_isSiteAdmin()) {
			$put2['OR'][] = array('Attribute.distribution >' => 0);
			$put2['OR'][] = array('(SELECT events.org FROM events WHERE events.id = Attribute.event_id) LIKE' => $this->_checkOrg());
			$conditions['AND'][] = $put2;
		}
		$params = array(
			'conditions' => $conditions, //array of conditions
			'recursive' => 0, //int
			'fields' => array('Attribute.id', 'Attribute.value'), //array of field names
			'order' => array('Attribute.id'), //string or array defining order
		);
		$attributes = $this->Attribute->find('all', $params);
		$this->set('results', $attributes);
	}
	$this->render('xml');
}
public function checkComposites() {
if (!self::_isAdmin()) throw new NotFoundException();
$this->set('fails', $this->Attribute->checkComposites());
}
// Use the rest interface to search for attributes. Usage:
// MISP-base-url/attributes/restSearch/[api-key]/[value]/[type]/[category]/[orgc]
// value, type, category, orgc are optional
// the last 4 fields accept the following operators:
// && - you can use && between two search values to put a logical OR between them. for value, 1.1.1.1&&2.2.2.2 would find attributes with the value being either of the two.
// ! - you can negate a search term. For example: google.com&&!mail would search for all attributes with value google.com but not ones that include mail. www.google.com would get returned, mail.google.com wouldn't.
public function restSearch($key='download', $value=null, $type=null, $category=null, $org=null, $tags=null) {
if ($tags) $tags = str_replace(';', ':', $tags);
if ($tags === 'null') $tags = null;
if ($value === 'null') $value = null;
if ($type === 'null') $type = null;
if ($category === 'null') $category = null;
if ($org === 'null') $org = null;
if ($key!=null && $key!='download') {
$user = $this->checkAuthUser($key);
} else {
if (!$this->Auth->user()) throw new UnauthorizedException('You are not authorized. Please send the Authorization header with your auth key along with an Accept header for application/xml.');
$user = $this->checkAuthUser($this->Auth->user('authkey'));
}
if (!$user) {
throw new UnauthorizedException('This authentication key is not authorized to be used for exports. Contact your administrator.');
}
$value = str_replace('|', '/', $value);
// request handler for POSTed queries. If the request is a post, the parameters (apart from the key) will be ignored and replaced by the terms defined in the posted json or xml object.
// The correct format for both is a "request" root element, as shown by the examples below:
// For Json: {"request":{"value": "7.7.7.7&&1.1.1.1","type":"ip-src"}}
// For XML: <request><value>7.7.7.7&&1.1.1.1</value><type>ip-src</type></request>
// the response type is used to determine the parsing method (xml/json)
if ($this->request->is('post')) {
if ($this->response->type() === 'application/json') {
$data = $this->request->input('json_decode', true);
} elseif ($this->response->type() === 'application/xml' && !empty($this->request->data)) {
$data = $this->request->data;
} else {
throw new BadRequestException('Either specify the search terms in the url, or POST a json array / xml (with the root element being "request" and specify the correct accept and content type headers.');
}
$paramArray = array('value', 'type', 'category', 'org', 'tags');
foreach ($paramArray as $p) {
if (isset($data['request'][$p])) ${$p} = $data['request'][$p];
else ${$p} = null;
}
}
if (!isset($this->request->params['ext']) || $this->request->params['ext'] !== 'json') {
$this->response->type('xml'); // set the content type
$this->layout = 'xml/default';
$this->header('Content-Disposition: download; filename="misp.search.attribute.results.xml"');
} else {
$this->response->type('json'); // set the content type
$this->layout = 'json/default';
$this->header('Content-Disposition: download; filename="misp.search.attribute.results.json"');
}
$conditions['AND'] = array();
$subcondition = array();
$this->loadModel('Attribute');
// add the values as specified in the 2nd parameter to the conditions
$values = explode('&&', $value);
$parameters = array('value', 'type', 'category', 'org');
foreach ($parameters as $k => $param) {
if (isset(${$parameters[$k]}) && ${$parameters[$k]}!=='null') {
$elements = explode('&&', ${$parameters[$k]});
foreach($elements as $v) {
if (substr($v, 0, 1) == '!') {
if ($parameters[$k] === 'value' && preg_match('@^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/(\d|[1-2]\d|3[0-2]))$@', substr($v, 1))) {
$cidrresults = $this->Cidr->CIDR(substr($v, 1));
foreach ($cidrresults as $result) {
$subcondition['AND'][] = array('Attribute.value NOT LIKE' => $result);
}
} else {
if ($parameters[$k] === 'org') {
$subcondition['AND'][] = array('Event.' . $parameters[$k] . ' NOT LIKE' => '%'.substr($v, 1).'%');
} else {
$subcondition['AND'][] = array('Attribute.' . $parameters[$k] . ' NOT LIKE' => '%'.substr($v, 1).'%');
}
}
} else {
if ($parameters[$k] === 'value' && preg_match('@^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/(\d|[1-2]\d|3[0-2]))$@', substr($v, 1))) {
$cidrresults = $this->Cidr->CIDR($v);
foreach ($cidrresults as $result) {
$subcondition['OR'][] = array('Attribute.value LIKE' => $result);
}
} else {
if ($parameters[$k] === 'org') {
$subcondition['OR'][] = array('Event.' . $parameters[$k] . ' LIKE' => '%'.$v.'%');
} else {
$subcondition['OR'][] = array('Attribute.' . $parameters[$k] . ' LIKE' => '%'.$v.'%');
}
}
}
}
array_push ($conditions['AND'], $subcondition);
$subcondition = array();
}
}
// If we are looking for an attribute, we want to retrieve some extra data about the event to be able to check for the permissions.
if (!$user['User']['siteAdmin']) {
$temp = array();
$temp['AND'] = array('Event.distribution >' => 0, 'Attribute.distribution >' => 0);
$subcondition['OR'][] = $temp;
$subcondition['OR'][] = array('Event.org' => $user['User']['org']);
array_push($conditions['AND'], $subcondition);
}
// If we sent any tags along, load the associated tag names for each attribute
if ($tags) {
$args = $this->Attribute->dissectArgs($tags);
$this->loadModel('Tag');
$tagArray = $this->Tag->fetchEventTagIds($args[0], $args[1]);
$temp = array();
foreach ($tagArray[0] as $accepted) {
$temp['OR'][] = array('Event.id' => $accepted);
}
$conditions['AND'][] = $temp;
$temp = array();
foreach ($tagArray[1] as $rejected) {
$temp['AND'][] = array('Event.id !=' => $rejected);
}
$conditions['AND'][] = $temp;
}
// change the fields here for the attribute export!!!! Don't forget to check for the permissions, since you are not going through fetchevent. Maybe create fetchattribute?
$params = array(
'conditions' => $conditions,
'fields' => array('Attribute.*', 'Event.org', 'Event.distribution'),
'contain' => array('Event' => array())
);
$results = $this->Attribute->find('all', $params);
$this->loadModel('Whitelist');
$results = $this->Whitelist->removeWhitelistedFromArray($results, true);
if (empty($results)) throw new NotFoundException('No matches.');
$this->set('results', $results);
}
// returns an XML with attributes that belong to an event. The type of attributes to be returned can be restricted by type using the 3rd parameter.
// Similar to the restSearch, this parameter can be chained with '&&' and negations are accepted too. For example filename&&!filename|md5 would return all filenames that don't have an md5
// The usage of returnAttributes is the following: [MISP-url]/attributes/returnAttributes/<API-key>/<type>/<signature flag>
// The signature flag is off by default, enabling it will only return attribugtes that have the to_ids flag set to true.
public function returnAttributes($key='download', $id, $type = null, $sigOnly = false) {
$user = $this->checkAuthUser($key);
// if the user is authorised to use the api key then user will be populated with the user's account
// in addition we also set a flag indicating whether the user is a site admin or not.
if ($key!=null && $key!='download') {
$user = $this->checkAuthUser($key);
} else {
if (!$this->Auth->user()) throw new UnauthorizedException('You are not authorized. Please send the Authorization header with your auth key along with an Accept header for application/xml.');
$user = $this->checkAuthUser($this->Auth->user('authkey'));
}
if (!$user) {
throw new UnauthorizedException('This authentication key is not authorized to be used for exports. Contact your administrator.');
}
if ($this->request->is('post')) {
if ($this->response->type() === 'application/json') {
$data = $this->request->input('json_decode', true);
} elseif ($this->response->type() === 'application/xml' && !empty($this->request->data)) {
$data = $this->request->data;
} else {
throw new BadRequestException('Either specify the search terms in the url, or POST a json array / xml (with the root element being "request" and specify the correct accept and content type headers.');
}
$paramArray = array('type', 'sigOnly');
foreach ($paramArray as $p) {
if (isset($data['request'][$p])) ${$p} = $data['request'][$p];
else ${$p} = null;
}
}
$this->loadModel('Event');
$this->Event->read(null, $id);
$myEventOrAdmin = false;
if ($user['User']['siteAdmin'] || $this->Event->data['Event']['org'] == $user['User']['org']) {
$myEventOrAdmin = true;
}
if (!$myEventOrAdmin) {
if ($this->Event->data['Event']['distribution'] == 0) {
throw new UnauthorizedException('You don\'t have access to that event.');
}
}
$this->response->type('xml'); // set the content type
$this->layout = 'xml/default';
$this->header('Content-Disposition: download; filename="misp.search.attribute.results.xml"');
// check if user can see the event!
$conditions['AND'] = array();
$include = array();
$exclude = array();
$attributes = array();
// If there is a type set, create the include and exclude arrays from it
if (isset($type)) {
$elements = explode('&&', $type);
foreach($elements as $v) {
if (substr($v, 0, 1) == '!') {
$exclude[] = substr($v, 1);
} else {
$include[] = $v;
}
}
}
// check each attribute
foreach($this->Event->data['Attribute'] as $k => $attribute) {
$contained = false;
// If the include list is empty, then we just then the first check should always set contained to true (basically we chose type = all - exclusions, or simply all)
if (empty($include)) {
$contained = true;
} else {
// If we have elements in $include we should check if the attribute's type should be included
foreach ($include as $inc) {
if (strpos($attribute['type'], $inc) !== false) {
$contained = true;
}
}
}
// If we have either everything included or the attribute passed the include check, we should check if there is a reason to exclude the attribute
// For example, filename may be included, but md5 may be excluded, meaning that filename|md5 should be removed
if ($contained) {
foreach ($exclude as $exc) {
if (strpos($attribute['type'], $exc) !== false) {
$contained = false;
continue 2;
}
}
}
// If we still didn't throw the attribute away, let's check if the user requesting the attributes is of the owning organisation of the event
// and if not, whether the distribution of the attribute allows the user to see it
if ($contained && !$myEventOrAdmin && $attribute['distribution'] == 0) {
$contained = false;
}
// If we have set the sigOnly parameter and the attribute has to_ids set to false, discard it!
if ($contained && $sigOnly === 'true' && !$attribute['to_ids']) {
$contained = false;
}
// If after all of this $contained is still true, let's add the attribute to the array
if ($contained) $attributes[] = $attribute;
}
if (empty($attributes)) throw new NotFoundException('No matches.');
$this->set('results', $attributes);
}
public function downloadAttachment($key='download', $id) {
if ($key!=null && $key!='download') {
$user = $this->checkAuthUser($key);
} else {
if (!$this->Auth->user()) throw new UnauthorizedException('You are not authorized. Please send the Authorization header with your auth key along with an Accept header for application/xml.');
$user = $this->checkAuthUser($this->Auth->user('authkey'));
}
// if the user is authorised to use the api key then user will be populated with the user's account
// in addition we also set a flag indicating whether the user is a site admin or not.
if (!$user) {
throw new UnauthorizedException('This authentication key is not authorized to be used for exports. Contact your administrator.');
}
$this->Attribute->id = $id;
if(!$this->Attribute->exists()) {
throw new NotFoundException('Invalid attribute or no authorisation to view it.');
}
$this->Attribute->read(null, $id);
if (!$user['User']['siteAdmin'] &&
$user['User']['org'] != $this->Attribute->data['Event']['org'] &&
($this->Attribute->data['Event']['distribution'] == 0 ||
$this->Attribute->data['Attribute']['distribution'] == 0
)) {
throw new NotFoundException('Invalid attribute or no authorisation to view it.');
}
$this->__downloadAttachment($this->Attribute->data['Attribute']);
}
public function text($key='download', $type='all', $tags=false, $eventId=false, $allowNonIDS=false) {
if ($eventId === 'null' || $eventId == '0' || $eventId === 'false') $eventId = false;
if ($allowNonIDS === 'null' || $allowNonIDS === '0' || $allowNonIDS === 'false') $allowNonIDS = false;
if ($type === 'null' || $type === '0' || $type === 'false') $type = 'all';
if ($tags === 'null' || $tags === '0' || $tags === 'false') $tags = false;
if ($key != 'download') {
// check if the key is valid -> search for users based on key
$user = $this->checkAuthUser($key);
if (!$user) {
throw new UnauthorizedException('This authentication key is not authorized to be used for exports. Contact your administrator.');
}
} else {
if (!$this->Auth->user('id')) {
throw new UnauthorizedException('You have to be logged in to do that.');
}
}
$this->response->type('txt'); // set the content type
$this->header('Content-Disposition: download; filename="misp.' . $type . '.txt"');
$this->layout = 'text/default';
$attributes = $this->Attribute->text($this->_checkOrg(), $this->_isSiteAdmin(), $type, $tags, $eventId, $allowNonIDS);
$this->loadModel('Whitelist');
$attributes = $this->Whitelist->removeWhitelistedFromArray($attributes, true);
$this->set('attributes', $attributes);
}
public function reportValidationIssuesAttributes() {
// TODO improve performance of this function by eliminating the additional SQL query per attribute
// search for validation problems in the attributes
if (!self::_isSiteAdmin()) throw new NotFoundException();
$this->set('result', $this->Attribute->reportValidationIssuesAttributes());
}
	// Recalculates attribute correlations, either inline or as a background job.
	public function generateCorrelation() {
		// Site admins only; hide the endpoint from everyone else.
		if (!self::_isSiteAdmin()) throw new NotFoundException();
		if (!Configure::read('MISP.background_jobs')) {
			// Background workers disabled: run synchronously within the request.
			$k = $this->Attribute->generateCorrelation();
			$this->Session->setFlash(__('All done. ' . $k . ' attributes processed.'));
			$this->redirect(array('controller' => 'pages', 'action' => 'display', 'administration'));
		} else {
			// Queue the work through CakeResque and track it via a Job record.
			$job = ClassRegistry::init('Job');
			$job->create();
			$data = array(
					'worker' => 'default',
					'job_type' => 'generate correlation',
					'job_input' => 'All attributes',
					'status' => 0,
					'retries' => 0,
					'org' => 'ADMIN',
					'message' => 'Job created.',
			);
			$job->save($data);
			$jobId = $job->id;
			$process_id = CakeResque::enqueue(
					'default',
					'AdminShell',
					array('jobGenerateCorrelation', $jobId)
			);
			// Remember the resque process id so the jobs view can show progress.
			$job->saveField('process_id', $process_id);
			$this->Session->setFlash(__('Job queued. You can view the progress if you navigate to the active jobs view (administration -> jobs).'));
			$this->redirect(array('controller' => 'pages', 'action' => 'display', 'administration'));
		}
	}
public function fetchViewValue($id, $field = null) {
$validFields = array('value', 'comment', 'type', 'category', 'to_ids', 'distribution', 'timestamp');
if (!isset($field) || !in_array($field, $validFields)) throw new MethodNotAllowedException('Invalid field requested.');
//if (!$this->request->is('ajax')) throw new MethodNotAllowedException('This function can only be accessed via AJAX.');
$this->Attribute->id = $id;
if (!$this->Attribute->exists()) {
throw new NotFoundException(__('Invalid attribute'));
}
$attribute = $this->Attribute->find('first', array(
'recursive' => -1,
'conditions' => array('Attribute.id' => $id),
'fields' => array('id', 'distribution', 'event_id', $field),
'contain' => array(
'Event' => array(
'fields' => array('distribution', 'id', 'org'),
)
)
));
if (!$this->_isSiteAdmin()) {
//
if (!($attribute['Event']['org'] == $this->Auth->user('org') || ($attribute['Event']['distribution'] > 0 && $attribute['Attribute']['distribution'] > 0))) {
throw new NotFoundException(__('Invalid attribute'));
}
}
$result = $attribute['Attribute'][$field];
if ($field == 'distribution') $result=$this->Attribute->distributionLevels[$result];
if ($field == 'to_ids') $result = ($result == 0 ? 'No' : 'Yes');
if ($field == 'timestamp') {
if (isset($result)) $result = date('Y-m-d', $result);
else echo ' ';
}
$this->set('value', $result);
$this->layout = 'ajax';
$this->render('ajax/attributeViewFieldForm');
}
	// Renders the inline (AJAX) edit form for a single attribute field.
	public function fetchEditForm($id, $field = null) {
		$validFields = array('value', 'comment', 'type', 'category', 'to_ids', 'distribution');
		if (!isset($field) || !in_array($field, $validFields)) throw new MethodNotAllowedException('Invalid field requested.');
		if (!$this->request->is('ajax')) throw new MethodNotAllowedException('This function can only be accessed via AJAX.');
		$this->Attribute->id = $id;
		if (!$this->Attribute->exists()) {
			throw new NotFoundException(__('Invalid attribute'));
		}
		// type and category are interdependent, so load both when either is edited.
		$fields = array('id', 'distribution', 'event_id');
		$additionalFieldsToLoad = $field;
		if ($field == 'category' || $field == 'type') {
			$fields[] = 'type';
			$fields[] = 'category';
		} else {
			$fields[] = $field;
		}
		$attribute = $this->Attribute->find('first', array(
				'recursive' => -1,
				'conditions' => array('Attribute.id' => $id),
				'fields' => $fields,
				'contain' => array(
						'Event' => array(
								'fields' => array('distribution', 'id', 'user_id', 'orgc'),
						)
				)
		));
		if (!$this->_isSiteAdmin()) {
			// Creator-org check. NOTE(review): the perm_modify branch permits editing
			// when the event was created by a *different* user (user_id != current id);
			// upstream MISP uses '==' here — confirm whether this inversion is intended.
			if ($attribute['Event']['orgc'] == $this->Auth->user('org')
					&& (($this->userRole['perm_modify'] && $attribute['Event']['user_id'] != $this->Auth->user('id'))
							|| $this->userRole['perm_modify_org'])) {
				// Allow the edit
			} else {
				throw new NotFoundException(__('Invalid attribute'));
			}
		}
		$this->layout = 'ajax';
		if ($field == 'distribution') $this->set('distributionLevels', $this->Attribute->distributionLevels);
		if ($field == 'category') {
			// Map each attribute type to the categories it may belong to.
			$typeCategory = array();
			foreach ($this->Attribute->categoryDefinitions as $k => $category) {
				foreach ($category['types'] as $type) {
					$typeCategory[$type][] = $k;
				}
			}
			$this->set('typeCategory', $typeCategory);
		}
		if ($field == 'type') {
			$this->set('categoryDefinitions', $this->Attribute->categoryDefinitions);
		}
		$this->set('object', $attribute['Attribute']);
		// The per-field templates are named attributeEdit<Field>Form.
		$fieldURL = ucfirst($field);
		$this->render('ajax/attributeEdit' . $fieldURL . 'Form');
	}
	// Bulk-replaces all attributes of a given category/type in an event with a
	// newline-separated list of values: missing values are created, values no
	// longer listed are deleted, everything else is left untouched.
	public function attributeReplace($id) {
		if (!$this->userRole['perm_add']) {
			throw new MethodNotAllowedException('Event not found or you don\'t have permissions to create attributes');
		}
		$event = $this->Attribute->Event->find('first', array(
				'conditions' => array('Event.id' => $id),
				'fields' => array('id', 'orgc', 'distribution'),
				'recursive' => -1
		));
		if (empty($event) || (!$this->_isSiteAdmin() && ($event['Event']['orgc'] != $this->Auth->user('org') || !$this->userRole['perm_add']))) throw new MethodNotAllowedException('Event not found or you don\'t have permissions to create attributes');
		$this->set('event_id', $id);
		if ($this->request->is('get')) {
			// GET: render the replacement form.
			$this->layout = 'ajax';
			$this->request->data['Attribute']['event_id'] = $id;
			// combobox for types
			$types = array_keys($this->Attribute->typeDefinitions);
			$types = $this->_arrayToValuesIndexArray($types);
			$this->set('types', $types);
			// combobox for categories
			$categories = $this->Attribute->validate['category']['rule'][1];
			array_pop($categories);
			$categories = $this->_arrayToValuesIndexArray($categories);
			$this->set('categories', compact('categories'));
			$this->set('attrDescriptions', $this->Attribute->fieldDescriptions);
			$this->set('typeDefinitions', $this->Attribute->typeDefinitions);
			$this->set('categoryDefinitions', $this->Attribute->categoryDefinitions);
		}
		if ($this->request->is('post')) {
			if (!$this->request->is('ajax')) throw new MethodNotAllowedException('This action can only be accessed via AJAX.');
			$newValues = explode(PHP_EOL, $this->request->data['Attribute']['value']);
			$category = $this->request->data['Attribute']['category'];
			$type = $this->request->data['Attribute']['type'];
			$to_ids = $this->request->data['Attribute']['to_ids'];
			// NOTE(review): perm_add was already required above, so "!$this->userRole['perm_add']"
			// is always false here and this check can never throw — confirm whether '||' was intended.
			if (!$this->_isSiteAdmin() && $this->Auth->user('org') != $event['Event']['orgc'] && !$this->userRole['perm_add']) throw new MethodNotAllowedException('You are not authorised to do that.');
			$oldAttributes = $this->Attribute->find('all', array(
					'conditions' => array(
							'event_id' => $id,
							'category' => $category,
							'type' => $type,
					),
					'fields' => array('id', 'event_id', 'category', 'type', 'value'),
					'recursive' => -1,
			));
			$results = array('untouched' => count($oldAttributes), 'created' => 0, 'deleted' => 0, 'createdFail' => 0, 'deletedFail' => 0);
			// Create any submitted value that does not already exist.
			foreach ($newValues as &$value) {
				$value = trim($value);
				$found = false;
				foreach ($oldAttributes as &$old) {
					if ($value == $old['Attribute']['value']) {
						$found = true;
					}
				}
				if (!$found) {
					$attribute = array(
							'value' => $value,
							'event_id' => $id,
							'category' => $category,
							'type' => $type,
							'distribution' => $event['Event']['distribution'],
							'to_ids' => $to_ids,
					);
					$this->Attribute->create();
					if ($this->Attribute->save(array('Attribute' => $attribute))) {
						$results['created']++;
					} else {
						$results['createdFail']++;
					}
				}
			}
			// Delete any existing value that was not resubmitted.
			foreach ($oldAttributes as &$old) {
				if (!in_array($old['Attribute']['value'], $newValues)) {
					if ($this->Attribute->delete($old['Attribute']['id'])) {
						$results['deleted']++;
						$results['untouched']--;
					} else {
						$results['deletedFail']++;
					}
				}
			}
			$message = '';
			$success = true;
			if (($results['created'] > 0 || $results['deleted'] > 0) && $results['createdFail'] == 0 && $results['deletedFail'] == 0) {
				$message .= 'Update completed without any issues.';
				// Any change unpublishes the event and bumps its timestamp.
				$event = $this->Attribute->Event->find('first', array(
						'conditions' => array('Event.id' => $id),
						'recursive' => -1
				));
				$event['Event']['published'] = 0;
				$date = new DateTime();
				$event['Event']['timestamp'] = $date->getTimestamp();
				$this->Attribute->Event->save($event);
			} else {
				$message .= 'Update completed with some errors.';
				$success = false;
			}
			if ($results['created']) $message .= $results['created'] . ' attribute' . $this->__checkCountForOne($results['created']) . ' created. ';
			if ($results['createdFail']) $message .= $results['createdFail'] . ' attribute' . $this->__checkCountForOne($results['createdFail']) . ' could not be created. ';
			if ($results['deleted']) $message .= $results['deleted'] . ' attribute' . $this->__checkCountForOne($results['deleted']) . ' deleted.';
			if ($results['deletedFail']) $message .= $results['deletedFail'] . ' attribute' . $this->__checkCountForOne($results['deletedFail']) . ' could not be deleted. ';
			$message .= $results['untouched'] . ' attributes left untouched. ';
			$this->autoRender = false;
			$this->layout = 'ajax';
			if ($success) return new CakeResponse(array('body'=> json_encode(array('saved' => true, 'success' => $message)),'status'=>200));
			else return new CakeResponse(array('body'=> json_encode(array('saved' => true, 'errors' => $message)),'status'=>200));
		}
	}
private function __checkCountForOne($number) {
if ($number != 1) return 's';
return '';
}
}
| 0x0mar/MISP | app/Controller/AttributesController.php | PHP | agpl-3.0 | 85,681 |
// Protocol Buffers for Go with Gadgets
//
// Copyright (c) 2013, The GoGo Authors. All rights reserved.
// http://github.com/gogo/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package generator
import (
"bytes"
"go/parser"
"go/printer"
"go/token"
"path"
"strings"
"gx/ipfs/QmdxUuburamoF6zF9qjeQC4WYcWGbWuRmdLacMEsW8ioD8/gogo-protobuf/gogoproto"
"gx/ipfs/QmdxUuburamoF6zF9qjeQC4WYcWGbWuRmdLacMEsW8ioD8/gogo-protobuf/proto"
descriptor "gx/ipfs/QmdxUuburamoF6zF9qjeQC4WYcWGbWuRmdLacMEsW8ioD8/gogo-protobuf/protoc-gen-gogo/descriptor"
plugin "gx/ipfs/QmdxUuburamoF6zF9qjeQC4WYcWGbWuRmdLacMEsW8ioD8/gogo-protobuf/protoc-gen-gogo/plugin"
)
// Messages returns the message descriptors collected for this file.
func (d *FileDescriptor) Messages() []*Descriptor {
	return d.desc
}

// Enums returns the enum descriptors collected for this file.
func (d *FileDescriptor) Enums() []*EnumDescriptor {
	return d.enum
}

// IsGroup reports whether this message descriptor was parsed as a group.
func (d *Descriptor) IsGroup() bool {
	return d.group
}

// IsGroup reports whether the given field refers to a message type that was
// declared as a group.
func (g *Generator) IsGroup(field *descriptor.FieldDescriptorProto) bool {
	if d, ok := g.typeNameToObject[field.GetTypeName()].(*Descriptor); ok {
		return d.IsGroup()
	}
	return false
}

// TypeNameByObject looks up the object registered for the fully qualified
// proto type name, aborting generation via g.Fail if it is unknown.
func (g *Generator) TypeNameByObject(typeName string) Object {
	o, ok := g.typeNameToObject[typeName]
	if !ok {
		g.Fail("can't find object with type", typeName)
	}
	return o
}
// OneOfTypeName returns the Go type name used for a oneof wrapper, namely
// <MessageName>_<FieldName>, appending an underscore when that name would
// collide with a message or enum nested inside the same message.
func (g *Generator) OneOfTypeName(message *Descriptor, field *descriptor.FieldDescriptorProto) string {
	candidate := CamelCaseSlice(message.TypeName()) + "_" + g.GetOneOfFieldName(message, field)
	collides := false
	for _, nested := range message.nested {
		if strings.Join(nested.TypeName(), "_") == candidate {
			collides = true
			break
		}
	}
	if !collides {
		for _, enum := range message.enums {
			if strings.Join(enum.TypeName(), "_") == candidate {
				collides = true
				break
			}
		}
	}
	if collides {
		candidate += "_"
	}
	return candidate
}
// PluginImports tracks the extra package imports a generator plugin needs
// and emits them into the generated file.
type PluginImports interface {
	NewImport(pkg string) Single
	GenerateImports(file *FileDescriptor)
}

// pluginImports is the default PluginImports implementation, backed by the
// owning Generator.
type pluginImports struct {
	generator *Generator
	singles   []Single
}

// NewPluginImports creates an import tracker bound to the given generator.
func NewPluginImports(generator *Generator) *pluginImports {
	return &pluginImports{generator, make([]Single, 0)}
}

// NewImport registers pkg as a lazily-used import and returns its handle.
func (this *pluginImports) NewImport(pkg string) Single {
	imp := newImportedPackage(this.generator.ImportPrefix, pkg)
	this.singles = append(this.singles, imp)
	return imp
}

// GenerateImports prints an import line for every registered package whose
// handle was actually used while generating code for file.
func (this *pluginImports) GenerateImports(file *FileDescriptor) {
	for _, s := range this.singles {
		if s.IsUsed() {
			this.generator.PrintImport(GoPackageName(s.Name()), GoImportPath(s.Location()))
		}
	}
}
// Single is a handle for one imported package: Use marks the import as
// needed and yields the package name to qualify identifiers with.
type Single interface {
	Use() string
	IsUsed() bool
	Name() string
	Location() string
}

// importedPackage is the default Single implementation; it lazily records
// whether the package was ever used.
type importedPackage struct {
	used bool
	pkg string
	name string
	importPrefix string
}

// newImportedPackage returns an unused import handle for pkg.
func newImportedPackage(importPrefix string, pkg string) *importedPackage {
	return &importedPackage{
		pkg: pkg,
		importPrefix: importPrefix,
	}
}

// Use marks the package as used and returns its cleaned package name.
func (this *importedPackage) Use() string {
	if !this.used {
		this.name = string(cleanPackageName(this.pkg))
		this.used = true
	}
	return this.name
}

// IsUsed reports whether Use has been called at least once.
func (this *importedPackage) IsUsed() bool {
	return this.used
}

// Name returns the cleaned package name; empty until Use is called.
func (this *importedPackage) Name() string {
	return this.name
}

// Location returns the import path, including the generator's import prefix.
func (this *importedPackage) Location() string {
	return this.importPrefix + this.pkg
}
// GetFieldName returns the Go struct field name generated for field within
// message, honouring gogoproto customname/embed options and oneof grouping,
// and suffixing "_" to avoid clashes with generated method names (and with
// Size(), unless a protosizer is generated for the message).
func (g *Generator) GetFieldName(message *Descriptor, field *descriptor.FieldDescriptorProto) string {
	goTyp, _ := g.GoType(message, field)
	name := CamelCase(*field.Name)
	if gogoproto.IsCustomName(field) {
		name = gogoproto.GetCustomName(field)
	}
	if gogoproto.IsEmbed(field) {
		name = EmbedFieldName(goTyp)
	}
	if field.OneofIndex != nil {
		// Every member of a oneof shares the wrapper field named after the oneof declaration.
		name = CamelCase(message.OneofDecl[int(*field.OneofIndex)].GetName())
	}
	for _, reserved := range methodNames {
		if reserved == name {
			return name + "_"
		}
	}
	if name == "Size" && !gogoproto.IsProtoSizer(message.file.FileDescriptorProto, message.DescriptorProto) {
		return name + "_"
	}
	return name
}
// GetOneOfFieldName returns the Go field name used for a oneof member,
// honouring gogoproto customname/embed options and suffixing "_" to avoid
// clashes with generated method names (and with Size(), unless a protosizer
// is generated for the message).
func (g *Generator) GetOneOfFieldName(message *Descriptor, field *descriptor.FieldDescriptorProto) string {
	goTyp, _ := g.GoType(message, field)
	name := CamelCase(*field.Name)
	if gogoproto.IsCustomName(field) {
		name = gogoproto.GetCustomName(field)
	}
	if gogoproto.IsEmbed(field) {
		name = EmbedFieldName(goTyp)
	}
	for _, reserved := range methodNames {
		if reserved == name {
			return name + "_"
		}
	}
	if name == "Size" && !gogoproto.IsProtoSizer(message.file.FileDescriptorProto, message.DescriptorProto) {
		return name + "_"
	}
	return name
}
// IsMap reports whether field is a map field, i.e. a message-typed field
// whose type resolves to a synthesized map-entry message.
func (g *Generator) IsMap(field *descriptor.FieldDescriptorProto) bool {
	if !field.IsMessage() {
		return false
	}
	// A nil or non-Descriptor object fails the type assertion, so the
	// separate nil check of the original is subsumed here.
	desc, ok := g.ObjectNamed(field.GetTypeName()).(*Descriptor)
	return ok && desc.GetOptions().GetMapEntry()
}
// GetMapKeyField returns the key field descriptor of a map entry. When the
// map field carries a gogoproto castkey option, the option is copied onto a
// cloned descriptor as a casttype; the original is never mutated.
func (g *Generator) GetMapKeyField(field, keyField *descriptor.FieldDescriptorProto) *descriptor.FieldDescriptorProto {
	if !gogoproto.IsCastKey(field) {
		return keyField
	}
	// Work on a clone so the shared descriptor tree stays untouched.
	keyField = proto.Clone(keyField).(*descriptor.FieldDescriptorProto)
	if keyField.Options == nil {
		keyField.Options = &descriptor.FieldOptions{}
	}
	keyType := gogoproto.GetCastKey(field)
	if err := proto.SetExtension(keyField.Options, gogoproto.E_Casttype, &keyType); err != nil {
		g.Fail(err.Error())
	}
	return keyField
}
// GetMapValueField returns the value field descriptor of a map entry with
// the map field's gogoproto options (stdtime, stdduration, casttype,
// customtype, nullable) copied onto a cloned descriptor. The original
// descriptor is never mutated. customtype and casttype together are rejected.
func (g *Generator) GetMapValueField(field, valField *descriptor.FieldDescriptorProto) *descriptor.FieldDescriptorProto {
	if gogoproto.IsCustomType(field) && gogoproto.IsCastValue(field) {
		g.Fail("cannot have a customtype and casttype: ", field.String())
	}
	// Work on a clone so the shared descriptor tree stays untouched.
	valField = proto.Clone(valField).(*descriptor.FieldDescriptorProto)
	if valField.Options == nil {
		valField.Options = &descriptor.FieldOptions{}
	}
	stdtime := gogoproto.IsStdTime(field)
	if stdtime {
		if err := proto.SetExtension(valField.Options, gogoproto.E_Stdtime, &stdtime); err != nil {
			g.Fail(err.Error())
		}
	}
	stddur := gogoproto.IsStdDuration(field)
	if stddur {
		if err := proto.SetExtension(valField.Options, gogoproto.E_Stdduration, &stddur); err != nil {
			g.Fail(err.Error())
		}
	}
	if valType := gogoproto.GetCastValue(field); len(valType) > 0 {
		if err := proto.SetExtension(valField.Options, gogoproto.E_Casttype, &valType); err != nil {
			g.Fail(err.Error())
		}
	}
	if valType := gogoproto.GetCustomType(field); len(valType) > 0 {
		if err := proto.SetExtension(valField.Options, gogoproto.E_Customtype, &valType); err != nil {
			g.Fail(err.Error())
		}
	}
	// nullable is always propagated, whether true or false.
	nullable := gogoproto.IsNullable(field)
	if err := proto.SetExtension(valField.Options, gogoproto.E_Nullable, &nullable); err != nil {
		g.Fail(err.Error())
	}
	return valField
}
// GoMapValueTypes returns the map value Go type and the alias map value Go
// type (for casting), taking into account whether the map is nullable or the
// value is a message.
func GoMapValueTypes(mapField, valueField *descriptor.FieldDescriptorProto, goValueType, goValueAliasType string) (nullable bool, outGoType string, outGoAliasType string) {
	nullable = gogoproto.IsNullable(mapField) && (valueField.IsMessage() || gogoproto.IsCustomType(mapField))
	if !nullable {
		// Values are stored by value: strip a single pointer marker, if present.
		return false, strings.Replace(goValueType, "*", "", 1), strings.Replace(goValueAliasType, "*", "", 1)
	}
	// Nullable values are stored behind a pointer; ensure the non-aliased type
	// carries exactly one leading "*" for consistency.
	outGoType = goValueType
	if !strings.HasPrefix(outGoType, "*") {
		outGoType = "*" + outGoType
	}
	return true, outGoType, goValueAliasType
}
// GoTypeToName strips all pointer ("*") and slice ("[]") markers from a Go
// type string, leaving the bare (possibly package-qualified) type name.
func GoTypeToName(goTyp string) string {
	withoutPointers := strings.Replace(goTyp, "*", "", -1)
	return strings.Replace(withoutPointers, "[]", "", -1)
}
// EmbedFieldName derives the field name used for an embedded field from its
// Go type: the bare type name with any package qualifier removed. Types with
// more than one dot are impossible for pkg.Type names and trigger a panic.
func EmbedFieldName(goTyp string) string {
	parts := strings.Split(GoTypeToName(goTyp), ".")
	switch len(parts) {
	case 1:
		return parts[0]
	case 2:
		return parts[1]
	}
	panic("unreachable")
}
// GeneratePlugin runs the single plugin p over every known file, appending a
// response file only for the files that were requested for generation.
func (g *Generator) GeneratePlugin(p Plugin) {
	plugins = []Plugin{p}
	p.Init(g)
	// Generate the output. The generator runs for every file, even the files
	// that we don't generate output for, so that we can collate the full list
	// of exported symbols to support public imports.
	genFileMap := make(map[*FileDescriptor]bool, len(g.genFiles))
	for _, file := range g.genFiles {
		genFileMap[file] = true
	}
	for _, file := range g.allFiles {
		g.Reset()
		g.writeOutput = genFileMap[file]
		g.generatePlugin(file, p)
		if !g.writeOutput {
			continue
		}
		g.Response.File = append(g.Response.File, &plugin.CodeGeneratorResponse_File{
			Name: proto.String(file.goFileName(g.pathType)),
			Content: proto.String(g.String()),
		})
	}
}
// generatePlugin generates the body for one file with plugin p, then
// prepends the header/imports and gofmt-formats the result in place.
func (g *Generator) generatePlugin(file *FileDescriptor, p Plugin) {
g.writtenImports = make(map[string]bool)
g.file = file
// Run the plugins before the imports so we know which imports are necessary.
p.Generate(file)
// Generate header and imports last, though they appear first in the output.
// Swap in a fresh buffer, emit header+imports, then append the saved body.
rem := g.Buffer
g.Buffer = new(bytes.Buffer)
g.generateHeader()
p.GenerateImports(g.file)
g.generateImports()
if !g.writeOutput {
return
}
g.Write(rem.Bytes())
// Reformat generated code.
contents := string(g.Buffer.Bytes())
fset := token.NewFileSet()
ast, err := parser.ParseFile(fset, "", g, parser.ParseComments)
if err != nil {
// Include the full generated source in the failure for debugging.
g.Fail("bad Go source code was generated:", contents, err.Error())
return
}
g.Reset()
err = (&printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8}).Fprint(g, fset, ast)
if err != nil {
g.Fail("generated Go source code could not be reformatted:", err.Error())
}
}
// GetCustomType exposes the (gogoproto.customtype) extension of a field:
// the import package (empty when unqualified), the aliased Go type to use,
// and any extension-decoding error from the proto library.
func GetCustomType(field *descriptor.FieldDescriptorProto) (packageName string, typ string, err error) {
return getCustomType(field)
}
// getCustomType reads the (gogoproto.customtype) extension from the field
// options and splits it into package and aliased type. When the field has no
// options, or the extension value is absent, empty strings are returned; a
// decoding error from the proto library is passed through.
func getCustomType(field *descriptor.FieldDescriptorProto) (packageName string, typ string, err error) {
	if field.Options == nil {
		return "", "", nil
	}
	var v interface{}
	v, err = proto.GetExtension(field.Options, gogoproto.E_Customtype)
	if err != nil {
		return "", "", err
	}
	name := v.(*string)
	if name == nil {
		return "", "", nil
	}
	packageName, typ = splitCPackageType(*name)
	return packageName, typ, nil
}
// splitCPackageType splits a dotted custom-type string ("a/b/pkg.Type") into
// its package path and a usable Go reference "alias.Type", where the alias is
// the package path with non-identifier characters mapped to underscores.
// A string with no dot is treated as a bare type in the current package.
func splitCPackageType(ctype string) (packageName string, typ string) {
	parts := strings.Split(ctype, ".")
	if len(parts) == 1 {
		return "", ctype
	}
	packageName = strings.Join(parts[:len(parts)-1], ".")
	alias := strings.Map(badToUnderscore, packageName)
	return packageName, alias + "." + parts[len(parts)-1]
}
// getCastType reads the (gogoproto.casttype) extension from the field options
// and splits it into package and aliased type, mirroring getCustomType. When
// the field has no options or the extension value is absent, empty strings
// are returned; a decoding error from the proto library is passed through.
func getCastType(field *descriptor.FieldDescriptorProto) (packageName string, typ string, err error) {
	if field.Options == nil {
		return "", "", nil
	}
	var v interface{}
	v, err = proto.GetExtension(field.Options, gogoproto.E_Casttype)
	if err != nil {
		return "", "", err
	}
	name := v.(*string)
	if name == nil {
		return "", "", nil
	}
	packageName, typ = splitCPackageType(*name)
	return packageName, typ, nil
}
// FileName converts a .proto file name into a CamelCase Go identifier:
// the base name with the ".proto" suffix removed and "-"/"." replaced by "_".
func FileName(file *FileDescriptor) string {
	name := path.Base(file.FileDescriptorProto.GetName())
	for _, sub := range []struct{ old, new string }{
		{".proto", ""},
		{"-", "_"},
		{".", "_"},
	} {
		name = strings.Replace(name, sub.old, sub.new, -1)
	}
	return CamelCase(name)
}
// AllFiles bundles every input file's descriptor into a FileDescriptorSet.
func (g *Generator) AllFiles() *descriptor.FileDescriptorSet {
	files := make([]*descriptor.FileDescriptorProto, 0, len(g.allFiles))
	for _, f := range g.allFiles {
		files = append(files, f.FileDescriptorProto)
	}
	return &descriptor.FileDescriptorSet{File: files}
}
// Path returns the source-info path recorded for this descriptor (d.path).
func (d *Descriptor) Path() string {
return d.path
}
// useTypes registers github.com/gogo/protobuf/types as a custom import and
// returns its mangled package alias (non-identifier characters replaced via
// badToUnderscore) for use in generated references.
func (g *Generator) useTypes() string {
pkg := strings.Map(badToUnderscore, "github.com/gogo/protobuf/types")
g.customImports = append(g.customImports, "github.com/gogo/protobuf/types")
return pkg
}
// GoPackageName returns the Go package name chosen for this file.
func (d *FileDescriptor) GoPackageName() string {
return string(d.packageName)
}
| disorganizer/brig | vendor/gx/ipfs/QmdxUuburamoF6zF9qjeQC4WYcWGbWuRmdLacMEsW8ioD8/gogo-protobuf/protoc-gen-gogo/generator/helper.go | GO | agpl-3.0 | 13,043 |
// Copyright (c) 2016 Mattermost, Inc. All Rights Reserved.
// See License.txt for license information.
package api
import (
"bufio"
"io"
"io/ioutil"
"net/http"
"os"
"path/filepath"
"strconv"
"strings"
"time"
l4g "github.com/alecthomas/log4go"
"github.com/gorilla/mux"
"github.com/mattermost/platform/einterfaces"
"github.com/mattermost/platform/model"
"github.com/mattermost/platform/store"
"github.com/mattermost/platform/utils"
"github.com/mssola/user_agent"
)
// InitAdmin wires all /admin API routes to their handlers. Authorization is
// layered: ApiUserRequired handlers re-check admin permission themselves,
// while ApiAdminSystemRequired enforces system-admin at the routing layer.
func InitAdmin() {
l4g.Debug(utils.T("api.admin.init.debug"))
BaseRoutes.Admin.Handle("/logs", ApiUserRequired(getLogs)).Methods("GET")
BaseRoutes.Admin.Handle("/audits", ApiUserRequired(getAllAudits)).Methods("GET")
BaseRoutes.Admin.Handle("/config", ApiUserRequired(getConfig)).Methods("GET")
BaseRoutes.Admin.Handle("/save_config", ApiUserRequired(saveConfig)).Methods("POST")
BaseRoutes.Admin.Handle("/reload_config", ApiUserRequired(reloadConfig)).Methods("GET")
BaseRoutes.Admin.Handle("/test_email", ApiUserRequired(testEmail)).Methods("POST")
BaseRoutes.Admin.Handle("/recycle_db_conn", ApiUserRequired(recycleDatabaseConnection)).Methods("GET")
BaseRoutes.Admin.Handle("/analytics/{id:[A-Za-z0-9]+}/{name:[A-Za-z0-9_]+}", ApiUserRequired(getAnalytics)).Methods("GET")
BaseRoutes.Admin.Handle("/analytics/{name:[A-Za-z0-9_]+}", ApiUserRequired(getAnalytics)).Methods("GET")
BaseRoutes.Admin.Handle("/save_compliance_report", ApiUserRequired(saveComplianceReport)).Methods("POST")
BaseRoutes.Admin.Handle("/compliance_reports", ApiUserRequired(getComplianceReports)).Methods("GET")
BaseRoutes.Admin.Handle("/download_compliance_report/{id:[A-Za-z0-9]+}", ApiUserRequiredTrustRequester(downloadComplianceReport)).Methods("GET")
BaseRoutes.Admin.Handle("/upload_brand_image", ApiAdminSystemRequired(uploadBrandImage)).Methods("POST")
BaseRoutes.Admin.Handle("/get_brand_image", ApiAppHandlerTrustRequester(getBrandImage)).Methods("GET")
BaseRoutes.Admin.Handle("/reset_mfa", ApiAdminSystemRequired(adminResetMfa)).Methods("POST")
BaseRoutes.Admin.Handle("/reset_password", ApiAdminSystemRequired(adminResetPassword)).Methods("POST")
BaseRoutes.Admin.Handle("/ldap_sync_now", ApiAdminSystemRequired(ldapSyncNow)).Methods("POST")
BaseRoutes.Admin.Handle("/saml_metadata", ApiAppHandler(samlMetadata)).Methods("GET")
BaseRoutes.Admin.Handle("/add_certificate", ApiAdminSystemRequired(addCertificate)).Methods("POST")
BaseRoutes.Admin.Handle("/remove_certificate", ApiAdminSystemRequired(removeCertificate)).Methods("POST")
BaseRoutes.Admin.Handle("/saml_cert_status", ApiAdminSystemRequired(samlCertificateStatus)).Methods("GET")
BaseRoutes.Admin.Handle("/cluster_status", ApiAdminSystemRequired(getClusterStatus)).Methods("GET")
}
// getLogs returns the local log file lines, plus logs gathered from the
// cluster when a cluster interface is registered, as a JSON string array.
func getLogs(c *Context, w http.ResponseWriter, r *http.Request) {
	if !c.HasSystemAdminPermissions("getLogs") {
		return
	}
	lines, err := GetLogs()
	if err != nil {
		c.Err = err
		return
	}
	if cluster := einterfaces.GetClusterInterface(); cluster != nil {
		clusterLines, clusterErr := cluster.GetLogs()
		if clusterErr != nil {
			c.Err = clusterErr
			return
		}
		lines = append(lines, clusterLines...)
	}
	w.Write([]byte(model.ArrayToJson(lines)))
}
// GetLogs reads the configured log file and returns its lines. When file
// logging is disabled it returns a single empty line so callers still get a
// non-empty payload.
func GetLogs() ([]string, *model.AppError) {
	var lines []string
	if utils.Cfg.LogSettings.EnableFile {
		file, err := os.Open(utils.GetLogFileLocation(utils.Cfg.LogSettings.FileLocation))
		if err != nil {
			return nil, model.NewLocAppError("getLogs", "api.admin.file_read_error", nil, err.Error())
		}
		defer file.Close()
		scanner := bufio.NewScanner(file)
		for scanner.Scan() {
			lines = append(lines, scanner.Text())
		}
		// bufio.Scanner reports I/O errors only via Err() after the loop;
		// without this check a failed read silently returned a truncated log.
		if err := scanner.Err(); err != nil {
			return nil, model.NewLocAppError("getLogs", "api.admin.file_read_error", nil, err.Error())
		}
	} else {
		lines = append(lines, "")
	}
	return lines, nil
}
// getClusterStatus returns per-node cluster info as JSON; an empty array
// when no cluster interface is registered.
func getClusterStatus(c *Context, w http.ResponseWriter, r *http.Request) {
	if !c.HasSystemAdminPermissions("getClusterStatus") {
		return
	}
	var infos []*model.ClusterInfo
	if cluster := einterfaces.GetClusterInterface(); cluster != nil {
		infos = cluster.GetClusterInfos()
	} else {
		infos = make([]*model.ClusterInfo, 0)
	}
	w.Write([]byte(model.ClusterInfosToJson(infos)))
}
// getAllAudits returns the most recent 200 audit entries across all users,
// honoring the client's ETag to avoid resending an unchanged payload.
func getAllAudits(c *Context, w http.ResponseWriter, r *http.Request) {
	if !c.HasSystemAdminPermissions("getAllAudits") {
		return
	}
	result := <-Srv.Store.Audit().Get("", 200)
	if result.Err != nil {
		c.Err = result.Err
		return
	}
	audits := result.Data.(model.Audits)
	etag := audits.Etag()
	if HandleEtag(etag, w, r) {
		return
	}
	if etag != "" {
		w.Header().Set(model.HEADER_ETAG_SERVER, etag)
	}
	w.Write([]byte(audits.ToJson()))
}
// getConfig returns the server configuration with secrets sanitized.
func getConfig(c *Context, w http.ResponseWriter, r *http.Request) {
	if !c.HasSystemAdminPermissions("getConfig") {
		return
	}
	// Round-trip through JSON to obtain a deep copy that can be sanitized
	// without mutating the live configuration.
	cfg := model.ConfigFromJson(strings.NewReader(utils.Cfg.ToJson()))
	cfg.Sanitize()
	w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
	w.Write([]byte(cfg.ToJson()))
}
// reloadConfig re-reads the configuration file from disk and restarts the
// email batching job so batching-related settings take effect immediately.
func reloadConfig(c *Context, w http.ResponseWriter, r *http.Request) {
if !c.HasSystemAdminPermissions("reloadConfig") {
return
}
utils.LoadConfig(utils.CfgFileName)
// start/restart email batching job if necessary
InitEmailBatching()
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
ReturnStatusOK(w)
}
// saveConfig validates and persists a full server configuration posted by a
// system admin, then reloads it so the new values take effect.
func saveConfig(c *Context, w http.ResponseWriter, r *http.Request) {
	// Fixed copy/paste: the permission check was labeled "getConfig", which
	// mis-attributed audit/log entries for this handler.
	if !c.HasSystemAdminPermissions("saveConfig") {
		return
	}
	cfg := model.ConfigFromJson(r.Body)
	if cfg == nil {
		c.SetInvalidParam("saveConfig", "config")
		return
	}
	cfg.SetDefaults()
	// Replace FAKE_SETTING placeholders with the real stored secrets before
	// validating and saving.
	utils.Desanitize(cfg)
	if err := cfg.IsValid(); err != nil {
		c.Err = err
		return
	}
	if err := utils.ValidateLdapFilter(cfg); err != nil {
		c.Err = err
		return
	}
	// Config changes are not propagated to other cluster nodes, so refuse
	// them while clustering is enabled.
	if *utils.Cfg.ClusterSettings.Enable {
		c.Err = model.NewLocAppError("saveConfig", "ent.cluster.save_config.error", nil, "")
		return
	}
	c.LogAudit("")
	//oldCfg := utils.Cfg
	utils.SaveConfig(utils.CfgFileName, cfg)
	utils.LoadConfig(utils.CfgFileName)
	// Future feature is to sync the configuration files
	// if einterfaces.GetClusterInterface() != nil {
	//	err := einterfaces.GetClusterInterface().ConfigChanged(cfg, oldCfg, true)
	//	if err != nil {
	//		c.Err = err
	//		return
	//	}
	// }
	// start/restart email batching job if necessary
	InitEmailBatching()
	rdata := map[string]string{}
	rdata["status"] = "OK"
	w.Write([]byte(model.MapToJson(rdata)))
}
// recycleDatabaseConnection swaps in a fresh SQL store, waits 20 seconds so
// in-flight requests on the old store can drain, then closes the old store.
func recycleDatabaseConnection(c *Context, w http.ResponseWriter, r *http.Request) {
if !c.HasSystemAdminPermissions("recycleDatabaseConnection") {
return
}
oldStore := Srv.Store
l4g.Warn(utils.T("api.admin.recycle_db_start.warn"))
Srv.Store = store.NewSqlStore()
// Grace period before closing the old store; requests that captured the old
// pointer may still be using it.
time.Sleep(20 * time.Second)
oldStore.Close()
l4g.Warn(utils.T("api.admin.recycle_db_end.warn"))
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
ReturnStatusOK(w)
}
// testEmail sends a test message to the requesting admin using the SMTP
// settings posted in the request body (not the saved settings), so admins can
// verify a configuration before saving it.
func testEmail(c *Context, w http.ResponseWriter, r *http.Request) {
if !c.HasSystemAdminPermissions("testEmail") {
return
}
cfg := model.ConfigFromJson(r.Body)
if cfg == nil {
c.SetInvalidParam("testEmail", "config")
return
}
if len(cfg.EmailSettings.SMTPServer) == 0 {
c.Err = model.NewLocAppError("testEmail", "api.admin.test_email.missing_server", nil, utils.T("api.context.invalid_param.app_error", map[string]interface{}{"Name": "SMTPServer"}))
return
}
// if the user hasn't changed their email settings, fill in the actual SMTP password so that
// the user can verify an existing SMTP connection
// (the client only ever sees FAKE_SETTING in place of the real password).
if cfg.EmailSettings.SMTPPassword == model.FAKE_SETTING {
if cfg.EmailSettings.SMTPServer == utils.Cfg.EmailSettings.SMTPServer &&
cfg.EmailSettings.SMTPPort == utils.Cfg.EmailSettings.SMTPPort &&
cfg.EmailSettings.SMTPUsername == utils.Cfg.EmailSettings.SMTPUsername {
cfg.EmailSettings.SMTPPassword = utils.Cfg.EmailSettings.SMTPPassword
} else {
// Server/port/username changed but the password placeholder was left in
// place: require the admin to re-enter the password.
c.Err = model.NewLocAppError("testEmail", "api.admin.test_email.reenter_password", nil, "")
return
}
}
// Send the test mail to the requesting admin's own address.
if result := <-Srv.Store.User().Get(c.Session.UserId); result.Err != nil {
c.Err = result.Err
return
} else {
if err := utils.SendMailUsingConfig(result.Data.(*model.User).Email, c.T("api.admin.test_email.subject"), c.T("api.admin.test_email.body"), cfg); err != nil {
c.Err = err
return
}
}
m := make(map[string]string)
m["SUCCESS"] = "true"
w.Write([]byte(model.MapToJson(m)))
}
// getComplianceReports lists all stored compliance reports. Requires the
// compliance feature to be enabled and licensed.
func getComplianceReports(c *Context, w http.ResponseWriter, r *http.Request) {
	if !c.HasSystemAdminPermissions("getComplianceReports") {
		return
	}
	if !*utils.Cfg.ComplianceSettings.Enable || !utils.IsLicensed || !*utils.License.Features.Compliance {
		c.Err = model.NewLocAppError("getComplianceReports", "ent.compliance.licence_disable.app_error", nil, "")
		return
	}
	result := <-Srv.Store.Compliance().GetAll()
	if result.Err != nil {
		c.Err = result.Err
		return
	}
	reports := result.Data.(model.Compliances)
	w.Write([]byte(reports.ToJson()))
}
// saveComplianceReport stores an ad-hoc compliance job for the requesting
// admin and kicks off its generation asynchronously.
func saveComplianceReport(c *Context, w http.ResponseWriter, r *http.Request) {
	// Fixed copy/paste: the permission check was labeled "getComplianceReports",
	// which mis-attributed audit/log entries for this handler.
	if !c.HasSystemAdminPermissions("saveComplianceReport") {
		return
	}
	if !*utils.Cfg.ComplianceSettings.Enable || !utils.IsLicensed || !*utils.License.Features.Compliance || einterfaces.GetComplianceInterface() == nil {
		c.Err = model.NewLocAppError("saveComplianceReport", "ent.compliance.licence_disable.app_error", nil, "")
		return
	}
	job := model.ComplianceFromJson(r.Body)
	if job == nil {
		c.SetInvalidParam("saveComplianceReport", "compliance")
		return
	}
	job.UserId = c.Session.UserId
	job.Type = model.COMPLIANCE_TYPE_ADHOC
	if result := <-Srv.Store.Compliance().Save(job); result.Err != nil {
		c.Err = result.Err
		return
	} else {
		job = result.Data.(*model.Compliance)
		// Report generation can be slow; run it in the background.
		go einterfaces.GetComplianceInterface().RunComplianceJob(job)
	}
	w.Write([]byte(job.ToJson()))
}
// downloadComplianceReport streams a finished compliance report zip to the
// requester, with download headers tuned per browser.
func downloadComplianceReport(c *Context, w http.ResponseWriter, r *http.Request) {
if !c.HasSystemAdminPermissions("downloadComplianceReport") {
return
}
if !*utils.Cfg.ComplianceSettings.Enable || !utils.IsLicensed || !*utils.License.Features.Compliance || einterfaces.GetComplianceInterface() == nil {
c.Err = model.NewLocAppError("downloadComplianceReport", "ent.compliance.licence_disable.app_error", nil, "")
return
}
params := mux.Vars(r)
id := params["id"]
// Mattermost IDs are fixed-length 26-character strings.
if len(id) != 26 {
c.SetInvalidParam("downloadComplianceReport", "id")
return
}
if result := <-Srv.Store.Compliance().Get(id); result.Err != nil {
c.Err = result.Err
return
} else {
job := result.Data.(*model.Compliance)
c.LogAudit("downloaded " + job.Desc)
// NOTE(review): the whole zip is read into memory before writing; large
// reports could be streamed instead.
if f, err := ioutil.ReadFile(*utils.Cfg.ComplianceSettings.Directory + "compliance/" + job.JobName() + ".zip"); err != nil {
c.Err = model.NewLocAppError("readFile", "api.file.read_file.reading_local.app_error", nil, err.Error())
return
} else {
w.Header().Set("Cache-Control", "max-age=2592000, public")
w.Header().Set("Content-Length", strconv.Itoa(len(f)))
w.Header().Del("Content-Type") // Content-Type will be set automatically by the http writer
// attach extra headers to trigger a download on IE, Edge, and Safari
ua := user_agent.New(r.UserAgent())
bname, _ := ua.Browser()
w.Header().Set("Content-Disposition", "attachment;filename=\""+job.JobName()+".zip\"")
if bname == "Edge" || bname == "Internet Explorer" || bname == "Safari" {
// trim off anything before the final / so we just get the file's name
w.Header().Set("Content-Type", "application/octet-stream")
}
w.Write(f)
}
}
}
// getAnalytics returns analytics rows for the named report ("standard",
// "post_counts_day", "user_counts_with_posts_day", or "extra_counts"),
// optionally scoped to a team via the {id} route parameter. Store queries are
// issued concurrently over channels and then collected in order.
func getAnalytics(c *Context, w http.ResponseWriter, r *http.Request) {
if !c.HasSystemAdminPermissions("getAnalytics") {
return
}
params := mux.Vars(r)
teamId := params["id"]
name := params["name"]
if name == "standard" {
// Channel/post/user/team totals.
var rows model.AnalyticsRows = make([]*model.AnalyticsRow, 5)
rows[0] = &model.AnalyticsRow{"channel_open_count", 0}
rows[1] = &model.AnalyticsRow{"channel_private_count", 0}
rows[2] = &model.AnalyticsRow{"post_count", 0}
rows[3] = &model.AnalyticsRow{"unique_user_count", 0}
rows[4] = &model.AnalyticsRow{"team_count", 0}
// Kick off all five queries before waiting on any of them.
openChan := Srv.Store.Channel().AnalyticsTypeCount(teamId, model.CHANNEL_OPEN)
privateChan := Srv.Store.Channel().AnalyticsTypeCount(teamId, model.CHANNEL_PRIVATE)
postChan := Srv.Store.Post().AnalyticsPostCount(teamId, false, false)
userChan := Srv.Store.User().AnalyticsUniqueUserCount(teamId)
teamChan := Srv.Store.Team().AnalyticsTeamCount()
if r := <-openChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[0].Value = float64(r.Data.(int64))
}
if r := <-privateChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[1].Value = float64(r.Data.(int64))
}
if r := <-postChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[2].Value = float64(r.Data.(int64))
}
if r := <-userChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[3].Value = float64(r.Data.(int64))
}
if r := <-teamChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[4].Value = float64(r.Data.(int64))
}
w.Write([]byte(rows.ToJson()))
} else if name == "post_counts_day" {
// Posts per day time series.
if r := <-Srv.Store.Post().AnalyticsPostCountsByDay(teamId); r.Err != nil {
c.Err = r.Err
return
} else {
w.Write([]byte(r.Data.(model.AnalyticsRows).ToJson()))
}
} else if name == "user_counts_with_posts_day" {
// Active posting users per day time series.
if r := <-Srv.Store.Post().AnalyticsUserCountsWithPostsByDay(teamId); r.Err != nil {
c.Err = r.Err
return
} else {
w.Write([]byte(r.Data.(model.AnalyticsRows).ToJson()))
}
} else if name == "extra_counts" {
// File/hashtag posts, webhooks, commands, and session totals.
var rows model.AnalyticsRows = make([]*model.AnalyticsRow, 6)
rows[0] = &model.AnalyticsRow{"file_post_count", 0}
rows[1] = &model.AnalyticsRow{"hashtag_post_count", 0}
rows[2] = &model.AnalyticsRow{"incoming_webhook_count", 0}
rows[3] = &model.AnalyticsRow{"outgoing_webhook_count", 0}
rows[4] = &model.AnalyticsRow{"command_count", 0}
rows[5] = &model.AnalyticsRow{"session_count", 0}
fileChan := Srv.Store.Post().AnalyticsPostCount(teamId, true, false)
hashtagChan := Srv.Store.Post().AnalyticsPostCount(teamId, false, true)
iHookChan := Srv.Store.Webhook().AnalyticsIncomingCount(teamId)
oHookChan := Srv.Store.Webhook().AnalyticsOutgoingCount(teamId)
commandChan := Srv.Store.Command().AnalyticsCommandCount(teamId)
sessionChan := Srv.Store.Session().AnalyticsSessionCount()
if r := <-fileChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[0].Value = float64(r.Data.(int64))
}
if r := <-hashtagChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[1].Value = float64(r.Data.(int64))
}
if r := <-iHookChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[2].Value = float64(r.Data.(int64))
}
if r := <-oHookChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[3].Value = float64(r.Data.(int64))
}
if r := <-commandChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[4].Value = float64(r.Data.(int64))
}
if r := <-sessionChan; r.Err != nil {
c.Err = r.Err
return
} else {
rows[5].Value = float64(r.Data.(int64))
}
w.Write([]byte(rows.ToJson()))
} else {
c.SetInvalidParam("getAnalytics", "name")
}
}
// uploadBrandImage stores a custom login-page brand image posted as the
// "image" field of a multipart form. Admin access is enforced by the route
// (ApiAdminSystemRequired).
func uploadBrandImage(c *Context, w http.ResponseWriter, r *http.Request) {
if len(utils.Cfg.FileSettings.DriverName) == 0 {
c.Err = model.NewLocAppError("uploadBrandImage", "api.admin.upload_brand_image.storage.app_error", nil, "")
c.Err.StatusCode = http.StatusNotImplemented
return
}
// Reject oversized uploads before parsing the form.
if r.ContentLength > *utils.Cfg.FileSettings.MaxFileSize {
c.Err = model.NewLocAppError("uploadBrandImage", "api.admin.upload_brand_image.too_large.app_error", nil, "")
c.Err.StatusCode = http.StatusRequestEntityTooLarge
return
}
if err := r.ParseMultipartForm(*utils.Cfg.FileSettings.MaxFileSize); err != nil {
c.Err = model.NewLocAppError("uploadBrandImage", "api.admin.upload_brand_image.parse.app_error", nil, "")
return
}
m := r.MultipartForm
imageArray, ok := m.File["image"]
if !ok {
c.Err = model.NewLocAppError("uploadBrandImage", "api.admin.upload_brand_image.no_file.app_error", nil, "")
c.Err.StatusCode = http.StatusBadRequest
return
}
if len(imageArray) <= 0 {
c.Err = model.NewLocAppError("uploadBrandImage", "api.admin.upload_brand_image.array.app_error", nil, "")
c.Err.StatusCode = http.StatusBadRequest
return
}
// Brand image handling is provided by an enterprise interface.
brandInterface := einterfaces.GetBrandInterface()
if brandInterface == nil {
c.Err = model.NewLocAppError("uploadBrandImage", "api.admin.upload_brand_image.not_available.app_error", nil, "")
c.Err.StatusCode = http.StatusNotImplemented
return
}
// Only the first uploaded file is used.
if err := brandInterface.SaveBrandImage(imageArray[0]); err != nil {
c.Err = err
return
}
c.LogAudit("")
rdata := map[string]string{}
rdata["status"] = "OK"
w.Write([]byte(model.MapToJson(rdata)))
}
// getBrandImage serves the stored brand image as PNG. When no image can be
// retrieved it writes an empty body with a 200 status.
func getBrandImage(c *Context, w http.ResponseWriter, r *http.Request) {
if len(utils.Cfg.FileSettings.DriverName) == 0 {
c.Err = model.NewLocAppError("getBrandImage", "api.admin.get_brand_image.storage.app_error", nil, "")
c.Err.StatusCode = http.StatusNotImplemented
return
}
brandInterface := einterfaces.GetBrandInterface()
if brandInterface == nil {
c.Err = model.NewLocAppError("getBrandImage", "api.admin.get_brand_image.not_available.app_error", nil, "")
c.Err.StatusCode = http.StatusNotImplemented
return
}
// NOTE(review): the retrieval error is intentionally discarded here and an
// empty body returned — presumably "no brand image configured" is expected;
// confirm real failures should not surface to the client.
if img, err := brandInterface.GetBrandImage(); err != nil {
w.Write(nil)
} else {
w.Header().Set("Content-Type", "image/png")
w.Write(img)
}
}
// adminResetMfa disables multi-factor auth for the user id posted in the
// request body. Admin access is enforced by the route.
func adminResetMfa(c *Context, w http.ResponseWriter, r *http.Request) {
	userId := model.MapFromJson(r.Body)["user_id"]
	if len(userId) != 26 {
		c.SetInvalidParam("adminResetMfa", "user_id")
		return
	}
	if err := DeactivateMfa(userId); err != nil {
		c.Err = err
		return
	}
	c.LogAudit("")
	w.Write([]byte(model.MapToJson(map[string]string{"status": "ok"})))
}
// adminResetPassword sets a new password for the user id posted in the
// request body, after validating it against the password policy. Admin
// access is enforced by the route.
func adminResetPassword(c *Context, w http.ResponseWriter, r *http.Request) {
	props := model.MapFromJson(r.Body)
	userId := props["user_id"]
	if len(userId) != 26 {
		c.SetInvalidParam("adminResetPassword", "user_id")
		return
	}
	newPassword := props["new_password"]
	if err := utils.IsPasswordValid(newPassword); err != nil {
		c.Err = err
		return
	}
	if err := ResetPassword(c, userId, newPassword); err != nil {
		c.Err = err
		return
	}
	c.LogAudit("")
	w.Write([]byte(model.MapToJson(map[string]string{"status": "ok"})))
}
// ldapSyncNow triggers an asynchronous LDAP synchronization and returns
// immediately; the sync result is not reported to the caller.
func ldapSyncNow(c *Context, w http.ResponseWriter, r *http.Request) {
	go func() {
		if utils.IsLicensed && *utils.License.Features.LDAP && *utils.Cfg.LdapSettings.Enable {
			if ldapI := einterfaces.GetLdapInterface(); ldapI != nil {
				ldapI.SyncNow()
			} else {
				// Fixed copy/paste: this error was attributed to
				// "saveComplianceReport". NOTE(review): the i18n key still
				// references compliance — confirm an LDAP-specific key exists
				// before swapping it too.
				l4g.Error("%v", model.NewLocAppError("ldapSyncNow", "ent.compliance.licence_disable.app_error", nil, "").Error())
			}
		}
	}()
	rdata := map[string]string{}
	rdata["status"] = "ok"
	w.Write([]byte(model.MapToJson(rdata)))
}
// samlMetadata serves the service-provider SAML metadata as a downloadable
// XML attachment.
func samlMetadata(c *Context, w http.ResponseWriter, r *http.Request) {
samlInterface := einterfaces.GetSamlInterface()
if samlInterface == nil {
c.Err = model.NewLocAppError("loginWithSaml", "api.admin.saml.not_available.app_error", nil, "")
// NOTE(review): StatusFound (302) is an unusual status for "feature not
// available" — confirm this is intentional (the other handlers use 501).
c.Err.StatusCode = http.StatusFound
return
}
if result, err := samlInterface.GetMetadata(); err != nil {
c.Err = model.NewLocAppError("loginWithSaml", "api.admin.saml.metadata.app_error", nil, "err="+err.Message)
return
} else {
w.Header().Set("Content-Type", "application/xml")
w.Header().Set("Content-Disposition", "attachment; filename=\"metadata.xml\"")
w.Write([]byte(result))
}
}
// addCertificate saves an uploaded SAML certificate (multipart field
// "certificate") into the config directory. Admin access is enforced by the
// route.
func addCertificate(c *Context, w http.ResponseWriter, r *http.Request) {
	err := r.ParseMultipartForm(*utils.Cfg.FileSettings.MaxFileSize)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	m := r.MultipartForm
	fileArray, ok := m.File["certificate"]
	if !ok {
		c.Err = model.NewLocAppError("addCertificate", "api.admin.add_certificate.no_file.app_error", nil, "")
		c.Err.StatusCode = http.StatusBadRequest
		return
	}
	if len(fileArray) <= 0 {
		c.Err = model.NewLocAppError("addCertificate", "api.admin.add_certificate.array.app_error", nil, "")
		c.Err.StatusCode = http.StatusBadRequest
		return
	}
	fileData := fileArray[0]
	file, err := fileData.Open()
	if err != nil {
		c.Err = model.NewLocAppError("addCertificate", "api.admin.add_certificate.open.app_error", nil, err.Error())
		return
	}
	// Moved after the error check: deferring Close on a failed Open would
	// dereference a nil file handle.
	defer file.Close()
	// fileData.Filename is client-controlled; take only its base name so a
	// crafted name like "../../etc/x" cannot escape the config directory.
	out, err := os.Create(utils.FindDir("config") + filepath.Base(fileData.Filename))
	if err != nil {
		c.Err = model.NewLocAppError("addCertificate", "api.admin.add_certificate.saving.app_error", nil, err.Error())
		return
	}
	defer out.Close()
	// Surface copy failures (disk full, broken pipe) instead of ignoring them.
	if _, err := io.Copy(out, file); err != nil {
		c.Err = model.NewLocAppError("addCertificate", "api.admin.add_certificate.saving.app_error", nil, err.Error())
		return
	}
	ReturnStatusOK(w)
}
// removeCertificate deletes the named certificate file from the config
// directory. Admin access is enforced by the route.
func removeCertificate(c *Context, w http.ResponseWriter, r *http.Request) {
	filename := model.MapFromJson(r.Body)["filename"]
	if err := os.Remove(utils.FindConfigFile(filename)); err != nil {
		c.Err = model.NewLocAppError("removeCertificate", "api.admin.remove_certificate.delete.app_error",
			map[string]interface{}{"Filename": filename}, err.Error())
		return
	}
	ReturnStatusOK(w)
}
// samlCertificateStatus reports whether each configured SAML certificate/key
// file is present in the config folder, as a JSON object of booleans.
func samlCertificateStatus(c *Context, w http.ResponseWriter, r *http.Request) {
	saml := utils.Cfg.SamlSettings
	status := map[string]interface{}{
		"IdpCertificateFile":    utils.FileExistsInConfigFolder(*saml.IdpCertificateFile),
		"PrivateKeyFile":        utils.FileExistsInConfigFolder(*saml.PrivateKeyFile),
		"PublicCertificateFile": utils.FileExistsInConfigFolder(*saml.PublicCertificateFile),
	}
	w.Write([]byte(model.StringInterfaceToJson(status)))
}
| daizenberg/platform | api/admin.go | GO | agpl-3.0 | 21,623 |
// (c) Copyright Fernando Luis Cacciola Carballal 2000-2004
// Use, modification, and distribution is subject to the Boost Software
// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// See library home page at http://www.boost.org/libs/numeric/conversion
//
// Contact the author at: fernando_cacciola@hotmail.com
//
//
// Revision History
//
// 19 Nov 2001 Syntatic changes as suggested by Darin Adler (Fernando Cacciola)
// 08 Nov 2001 Fixes to accommodate MSVC (Fernando Cacciola)
// 04 Nov 2001 Fixes to accommodate gcc2.92 (Fernando Cacciola)
// 30 Oct 2001 Some fixes suggested by Daryle Walker (Fernando Cacciola)
// 25 Oct 2001 Initial boostification (Fernando Cacciola)
// 23 Jan 2004 Inital add to cvs (post review)s
// 22 Jun 2011 Added support for specializing cast policies via numeric_cast_traits (Brandon Kohn).
//
#ifndef BOOST_NUMERIC_CONVERSION_CAST_25OCT2001_HPP
#define BOOST_NUMERIC_CONVERSION_CAST_25OCT2001_HPP
#include <boost/detail/workaround.hpp>
#if BOOST_WORKAROUND(BOOST_MSVC, < 1300) || BOOST_WORKAROUND(__BORLANDC__, BOOST_TESTED_AT(0x582))
# include<boost/numeric/conversion/detail/old_numeric_cast.hpp>
#else
#include <boost/type.hpp>
#include <boost/numeric/conversion/converter.hpp>
#include <boost/numeric/conversion/numeric_cast_traits.hpp>
namespace abt_boost{} namespace boost = abt_boost; namespace abt_boost{
// numeric_cast<Target>(source): checked numeric conversion.
// Overflow, rounding, and range-checking policies are resolved at compile
// time from numeric_cast_traits<Target, Source>, so behaviour can be
// customised per type pair by specialising those traits (see the 2011 entry
// in the revision log above).
template <typename Target, typename Source>
inline Target numeric_cast( Source arg )
{
typedef numeric::conversion_traits<Target, Source> conv_traits;
typedef numeric::numeric_cast_traits<Target, Source> cast_traits;
// Assemble the converter from the trait-selected policies.
typedef abt_boost::numeric::converter
<
Target,
Source,
conv_traits,
typename cast_traits::overflow_policy,
typename cast_traits::rounding_policy,
abt_boost::numeric::raw_converter< conv_traits >,
typename cast_traits::range_checking_policy
> converter;
return converter::convert(arg);
}
using numeric::bad_numeric_cast;
} // namespace abt_boost
#endif
#endif
| jbruestle/aggregate_btree | tiny_boost/boost/numeric/conversion/cast.hpp | C++ | agpl-3.0 | 2,229 |
/*
Copyright 2011-2012 Frederic Menou and others referred in AUTHORS file.
This file is part of Magrit.
Magrit is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
Magrit is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public
License along with Magrit.
If not, see <http://www.gnu.org/licenses/>.
*/
package org.kercoin.magrit.sshd;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import org.apache.sshd.SshServer;
import org.apache.sshd.common.NamedFactory;
import org.apache.sshd.common.util.SecurityUtils;
import org.apache.sshd.server.CommandFactory;
import org.apache.sshd.server.ForwardingFilter;
import org.apache.sshd.server.PublickeyAuthenticator;
import org.apache.sshd.server.UserAuth;
import org.apache.sshd.server.auth.UserAuthNone;
import org.apache.sshd.server.auth.UserAuthPublicKey;
import org.apache.sshd.server.keyprovider.PEMGeneratorHostKeyProvider;
import org.apache.sshd.server.keyprovider.SimpleGeneratorHostKeyProvider;
import org.apache.sshd.server.session.ServerSession;
import org.kercoin.magrit.core.Configuration;
import org.kercoin.magrit.core.Context;
import org.kercoin.magrit.core.Configuration.Authentication;
import org.kercoin.magrit.core.services.Service;
import org.kercoin.magrit.core.services.ServiceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
@Singleton
/**
 * Embedded SSH daemon exposing Magrit commands over SSH, built on Apache MINA
 * SSHD. The constructor fully configures the server (host key, authentication,
 * command factory, session policy, forwarding restrictions); {@link #start()}
 * only binds the port.
 */
public class Server implements Service.UseTCP {
protected final Logger log = LoggerFactory.getLogger(getClass());
private SshServer sshd;
// TCP port read once from configuration at construction time.
private final int port;
@Inject
public Server(final Context ctx, CommandFactory factory) {
port = ctx.configuration().getSshPort();
sshd = SshServer.setUpDefaultServer();
// Prefer a PEM host key when BouncyCastle is available; otherwise fall back
// to Java serialization of the generated key pair.
if (SecurityUtils.isBouncyCastleRegistered()) {
sshd.setKeyPairProvider(new PEMGeneratorHostKeyProvider("key.pem"));
} else {
sshd.setKeyPairProvider(new SimpleGeneratorHostKeyProvider("key.ser"));
}
// Public-key authentication is only wired up when configured; otherwise
// setupUserAuth falls back to "none" authentication.
PublickeyAuthenticator auth = null;
if (ctx.configuration().getAuthentication() == Configuration.Authentication.SSH_PUBLIC_KEYS) {
auth = ctx.getInjector().getInstance(PublickeyAuthenticator.class);
}
setupUserAuth(auth);
sshd.setCommandFactory(factory);
// Optionally restrict sessions to local connections.
if (!ctx.configuration().isRemoteAllowed()) {
sshd.setSessionFactory(new LocalOnlySessionFactory());
}
// Disable every form of SSH forwarding (agent, X11, port listening and
// outbound connections).
sshd.setForwardingFilter(new ForwardingFilter() {
public boolean canForwardAgent(ServerSession session) {
return false;
}
public boolean canForwardX11(ServerSession session) {
return false;
}
public boolean canListen(InetSocketAddress address, ServerSession session) {
return false;
}
public boolean canConnect(InetSocketAddress address, ServerSession session) {
return false;
}
});
}
/**
 * Installs the user-auth factories: public-key auth when an authenticator is
 * supplied, otherwise anonymous ("none") auth.
 *
 * @param auth the public-key authenticator, or {@code null} to allow all users
 */
private void setupUserAuth(PublickeyAuthenticator auth) {
List<NamedFactory<UserAuth>> list = new ArrayList<NamedFactory<UserAuth>>();
if (auth != null) {
list.add(new UserAuthPublicKey.Factory());
sshd.setPublickeyAuthenticator(auth);
} else {
list.add(new UserAuthNone.Factory());
}
sshd.setUserAuthFactories(list);
}
/** Binds the configured port and starts accepting SSH connections. */
@Override
public void start() throws ServiceException {
sshd.setPort(port);
try {
sshd.start();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public String getName() {
return "SSH Service";
}
@Override
public int getTCPPort() {
return port;
}
/** Logs the effective SSH-related configuration at startup. */
@Override
public void logConfig(ConfigurationLogger log, Configuration cfg) {
log.logKey("SSHd", cfg.getSshPort());
log.logKey("Listening", cfg.isRemoteAllowed() ? "everybody" : "localhost");
log.logKey("Authent", cfg.getAuthentication().external());
if (cfg.getAuthentication() == Authentication.SSH_PUBLIC_KEYS) {
log.logSubKey("Keys dir", cfg.getPublickeyRepositoryDir());
}
log.logKey("Home dir", cfg.getRepositoriesHomeDir());
log.logKey("Work dir", cfg.getWorkHomeDir());
}
}
| ptitfred/magrit | server/sshd/src/main/java/org/kercoin/magrit/sshd/Server.java | Java | agpl-3.0 | 4,603 |
/**
* BLOCK: blocks
*
* Registering a basic block with Gutenberg.
* Simple block, renders and saves the same content without any interactivity.
*/
import "./editor.scss";
import "./style.scss";
import React from "react";
import Select from "react-select";
const {
PanelBody,
PanelRow,
ServerSideRender,
TextControl,
SelectControl
} = wp.components;
var el = wp.element.createElement;
const { InspectorControls } = wp.editor;
const { __ } = wp.i18n; // Import __() from wp.i18n
const { registerBlockType } = wp.blocks; // Import registerBlockType() from wp.blocks
/**
* Register: a Gutenberg Block.
*
* Registers a new block provided a unique name and an object defining its
* behavior. Once registered, the block is made available as an option in any
* editor interface where blocks are implemented.
*
* @link https://wordpress.org/gutenberg/handbook/block-api/
* @param {string} name Block name.
* @param {Object} settings Block settings.
* @return {?WPBlock} The block, if it has been successfully
* registered; otherwise `undefined`.
*/
registerBlockType("bos/badgeos-evidence-block", {
// Block name. Block names must be string that contains a namespace prefix. Example: my-plugin/my-custom-block.
title: __("Evidence - block"), // Block title.
icon: "shield", // Block icon from Dashicons → https://developer.wordpress.org/resource/dashicons/.
category: "badgeos-blocks", // Block category — Group blocks together based on common traits E.g. common, formatting, layout widgets, embed.
keywords: [
__("Evidence - block"),
__("block"),
__("Evidence")
],
supports: {
// Turn off ability to edit HTML of block content
html: false,
// Turn off reusable block feature
reusable: false,
// Add alignwide and alignfull options
align: false
},
attributes: {
achievement: {
type: "string",
default: ""
},
user_id: {
type: "string",
default: ""
},
award_id: {
type: "string",
default: ""
},
},
/**
* The edit function describes the structure of your block in the context of the editor.
* This represents what the editor will render when the block is used.
*
* The "edit" property must be a valid function.
*
* @link https://wordpress.org/gutenberg/handbook/block-api/block-edit-save/
*
* @param {Object} props Props.
* @returns {Mixed} JSX Component.
*/
edit: props => {
const {
attributes: {
achievement,
user_id,
award_id,
},
setAttributes
} = props;
let achievements_list = [];
let entries = [];
let user_lists = [];
wp.apiFetch({ path: `badgeos/block-achievements-award-list/0/0`, method: 'GET' }).then(posts =>
posts.map(function (post) {
console.log(post);
entries.push(post);
})
);
wp.apiFetch({ path: "badgeos/achievements" }).then(posts =>
posts.map(function (post) {
achievements_list.push(post);
})
);
wp.apiFetch({ path: "badgeos/user-lists" }).then(posts =>
posts.map(function (post) {
user_lists.push(post);
})
);
let selectedAwardId = [];
if (null !== award_id && award_id != "") {
selectedAwardId = JSON.parse(award_id);
}
function handleAwardChange(award_id) {
props.setAttributes({ award_id: JSON.stringify(award_id) });
}
function loadawardids() {
entries = [];
var achievement_val = 0;
if (achievement) {
var achievement_array = JSON.parse(achievement)
achievement_val = achievement_array.value;
}
var user_id_val = 0;
if (user_id) {
var user_array = JSON.parse(user_id);
user_id_val = user_array.value;
}
wp.apiFetch({ path: "badgeos/block-achievements-award-list/" + achievement_val + "/" + user_id_val + "", method: 'GET' }).then(posts =>
posts.map(function (post) {
entries.push(post);
})
);
}
let selectedUser = [];
if (null !== user_id && user_id != "") {
selectedUser = JSON.parse(user_id);
}
function handleUserChange(user_id) {
props.setAttributes({ user_id: JSON.stringify(user_id) });
loadawardids();
}
let selectedAchievement = [];
if (null !== achievement && achievement != "") {
selectedAchievement = JSON.parse(achievement);
}
function handleAchievementChange(achievement_val) {
props.setAttributes({
achievement: JSON.stringify(achievement_val)
});
loadawardids();
}
// Creates a <p class='wp-block-bos-block-blocks'></p>.
return [
el("div", {
className: "badgeos-editor-container",
style: { textAlign: "center" }
},
el(ServerSideRender, {
block: 'bos/badgeos-evidence-block',
attributes: props.attributes
})
),
<InspectorControls>
<PanelBody
title={__("Achievement", "badgeos")}
className="bos-block-inspector"
>
<PanelRow>
<label
htmlFor="bos-block-roles"
className="bos-block-inspector__label"
>
{__("Achievement", "badgeos")}
</label>
</PanelRow>
<PanelRow>
<Select
className="bos-block-inspector__control"
name="bos-achievement-types"
value={selectedAchievement}
onChange={handleAchievementChange}
options={achievements_list}
menuPlacement="auto"
/>
</PanelRow>
<PanelRow>
<label
htmlFor="bos-block-roles"
className="bos-block-inspector__label"
>
{__("User", "badgeos")}
</label>
</PanelRow>
<PanelRow>
<Select
className="bos-block-inspector__control"
name="bos-achievement-types"
value={selectedUser}
onChange={handleUserChange}
options={user_lists}
menuPlacement="auto"
/>
</PanelRow>
<PanelRow>
<label
htmlFor="bos-block-roles"
className="bos-block-inspector__label"
>
{__("Award Id", "badgeos")}
</label>
</PanelRow>
<PanelRow>
<Select
className="bos-block-inspector__control"
name="bos-achievement-types"
value={selectedAwardId}
onChange={handleAwardChange}
options={entries}
menuPlacement="auto"
/>
</PanelRow>
</PanelBody>
</InspectorControls>
];
},
/**
* The save function defines the way in which the different attributes should be combined
* into the final markup, which is then serialized by Gutenberg into post_content.
*
* The "save" property must be specified and must be a valid function.
*
* @link https://wordpress.org/gutenberg/handbook/block-api/block-edit-save/
*
* @param {Object} props Props.
* @returns {Mixed} JSX Frontend HTML.
*/
save: props => {
return <div>Content</div>;
}
});
| opencredit/badgeos | includes/blocks/src/evidence-block/block.js | JavaScript | agpl-3.0 | 6,640 |
<?php
/**
 * Sidebar template: renders the widgetized sidebar as a <ul> whose CSS
 * class comes from the theme's "sidebar" option.
 *
 * @package WordPress
 * @subpackage Fast_Blog_Theme
 * @since Fast Blog 1.0
 */
?>
<!-- Sidebar -->
<ul id="sidebar" class="<?php fastblog_option('sidebar'); ?>">
<?php dynamic_sidebar('sidebar'); ?>
</ul>
<!-- // Sidebar -->
# -*- coding: utf-8 -*-
from openerp import models, fields
class AccountBankStatementLine(models.Model):
    """Extension of ``account.bank.statement.line``.

    Relabels the standard ``name`` field as "Memo" and makes it optional,
    defaulting to an empty string instead of being a required label.
    """

    _inherit = "account.bank.statement.line"

    # Same field, same storage — only the label/required/default change.
    name = fields.Char(default="", required=False, string='Memo')
| houssine78/addons | account_bank_statement_line_memo/models/models.py | Python | agpl-3.0 | 251 |
# -*- coding: utf-8 -*-
# Etalage -- Open Data POIs portal
# By: Emmanuel Raviart <eraviart@easter-eggs.com>
#
# Copyright (C) 2011, 2012 Easter-eggs
# http://gitorious.org/infos-pratiques/etalage
#
# This file is part of Etalage.
#
# Etalage is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Etalage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Context loaded and saved in WSGI requests"""
import gettext
import webob
from . import conf
__all__ = ['Ctx', 'null_ctx']
class Ctx(object):
    """Request context with prototype-style attribute inheritance.

    Attribute lookup order (see ``__getattribute__``): the instance itself,
    then the chain of ``_parent`` contexts, then ``default_values``.
    Selected attributes (``env_keys``) are mirrored into the WSGI environ so
    they survive across sub-requests.
    """

    # Parent context in the inheritance chain (None for a root context).
    _parent = None
    # Fallback values used when neither this context nor any ancestor
    # defines the attribute.  UnboundLocalError is used as a sentinel.
    default_values = dict(
        _lang = None,
        _scopes = UnboundLocalError,
        _translator = None,
        base_categories_slug = None,
        category_tags_slug = None,
        container_base_url = None,
        distance = None, # Max distance in km
        gadget_id = None,
        hide_directory = False,
        req = None,
        subscriber = None,
        )
    # Attributes persisted in req.environ['etalage'].
    env_keys = ('_lang', '_scopes', '_translator')

    def __init__(self, req = None):
        """Bind to a WSGI request and restore persisted env_keys values."""
        if req is not None:
            self.req = req
            etalage_env = req.environ.get('etalage', {})
            for key in object.__getattribute__(self, 'env_keys'):
                value = etalage_env.get(key)
                if value is not None:
                    setattr(self, key, value)

    def __getattribute__(self, name):
        # Try the instance first, then fall back to the parent chain, then
        # to default_values (only consulted at the root of the chain).
        try:
            return object.__getattribute__(self, name)
        except AttributeError:
            parent = object.__getattribute__(self, '_parent')
            if parent is None:
                default_values = object.__getattribute__(self, 'default_values')
                if name in default_values:
                    return default_values[name]
                raise
            return getattr(parent, name)

    @property
    def _(self):
        """Shortcut: gettext translation function for the current languages."""
        return self.translator.ugettext

    def blank_req(self, path, environ = None, base_url = None, headers = None, POST = None, **kw):
        """Build a blank webob request that inherits this context's env_keys."""
        env = environ.copy() if environ else {}
        etalage_env = env.setdefault('etalage', {})
        for key in self.env_keys:
            value = getattr(self, key)
            if value is not None:
                etalage_env[key] = value
        return webob.Request.blank(path, environ = env, base_url = base_url, headers = headers, POST = POST, **kw)

    def get_containing(self, name, depth = 0):
        """Return the n-th (n = ``depth``) context containing attribute named ``name``."""
        ctx_dict = object.__getattribute__(self, '__dict__')
        if name in ctx_dict:
            if depth <= 0:
                return self
            depth -= 1
        parent = ctx_dict.get('_parent')
        if parent is None:
            return None
        return parent.get_containing(name, depth = depth)

    def get_inherited(self, name, default = UnboundLocalError, depth = 1):
        """Return ``name`` from the depth-th containing context, or ``default``.

        Raises AttributeError when no such context exists and no default is
        given (UnboundLocalError acts as the "no default" sentinel).
        """
        ctx = self.get_containing(name, depth = depth)
        if ctx is None:
            if default is UnboundLocalError:
                raise AttributeError('Attribute %s not found in %s' % (name, self))
            return default
        return object.__getattribute__(ctx, name)

    def iter(self):
        """Yield this context, then each ancestor up the parent chain."""
        yield self
        parent = object.__getattribute__(self, '_parent')
        if parent is not None:
            for ancestor in parent.iter():
                yield ancestor

    def iter_containing(self, name):
        """Yield every context in the chain that directly defines ``name``."""
        ctx_dict = object.__getattribute__(self, '__dict__')
        if name in ctx_dict:
            yield self
        parent = ctx_dict.get('_parent')
        if parent is not None:
            for ancestor in parent.iter_containing(name):
                yield ancestor

    def iter_inherited(self, name):
        """Yield each value of ``name`` defined along the parent chain."""
        for ctx in self.iter_containing(name):
            yield object.__getattribute__(ctx, name)

    def lang_del(self):
        # Remove the language both from the context and from the environ.
        del self._lang
        if self.req is not None and self.req.environ.get('etalage') is not None \
                and '_lang' in self.req.environ['etalage']:
            del self.req.environ['etalage']['_lang']

    def lang_get(self):
        if self._lang is None:
            # self._lang = self.req.accept_language.best_matches('en-US') if self.req is not None else []
            # Note: Don't forget to add country-less language code when only a "language-COUNTRY" code is given.
            self._lang = ['fr-FR', 'fr']
            if self.req is not None:
                self.req.environ.setdefault('etalage', {})['_lang'] = self._lang
        return self._lang

    def lang_set(self, lang):
        self._lang = lang
        if self.req is not None:
            self.req.environ.setdefault('etalage', {})['_lang'] = self._lang
        # Reinitialize translator for new languages.
        if self._translator is not None:
            # Don't del self._translator, because attribute _translator can be defined in a parent.
            self._translator = None
            if self.req is not None and self.req.environ.get('etalage') is not None \
                    and '_translator' in self.req.environ['etalage']:
                del self.req.environ['etalage']['_translator']

    lang = property(lang_get, lang_set, lang_del)

    def new(self, **kwargs):
        """Create a child context inheriting from this one, with overrides."""
        ctx = Ctx()
        ctx._parent = self
        for name, value in kwargs.iteritems():
            setattr(ctx, name, value)
        return ctx

    @property
    def parent(self):
        return object.__getattribute__(self, '_parent')

    # NOTE(review): the scopes accessors use the 'wenoit_etalage' environ key
    # while lang/translator use 'etalage' — looks inconsistent; confirm
    # whether this is intentional (legacy key) before unifying.
    def scopes_del(self):
        del self._scopes
        if self.req is not None and self.req.environ.get('wenoit_etalage') is not None \
                and '_scopes' in self.req.environ['wenoit_etalage']:
            del self.req.environ['wenoit_etalage']['_scopes']

    def scopes_get(self):
        return self._scopes

    def scopes_set(self, scopes):
        self._scopes = scopes
        if self.req is not None:
            self.req.environ.setdefault('wenoit_etalage', {})['_scopes'] = scopes

    scopes = property(scopes_get, scopes_set, scopes_del)

    @property
    def session(self):
        # Beaker session attached to the request, if any.
        return self.req.environ.get('beaker.session') if self.req is not None else None

    @property
    def translator(self):
        """Get a valid translator object from one or several languages names."""
        if self._translator is None:
            languages = self.lang
            if not languages:
                return gettext.NullTranslations()
            if not isinstance(languages, list):
                languages = [languages]
            # Chain translators: later entries take precedence, earlier ones
            # become fallbacks (biryani first, then the package, then plugins).
            translator = gettext.NullTranslations()
            i18n_dir_by_plugin_name = conf['i18n_dir_by_plugin_name'] or {}
            for name, i18n_dir in [
                    ('biryani', conf['biryani_i18n_dir']),
                    (conf['package_name'], conf['i18n_dir']),
                    ] + sorted(i18n_dir_by_plugin_name.iteritems()):
                if name is not None and i18n_dir is not None:
                    translator = new_translator(name, i18n_dir, languages, fallback = translator)
            self._translator = translator
        return self._translator
# Shared default context, used when no request-bound context is available.
null_ctx = Ctx()
null_ctx.lang = ['fr-FR', 'fr']
def new_translator(domain, localedir, languages, fallback = None):
    """Build a gettext translator for ``languages`` from ``localedir``.

    Missing catalogs never raise (``fallback = True``); when ``fallback`` is
    given it is chained after the new translator for untranslated messages.
    """
    translator = gettext.translation(domain, localedir, fallback = True, languages = languages)
    if fallback is not None:
        translator.add_fallback(fallback)
    return translator
| Gentux/etalage | etalage/contexts.py | Python | agpl-3.0 | 7,875 |
<?php
/**
* StatusNet, the distributed open-source microblogging tool
*
* Subscribe to a peopletag
*
* PHP version 5
*
* LICENCE: This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* @category Peopletag
* @package StatusNet
* @author Shashi Gowda <connect2shashi@gmail.com>
* @license http://www.fsf.org/licensing/licenses/agpl-3.0.html GNU Affero General Public License version 3.0
* @link http://status.net/
*/
if (!defined('STATUSNET') && !defined('LACONICA')) {
exit(1);
}
/**
* Subscribe to a peopletag
*
* This is the action for subscribing to a peopletag. It works more or less like the join action
* for groups.
*
* @category Peopletag
* @package StatusNet
* @author Shashi Gowda <connect2shashi@gmail.com>
* @license http://www.fsf.org/licensing/licenses/agpl-3.0.html GNU Affero General Public License version 3.0
* @link http://status.net/
*/
class SubscribepeopletagAction extends Action
{
    /** @var Profile_list|null List being subscribed to. */
    var $peopletag = null;
    /** @var Profile|null Owner of the list. */
    var $tagger = null;

    /**
     * Prepare to run.
     *
     * Requires a logged-in user, a POST request, a valid session token and
     * an existing, non-private list id.
     *
     * @param array $args request arguments
     * @return boolean true on success
     */
    function prepare($args)
    {
        parent::prepare($args);

        if (!common_logged_in()) {
            // TRANS: Client error displayed when trying to perform an action while not logged in.
            $this->clientError(_('You must be logged in to unsubscribe from a list.'));
            return false;
        }
        // Only allow POST requests
        if ($_SERVER['REQUEST_METHOD'] != 'POST') {
            // TRANS: Client error displayed when trying to use another method than POST.
            $this->clientError(_('This action only accepts POST requests.'));
            return false;
        }

        // CSRF protection
        $token = $this->trimmed('token');
        if (!$token || $token != common_session_token()) {
            // TRANS: Client error displayed when the session token does not match or is not given.
            $this->clientError(_('There was a problem with your session token.'.
                                 ' Try again, please.'));
            return false;
        }

        $tagger_arg = $this->trimmed('tagger');
        $tag_arg = $this->trimmed('tag');

        $id = intval($this->arg('id'));
        if ($id) {
            $this->peopletag = Profile_list::getKV('id', $id);
        } else {
            // TRANS: Client error displayed when trying to perform an action without providing an ID.
            $this->clientError(_('No ID given.'), 404);
            return false;
        }

        if (!$this->peopletag || $this->peopletag->private) {
            // TRANS: Client error displayed trying to reference a non-existing list.
            $this->clientError(_('No such list.'), 404);
            return false;
        }

        $this->tagger = Profile::getKV('id', $this->peopletag->tagger);

        return true;
    }

    /**
     * Handle the request
     *
     * On POST, subscribe the current user to the list, then either render an
     * ajax fragment with the unsubscribe form or redirect to the subscribers
     * page.
     *
     * @param array $args unused
     *
     * @return void
     */
    function handle($args)
    {
        parent::handle($args);

        $cur = common_current_user();

        try {
            Profile_tag_subscription::add($this->peopletag, $cur);
        } catch (Exception $e) {
            // TRANS: Server error displayed subscribing to a list fails.
            // TRANS: %1$s is a user nickname, %2$s is a list, %3$s is the error message (no period).
            // Bug fix: the exception message previously leaked out as a second
            // argument to serverError() instead of filling the %3$s placeholder.
            $this->serverError(sprintf(_('Could not subscribe user %1$s to list %2$s: %3$s'),
                                       $cur->nickname, $this->peopletag->tag, $e->getMessage()));
        }

        if ($this->boolean('ajax')) {
            $this->startHTML('text/xml;charset=utf-8');
            $this->elementStart('head');
            // TRANS: Title of form to subscribe to a list.
            // TRANS: %1%s is a user nickname, %2$s is a list, %3$s is a tagger nickname.
            $this->element('title', null, sprintf(_('%1$s subscribed to list %2$s by %3$s'),
                                                  $cur->nickname,
                                                  $this->peopletag->tag,
                                                  $this->tagger->nickname));
            $this->elementEnd('head');
            $this->elementStart('body');
            $lf = new UnsubscribePeopletagForm($this, $this->peopletag);
            $lf->show();
            $this->elementEnd('body');
            $this->endHTML();
        } else {
            common_redirect(common_local_url('peopletagsubscribers',
                                array('tagger' => $this->tagger->nickname,
                                      'tag' =>$this->peopletag->tag)),
                            303);
        }
    }
}
| ZealIndustries/white-glint | actions/subscribepeopletag.php | PHP | agpl-3.0 | 5,342 |
/*
* Copyright (C) 2013 OpenJST Project
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openjst.protocols.basic.encoder;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.handler.codec.oneone.OneToOneEncoder;
import org.openjst.commons.io.buffer.DataBufferException;
import org.openjst.commons.security.checksum.CRC16;
import org.openjst.protocols.basic.constants.ProtocolBasicConstants;
import org.openjst.protocols.basic.pdu.PDU;
/**
 * Netty downstream encoder that serialises {@link PDU} messages into the
 * basic-protocol wire format; non-PDU messages pass through untouched.
 */
public class ProtocolEncoder extends OneToOneEncoder {

    /** Five reserved header bytes, always zero in this protocol version. */
    public static final byte[] RESERVED = new byte[]{0, 0, 0, 0, 0};

    @Override
    protected Object encode(final ChannelHandlerContext ctx, final Channel channel, final Object msg) throws Exception {
        return (msg instanceof PDU) ? encodePacket((PDU) msg) : msg;
    }

    /**
     * Serialises a PDU: a fixed 16-byte header — version(1), zero(2),
     * type(2), body length(4), reserved(5), CRC16 of the body(2) — followed
     * by the encoded body (omitted when empty).
     *
     * @throws DataBufferException if the PDU fails to encode itself
     */
    public static ChannelBuffer encodePacket(final PDU packet) throws DataBufferException {
        final byte[] body = packet.encode();
        final ChannelBuffer out = ChannelBuffers.buffer(16 + body.length);
        out.writeByte(ProtocolBasicConstants.VERSION);
        out.writeShort(0);
        out.writeShort(packet.getType());
        out.writeInt(body.length);
        out.writeBytes(RESERVED);
        out.writeShort(CRC16.checksum(body));
        if (body.length > 0) {
            out.writeBytes(body);
        }
        return out;
    }
}
| devmix/openjst | protocol/basic/commons/src/main/java/org/openjst/protocols/basic/encoder/ProtocolEncoder.java | Java | agpl-3.0 | 2,229 |
package com.thegame.server.presentation.exceptions;
import com.thegame.server.common.exceptions.TypifiedException;
/**
* @author e103880
*/
/**
 * Typified exception for the presentation layer: carries a
 * {@link PresentationExceptionType} plus the message-template arguments.
 *
 * @author e103880
 */
public class PresentationException extends TypifiedException {

    private final PresentationExceptionType exceptionType;
    private final Object[] arguments;

    public PresentationException(final PresentationExceptionType type) {
        this(type, new Object[]{});
    }

    public PresentationException(final PresentationExceptionType type, final Object... args) {
        super(type.getDescription());
        this.exceptionType = type;
        this.arguments = args;
    }

    public PresentationException(final Throwable cause, final PresentationExceptionType type) {
        this(cause, type, new Object[]{});
    }

    public PresentationException(final Throwable cause, final PresentationExceptionType type, final Object... args) {
        super(type.getDescription(), cause);
        this.exceptionType = type;
        this.arguments = args;
    }

    /** Type discriminator for this exception. */
    @Override
    public PresentationExceptionType getExceptionType() {
        return this.exceptionType;
    }

    /** Arguments substituted into the type's message template. */
    @Override
    public Object[] getArguments() {
        return arguments;
    }

    /** Delegates to the processed (argument-substituted) message. */
    @Override
    public String getMessage() {
        return getProcessedMessage();
    }
}
| bernatmv/thegame | server/server-presentation/src/main/java/com/thegame/server/presentation/exceptions/PresentationException.java | Java | agpl-3.0 | 1,333 |
/**
* @ngdoc service
* @name ftepApp.SubscriptionService
* @description
* # SubscriptionService
* Service for subscriptions.
*/
'use strict';
define(['../ftepmodules', 'traversonHal'], function (ftepmodules, TraversonJsonHalAdapter) {

    // Bug fix: MessageService was used throughout but never injected, so
    // every error path threw a ReferenceError instead of reporting nicely.
    ftepmodules.service('SubscriptionService', [ 'ftepProperties', '$q', 'traverson', 'MessageService',
            function (ftepProperties, $q, traverson, MessageService) {

        traverson.registerMediaType(TraversonJsonHalAdapter.mediaType, TraversonJsonHalAdapter);
        var rootUri = ftepProperties.URLv2;
        // HAL-aware client for GET/PATCH/POST; plain client for DELETE.
        var halAPI = traverson.from(rootUri).jsonHal().useAngularHttp();
        var deleteAPI = traverson.from(rootUri).useAngularHttp();

        /** Fetch all subscriptions owned by the given user. */
        this.getUserSubscriptions = function(user) {
            var deferred = $q.defer();
            halAPI.from(rootUri + '/subscriptions/search/findByOwner?owner=' + user._links.self.href)
                .newRequest()
                .getResource()
                .result
                .then(
                    function(document) {
                        deferred.resolve(document);
                    }, function(error) {
                        MessageService.addError('Failed to get subscriptions for user ' + user.name, error);
                        deferred.reject();
                    });
            return deferred.promise;
        };

        /** PATCH the editable fields of an existing subscription. */
        this.updateSubscription = function(subscription) {
            var patchedSubscription = {
                packageName: subscription.packageName,
                storageQuota: subscription.storageQuota,
                processingQuota: subscription.processingQuota,
                subscriptionStart: subscription.subscriptionStart,
                subscriptionEnd: subscription.subscriptionEnd,
                commentText: subscription.commentText
            };
            var deferred = $q.defer();
            halAPI.from(rootUri + '/subscriptions/' + subscription.id)
                .newRequest()
                .patch(patchedSubscription)
                .result
                .then(
                    function(document) {
                        deferred.resolve(document);
                    }, function(error) {
                        MessageService.addError('Failed to update subscription ' + subscription.id, error);
                        deferred.reject();
                    });
            return deferred.promise;
        };

        /** Create a new subscription for owner, recording the creator. */
        this.createSubscription = function(subscription, subscriptionOwner, subscriptionCreator) {
            var newSubscription = {
                owner: subscriptionOwner._links.self.href,
                packageName: subscription.packageName,
                storageQuota: subscription.storageQuota,
                processingQuota: subscription.processingQuota,
                subscriptionStart: subscription.subscriptionStart,
                subscriptionEnd: subscription.subscriptionEnd,
                commentText: subscription.commentText,
                creator: subscriptionCreator._links.self.href
            };
            var deferred = $q.defer();
            halAPI.from(rootUri + '/subscriptions')
                .newRequest()
                .post(newSubscription)
                .result
                .then(
                    function(document) {
                        deferred.resolve(document);
                    }, function(error) {
                        // Bug fix: message said "update" and referenced the
                        // (undefined) id of a not-yet-created subscription.
                        MessageService.addError('Failed to create subscription', error);
                        deferred.reject();
                    });
            return deferred.promise;
        };

        /** DELETE a subscription; non-2xx responses are treated as errors. */
        this.deleteSubscription = function(subscription) {
            var deferred = $q.defer();
            deleteAPI.from(rootUri + '/subscriptions/' + subscription.id)
                .newRequest()
                .delete()
                .result
                .then(
                    function(document) {
                        if (200 <= document.status && document.status < 300) {
                            deferred.resolve(document);
                        } else {
                            // Bug fix: this branch referenced an undefined
                            // `error` variable; report the response instead.
                            MessageService.addError('Failed to delete subscription ' + subscription.id, document);
                            deferred.reject();
                        }
                    }, function(error) {
                        MessageService.addError('Failed to delete subscription ' + subscription.id, error);
                        deferred.reject();
                    });
            return deferred.promise;
        };

        /** POST to the cancel endpoint of a subscription. */
        this.cancelSubscription = function(subscription) {
            var deferred = $q.defer();
            halAPI.from(rootUri + '/subscriptions/' + subscription.id + "/cancel")
                .newRequest()
                .post()
                .result
                .then(
                    function(document) {
                        deferred.resolve(document);
                    }, function(error) {
                        MessageService.addError('Failed to cancel subscription ' + subscription.id, error);
                        deferred.reject();
                    });
            return deferred.promise;
        };

        return this;
    }]);
});
| cgi-eoss/ftep | f-tep-portal/src/main/resources/app/scripts/services/subscriptionservice.js | JavaScript | agpl-3.0 | 4,930 |
var request = require("request");
var yaml = require("js-yaml");
var jsonfile = require("jsonfile");
// Download the canonical legislators-current.yaml, project each legislator's
// latest term onto the fields the app needs, and write congress.json.
request("https://raw.githubusercontent.com/unitedstates/congress-legislators/master/legislators-current.yaml", function(error, response, body) {
    if (error) {
        console.error("Failed to fetch legislators-current.yaml", error);
        return;
    }
    if (response.statusCode != 200) {
        console.error("Failed to fetch legislators-current.yaml",
            "(" + response.statusCode + " " + response.statusMessage + ")");
        return;
    }
    var legislators = yaml.safeLoad(body).map(function(legislator) {
        // The last entry in `terms` is the current term.
        var term = legislator.terms[legislator.terms.length - 1];
        var isRep = term.type == "rep";
        return {
            firstName: legislator.name.first,
            lastName: legislator.name.last,
            bioguideId: legislator.id.bioguide,
            chamber: isRep ? "house" : "senate",
            title: isRep ? "Rep" : "Sen",
            // Senators have no district; normalise to null.
            district: typeof term.district == "undefined" ? null : term.district.toString(),
            state: term.state
        };
    });
    jsonfile.writeFileSync("congress.json", legislators, { spaces: 2 });
});
| EdenSG/democracy.io | bin/update-congress.js | JavaScript | agpl-3.0 | 1,161 |
/**
* Nooku Framework - http://www.nooku.org
*
* @copyright Copyright (C) 2011 - 2017 Johan Janssens and Timble CVBA. (http://www.timble.net)
* @license GNU AGPLv3 <https://www.gnu.org/licenses/agpl.html>
* @link https://github.com/timble/openpolice-platform
*/
// Namespace guard: create the Ckeditor namespace if the page hasn't yet.
if(!Ckeditor) var Ckeditor = {};

// File-picker application embedded in the CKEditor image/file dialog.
// Extends the generic Files.App with a compact grid, no pagination/history,
// and a preview pane that feeds the dialog's url/text/type fields.
Ckeditor.Files = new Class({

    Extends: Files.App,
    Implements: [Events, Options],

    options: {
        types: ['file', 'image'],
        editor: null,               // CKEditor instance this picker serves
        preview: 'files-preview',   // id of the preview pane element
        grid: {
            cookie: false,
            layout: 'compact',
            batch_delete: false
        },
        history: {
            enabled: false
        }
    },

    initialize: function(options) {
        this.parent(options);

        this.editor = this.options.editor;
        this.preview = document.id(this.options.preview);
    },

    // Pagination and pathway are intentionally disabled in the dialog.
    setPaginator: function() {
    },
    setPathway: function() {
    },

    setState: function() {
        // TODO: Implement pagination into the view
        this.fireEvent('beforeSetState');
        var opts = this.options.state;
        this.state = new Files.State(opts);
        this.fireEvent('afterSetState');
    },

    // Wire grid clicks to the preview pane, dispatching on node type.
    setGrid: function() {
        var opts = this.options.grid;
        var that = this;
        $extend(opts, {
            'onClickImage': function(e) {
                that.setPreview(document.id(e.target), 'image');
            },
            'onClickFile': function(e) {
                that.setPreview(document.id(e.target), 'file');
            }
        });
        this.grid = new Files.Grid(this.options.grid.element, opts);
    },

    // Render the preview for the clicked node and fill the dialog inputs
    // (#image-url, #image-text, #image-type).
    setPreview: function(target, type) {
        var node = target.getParent('.files-node-shadow') || target.getParent('.files-node');
        var row = node.retrieve('row');
        var copy = $extend({}, row);
        var path = row.baseurl+"/"+row.filepath;
        // Strip the site base and the files/<container>/ prefix to get a
        // url relative to what the editor should insert.
        var url = path.replace(Files.sitebase+'/', '').replace(/files\/[^\/]+\//, '');

        // Update active row
        node.getParent().getChildren().removeClass('active');
        node.addClass('active');

        // Load preview template
        copy.template = 'details_'+type;

        this.preview.empty();
        copy.render('compact').inject(this.preview);

        // Inject preview image
        if (type == 'image') {
            this.preview.getElement('img').set('src', copy.image);
        }

        // When no text is selected use the file name
        if (type == 'file') {
            if(document.id('image-text').get('value') == ""){
                document.id('image-text').set('value', row.name);
            }
        }

        document.id('image-url').set('value', url);
        document.id('image-type').set('value',row.metadata.mimetype);
    }
});
| timble/openpolice-platform | component/ckeditor/resources/assets/js/ckeditor.files.js | JavaScript | agpl-3.0 | 2,785 |
// Cached jQuery collection of every <article>, captured on page load.
var articles = null;

// Show the complete article list again and clear any filter state.
function restore_all_articles_view() {
    $("#allbtn").button('toggle');
    $('#articleslist').empty().append(articles);
    $('#filterwarning').hide();
}
// Show only the articles carrying the given category class; with no
// argument, fall back to the unfiltered view. Re-runs timestamp/tooltip
// initialisation on the (re)inserted nodes either way.
function switch_category(category) {
    if (typeof category == "undefined") {
        restore_all_articles_view();
    } else {
        var matching = articles.filter('.' + category);
        $("#articleslist").empty().append(matching);
    }
    timeandtips("#articleslist");
}
// On load: initialise timestamps/tooltips, cache the article nodes for the
// filter functions, and enable the (initially disabled) search field.
$(document).ready(function() {
    timeandtips();
    articles = $('#articleslist article');
    $('#searchfield').removeAttr("disabled");
});
// Live search: with 3+ characters, keep only articles whose text matches
// the query (custom case-insensitive :containsi selector) and show the
// filter warning; clearing the field restores the full list.
$("#searchfield").keyup(function(event) {
    var query = $('#searchfield').val();
    if (query.length >= 3) {
        $("#allbtn").button('toggle');
        var matches = articles.filter('article:containsi("' + query + '")');
        $('#filterwarning').show();
        $('#articleslist').empty().append(matches);
    } else if (query.length == 0) {
        restore_all_articles_view();
    }
});
| gfidente/opinoid | webapp/static/js/country.js | JavaScript | agpl-3.0 | 1,017 |
/*
Copyright (c) 2014-2022 AscEmu Team <http://www.ascemu.org>
This file is released under the MIT license. See README-MIT for more information.
*/
#include "WowCrypt.hpp"
#include <algorithm>
#include <openssl/hmac.h>
// Construct in the uninitialised state: the RC4 key schedules and the
// legacy send/receive indices are zeroed until one of the init*Crypt()
// methods installs real key material.
WowCrypt::WowCrypt()
{
    m_isInitialized = false;
    m_clientWotlkDecryptKey.x = 0;
    m_clientWotlkDecryptKey.y = 0;
    m_serverWotlkEncryptKey.x = 0;
    m_serverWotlkEncryptKey.y = 0;
    m_sendI = 0;
    m_sendJ = 0;
    m_recvI = 0;
    m_recvJ = 0;
}
// Nothing to release: all key material lives in by-value members.
WowCrypt::~WowCrypt()
{
}
// True once any of the init*Crypt() methods has run; until then the
// encrypt/decrypt calls are no-ops.
bool WowCrypt::isInitialized()
{
    return m_isInitialized;
}
//////////////////////////////////////////////////////////////////////////////////////////
// WotLK
// Initialise the WotLK header crypt from the 40-byte session key.
// Each direction derives its RC4 key as HMAC-SHA1(seed, session key),
// then discards the first 1024 keystream bytes (RC4-drop) to skip RC4's
// biased initial output.
void WowCrypt::initWotlkCrypt(uint8_t* key)
{
    // Fixed per-direction HMAC seeds used by the 3.x client.
    static const uint8_t send[seedLenght] = { 0xC2, 0xB3, 0x72, 0x3C, 0xC6, 0xAE, 0xD9, 0xB5, 0x34, 0x3C, 0x53, 0xEE, 0x2F, 0x43, 0x67, 0xCE };
    static const uint8_t recv[seedLenght] = { 0xCC, 0x98, 0xAE, 0x04, 0xE8, 0x97, 0xEA, 0xCA, 0x12, 0xDD, 0xC0, 0x93, 0x42, 0x91, 0x53, 0x57 };

    uint8_t encryptHash[SHA_DIGEST_LENGTH];
    uint8_t decryptHash[SHA_DIGEST_LENGTH];
    uint8_t pass[1024];     // scratch buffer used only to advance the keystream
    uint32_t mdLength;

    HMAC(EVP_sha1(), send, seedLenght, key, 40, decryptHash, &mdLength);
    assert(mdLength == SHA_DIGEST_LENGTH);

    HMAC(EVP_sha1(), recv, seedLenght, key, 40, encryptHash, &mdLength);
    assert(mdLength == SHA_DIGEST_LENGTH);

    RC4_set_key(&m_clientWotlkDecryptKey, SHA_DIGEST_LENGTH, decryptHash);
    RC4_set_key(&m_serverWotlkEncryptKey, SHA_DIGEST_LENGTH, encryptHash);

    // RC4-drop1024: burn the first 1024 bytes of both keystreams.
    RC4(&m_serverWotlkEncryptKey, 1024, pass, pass);
    RC4(&m_clientWotlkDecryptKey, 1024, pass, pass);

    m_isInitialized = true;
}
// MoP variant of the header crypt: identical scheme to initWotlkCrypt()
// (HMAC-SHA1 key derivation + RC4-drop1024), only the per-direction seeds
// differ. NOTE(review): near-duplicate of initWotlkCrypt(); could share a
// private helper taking the two seeds — requires a header change.
void WowCrypt::initMopCrypt(uint8_t* key)
{
    static const uint8_t send[seedLenght] = { 0x40, 0xAA, 0xD3, 0x92, 0x26, 0x71, 0x43, 0x47, 0x3A, 0x31, 0x08, 0xA6, 0xE7, 0xDC, 0x98, 0x2A };
    static const uint8_t recv[seedLenght] = { 0x08, 0xF1, 0x95, 0x9F, 0x47, 0xE5, 0xD2, 0xDB, 0xA1, 0x3D, 0x77, 0x8F, 0x3F, 0x3E, 0xE7, 0x00 };

    uint8_t encryptHash[SHA_DIGEST_LENGTH];
    uint8_t decryptHash[SHA_DIGEST_LENGTH];
    uint8_t pass[1024];     // scratch buffer used only to advance the keystream
    uint32_t mdLength;

    HMAC(EVP_sha1(), send, seedLenght, key, 40, decryptHash, &mdLength);
    assert(mdLength == SHA_DIGEST_LENGTH);

    HMAC(EVP_sha1(), recv, seedLenght, key, 40, encryptHash, &mdLength);
    assert(mdLength == SHA_DIGEST_LENGTH);

    RC4_set_key(&m_clientWotlkDecryptKey, SHA_DIGEST_LENGTH, decryptHash);
    RC4_set_key(&m_serverWotlkEncryptKey, SHA_DIGEST_LENGTH, encryptHash);

    // RC4-drop1024: burn the first 1024 bytes of both keystreams.
    RC4(&m_serverWotlkEncryptKey, 1024, pass, pass);
    RC4(&m_clientWotlkDecryptKey, 1024, pass, pass);

    m_isInitialized = true;
}
// Decrypt an incoming header in place with the client->server RC4 stream.
// No-op until an init*Crypt() method has been called.
void WowCrypt::decryptWotlkReceive(uint8_t* data, size_t length)
{
    if (!m_isInitialized)
        return;

    RC4(&m_clientWotlkDecryptKey, (unsigned long)length, data, data);
}
// Encrypt an outgoing header in place with the server->client RC4 stream.
// No-op until an init*Crypt() method has been called.
void WowCrypt::encryptWotlkSend(uint8_t* data, size_t length)
{
    if (!m_isInitialized)
        return;

    RC4(&m_serverWotlkEncryptKey, (unsigned long)length, data, data);
}
//////////////////////////////////////////////////////////////////////////////////////////
// Legacy
// Arm the legacy (pre-TBC/TBC) header crypt. The key itself must have been
// supplied beforehand via setLegacyKey().
void WowCrypt::initLegacyCrypt()
{
    m_isInitialized = true;
}
// Decrypt the first cryptedReceiveLength header bytes in place using the
// legacy scheme: plain = (cipher - prevCipher) ^ key[i], cycling through
// the session key. m_recvI/m_recvJ persist across packets, so calls must
// happen in wire order.
void WowCrypt::decryptLegacyReceive(uint8_t* data, size_t length)
{
    if (!m_isInitialized)
        return;

    if (length < cryptedReceiveLength)
        return;

    uint8_t x;
    for (size_t t = 0; t < cryptedReceiveLength; ++t)
    {
        m_recvI %= crypKeyVector.size();
        x = (data[t] - m_recvJ) ^ crypKeyVector[m_recvI];
        ++m_recvI;
        m_recvJ = data[t];      // keep the raw cipher byte for the next round
        data[t] = x;
    }
}
// Encrypt the first cryptedSendLength header bytes in place using the
// legacy scheme: cipher = (plain ^ key[i]) + prevCipher, cycling through
// the session key. m_sendI/m_sendJ persist across packets, so calls must
// happen in wire order.
void WowCrypt::encryptLegacySend(uint8_t* data, size_t length)
{
    if (!m_isInitialized)
        return;

    if (length < cryptedSendLength)
        return;

    for (size_t t = 0; t < cryptedSendLength; ++t)
    {
        m_sendI %= crypKeyVector.size();
        data[t] = m_sendJ = (data[t] ^ crypKeyVector[m_sendI]) + m_sendJ;
        ++m_sendI;
    }
}
// Store a private copy of the session key used by the legacy header crypt.
void WowCrypt::setLegacyKey(uint8_t* key, size_t length)
{
    crypKeyVector.assign(key, key + length);
}
// Derives the 20-byte TBC header-cipher key from the 40-byte session key.
// The construction matches HMAC-SHA1 with the hard-coded 16-byte seed as the
// HMAC key: 0x36/0x5C are the standard ipad/opad fill bytes over a 64-byte
// block, with the inner digest fed into the outer hash.
// `key` must have room for SHA_DIGEST_LENGTH (20) bytes; `sessionkey` must
// be 40 bytes.
void WowCrypt::generateTbcKey(uint8_t* key, uint8_t* sessionkey)
{
    uint8_t seedKey[seedLenght] = { 0x38, 0xA7, 0x83, 0x15, 0xF8, 0x92, 0x25, 0x30, 0x71, 0x98, 0x67, 0xB1, 0x8C, 0x4, 0xE2, 0xAA };

    // ipad / opad style buffers.
    uint8_t firstBuffer[64];
    uint8_t secondBuffer[64];
    memset(firstBuffer, 0x36, 64);
    memset(secondBuffer, 0x5C, 64);

    // XOR the seed into the first seedLenght bytes of each pad.
    for (uint8_t i = 0; i < seedLenght; ++i)
    {
        firstBuffer[i] = (uint8_t)(seedKey[i] ^ firstBuffer[i]);
        secondBuffer[i] = (uint8_t)(seedKey[i] ^ secondBuffer[i]);
    }

    // Inner hash: H(ipad || sessionkey)
    Sha1Hash sha1;
    sha1.UpdateData(firstBuffer, 64);
    sha1.UpdateData(sessionkey, 40);
    sha1.Finalize();
    uint8_t* tempDigest = sha1.GetDigest();

    // Outer hash: H(opad || innerDigest)
    Sha1Hash sha2;
    sha2.UpdateData(secondBuffer, 64);
    sha2.UpdateData(tempDigest, SHA_DIGEST_LENGTH);
    sha2.Finalize();

    memcpy(key, sha2.GetDigest(), SHA_DIGEST_LENGTH);
}
| AscEmu/AscEmu | src/shared/Auth/WowCrypt.cpp | C++ | agpl-3.0 | 5,007 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Service
* @subpackage Amazon
* @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id$
*/
/**
* @see Zend_Service_Amazon_Item
*/
// require_once 'Zend/Service/Amazon/Item.php';  (original require_once was
// mangled to "//$1", presumably by an automated rewrite; the class is now
// expected to be autoloaded -- verify before re-enabling)
/**
 * A SeekableIterator over the <Item> elements of an Amazon ECS response
 * document; current() wraps each DOM element in a Zend_Service_Amazon_Item.
 *
 * @category   Zend
 * @package    Zend_Service
 * @subpackage Amazon
 * @copyright  Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */
class Zend_Service_Amazon_ResultSet implements SeekableIterator
{
    /**
     * A DOMNodeList of <Item> elements
     *
     * @var DOMNodeList
     */
    protected $_results = null;

    /**
     * Amazon Web Service Return Document
     *
     * @var DOMDocument
     */
    protected $_dom;

    /**
     * XPath Object for $this->_dom
     *
     * @var DOMXPath
     */
    protected $_xpath;

    /**
     * Current index for SeekableIterator
     *
     * @var int
     */
    protected $_currentIndex = 0;

    /**
     * Create an instance of Zend_Service_Amazon_ResultSet and create the necessary data objects
     *
     * @param  DOMDocument $dom response document; queried with the
     *                          2005-10-05 ECS namespace (registered as "az")
     * @return void
     */
    public function __construct(DOMDocument $dom)
    {
        $this->_dom = $dom;
        $this->_xpath = new DOMXPath($dom);
        $this->_xpath->registerNamespace('az', 'http://webservices.amazon.com/AWSECommerceService/2005-10-05');
        $this->_results = $this->_xpath->query('//az:Item');
    }

    /**
     * Total Number of results returned
     *
     * @return int Total number of results returned
     */
    public function totalResults()
    {
        $result = $this->_xpath->query('//az:TotalResults/text()');
        return (int) $result->item(0)->data;
    }

    /**
     * Total Number of pages returned
     *
     * @return int Total number of pages returned
     */
    public function totalPages()
    {
        $result = $this->_xpath->query('//az:TotalPages/text()');
        return (int) $result->item(0)->data;
    }

    /**
     * Implement SeekableIterator::current()
     *
     * Only meaningful while valid() returns true.
     *
     * @return Zend_Service_Amazon_Item
     */
    public function current()
    {
        return new Zend_Service_Amazon_Item($this->_results->item($this->_currentIndex));
    }

    /**
     * Implement SeekableIterator::key()
     *
     * @return int
     */
    public function key()
    {
        return $this->_currentIndex;
    }

    /**
     * Implement SeekableIterator::next()
     *
     * @return void
     */
    public function next()
    {
        $this->_currentIndex += 1;
    }

    /**
     * Implement SeekableIterator::rewind()
     *
     * @return void
     */
    public function rewind()
    {
        $this->_currentIndex = 0;
    }

    /**
     * Implement SeekableIterator::seek()
     *
     * Accepts any index within the result list (also 0 when the list is
     * unset); anything else raises OutOfBoundsException.
     *
     * @param  int $index
     * @throws OutOfBoundsException
     * @return void
     */
    public function seek($index)
    {
        $indexInt = (int) $index;
        if ($indexInt >= 0 && (null === $this->_results || $indexInt < $this->_results->length)) {
            $this->_currentIndex = $indexInt;
        } else {
            throw new OutOfBoundsException("Illegal index '$index'");
        }
    }

    /**
     * Implement SeekableIterator::valid()
     *
     * @return boolean
     */
    public function valid()
    {
        return null !== $this->_results && $this->_currentIndex < $this->_results->length;
    }
}
| Sparfel/iTop-s-Portal | library/Zend/Service/Amazon/ResultSet.php | PHP | agpl-3.0 | 4,003 |
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect, Http404
from django.db.models import Q
from django.contrib import messages
from cc.general.util import render
import cc.ripple.api as ripple
from cc.profile.models import Profile
from cc.relate.forms import EndorseForm, AcknowledgementForm
from cc.relate.models import Endorsement
from cc.feed.models import FeedItem
from cc.general.mail import send_notification
from django.utils.translation import ugettext as _
# Flash messages shown to the user; translated via ugettext at import time.
MESSAGES = {
    'endorsement_saved': _("Endorsement saved."),
    'endorsement_deleted': _("Endorsement deleted."),
    'acknowledgement_sent': _("Acknowledgement sent."),
}
@login_required
@render()
def endorse_user(request, recipient_username):
    """Create, update or delete the current user's endorsement of a user.

    GET renders the endorsement form; POST either deletes the existing
    endorsement (when the 'delete' button was pressed) or saves the form.
    Returns ``locals()``, which the ``@render`` decorator presumably uses as
    the template context -- keep local variable names stable (``profile`` is
    referenced by profile_base.html).
    """
    recipient = get_object_or_404(Profile, user__username=recipient_username)
    # Users may not endorse themselves.
    if recipient == request.profile:
        raise Http404()
    try:
        endorsement = Endorsement.objects.get(
            endorser=request.profile, recipient=recipient)
    except Endorsement.DoesNotExist:
        endorsement = None
    if request.method == 'POST':
        if 'delete' in request.POST and endorsement:
            endorsement.delete()
            messages.info(request, MESSAGES['endorsement_deleted'])
            return HttpResponseRedirect(
                endorsement.recipient.get_absolute_url())
        form = EndorseForm(request.POST, instance=endorsement,
                           endorser=request.profile, recipient=recipient)
        if form.is_valid():
            is_new = endorsement is None
            endorsement = form.save()
            # Notify only on a brand-new endorsement, not on edits.
            if is_new:
                send_endorsement_notification(endorsement)
            messages.info(request, MESSAGES['endorsement_saved'])
            return HttpResponseRedirect(endorsement.get_absolute_url())
    else:
        form = EndorseForm(instance=endorsement, endorser=request.profile,
                           recipient=recipient)
    profile = recipient  # For profile_base.html.
    return locals()
def send_endorsement_notification(endorsement):
    """Email the recipient that they have received a new endorsement."""
    endorser = endorsement.endorser
    send_notification(
        _("%s has endorsed you on Villages.cc") % endorser,
        endorser,
        endorsement.recipient,
        'endorsement_notification_email.txt',
        {'endorsement': endorsement})
@login_required
@render()
def endorsement(request, endorsement_id):
    """Display a single endorsement; 404 when it does not exist.

    Returns ``locals()`` for the ``@render`` decorator's template context.
    """
    endorsement = get_object_or_404(Endorsement, pk=endorsement_id)
    return locals()
@login_required
@render()
def relationships(request):
    """List all Ripple accounts (credit relationships) of the current user."""
    accounts = ripple.get_user_accounts(request.profile)
    return locals()
@login_required
@render()
def relationship(request, partner_username):
    """Show the account ledger between the current user and ``partner``.

    Returns ``locals()`` for the ``@render`` decorator's template context.
    """
    partner = get_object_or_404(Profile, user__username=partner_username)
    if partner == request.profile:
        raise Http404  # Can't have relationship with yourself.
    account = request.profile.account(partner)
    if account:
        entries = account.entries
        balance = account.balance
    else:
        # No account between the two users yet: render an empty ledger.
        entries = []
        balance = 0
    profile = partner  # For profile_base.html.
    return locals()
@login_required
@render()
def acknowledge_user(request, recipient_username):
    """Send an acknowledgement (Ripple payment) to another user.

    The maximum payable amount is computed from the Ripple credit network
    and used to cap the form. Returns ``locals()`` for the ``@render``
    decorator's template context.
    """
    recipient = get_object_or_404(Profile, user__username=recipient_username)
    # Users may not acknowledge themselves.
    if recipient == request.profile:
        raise Http404
    # TODO: Don't recompute max_amount on form submit? Cache, or put in form
    # as hidden field?
    max_amount = ripple.max_payment(request.profile, recipient)
    if request.method == 'POST':
        form = AcknowledgementForm(request.POST, max_ripple=max_amount)
        if form.is_valid():
            acknowledgement = form.send_acknowledgement(
                request.profile, recipient)
            send_acknowledgement_notification(acknowledgement)
            messages.info(request, MESSAGES['acknowledgement_sent'])
            return HttpResponseRedirect(acknowledgement.get_absolute_url())
    else:
        # GET parameters may prefill the form (e.g. amount from a link).
        form = AcknowledgementForm(max_ripple=max_amount, initial=request.GET)
    can_ripple = max_amount > 0
    profile = recipient  # For profile_base.html.
    return locals()
def send_acknowledgement_notification(acknowledgement):
    """Email the recipient that they have been acknowledged."""
    payer = acknowledgement.payer
    send_notification(
        _("%s has acknowledged you on Villages.cc") % payer,
        payer,
        acknowledgement.recipient,
        'acknowledgement_notification_email.txt',
        {'acknowledgement': acknowledgement})
@login_required
@render()
def view_acknowledgement(request, payment_id):
    """Show a payment to its participants; 404 for everyone else.

    Returns ``locals()`` for the ``@render`` decorator's template context.
    """
    try:
        payment = ripple.get_payment(payment_id)
    except ripple.RipplePayment.DoesNotExist:
        raise Http404
    entries = payment.entries_for_user(request.profile)
    if not entries:
        raise Http404  # Non-participants don't get to see anything.
    # Split the viewer's ledger entries into money sent (negative amounts)
    # and money received (non-negative amounts) for display.
    sent_entries = []
    received_entries = []
    for entry in entries:
        if entry.amount < 0:
            sent_entries.append(entry)
        else:
            received_entries.append(entry)
    return locals()
| rfugger/villagescc | cc/relate/views.py | Python | agpl-3.0 | 5,122 |
<?php
if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point');
/*********************************************************************************
* The contents of this file are subject to the SugarCRM Master Subscription
* Agreement ("License") which can be viewed at
* http://www.sugarcrm.com/crm/en/msa/master_subscription_agreement_11_April_2011.pdf
* By installing or using this file, You have unconditionally agreed to the
* terms and conditions of the License, and You may not use this file except in
* compliance with the License. Under the terms of the license, You shall not,
* among other things: 1) sublicense, resell, rent, lease, redistribute, assign
* or otherwise transfer Your rights to the Software, and 2) use the Software
* for timesharing or service bureau purposes such as hosting the Software for
* commercial gain and/or for the benefit of a third party. Use of the Software
* may be subject to applicable fees and any use of the Software without first
* paying applicable fees is strictly prohibited. You do not have the right to
* remove SugarCRM copyrights from the source code or user interface.
*
* All copies of the Covered Code must include on each user interface screen:
* (i) the "Powered by SugarCRM" logo and
* (ii) the SugarCRM copyright notice
* in the same form as they appear in the distribution. See full license for
* requirements.
*
* Your Warranty, Limitations of liability and Indemnity are expressly stated
* in the License. Please refer to the License for the specific language
* governing these rights and limitations under the License. Portions created
* by SugarCRM are Copyright (C) 2004-2011 SugarCRM, Inc.; All Rights Reserved.
********************************************************************************/
// Vardef metadata for the linked_documents join table, which flex-relates
// Documents to Contracts, Leads and ContractTypes via parent_type/parent_id.
$dictionary['linked_documents'] = array ( 'table' => 'linked_documents'
                               , 'fields' => array (
                                     array('name' =>'id', 'type' =>'varchar', 'len'=>'36')
                                   , array('name' =>'parent_id', 'type' =>'varchar', 'len'=>'36')
                                   // Module name of the related record (flex-relate discriminator).
                                   , array('name' =>'parent_type', 'type' =>'varchar', 'len'=>'25')
                                   , array('name' =>'document_id', 'type' =>'varchar', 'len'=>'36')
                                   , array('name' =>'document_revision_id', 'type' =>'varchar', 'len'=>'36')
                                   , array('name' =>'date_modified','type' => 'datetime')
                                   , array('name' =>'deleted', 'type' =>'bool', 'len'=>'1', 'default'=>'0', 'required'=>false)
                                  )
                               , 'indices' => array (
                                       array('name' =>'linked_documentspk', 'type' =>'primary', 'fields'=>array('id')),
                                       array( 'name' => 'idx_parent_document',
                                              'type' => 'alternate_key',
                                              'fields' => array('parent_type','parent_id','document_id'),
                                        ),
                                 )
                               , 'relationships' => array (
                                    'contracts_documents' => array('lhs_module'=> 'Contracts', 'lhs_table'=> 'contracts', 'lhs_key' => 'id',
                                        'rhs_module'=> 'Documents', 'rhs_table'=> 'documents', 'rhs_key' => 'id',
                                        'relationship_type'=>'many-to-many',
                                        'join_table'=> 'linked_documents', 'join_key_lhs'=>'parent_id', 'join_key_rhs'=>'document_id', 'relationship_role_column'=>'parent_type',
                                        'relationship_role_column_value'=>'Contracts'),
                                    'leads_documents' => array('lhs_module'=> 'Leads', 'lhs_table'=> 'leads', 'lhs_key' => 'id',
                                        'rhs_module'=> 'Documents', 'rhs_table'=> 'documents', 'rhs_key' => 'id',
                                        'relationship_type'=>'many-to-many',
                                        'join_table'=> 'linked_documents', 'join_key_lhs'=>'parent_id', 'join_key_rhs'=>'document_id', 'relationship_role_column'=>'parent_type',
                                        'relationship_role_column_value'=>'Leads'),
                                    'contracttype_documents' => array('lhs_module'=> 'ContractTypes', 'lhs_table'=> 'contract_types', 'lhs_key' => 'id',
                                        'rhs_module'=> 'Documents', 'rhs_table'=> 'documents', 'rhs_key' => 'id',
                                        'relationship_type'=>'many-to-many',
                                        'join_table'=> 'linked_documents', 'join_key_lhs'=>'parent_id', 'join_key_rhs'=>'document_id', 'relationship_role_column'=>'parent_type',
                                        // NOTE(review): 'ContracTemplates' looks misspelled ('ContractTypes'?),
                                        // but existing rows may store this exact value -- verify before changing.
                                        'relationship_role_column_value'=>'ContracTemplates'),
                               ),
);
?> | harish-patel/ecrm | metadata/linked_documentsMetaData.php | PHP | agpl-3.0 | 4,059 |
/**
* Copyright (C) 2013 The Language Archive, Max Planck Institute for
* Psycholinguistics
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation; either version 2 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc., 59 Temple
* Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package nl.mpi.yams.common.data;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
/**
 * JAXB-serialisable reference to a data node that should be highlighted,
 * identified by the node's ID ("ID" attribute) plus a path within the node
 * ("Path" attribute).
 *
 * Created on : Aug 28, 2013, 5:24:13 PM
 *
 * @author Peter Withers <peter.withers@mpi.nl>
 */
@XmlRootElement(name = "Highlight")
public class DataNodeHighlight implements Serializable {

    // Explicit version id so the serialized form stays stable across
    // recompiles (recommended for every Serializable class).
    private static final long serialVersionUID = 1L;

    private String dataNodeId = null;
    private String highlightPath = null;

    public String getDataNodeId() {
        return dataNodeId;
    }

    /** @param dataNodeId value of the "ID" XML attribute */
    @XmlAttribute(name = "ID")
    public void setDataNodeId(String dataNodeId) {
        this.dataNodeId = dataNodeId;
    }

    public String getHighlightPath() {
        return highlightPath;
    }

    /** @param highlightPath value of the "Path" XML attribute */
    @XmlAttribute(name = "Path")
    public void setHighlightPath(String highlightPath) {
        this.highlightPath = highlightPath;
    }
}
| TheLanguageArchive/YAMS | common/src/main/java/nl/mpi/yams/common/data/DataNodeHighlight.java | Java | agpl-3.0 | 1,663 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import waybill
import wizard
import travel
import vehicle
import requirement
import res_partner
import waybill_expense
import account_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| adhoc-dev/odoo-logistic | addons/logistic_x/__init__.py | Python | agpl-3.0 | 1,161 |
/*
* JBILLING CONFIDENTIAL
* _____________________
*
* [2003] - [2012] Enterprise jBilling Software Ltd.
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of Enterprise jBilling Software.
* The intellectual and technical concepts contained
* herein are proprietary to Enterprise jBilling Software
* and are protected by trade secret or copyright law.
* Dissemination of this information or reproduction of this material
* is strictly forbidden.
*/
package com.sapienter.jbilling.server.pluggableTask.admin;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.iterators.ArrayListIterator;
import org.apache.log4j.Logger;
import com.sapienter.jbilling.common.SessionInternalError;
import com.sapienter.jbilling.server.pluggableTask.PluggableTask;
import com.sapienter.jbilling.server.util.Constants;
import com.sapienter.jbilling.server.util.Context;
import com.sapienter.jbilling.server.util.audit.EventLogger;
/**
 * Business logic for pluggable task (plug-in) configuration: CRUD for task
 * rows and their parameters, validation, and instantiation of the configured
 * task implementation class.
 *
 * @param <T> the task interface the configured class is expected to implement
 */
public class PluggableTaskBL<T> {
    private static final Logger LOG = Logger.getLogger(PluggableTaskBL.class);

    private EventLogger eLogger = null;
    private PluggableTaskDAS das = null;
    private PluggableTaskParameterDAS dasParameter = null;
    private PluggableTaskDTO pluggableTask = null;

    public PluggableTaskBL(Integer pluggableTaskId) {
        init();
        set(pluggableTaskId);
    }

    public PluggableTaskBL() {
        init();
    }

    private void init() {
        eLogger = EventLogger.getInstance();
        das = (PluggableTaskDAS) Context.getBean(Context.Name.PLUGGABLE_TASK_DAS);
        dasParameter = new PluggableTaskParameterDAS();
    }

    /** Loads the task row with the given id. */
    public void set(Integer id) {
        pluggableTask = das.find(id);
    }

    /** Loads the task row configured for the given entity and task type. */
    public void set(Integer entityId, Integer typeId) {
        pluggableTask = das.findByEntityType(entityId, typeId);
    }

    public void set(PluggableTaskDTO task) {
        pluggableTask = task;
    }

    public PluggableTaskDTO getDTO() {
        return pluggableTask;
    }

    /**
     * Validates and persists a new pluggable task row, audit-logging the
     * creation.
     *
     * @return the id of the newly created row
     */
    public int create(Integer executorId, PluggableTaskDTO dto) {
        validate(dto);
        LOG.debug("Creating a new pluggable task row " + dto);
        pluggableTask = das.save(dto);
        eLogger.audit(executorId, null, Constants.TABLE_PLUGGABLE_TASK,
                pluggableTask.getId(), EventLogger.MODULE_TASK_MAINTENANCE,
                EventLogger.ROW_CREATED, null, null, null);
        return pluggableTask.getId();
    }

    /** Adds a parameter to the given task and invalidates its rule cache. */
    public void createParameter(Integer taskId,
            PluggableTaskParameterDTO dto) {
        PluggableTaskDTO task = das.find(taskId);
        dto.setTask(task);
        task.getParameters().add(dasParameter.save(dto));
        // clear the rules cache (just in case this plug-in was rule based)
        PluggableTask.invalidateRuleCache(taskId);
    }

    /**
     * Validates and saves changes to an existing task row. Persisted
     * parameters that are no longer present on the DTO are deleted.
     */
    public void update(Integer executorId, PluggableTaskDTO dto) {
        if (dto == null || dto.getId() == null) {
            throw new SessionInternalError("task to update can't be null");
        }
        validate(dto);
        // Whatever remains in this list after the loop was removed from the
        // DTO and must be deleted from persistence.
        List<PluggableTaskParameterDTO> parameterDTOList = dasParameter.findAllByTask(dto);
        for (PluggableTaskParameterDTO param : dto.getParameters()) {
            parameterDTOList.remove(dasParameter.find(param.getId()));
            param.expandValue();
        }
        for (PluggableTaskParameterDTO param : parameterDTOList) {
            dasParameter.delete(param);
        }
        LOG.debug("updating " + dto);
        pluggableTask = das.save(dto);
        eLogger.audit(executorId, null,
                Constants.TABLE_PLUGGABLE_TASK,
                dto.getId(), EventLogger.MODULE_TASK_MAINTENANCE,
                EventLogger.ROW_UPDATED, null, null, null);
        // clear the rules cache (just in case this plug-in was rule based)
        PluggableTask.invalidateRuleCache(dto.getId());
        das.invalidateCache(); // 3rd level cache
        pluggableTask.populateParamValues();
    }

    /** Deletes the currently loaded task row, audit-logging the removal. */
    public void delete(Integer executor) {
        eLogger.audit(executor, null, Constants.TABLE_PLUGGABLE_TASK,
                pluggableTask.getId(), EventLogger.MODULE_TASK_MAINTENANCE,
                EventLogger.ROW_DELETED, null, null, null);
        das.delete(pluggableTask);
        // clear the rules cache (just in case this plug-in was rule based)
        PluggableTask.invalidateRuleCache(pluggableTask.getId());
    }

    /** Deletes a single parameter by id, audit-logging the removal. */
    public void deleteParameter(Integer executor, Integer id) {
        eLogger.audit(executor, null, Constants.TABLE_PLUGGABLE_TASK_PARAMETER,
                id, EventLogger.MODULE_TASK_MAINTENANCE,
                EventLogger.ROW_DELETED, null, null, null);
        PluggableTaskParameterDTO toDelete = dasParameter.find(id);
        toDelete.getTask().getParameters().remove(toDelete);
        // clear the rules cache (just in case this plug-in was rule based)
        PluggableTask.invalidateRuleCache(toDelete.getTask().getId());
        dasParameter.delete(toDelete);
    }

    /** Saves every parameter attached to the DTO. */
    public void updateParameters(PluggableTaskDTO dto) {
        // update the parameters from the dto
        for (PluggableTaskParameterDTO parameter : dto.getParameters()) {
            updateParameter(parameter);
        }
    }

    private void updateParameter(PluggableTaskParameterDTO dto) {
        dto.expandValue();
        dasParameter.save(dto);
        // clear the rules cache (just in case this plug-in was rule based)
        PluggableTask.invalidateRuleCache(dto.getTask().getId());
    }

    /**
     * Instantiates the task implementation class configured for the loaded
     * row and initialises it with the row's parameters.
     *
     * @throws PluggableTaskException if the class cannot be found or
     *         instantiated, or does not extend PluggableTask
     */
    @SuppressWarnings("unchecked")
    public T instantiateTask()
            throws PluggableTaskException {
        PluggableTaskDTO localTask = getDTO();
        String fqn = localTask.getType().getClassName();
        T result;
        try {
            // The concrete type is only known at run time from the configured
            // class name, so the unchecked cast below is unavoidable.
            Class<?> taskClazz = Class.forName(fqn);
            result = (T) taskClazz.newInstance();
        } catch (ClassCastException e) {
            throw new PluggableTaskException("Task id: " + pluggableTask.getId()
                    + ": implementation class does not implements PaymentTask:"
                    + fqn, e);
        } catch (InstantiationException e) {
            throw new PluggableTaskException("Task id: " + pluggableTask.getId()
                    + ": Can not instantiate : " + fqn, e);
        } catch (IllegalAccessException e) {
            throw new PluggableTaskException("Task id: " + pluggableTask.getId()
                    + ": Can not find public constructor for : " + fqn, e);
        } catch (ClassNotFoundException e) {
            throw new PluggableTaskException("Task id: " + pluggableTask.getId()
                    + ": Unknown class: " + fqn, e);
        }
        if (result instanceof PluggableTask) {
            PluggableTask pluggable = (PluggableTask) result;
            pluggable.initializeParamters(localTask);
        } else {
            throw new PluggableTaskException("Plug-in has to extend PluggableTask " +
                    pluggableTask.getId());
        }
        return result;
    }

    /**
     * Validates that every required parameter is present and non-blank, and
     * that the processing order is unique within the entity/category.
     *
     * @throws SessionInternalError with per-field messages when invalid
     */
    private void validate(PluggableTaskDTO task) {
        List<ParameterDescription> missingParameters = new ArrayList<ParameterDescription>();
        try {
            // start by getting an instance of this type
            PluggableTask instance = (PluggableTask) PluggableTaskManager.getInstance(
                    task.getType().getClassName(), task.getType().getCategory().getInterfaceName());
            // loop through the descriptions of parameters
            for (ParameterDescription param : instance.getParameterDescriptions()) {
                if (param.isRequired()) {
                    if (task.getParameters() == null || task.getParameters().size() == 0) {
                        missingParameters.add(param);
                    } else {
                        boolean found = false;
                        for (PluggableTaskParameterDTO parameter : task.getParameters()) {
                            if (parameter.getName().equals(param.getName()) && parameter.getStrValue() != null &&
                                    parameter.getStrValue().trim().length() > 0) {
                                found = true;
                                break;
                            }
                        }
                        if (!found) {
                            missingParameters.add(param);
                        }
                    }
                }
            }
        } catch (PluggableTaskException e) {
            LOG.error("Getting instance of plug-in for validation", e);
            throw new SessionInternalError("Validating plug-in");
        }
        if (missingParameters.size() > 0) {
            SessionInternalError exception = new SessionInternalError("Validation of new plug-in");
            String[] messages = new String[missingParameters.size()];
            int f = 0;
            for (ParameterDescription param : missingParameters) {
                messages[f] = "PluggableTaskWS,parameter,plugins.error.required_parameter," + param.getName();
                f++;
            }
            exception.setErrorMessages(messages);
            throw exception;
        }
        // now validate that the processing order is not already taken
        boolean nonUniqueResult = false;
        try {
            PluggableTaskDTO samePlugin = das.findByEntityCategoryOrder(task.getEntityId(), task.getType().getCategory().getId(),
                    task.getProcessingOrder());
            if (samePlugin != null && !samePlugin.getId().equals(task.getId())) {
                nonUniqueResult = true;
            }
        } catch (Exception e) {
            nonUniqueResult = true;
        }
        if (nonUniqueResult) {
            SessionInternalError exception = new SessionInternalError("Validation of new plug-in");
            exception.setErrorMessages(new String[] {
                    "PluggableTaskWS,processingOrder,plugins.error.same_order," + task.getProcessingOrder()});
            throw exception;
        }
    }
}
| rahith/ComtalkA-S | src/java/com/sapienter/jbilling/server/pluggableTask/admin/PluggableTaskBL.java | Java | agpl-3.0 | 10,431 |
<?php
// MySQL connection settings for the "ppdev_mf" site.
// NOTE(review): live credentials are committed to version control here;
// consider loading DB_PASS from the environment or an untracked local file.
define('DB_ADAPTER', 'mysql');
define('DB_HOST', 'localhost');
define('DB_USER', 'root');
define('DB_PASS', 'e3i71BFGRqda3');
define('DB_NAME', 'ppdev_mf');
define('DB_PREFIX', 'pp088_');   // table-name prefix
define('DB_CHARSET', 'utf8');
define('DB_PERSIST', false);     // persistent connections disabled
// Signal successful inclusion to the including script.
return true;
?>
| pjsangat/ppdev | config/config_mf.php | PHP | agpl-3.0 | 295 |
# Join table recording which users did not vote on a motion.
class CreateDidNotVotes < ActiveRecord::Migration[4.2]
  def change
    create_table :did_not_votes do |t|
      t.references :user,   index: true
      t.references :motion, index: true

      t.timestamps
    end
  end
end
| piratas-ar/loomio | db/migrate/20120501012737_create_did_not_votes.rb | Ruby | agpl-3.0 | 277 |
module Isi
  module FreeChat
    # Project-wide load-tracing hook (announces this file is being loaded).
    Isi::db_hello __FILE__, name

    require 'pathname'
    # Directory holding this module's implementation files, derived from the
    # last segment of the module name (FreeChat -> .../free_chat).
    ModuleRootDir = Pathname(__FILE__).dirname + name.split('::').last
    # require all files for this module
    require ModuleRootDir + 'protocol' # this should include everything else
    require ModuleRootDir + 'free_chat_u_i' # except for this

    # Matching load-tracing hook for the end of the file.
    Isi::db_bye __FILE__, name
  end
end
# Copyright 2015-2018 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
from . import mass_reconcile
from . import advanced_reconciliation
| OCA/bank-statement-reconcile | account_mass_reconcile_ref_deep_search/models/__init__.py | Python | agpl-3.0 | 171 |
# == Schema Information
#
# Table name: terms
#
# id :integer not null, primary key
# sourced_id :string
# title :string
# start_at :datetime
# end_at :datetime
# created_at :datetime not null
# updated_at :datetime not null
#
# A school term (OneRoster "academicSession"). Terms are either synchronised
# from a roster SIS (sourced_id present) or created locally, depending on
# SYSTEM_ROSTER_SYNC.
class Term < ApplicationRecord
  has_many :courses, -> { where('courses.enabled = ?', true) }
  validates :end_at, presence: true
  validates :sourced_id, uniqueness: true, allow_nil: true
  validates :start_at, presence: true
  validates :title, presence: true
  validates :title, uniqueness: true
  validate :end_at_is_after_start_at

  # ====================================================================
  # Public Functions
  # ====================================================================

  # Creates/updates terms from OneRoster session hashes, keeping only
  # currently-running sessions (from one month before startDate up to, but
  # excluding, endDate). Returns [{id:, sourced_id:, status:}, ...] for the
  # terms that saved successfully.
  def self.sync_roster(rterms)
    now = Time.zone.now
    rterms.select! { |rt| ((Time.zone.parse(rt['startDate']) - 1.month)...Time.zone.parse(rt['endDate'])).cover? now }
    ids = []
    rterms.each do |rt|
      term = Term.find_or_initialize_by(sourced_id: rt['sourcedId'])
      # NOTE(review): update_attributes is removed in Rails 6.1+; switch to
      # `update` when upgrading.
      if term.update_attributes(title: rt['title'], start_at: rt['startDate'], end_at: rt['endDate'])
        ids.push({id: term.id, sourced_id: term.sourced_id, status: term.status})
      end
    end
    ids
  end

  # Whether the user may create terms. Creation is disabled entirely while
  # SYSTEM_ROSTER_SYNC is in any state other than :on or :off (:suspended).
  def self.creatable?(user_id)
    return false if %i[on off].exclude? SYSTEM_ROSTER_SYNC
    user = User.find user_id
    user.system_staff?
  end

  # A term may be destroyed only when persisted, free of courses, and
  # otherwise updatable by the user.
  def destroyable?(user_id)
    return false if new_record?
    return false unless courses.size.zero?
    updatable? user_id
  end

  # Roster-managed terms are read-only while sync is off, and locally created
  # terms are read-only while sync is on; otherwise same rule as creation.
  def updatable?(user_id)
    return false if SYSTEM_ROSTER_SYNC == :on && sourced_id.blank?
    return false if SYSTEM_ROSTER_SYNC == :off && sourced_id.present?
    Term.creatable? user_id
  end

  # Lifecycle relative to now: 'draft' before start_at, 'archived' from
  # end_at onward, otherwise 'open'.
  def status
    now = Time.zone.now
    if now < start_at
      'draft'
    elsif end_at <= now
      'archived'
    else
      'open'
    end
  end

  # Serialises this term as a OneRoster academicSession hash.
  # (Previously assigned to an unused local before returning.)
  def to_roster_hash
    {
      title: title,
      type: 'term',
      startDate: start_at,
      endDate: end_at,
      schoolYear: end_at.year
    }
  end

  # ====================================================================
  # Private Functions
  # ====================================================================
  private

  # start_at is the inclusive start, end_at the exclusive end; the term must
  # span a positive interval.
  def end_at_is_after_start_at
    if start_at.present? && end_at.present?
      errors.add(:end_at) unless end_at > start_at
    end
  end
end
| lepo-project/lepo | app/models/term.rb | Ruby | agpl-3.0 | 2,652 |
// Persian (fa) translations for the "templateeditor" app UI strings.
// Keys are the English source strings; the trailing argument is the CLDR
// plural-forms rule for Persian.
OC.L10N.register(
    "templateeditor",
    {
    "Could not load template" : "امکان بارگذاری قالب وجود ندارد",
    "Saved" : "ذخیره شد",
    "Reset" : "تنظیم مجدد",
    "An error occurred" : "یک خطا رخ داده است",
    "Sharing email - public link shares (HTML)" : "ایمیل اشتراک گذاری-لینک عمومی اشتراک گذاری(HTML)",
    "Sharing email - public link shares (plain text fallback)" : "ایمیل اشتراک گذاری-لینک عمومی اشتراک گذاری(plain text fallback)",
    "Sharing email (HTML)" : "اشتراکگذاری ایمیل (HTML)",
    "Sharing email (plain text fallback)" : "ایمیل اشتراک گذاری (plain text fallback)",
    "Lost password mail" : "ایمیل فراموش کردن رمز عبور",
    "New user email (HTML)" : "ایمیل کاربری جدید (HTML)",
    "New user email (plain text fallback)" : "ایمیل کاربر جدید (plain text fallback)",
    "Activity notification mail" : "ایمیل هشدار فعالیت",
    "Mail Templates" : "قالبهای ایمیل",
    "Theme" : "تم",
    "Template" : "قالب",
    "Please choose a template" : "لطفا یک قالب انتخاب کنید",
    "Save" : "ذخیره"
},
"nplurals=1; plural=0;");
| jacklicn/owncloud | apps/templateeditor/l10n/fa.js | JavaScript | agpl-3.0 | 1,324 |
# -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import invoice
| Elico-Corp/openerp-7.0 | sale_bom_split_anglo_saxon/__init__.py | Python | agpl-3.0 | 158 |
<?php
/*
* This file is part of the FOSUserBundle package.
*
* (c) FriendsOfSymfony <http://friendsofsymfony.github.com/>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace FOS\UserBundle\Model;
/**
* Interface to be implemented by group managers. This adds an additional level
* of abstraction between your application, and the actual repository.
*
* All changes to groups should happen through this interface.
*
* @author Christophe Coevoet <stof@notk.org>
*/
/**
 * Interface to be implemented by group managers. This adds an additional
 * level of abstraction between your application and the actual repository:
 * all changes to groups should happen through this interface.
 *
 * @author Christophe Coevoet <stof@notk.org>
 */
interface GroupManagerInterface {
    /**
     * Returns an empty (unpersisted) group instance with the given name.
     *
     * @param string $name
     * @return GroupInterface
     */
    function createGroup($name);

    /**
     * Deletes a group from the repository.
     *
     * @param GroupInterface $group
     * @return void
     */
    function deleteGroup(GroupInterface $group);

    /**
     * Finds one group by the given criteria (field => value pairs).
     *
     * @param array $criteria
     * @return GroupInterface the matching group (presumably null when none
     *                        matches -- check the concrete implementation)
     */
    function findGroupBy(array $criteria);

    /**
     * Finds a group by name.
     *
     * @param string $name
     * @return GroupInterface
     */
    function findGroupByName($name);

    /**
     * Returns a collection with all group instances.
     *
     * @return \Traversable
     */
    function findGroups();

    /**
     * Returns the group's fully qualified class name.
     *
     * @return string
     */
    function getClass();

    /**
     * Persists changes to a group.
     *
     * @param GroupInterface $group
     */
    function updateGroup(GroupInterface $group);
}
| lolostates/Lap | vendor/bundles/FOS/UserBundle/Model/GroupManagerInterface.php | PHP | agpl-3.0 | 1,483 |
/*
* StatusBarWidget.java
*
* Copyright (C) 2009-11 by RStudio, Inc.
*
* This program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.workbench.views.source.editors.text.status;
import com.google.gwt.core.client.GWT;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.*;
import org.rstudio.core.client.widget.IsWidgetWithHeight;
public class StatusBarWidget extends Composite
      implements StatusBar, IsWidgetWithHeight
{
   /**
    * Fixed pixel height of the status bar. The original code stored this in a
    * mutable instance field that was only ever assigned once in the
    * constructor; a compile-time constant states the invariant directly.
    */
   private static final int HEIGHT_PIXELS = 16;

   interface Binder extends UiBinder<HorizontalPanel, StatusBarWidget>
   {
   }

   /**
    * Builds the status bar from its UiBinder template and lets one cell
    * absorb all remaining horizontal space.
    */
   public StatusBarWidget()
   {
      Binder binder = GWT.create(Binder.class);
      HorizontalPanel hpanel = binder.createAndBindUi(this);
      hpanel.setVerticalAlignment(HorizontalPanel.ALIGN_TOP);
      // NOTE(review): index 2 is assumed to be the stretchable middle cell of
      // the template -- confirm against the StatusBarWidget.ui.xml layout.
      hpanel.setCellWidth(hpanel.getWidget(2), "100%");
      initWidget(hpanel);
   }

   /** @return the widget's fixed height in pixels */
   public int getHeight()
   {
      return HEIGHT_PIXELS;
   }

   /** @return this composite as a plain Widget */
   public Widget asWidget()
   {
      return this;
   }

   /** @return the status bar element bound to the "position" slot */
   public StatusBarElement getPosition()
   {
      return position_;
   }

   /** @return the status bar element bound to the "function" slot */
   public StatusBarElement getFunction()
   {
      return function_;
   }

   /** @return the status bar element bound to the "language" slot */
   public StatusBarElement getLanguage()
   {
      return language_;
   }

   /**
    * Shows or hides the function indicator together with its icon.
    *
    * @param visible whether the function display should be shown
    */
   public void setFunctionVisible(boolean visible)
   {
      function_.setContentsVisible(visible);
      funcIcon_.setVisible(visible);
   }

   @UiField
   StatusBarElementWidget position_;
   @UiField
   StatusBarElementWidget function_;
   @UiField
   StatusBarElementWidget language_;
   @UiField
   Image funcIcon_;
}
| Sage-Bionetworks/rstudio | src/gwt/src/org/rstudio/studio/client/workbench/views/source/editors/text/status/StatusBarWidget.java | Java | agpl-3.0 | 1,945 |
/*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
// Czech ('cs') localization strings for the CKEditor "specialchar" plugin.
// The values are user-visible translations and must not be altered here.
CKEDITOR.plugins.setLang( 'specialchar', 'cs', {
options: 'Nastavení speciálních znaků',
title: 'Výběr speciálního znaku',
toolbar: 'Vložit speciální znaky'
} );
| astrobin/astrobin | astrobin/static/astrobin/ckeditor/plugins/specialchar/lang/cs.js | JavaScript | agpl-3.0 | 339 |
/*
* Copyright 2011 Witoslaw Koczewsi <wi@koczewski.de>, Artjom Kochtchi
*
* This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero
* General Public License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
* implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
* License for more details.
*
* You should have received a copy of the GNU General Public License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*/
package scrum.client.communication;
import ilarkesto.core.logging.Log;
import ilarkesto.core.time.Tm;
import java.util.LinkedList;
import scrum.client.DataTransferObject;
import scrum.client.core.ApplicationStartedEvent;
import scrum.client.core.ApplicationStartedHandler;
import scrum.client.project.Requirement;
import scrum.client.workspace.BlockCollapsedEvent;
import scrum.client.workspace.BlockCollapsedHandler;
import scrum.client.workspace.BlockExpandedEvent;
import scrum.client.workspace.BlockExpandedHandler;
import com.google.gwt.user.client.Timer;
/**
 * Periodically pings the server to keep the session alive and to measure
 * round-trip latency. The ping interval adapts to activity: it shrinks after
 * fresh server data arrives and grows (up to {@link #MAX_DELAY}) while the
 * client is idle. "Power polling" pins the interval to {@link #MIN_DELAY}
 * while a work-estimation vote is in progress.
 */
public class Pinger extends GPinger implements ServerDataReceivedHandler, BlockExpandedHandler, BlockCollapsedHandler,
		ApplicationStartedHandler {

	private static Log log = Log.get(Pinger.class);

	/** Shortest allowed ping interval in milliseconds. */
	public static final int MIN_DELAY = 1000;

	/** Longest allowed ping interval in milliseconds. */
	public static final int MAX_DELAY = 5000;

	/** Number of most recent round-trip times kept for the average. */
	private static final int PING_SAMPLE_SIZE = 10;

	private Timer timer;
	private int maxDelay = MAX_DELAY;
	private long lastDataReceiveTime = Tm.getCurrentTimeMillis();
	private LinkedList<Long> pingTimes = new LinkedList<Long>();
	private boolean disabled;

	@Override
	public void onApplicationStarted(ApplicationStartedEvent event) {
		timer = new Timer() {

			@Override
			public void run() {
				// Skip this cycle while disabled or while a ping is still in flight.
				if (!disabled && !serviceCaller.containsServiceCall(PingServiceCall.class)) {
					final long start = Tm.getCurrentTimeMillis();
					new PingServiceCall().execute(new Runnable() {

						@Override
						public void run() {
							// Record the round-trip time, keeping only the latest samples.
							long time = Tm.getCurrentTimeMillis() - start;
							pingTimes.add(time);
							if (pingTimes.size() > PING_SAMPLE_SIZE) pingTimes.removeFirst();
						}
					});
				}
				reschedule();
			}
		};
		reschedule();
	}

	public void setDisabled(boolean disabled) {
		this.disabled = disabled;
	}

	public boolean isDisabled() {
		return disabled;
	}

	/** Cancels the ping timer; safe to call more than once. */
	public void shutdown() {
		log.info("Shutting down");
		if (timer == null) return;
		timer.cancel();
		timer = null;
	}

	@Override
	public void onServerDataReceived(ServerDataReceivedEvent event) {
		DataTransferObject data = event.getData();
		if (data.containsEntities()) {
			// Fresh entities indicate activity -> poll faster again.
			lastDataReceiveTime = Tm.getCurrentTimeMillis();
			reschedule();
		}
	}

	@Override
	public void onBlockCollapsed(BlockCollapsedEvent event) {
		deactivatePowerPolling();
	}

	@Override
	public void onBlockExpanded(BlockExpandedEvent event) {
		Object object = event.getObject();
		if (object instanceof Requirement) {
			Requirement requirement = (Requirement) object;
			// Live voting needs near-real-time updates from other participants.
			if (requirement.isWorkEstimationVotingActive()) activatePowerPolling();
		}
	}

	/**
	 * Recomputes the polling interval from the idle time: 15% of the time
	 * since data was last received, clamped to [MIN_DELAY, maxDelay].
	 */
	public void reschedule() {
		if (timer == null) return;
		long idle = Tm.getCurrentTimeMillis() - lastDataReceiveTime;
		idle = (int) (idle * 0.15);
		if (idle < MIN_DELAY) idle = MIN_DELAY;
		if (idle > maxDelay) idle = maxDelay;
		timer.scheduleRepeating((int) idle);
	}

	private void activatePowerPolling() {
		maxDelay = MIN_DELAY;
		log.debug("PowerPolling activated");
	}

	private void deactivatePowerPolling() {
		if (maxDelay == MAX_DELAY) return;
		maxDelay = MAX_DELAY;
		lastDataReceiveTime = Tm.getCurrentTimeMillis();
		log.debug("PowerPolling deactivated");
	}

	/**
	 * @deprecated misspelled; use {@link #getAveragePingTime()}. Kept for
	 *             backward compatibility with existing callers.
	 */
	@Deprecated
	public Long getAvaragePingTime() {
		return getAveragePingTime();
	}

	/**
	 * @return the mean of the recorded round-trip times in milliseconds, or
	 *         <code>null</code> when no ping has completed yet
	 */
	public Long getAveragePingTime() {
		if (pingTimes.isEmpty()) return null;
		long sum = 0;
		for (Long time : pingTimes) {
			sum += time;
		}
		return sum / pingTimes.size();
	}

	/**
	 * @deprecated misspelled; use {@link #getAveragePingTimeMessage()}. Kept
	 *             for backward compatibility with existing callers.
	 */
	@Deprecated
	public String getAvaragePingTimeMessage() {
		return getAveragePingTimeMessage();
	}

	/**
	 * @return a human-readable response-time message, or <code>null</code>
	 *         when no ping has completed yet
	 */
	public String getAveragePingTimeMessage() {
		Long time = getAveragePingTime();
		if (time == null) return null;
		return "Current response time: " + time + " ms.";
	}
}
| MiguelSMendoza/Kunagi | WEB-INF/classes/scrum/client/communication/Pinger.java | Java | agpl-3.0 | 4,177 |
<?php
//============================================================+
// File name : tce_page_timer.php
// Begin : 2004-04-29
// Last Update : 2010-10-05
//
// Description : Display timer (date-time + countdown).
//
// Author: Nicola Asuni
//
// (c) Copyright:
// Nicola Asuni
// Tecnick.com LTD
// www.tecnick.com
// info@tecnick.com
//
// License:
// Copyright (C) 2004-2010 Nicola Asuni - Tecnick.com LTD
// See LICENSE.TXT file for more information.
//============================================================+
/**
* @file
* Display client timer (date-time + countdown).
* @package com.tecnick.tcexam.shared
* @author Nicola Asuni
* @since 2004-04-29
*/
// Read the remaining exam time (seconds) from the request; when absent the
// widget degrades to a plain clock with no countdown and no forced logout.
if (!isset($_REQUEST['examtime'])) {
	$examtime = 0; // remaining exam time in seconds
	$enable_countdown = 'false';
	$timeout_logout = 'false';
} else {
	$examtime = floatval($_REQUEST['examtime']);
	$enable_countdown = 'true';
	if (isset($_REQUEST['timeout_logout']) AND ($_REQUEST['timeout_logout'])) {
		$timeout_logout = 'true';
	} else {
		$timeout_logout = 'false';
	}
}
// NOTE(security): on some server configurations SCRIPT_NAME can embed
// user-supplied path data, so it must be HTML-escaped before being written
// into the form's action attribute to prevent reflected XSS.
echo '<form action="'.htmlspecialchars($_SERVER['SCRIPT_NAME'], ENT_QUOTES).'" id="timerform">'.K_NEWLINE;
echo '<div>'.K_NEWLINE;
//echo '<label for="timer" class="timerlabel">'.$l['w_time'].':</label>'.K_NEWLINE;
echo '<input type="text" name="timer" id="timer" value="" size="29" maxlength="29" title="'.$l['w_clock_timer'].'" readonly="readonly"/>'.K_NEWLINE;
echo ' </div>'.K_NEWLINE;
echo '</form>'.K_NEWLINE;
// Client-side timer: FJ_start_timer() receives the countdown flag, the exam
// start epoch, the localized timeout message, the logout flag and the server
// time in milliseconds (used to synchronize the client clock).
echo '<script src="'.K_PATH_SHARED_JSCRIPTS.'timer.js" type="text/javascript"></script>'.K_NEWLINE;
echo '<script type="text/javascript">'.K_NEWLINE;
echo '//<![CDATA['.K_NEWLINE;
echo 'FJ_start_timer('.$enable_countdown.', '.(time() - $examtime).', \''.addslashes($l['m_exam_end_time']).'\', '.$timeout_logout.', '.(round(microtime(true) * 1000)).');'.K_NEWLINE;
echo '//]]>'.K_NEWLINE;
echo '</script>'.K_NEWLINE;

//============================================================+
// END OF FILE
//============================================================+
| satheesh586/raudra | shared/code/tce_page_timer.php | PHP | agpl-3.0 | 2,069 |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2017.11.06 at 05:19:55 PM ICT
//
package org.opencps.api.dossierlog.model;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="total" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="data" type="{}DossierLogModel" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "total",
    "data"
})
@XmlRootElement(name = "DossierLogResultsModel")
public class DossierLogResultsModel {

    // Total number of matching dossier log records (may be null when unset).
    protected Integer total;
    // Current page of dossier log entries; lazily created, so it is never
    // null once getData() has been called.
    protected List<DossierLogModel> data;

    /**
     * Returns the total record count.
     *
     * @return the count as an {@link Integer}, or {@code null} when unset
     */
    public Integer getTotal() {
        return this.total;
    }

    /**
     * Sets the total record count.
     *
     * @param value the count as an {@link Integer}; may be {@code null}
     */
    public void setTotal(Integer value) {
        this.total = value;
    }

    /**
     * Returns the live, mutable list of {@link DossierLogModel} entries,
     * creating it on first access. JAXB populates this list in place, which
     * is why the class deliberately exposes no setter for it; mutations made
     * through the returned reference are visible inside this object.
     *
     * @return the backing list, never {@code null}
     */
    public List<DossierLogModel> getData() {
        if (this.data == null) {
            this.data = new ArrayList<DossierLogModel>();
        }
        return this.data;
    }

}
| VietOpenCPS/opencps-v2 | modules/backend-api-rest/src/main/java/org/opencps/api/dossierlog/model/DossierLogResultsModel.java | Java | agpl-3.0 | 2,858 |