bugged
stringlengths 4
228k
| fixed
stringlengths 0
96.3M
| __index_level_0__
int64 0
481k
|
|---|---|---|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial picking and moves done. @param partial_datas : Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom @return: Dictionary of values """ if context is None: context = {} else: context = dict(context) res = {} move_obj = self.pool.get('stock.move') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) for pick in self.browse(cr, uid, ids, context=context): new_picking = None new_moves = [] complete, too_many, too_few = [], [], [] move_product_qty = {} for move in pick.move_lines: if move.state in ('done', 'cancel'): continue partial_data = partial_datas.get('move%s'%(move.id), False) assert partial_data, _('Do not Found Partial data of Stock Move Line :%s' %(move.id)) product_qty = partial_data.get('product_qty',0.0) move_product_qty[move.id] = product_qty product_uom = partial_data.get('product_uom',False) product_price = partial_data.get('product_price',0.0) product_currency = partial_data.get('product_currency',False) prodlot_id = partial_data.get('prodlot_id',False) if move.product_qty == product_qty: complete.append(move) elif move.product_qty > product_qty: too_few.append(move) else: too_many.append(move)
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial picking and moves done. @param partial_datas : Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom @return: Dictionary of values """ if context is None: context = {} else: context = dict(context) res = {} move_obj = self.pool.get('stock.move') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) for pick in self.browse(cr, uid, ids, context=context): new_picking = None new_moves = [] complete, too_many, too_few = [], [], [] move_product_qty = {} for move in pick.move_lines: if move.state in ('done', 'cancel'): continue partial_data = partial_datas.get('move%s'%(move.id), False) assert partial_data, _('Do not Found Partial data of Stock Move Line :%s' %(move.id)) product_qty = partial_data.get('product_qty',0.0) move_product_qty[move.id] = product_qty product_uom = partial_data.get('product_uom',False) product_price = partial_data.get('product_price',0.0) product_currency = partial_data.get('product_currency',False) prodlot_id = partial_data.get('prodlot_id',False) if move.product_qty == product_qty: complete.append(move) elif move.product_qty > product_qty: too_few.append(move) else: too_many.append(move)
| 469,100
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial picking and moves done. @param partial_datas : Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom @return: Dictionary of values """ if context is None: context = {} else: context = dict(context) res = {} move_obj = self.pool.get('stock.move') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) for pick in self.browse(cr, uid, ids, context=context): new_picking = None new_moves = [] complete, too_many, too_few = [], [], [] move_product_qty = {} for move in pick.move_lines: if move.state in ('done', 'cancel'): continue partial_data = partial_datas.get('move%s'%(move.id), False) assert partial_data, _('Do not Found Partial data of Stock Move Line :%s' %(move.id)) product_qty = partial_data.get('product_qty',0.0) move_product_qty[move.id] = product_qty product_uom = partial_data.get('product_uom',False) product_price = partial_data.get('product_price',0.0) product_currency = partial_data.get('product_currency',False) prodlot_id = partial_data.get('prodlot_id',False) if move.product_qty == product_qty: complete.append(move) elif move.product_qty > product_qty: too_few.append(move) else: too_many.append(move)
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial picking and moves done. @param partial_datas : Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom @return: Dictionary of values """ if context is None: context = {} else: context = dict(context) res = {} move_obj = self.pool.get('stock.move') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) for pick in self.browse(cr, uid, ids, context=context): new_picking = None new_moves = [] complete, too_many, too_few = [], [], [] move_product_qty = {} for move in pick.move_lines: if move.state in ('done', 'cancel'): continue partial_data = partial_datas.get('move%s'%(move.id), False) assert partial_data, _('Do not Found Partial data of Stock Move Line :%s' %(move.id)) product_qty = partial_data.get('product_qty',0.0) move_product_qty[move.id] = product_qty product_uom = partial_data.get('product_uom',False) product_price = partial_data.get('product_price',0.0) product_currency = partial_data.get('product_currency',False) prodlot_id = partial_data.get('prodlot_id',False) if move.product_qty == product_qty: complete.append(move) elif move.product_qty > product_qty: too_few.append(move) else: too_many.append(move)
| 469,101
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial picking and moves done. @param partial_datas : Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom @return: Dictionary of values """ if context is None: context = {} else: context = dict(context) res = {} move_obj = self.pool.get('stock.move') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) for pick in self.browse(cr, uid, ids, context=context): new_picking = None new_moves = [] complete, too_many, too_few = [], [], [] move_product_qty = {} for move in pick.move_lines: if move.state in ('done', 'cancel'): continue partial_data = partial_datas.get('move%s'%(move.id), False) assert partial_data, _('Do not Found Partial data of Stock Move Line :%s' %(move.id)) product_qty = partial_data.get('product_qty',0.0) move_product_qty[move.id] = product_qty product_uom = partial_data.get('product_uom',False) product_price = partial_data.get('product_price',0.0) product_currency = partial_data.get('product_currency',False) prodlot_id = partial_data.get('prodlot_id',False) if move.product_qty == product_qty: complete.append(move) elif move.product_qty > product_qty: too_few.append(move) else: too_many.append(move)
|
defdo_partial(self,cr,uid,ids,partial_datas,context=None):"""Makespartialpickingandmovesdone.@parampartial_datas:Dictionarycontainingdetailsofpartialpickinglikepartner_id,address_id,delivery_date,deliverymoveswithproduct_id,product_qty,uom@return:Dictionaryofvalues"""ifcontextisNone:context={}else:context=dict(context)res={}move_obj=self.pool.get('stock.move')product_obj=self.pool.get('product.product')currency_obj=self.pool.get('res.currency')users_obj=self.pool.get('res.users')uom_obj=self.pool.get('product.uom')price_type_obj=self.pool.get('product.price.type')sequence_obj=self.pool.get('ir.sequence')wf_service=netsvc.LocalService("workflow")partner_id=partial_datas.get('partner_id',False)address_id=partial_datas.get('address_id',False)delivery_date=partial_datas.get('delivery_date',False)forpickinself.browse(cr,uid,ids,context=context):new_picking=Nonenew_moves=[]complete,too_many,too_few=[],[],[]move_product_qty={}formoveinpick.move_lines:ifmove.statein('done','cancel'):continuepartial_data=partial_datas.get('move%s'%(move.id),False)assertpartial_data,_('DonotFoundPartialdataofStockMoveLine:%s'%(move.id))product_qty=partial_data.get('product_qty',0.0)move_product_qty[move.id]=product_qtyproduct_uom=partial_data.get('product_uom',False)product_price=partial_data.get('product_price',0.0)product_currency=partial_data.get('product_currency',False)prodlot_id=partial_data.get('prodlot_id',False)ifmove.product_qty==product_qty:complete.append(move)elifmove.product_qty>product_qty:too_few.append(move)else:too_many.append(move)
| 469,102
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial picking and moves done. @param partial_datas : Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom @return: Dictionary of values """ if context is None: context = {} else: context = dict(context) res = {} move_obj = self.pool.get('stock.move') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) for pick in self.browse(cr, uid, ids, context=context): new_picking = None new_moves = [] complete, too_many, too_few = [], [], [] move_product_qty = {} for move in pick.move_lines: if move.state in ('done', 'cancel'): continue partial_data = partial_datas.get('move%s'%(move.id), False) assert partial_data, _('Do not Found Partial data of Stock Move Line :%s' %(move.id)) product_qty = partial_data.get('product_qty',0.0) move_product_qty[move.id] = product_qty product_uom = partial_data.get('product_uom',False) product_price = partial_data.get('product_price',0.0) product_currency = partial_data.get('product_currency',False) prodlot_id = partial_data.get('prodlot_id',False) if move.product_qty == product_qty: complete.append(move) elif move.product_qty > product_qty: too_few.append(move) else: too_many.append(move)
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial picking and moves done. @param partial_datas : Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom @return: Dictionary of values """ if context is None: context = {} else: context = dict(context) res = {} move_obj = self.pool.get('stock.move') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) for pick in self.browse(cr, uid, ids, context=context): new_picking = None new_moves = [] complete, too_many, too_few = [], [], [] move_product_qty = {} for move in pick.move_lines: if move.state in ('done', 'cancel'): continue partial_data = partial_datas.get('move%s'%(move.id), False) assert partial_data, _('Do not Found Partial data of Stock Move Line :%s' %(move.id)) product_qty = partial_data.get('product_qty',0.0) move_product_qty[move.id] = product_qty product_uom = partial_data.get('product_uom',False) product_price = partial_data.get('product_price',0.0) product_currency = partial_data.get('product_currency',False) prodlot_id = partial_data.get('prodlot_id',False) if move.product_qty == product_qty: complete.append(move) elif move.product_qty > product_qty: too_few.append(move) else: too_many.append(move)
| 469,103
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial picking and moves done. @param partial_datas : Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom @return: Dictionary of values """ if context is None: context = {} else: context = dict(context) res = {} move_obj = self.pool.get('stock.move') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) for pick in self.browse(cr, uid, ids, context=context): new_picking = None new_moves = [] complete, too_many, too_few = [], [], [] move_product_qty = {} for move in pick.move_lines: if move.state in ('done', 'cancel'): continue partial_data = partial_datas.get('move%s'%(move.id), False) assert partial_data, _('Do not Found Partial data of Stock Move Line :%s' %(move.id)) product_qty = partial_data.get('product_qty',0.0) move_product_qty[move.id] = product_qty product_uom = partial_data.get('product_uom',False) product_price = partial_data.get('product_price',0.0) product_currency = partial_data.get('product_currency',False) prodlot_id = partial_data.get('prodlot_id',False) if move.product_qty == product_qty: complete.append(move) elif move.product_qty > product_qty: too_few.append(move) else: too_many.append(move)
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial picking and moves done. @param partial_datas : Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom @return: Dictionary of values """ if context is None: context = {} else: context = dict(context) res = {} move_obj = self.pool.get('stock.move') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) for pick in self.browse(cr, uid, ids, context=context): new_picking = None new_moves = [] complete, too_many, too_few = [], [], [] move_product_qty = {} for move in pick.move_lines: if move.state in ('done', 'cancel'): continue partial_data = partial_datas.get('move%s'%(move.id), False) assert partial_data, _('Do not Found Partial data of Stock Move Line :%s' %(move.id)) product_qty = partial_data.get('product_qty',0.0) move_product_qty[move.id] = product_qty product_uom = partial_data.get('product_uom',False) product_price = partial_data.get('product_price',0.0) product_currency = partial_data.get('product_currency',False) prodlot_id = partial_data.get('prodlot_id',False) if move.product_qty == product_qty: complete.append(move) elif move.product_qty > product_qty: too_few.append(move) else: too_many.append(move)
| 469,104
|
def _check_product_lot(self, cr, uid, ids): """ Checks whether move is done or not and production lot is assigned to that move. @return: True or False """ for move in self.browse(cr, uid, ids): if move.prodlot_id and move.state == 'done' and (move.prodlot_id.product_id.id != move.product_id.id): return False return True
|
def _check_product_lot(self, cr, uid, ids): """ Checks whether move is done or not and production lot is assigned to that move. @return: True or False """ for move in self.browse(cr, uid, ids): if move.prodlot_id and move.state == 'done' and (move.prodlot_id.product_id.id != move.product_id.id): return False return True
| 469,105
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
| 469,106
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
| 469,107
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
| 469,108
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
| 469,109
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
| 469,110
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
| 469,111
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
| 469,112
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
|
def do_partial(self, cr, uid, ids, partial_datas, context=None): """ Makes partial pickings and moves done. @param partial_datas: Dictionary containing details of partial picking like partner_id, address_id, delivery_date, delivery moves with product_id, product_qty, uom """ res = {} picking_obj = self.pool.get('stock.picking') product_obj = self.pool.get('product.product') currency_obj = self.pool.get('res.currency') users_obj = self.pool.get('res.users') uom_obj = self.pool.get('product.uom') price_type_obj = self.pool.get('product.price.type') sequence_obj = self.pool.get('ir.sequence') wf_service = netsvc.LocalService("workflow") partner_id = partial_datas.get('partner_id', False) address_id = partial_datas.get('address_id', False) delivery_date = partial_datas.get('delivery_date', False) new_moves = []
| 469,113
|
def init(self, cr): tools.drop_view_if_exists(cr, 'report_delivery_products_planned') cr.execute(""" create or replace view report_delivery_products_planned as ( select stock.create_date, min(stock.id) as id, sum(stock.product_qty) as qty, 0 as planned_qty from stock_picking picking inner join stock_move stock on picking.id = stock.picking_id and picking.type = 'out' where stock.create_date between (select cast(date_trunc('week', current_date) as date)) and (select cast(date_trunc('week', current_date) as date) + 7) group by stock.create_date
|
def init(self, cr): tools.drop_view_if_exists(cr, 'report_delivery_products_planned') cr.execute(""" create or replace view report_delivery_products_planned as ( select stock.create_date as date, min(stock.id) as id, sum(stock.product_qty) as qty, 0 as planned_qty from stock_picking picking inner join stock_move stock on picking.id = stock.picking_id and picking.type = 'out' where stock.create_date between (select cast(date_trunc('week', current_date) as date)) and (select cast(date_trunc('week', current_date) as date) + 7) group by stock.create_date
| 469,114
|
def init(self, cr): tools.drop_view_if_exists(cr, 'report_delivery_products_planned') cr.execute(""" create or replace view report_delivery_products_planned as ( select stock.create_date, min(stock.id) as id, sum(stock.product_qty) as qty, 0 as planned_qty from stock_picking picking inner join stock_move stock on picking.id = stock.picking_id and picking.type = 'out' where stock.create_date between (select cast(date_trunc('week', current_date) as date)) and (select cast(date_trunc('week', current_date) as date) + 7) group by stock.create_date
|
def init(self, cr): tools.drop_view_if_exists(cr, 'report_delivery_products_planned') cr.execute(""" create or replace view report_delivery_products_planned as ( select stock.create_date, min(stock.id) as id, sum(stock.product_qty) as qty, 0 as planned_qty from stock_picking picking inner join stock_move stock on picking.id = stock.picking_id and picking.type = 'out' where stock.create_date between (select cast(date_trunc('week', current_date) as date)) and (select cast(date_trunc('week', current_date) as date) + 7) group by stock.create_date
| 469,115
|
def init(self, cr): tools.drop_view_if_exists(cr, 'report_delivery_products_planned') cr.execute(""" create or replace view report_delivery_products_planned as ( select stock.create_date, min(stock.id) as id, sum(stock.product_qty) as qty, 0 as planned_qty from stock_picking picking inner join stock_move stock on picking.id = stock.picking_id and picking.type = 'out' where stock.create_date between (select cast(date_trunc('week', current_date) as date)) and (select cast(date_trunc('week', current_date) as date) + 7) group by stock.create_date
|
def init(self, cr): tools.drop_view_if_exists(cr, 'report_delivery_products_planned') cr.execute(""" create or replace view report_delivery_products_planned as ( select stock.create_date, min(stock.id) as id, sum(stock.product_qty) as qty, 0 as planned_qty from stock_picking picking inner join stock_move stock on picking.id = stock.picking_id and picking.type = 'out' where stock.create_date between (select cast(date_trunc('week', current_date) as date)) and (select cast(date_trunc('week', current_date) as date) + 7) group by stock.create_date
| 469,116
|
def __init__(self, path, parent, context, fil): super(node_file,self).__init__(path, parent,context) self.file_id = fil.id #todo: more info from ir_attachment if fil.file_type and '/' in fil.file_type: self.mimetype = str(fil.file_type) self.create_date = fil.create_date self.write_date = fil.write_date or fil.create_date self.content_length = fil.file_size self.displayname = fil.name # This only propagates the problem to get_data. Better # fix those files to point to the root dir. if fil.parent_id: self.storage_id = fil.parent_id.storage_id.id else: self.storage_id = None
|
def __init__(self, path, parent, context, fil): super(node_file,self).__init__(path, parent,context) self.file_id = fil.id #todo: more info from ir_attachment if fil.file_type and '/' in fil.file_type: self.mimetype = str(fil.file_type) self.create_date = fil.create_date self.write_date = fil.write_date or fil.create_date self.content_length = fil.file_size self.displayname = fil.name # This only propagates the problem to get_data. Better # fix those files to point to the root dir. if fil.parent_id: self.storage_id = fil.parent_id.storage_id.id else: self.storage_id = None
| 469,117
|
def get_dav_props(self, cr): return self._get_dav_props_hlpr(cr, nodes.node_dir, None, 'file_id', self.file_id) #'document.webdav.dir.property', 'dir_id', self.dir_id)
|
def get_dav_props(self, cr): return self._get_dav_props_hlpr(cr, nodes.node_file, None, 'file_id', self.file_id) #'document.webdav.dir.property', 'dir_id', self.dir_id)
| 469,118
|
def get_dav_props(self, cr): return self._get_dav_props_hlpr(cr, nodes.node_dir, 'document.webdav.dir.property', 'dir_id', False)
|
def get_dav_props(self, cr): return self._get_dav_props_hlpr(cr, nodes.node_database, 'document.webdav.dir.property', 'dir_id', False)
| 469,119
|
def get_dav_eprop(self, cr, ns, prop): return self._get_dav_eprop_hlpr(cr, nodes.node_dir, ns, prop, 'document.webdav.dir.property', 'dir_id', False)
|
def get_dav_eprop(self, cr, ns, prop): return self._get_dav_eprop_hlpr(cr, nodes.node_database, ns, prop, 'document.webdav.dir.property', 'dir_id', False)
| 469,120
|
def literal_eval(node_or_string): """literal_eval(expression) -> value Safely evaluate an expression node or a string containing a Python expression. The string or node provided may only consist of the following Python literal structures: strings, numbers, tuples, lists, dicts, booleans, and None.
|
defdef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) literal_eval(node_or_string):def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) """literal_eval(expression)def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) ->def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) valuedef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) Safelydef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) evaluatedef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) andef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) expressiondef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) nodedef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) ordef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) adef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name 
in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) stringdef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) containingdef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) adef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) Pythondef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) expression.def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) Thedef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) stringdef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) ordef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) nodedef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) provideddef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) maydef _import(name, 
globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) onlydef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) consistdef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) ofdef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) thedef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) followingdef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) Pythondef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) literaldef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) structures:def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) strings,def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) numbers,def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) tuples,def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, 
globals, locals, level) raise ImportError(name) lists,def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) dicts,def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) booleans,def _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) anddef _import(name, globals={}, locals={}, fromlist=[], level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) None.
| 469,121
|
def _set_previous_todo(self, cr, uid, state): """ lookup the previous (which is still the next at this point) ir.actions.todo, set it to whatever state was provided.
|
def _set_previous_todo(self, cr, uid, state): """ lookup the previous (which is still the next at this point) ir.actions.todo, set it to whatever state was provided.
| 469,122
|
def next(self, cr, uid, ids, context=None): """ Returns the next action to execute execute (using the default sort order) """ return self._next(cr, uid)
|
def next(self, cr, uid, ids, context=None): """ Returns the next todo action to execute (using the default sort order) """ return self._next(cr, uid)
| 469,123
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) if not domain: domain = [] domain2 = domain + [('calendar_collection','=', False)] res = super(node_database, self)._child_get(cr, name=name, parent_id=parent_id, domain=domain2) where = [('parent_id','=',parent_id)] domain2 = domain + [('calendar_collection','=', True)] if name: where.append(('name','=',name)) if domain2: where += domain2
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) if not domain: domain = [] domain2 = domain + [('calendar_collection','=', False)] res = super(node_database, self)._child_get(cr, name=name, parent_id=parent_id, domain=domain2) where = [('parent_id','=',parent_id)] domain2 = domain + [('calendar_collection','=', True)] if name: where.append(('name','=',name)) if domain2: where += domain2
| 469,124
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) if not domain: domain = [] domain2 = domain + [('calendar_collection','=', False)] res = super(node_database, self)._child_get(cr, name=name, parent_id=parent_id, domain=domain2) where = [('parent_id','=',parent_id)] domain2 = domain + [('calendar_collection','=', True)] if name: where.append(('name','=',name)) if domain2: where += domain2
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) if not domain: domain = [] domain2 = domain + [('calendar_collection','=', False)] res = super(node_database, self)._child_get(cr, name=name, parent_id=parent_id, domain=domain2) where = [('parent_id','=',parent_id)] domain2 = domain + [('calendar_collection','=', True)] if name: where.append(('name','=',name)) if domain2: where += domain2
| 469,125
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) if not domain: domain = [] domain2 = domain + [('calendar_collection','=', False)] res = super(node_database, self)._child_get(cr, name=name, parent_id=parent_id, domain=domain2) where = [('parent_id','=',parent_id)] domain2 = domain + [('calendar_collection','=', True)] if name: where.append(('name','=',name)) if domain2: where += domain2
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) if not domain: domain = [] domain2 = domain + [('calendar_collection','=', False)] res = super(node_database, self)._child_get(cr, name=name, parent_id=parent_id, domain=domain2) where = [('parent_id','=',parent_id)] domain2 = domain + [('calendar_collection','=', True)] if name: where.append(('name','=',name)) if domain2: where += domain2
| 469,126
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) if not domain: domain = [] domain2 = domain + [('calendar_collection','=', False)] res = super(node_database, self)._child_get(cr, name=name, parent_id=parent_id, domain=domain2) where = [('parent_id','=',parent_id)] domain2 = domain + [('calendar_collection','=', True)] if name: where.append(('name','=',name)) if domain2: where += domain2
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) if not domain: domain = [] domain2 = domain + [('calendar_collection','=', False)] res = super(node_database, self)._child_get(cr, name=name, parent_id=parent_id, domain=domain2) where = [('parent_id','=',parent_id)] domain2 = domain + [('calendar_collection','=', True)] if name: where.append(('name','=',name)) if domain2: where += domain2
| 469,127
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) if not domain: domain = [] domain2 = domain + [('calendar_collection','=', False)] res = super(node_database, self)._child_get(cr, name=name, parent_id=parent_id, domain=domain2) where = [('parent_id','=',parent_id)] domain2 = domain + [('calendar_collection','=', True)] if name: where.append(('name','=',name)) if domain2: where += domain2
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) if not domain: domain = [] domain2 = domain + [('calendar_collection','=', False)] res = super(node_database, self)._child_get(cr, name=name, parent_id=parent_id, domain=domain2) where = [('parent_id','=',parent_id)] domain2 = domain + [('calendar_collection','=', True)] if name: where.append(('name','=',name)) if domain2: where += domain2
| 469,128
|
def get_dav_props(self, cr): return self.PROPS
|
def get_dav_props(self, cr): return self.PROPS
| 469,129
|
def get_dav_eprop(self,cr, ns, propname): if self.M_NS.has_key(ns): prefix = self.M_NS[ns] else: print "No namespace:",ns, "( for prop:", propname,")" return None
|
def get_dav_eprop(self,cr, ns, propname): if self.M_NS.has_key(ns): prefix = self.M_NS[ns] else: print "No namespace:",ns, "( for prop:", propname,")" return None
| 469,130
|
def _file_get(self,cr, nodename=False): return []
|
def _file_get(self,cr, nodename=False): return []
| 469,131
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [('collection_id','=',self.dir_id)] ext = False if name: res = name.split('.ics') if len(res) > 1: name = res[0] ext = '.ics' if name: where.append(('name','=',name)) if not domain: domain = [] where = where + domain fil_obj = dirobj.pool.get('basic.calendar') ids = fil_obj.search(cr,uid,where,context=ctx) res = [] for calender in fil_obj.browse(cr, uid, ids, context=ctx): if not ext: res.append(node_calendar(calender.name, self, self.context, calender)) else: res.append(res_node_calendar(name, self, self.context, calender)) return res
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [('collection_id','=',self.dir_id)] ext = False if name: res = name.split('.ics') if len(res) > 1: name = res[0] ext = '.ics' if name: where.append(('name','=',name)) if not domain: domain = [] where = where + domain fil_obj = dirobj.pool.get('basic.calendar') ids = fil_obj.search(cr,uid,where,context=ctx) res = [] for calender in fil_obj.browse(cr, uid, ids, context=ctx): if not ext: res.append(node_calendar(calender.name, self, self.context, calender)) else: res.append(res_node_calendar(name, self, self.context, calender)) return res
| 469,132
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [('collection_id','=',self.dir_id)] ext = False if name: res = name.split('.ics') if len(res) > 1: name = res[0] ext = '.ics' if name: where.append(('name','=',name)) if not domain: domain = [] where = where + domain fil_obj = dirobj.pool.get('basic.calendar') ids = fil_obj.search(cr,uid,where,context=ctx) res = [] for calender in fil_obj.browse(cr, uid, ids, context=ctx): if not ext: res.append(node_calendar(calender.name, self, self.context, calender)) else: res.append(res_node_calendar(name, self, self.context, calender)) return res
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [('collection_id','=',self.dir_id)] ext = False if name: res = name.split('.ics') if len(res) > 1: name = res[0] ext = '.ics' if name: where.append(('name','=',name)) if not domain: domain = [] where = where + domain fil_obj = dirobj.pool.get('basic.calendar') ids = fil_obj.search(cr,uid,where,context=ctx) res = [] for calender in fil_obj.browse(cr, uid, ids, context=ctx): if not ext: res.append(node_calendar(calender.name, self, self.context, calender)) else: res.append(res_node_calendar(name, self, self.context, calender)) return res
| 469,133
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [('collection_id','=',self.dir_id)] ext = False if name: res = name.split('.ics') if len(res) > 1: name = res[0] ext = '.ics' if name: where.append(('name','=',name)) if not domain: domain = [] where = where + domain fil_obj = dirobj.pool.get('basic.calendar') ids = fil_obj.search(cr,uid,where,context=ctx) res = [] for calender in fil_obj.browse(cr, uid, ids, context=ctx): if not ext: res.append(node_calendar(calender.name, self, self.context, calender)) else: res.append(res_node_calendar(name, self, self.context, calender)) return res
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [('collection_id','=',self.dir_id)] ext = False if name: res = name.split('.ics') if len(res) > 1: name = res[0] ext = '.ics' if name: where.append(('name','=',name)) if not domain: domain = [] where = where + domain fil_obj = dirobj.pool.get('basic.calendar') ids = fil_obj.search(cr,uid,where,context=ctx) res = [] for calender in fil_obj.browse(cr, uid, ids, context=ctx): if not ext: res.append(node_calendar(calender.name, self, self.context, calender)) else: res.append(res_node_calendar(name, self, self.context, calender)) return res
| 469,134
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [('collection_id','=',self.dir_id)] ext = False if name: res = name.split('.ics') if len(res) > 1: name = res[0] ext = '.ics' if name: where.append(('name','=',name)) if not domain: domain = [] where = where + domain fil_obj = dirobj.pool.get('basic.calendar') ids = fil_obj.search(cr,uid,where,context=ctx) res = [] for calender in fil_obj.browse(cr, uid, ids, context=ctx): if not ext: res.append(node_calendar(calender.name, self, self.context, calender)) else: res.append(res_node_calendar(name, self, self.context, calender)) return res
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [('collection_id','=',self.dir_id)] ext = False if name: res = name.split('.ics') if len(res) > 1: name = res[0] ext = '.ics' if name: where.append(('name','=',name)) if not domain: domain = [] where = where + domain fil_obj = dirobj.pool.get('basic.calendar') ids = fil_obj.search(cr,uid,where,context=ctx) res = [] for calender in fil_obj.browse(cr, uid, ids, context=ctx): if not ext: res.append(node_calendar(calender.name, self, self.context, calender)) else: res.append(res_node_calendar(name, self, self.context, calender)) return res
| 469,135
|
def _get_dav_owner(self, cr): return False
|
def _get_dav_owner(self, cr): return False
| 469,136
|
def _get_dav_getctag(self, cr): result = self.get_etag(cr) return str(result)
|
def _get_dav_getctag(self, cr): result = self.get_etag(cr) return str(result)
| 469,137
|
def _get_dav_getctag(self, cr): result = self.get_etag(cr) return str(result)
|
def _get_dav_getctag(self, cr): result = self.get_etag(cr) return str(result)
| 469,138
|
def _get_dav_getctag(self, cr): result = self.get_etag(cr) return str(result)
|
def _get_dav_getctag(self, cr): result = self.get_etag(cr) return str(result)
| 469,139
|
def _get_dav_getctag(self, cr): result = self._get_ttag(cr) + ':' + str(time.time()) return str(result)
|
def _get_dav_getctag(self, cr): result = self._get_ttag(cr) + ':' + str(time.time()) return str(result)
| 469,140
|
def _get_dav_getctag(self, cr): result = self._get_ttag(cr) + ':' + str(time.time()) return str(result)
|
def _get_dav_getctag(self, cr): result = self._get_ttag(cr) + ':' + str(time.time()) return str(result)
| 469,141
|
def match_dav_eprop(self, cr, match, ns, prop): if ns == "DAV:" and prop == "getetag": dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() tem, dav_time = tuple(match.split(':')) model, res_id = tuple(tem.split('_')) model_obj = dirobj.pool.get(model) model = model_obj.browse(cr, uid, res_id, context=ctx) write_time = model.write_date or model.create_date wtime = time.mktime(time.strptime(write_time,'%Y-%m-%d %H:%M:%S')) if float(dav_time) == float(wtime): return True return False res = super(node_calendar, self).match_dav_eprop(cr, match, ns, prop) return res
|
def match_dav_eprop(self, cr, match, ns, prop): if ns == "DAV:" and prop == "getetag": dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() tem, dav_time = tuple(match.split(':')) model, res_id = tuple(tem.split('_')) model_obj = dirobj.pool.get(model) model = model_obj.browse(cr, uid, res_id, context=ctx) write_time = model.write_date or model.create_date wtime = time.mktime(time.strptime(write_time,'%Y-%m-%d %H:%M:%S')) if float(dav_time) == float(wtime): return True return False res = super(node_calendar, self).match_dav_eprop(cr, match, ns, prop) return res
| 469,142
|
def match_dav_eprop(self, cr, match, ns, prop): if ns == "DAV:" and prop == "getetag": dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() tem, dav_time = tuple(match.split(':')) model, res_id = tuple(tem.split('_')) model_obj = dirobj.pool.get(model) model = model_obj.browse(cr, uid, res_id, context=ctx) write_time = model.write_date or model.create_date wtime = time.mktime(time.strptime(write_time,'%Y-%m-%d %H:%M:%S')) if float(dav_time) == float(wtime): return True return False res = super(node_calendar, self).match_dav_eprop(cr, match, ns, prop) return res
|
def match_dav_eprop(self, cr, match, ns, prop): if ns == "DAV:" and prop == "getetag": dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() tem, dav_time = tuple(match.split(':')) model, res_id = tuple(tem.split('_')) model_obj = dirobj.pool.get(model) model = model_obj.browse(cr, uid, res_id, context=ctx) write_time = model.write_date or model.create_date wtime = time.mktime(time.strptime(write_time,'%Y-%m-%d %H:%M:%S')) if float(dav_time) == float(wtime): return True return False res = super(node_calendar, self).match_dav_eprop(cr, match, ns, prop) return res
| 469,143
|
def match_dav_eprop(self, cr, match, ns, prop): if ns == "DAV:" and prop == "getetag": dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() tem, dav_time = tuple(match.split(':')) model, res_id = tuple(tem.split('_')) model_obj = dirobj.pool.get(model) model = model_obj.browse(cr, uid, res_id, context=ctx) write_time = model.write_date or model.create_date wtime = time.mktime(time.strptime(write_time,'%Y-%m-%d %H:%M:%S')) if float(dav_time) == float(wtime): return True return False res = super(node_calendar, self).match_dav_eprop(cr, match, ns, prop) return res
|
def match_dav_eprop(self, cr, match, ns, prop): if ns == "DAV:" and prop == "getetag": dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() tem, dav_time = tuple(match.split(':')) model, res_id = tuple(tem.split('_')) model_obj = dirobj.pool.get(model) model = model_obj.browse(cr, uid, res_id, context=ctx) write_time = model.write_date or model.create_date wtime = time.mktime(time.strptime(write_time,'%Y-%m-%d %H:%M:%S')) if float(dav_time) == float(wtime): return True return False res = super(node_calendar, self).match_dav_eprop(cr, match, ns, prop) return res
| 469,144
|
def get_domain(self, cr, filters):
    """Translate a CalDAV REPORT filter DOM node into an OpenERP domain.

    Two report types are supported:

    * ``calendar-query``: walks the ``comp-filter`` hierarchy and, when a
      VCALENDAR/VEVENT or VCALENDAR/VTODO filter is found, restricts on
      the line ``type`` ('vevent' or 'vtodo').
    * ``calendar-multiget``: collects the calendar names out of the
      ``href`` children (second-to-last URI path segment) and restricts
      on those names.

    :param cr: database cursor (unused here; kept for API uniformity)
    :param filters: parsed XML DOM element of the REPORT body, or falsy
    :return: an OpenERP domain (list of tuples); ``[]`` when no filter
             applies
    """
    res = []
    dirobj = self.context._dirobj
    uid = self.context.uid
    ctx = self.context.context.copy()
    ctx.update(self.dctx)
    calendar_obj = dirobj.pool.get('basic.calendar')
    if not filters:
        return res
    if filters.localName == 'calendar-query':
        res = []
        for filter_child in filters.childNodes:
            if filter_child.nodeType == filter_child.TEXT_NODE:
                continue
            if filter_child.localName == 'filter':
                for vcalendar_filter in filter_child.childNodes:
                    if vcalendar_filter.nodeType == vcalendar_filter.TEXT_NODE:
                        continue
                    if vcalendar_filter.localName == 'comp-filter':
                        if vcalendar_filter.getAttribute('name') == 'VCALENDAR':
                            for vevent_filter in vcalendar_filter.childNodes:
                                if vevent_filter.nodeType == vevent_filter.TEXT_NODE:
                                    continue
                                if vevent_filter.localName == 'comp-filter':
                                    if vevent_filter.getAttribute('name') == 'VEVENT':
                                        res = [('type', '=', 'vevent')]
                                    if vevent_filter.getAttribute('name') == 'VTODO':
                                        res = [('type', '=', 'vtodo')]
        return res
    elif filters.localName == 'calendar-multiget':
        names = []
        for filter_child in filters.childNodes:
            if filter_child.nodeType == filter_child.TEXT_NODE:
                continue
            if filter_child.localName == 'href':
                if not filter_child.firstChild:
                    continue
                uri = filter_child.firstChild.data
                caluri = uri.split('/')
                if len(caluri):
                    # second-to-last path segment is the calendar name
                    caluri = caluri[-2]
                    if caluri not in names:
                        names.append(caluri)
        res = [('name', 'in', names)]
        return res
    return res
| 469,151
|
def child(self, cr, name, domain=None):
    """Return the first child node matching *name*/*domain*, or None.

    Thin convenience wrapper around :meth:`_child_get`.
    """
    matches = self._child_get(cr, name, domain=domain)
    if matches:
        return matches[0]
    return None
| 469,153
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [] if name: where.append(('id','=',int(name))) if not domain: domain = [] #for opr1, opt, opr2 in domain: # if opr1 == 'type' and opr2 != self.cal_type: # return []
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [] if name: where.append(('id','=',int(name))) if not domain: domain = [] #for opr1, opt, opr2 in domain: # if opr1 == 'type' and opr2 != self.cal_type: # return []
| 469,154
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [] if name: where.append(('id','=',int(name))) if not domain: domain = [] #for opr1, opt, opr2 in domain: # if opr1 == 'type' and opr2 != self.cal_type: # return []
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [] if name: where.append(('id','=',int(name))) if not domain: domain = [] #for opr1, opt, opr2 in domain: # if opr1 == 'type' and opr2 != self.cal_type: # return []
| 469,155
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [] if name: where.append(('id','=',int(name))) if not domain: domain = [] #for opr1, opt, opr2 in domain: # if opr1 == 'type' and opr2 != self.cal_type: # return []
|
def _child_get(self, cr, name=False, parent_id=False, domain=None): dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) where = [] if name: where.append(('id','=',int(name))) if not domain: domain = [] #for opr1, opt, opr2 in domain: # if opr1 == 'type' and opr2 != self.cal_type: # return []
| 469,156
|
def get_dav_eprop(self, cr, ns, propname):
    """Resolve an extra WebDAV property to a handler-method name.

    Maps the XML namespace *ns* through ``self.M_NS`` to a method
    prefix, then checks for a ``<prefix>_<propname>`` attribute on
    ``self`` (dashes in *propname* become underscores).

    NOTE(review): the visible portion returns None for unknown
    namespaces/attributes and falls off the end otherwise; the actual
    dispatch call appears truncated in this dump -- confirm against the
    full source.
    """
    # ``in`` instead of the deprecated dict.has_key()
    if ns in self.M_NS:
        prefix = self.M_NS[ns]
    else:
        # Unknown namespace: report it and bail out.
        print("No namespace: %s ( for prop: %s )" % (ns, propname))
        return None
    propname = propname.replace('-', '_')
    mname = prefix + "_" + propname
    if not hasattr(self, mname):
        return None
| 469,158
|
def create_child(self, cr, path, data):
    """API function to create a child file object and node.

    Delegates to ``set_data``; *path* is not used by this node type.
    Return the node_* created.
    """
    return self.set_data(cr, data)
| 469,159
|
def get_data_len(self, cr, fil_obj=None):
    """Return the cached content length of this node.

    :param fil_obj: unused; kept for API uniformity with other nodes
    """
    return self.content_length
| 469,161
|
def _get_ttag(self,cr): return 'calendar-%d' % (self.calendar_id,)
|
def _get_ttag(self,cr): return 'calendar-%d' % (self.calendar_id,)
| 469,162
|
def _get_wtag(self, cr): """ Return the modification time as a unique, compact string """ if self.write_date: wtime = time.mktime(time.strptime(self.write_date, '%Y-%m-%d %H:%M:%S')) else: wtime = time.time() return str(wtime)
|
def _get_wtag(self, cr): """ Return the modification time as a unique, compact string """ if self.write_date: wtime = time.mktime(time.strptime(self.write_date, '%Y-%m-%d %H:%M:%S')) else: wtime = time.time() return str(wtime)
| 469,163
|
def rmcol(self, cr):
    """Removing this collection over WebDAV is not allowed."""
    return False
| 469,165
|
def __init__(self, path, parent, context, res_obj, res_model=None, res_id=None):
    """Node representing one iCalendar resource under a calendar.

    Dates and the calendar id are inherited from *parent*, then
    overridden from *res_obj* (the browse record backing this node)
    when one is given.

    NOTE(review): *res_model* and *res_id* are accepted but never
    stored in the visible portion, while other methods read
    ``self.model``/``self.res_id`` -- this definition may be truncated
    in the dump; confirm against the full source.
    """
    super(res_node_calendar, self).__init__(path, parent, context)
    self.mimetype = 'text/calendar'
    self.create_date = parent.create_date
    self.write_date = parent.write_date or parent.create_date
    # Inherit the calendar id from the parent collection when present.
    self.calendar_id = hasattr(parent, 'calendar_id') and parent.calendar_id or False
    if res_obj:
        if not self.calendar_id:
            self.calendar_id = res_obj.id
        self.create_date = res_obj.create_date
        self.write_date = res_obj.write_date or res_obj.create_date
        self.displayname = res_obj.name
| 469,168
|
def open(self, cr, mode=False):
    """Return a file-like object wrapping this node's data.

    Collection and database nodes cannot be opened as files and yield
    ``False``.

    NOTE(review): relies on the Python 2 ``StringIO`` module being
    imported at file level -- confirm.
    """
    uid = self.context.uid
    if self.type in ('collection', 'database'):
        return False
    s = StringIO.StringIO(self.get_data(cr))
    # Some callers inspect .name to find the owning node.
    s.name = self
    return s
| 469,171
|
def get_dav_eprop(self, cr, ns, propname):
    """Resolve an extra WebDAV property to a handler-method name.

    Maps the XML namespace *ns* through ``self.M_NS`` to a method
    prefix, then checks for a ``<prefix>_<propname>`` attribute on
    ``self`` (dashes in *propname* become underscores).

    NOTE(review): the visible portion returns None for unknown
    namespaces/attributes and falls off the end otherwise; the actual
    dispatch call appears truncated in this dump -- confirm against the
    full source.
    """
    # ``in`` instead of the deprecated dict.has_key()
    if ns in self.M_NS:
        prefix = self.M_NS[ns]
    else:
        # Unknown namespace: report it and bail out.
        print("No namespace: %s ( for prop: %s )" % (ns, propname))
        return None
    propname = propname.replace('-', '_')
    mname = prefix + "_" + propname
    if not hasattr(self, mname):
        return None
| 469,174
|
def get_data(self, cr, fil_obj=None):
    """Export this node's calendar content via basic.calendar.export_cal.

    :param fil_obj: unused; kept for API uniformity with other nodes
    :return: whatever ``export_cal`` produces for this calendar
    """
    uid = self.context.uid
    calendar_obj = self.context._dirobj.pool.get('basic.calendar')
    context = self.context.context.copy()
    # export_cal needs to know which model/record this node points at.
    context.update({'model': self.model, 'res_id': self.res_id})
    res = calendar_obj.export_cal(cr, uid, [self.calendar_id], context=context)
    return res
| 469,176
|
def _get_ttag(self,cr): res = False if self.model and self.res_id: res = '%s_%d' % (self.model, self.res_id) elif self.calendar_id: res = '%d' % (self.calendar_id) return res
|
def _get_ttag(self,cr): res = False if self.model and self.res_id: res = '%s_%d' % (self.model, self.res_id) elif self.calendar_id: res = '%d' % (self.calendar_id) return res
| 469,177
|
def _get_caldav_calendar_data(self, cr): return self.get_data(cr)
|
def _get_caldav_calendar_data(self, cr): return self.get_data(cr)
| 469,178
|
def _get_caldav_calendar_description(self, cr): uid = self.context.uid calendar_obj = self.context._dirobj.pool.get('basic.calendar') ctx = self.context.context.copy() ctx.update(self.dctx) calendar = calendar_obj.browse(cr, uid, self.calendar_id, context=ctx) return calendar.description
|
def _get_caldav_calendar_description(self, cr): uid = self.context.uid calendar_obj = self.context._dirobj.pool.get('basic.calendar') ctx = self.context.context.copy() ctx.update(self.dctx) calendar = calendar_obj.browse(cr, uid, self.calendar_id, context=ctx) return calendar.description
| 469,179
|
def _get_caldav_calendar_description(self, cr): uid = self.context.uid calendar_obj = self.context._dirobj.pool.get('basic.calendar') ctx = self.context.context.copy() ctx.update(self.dctx) calendar = calendar_obj.browse(cr, uid, self.calendar_id, context=ctx) return calendar.description
|
def _get_caldav_calendar_description(self, cr): uid = self.context.uid calendar_obj = self.context._dirobj.pool.get('basic.calendar') ctx = self.context.context.copy() ctx.update(self.dctx) calendar = calendar_obj.browse(cr, uid, self.calendar_id, context=ctx) return calendar.description
| 469,180
|
def _get_caldav_calendar_home_set(self, cr):
    """WebDAV property: D:href element pointing at the calendar's home collection.

    Builds ``/<dbname>/<collection name>`` and URL-quotes it.

    NOTE(review): ``urllib.quote`` is Python 2 only -- confirm the
    runtime before porting.
    """
    import xml.dom.minidom
    import urllib
    uid = self.context.uid
    ctx = self.context.context.copy()
    ctx.update(self.dctx)
    doc = xml.dom.minidom.getDOMImplementation().createDocument(None, 'href', None)
    calendar_obj = self.context._dirobj.pool.get('basic.calendar')
    calendar = calendar_obj.browse(cr, uid, self.calendar_id, context=ctx)
    huri = doc.createTextNode(urllib.quote('/%s/%s' % (cr.dbname, calendar.collection_id.name)))
    href = doc.documentElement
    href.tagName = 'D:href'
    href.appendChild(huri)
    return href
| 469,181
|
def _get_caldav_calendar_user_address_set(self, cr): import xml.dom.minidom dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) user_obj = self.context._dirobj.pool.get('res.users') user = user_obj.browse(cr, uid, uid, context=ctx) doc = xml.dom.minidom.getDOMImplementation().createDocument(None, 'href', None) href = doc.documentElement href.tagName = 'D:href' huri = doc.createTextNode('MAILTO:' + user.email) href.appendChild(huri) return href
|
def _get_caldav_calendar_user_address_set(self, cr): import xml.dom.minidom dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) user_obj = self.context._dirobj.pool.get('res.users') user = user_obj.browse(cr, uid, uid, context=ctx) doc = xml.dom.minidom.getDOMImplementation().createDocument(None, 'href', None) href = doc.documentElement href.tagName = 'D:href' huri = doc.createTextNode('MAILTO:' + user.email) href.appendChild(huri) return href
| 469,182
|
def _get_caldav_schedule_inbox_URL(self, cr): import xml.dom.minidom import urllib uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) calendar_obj = self.context._dirobj.pool.get('basic.calendar') calendar = calendar_obj.browse(cr, uid, self.calendar_id, context=ctx) res = '%s/%s' %(calendar.name, calendar.collection_id.name) doc = xml.dom.minidom.getDOMImplementation().createDocument(None, 'href', None) href = doc.documentElement href.tagName = 'D:href' huri = doc.createTextNode(urllib.quote('/%s/%s' % (cr.dbname, res))) href.appendChild(huri) return href
|
def _get_caldav_schedule_inbox_URL(self, cr): import xml.dom.minidom import urllib uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) calendar_obj = self.context._dirobj.pool.get('basic.calendar') calendar = calendar_obj.browse(cr, uid, self.calendar_id, context=ctx) res = '%s/%s' %(calendar.name, calendar.collection_id.name) doc = xml.dom.minidom.getDOMImplementation().createDocument(None, 'href', None) href = doc.documentElement href.tagName = 'D:href' huri = doc.createTextNode(urllib.quote('/%s/%s' % (cr.dbname, res))) href.appendChild(huri) return href
| 469,183
|
def rm(self, cr): uid = self.context.uid res = False if self.type in ('collection','database'): return False if self.model and self.res_id: document_obj = self.context._dirobj.pool.get(self.model) if document_obj: res = False #res = document_obj.unlink(cr, uid, [self.res_id]) #TOFIX return res
|
def rm(self, cr): uid = self.context.uid res = False if self.type in ('collection','database'): return False if self.model and self.res_id: document_obj = self.context._dirobj.pool.get(self.model) if document_obj: res = False #res = document_obj.unlink(cr, uid, [self.res_id]) #TOFIX return res
| 469,184
|
def rm(self, cr): uid = self.context.uid res = False if self.type in ('collection','database'): return False if self.model and self.res_id: document_obj = self.context._dirobj.pool.get(self.model) if document_obj: res = False #res = document_obj.unlink(cr, uid, [self.res_id]) #TOFIX return res
|
def rm(self, cr): uid = self.context.uid res = False if self.type in ('collection','database'): return False if self.model and self.res_id: document_obj = self.context._dirobj.pool.get(self.model) if document_obj: res = False #res = document_obj.unlink(cr, uid, [self.res_id]) #TOFIX return res
| 469,185
|
def _get_caldav_schedule_outbox_URL(self, cr): return self._get_caldav_schedule_inbox_URL(cr)
|
def _get_caldav_schedule_outbox_URL(self, cr): return self._get_caldav_schedule_inbox_URL(cr)
| 469,186
|
def action_move_create(self, cr, uid, ids, *args): """Creates invoice related analytics and financial move lines""" ait_obj = self.pool.get('account.invoice.tax') cur_obj = self.pool.get('res.currency') context = {} for inv in self.browse(cr, uid, ids): if not inv.journal_id.sequence_id: raise osv.except_osv(_('Error !'), _('Please define sequence on invoice journal')) if not inv.invoice_line: raise osv.except_osv(_('No Invoice Lines !'), _('Please create some invoice lines.')) if inv.move_id: continue
|
def action_move_create(self, cr, uid, ids, *args): """Creates invoice related analytics and financial move lines""" ait_obj = self.pool.get('account.invoice.tax') cur_obj = self.pool.get('res.currency') context = {} for inv in self.browse(cr, uid, ids): if not inv.journal_id.sequence_id: raise osv.except_osv(_('Error !'), _('Please define sequence on invoice journal')) if not inv.invoice_line: raise osv.except_osv(_('No Invoice Lines !'), _('Please create some invoice lines.')) if inv.move_id: continue
| 469,187
|
def get_fiscalyear(self,form): print "formmmmmmmmmm", form return pooler.get_pool(self.cr.dbname).get('account.fiscalyear').browse(self.cr,self.uid,form['fiscalyear_id']).name
|
def get_fiscalyear(self,form): return pooler.get_pool(self.cr.dbname).get('account.fiscalyear').browse(self.cr,self.uid,form['fiscalyear_id']).name
| 469,188
|
def get_start_period(self, form): return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_from']).name
|
def get_start_period(self, form): if form['filter'] == 'filter_period': if form['period_from']: return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_from']).name return ''
| 469,189
|
def get_end_period(self, form): return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_to']).name
|
def get_end_period(self, form): return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_to']).name
| 469,190
|
def write(self, cr, uid, ids, vals, context=None): obj=[] if 'company_id' in vals: move_lines = self.pool.get('account.move.line').search(cr, uid, [('journal_id', 'in', ids)]) if move_lines: raise osv.except_osv(_('Warning !'), _('You cannot modify company of this journal as its related record exist in Entry Lines')) return super(account_journal, self).write(cr, uid, ids, vals, context=context)
|
def write(self, cr, uid, ids, vals, context=None): if 'company_id' in vals: move_lines = self.pool.get('account.move.line').search(cr, uid, [('journal_id', 'in', ids)]) if move_lines: raise osv.except_osv(_('Warning !'), _('You cannot modify company of this journal as its related record exist in Entry Lines')) return super(account_journal, self).write(cr, uid, ids, vals, context=context)
| 469,191
|
def create_sequence(self, cr, uid, vals, context=None): """ Create new entry sequence for every new Joural @param cr: cursor to database @param user: id of current user @param ids: list of record ids to be process @param context: context arguments, like lang, time zone @return: return a result """
|
def create_sequence(self, cr, uid, vals, context=None): """ Create new entry sequence for every new Joural @param cr: cursor to database @param user: id of current user @param ids: list of record ids to be process @param context: context arguments, like lang, time zone @return: return a result """
| 469,192
|
def create_sequence(self, cr, uid, vals, context=None): """ Create new entry sequence for every new Joural @param cr: cursor to database @param user: id of current user @param ids: list of record ids to be process @param context: context arguments, like lang, time zone @return: return a result """
|
def create_sequence(self, cr, uid, vals, context=None): """ Create new entry sequence for every new Joural @param cr: cursor to database @param user: id of current user @param ids: list of record ids to be process @param context: context arguments, like lang, time zone @return: return a result """
| 469,193
|
def write(self, cr, uid, ids, vals, context={}): obj=[] if 'company_id' in vals: move_lines = self.pool.get('account.move.line').search(cr, uid, [('period_id', 'in', ids)]) if move_lines: raise osv.except_osv(_('Warning !'), _('You cannot modify company of this period as its related record exist in Entry Lines')) return super(account_period, self).write(cr, uid, ids, vals, context=context)
|
def write(self, cr, uid, ids, vals, context={}): if 'company_id' in vals: move_lines = self.pool.get('account.move.line').search(cr, uid, [('period_id', 'in', ids)]) if move_lines: raise osv.except_osv(_('Warning !'), _('You cannot modify company of this period as its related record exist in Entry Lines')) return super(account_period, self).write(cr, uid, ids, vals, context=context)
| 469,194
|
def action_create(self,cr,uid,ids,context=None): acc_obj = self.pool.get('account.account') tmpl_obj = self.pool.get('account.account.template') data = self.read(cr, uid, ids) company_id = acc_obj.read(cr, uid, [data[0]['cparent_id']], ['company_id'])[0]['company_id'][0] account_template = tmpl_obj.browse(cr, uid, context['tmpl_ids']) vals = { 'name': account_template.name, 'currency_id': account_template.currency_id and account_template.currency_id.id or False, 'code': account_template.code, 'type': account_template.type, 'user_type': account_template.user_type and account_template.user_type.id or False, 'reconcile': account_template.reconcile, 'shortcut': account_template.shortcut, 'note': account_template.note, 'parent_id': data[0]['cparent_id'], 'company_id': company_id, } new_account = acc_obj.create(cr, uid, vals) return {'type':'state', 'state': 'end' }
|
def action_create(self,cr,uid,ids,context=None): acc_obj = self.pool.get('account.account') tmpl_obj = self.pool.get('account.account.template') data = self.read(cr, uid, ids) company_id = acc_obj.read(cr, uid, [data[0]['cparent_id']], ['company_id'])[0]['company_id'][0] account_template = tmpl_obj.browse(cr, uid, context['tmpl_ids']) vals = { 'name': account_template.name, 'currency_id': account_template.currency_id and account_template.currency_id.id or False, 'code': account_template.code, 'type': account_template.type, 'user_type': account_template.user_type and account_template.user_type.id or False, 'reconcile': account_template.reconcile, 'shortcut': account_template.shortcut, 'note': account_template.note, 'parent_id': data[0]['cparent_id'], 'company_id': company_id, } acc_obj.create(cr, uid, vals) return {'type':'state', 'state': 'end' }
| 469,195
|
def execute(self, cr, uid, ids, context=None): obj_multi = self.browse(cr, uid, ids[0]) obj_acc = self.pool.get('account.account') obj_acc_tax = self.pool.get('account.tax') obj_journal = self.pool.get('account.journal') obj_sequence = self.pool.get('ir.sequence') obj_acc_template = self.pool.get('account.account.template') obj_fiscal_position_template = self.pool.get('account.fiscal.position.template') obj_fiscal_position = self.pool.get('account.fiscal.position') data_pool = self.pool.get('ir.model.data') analytic_journal_obj = self.pool.get('account.analytic.journal')
|
def execute(self, cr, uid, ids, context=None): obj_multi = self.browse(cr, uid, ids[0]) obj_acc = self.pool.get('account.account') obj_acc_tax = self.pool.get('account.tax') obj_journal = self.pool.get('account.journal') obj_sequence = self.pool.get('ir.sequence') obj_acc_template = self.pool.get('account.account.template') obj_fiscal_position_template = self.pool.get('account.fiscal.position.template') obj_fiscal_position = self.pool.get('account.fiscal.position') data_pool = self.pool.get('ir.model.data') analytic_journal_obj = self.pool.get('account.analytic.journal')
| 469,196
|
def update_from_db(self, cr): # update the graph with values from the database (if exist) ## First, we set the default values for each package in graph additional_data = dict.fromkeys(self.keys(), {'id': 0, 'state': 'uninstalled', 'dbdemo': False, 'installed_version': None}) ## Then we get the values from the database cr.execute('SELECT name, id, state, demo AS dbdemo, latest_version AS installed_version' ' FROM ir_module_module' ' WHERE name IN %s',(tuple(additional_data),) )
|
def update_from_db(self, cr): # update the graph with values from the database (if exist) ## First, we set the default values for each package in graph additional_data = dict.fromkeys(self.keys(), {'id': 0, 'state': 'uninstalled', 'dbdemo': False, 'installed_version': None}) ## Then we get the values from the database cr.execute('SELECT name, id, state, demo AS dbdemo, latest_version AS installed_version' 'FROM ir_module_module' ' WHERE name IN %s',(tuple(additional_data),) )
| 469,197
|
def upgrade_graph(graph, cr, module_list, force=None): if force is None: force = [] packages = [] len_graph = len(graph) for module in module_list: mod_path = get_module_path(module) terp_file = get_module_resource(module, '__openerp__.py') if not terp_file or not os.path.isfile(terp_file): terp_file = get_module_resource(module, '__terp__.py') if not mod_path or not terp_file: global not_loaded not_loaded.append(module) logger.notifyChannel('init', netsvc.LOG_WARNING, 'module %s: not installable' % (module)) continue #raise osv.osv.except_osv('Error!',"Module '%s' was not found" % (module,)) if os.path.isfile(terp_file) or zipfile.is_zipfile(mod_path+'.zip'): try: info = eval(tools.file_open(terp_file).read()) except: logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: eval file %s' % (module, terp_file)) raise if info.get('installable', True): packages.append((module, info.get('depends', []), info)) dependencies = dict([(p, deps) for p, deps, data in packages]) current, later = set([p for p, dep, data in packages]), set() while packages and current > later: package, deps, data = packages[0] # if all dependencies of 'package' are already in the graph, add 'package' in the graph if reduce(lambda x, y: x and y in graph, deps, True): if not package in current: packages.pop(0) continue later.clear() current.remove(package) graph.addNode(package, deps) node = Node(package, graph) node.data = data for kind in ('init', 'demo', 'update'): if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force: setattr(node, kind, True) else: later.add(package) packages.append((package, deps, data)) packages.pop(0) graph.update_from_db(cr) for package in later: unmet_deps = filter(lambda p: p not in graph, dependencies[package]) logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: Unmet dependencies: %s' % (package, ', '.join(unmet_deps))) result = len(graph) - len_graph if result != len(module_list): logger.notifyChannel('init', netsvc.LOG_WARNING, 
'Not all modules have loaded.') return result
|
def upgrade_graph(graph, cr, module_list, force=None): if force is None: force = [] packages = [] len_graph = len(graph) for module in module_list: mod_path = get_module_path(module) terp_file = get_module_resource(module, '__openerp__.py') if not terp_file or not os.path.isfile(terp_file): terp_file = get_module_resource(module, '__terp__.py') if not mod_path or not terp_file: logger.notifyChannel('init', netsvc.LOG_WARNING, 'module %s: not found, skipped' % (module)) continue #raise osv.osv.except_osv('Error!',"Module '%s' was not found" % (module,)) if os.path.isfile(terp_file) or zipfile.is_zipfile(mod_path+'.zip'): try: info = eval(tools.file_open(terp_file).read()) except: logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: eval file %s' % (module, terp_file)) raise if info.get('installable', True): packages.append((module, info.get('depends', []), info)) dependencies = dict([(p, deps) for p, deps, data in packages]) current, later = set([p for p, dep, data in packages]), set() while packages and current > later: package, deps, data = packages[0] # if all dependencies of 'package' are already in the graph, add 'package' in the graph if reduce(lambda x, y: x and y in graph, deps, True): if not package in current: packages.pop(0) continue later.clear() current.remove(package) graph.addNode(package, deps) node = Node(package, graph) node.data = data for kind in ('init', 'demo', 'update'): if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force: setattr(node, kind, True) else: later.add(package) packages.append((package, deps, data)) packages.pop(0) graph.update_from_db(cr) for package in later: unmet_deps = filter(lambda p: p not in graph, dependencies[package]) logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: Unmet dependencies: %s' % (package, ', '.join(unmet_deps))) result = len(graph) - len_graph if result != len(module_list): logger.notifyChannel('init', netsvc.LOG_WARNING, 'Not all modules have loaded.') return 
result
| 469,198
|
def upgrade_graph(graph, cr, module_list, force=None): if force is None: force = [] packages = [] len_graph = len(graph) for module in module_list: mod_path = get_module_path(module) terp_file = get_module_resource(module, '__openerp__.py') if not terp_file or not os.path.isfile(terp_file): terp_file = get_module_resource(module, '__terp__.py') if not mod_path or not terp_file: global not_loaded not_loaded.append(module) logger.notifyChannel('init', netsvc.LOG_WARNING, 'module %s: not installable' % (module)) continue #raise osv.osv.except_osv('Error!',"Module '%s' was not found" % (module,)) if os.path.isfile(terp_file) or zipfile.is_zipfile(mod_path+'.zip'): try: info = eval(tools.file_open(terp_file).read()) except: logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: eval file %s' % (module, terp_file)) raise if info.get('installable', True): packages.append((module, info.get('depends', []), info)) dependencies = dict([(p, deps) for p, deps, data in packages]) current, later = set([p for p, dep, data in packages]), set() while packages and current > later: package, deps, data = packages[0] # if all dependencies of 'package' are already in the graph, add 'package' in the graph if reduce(lambda x, y: x and y in graph, deps, True): if not package in current: packages.pop(0) continue later.clear() current.remove(package) graph.addNode(package, deps) node = Node(package, graph) node.data = data for kind in ('init', 'demo', 'update'): if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force: setattr(node, kind, True) else: later.add(package) packages.append((package, deps, data)) packages.pop(0) graph.update_from_db(cr) for package in later: unmet_deps = filter(lambda p: p not in graph, dependencies[package]) logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: Unmet dependencies: %s' % (package, ', '.join(unmet_deps))) result = len(graph) - len_graph if result != len(module_list): logger.notifyChannel('init', netsvc.LOG_WARNING, 
'Not all modules have loaded.') return result
|
def upgrade_graph(graph, cr, module_list, force=None): if force is None: force = [] packages = [] len_graph = len(graph) for module in module_list: mod_path = get_module_path(module) terp_file = get_module_resource(module, '__openerp__.py') if not terp_file or not os.path.isfile(terp_file): terp_file = get_module_resource(module, '__terp__.py')if not mod_path or not terp_file: global not_loaded not_loaded.append(module) logger.notifyChannel('init', netsvc.LOG_WARNING, 'module %s: not installable' % (module)) continue #raise osv.osv.except_osv('Error!',"Module '%s' was not found" % (module,)) if os.path.isfile(terp_file) or zipfile.is_zipfile(mod_path+'.zip'): try: info = eval(tools.file_open(terp_file).read()) except: logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: eval file %s' % (module, terp_file)) raise if info.get('installable', True): packages.append((module, info.get('depends', []), info)) dependencies = dict([(p, deps) for p, deps, data in packages]) current, later = set([p for p, dep, data in packages]), set()while packages and current > later: package, deps, data = packages[0]# if all dependencies of 'package' are already in the graph, add 'package' in the graph if reduce(lambda x, y: x and y in graph, deps, True): if not package in current: packages.pop(0) continue later.clear() current.remove(package) graph.addNode(package, deps) node = Node(package, graph) node.data = data for kind in ('init', 'demo', 'update'): if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force: setattr(node, kind, True) else: later.add(package) packages.append((package, deps, data)) packages.pop(0)graph.update_from_db(cr)for package in later: unmet_deps = filter(lambda p: p not in graph, dependencies[package]) logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: Unmet dependencies: %s' % (package, ', '.join(unmet_deps)))result = len(graph) - len_graph if result != len(module_list): logger.notifyChannel('init', netsvc.LOG_WARNING, 'Not 
all modules have loaded.') return result
| 469,199
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.