fix bugs and add normalization feature
This commit is contained in:
parent
ffd0db1ecd
commit
2f328ff193
@ -1,5 +1,6 @@
|
|||||||
from odoo import api, fields, models, _
|
from odoo import api, fields, models, _
|
||||||
from odoo.exceptions import UserError, ValidationError
|
from odoo.exceptions import UserError, ValidationError
|
||||||
|
from odoo.tools import float_is_zero, float_compare
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
_logger = logging.getLogger(__name__)
|
_logger = logging.getLogger(__name__)
|
||||||
@ -549,4 +550,169 @@ class StockInventoryBackdateLine(models.Model):
|
|||||||
# Invalidate cache
|
# Invalidate cache
|
||||||
self.env.cache.invalidate()
|
self.env.cache.invalidate()
|
||||||
|
|
||||||
|
# Ghost Value Fix: Propagate consumption to subsequent sales
|
||||||
|
# Note: We pass the backdate because move.date might still be cached as 'now' until full reload
|
||||||
|
self._propagate_consumption(move, backdate)
|
||||||
|
|
||||||
return move
|
return move
|
||||||
|
|
||||||
def _propagate_consumption(self, move, backdate_datetime):
    """
    Ghost Value Fix.

    Simulate FIFO/AVCO consumption of the newly backdated (incoming)
    valuation layer against outgoing moves recorded after the backdate.
    If stock is retroactively added on Jan 1 but sales happened on Jan 2,
    those sales should have consumed this (older/cheaper) layer. Rather
    than replaying the whole FIFO queue (too risky/complex), we create
    'Correction SVLs' plus journal entries expensing the part of the
    backdated layer that subsequent sales would have consumed.

    :param move: the backdated incoming ``stock.move`` whose layer we propagate
    :param backdate_datetime: effective datetime of the backdated move
        (passed explicitly because ``move.date`` may still be cached as 'now')
    """
    if move.product_id.categ_id.property_cost_method not in ['average', 'fifo']:
        return

    # 1. Identify the layer created by the backdated move.
    new_svl = self.env['stock.valuation.layer'].search([
        ('stock_move_id', '=', move.id)
    ], limit=1)
    if not new_svl:
        return

    rounding = move.product_uom.rounding
    # Only propagate INCOMING adjustments (positive qty) that still have
    # remaining quantity; use float_compare per the file's convention.
    if float_compare(new_svl.quantity, 0, precision_rounding=rounding) <= 0:
        return
    if float_compare(new_svl.remaining_qty, 0, precision_rounding=rounding) <= 0:
        return

    # 2. Find subsequent outgoing moves (the "missed" sales): outgoing SVLs
    # (qty < 0) created AFTER the backdate, oldest first.
    outgoing_svls = self.env['stock.valuation.layer'].search([
        ('product_id', '=', move.product_id.id),
        ('company_id', '=', move.company_id.id),
        ('quantity', '<', 0),  # Outgoing
        ('create_date', '>', backdate_datetime),
    ], order='create_date asc, id asc')
    if not outgoing_svls:
        return

    currency = move.company_id.currency_id
    # We expense at OUR layer's unit cost, since it is our layer being consumed.
    unit_val = new_svl.value / new_svl.quantity if new_svl.quantity else 0.0

    # BUG FIX: the previous implementation decremented remaining_qty /
    # remaining_value twice — once through the ORM assignments (which are
    # real DB writes in Odoo, not in-memory bookkeeping) and once through
    # the raw SQL UPDATE. We now track the running remainder in a local
    # float and apply the reduction to the database exactly once, via SQL
    # (which also avoids ORM recomputes/side effects).
    remaining_qty = new_svl.remaining_qty

    # 3. Consumption loop.
    for out_layer in outgoing_svls:
        if float_compare(remaining_qty, 0, precision_rounding=rounding) <= 0:
            break

        # A sale may consume its whole quantity, capped by what our layer
        # has left. Approximate logic: any subsequent sale is a candidate
        # to consume this 'unexpected' old stock — we are effectively
        # injecting the layer into the past without re-running the whole
        # FIFO queue.
        sale_qty = abs(out_layer.quantity)
        consume_qty = min(remaining_qty, sale_qty)
        if float_compare(consume_qty, 0, precision_rounding=rounding) <= 0:
            continue

        expense_value = currency.round(consume_qty * unit_val)
        if float_is_zero(expense_value, precision_rounding=currency.rounding):
            continue

        # BUG FIX: resolve accounts BEFORE touching the layer. Previously
        # the layer was reduced first and then skipped when accounts were
        # missing, silently losing value with no journal entry.
        stock_val_acc = move.product_id.categ_id.property_stock_valuation_account_id.id
        cogs_acc = move.product_id.categ_id.property_stock_account_output_categ_id.id
        if not stock_val_acc or not cogs_acc:
            continue

        # --- ACTION: REDUCE OUR LAYER (single SQL write) ---
        self.env.cr.execute(
            "UPDATE stock_valuation_layer SET remaining_qty = remaining_qty - %s, remaining_value = remaining_value - %s WHERE id = %s",
            (consume_qty, expense_value, new_svl.id)
        )
        remaining_qty -= consume_qty

        # --- ACTION: CREATE expense entry (Debit COGS / Credit Stock Asset) ---
        move_lines = [
            (0, 0, {
                'name': _('Backdate Correction for %s') % out_layer.stock_move_id.name,
                'account_id': cogs_acc,
                'debit': expense_value,
                'credit': 0,
                'product_id': move.product_id.id,
            }),
            (0, 0, {
                'name': _('Backdate Correction for %s') % out_layer.stock_move_id.name,
                'account_id': stock_val_acc,
                'debit': 0,
                'credit': expense_value,
                'product_id': move.product_id.id,
            }),
        ]
        am_vals = {
            'ref': f"{move.name} - Consumed by {out_layer.stock_move_id.name}",
            'date': out_layer.create_date.date(),  # Match the SALE date
            # Prefer the journal of the original adjustment entry...
            'journal_id': move.account_move_ids[0].journal_id.id if move.account_move_ids else False,
            'line_ids': move_lines,
            'move_type': 'entry',
            'company_id': move.company_id.id,
        }
        # ...falling back to the product category's stock journal.
        if not am_vals['journal_id']:
            am_vals['journal_id'] = move.product_id.categ_id.property_stock_journal.id

        am = self.env['account.move'].create(am_vals)
        am.action_post()

        # --- ACTION: CREATE correction SVL (negative value = asset reduction) ---
        self.env['stock.valuation.layer'].create({
            'product_id': move.product_id.id,
            'value': -expense_value,
            'quantity': 0,
            'unit_cost': 0,
            'remaining_qty': 0,
            'stock_move_id': out_layer.stock_move_id.id,  # Link to sale move
            'company_id': move.company_id.id,
            'description': _('Backdate Correction (from %s)') % move.name,
            'account_move_id': am.id,
        })
        # Backdate the correction SVL so it appears at sale time: create()
        # stamps create_date=now, so we locate the new row via its unique
        # account_move_id and rewrite the timestamp in SQL.
        self.env.cr.execute(
            "UPDATE stock_valuation_layer SET create_date = %s WHERE account_move_id = %s",
            (out_layer.create_date, am.id)
        )

        # Lazy %-style args: the message is only built if INFO is enabled.
        _logger.info(
            "Propagated consumption: Consumed %s from Backdate Layer for Sale %s",
            consume_qty, out_layer.stock_move_id.name,
        )

    # The raw SQL bypassed the ORM cache; invalidate so subsequent reads of
    # remaining_qty / remaining_value are fresh (same pattern as the caller).
    self.env.cache.invalidate()
|||||||
Loading…
Reference in New Issue
Block a user