-
Notifications
You must be signed in to change notification settings - Fork 24.6k
/
fields.py
5327 lines (4460 loc) · 228 KB
/
fields.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
""" High-level objects for fields. """
from __future__ import annotations
import ast
import base64
import binascii
import contextlib
import copy
import enum
import itertools
import json
import logging
import typing
import uuid
import warnings
from collections import defaultdict
from datetime import date, datetime, time
from difflib import get_close_matches, unified_diff
from hashlib import sha256
from operator import attrgetter
from xmlrpc.client import MAXINT

import psycopg2
import pytz
from markupsafe import Markup, escape as markup_escape
from psycopg2.extras import Json as PsycopgJson

from .api import ContextType, DomainType, IdType, NewId, ValuesType
from .models import BaseModel, check_property_field_value_name
from .netsvc import ColoredFormatter, GREEN, RED, DEFAULT, COLOR_PATTERN
from .tools import (
    float_repr, float_round, float_compare, float_is_zero, human_size,
    OrderedSet, sql, SQL, date_utils, unique, lazy_property,
    image_process, merge_sequences, is_list_of,
    html_normalize, html_sanitize,
    DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT,
    DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT,
)
from .tools.sql import pg_varchar
from .tools.mimetypes import guess_mimetype
from .tools.misc import unquote, has_list_types, Sentinel, SENTINEL
from .tools.translate import html_translate
# AccessError added: it is raised/caught in _compute_related but was missing
from odoo.exceptions import AccessError, CacheMiss
from odoo.osv import expression
# Type variables for generic annotations: ``T`` is the Python value type of a
# field, ``M`` a model class (bound to BaseModel).
T = typing.TypeVar("T")
M = typing.TypeVar("M", bound=BaseModel)

# lengths of the server's canonical date/datetime string renderings
DATE_LENGTH = len(date.today().strftime(DATE_FORMAT))
DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT))

# hacky-ish way to prevent access to a field through the ORM (except for sudo mode)
NO_ACCESS = '.'

# models making up the ORM's own reflection layer
IR_MODELS = (
    'ir.model', 'ir.model.data', 'ir.model.fields', 'ir.model.fields.selection',
    'ir.model.relation', 'ir.model.constraint', 'ir.module.module',
)

# field types that may be declared company_dependent (checked in Field._get_attrs)
COMPANY_DEPENDENT_FIELDS = (
    'char', 'float', 'boolean', 'integer', 'text', 'many2one', 'date', 'datetime', 'selection', 'html'
)

_logger = logging.getLogger(__name__)
# __name__ ends with '.fields'; stripping its last 7 characters yields the
# package name, so schema changes are logged under '<package>.schema'
_schema = logging.getLogger(__name__[:-7] + '.schema')

# alias for the type of None, convenient for isinstance() checks
NoneType = type(None)
def first(records):
    """ Return the first record in ``records``, with the same prefetching. """
    if len(records) > 1:
        return next(iter(records))
    # a singleton (or empty) recordset is already its own "first" element
    return records
def resolve_mro(model, name, predicate):
    """ Return the list of successively overridden values of attribute ``name``
    in mro order on ``model`` that satisfy ``predicate``. Model registry
    classes are ignored.
    """
    missing = object()  # local sentinel: distinguishes "absent" from any real value
    overridden = []
    for klass in model._model_classes:
        candidate = klass.__dict__.get(name, missing)
        if candidate is missing:
            # class does not define the attribute itself: keep scanning
            continue
        if not predicate(candidate):
            # first non-matching definition stops the collection
            break
        overridden.append(candidate)
    return overridden
def determine(needle, records, *args):
    """ Simple helper for calling a method given as a string or a function.

    :param needle: callable or name of method to call on ``records``
    :param BaseModel records: recordset to call ``needle`` on or with
    :param args: additional arguments to pass to the determinant
    :returns: the determined value if the determinant is a method name or callable
    :raise TypeError: if ``records`` is not a recordset, or ``needle`` is not
        a callable or valid method name
    """
    if not isinstance(records, BaseModel):
        raise TypeError("Determination requires a subject recordset")
    if isinstance(needle, str):
        needle = getattr(records, needle)
        # refuse dunder-prefixed names; the original test was the truthiness of
        # ``name.find('__')`` (0 only when the name starts with '__'), which
        # reads like a bug — make the intent explicit
        if not needle.__name__.startswith('__'):
            return needle(*args)
    elif callable(needle):
        if not needle.__name__.startswith('__'):
            return needle(records, *args)
    raise TypeError("Determination requires a callable or method name")
class MetaField(type):
    """ Metaclass for field classes.

    Registers each concrete field class by its ``type`` attribute, and
    precomputes the lists of ``_related_*`` / ``_description_*`` attribute
    names so field instances never need to call dir().
    """
    by_type = {}  # mapping: field type string -> field class (first wins)

    def __init__(cls, name, bases, attrs):
        super().__init__(name, bases, attrs)
        if not hasattr(cls, 'type'):
            # abstract base without a field type: nothing to register
            return

        if cls.type and cls.type not in MetaField.by_type:
            MetaField.by_type[cls.type] = cls

        # compute class attributes to avoid calling dir() on fields
        related, described = [], []
        for attr in dir(cls):
            if attr.startswith('_related_'):
                related.append((attr[9:], attr))
            elif attr.startswith('_description_'):
                described.append((attr[13:], attr))
        cls.related_attrs = related
        cls.description_attrs = described
_global_seq = iter(itertools.count())
class Field(MetaField('DummyField', (object,), {}), typing.Generic[T]):
"""The field descriptor contains the field definition, and manages accesses
and assignments of the corresponding field on records. The following
attributes may be provided when instantiating a field:
:param str string: the label of the field seen by users; if not
set, the ORM takes the field name in the class (capitalized).
:param str help: the tooltip of the field seen by users
:param bool readonly: whether the field is readonly (default: ``False``)
This only has an impact on the UI. Any field assignation in code will work
(if the field is a stored field or an inversable one).
:param bool required: whether the value of the field is required (default: ``False``)
:param str index: whether the field is indexed in database, and the kind of index.
Note: this has no effect on non-stored and virtual fields.
The possible values are:
* ``"btree"`` or ``True``: standard index, good for many2one
* ``"btree_not_null"``: BTREE index without NULL values (useful when most
values are NULL, or when NULL is never searched for)
* ``"trigram"``: Generalized Inverted Index (GIN) with trigrams (good for full-text search)
* ``None`` or ``False``: no index (default)
:param default: the default value for the field; this is either a static
value, or a function taking a recordset and returning a value; use
``default=None`` to discard default values for the field
:type default: value or callable
:param str groups: comma-separated list of group xml ids (string); this
restricts the field access to the users of the given groups only
:param bool company_dependent: whether the field value is dependent of the current company;
The value is stored on the model table as jsonb dict with the company id as the key.
The field's default values stored in model ir.default are used as fallbacks for
unspecified values in the jsonb dict.
:param bool copy: whether the field value should be copied when the record
is duplicated (default: ``True`` for normal fields, ``False`` for
``one2many`` and computed fields, including property fields and
related fields)
:param bool store: whether the field is stored in database
(default:``True``, ``False`` for computed fields)
:param str aggregator: aggregate function used by :meth:`~odoo.models.Model.read_group`
when grouping on this field.
Supported aggregate functions are:
* ``array_agg`` : values, including nulls, concatenated into an array
* ``count`` : number of rows
* ``count_distinct`` : number of distinct rows
* ``bool_and`` : true if all values are true, otherwise false
* ``bool_or`` : true if at least one value is true, otherwise false
* ``max`` : maximum value of all values
* ``min`` : minimum value of all values
* ``avg`` : the average (arithmetic mean) of all values
* ``sum`` : sum of all values
:param str group_expand: function used to expand read_group results when grouping on
the current field. For selection fields, ``group_expand=True`` automatically
expands groups for all selection keys.
.. code-block:: python
@api.model
def _read_group_selection_field(self, values, domain, order):
return ['choice1', 'choice2', ...] # available selection choices.
@api.model
def _read_group_many2one_field(self, records, domain, order):
return records + self.search([custom_domain])
.. rubric:: Computed Fields
:param str compute: name of a method that computes the field
.. seealso:: :ref:`Advanced Fields/Compute fields <reference/fields/compute>`
:param bool precompute: whether the field should be computed before record insertion
in database. Should be used to specify manually some fields as precompute=True
when the field can be computed before record insertion.
(e.g. avoid statistics fields based on search/read_group), many2one
linking to the previous record, ... (default: `False`)
.. warning::
Precomputation only happens when no explicit value and no default
value is provided to create(). This means that a default value
disables the precomputation, even if the field is specified as
precompute=True.
Precomputing a field can be counterproductive if the records of the
given model are not created in batch. Consider the situation were
many records are created one by one. If the field is not
precomputed, it will normally be computed in batch at the flush(),
and the prefetching mechanism will help making the computation
efficient. On the other hand, if the field is precomputed, the
computation will be made one by one, and will therefore not be able
to take advantage of the prefetching mechanism.
Following the remark above, precomputed fields can be interesting on
the lines of a one2many, which are usually created in batch by the
ORM itself, provided that they are created by writing on the record
that contains them.
:param bool compute_sudo: whether the field should be recomputed as superuser
to bypass access rights (by default ``True`` for stored fields, ``False``
for non stored fields)
:param bool recursive: whether the field has recursive dependencies (the field
``X`` has a dependency like ``parent_id.X``); declaring a field recursive
must be explicit to guarantee that recomputation is correct
:param str inverse: name of a method that inverses the field (optional)
:param str search: name of a method that implement search on the field (optional)
:param str related: sequence of field names
:param bool default_export_compatible: whether the field must be exported by default in an import-compatible export
.. seealso:: :ref:`Advanced fields/Related fields <reference/fields/related>`
"""
type: str # type of the field (string)
relational = False # whether the field is a relational one
translate = False # whether the field is translated
write_sequence = 0 # field ordering for write()
# Database column type (ident, spec) for non-company-dependent fields.
# Company-dependent fields are stored as jsonb (see column_type).
_column_type: typing.Tuple[str, str] | None = None
args = None # the parameters given to __init__()
_module = None # the field's module name
_modules = None # modules that define this field
_setup_done = True # whether the field is completely set up
_sequence = None # absolute ordering of the field
_base_fields = () # the fields defining self, in override order
_extra_keys = () # unknown attributes set on the field
_direct = False # whether self may be used directly (shared)
_toplevel = False # whether self is on the model's registry class
automatic = False # whether the field is automatically created ("magic" field)
inherited = False # whether the field is inherited (_inherits)
inherited_field = None # the corresponding inherited field
name: str # name of the field
model_name: str | None = None # name of the model of this field
comodel_name: str | None = None # name of the model of values (if relational)
store = True # whether the field is stored in database
index = None # how the field is indexed in database
manual = False # whether the field is a custom field
copy = True # whether the field is copied over by BaseModel.copy()
_depends = None # collection of field dependencies
_depends_context = None # collection of context key dependencies
recursive = False # whether self depends on itself
compute = None # compute(recs) computes field on recs
compute_sudo = False # whether field should be recomputed as superuser
precompute = False # whether field has to be computed before creation
inverse = None # inverse(recs) inverses field on recs
search = None # search(recs, operator, value) searches on self
related = None # sequence of field names, for related fields
company_dependent = False # whether ``self`` is company-dependent (property field)
default = None # default(recs) returns the default value
string: str | None = None # field label
export_string_translation = True # whether the field label translations are exported
help: str | None = None # field tooltip
readonly = False # whether the field is readonly
required = False # whether the field is required
groups: str | None = None # csv list of group xml ids
change_default = False # whether the field may trigger a "user-onchange"
related_field = None # corresponding related field
aggregator = None # operator for aggregating values
group_expand = None # name of method to expand groups in read_group()
prefetch = True # the prefetch group (False means no group)
default_export_compatible = False # whether the field must be exported by default in an import-compatible export
exportable = True
def __init__(self, string: str | Sentinel = SENTINEL, **kwargs):
kwargs['string'] = string
self._sequence = next(_global_seq)
self.args = {key: val for key, val in kwargs.items() if val is not SENTINEL}
def __str__(self):
if self.name is None:
return "<%s.%s>" % (__name__, type(self).__name__)
return "%s.%s" % (self.model_name, self.name)
def __repr__(self):
if self.name is None:
return f"{'<%s.%s>'!r}" % (__name__, type(self).__name__)
return f"{'%s.%s'!r}" % (self.model_name, self.name)
############################################################################
#
# Base field setup: things that do not depend on other models/fields
#
# The base field setup is done by field.__set_name__(), which determines the
# field's name, model name, module and its parameters.
#
# The dictionary field.args gives the parameters passed to the field's
# constructor. Most parameters have an attribute of the same name on the
# field. The parameters as attributes are assigned by the field setup.
#
# When several definition classes of the same model redefine a given field,
# the field occurrences are "merged" into one new field instantiated at
# runtime on the registry class of the model. The occurrences of the field
# are given to the new field as the parameter '_base_fields'; it is a list
# of fields in override order (or reverse MRO).
#
# In order to save memory, a field should avoid having field.args and/or
# many attributes when possible. We call "direct" a field that can be set
# up directly from its definition class. Direct fields are non-related
# fields defined on models, and can be shared across registries. We call
# "toplevel" a field that is put on the model's registry class, and is
# therefore specific to the registry.
#
# Toplevel field are set up once, and are no longer set up from scratch
# after that. Those fields can save memory by discarding field.args and
# field._base_fields once set up, because those are no longer necessary.
#
# Non-toplevel non-direct fields are the fields on definition classes that
# may not be shared. In other words, those fields are never used directly,
# and are always recreated as toplevel fields. On those fields, the base
# setup is useless, because only field.args is used for setting up other
# fields. We therefore skip the base setup for those fields. The only
# attributes of those fields are: '_sequence', 'args', 'model_name', 'name'
# and '_module', which makes their __dict__'s size minimal.
    def __set_name__(self, owner, name):
        """ Perform the base setup of a field.

        :param owner: the owner class of the field (the model's definition or registry class)
        :param name: the name of the field
        """
        assert issubclass(owner, BaseModel)
        self.model_name = owner._name
        self.name = name
        if is_definition_class(owner):
            # only for fields on definition classes, not registry classes
            self._module = owner._module
            owner._field_definitions.append(self)

        if not self.args.get('related'):
            # non-related fields can be shared across registries ("direct")
            self._direct = True
        if self._direct or self._toplevel:
            self._setup_attrs(owner, name)
            if self._toplevel:
                # free memory, self.args and self._base_fields are no longer useful
                self.__dict__.pop('args', None)
                self.__dict__.pop('_base_fields', None)
#
# Setup field parameter attributes
#
    def _get_attrs(self, model_class, name):
        """ Return the field parameter attributes as a dictionary.

        Merges the parameters of all base field definitions (override order)
        with the parameters of ``self``, then applies the defaults implied by
        ``compute``, ``related``, ``precompute`` and ``company_dependent``.
        """
        # determine all inherited field attributes
        attrs = {}
        modules = []
        for field in self.args.get('_base_fields', ()):
            if not isinstance(self, type(field)):
                # 'self' overrides 'field' and their types are not compatible;
                # so we ignore all the parameters collected so far
                attrs.clear()
                modules.clear()
                continue
            attrs.update(field.args)
            if field._module:
                modules.append(field._module)
        attrs.update(self.args)
        if self._module:
            modules.append(self._module)

        attrs['args'] = self.args
        attrs['model_name'] = model_class._name
        attrs['name'] = name
        # the module of the last definition wins; _modules keeps them all
        attrs['_module'] = modules[-1] if modules else None
        attrs['_modules'] = tuple(set(modules))

        # initialize ``self`` with ``attrs``
        if name == 'state':
            # by default, `state` fields should be reset on copy
            attrs['copy'] = attrs.get('copy', False)
        if attrs.get('compute'):
            # by default, computed fields are not stored, computed in superuser
            # mode if stored, not copied (unless stored and explicitly not
            # readonly), and readonly (unless inversible)
            attrs['store'] = store = attrs.get('store', False)
            attrs['compute_sudo'] = attrs.get('compute_sudo', store)
            if not (attrs['store'] and not attrs.get('readonly', True)):
                attrs['copy'] = attrs.get('copy', False)
            attrs['readonly'] = attrs.get('readonly', not attrs.get('inverse'))
        if attrs.get('related'):
            # by default, related fields are not stored, computed in superuser
            # mode, not copied and readonly
            attrs['store'] = store = attrs.get('store', False)
            attrs['compute_sudo'] = attrs.get('compute_sudo', attrs.get('related_sudo', True))
            attrs['copy'] = attrs.get('copy', False)
            attrs['readonly'] = attrs.get('readonly', True)
        if attrs.get('precompute'):
            # precompute only makes sense on stored computed/related fields
            if not attrs.get('compute') and not attrs.get('related'):
                warnings.warn(f"precompute attribute doesn't make any sense on non computed field {self}")
                attrs['precompute'] = False
            elif not attrs.get('store'):
                warnings.warn(f"precompute attribute has no impact on non stored field {self}")
                attrs['precompute'] = False
        if attrs.get('company_dependent'):
            if attrs.get('required'):
                warnings.warn(f"company_dependent field {self} cannot be required")
            if attrs.get('translate'):
                warnings.warn(f"company_dependent field {self} cannot be translated")
            if self.type not in COMPANY_DEPENDENT_FIELDS:
                warnings.warn(f"company_dependent field {self} is not one of the allowed types {COMPANY_DEPENDENT_FIELDS}")
            attrs['copy'] = attrs.get('copy', False)
            # speed up search and on delete
            attrs['index'] = attrs.get('index', 'btree_not_null')
            attrs['prefetch'] = attrs.get('prefetch', 'company_dependent')
            attrs['_depends_context'] = ('company',)
        # parameters 'depends' and 'depends_context' are stored in attributes
        # '_depends' and '_depends_context', respectively
        if 'depends' in attrs:
            attrs['_depends'] = tuple(attrs.pop('depends'))
        if 'depends_context' in attrs:
            attrs['_depends_context'] = tuple(attrs.pop('depends_context'))
        if 'group_operator' in attrs:
            warnings.warn("Since Odoo 18, 'group_operator' is deprecated, use 'aggregator' instead", DeprecationWarning, 2)
            attrs['aggregator'] = attrs.pop('group_operator')

        return attrs
def _setup_attrs(self, model_class, name):
""" Initialize the field parameter attributes. """
attrs = self._get_attrs(model_class, name)
# determine parameters that must be validated
extra_keys = [key for key in attrs if not hasattr(self, key)]
if extra_keys:
attrs['_extra_keys'] = extra_keys
self.__dict__.update(attrs)
# prefetch only stored, column, non-manual fields
if not self.store or not self.column_type or self.manual:
self.prefetch = False
if not self.string and not self.related:
# related fields get their string from their parent field
self.string = (
name[:-4] if name.endswith('_ids') else
name[:-3] if name.endswith('_id') else name
).replace('_', ' ').title()
# self.default must be either None or a callable
if self.default is not None and not callable(self.default):
value = self.default
self.default = lambda model: value
############################################################################
#
# Complete field setup: everything else
#
    def prepare_setup(self):
        # mark the field as not set up; setup() will run on it again
        self._setup_done = False
def setup(self, model):
""" Perform the complete setup of a field. """
if not self._setup_done:
# validate field params
for key in self._extra_keys:
if not model._valid_field_parameter(self, key):
_logger.warning(
"Field %s: unknown parameter %r, if this is an actual"
" parameter you may want to override the method"
" _valid_field_parameter on the relevant model in order to"
" allow it",
self, key
)
if self.related:
self.setup_related(model)
else:
self.setup_nonrelated(model)
if not isinstance(self.required, bool):
warnings.warn(f'Property {self}.required should be a boolean ({self.required}).')
if not isinstance(self.readonly, bool):
warnings.warn(f'Property {self}.readonly should be a boolean ({self.readonly}).')
self._setup_done = True
#
# Setup of non-related fields
#
    def setup_nonrelated(self, model):
        """ Determine the dependencies and inverse field(s) of ``self``. """
        # no-op at this level; subclasses are expected to override it
        pass
    def get_depends(self, model: BaseModel):
        """ Return the field's dependencies and cache dependencies.

        :returns: a pair ``(depends, depends_context)`` of iterables of
            dotted field paths and context keys, respectively
        """
        if self._depends is not None:
            # the parameter 'depends' has priority over 'depends' on compute
            return self._depends, self._depends_context or ()

        if self.related:
            if self._depends_context is not None:
                depends_context = self._depends_context
            else:
                # inherit the cache dependencies of the target field
                related_model = model.env[self.related_field.model_name]
                depends, depends_context = self.related_field.get_depends(related_model)
            return [self.related], depends_context

        if not self.compute:
            # plain stored field: no field dependencies
            return (), self._depends_context or ()

        # determine the functions implementing self.compute
        if isinstance(self.compute, str):
            funcs = resolve_mro(model, self.compute, callable)
        else:
            funcs = [self.compute]

        # collect depends and depends_context
        depends = []
        depends_context = list(self._depends_context or ())
        for func in funcs:
            deps = getattr(func, '_depends', ())
            # '_depends' may itself be a callable returning the dependencies
            depends.extend(deps(model) if callable(deps) else deps)
            depends_context.extend(getattr(func, '_depends_context', ()))

        # display_name may depend on context['lang'] (`test_lp1071710`)
        if self.automatic and self.name == 'display_name' and model._rec_name:
            if model._fields[model._rec_name].base_field.translate:
                if 'lang' not in depends_context:
                    depends_context.append('lang')

        return depends, depends_context
#
# Setup of related fields
#
    def setup_related(self, model):
        """ Setup the attributes of a related field.

        Walks the dotted ``related`` path, stores the target field, wires up
        compute/inverse/search, and copies descriptive attributes from the
        target field.
        """
        assert isinstance(self.related, str), self.related

        # determine the chain of fields, and make sure they are all set up
        model_name = self.model_name
        for name in self.related.split('.'):
            field = model.pool[model_name]._fields.get(name)
            if field is None:
                raise KeyError(
                    f"Field {name} referenced in related field definition {self} does not exist."
                )
            if not field._setup_done:
                # set up the chain lazily, depth-first
                field.setup(model.env[model_name])
            model_name = field.comodel_name

        self.related_field = field

        # check type consistency
        if self.type != field.type:
            raise TypeError("Type of related field %s is inconsistent with %s" % (self, field))

        # determine dependencies, compute, inverse, and search
        self.compute = self._compute_related
        if self.inherited or not (self.readonly or field.readonly):
            self.inverse = self._inverse_related
        if field._description_searchable:
            # allow searching on self only if the related field is searchable
            self.search = self._search_related

        # A readonly related field without an inverse method should not have a
        # default value, as it does not make sense.
        if self.default and self.readonly and not self.inverse:
            _logger.warning("Redundant default on %s", self)

        # copy attributes from field to self (string, help, etc.)
        for attr, prop in self.related_attrs:
            # check whether 'attr' is explicitly set on self (from its field
            # definition), and ignore its class-level value (only a default)
            if attr not in self.__dict__ and prop.startswith('_related_'):
                setattr(self, attr, getattr(field, prop))
        for attr in field._extra_keys:
            if not hasattr(self, attr) and model._valid_field_parameter(self, attr):
                setattr(self, attr, getattr(field, attr))

        # special cases of inherited fields
        if self.inherited:
            self.inherited_field = field
            if field.required:
                self.required = True
            # add modules from delegate and target fields; the first one ensures
            # that inherited fields introduced via an abstract model (_inherits
            # being on the abstract model) are assigned an XML id
            delegate_field = model._fields[self.related.split('.')[0]]
            self._modules = tuple({*self._modules, *delegate_field._modules, *field._modules})

        if self.store and self.translate:
            _logger.warning("Translated stored related field (%s) will not be computed correctly in all languages", self)
def traverse_related(self, record):
""" Traverse the fields of the related field `self` except for the last
one, and return it as a pair `(last_record, last_field)`. """
for name in self.related.split('.')[:-1]:
record = first(record[name])
return record, self.related_field
    def _compute_related(self, records):
        """ Compute the related field ``self`` on ``records``. """
        #
        # Traverse fields one by one for all records, in order to take advantage
        # of prefetching for each field access. In order to clarify the impact
        # of the algorithm, consider traversing 'foo.bar' for records a1 and a2,
        # where 'foo' is already present in cache for a1, a2. Initially, both a1
        # and a2 are marked for prefetching. As the commented code below shows,
        # traversing all fields one record at a time will fetch 'bar' one record
        # at a time.
        #
        #       b1 = a1.foo         # mark b1 for prefetching
        #       v1 = b1.bar         # fetch/compute bar for b1
        #       b2 = a2.foo         # mark b2 for prefetching
        #       v2 = b2.bar         # fetch/compute bar for b2
        #
        # On the other hand, traversing all records one field at a time ensures
        # maximal prefetching for each field access.
        #
        #       b1 = a1.foo         # mark b1 for prefetching
        #       b2 = a2.foo         # mark b2 for prefetching
        #       v1 = b1.bar         # fetch/compute bar for b1, b2
        #       v2 = b2.bar         # value already in cache
        #
        # This difference has a major impact on performance, in particular in
        # the case where 'bar' is a computed field that takes advantage of batch
        # computation.
        #
        values = list(records)
        for name in self.related.split('.')[:-1]:
            try:
                values = [first(value[name]) for value in values]
            except AccessError as e:
                # NOTE(review): AccessError is not among this module's visible
                # imports — confirm it is imported from odoo.exceptions.
                description = records.env['ir.model']._get(records._name).name
                env = records.env
                # re-raise with the document type/model appended for context
                raise AccessError(env._(
                    "%(previous_message)s\n\nImplicitly accessed through '%(document_kind)s' (%(document_model)s).",
                    previous_message=e.args[0],
                    document_kind=description,
                    document_model=records._name,
                ))
        # assign final values to records
        for record, value in zip(records, values):
            record[self.name] = self._process_related(value[self.related_field.name], record.env)
    def _process_related(self, value, env):
        """No transformation by default, but allows override."""
        # hook for subclasses to post-process values read through 'related'
        return value
def _inverse_related(self, records):
""" Inverse the related field ``self`` on ``records``. """
# store record values, otherwise they may be lost by cache invalidation!
record_value = {record: record[self.name] for record in records}
for record in records:
target, field = self.traverse_related(record)
# update 'target' only if 'record' and 'target' are both real or
# both new (see `test_base_objects.py`, `test_basic`)
if target and bool(target.id) == bool(record.id):
target[field.name] = record_value[record]
    def _search_related(self, records, operator, value):
        """ Determine the domain to search on field ``self``.

        Expands the dotted ``related`` path into nested 'in' subqueries, and
        widens the domain with (..., '=', False) at each optional many2one
        step when a null value would match.
        """
        # This should never happen to avoid bypassing security checks
        # and should already be converted to (..., 'in', subquery)
        assert operator not in ('any', 'not any')

        # determine whether the related field can be null
        if isinstance(value, (list, tuple)):
            value_is_null = any(val is False or val is None for val in value)
        else:
            value_is_null = value is False or value is None

        can_be_null = (  # (..., '=', False) or (..., 'not in', [truthy vals])
            (operator not in expression.NEGATIVE_TERM_OPERATORS and value_is_null)
            or (operator in expression.NEGATIVE_TERM_OPERATORS and not value_is_null)
        )

        def make_domain(path, model):
            # recursively turn 'a.b.c' into ('a', 'in', search(('b.c', ...)))
            if '.' not in path:
                return [(path, operator, value)]
            prefix, suffix = path.split('.', 1)
            field = model._fields[prefix]
            comodel = model.env[field.comodel_name]
            domain = [(prefix, 'in', comodel._search(make_domain(suffix, comodel)))]
            if can_be_null and field.type == 'many2one' and not field.required:
                # records with no value at this step also match a null search
                return expression.OR([domain, [(prefix, '=', False)]])
            return domain

        model = records.env[self.model_name].with_context(active_test=False)
        model = model.sudo(records.env.su or self.compute_sudo)
        return make_domain(self.related, model)
# properties used by setup_related() to copy values from related field
# (each `_related_<attr>` proxies the corresponding attribute of the
# target field, so the setup code can read them generically)
_related_comodel_name = property(attrgetter('comodel_name'))
_related_string = property(attrgetter('string'))
_related_help = property(attrgetter('help'))
_related_groups = property(attrgetter('groups'))
_related_aggregator = property(attrgetter('aggregator'))
@lazy_property
def column_type(self) -> tuple[str, str] | None:
    """ Return the actual column type for this field, if stored as a column. """
    # translated and company-dependent values are kept in a JSONB mapping
    if self.company_dependent or self.translate:
        return ('jsonb', 'jsonb')
    return self._column_type
@property
def base_field(self):
    """ Return the base field of an inherited field, or ``self``. """
    parent = self.inherited_field
    # walk up the inheritance chain until a non-inherited field is reached
    return parent.base_field if parent else self
#
# Company-dependent fields
#
def get_company_dependent_fallback(self, records):
    """ Return the company-independent fallback value of ``self`` (read from
    ``ir.default``), converted to the record format.
    """
    assert self.company_dependent
    defaults = records.env['ir.default']._get_model_defaults(records._name)
    raw = defaults.get(self.name)
    # round-trip through the cache format to normalize the raw default
    cached = self.convert_to_cache(raw, records, validate=False)
    return self.convert_to_record(cached, records)
#
# Setup of field triggers
#
def resolve_depends(self, registry):
    """ Return the dependencies of `self` as a collection of field tuples.

    Each declared dotted dependency (e.g. ``'line_ids.price'``) is resolved
    into tuples of field objects; one tuple is yielded per prefix of the
    path. As a side effect, this may set ``self.recursive`` and clear
    ``self.precompute`` when the declaration requires it, emitting warnings.
    """
    Model0 = registry[self.model_name]
    for dotnames in registry.field_depends[self]:
        field_seq = []
        model_name = self.model_name
        # precompute eligibility is re-checked along each dependency path
        check_precompute = self.precompute
        for index, fname in enumerate(dotnames.split('.')):
            Model = registry[model_name]
            if Model0._transient and not Model._transient:
                # modifying fields on regular models should not trigger
                # recomputations of fields on transient models
                break
            try:
                field = Model._fields[fname]
            except KeyError:
                raise ValueError(
                    f"Wrong @depends on '{self.compute}' (compute method of field {self}). "
                    f"Dependency field '{fname}' not found in model {model_name}."
                )
            # a non-leading occurrence of self in its own path means recursion
            if field is self and index and not self.recursive:
                self.recursive = True
                warnings.warn(f"Field {self} should be declared with recursive=True")
            # precomputed fields can depend on non-precomputed ones, as long
            # as they are reachable through at least one many2one field
            if check_precompute and field.store and field.compute and not field.precompute:
                warnings.warn(f"Field {self} cannot be precomputed as it depends on non-precomputed field {field}")
                self.precompute = False
            if field_seq and not field_seq[-1]._description_searchable:
                # the field before this one is not searchable, so there is
                # no way to know which on records to recompute self
                warnings.warn(
                    f"Field {field_seq[-1]!r} in dependency of {self} should be searchable. "
                    f"This is necessary to determine which records to recompute when {field} is modified. "
                    f"You should either make the field searchable, or simplify the field dependency."
                )
            field_seq.append(field)
            # do not make self trigger itself: for instance, a one2many
            # field line_ids with domain [('foo', ...)] will have
            # 'line_ids.foo' as a dependency
            if not (field is self and not index):
                yield tuple(field_seq)
            # a one2many dependency also triggers through its inverse fields
            if field.type == 'one2many':
                for inv_field in Model.pool.field_inverses[field]:
                    yield tuple(field_seq) + (inv_field,)
            # crossing a many2one satisfies the precompute reachability rule
            if check_precompute and field.type == 'many2one':
                check_precompute = False
            model_name = field.comodel_name
############################################################################
#
# Field description
#
def get_description(self, env, attributes=None):
    """ Return a dictionary that describes the field ``self``.

    :param env: environment passed to callable description properties
    :param attributes: optional whitelist of attribute names to include
    """
    result = {}
    for attr_name, prop_name in self.description_attrs:
        # honour the whitelist, and only consider description properties
        if attributes is not None and attr_name not in attributes:
            continue
        if not prop_name.startswith('_description_'):
            continue
        val = getattr(self, prop_name)
        if callable(val):
            val = val(env)
        # None means "not applicable": leave it out of the description
        if val is not None:
            result[attr_name] = val
    return result
# properties used by get_description()
# (plain attribute mirrors, exposed as-is in the field description;
# computed entries are the `_description_*(env)` methods defined below)
_description_name = property(attrgetter('name'))
_description_type = property(attrgetter('type'))
_description_store = property(attrgetter('store'))
_description_manual = property(attrgetter('manual'))
_description_related = property(attrgetter('related'))
_description_company_dependent = property(attrgetter('company_dependent'))
_description_readonly = property(attrgetter('readonly'))
_description_required = property(attrgetter('required'))
_description_groups = property(attrgetter('groups'))
_description_change_default = property(attrgetter('change_default'))
_description_default_export_compatible = property(attrgetter('default_export_compatible'))
_description_exportable = property(attrgetter('exportable'))
def _description_depends(self, env):
return env.registry.field_depends[self]
@property
def _description_searchable(self):
return bool(self.store or self.search)
def _description_sortable(self, env):
    """ Whether the field can be used in an ``ORDER BY`` clause. """
    if self.column_type and self.store:  # stored column: trivially sortable
        return True
    # otherwise, probe the ORDER BY translation machinery
    model = env[self.model_name]
    query = model._as_query(ordered=False)
    try:
        model._order_field_to_sql(model._table, self.name, SQL(), SQL(), query)
    except (ValueError, AccessError):
        return False
    return True
def _description_groupable(self, env):
    """ Whether the field can be used as a ``read_group`` groupby. """
    if self.column_type and self.store:  # stored column: trivially groupable
        return True
    # otherwise, probe the groupby machinery (dates group by granularity)
    model = env[self.model_name]
    query = model._as_query(ordered=False)
    spec = f"{self.name}:month" if self.type in ('date', 'datetime') else self.name
    try:
        model._read_group_groupby(spec, query)
    except (ValueError, AccessError):
        return False
    return True
def _description_aggregator(self, env):
    """ Return the field's aggregator if usable in ``read_group``, else None. """
    aggregator = self.aggregator
    if not aggregator:
        return aggregator
    if self.column_type and self.store:  # stored column: always aggregatable
        return aggregator
    # otherwise, probe the aggregate selection machinery
    model = env[self.model_name]
    query = model._as_query(ordered=False)
    try:
        model._read_group_select(f"{self.name}:{aggregator}", query)
    except (ValueError, AccessError):
        return None
    return aggregator
def _description_string(self, env):
if self.string and env.lang:
model_name = self.base_field.model_name
field_string = env['ir.model.fields'].get_field_string(model_name)
return field_string.get(self.name) or self.string
return self.string
def _description_help(self, env):
if self.help and env.lang:
model_name = self.base_field.model_name
field_help = env['ir.model.fields'].get_field_help(model_name)
return field_help.get(self.name) or self.help
return self.help
def is_editable(self):
    """ Return whether the field can be editable in a view. """
    # editable unless flagged read-only
    readonly = self.readonly
    return not readonly
def is_accessible(self, env):
    """ Return whether the field is accessible from the given environment. """
    groups = self.groups
    # no group restriction, or superuser: always accessible
    if not groups or env.is_superuser():
        return True
    # '.' is the sentinel meaning "no group grants access"
    if groups == '.':
        return False
    return env.user.has_groups(groups)
############################################################################
#
# Conversion of values
#
def convert_to_column(self, value, record, values=None, validate=True):
    """ Convert ``value`` from the ``write`` format to an SQL parameter
    suitable for SQL conditions; used to compare a field's value when the
    field actually stores multiple values (translated or company-dependent).
    """
    # identity checks on purpose: 0 and '' are genuine values, only
    # None/False mean "no value"
    if value is None or value is False:
        return None
    if isinstance(value, bytes):
        return value.decode()
    return value if isinstance(value, str) else str(value)
def convert_to_column_insert(self, value, record, values=None, validate=True):
    """ Convert ``value`` from the ``write`` format to the SQL parameter
    format for INSERT queries, handling fields that store multiple values
    (translated or company-dependent).
    """
    converted = self.convert_to_column(value, record, values, validate)
    if not self.company_dependent:
        return converted
    # company-dependent: only store values that differ from the
    # company-independent fallback in ir.default
    fallback = record.env['ir.default']._get_model_defaults(record._name).get(self.name)
    if converted == self.convert_to_column(fallback, record):
        return None
    return PsycopgJson({record.env.company.id: converted})
def convert_to_column_update(self, value, record):
""" Convert ``value`` from the ``to_flush`` format to the SQL parameter
format for UPDATE queries. The ``to_flush`` format is the same as the
cache format, except for translated fields (``{'lang_code': 'value', ...}``
or ``None``) and company-dependent fields (``{company_id: value, ...}``).
"""
if self.company_dependent:
return PsycopgJson(value)
return self.convert_to_column_insert(