Skip to content

Commit 677b743

Browse files
authored
Fix performance of DPF vector (#2249)
1 parent facbfc2 commit 677b743

File tree

5 files changed

+91
-64
lines changed

5 files changed

+91
-64
lines changed

src/ansys/dpf/core/check_version.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -26,30 +26,31 @@
2626
Used to verify if the server version is a minimum value.
2727
"""
2828

29+
from __future__ import annotations
30+
2931
from functools import wraps
3032
import sys
3133
import weakref
3234

3335
from ansys.dpf.core import errors as dpf_errors
3436

3537

36-
def server_meet_version(required_version, server: BaseServer):
    """Check if a given server version matches with a required version.

    Parameters
    ----------
    required_version : str
        Required version to compare with the server version.
    server : :class:`ansys.dpf.core.server_types.BaseServer`
        DPF server object.

    Returns
    -------
    bool
        ``True`` when successful, ``False`` when failed.
    """
    # Delegate to the server object itself: BaseServer.meet_version caches
    # the comparison result per required_version, avoiding a repeated
    # version lookup on every call (performance fix of #2249).
    return server.meet_version(required_version)
5354

5455

5556
def server_meet_version_and_raise(required_version, server, msg=None):

src/ansys/dpf/core/server_types.py

Lines changed: 18 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@
4949
import ansys.dpf.core as core
5050
from ansys.dpf.core import __version__, errors, server_context, server_factory
5151
from ansys.dpf.core._version import min_server_version, server_to_ansys_version
52-
from ansys.dpf.core.check_version import server_meet_version
52+
from ansys.dpf.core.check_version import get_server_version, meets_version, server_meet_version
5353
from ansys.dpf.core.server_context import AvailableServerContexts, ServerContext
5454
from ansys.dpf.gate import data_processing_grpcapi, load_api
5555

@@ -444,6 +444,7 @@ def __init__(self):
444444
self._context = None
445445
self._info_instance = None
446446
self._docker_config = server_factory.RunningDockerConfig()
447+
self._server_meet_version = {}
447448

448449
def set_as_global(self, as_global=True):
449450
"""Set the current server as global if necessary.
@@ -642,7 +643,11 @@ def meet_version(self, required_version):
642643
bool
643644
``True`` if the server version meets the requirement.
644645
"""
645-
return server_meet_version(required_version, self)
646+
if required_version not in self._server_meet_version:
647+
meet = meets_version(get_server_version(self), required_version)
648+
self._server_meet_version[required_version] = meet
649+
return meet
650+
return self._server_meet_version[required_version]
646651

647652
@property
648653
@abc.abstractmethod
@@ -1030,6 +1035,8 @@ def config(self):
10301035
class InProcessServer(CServer):
10311036
"""Server using the InProcess communication protocol."""
10321037

1038+
_version: str = None
1039+
10331040
def __init__(
10341041
self,
10351042
ansys_path: Union[str, None] = None,
@@ -1080,14 +1087,16 @@ def version(self):
10801087
version : str
10811088
The version of the InProcess server in the format "major.minor".
10821089
"""
1083-
from ansys.dpf.gate import data_processing_capi, integral_types
1090+
if self._version is None:
1091+
from ansys.dpf.gate import data_processing_capi, integral_types
10841092

1085-
api = data_processing_capi.DataProcessingCAPI
1086-
major = integral_types.MutableInt32()
1087-
minor = integral_types.MutableInt32()
1088-
api.data_processing_get_server_version(major, minor)
1089-
out = str(int(major)) + "." + str(int(minor))
1090-
return out
1093+
api = data_processing_capi.DataProcessingCAPI
1094+
major = integral_types.MutableInt32()
1095+
minor = integral_types.MutableInt32()
1096+
api.data_processing_get_server_version(major, minor)
1097+
out = str(int(major)) + "." + str(int(minor))
1098+
self._version = out
1099+
return self._version
10911100

10921101
@property
10931102
def os(self):

src/ansys/dpf/gate/dpf_array.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@ def __new__(
2020
obj = vec.np_array.view(cls)
2121
except NotImplementedError as e:
2222
raise TypeError(e.args)
23-
vec.start_checking_modification()
2423
obj.vec = vec
2524
return obj
2625

src/ansys/dpf/gate/dpf_vector.py

Lines changed: 8 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -33,13 +33,18 @@ class DPFVectorBase:
3333

3434
def __init__(self, owner, api):
    """Create a new DPF vector bound to ``owner`` through the gate API.

    Parameters
    ----------
    owner :
        DPF object the vector belongs to; must expose ``_server``.
        # NOTE(review): exact expected type not visible from this hunk — confirm.
    api :
        Gate API providing ``dpf_vector_new_for_object``.

    Raises
    ------
    NotImplementedError
        If the underlying API rejects ``owner`` (``ctypes.ArgumentError``).
    """
    self.dpf_vector_api = api

    # The updated version of the DPF vector will always be committed to DPF.
    # Ideally, this should be set to True only when modified, however it is
    # not possible to do that efficiently. Consequently, for performance
    # reasons, it's much better to always commit the vector to DPF rather
    # than verifying whether the vector has changed. See issue #2201.
    self._modified = True

    try:
        self._internal_obj = self.dpf_vector_api.dpf_vector_new_for_object(owner)
        # BUG in 22.2: DpfVector is not holding the data owner and no call to
        # the data owner should be done at delete; skip committing in that case.
        if not server_meet_version("4.1", owner._server) and owner._server.client is None:
            self._modified = False
    except ctypes.ArgumentError:
        raise NotImplementedError
4550

@@ -53,35 +58,6 @@ def internal_size(self) -> MutableInt32:
5358
"""
5459
return self._array.internal_size
5560

56-
def start_checking_modification(self) -> None:
57-
"""
58-
Takes a deep copy of the current data as a numpy array
59-
in self._initial_data, if self._check_changes is set to True.
60-
In that case, at deletion, the current data is compared to the initial one
61-
and the data is updated server side if it has changed.
62-
63-
Notes
64-
-----
65-
self._check_changes is set to True by default when a client is added at the class init
66-
67-
"""
68-
if self._check_changes:
69-
self._initial_data = copy.deepcopy(self.np_array)
70-
71-
def has_changed(self):
72-
"""
73-
If self._check_changes is set to True, compares the initial data computed in
74-
```start_checking_modification``` to the current one.
75-
76-
Notes
77-
-----
78-
self._check_changes is set to True by default when a client is added at the class init
79-
"""
80-
if self._check_changes:
81-
if self._modified or not np.allclose(self._initial_data, self.np_array):
82-
self._modified = True
83-
return self._modified
84-
8561
@property
8662
def np_array(self) -> np.ndarray:
8763
"""
@@ -103,21 +79,6 @@ def size(self) -> int:
10379
"""Size of the data array (returns a copy)"""
10480
return int(self.internal_size)
10581

106-
def start_checking_modification(self) -> None:
107-
"""
108-
Takes a deep copy of the current data as a numpy array
109-
in self._initial_data, if self._check_changes is set to True.
110-
In that case, at deletion, the current data is compared to the initial one
111-
and the data is updated server side if it has changed.
112-
113-
Notes
114-
-----
115-
self._check_changes is set to True by default when a client is added at the class init
116-
117-
"""
118-
if self._check_changes:
119-
self._initial_data = copy.deepcopy(self.np_array)
120-
12182
def has_changed(self):
12283
"""
12384
If self._check_changes is set to True, compares the initial data computed in
@@ -127,9 +88,6 @@ def has_changed(self):
12788
-----
12889
self._check_changes is set to True by default when a client is added at the class init
12990
"""
130-
if self._check_changes:
131-
if self._modified or not np.allclose(self._initial_data, self.np_array):
132-
self._modified = True
13391
return self._modified
13492

13593
def __del__(self):

tests/test_dpf_vector.py

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
# Copyright (C) 2020 - 2025 ANSYS, Inc. and/or its affiliates.
2+
# SPDX-License-Identifier: MIT
3+
#
4+
#
5+
# Permission is hereby granted, free of charge, to any person obtaining a copy
6+
# of this software and associated documentation files (the "Software"), to deal
7+
# in the Software without restriction, including without limitation the rights
8+
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9+
# copies of the Software, and to permit persons to whom the Software is
10+
# furnished to do so, subject to the following conditions:
11+
#
12+
# The above copyright notice and this permission notice shall be included in all
13+
# copies or substantial portions of the Software.
14+
#
15+
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16+
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17+
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18+
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19+
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20+
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21+
# SOFTWARE.
22+
23+
import numpy as np
24+
25+
from ansys.dpf import core as dpf
26+
from ansys.dpf.core import fields_factory
27+
28+
29+
def test_perf_vec_setters(server_type):
    """Write field data and scoping ids chunk by chunk to exercise DPF vector setters."""
    num_entities = int(1e5)
    field = fields_factory.create_scalar_field(
        num_entities=num_entities, location=dpf.locations.elemental, server=server_type
    )
    field.name = "my_field"
    field.data = np.zeros(num_entities, dtype=np.int32)
    field.scoping.ids = np.zeros(num_entities, dtype=np.int32)

    # Split the full index range into 200 chunks and write each one in turn.
    chunks = np.array_split(np.arange(num_entities), 200)
    for chunk_id, chunk in enumerate(chunks):
        field.data[chunk] = int(chunk_id)
        field.scoping.ids[chunk] = chunk
44+
45+
46+
def test_perf_vec_getters(server_type):
    """Read field data and scoping ids chunk by chunk to exercise DPF vector getters.

    The read values are intentionally discarded: only the cost of the
    getter calls matters for this performance regression test.
    """
    num_entities = int(1e5)
    field = fields_factory.create_scalar_field(
        num_entities=num_entities, location=dpf.locations.elemental, server=server_type
    )
    field.name = "my_field"
    field.data = np.zeros(num_entities, dtype=np.int32)
    field.scoping.ids = np.zeros(num_entities, dtype=np.int32)

    # Split the full index range into 200 chunks and read each one in turn.
    chunks = np.array_split(np.arange(num_entities), 200)
    for chunk in chunks:
        # Fix: the original bound both reads to the same name `d` (first read
        # immediately overwritten) and carried an unused enumerate index.
        _ = field.data[chunk]
        _ = field.scoping.ids[chunk]

0 commit comments

Comments
 (0)