-
-
Notifications
You must be signed in to change notification settings - Fork 100
Introduce caching of configured live accounts in each test process #3298
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,5 +1,8 @@ | ||
[mypy] | ||
|
||
[mypy-py.*] | ||
ignore_missing_imports = True | ||
|
||
[mypy-deltachat.capi.*] | ||
ignore_missing_imports = True | ||
|
||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -13,6 +13,7 @@ | |
|
||
import pytest | ||
import requests | ||
import pathlib | ||
|
||
from . import Account, const, account_hookimpl, get_core_info | ||
from .events import FFIEventLogger, FFIEventTracker | ||
|
@@ -178,6 +179,48 @@ def get_liveconfig_producer(self): | |
yield config | ||
pytest.fail("more than {} live accounts requested.".format(MAX_LIVE_CREATED_ACCOUNTS)) | ||
|
||
def cache_maybe_retrieve_configured_db_files(self, cache_addr, db_target_path): | ||
db_target_path = pathlib.Path(db_target_path) | ||
assert not db_target_path.exists() | ||
|
||
try: | ||
filescache = self._addr2files[cache_addr] | ||
except KeyError: | ||
print("CACHE FAIL for", cache_addr) | ||
link2xt marked this conversation as resolved.
Show resolved
Hide resolved
|
||
return False | ||
else: | ||
print("CACHE HIT for", cache_addr) | ||
hpk42 marked this conversation as resolved.
Show resolved
Hide resolved
|
||
targetdir = db_target_path.parent | ||
write_dict_to_dir(filescache, targetdir) | ||
return True | ||
|
||
def cache_maybe_store_configured_db_files(self, acc): | ||
addr = acc.get_config("addr") | ||
assert acc.is_configured() | ||
# don't overwrite existing entries | ||
if addr not in self._addr2files: | ||
print("storing cache for", addr) | ||
basedir = pathlib.Path(acc.get_blobdir()).parent | ||
self._addr2files[addr] = create_dict_from_files_in_path(basedir) | ||
return True | ||
|
||
|
||
def create_dict_from_files_in_path(base): | ||
cachedict = {} | ||
for path in base.glob("**/*"): | ||
if path.is_file(): | ||
cachedict[path.relative_to(base)] = path.read_bytes() | ||
return cachedict | ||
|
||
|
||
def write_dict_to_dir(dic, target_dir): | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Why not just store a single file with JSON or pickle? This is like one line instead of going through files in a directory:
And why store to files at all if nothing is persisted between pytest runs? Each time temporary directory is new and all stored dictionaries are lost. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
There is no persistence for the caching, on purpose, and to keep it simple. Each testprocess just maintains an in-process cache so we don't need to care for outdated accounts (CI email accounts are valid for one hour). |
||
assert dic | ||
for relpath, content in dic.items(): | ||
path = target_dir.joinpath(relpath) | ||
if not path.parent.exists(): | ||
os.makedirs(path.parent) | ||
path.write_bytes(content) | ||
|
||
|
||
@pytest.fixture | ||
def data(request): | ||
|
@@ -225,6 +268,15 @@ def __init__(self, testprocess, init_time): | |
self.testprocess = testprocess | ||
self.init_time = init_time | ||
|
||
def log(self, *args): | ||
print("[acsetup]", "{:.3f}".format(time.time() - self.init_time), *args) | ||
|
||
def add_configured(self, account): | ||
""" add an already configured account. """ | ||
assert account.is_configured() | ||
self._account2state[account] = self.CONFIGURED | ||
self.log("added already configured account", account, account.get_config("addr")) | ||
|
||
def start_configure(self, account, reconfigure=False): | ||
""" add an account and start its configure process. """ | ||
class PendingTracker: | ||
|
@@ -235,7 +287,7 @@ def ac_configure_completed(this, success): | |
account.add_account_plugin(PendingTracker(), name="pending_tracker") | ||
self._account2state[account] = self.CONFIGURING | ||
account.configure(reconfigure=reconfigure) | ||
print("started configure on pending", account) | ||
self.log("started configure on", account) | ||
|
||
def wait_one_configured(self, account): | ||
""" wait until this account has successfully configured. """ | ||
|
@@ -328,6 +380,9 @@ def __init__(self, request, testprocess, tmpdir, data) -> None: | |
self.set_logging_default(False) | ||
request.addfinalizer(self.finalize) | ||
|
||
def log(self, *args): | ||
print("[acfactory]", "{:.3f}".format(time.time() - self.init_time), *args) | ||
|
||
def finalize(self): | ||
while self._finalizers: | ||
fin = self._finalizers.pop() | ||
|
@@ -359,9 +414,19 @@ def get_next_liveconfig(self): | |
assert "addr" in configdict and "mail_pw" in configdict | ||
return configdict | ||
|
||
def _get_cached_account(self, addr): | ||
if addr in self.testprocess._addr2files: | ||
return self._getaccount(addr) | ||
|
||
def get_unconfigured_account(self): | ||
return self._getaccount() | ||
|
||
def _getaccount(self, try_cache_addr=None): | ||
logid = "ac{}".format(len(self._accounts) + 1) | ||
path = self.tmpdir.join(logid) | ||
# we need to use fixed database basename for maybe_cache_* functions to work | ||
path = self.tmpdir.mkdir(logid).join("dc.db") | ||
if try_cache_addr: | ||
self.testprocess.cache_maybe_retrieve_configured_db_files(try_cache_addr, path) | ||
ac = Account(path.strpath, logging=self._logging) | ||
ac._logid = logid # later instantiated FFIEventLogger needs this | ||
ac._evtracker = ac.add_account_plugin(FFIEventTracker(ac)) | ||
|
@@ -394,7 +459,7 @@ def _preconfigure_key(self, account, addr): | |
def get_pseudo_configured_account(self): | ||
# do a pseudo-configured account | ||
ac = self.get_unconfigured_account() | ||
acname = os.path.basename(ac.db_path) | ||
acname = ac._logid | ||
addr = "{}@offline.org".format(acname) | ||
ac.update_config(dict( | ||
addr=addr, displayname=acname, mail_pw="123", | ||
|
@@ -405,7 +470,7 @@ def get_pseudo_configured_account(self): | |
self._acsetup.init_logging(ac) | ||
return ac | ||
|
||
def new_online_configuring_account(self, cloned_from=None, **kwargs): | ||
def new_online_configuring_account(self, cloned_from=None, cache=False, **kwargs): | ||
if cloned_from is None: | ||
configdict = self.get_next_liveconfig() | ||
else: | ||
|
@@ -415,6 +480,12 @@ def new_online_configuring_account(self, cloned_from=None, **kwargs): | |
mail_pw=cloned_from.get_config("mail_pw"), | ||
) | ||
configdict.update(kwargs) | ||
ac = self._get_cached_account(addr=configdict["addr"]) if cache else None | ||
if ac is not None: | ||
# make sure we consume a preconfig key, as if we had created a fresh account | ||
self._preconfigured_keys.pop(0) | ||
self._acsetup.add_configured(ac) | ||
return ac | ||
ac = self.prepare_account_from_liveconfig(configdict) | ||
self._acsetup.start_configure(ac) | ||
return ac | ||
|
@@ -439,9 +510,11 @@ def bring_accounts_online(self): | |
print("all accounts online") | ||
|
||
def get_online_accounts(self, num): | ||
# to reduce number of log events logging starts after accounts can receive | ||
accounts = [self.new_online_configuring_account() for i in range(num)] | ||
accounts = [self.new_online_configuring_account(cache=True) for i in range(num)] | ||
self.bring_accounts_online() | ||
# we cache fully configured and started accounts | ||
for acc in accounts: | ||
self.testprocess.cache_maybe_store_configured_db_files(acc) | ||
return accounts | ||
|
||
def run_bot_process(self, module, ffi=True): | ||
|
Uh oh!
There was an error while loading. Please reload this page.