
Commit 9b2ec12

add other recordHar options
1 parent 62565e0 commit 9b2ec12

4 files changed: 237 additions, 5 deletions


playwright/_impl/_browser.py

Lines changed: 16 additions & 3 deletions
@@ -226,9 +226,6 @@ async def normalize_context_params(is_sync: bool, params: Dict) -> None:
     if "recordHarPath" in params:
         recordHar: Dict[str, Any] = {"path": str(params["recordHarPath"])}
         params["recordHar"] = recordHar
-        if "recordHarOmitContent" in params:
-            params["recordHar"]["omitContent"] = params["recordHarOmitContent"]
-            del params["recordHarOmitContent"]
         if "recordHarUrlFilter" in params:
             opt = params["recordHarUrlFilter"]
             if isinstance(opt, str):
@@ -237,6 +234,22 @@ async def normalize_context_params(is_sync: bool, params: Dict) -> None:
                 params["recordHar"]["urlRegexSource"] = opt.pattern
                 params["recordHar"]["urlRegexFlags"] = escape_regex_flags(opt)
             del params["recordHarUrlFilter"]
+        if "recordHarMode" in params:
+            params["recordHar"]["mode"] = params["recordHarMode"]
+            del params["recordHarMode"]
+
+        new_content_api = None
+        old_content_api = None
+        if "recordHarContent" in params:
+            new_content_api = params["recordHarContent"]
+            del params["recordHarContent"]
+        if "recordHarOmitContent" in params:
+            old_content_api = params["recordHarOmitContent"]
+            del params["recordHarOmitContent"]
+        content = new_content_api or ("omit" if old_content_api else None)
+        if content:
+            params["recordHar"]["content"] = content
+
         del params["recordHarPath"]
     if "recordVideoDir" in params:
         params["recordVideo"] = {"dir": str(params["recordVideoDir"])}

tests/async/test_har.py

Lines changed: 108 additions & 1 deletion
@@ -15,6 +15,7 @@
 import json
 import os
 import re
+import zipfile
 
 from playwright.async_api import Browser
 from tests.server import Server
@@ -32,6 +33,24 @@ async def test_should_work(browser, server, tmpdir):
 
 
 async def test_should_omit_content(browser, server, tmpdir):
+    path = os.path.join(tmpdir, "log.har")
+    context = await browser.new_context(
+        record_har_path=path,
+        record_har_content="omit",
+    )
+    page = await context.new_page()
+    await page.goto(server.PREFIX + "/har.html")
+    await context.close()
+    with open(path) as f:
+        data = json.load(f)
+    assert "log" in data
+    log = data["log"]
+    content1 = log["entries"][0]["response"]["content"]
+    assert "text" not in content1
+    assert "encoding" not in content1
+
+
+async def test_should_omit_content_legacy(browser, server, tmpdir):
     path = os.path.join(tmpdir, "log.har")
     context = await browser.new_context(
         record_har_path=path, record_har_omit_content=True
@@ -44,10 +63,67 @@ async def test_should_omit_content(browser, server, tmpdir):
     assert "log" in data
     log = data["log"]
     content1 = log["entries"][0]["response"]["content"]
-    assert "text" in content1
+    assert "text" not in content1
     assert "encoding" not in content1
 
 
+async def test_should_attach_content(browser, server, tmpdir, is_firefox):
+    path = os.path.join(tmpdir, "log.har.zip")
+    context = await browser.new_context(
+        record_har_path=path,
+        record_har_content="attach",
+    )
+    page = await context.new_page()
+    await page.goto(server.PREFIX + "/har.html")
+    await page.evaluate("() => fetch('/pptr.png').then(r => r.arrayBuffer())")
+    await context.close()
+    with zipfile.ZipFile(path) as z:
+        with z.open("har.har") as har:
+            entries = json.load(har)["log"]["entries"]
+
+        assert "encoding" not in entries[0]["response"]["content"]
+        assert (
+            entries[0]["response"]["content"]["mimeType"]
+            == "text/html; charset=utf-8"
+        )
+        assert (
+            "75841480e2606c03389077304342fac2c58ccb1b"
+            in entries[0]["response"]["content"]["_file"]
+        )
+        assert entries[0]["response"]["content"]["size"] >= 96
+        assert entries[0]["response"]["content"]["compression"] == 0
+
+        assert "encoding" not in entries[1]["response"]["content"]
+        assert (
+            entries[1]["response"]["content"]["mimeType"]
+            == "text/css; charset=utf-8"
+        )
+        assert (
+            "79f739d7bc88e80f55b9891a22bf13a2b4e18adb"
+            in entries[1]["response"]["content"]["_file"]
+        )
+        assert entries[1]["response"]["content"]["size"] >= 37
+        assert entries[1]["response"]["content"]["compression"] == 0
+
+        assert "encoding" not in entries[2]["response"]["content"]
+        assert entries[2]["response"]["content"]["mimeType"] == "image/png"
+        assert (
+            "a4c3a18f0bb83f5d9fe7ce561e065c36205762fa"
+            in entries[2]["response"]["content"]["_file"]
+        )
+        assert entries[2]["response"]["content"]["size"] >= 6000
+        assert entries[2]["response"]["content"]["compression"] == 0
+
+        with z.open("75841480e2606c03389077304342fac2c58ccb1b.html") as f:
+            assert b"HAR Page" in f.read()
+
+        with z.open("79f739d7bc88e80f55b9891a22bf13a2b4e18adb.css") as f:
+            assert b"pink" in f.read()
+
+        with z.open("a4c3a18f0bb83f5d9fe7ce561e065c36205762fa.png") as f:
+            assert len(f.read()) == entries[2]["response"]["content"]["size"]
+
+
 async def test_should_not_omit_content(browser, server, tmpdir):
     path = os.path.join(tmpdir, "log.har")
     context = await browser.new_context(
@@ -78,6 +154,37 @@ async def test_should_include_content(browser, server, tmpdir):
     assert "HAR Page" in content1["text"]
 
 
+async def test_should_default_to_full_mode(browser, server, tmpdir):
+    path = os.path.join(tmpdir, "log.har")
+    context = await browser.new_context(
+        record_har_path=path,
+    )
+    page = await context.new_page()
+    await page.goto(server.PREFIX + "/har.html")
+    await context.close()
+    with open(path) as f:
+        data = json.load(f)
+    assert "log" in data
+    log = data["log"]
+    assert log["entries"][0]["request"]["bodySize"] >= 0
+
+
+async def test_should_support_minimal_mode(browser, server, tmpdir):
+    path = os.path.join(tmpdir, "log.har")
+    context = await browser.new_context(
+        record_har_path=path,
+        record_har_mode="minimal",
+    )
+    page = await context.new_page()
+    await page.goto(server.PREFIX + "/har.html")
+    await context.close()
+    with open(path) as f:
+        data = json.load(f)
+    assert "log" in data
+    log = data["log"]
+    assert log["entries"][0]["request"]["bodySize"] == -1
+
+
 async def test_should_filter_by_glob(
     browser: Browser, server: Server, tmpdir: str
 ) -> None:

tests/server.py

Lines changed: 1 addition & 0 deletions
@@ -139,6 +139,7 @@ def process(self) -> None:
                 content_type += "; charset=utf-8"
             request.setHeader(b"Content-Type", content_type)
             request.setHeader(b"Cache-Control", "no-cache, no-store")
+            request.setHeader(b"Content-Length", str(len(file_content)))
             if path in gzip_routes:
                 request.setHeader("Content-Encoding", "gzip")
                 request.write(gzip.compress(file_content))

tests/sync/test_har.py

Lines changed: 112 additions & 1 deletion
@@ -15,6 +15,7 @@
 import json
 import os
 import re
+import zipfile
 from pathlib import Path
 
 from playwright.sync_api import Browser
@@ -33,6 +34,24 @@ def test_should_work(browser: Browser, server: Server, tmpdir: Path) -> None:
 
 
 def test_should_omit_content(browser: Browser, server: Server, tmpdir: Path) -> None:
+    path = os.path.join(tmpdir, "log.har")
+    context = browser.new_context(record_har_path=path, record_har_content="omit")
+    page = context.new_page()
+    page.goto(server.PREFIX + "/har.html")
+    context.close()
+    with open(path) as f:
+        data = json.load(f)
+    assert "log" in data
+    log = data["log"]
+
+    content1 = log["entries"][0]["response"]["content"]
+    assert "text" not in content1
+    assert "encoding" not in content1
+
+
+def test_should_omit_content_legacy(
+    browser: Browser, server: Server, tmpdir: Path
+) -> None:
     path = os.path.join(tmpdir, "log.har")
     context = browser.new_context(record_har_path=path, record_har_omit_content=True)
     page = context.new_page()
@@ -44,10 +63,67 @@ def test_should_omit_content(browser: Browser, server: Server, tmpdir: Path) ->
     log = data["log"]
 
     content1 = log["entries"][0]["response"]["content"]
-    assert "text" in content1
+    assert "text" not in content1
     assert "encoding" not in content1
 
 
+def test_should_attach_content(browser: Browser, server: Server, tmpdir: Path) -> None:
+    path = os.path.join(tmpdir, "log.har.zip")
+    context = browser.new_context(
+        record_har_path=path,
+        record_har_content="attach",
+    )
+    page = context.new_page()
+    page.goto(server.PREFIX + "/har.html")
+    page.evaluate("() => fetch('/pptr.png').then(r => r.arrayBuffer())")
+    context.close()
+    with zipfile.ZipFile(path) as z:
+        with z.open("har.har") as har:
+            entries = json.load(har)["log"]["entries"]
+
+        assert "encoding" not in entries[0]["response"]["content"]
+        assert (
+            entries[0]["response"]["content"]["mimeType"]
+            == "text/html; charset=utf-8"
+        )
+        assert (
+            "75841480e2606c03389077304342fac2c58ccb1b"
+            in entries[0]["response"]["content"]["_file"]
+        )
+        assert entries[0]["response"]["content"]["size"] >= 96
+        assert entries[0]["response"]["content"]["compression"] == 0
+
+        assert "encoding" not in entries[1]["response"]["content"]
+        assert (
+            entries[1]["response"]["content"]["mimeType"]
+            == "text/css; charset=utf-8"
+        )
+        assert (
+            "79f739d7bc88e80f55b9891a22bf13a2b4e18adb"
+            in entries[1]["response"]["content"]["_file"]
+        )
+        assert entries[1]["response"]["content"]["size"] >= 37
+        assert entries[1]["response"]["content"]["compression"] == 0
+
+        assert "encoding" not in entries[2]["response"]["content"]
+        assert entries[2]["response"]["content"]["mimeType"] == "image/png"
+        assert (
+            "a4c3a18f0bb83f5d9fe7ce561e065c36205762fa"
+            in entries[2]["response"]["content"]["_file"]
+        )
+        assert entries[2]["response"]["content"]["size"] >= 6000
+        assert entries[2]["response"]["content"]["compression"] == 0
+
+        with z.open("75841480e2606c03389077304342fac2c58ccb1b.html") as f:
+            assert b"HAR Page" in f.read()
+
+        with z.open("79f739d7bc88e80f55b9891a22bf13a2b4e18adb.css") as f:
+            assert b"pink" in f.read()
+
+        with z.open("a4c3a18f0bb83f5d9fe7ce561e065c36205762fa.png") as f:
+            assert len(f.read()) == entries[2]["response"]["content"]["size"]
+
+
 def test_should_include_content(browser: Browser, server: Server, tmpdir: Path) -> None:
     path = os.path.join(tmpdir, "log.har")
     context = browser.new_context(record_har_path=path)
@@ -64,6 +140,41 @@ def test_should_include_content(browser: Browser, server: Server, tmpdir: Path)
     assert "HAR Page" in content1["text"]
 
 
+def test_should_default_to_full_mode(
+    browser: Browser, server: Server, tmpdir: Path
+) -> None:
+    path = os.path.join(tmpdir, "log.har")
+    context = browser.new_context(
+        record_har_path=path,
+    )
+    page = context.new_page()
+    page.goto(server.PREFIX + "/har.html")
+    context.close()
+    with open(path) as f:
+        data = json.load(f)
+    assert "log" in data
+    log = data["log"]
+    assert log["entries"][0]["request"]["bodySize"] >= 0
+
+
+def test_should_support_minimal_mode(
+    browser: Browser, server: Server, tmpdir: Path
+) -> None:
+    path = os.path.join(tmpdir, "log.har")
+    context = browser.new_context(
+        record_har_path=path,
+        record_har_mode="minimal",
+    )
+    page = context.new_page()
+    page.goto(server.PREFIX + "/har.html")
+    context.close()
+    with open(path) as f:
+        data = json.load(f)
+    assert "log" in data
+    log = data["log"]
+    assert log["entries"][0]["request"]["bodySize"] == -1
+
+
 def test_should_filter_by_glob(browser: Browser, server: Server, tmpdir: str) -> None:
     path = os.path.join(tmpdir, "log.har")
     context = browser.new_context(
