Skip to content

Commit 2d28acb

Browse files
committed
Drop unused code in .ikarus; tests
1 parent 0e8dd0e commit 2d28acb

File tree

2 files changed

+24
-253
lines changed

2 files changed

+24
-253
lines changed

message_ix_models/model/transport/ikarus.py

Lines changed: 22 additions & 161 deletions
Original file line numberDiff line numberDiff line change
@@ -1,38 +1,28 @@
11
"""Prepare non-LDV data from the IKARUS model via :file:`GEAM_TRP_techinput.xlsx`."""
22

33
import logging
4-
from collections import defaultdict
54
from functools import lru_cache, partial
65
from operator import le
7-
from typing import TYPE_CHECKING, Dict
6+
from typing import Dict
87

98
import pandas as pd
109
import xarray as xr
1110
from genno import Computer, Key, KeySeq, Quantity, quote
1211
from genno.core.key import single_key
1312
from iam_units import registry
14-
from message_ix import make_df
1513
from openpyxl import load_workbook
1614

17-
from message_ix_models.model.structure import get_codes
1815
from message_ix_models.util import (
19-
ScenarioInfo,
20-
broadcast,
2116
cached,
2217
convert_units,
2318
make_matched_dfs,
24-
nodes_ex_world,
2519
package_data_path,
2620
same_node,
2721
same_time,
2822
series_of_pint_quantity,
2923
)
3024

3125
from .non_ldv import UNITS
32-
from .util import input_commodity_level
33-
34-
if TYPE_CHECKING:
35-
from .config import Config
3626

3727
log = logging.getLogger(__name__)
3828

@@ -219,7 +209,27 @@ def read_ikarus_data(occupancy, k_output, k_inv_cost):
219209

220210

221211
def prepare_computer(c: Computer):
222-
"""Prepare `c` to perform model data preparation using IKARUS data."""
212+
"""Prepare `c` to perform model data preparation using IKARUS data.
213+
214+
====================================================================================
215+
216+
The data is read from ``GEAM_TRP_techinput.xlsx``, and the processed data is
217+
exported into ``non_LDV_techs_wrapped.csv``.
218+
219+
.. note:: superseded by the computations set up by :func:`prepare_computer`.
220+
221+
Parameters
222+
----------
223+
context : .Context
224+
225+
Returns
226+
-------
227+
data : dict of (str -> pandas.DataFrame)
228+
Keys are MESSAGE parameter names such as 'input', 'fix_cost'.
229+
Values are data frames ready for :meth:`~.Scenario.add_par`.
230+
Years in the data include the model horizon indicated by
231+
:attr:`.Config.base_model_info`, plus the additional year 2010.
232+
"""
223233
# TODO identify whether capacity_factor is needed
224234
c.configure(rename_dims={"source": "source"})
225235

@@ -337,152 +347,3 @@ def prepare_computer(c: Computer):
337347
# .non_ldv.prepare_computer() only if IKARUS is the selected data source for non-LDV
338348
# data. Other derived quantities (emissions factors) are also prepared there based
339349
# on these outputs.
340-
341-
342-
def get_ikarus_data(context) -> Dict[str, pd.DataFrame]:
343-
"""Prepare non-LDV data from :cite:`Martinsen2006`.
344-
345-
The data is read from ``GEAM_TRP_techinput.xlsx``, and the processed data is
346-
exported into ``non_LDV_techs_wrapped.csv``.
347-
348-
.. note:: superseded by the computations set up by :func:`prepare_computer`.
349-
350-
Parameters
351-
----------
352-
context : .Context
353-
354-
Returns
355-
-------
356-
data : dict of (str -> pandas.DataFrame)
357-
Keys are MESSAGE parameter names such as 'input', 'fix_cost'.
358-
Values are data frames ready for :meth:`~.Scenario.add_par`.
359-
Years in the data include the model horizon indicated by
360-
:attr:`.Config.base_model_info`, plus the additional year 2010.
361-
"""
362-
# Reference to the transport configuration
363-
config: "Config" = context.transport
364-
tech_info = config.spec.add.set["technology"]
365-
info = config.base_model_info
366-
367-
# Merge with base model commodity information for io_units() below
368-
# TODO this duplicates code in .ldv; move to a common location
369-
all_info = ScenarioInfo()
370-
all_info.set["commodity"].extend(get_codes("commodity"))
371-
all_info.update(config.spec.add)
372-
373-
# Retrieve the data from the spreadsheet. Use additional output efficiency and
374-
# investment cost factors for some bus technologies
375-
data = read_ikarus_data(
376-
occupancy=config.non_ldv_output, # type: ignore [attr-defined]
377-
k_output=config.efficiency["bus output"],
378-
k_inv_cost=config.cost["bus inv"],
379-
)
380-
381-
# Create data frames to add imported params to MESSAGEix
382-
383-
# Vintage and active years from scenario info
384-
# Prepend years between 2010 and *firstmodelyear* so that values are saved
385-
missing_years = [x for x in info.set["year"] if (2010 <= x < info.y0)]
386-
vtg_years = missing_years + info.yv_ya["year_vtg"].tolist()
387-
act_years = missing_years + info.yv_ya["year_act"].tolist()
388-
389-
# Default values to be used as args in make_df()
390-
defaults = dict(
391-
mode="all",
392-
year_act=act_years,
393-
year_vtg=vtg_years,
394-
time="year",
395-
time_origin="year",
396-
time_dest="year",
397-
)
398-
399-
# Dict of ('parameter name' -> [list of data frames])
400-
dfs = defaultdict(list)
401-
402-
# Iterate over each parameter and technology
403-
for (par, tec), group_data in data.groupby(["param", "technology"]):
404-
# Dict including the default values to be used as args in make_df()
405-
args = defaults.copy()
406-
args["technology"] = tec
407-
408-
# Parameter-specific arguments/processing
409-
if par == "input":
410-
pass # Handled by input_commodity_level(), below
411-
elif par == "output":
412-
# Get the mode for a technology
413-
mode = tech_info[tech_info.index(tec)].parent.id
414-
args.update(dict(commodity=f"transport pax {mode.lower()}", level="useful"))
415-
416-
# Units, as an abbreviated string
417-
_units = group_data.apply(lambda x: x.units).unique()
418-
assert len(_units) == 1, "Units must be unique per (tec, par)"
419-
units = _units[0]
420-
args["unit"] = f"{units:~}"
421-
422-
# Create data frame with values from *args*
423-
df = make_df(par, **args)
424-
425-
# Assign input commodity and level according to the technology
426-
if par == "input":
427-
df = input_commodity_level(context, df, default_level="final")
428-
429-
# Copy data into the 'value' column, by vintage year
430-
for (year, *_), value in group_data.items():
431-
df.loc[df["year_vtg"] == year, "value"] = value.magnitude
432-
433-
# Drop duplicates. For parameters with 'year_vtg' but no 'year_act' dimension,
434-
# the same year_vtg appears multiple times because of the contents of *defaults*
435-
df.drop_duplicates(inplace=True)
436-
437-
# Fill remaining values for the rest of vintage years with the last value
438-
# registered, in this case for 2030.
439-
df["value"] = df["value"].fillna(method="ffill")
440-
441-
# Convert to the model's preferred input/output units for each commodity
442-
if par in ("input", "output"):
443-
target_units = df.apply(
444-
lambda row: all_info.io_units(
445-
row["technology"], row["commodity"], row["level"]
446-
),
447-
axis=1,
448-
).unique()
449-
assert 1 == len(target_units)
450-
else:
451-
target_units = []
452-
453-
if len(target_units):
454-
# FIXME improve convert_units() to handle more of these steps
455-
df["value"] = convert_units(
456-
df["value"], {"value": (1.0, units, target_units[0])}
457-
)
458-
df["unit"] = f"{target_units[0]:~}"
459-
460-
# Round up technical_lifetime values due to incompatibility in handling
461-
# non-integer values in the GAMS code
462-
if par == "technical_lifetime":
463-
df["value"] = df["value"].round()
464-
465-
# Broadcast across all nodes
466-
dfs[par].append(
467-
df.pipe(broadcast, node_loc=nodes_ex_world(info.N)).pipe(same_node)
468-
)
469-
470-
# Concatenate data frames for each model parameter
471-
result = {par: pd.concat(list_of_df) for par, list_of_df in dfs.items()}
472-
473-
# Capacity factors all 1.0
474-
result.update(make_matched_dfs(result["output"], capacity_factor=1.0))
475-
result["capacity_factor"]["unit"] = ""
476-
477-
if context.get("debug", False):
478-
# Directory for debug output (if any)
479-
debug_dir = context.get_local_path("debug")
480-
# Ensure the directory
481-
debug_dir.mkdir(parents=True, exist_ok=True)
482-
483-
for name, df in result.items():
484-
target = debug_dir.joinpath(f"ikarus-{name}.csv")
485-
log.info(f"Dump data to {target}")
486-
df.to_csv(target, index=False)
487-
488-
return result

message_ix_models/tests/model/transport/test_ikarus.py

Lines changed: 2 additions & 92 deletions
Original file line numberDiff line numberDiff line change
@@ -1,105 +1,15 @@
1-
import pandas as pd
21
import pytest
32
from iam_units import registry
43
from message_ix import make_df
54
from numpy.testing import assert_allclose
65
from pandas.testing import assert_series_equal
76

8-
from message_ix_models.model.transport import build, ikarus, testing
7+
from message_ix_models.model.transport import build, testing
98
from message_ix_models.model.transport.non_ldv import UNITS
109
from message_ix_models.model.transport.testing import assert_units
1110
from message_ix_models.project.navigate import T35_POLICY
1211

1312

14-
@pytest.mark.skip(reason="Deprecated, slow")
15-
@pytest.mark.parametrize("years", ["A", "B"])
16-
@pytest.mark.parametrize(
17-
"regions, N_node", [("R11", 11), ("R12", 12), ("R14", 14), ("ISR", 1)]
18-
)
19-
def test_get_ikarus_data0(test_context, regions, N_node, years):
20-
ctx = test_context
21-
_, info = testing.configure_build(ctx, regions=regions, years=years)
22-
23-
# get_ikarus_data() succeeds on the bare RES
24-
data = ikarus.get_ikarus_data(ctx)
25-
26-
# Returns a mapping
27-
assert {
28-
"capacity_factor",
29-
"fix_cost",
30-
"input",
31-
"inv_cost",
32-
"output",
33-
"technical_lifetime",
34-
} == set(data.keys())
35-
assert all(map(lambda df: isinstance(df, pd.DataFrame), data.values()))
36-
37-
# Retrieve DataFrame for par e.g. 'inv_cost' and tech e.g. 'rail_pub'
38-
inv = data["inv_cost"]
39-
inv_rail_pub = inv[inv["technology"] == "rail_pub"]
40-
41-
# NB: *prep_years* is created to accommodate prepended years earlier than
42-
# *firstmodelyear*. See ikarus.py to check how/why those are prepended.
43-
prep_years = (1 if years == "A" else 2) + len(info.Y)
44-
# Regions * 13 years (inv_cost has 'year_vtg' but not 'year_act' dim)
45-
rows_per_tech = N_node * prep_years
46-
N_techs = 18
47-
48-
# Data have been loaded with the correct shape and magnitude:
49-
assert inv_rail_pub.shape == (rows_per_tech, 5), inv_rail_pub
50-
assert inv.shape == (rows_per_tech * N_techs, 5)
51-
52-
# Magnitude for year e.g. 2020
53-
values = inv_rail_pub[inv_rail_pub["year_vtg"] == 2020]["value"]
54-
value = values.iloc[0]
55-
assert round(value, 3) == 3.233
56-
57-
# Units of each parameter have the correct dimensionality
58-
dims = {
59-
"capacity_factor": {}, # always dimensionless
60-
"inv_cost": {"[currency]": 1, "[vehicle]": -1},
61-
"fix_cost": {"[currency]": 1, "[vehicle]": -1, "[time]": -1},
62-
"output": {"[passenger]": 1, "[vehicle]": -1},
63-
"technical_lifetime": {"[time]": 1},
64-
}
65-
for par, dim in dims.items():
66-
assert_units(data[par], dim)
67-
68-
# Specific magnitudes of other values to check
69-
checks = [
70-
# commented (PNK 2022-06-17): corrected abuse of capacity_factor to include
71-
# unrelated concepts
72-
# dict(par="capacity_factor", year_vtg=2010, value=0.000905),
73-
# dict(par="capacity_factor", year_vtg=2050, value=0.000886),
74-
dict(par="technical_lifetime", year_vtg=2010, value=15.0),
75-
dict(par="technical_lifetime", year_vtg=2050, value=15.0),
76-
]
77-
defaults = dict(node_loc=info.N[-1], technology="ICG_bus", time="year")
78-
79-
for check in checks:
80-
# Create expected data
81-
par_name = check.pop("par")
82-
check["year_act"] = check["year_vtg"]
83-
exp = make_df(par_name, **defaults, **check)
84-
assert len(exp) == 1, "Single row for expected value"
85-
86-
# Use merge() to find data with matching column values
87-
columns = sorted(set(exp.columns) - {"value", "unit"})
88-
result = exp.merge(data[par_name], on=columns, how="inner")
89-
90-
# Single row matches
91-
assert len(result) == 1, result
92-
93-
# Values match
94-
assert_series_equal(
95-
result["value_x"],
96-
result["value_y"],
97-
check_exact=False,
98-
check_names=False,
99-
atol=1e-4,
100-
)
101-
102-
10313
@build.get_computer.minimum_version
10414
@pytest.mark.parametrize("years", ["A", "B"])
10515
@pytest.mark.parametrize(
@@ -112,7 +22,7 @@ def test_get_ikarus_data0(test_context, regions, N_node, years):
11222
],
11323
)
11424
@pytest.mark.parametrize("options", [{}, dict(navigate_scenario=T35_POLICY.TEC)])
115-
def test_get_ikarus_data1(test_context, regions, N_node, years, options):
25+
def test_get_ikarus_data(test_context, regions, N_node, years, options):
11626
"""Test genno-based IKARUS data prep."""
11727
ctx = test_context
11828
c, info = testing.configure_build(

0 commit comments

Comments
 (0)