diff --git a/CHANGELOG.md b/CHANGELOG.md index 9330924ab..0690ae4b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,7 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - SQL time series sources (`SqlTimeSeriesSource` and `SqlTimeSeriesMappingSource`) [#467](https://github.com/ie3-institute/PowerSystemDataModel/issues/467) - SQL time series have a different structure than CSV counterparts [#545](https://github.com/ie3-institute/PowerSystemDataModel/issues/545) - Graph with impedance weighted edges including facilities to create it [#440](https://github.com/ie3-institute/PowerSystemDataModel/issues/440) -- `TimeSeriesTypeSource` providing a source for the mapping of time series uuids to column schemes (previously provided by `TimeSeriesMappingSource`) [#515](https://github.com/ie3-institute/PowerSystemDataModel/issues/515) +- `TimeSeriesMetaInformationSource` providing a source for the mapping of time series uuids to column schemes (previously provided by `TimeSeriesMappingSource`) [#515](https://github.com/ie3-institute/PowerSystemDataModel/issues/515) - `TemperatureDependantLoadProfile`s for depiction of profile behavior of night storage heating and heat pumps [#601](https://github.com/ie3-institute/PowerSystemDataModel/issues/601) - `ThermalUnits` as a container to hold all thermal units [#134](https://github.com/ie3-institute/PowerSystemDataModel/issues/134) - `ThermalInput` as a distinct abstract class for all thermal models @@ -29,6 +29,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Create JavaDoc with java 17 instead of java 8 - Let JavDoc pass, if there are warnings **ATTENTION:** Should be removed, when JavaDoc is fixed! (cf. Issue [#494](https://github.com/ie3-institute/PowerSystemDataModel/issues/494)) - `BufferedCsvWriter` writes columns in the order, that the headline elements are defined [#434](https://github.com/ie3-institute/PowerSystemDataModel/issues/393) +- Cleaned up `IndividualTimeSeriesMetaInformation`-related methods in `CsvFileConnector` [#544](https://github.com/ie3-institute/PowerSystemDataModel/issues/544) ### Changed - BREAKING: PvInput Model parameter name height changed to elevationAngle [#393](https://github.com/ie3-institute/PowerSystemDataModel/issues/393) :warning: diff --git a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java index 9df2348b9..59b040ebb 100644 --- a/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java +++ b/src/main/java/edu/ie3/datamodel/io/connectors/CsvFileConnector.java @@ -22,6 +22,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.*; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.io.FilenameUtils; @@ -42,8 +43,10 @@ public class CsvFileConnector implements DataConnector { new HashMap<>(); private final Map timeSeriesWriters = new HashMap<>(); // ATTENTION: Do not finalize. It's meant for lazy evaluation. + @Deprecated(since = "3.0", forRemoval = true) private Map individualTimeSeriesMetaInformation; + private final FileNamingStrategy fileNamingStrategy; private final String baseDirectoryName; @@ -221,87 +224,45 @@ public BufferedReader initReader(String filePath) throws FileNotFoundException { * * @param timeSeriesUuid The time series in question * @return An option on the queried information - * @deprecated since 3.0. 
Use {@link #getIndividualTimeSeriesMetaInformation()} instead + * @deprecated since 3.0. Use {@link #getCsvIndividualTimeSeriesMetaInformation(ColumnScheme...)} + * instead */ @Deprecated(since = "3.0", forRemoval = true) public Optional getIndividualTimeSeriesMetaInformation(UUID timeSeriesUuid) { if (Objects.isNull(individualTimeSeriesMetaInformation)) - individualTimeSeriesMetaInformation = buildIndividualTimeSeriesMetaInformation(); + individualTimeSeriesMetaInformation = getCsvIndividualTimeSeriesMetaInformation(); return Optional.ofNullable(individualTimeSeriesMetaInformation.get(timeSeriesUuid)) .map(edu.ie3.datamodel.io.csv.timeseries.IndividualTimeSeriesMetaInformation::new); } /** - * Get time series meta information - * - *
<p>
This method lazily evaluates the mapping from all time series files to their meta
-   * information.
+   * Receive the meta information for specific time series, filtered by the given column schemes,
+   * to allow accounting for the different content types.
    *
-   * @return All time series meta information
+   * @param columnSchemes the column schemes to filter for. If no scheme is given, the meta
+   *     information of all individual time series is returned.
+   * @return A mapping from time series UUID to the individual time series meta information
    */
   public Map<UUID, edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation>
-      getIndividualTimeSeriesMetaInformation() {
-    if (Objects.isNull(individualTimeSeriesMetaInformation))
-      individualTimeSeriesMetaInformation = buildIndividualTimeSeriesMetaInformation();
-
-    return individualTimeSeriesMetaInformation;
-  }
-
-  /**
-   * This method creates a map from time series uuid to it's meta information.
-   *
-   * @return Mapping from time series uuid to it's meta information.
-   */
-  private Map<UUID, edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation>
-      buildIndividualTimeSeriesMetaInformation() {
+      getCsvIndividualTimeSeriesMetaInformation(final ColumnScheme... columnSchemes) {
     return getIndividualTimeSeriesFilePaths().parallelStream()
         .map(
             filePath -> {
               /* Extract meta information from file path and enhance it with the file path itself */
-              String filePathWithoutEnding = removeFileEnding(filePath);
               IndividualTimeSeriesMetaInformation metaInformation =
-                  (IndividualTimeSeriesMetaInformation)
-                      fileNamingStrategy.timeSeriesMetaInformation(filePathWithoutEnding);
+                  fileNamingStrategy.individualTimeSeriesMetaInformation(filePath);
               return new edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation(
-                  metaInformation, filePathWithoutEnding);
-            })
-        .collect(Collectors.toMap(TimeSeriesMetaInformation::getUuid, v -> v));
-  }
-
-  /**
-   * Receive the information for specific time series. They are given back grouped by the column
-   * scheme in order to allow for accounting the different content types.
-   *
-   * @param columnSchemes the column schemes to initialize readers for. If no scheme is given, all
-   *     possible readers will be initialized.
-   * @return A mapping from column scheme to the individual time series meta information
-   */
-  public Map<ColumnScheme, Set<edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation>>
-      getCsvIndividualTimeSeriesMetaInformation(ColumnScheme... 
columnSchemes) { - return getIndividualTimeSeriesFilePaths().parallelStream() - .map( - pathString -> { - String filePathWithoutEnding = removeFileEnding(pathString); - return buildCsvTimeSeriesMetaInformation(filePathWithoutEnding, columnSchemes); + metaInformation, FileNamingStrategy.removeFileNameEnding(filePath)); }) - .filter(Optional::isPresent) - .map(Optional::get) - .collect( - Collectors.groupingBy( - edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation::getColumnScheme, - Collectors.toSet())); - } - - /** - * Removes the file ending from input string - * - * @param input String to manipulate - * @return input without possible ending - */ - private String removeFileEnding(String input) { - return input.replaceAll(FILE_ENDING + "$", ""); + .filter( + metaInformation -> + columnSchemes == null + || columnSchemes.length == 0 + || Stream.of(columnSchemes) + .anyMatch(scheme -> scheme.equals(metaInformation.getColumnScheme()))) + .collect(Collectors.toMap(TimeSeriesMetaInformation::getUuid, Function.identity())); } /** @@ -320,7 +281,7 @@ private Set getIndividualTimeSeriesFilePaths() { .map(baseDirectoryPath::relativize) .filter( path -> { - String withoutEnding = removeFileEnding(path.toString()); + String withoutEnding = FileNamingStrategy.removeFileNameEnding(path.toString()); return fileNamingStrategy .getIndividualTimeSeriesPattern() .matcher(withoutEnding) @@ -334,55 +295,6 @@ private Set getIndividualTimeSeriesFilePaths() { } } - /** - * Compose the needed information for reading in a single time series. If the file points to a - * non-individual time series or a time series of a column scheme other than the specified ones, - * or the initialisation of the reader does not work, an empty {@link Optional} is given back - * - * @param filePathString String describing the path to the time series file - * @param columnSchemes the allowed column schemes. If no scheme is specified, all schemes are - * allowed. - * @return An {@link Optional} to {@link IndividualTimeSeriesMetaInformation} - */ - private Optional - buildCsvTimeSeriesMetaInformation(String filePathString, ColumnScheme... columnSchemes) { - try { - TimeSeriesMetaInformation metaInformation = - fileNamingStrategy.timeSeriesMetaInformation(filePathString); - if (!IndividualTimeSeriesMetaInformation.class.isAssignableFrom(metaInformation.getClass())) { - log.error( - "The time series file '{}' does not represent an individual time series.", - filePathString); - return Optional.empty(); - } - - IndividualTimeSeriesMetaInformation individualMetaInformation = - (IndividualTimeSeriesMetaInformation) metaInformation; - - // If no column schemes are specified, we will include all. 
If there a specified schemes, we - // check if the file's column scheme matches any of them - if (columnSchemes != null - && columnSchemes.length > 0 - && Stream.of(columnSchemes) - .noneMatch(scheme -> scheme.equals(individualMetaInformation.getColumnScheme()))) { - log.warn( - "The column scheme of the time series file {} does not match any of the specified column schemes ({}), so it will not be processed.", - filePathString, - columnSchemes); - return Optional.empty(); - } - return Optional.of( - new edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation( - individualMetaInformation.getUuid(), - individualMetaInformation.getColumnScheme(), - filePathString)); - } catch (IllegalArgumentException e) { - log.error( - "Error during extraction of meta information from file name '{}'.", filePathString, e); - return Optional.empty(); - } - } - /** * Initialises a reader to get grip on the file that contains mapping information between * coordinate id and actual coordinate diff --git a/src/main/java/edu/ie3/datamodel/io/factory/timeseries/TimeSeriesTypeFactory.java b/src/main/java/edu/ie3/datamodel/io/factory/timeseries/TimeSeriesMetaInformationFactory.java similarity index 64% rename from src/main/java/edu/ie3/datamodel/io/factory/timeseries/TimeSeriesTypeFactory.java rename to src/main/java/edu/ie3/datamodel/io/factory/timeseries/TimeSeriesMetaInformationFactory.java index 2b24841e2..fba7317a7 100644 --- a/src/main/java/edu/ie3/datamodel/io/factory/timeseries/TimeSeriesTypeFactory.java +++ b/src/main/java/edu/ie3/datamodel/io/factory/timeseries/TimeSeriesMetaInformationFactory.java @@ -8,7 +8,7 @@ import edu.ie3.datamodel.io.factory.EntityFactory; import edu.ie3.datamodel.io.factory.SimpleEntityData; import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme; -import edu.ie3.datamodel.io.source.TimeSeriesTypeSource; +import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation; import java.util.Collections; import java.util.List; import java.util.Set; @@ -17,15 +17,16 @@ import java.util.stream.Stream; /** - * Factory that creates {@link TimeSeriesTypeSource.TypeEntry} entities from source field mappings + * Factory that creates {@link IndividualTimeSeriesMetaInformation} entities from source field + * mappings */ -public class TimeSeriesTypeFactory - extends EntityFactory { +public class TimeSeriesMetaInformationFactory + extends EntityFactory { private static final String TIME_SERIES = "timeSeries"; private static final String COLUMN_SCHEME = "columnScheme"; - public TimeSeriesTypeFactory() { - super(TimeSeriesTypeSource.TypeEntry.class); + public TimeSeriesMetaInformationFactory() { + super(IndividualTimeSeriesMetaInformation.class); } @Override @@ -35,9 +36,9 @@ protected List> getFields(SimpleEntityData data) { } @Override - protected TimeSeriesTypeSource.TypeEntry buildModel(SimpleEntityData data) { + protected IndividualTimeSeriesMetaInformation buildModel(SimpleEntityData data) { UUID timeSeries = data.getUUID(TIME_SERIES); ColumnScheme columnScheme = ColumnScheme.parse(data.getField(COLUMN_SCHEME)).orElseThrow(); - return new TimeSeriesTypeSource.TypeEntry(timeSeries, columnScheme); + return new IndividualTimeSeriesMetaInformation(timeSeries, columnScheme); } } diff --git a/src/main/java/edu/ie3/datamodel/io/naming/FileNamingStrategy.java b/src/main/java/edu/ie3/datamodel/io/naming/FileNamingStrategy.java index 593cf0238..7db3a984b 100644 --- a/src/main/java/edu/ie3/datamodel/io/naming/FileNamingStrategy.java +++ 
b/src/main/java/edu/ie3/datamodel/io/naming/FileNamingStrategy.java @@ -274,7 +274,7 @@ public FileNameMetaInformation extractTimeSeriesMetaInformation(String fileName) */ public TimeSeriesMetaInformation timeSeriesMetaInformation(String fileName) { /* Remove the file ending (ending limited to 255 chars, which is the max file name allowed in NTFS and ext4) */ - String withoutEnding = fileName.replaceAll("(?:\\.[^\\\\/\\s]{1,255}){1,2}$", ""); + String withoutEnding = removeFileNameEnding(fileName); if (getIndividualTimeSeriesPattern().matcher(withoutEnding).matches()) return entityPersistenceNamingStrategy.individualTimesSeriesMetaInformation(withoutEnding); @@ -285,6 +285,15 @@ else if (getLoadProfileTimeSeriesPattern().matcher(withoutEnding).matches()) "Unknown format of '" + fileName + "'. Cannot extract meta information."); } + public IndividualTimeSeriesMetaInformation individualTimeSeriesMetaInformation(String fileName) { + return entityPersistenceNamingStrategy.individualTimesSeriesMetaInformation( + removeFileNameEnding(fileName)); + } + + public static String removeFileNameEnding(String fileName) { + return fileName.replaceAll("(?:\\.[^.\\\\/\\s]{1,255}){1,2}$", ""); + } + /** * Get the entity name for coordinates * diff --git a/src/main/java/edu/ie3/datamodel/io/naming/TimeSeriesMetaInformation.java b/src/main/java/edu/ie3/datamodel/io/naming/TimeSeriesMetaInformation.java index 8af6a38f1..d750eac44 100644 --- a/src/main/java/edu/ie3/datamodel/io/naming/TimeSeriesMetaInformation.java +++ b/src/main/java/edu/ie3/datamodel/io/naming/TimeSeriesMetaInformation.java @@ -5,35 +5,13 @@ */ package edu.ie3.datamodel.io.naming; -import java.util.Objects; +import edu.ie3.datamodel.models.input.InputEntity; import java.util.UUID; /** Meta information, that describe a certain data source */ -public abstract class TimeSeriesMetaInformation { - private final UUID uuid; +public abstract class TimeSeriesMetaInformation extends InputEntity { protected TimeSeriesMetaInformation(UUID uuid) { - this.uuid = uuid; - } - - public UUID getUuid() { - return uuid; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof TimeSeriesMetaInformation that)) return false; - return uuid.equals(that.uuid); - } - - @Override - public int hashCode() { - return Objects.hash(uuid); - } - - @Override - public String toString() { - return "TimeSeriesMetaInformation{" + "uuid=" + uuid + '}'; + super(uuid); } } diff --git a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java index 66d718651..05f905728 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMappingSource.java @@ -38,7 +38,8 @@ default Optional getTimeSeriesUuid(UUID modelIdentifier) { * * @param timeSeriesUuid Unique identifier of the time series in question * @return An Option onto the meta information - * @deprecated since 3.0. Use {@link TimeSeriesTypeSource#getTimeSeriesMetaInformation()} instead + * @deprecated since 3.0. 
Use {@link + * TimeSeriesMetaInformationSource#getTimeSeriesMetaInformation()} instead */ @Deprecated(since = "3.0", forRemoval = true) Optional diff --git a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java new file mode 100644 index 000000000..b56221715 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java @@ -0,0 +1,32 @@ +/* + * © 2022. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source; + +import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme; +import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; + +/** Source for all available time series with their {@link UUID} and {@link ColumnScheme} */ +public interface TimeSeriesMetaInformationSource extends DataSource { + + /** + * Get a mapping from time series {@link UUID} to its meta information {@link + * IndividualTimeSeriesMetaInformation} + * + * @return that mapping + */ + Map getTimeSeriesMetaInformation(); + + /** + * Get an option on the given time series meta information + * + * @param timeSeriesUuid Unique identifier of the time series in question + * @return An Option on the meta information + */ + Optional getTimeSeriesMetaInformation(UUID timeSeriesUuid); +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesTypeSource.java b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesTypeSource.java deleted file mode 100644 index a8d0054c1..000000000 --- a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesTypeSource.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * © 2022. 
TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.source; - -import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme; -import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation; -import edu.ie3.datamodel.models.input.InputEntity; -import java.util.Map; -import java.util.Objects; -import java.util.UUID; - -/** Source for all available time series with their {@link UUID} and {@link ColumnScheme} */ -public interface TimeSeriesTypeSource extends DataSource { - - /** - * Get a mapping from time series {@link UUID} to its meta information {@link - * IndividualTimeSeriesMetaInformation} - * - * @return that mapping - */ - Map getTimeSeriesMetaInformation(); - - /** Class to represent one entry within the participant to time series mapping */ - class TypeEntry extends InputEntity { - private final ColumnScheme columnScheme; - - public TypeEntry(UUID timeSeries, ColumnScheme columnScheme) { - super(timeSeries); - this.columnScheme = columnScheme; - } - - public UUID getTimeSeries() { - return getUuid(); - } - - public ColumnScheme getColumnScheme() { - return columnScheme; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof TypeEntry that)) return false; - if (!super.equals(o)) return false; - return columnScheme == that.columnScheme; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), columnScheme); - } - - @Override - public String toString() { - return "TypeEntry{" + "uuid=" + getUuid() + ", columnScheme=" + columnScheme + '}'; - } - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java index 7fb2e379b..125b666d7 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMappingSource.java @@ -43,8 +43,8 @@ public Map getMapping() { } /** - * @deprecated since 3.0. Use {@link CsvTimeSeriesTypeSource#getTimeSeriesMetaInformation()} - * instead + * @deprecated since 3.0. Use {@link + * CsvTimeSeriesMetaInformationSource#getTimeSeriesMetaInformation()} instead */ @Override @Deprecated(since = "3.0", forRemoval = true) diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java new file mode 100644 index 000000000..11926a7f4 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java @@ -0,0 +1,56 @@ +/* + * © 2022. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.source.csv; + +import edu.ie3.datamodel.io.naming.FileNamingStrategy; +import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme; +import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation; +import edu.ie3.datamodel.io.source.TimeSeriesMetaInformationSource; +import edu.ie3.datamodel.utils.TimeSeriesUtils; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Collectors; + +/** + * CSV implementation for retrieving {@link TimeSeriesMetaInformationSource} from input directory + * structures + */ +public class CsvTimeSeriesMetaInformationSource extends CsvDataSource + implements TimeSeriesMetaInformationSource { + + private final Map + timeSeriesMetaInformation; + + /** + * Creates a time series type source + * + * @param csvSep the CSV separator + * @param folderPath path that time series reside in + * @param fileNamingStrategy the file naming strategy + */ + public CsvTimeSeriesMetaInformationSource( + String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { + super(csvSep, folderPath, fileNamingStrategy); + + // retrieve only the desired time series + timeSeriesMetaInformation = + connector.getCsvIndividualTimeSeriesMetaInformation( + TimeSeriesUtils.getAcceptedColumnSchemes().toArray(new ColumnScheme[0])); + } + + @Override + public Map getTimeSeriesMetaInformation() { + return timeSeriesMetaInformation.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public Optional getTimeSeriesMetaInformation( + UUID timeSeriesUuid) { + return Optional.ofNullable(timeSeriesMetaInformation.get(timeSeriesUuid)); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesTypeSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesTypeSource.java deleted file mode 100644 index 2ec49aca7..000000000 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesTypeSource.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * © 2022. 
TU Dortmund University, - * Institute of Energy Systems, Energy Efficiency and Energy Economics, - * Research group Distribution grid planning and operation -*/ -package edu.ie3.datamodel.io.source.csv; - -import edu.ie3.datamodel.io.naming.FileNamingStrategy; -import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation; -import edu.ie3.datamodel.io.source.TimeSeriesTypeSource; -import edu.ie3.datamodel.utils.TimeSeriesUtils; -import java.util.Map; -import java.util.UUID; -import java.util.stream.Collectors; - -/** - * CSV implementation for retrieving {@link TimeSeriesTypeSource} from input directory structures - */ -public class CsvTimeSeriesTypeSource extends CsvDataSource implements TimeSeriesTypeSource { - - /** - * Creates a time series type source - * - * @param csvSep the CSV separator - * @param folderPath path that time series reside in - * @param fileNamingStrategy the file naming strategy - */ - public CsvTimeSeriesTypeSource( - String csvSep, String folderPath, FileNamingStrategy fileNamingStrategy) { - super(csvSep, folderPath, fileNamingStrategy); - } - - @Override - public Map getTimeSeriesMetaInformation() { - return connector.getIndividualTimeSeriesMetaInformation().entrySet().stream() - .filter(entry -> TimeSeriesUtils.isSchemeAccepted(entry.getValue().getColumnScheme())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - } -} diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java index c15d2765e..ece86e77c 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvWeatherSource.java @@ -95,14 +95,10 @@ public CsvWeatherSource( */ private Map> getWeatherTimeSeries() { /* Get only weather time series meta information */ - Map> colTypeToMetaData = - connector.getCsvIndividualTimeSeriesMetaInformation(ColumnScheme.WEATHER); + Collection weatherCsvMetaInformation = + connector.getCsvIndividualTimeSeriesMetaInformation(ColumnScheme.WEATHER).values(); - /* Reading in weather time series */ - Set weatherCsvMetaInformation = - colTypeToMetaData.get(ColumnScheme.WEATHER); - - return readWeatherTimeSeries(weatherCsvMetaInformation, connector); + return readWeatherTimeSeries(Set.copyOf(weatherCsvMetaInformation), connector); } @Override diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMappingSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMappingSource.java index 03b517e37..f49a8aa4e 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMappingSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMappingSource.java @@ -45,8 +45,8 @@ public Map getMapping() { } /** - * @deprecated since 3.0. Use {@link SqlTimeSeriesTypeSource#getTimeSeriesMetaInformation()} - * instead + * @deprecated since 3.0. 
Use {@link + * SqlTimeSeriesMetaInformationSource#getTimeSeriesMetaInformation()} instead */ @Override @Deprecated(since = "3.0", forRemoval = true) diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesTypeSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java similarity index 62% rename from src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesTypeSource.java rename to src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java index 6e1c7a210..f70a5e94f 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesTypeSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java @@ -7,34 +7,41 @@ import edu.ie3.datamodel.io.connectors.SqlConnector; import edu.ie3.datamodel.io.factory.SimpleEntityData; -import edu.ie3.datamodel.io.factory.timeseries.TimeSeriesTypeFactory; +import edu.ie3.datamodel.io.factory.timeseries.TimeSeriesMetaInformationFactory; import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy; import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme; import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation; -import edu.ie3.datamodel.io.source.TimeSeriesTypeSource; +import edu.ie3.datamodel.io.source.TimeSeriesMetaInformationSource; import edu.ie3.datamodel.utils.TimeSeriesUtils; import java.util.Map; import java.util.Optional; import java.util.UUID; +import java.util.function.Function; import java.util.stream.Collectors; -/** SQL implementation for retrieving {@link TimeSeriesTypeSource} from the SQL scheme */ -public class SqlTimeSeriesTypeSource extends SqlDataSource - implements TimeSeriesTypeSource { +/** SQL implementation for retrieving {@link TimeSeriesMetaInformationSource} from the SQL scheme */ +public class SqlTimeSeriesMetaInformationSource + extends SqlDataSource + implements TimeSeriesMetaInformationSource { - private static final TimeSeriesTypeFactory mappingFactory = new TimeSeriesTypeFactory(); - - /** Query to retrieve information on all time series that are available */ - private final String queryComplete; + private static final TimeSeriesMetaInformationFactory mappingFactory = + new TimeSeriesMetaInformationFactory(); private final DatabaseNamingStrategy namingStrategy; + private final Map mapping; - public SqlTimeSeriesTypeSource( + public SqlTimeSeriesMetaInformationSource( SqlConnector connector, String schemaName, DatabaseNamingStrategy namingStrategy) { super(connector); this.namingStrategy = namingStrategy; - this.queryComplete = createQueryComplete(schemaName); + String queryComplete = createQueryComplete(schemaName); + + this.mapping = + executeQuery(queryComplete, ps -> {}).stream() + .collect( + Collectors.toMap( + IndividualTimeSeriesMetaInformation::getUuid, Function.identity())); } /** @@ -71,18 +78,20 @@ private String createQueryComplete(String schemaName) { @Override public Map getTimeSeriesMetaInformation() { - return executeQuery(queryComplete, ps -> {}).stream() - .collect( - Collectors.toMap( - TypeEntry::getTimeSeries, - entry -> - new IndividualTimeSeriesMetaInformation( - entry.getTimeSeries(), entry.getColumnScheme()))); + return this.mapping; + } + + @Override + public Optional getTimeSeriesMetaInformation( + UUID timeSeriesUuid) { + return Optional.ofNullable(this.mapping.get(timeSeriesUuid)); } @Override - protected Optional createEntity(Map fieldToValues) { - SimpleEntityData entityData = new SimpleEntityData(fieldToValues, TypeEntry.class); + 
protected Optional createEntity( + Map fieldToValues) { + SimpleEntityData entityData = + new SimpleEntityData(fieldToValues, IndividualTimeSeriesMetaInformation.class); return mappingFactory.get(entityData); } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy index 01fb46b30..2bf4ff09e 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/connectors/CsvFileConnectorTest.groovy @@ -88,42 +88,28 @@ class CsvFileConnectorTest extends Specification { ] when: - def actual = cfc.individualTimeSeriesMetaInformation + def actual = cfc.getCsvIndividualTimeSeriesMetaInformation() then: actual == expected } - def "The csv file connector returns empty Optional of CsvTimeSeriesMetaInformation when pointed to non-individual time series"() { + def "The csv file connector is able to build correct uuid to meta information mapping when restricting column schemes"() { given: - def pathString = "lpts_h0_53990eea-1b5d-47e8-9134-6d8de36604bf" - - when: - def actual = cfc.buildCsvTimeSeriesMetaInformation(pathString) - - then: - !actual.present - } - - def "The csv file connector is able to build correct meta information from valid input"() { - given: - def pathString = "its_pq_53990eea-1b5d-47e8-9134-6d8de36604bf" - def expected = new CsvIndividualTimeSeriesMetaInformation( - UUID.fromString("53990eea-1b5d-47e8-9134-6d8de36604bf"), - ColumnScheme.APPARENT_POWER, - "" - ) + def expected = [ + (UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("b88dee50-5484-4136-901d-050d8c1c97d1"), ColumnScheme.ENERGY_PRICE, "its_c_b88dee50-5484-4136-901d-050d8c1c97d1"), + (UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("c7b0d9d6-5044-4f51-80b4-f221d8b1f14b"), ColumnScheme.ENERGY_PRICE, "its_c_c7b0d9d6-5044-4f51-80b4-f221d8b1f14b"), + (UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226")): new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("fcf0b851-a836-4bde-8090-f44c382ed226"), ColumnScheme.ACTIVE_POWER, "its_p_fcf0b851-a836-4bde-8090-f44c382ed226") + ] when: - def actual = cfc.buildCsvTimeSeriesMetaInformation(pathString) + def actual = cfc.getCsvIndividualTimeSeriesMetaInformation( + ColumnScheme.ENERGY_PRICE, + ColumnScheme.ACTIVE_POWER + ) then: - actual.present - actual.get().with { - assert uuid == expected.uuid - assert columnScheme == expected.columnScheme - /* Don't check the reader explicitly */ - } + actual == expected } def "The csv file connector throws an Exception, if the foreseen file cannot be found"() { diff --git a/src/test/groovy/edu/ie3/datamodel/io/naming/FileNamingStrategyTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/naming/FileNamingStrategyTest.groovy index 258e24841..e6decb325 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/naming/FileNamingStrategyTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/naming/FileNamingStrategyTest.groovy @@ -827,7 +827,7 @@ class FileNamingStrategyTest extends Specification { ex.message == "Unknown format of 'foo'. Cannot extract meta information." 
} - def "The EntityPersistenceNamingStrategy extracts correct meta information from a valid individual time series file name"() { + def "The FileNamingStrategy extracts correct meta information from a valid time series file name"() { given: def fns = new FileNamingStrategy(simpleEntityNaming, flatHierarchy) def path = Paths.get(pathString) @@ -843,17 +843,17 @@ class FileNamingStrategyTest extends Specification { } where: - pathString || expectedColumnScheme - "/bla/foo/its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.ENERGY_PRICE - "/bla/foo/its_p_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.ACTIVE_POWER - "/bla/foo/its_pq_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.APPARENT_POWER - "/bla/foo/its_h_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.HEAT_DEMAND - "/bla/foo/its_ph_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND - "/bla/foo/its_pqh_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND + pathString || expectedColumnScheme + "/bla/foo/its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.ENERGY_PRICE + "/bla/foo/its_p_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.ACTIVE_POWER + "/bla/foo/its_pq_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.APPARENT_POWER + "/bla/foo/its_h_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.HEAT_DEMAND + "/bla/foo/its_ph_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND + "/bla/foo/its_pqh_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND "/bla/foo/its_weather_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.WEATHER } - def "The EntityPersistenceNamingStrategy extracts correct meta information from a valid individual time series file name with pre- and suffix"() { + def "The FileNamingStrategy extracts correct meta information from a valid time series file name with pre- and suffix"() { given: def fns = new FileNamingStrategy(new EntityPersistenceNamingStrategy("prefix", "suffix"), flatHierarchy) def path = Paths.get(pathString) @@ -869,17 +869,55 @@ class FileNamingStrategyTest extends Specification { } where: - pathString || expectedColumnScheme - "/bla/foo/prefix_its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.ENERGY_PRICE - "/bla/foo/prefix_its_p_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.ACTIVE_POWER - "/bla/foo/prefix_its_pq_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.APPARENT_POWER - "/bla/foo/prefix_its_h_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.HEAT_DEMAND - "/bla/foo/prefix_its_ph_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND - "/bla/foo/prefix_its_pqh_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND + pathString || expectedColumnScheme + "/bla/foo/prefix_its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.ENERGY_PRICE + "/bla/foo/prefix_its_p_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.ACTIVE_POWER + "/bla/foo/prefix_its_pq_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.APPARENT_POWER + "/bla/foo/prefix_its_h_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.HEAT_DEMAND + "/bla/foo/prefix_its_ph_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND + 
"/bla/foo/prefix_its_pqh_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND "/bla/foo/prefix_its_weather_4881fda2-bcee-4f4f-a5bb-6a09bf785276_suffix.csv" || ColumnScheme.WEATHER } - def "The EntityPersistenceNamingStrategy throw an IllegalArgumentException, if the column scheme is malformed."() { + def "The FileNamingStrategy extracts correct meta information from a valid individual time series file name"() { + given: + def fns = new FileNamingStrategy(simpleEntityNaming, flatHierarchy) + + when: + def metaInformation = fns.individualTimeSeriesMetaInformation(fileName) + + then: + IndividualTimeSeriesMetaInformation.isAssignableFrom(metaInformation.getClass()) + (metaInformation as IndividualTimeSeriesMetaInformation).with { + assert it.uuid == UUID.fromString("4881fda2-bcee-4f4f-a5bb-6a09bf785276") + assert it.columnScheme == expectedColumnScheme + } + + where: + fileName || expectedColumnScheme + "its_c_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.ENERGY_PRICE + "its_p_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.ACTIVE_POWER + "its_pq_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.APPARENT_POWER + "its_h_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.HEAT_DEMAND + "its_ph_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.ACTIVE_POWER_AND_HEAT_DEMAND + "its_pqh_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.APPARENT_POWER_AND_HEAT_DEMAND + "its_weather_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" || ColumnScheme.WEATHER + } + + def "The FileNamingStrategy throw an IllegalArgumentException, if the time series file path is malformed."() { + given: + def fns = new FileNamingStrategy(simpleEntityNaming, flatHierarchy) + def path = "erroneous_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv" + + when: + fns.individualTimeSeriesMetaInformation(path) + + then: + def ex = thrown(IllegalArgumentException) + ex.message == "Cannot extract meta information on individual time series from 'erroneous_4881fda2-bcee-4f4f-a5bb-6a09bf785276'." + } + + def "The FileNamingStrategy throw an IllegalArgumentException, if the column scheme is malformed."() { given: def fns = new FileNamingStrategy(simpleEntityNaming, flatHierarchy) def path = Paths.get("/bla/foo/its_whoops_4881fda2-bcee-4f4f-a5bb-6a09bf785276.csv") @@ -892,7 +930,7 @@ class FileNamingStrategyTest extends Specification { ex.message == "Cannot parse 'whoops' to valid column scheme." 
} - def "The EntityPersistenceNamingStrategy extracts correct meta information from a valid load profile time series file name"() { + def "The FileNamingStrategy extracts correct meta information from a valid load profile time series file name"() { given: def fns = new FileNamingStrategy(simpleEntityNaming, flatHierarchy) def path = Paths.get("/bla/foo/lpts_g3_bee0a8b6-4788-4f18-bf72-be52035f7304.csv") @@ -908,7 +946,7 @@ class FileNamingStrategyTest extends Specification { } } - def "The EntityPersistenceNamingStrategy extracts correct meta information from a valid load profile time series file name with pre- and suffix"() { + def "The FileNamingStrategy extracts correct meta information from a valid load profile time series file name with pre- and suffix"() { given: def fns = new FileNamingStrategy(new EntityPersistenceNamingStrategy("prefix", "suffix"), flatHierarchy) def path = Paths.get("/bla/foo/prefix_lpts_g3_bee0a8b6-4788-4f18-bf72-be52035f7304_suffix.csv") diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesTypeSourceIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSourceIT.groovy similarity index 59% rename from src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesTypeSourceIT.groovy rename to src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSourceIT.groovy index 9f42e805f..f314d9707 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesTypeSourceIT.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSourceIT.groovy @@ -11,15 +11,15 @@ import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme import spock.lang.Shared import spock.lang.Specification -class CsvTimeSeriesTypeSourceIT extends Specification implements CsvTestDataMeta { +class CsvTimeSeriesMetaInformationSourceIT extends Specification implements CsvTestDataMeta { @Shared - CsvTimeSeriesTypeSource source + CsvTimeSeriesMetaInformationSource source def setupSpec() { - source = new CsvTimeSeriesTypeSource(";", timeSeriesFolderPath, new FileNamingStrategy()) + source = new CsvTimeSeriesMetaInformationSource(";", timeSeriesFolderPath, new FileNamingStrategy()) } - def "A csv time series type source returns correct mapping of time series"() { + def "A CSV time series meta information source returns correct mapping of time series"() { given: def expectedTimeSeries = Set.of( new CsvIndividualTimeSeriesMetaInformation(UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1"), ColumnScheme.ENERGY_PRICE, 'its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1'), @@ -41,4 +41,33 @@ class CsvTimeSeriesTypeSourceIT extends Specification implements CsvTestDataMeta expectedTimeSeries.contains(it.value) } } + + def "The CSV time series meta information source returns correct meta information for a given time series UUID"() { + when: + def timeSeriesUuid = UUID.fromString(uuid) + def result = source.getTimeSeriesMetaInformation(timeSeriesUuid) + + then: + result.present + result.get().columnScheme.scheme == columnScheme + + where: + uuid || columnScheme + "2fcb3e53-b94a-4b96-bea4-c469e499f1a1" || "c" + "76c9d846-797c-4f07-b7ec-2245f679f5c7" || "ph" + "c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0" || "h" + "9185b8c1-86ba-4a16-8dea-5ac898e8caa5" || "p" + "3fbfaa97-cff4-46d4-95ba-a95665e87c26" || "pq" + "46be1e57-e4ed-4ef7-95f1-b2b321cb2047" || "pqh" + "1061af70-1c03-46e1-b960-940b956c429f" || "pq" + } + + def "The CSV time series meta information source returns an empty optional for an unknown time series 
UUID"() { + when: + def timeSeriesUuid = UUID.fromString("e9c13f5f-31da-44ea-abb7-59f616c3da16") + def result = source.getTimeSeriesMetaInformation(timeSeriesUuid) + + then: + result.empty + } } diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesTypeSourceIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSourceIT.groovy similarity index 67% rename from src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesTypeSourceIT.groovy rename to src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSourceIT.groovy index 83560d702..db5b51073 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesTypeSourceIT.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSourceIT.groovy @@ -18,7 +18,7 @@ import spock.lang.Shared import spock.lang.Specification @Testcontainers -class SqlTimeSeriesTypeSourceIT extends Specification implements TestContainerHelper { +class SqlTimeSeriesMetaInformationSourceIT extends Specification implements TestContainerHelper { @Shared PostgreSQLContainer postgreSQLContainer = new PostgreSQLContainer("postgres:14.2") @@ -27,7 +27,7 @@ class SqlTimeSeriesTypeSourceIT extends Specification implements TestContainerHe SqlConnector connector @Shared - SqlTimeSeriesTypeSource source + SqlTimeSeriesMetaInformationSource source def setupSpec() { // Copy sql import script into docker @@ -48,10 +48,10 @@ class SqlTimeSeriesTypeSourceIT extends Specification implements TestContainerHe } connector = new SqlConnector(postgreSQLContainer.jdbcUrl, postgreSQLContainer.username, postgreSQLContainer.password) - source = new SqlTimeSeriesTypeSource(connector, "public", new DatabaseNamingStrategy()) + source = new SqlTimeSeriesMetaInformationSource(connector, "public", new DatabaseNamingStrategy()) } - def "The sql time series type source returns a correct mapping of time series"() { + def "The SQL time series meta information source returns a correct mapping of time series"() { when: def expectedTimeSeries = Set.of( new IndividualTimeSeriesMetaInformation(UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1"), ColumnScheme.ENERGY_PRICE), @@ -72,4 +72,33 @@ class SqlTimeSeriesTypeSourceIT extends Specification implements TestContainerHe expectedTimeSeries.contains(it.value) } } + + def "The SQL time series meta information source returns correct meta information for a given time series UUID"() { + when: + def timeSeriesUuid = UUID.fromString(uuid) + def result = source.getTimeSeriesMetaInformation(timeSeriesUuid) + + then: + result.present + result.get().columnScheme.scheme == columnScheme + + where: + uuid || columnScheme + "2fcb3e53-b94a-4b96-bea4-c469e499f1a1" || "c" + "76c9d846-797c-4f07-b7ec-2245f679f5c7" || "ph" + "c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0" || "h" + "9185b8c1-86ba-4a16-8dea-5ac898e8caa5" || "p" + "3fbfaa97-cff4-46d4-95ba-a95665e87c26" || "pq" + "46be1e57-e4ed-4ef7-95f1-b2b321cb2047" || "pqh" + "b669e4bf-a351-4067-860d-d5f224b62247" || "p" + } + + def "The SQL time series meta information source returns an empty optional for an unknown time series UUID"() { + when: + def timeSeriesUuid = UUID.fromString("e9c13f5f-31da-44ea-abb7-59f616c3da16") + def result = source.getTimeSeriesMetaInformation(timeSeriesUuid) + + then: + result.empty + } } diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1.sql 
b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1.sql deleted file mode 100644 index 01a906285..000000000 --- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1.sql +++ /dev/null @@ -1,17 +0,0 @@ -CREATE TABLE public."its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1" -( - time timestamp with time zone, - price double precision, - uuid uuid, - CONSTRAINT its_c_pkey PRIMARY KEY (uuid) -) - WITH ( - OIDS = FALSE - ) - TABLESPACE pg_default; - -INSERT INTO - public."its_c_2fcb3e53-b94a-4b96-bea4-c469e499f1a1" (uuid, time, price) -VALUES -('45bd936f-524a-4d59-8978-31ccf37fa230', '2020-01-01T00:00:00Z', 100.0), -('41b8dbf6-3e75-4073-8359-89d015777dd6', '2020-01-01T00:15:00Z', 125.0); diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_h_c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_h_c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0.sql deleted file mode 100644 index 9c8ce188e..000000000 --- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_h_c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0.sql +++ /dev/null @@ -1,17 +0,0 @@ -CREATE TABLE public."its_h_c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0" -( - time timestamp with time zone, - heat_demand double precision, - uuid uuid, - CONSTRAINT its_h_pkey PRIMARY KEY (uuid) -) - WITH ( - OIDS = FALSE - ) - TABLESPACE pg_default; - -INSERT INTO - public."its_h_c8fe6547-fd85-4fdf-a169-e4da6ce5c3d0" (uuid, time, heat_demand) -VALUES -('5ec4ddfe-addf-4f32-8fb5-fd4eaa5e5ced', '2020-01-01 00:00:00+0', 8.0), -('e82dd54c-9f6f-4451-9dcd-f4f41b8c9ee0', '2020-01-01 00:15:00+0', 12.0); diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5.sql deleted file mode 100644 index 1f956e06a..000000000 --- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5.sql +++ /dev/null @@ -1,17 +0,0 @@ -CREATE TABLE public."its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5" -( - time timestamp with time zone, - p double precision, - uuid uuid, - CONSTRAINT its_p_pkey PRIMARY KEY (uuid) -) - WITH ( - OIDS = FALSE - ) - TABLESPACE pg_default; - -INSERT INTO - public."its_p_9185b8c1-86ba-4a16-8dea-5ac898e8caa5" (uuid, time, p) -VALUES -('0245d599-9a5c-4c32-9613-5b755fac8ca0', '2020-01-01 00:00:00+0', 1000.0), -('a5e27652-9024-4a93-9d2a-590fbc3ab5a1', '2020-01-01 00:15:00+0', 1250.0); diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7.sql deleted file mode 100644 index 0afcf0363..000000000 --- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7.sql +++ /dev/null @@ -1,18 +0,0 @@ -CREATE TABLE public."its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7" -( - time timestamp with time zone, - p double precision, - heat_demand double precision, - uuid uuid, - CONSTRAINT its_ph_pkey PRIMARY KEY (uuid) -) - WITH ( - OIDS = FALSE - ) - TABLESPACE pg_default; - -INSERT INTO - public."its_ph_76c9d846-797c-4f07-b7ec-2245f679f5c7" (uuid, time, p, heat_demand) -VALUES -('5d1235b2-656c-43e8-9186-b4a703f6e467', '2020-01-01 00:00:00+0', 1000.0, 8.0), 
-('de6659b2-1545-4739-8d0a-e8ff79a6cb4b', '2020-01-01 00:15:00+0', 1250.0, 12.0); diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26.sql deleted file mode 100644 index 9ff8a378d..000000000 --- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26.sql +++ /dev/null @@ -1,18 +0,0 @@ -CREATE TABLE public."its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26" -( - time timestamp with time zone, - p double precision, - q double precision, - uuid uuid, - CONSTRAINT its_pq_pkey PRIMARY KEY (uuid) -) - WITH ( - OIDS = FALSE - ) - TABLESPACE pg_default; - -INSERT INTO - public."its_pq_3fbfaa97-cff4-46d4-95ba-a95665e87c26" (uuid, time, p, q) -VALUES -('da288786-d3e3-40aa-a34a-f67955d45ac8', '2020-01-01 00:00:00+0', 1000.0, 329.0), -('43dd0a7b-7a7e-4393-b516-a0ddbcbf073b', '2020-01-01 00:15:00+0', 1250.0, 411.0); diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047.sql deleted file mode 100644 index 230393eb5..000000000 --- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047.sql +++ /dev/null @@ -1,19 +0,0 @@ -CREATE TABLE public."its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047" -( - time timestamp with time zone, - p double precision, - q double precision, - heat_demand double precision, - uuid uuid, - CONSTRAINT its_pqh_pkey PRIMARY KEY (uuid) -) - WITH ( - OIDS = FALSE - ) - TABLESPACE pg_default; - -INSERT INTO - public."its_pqh_46be1e57-e4ed-4ef7-95f1-b2b321cb2047" (uuid, time, p, q, heat_demand) -VALUES -('661ac594-47f0-4442-8d82-bbeede5661f7', '2020-01-01 00:00:00+0', 1000.0, 329.0, 8.0), -('5adcd6c5-a903-433f-b7b5-5fe669a3ed30', '2020-01-01 00:15:00+0', 1250.0, 411.0, 12.0); diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_c.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_c.sql index 1a78a852d..a892a456b 100644 --- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_c.sql +++ b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_c.sql @@ -10,6 +10,9 @@ CREATE TABLE public.time_series_c CREATE INDEX time_series_c_series_id ON time_series_c USING hash (time_series); +-- Order of columns is important when using btree: https://www.postgresql.org/docs/14/indexes-multicolumn.html +-- Column time_series needs to placed as the first argument since we at most use an equality constraint on +-- time_series and a range query on time. 
CREATE UNIQUE INDEX time_series_c_series_time ON time_series_c USING btree (time_series, time); INSERT INTO @@ -17,4 +20,3 @@ INSERT INTO VALUES ('45bd936f-524a-4d59-8978-31ccf37fa230', '2fcb3e53-b94a-4b96-bea4-c469e499f1a1', '2020-01-01 00:00:00+0', 100.0), ('41b8dbf6-3e75-4073-8359-89d015777dd6', '2fcb3e53-b94a-4b96-bea4-c469e499f1a1', '2020-01-01 00:15:00+0', 125.0); - diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_h.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_h.sql index 3d1116c7b..1dc3edb99 100644 --- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_h.sql +++ b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_h.sql @@ -10,7 +10,10 @@ CREATE TABLE public.time_series_h CREATE INDEX time_series_h_series_id ON time_series_h USING hash (time_series); -CREATE UNIQUE INDEX time_series_h_series_time ON time_series_h USING btree (time_series, time); +-- Order of columns is important when using btree: https://www.postgresql.org/docs/14/indexes-multicolumn.html +-- Column time_series needs to placed as the first argument since we at most use an equality constraint on +-- time_series and a range query on time. +CREATE UNIQUE INDEX time_series_h_series_time ON time_series_h USING btree (time_series, time); INSERT INTO public.time_series_h (uuid, time_series, time, heat_demand) diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_mapping.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_mapping.sql index df6bb8320..b3921f442 100644 --- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_mapping.sql +++ b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_mapping.sql @@ -4,9 +4,7 @@ CREATE TABLE public.time_series_mapping participant uuid, time_series uuid ) - WITH ( - OIDS = FALSE - ) + WITHOUT OIDS TABLESPACE pg_default; INSERT INTO diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_p.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_p.sql index 79beaf5e7..b17b091ea 100644 --- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_p.sql +++ b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_p.sql @@ -10,7 +10,10 @@ CREATE TABLE public.time_series_p CREATE INDEX time_series_p_series_id ON time_series_p USING hash (time_series); -CREATE UNIQUE INDEX time_series_p_series_time ON time_series_p USING btree (time_series, time); +-- Order of columns is important when using btree: https://www.postgresql.org/docs/14/indexes-multicolumn.html +-- Column time_series needs to placed as the first argument since we at most use an equality constraint on +-- time_series and a range query on time. 
+CREATE UNIQUE INDEX time_series_p_series_time ON time_series_p USING btree (time_series, time);

 INSERT INTO
     public.time_series_p (uuid, time_series, time, p)
diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_ph.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_ph.sql
index 443db4af3..384ccba7b 100644
--- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_ph.sql
+++ b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_ph.sql
@@ -11,7 +11,10 @@ CREATE TABLE public.time_series_ph

 CREATE INDEX time_series_ph_series_id ON time_series_ph USING hash (time_series);

-CREATE UNIQUE INDEX time_series_ph_series_time ON time_series_ph USING btree (time_series, time);
+-- Order of columns is important when using btree: https://www.postgresql.org/docs/14/indexes-multicolumn.html
+-- Column time_series needs to be placed as the first argument since we at most use an equality constraint on
+-- time_series and a range query on time.
+CREATE UNIQUE INDEX time_series_ph_series_time ON time_series_ph USING btree (time_series, time);

 INSERT INTO
     public.time_series_ph (uuid, time_series, time, p, heat_demand)
diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_pq.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_pq.sql
index 8183e1583..9195b83f0 100644
--- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_pq.sql
+++ b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_pq.sql
@@ -11,7 +11,10 @@ CREATE TABLE public.time_series_pq

 CREATE INDEX time_series_pq_series_id ON time_series_pq USING hash (time_series);

-CREATE UNIQUE INDEX time_series_pq_series_time ON time_series_pq USING btree (time_series, time);
+-- Order of columns is important when using btree: https://www.postgresql.org/docs/14/indexes-multicolumn.html
+-- Column time_series needs to be placed as the first argument since we at most use an equality constraint on
+-- time_series and a range query on time.
+CREATE UNIQUE INDEX time_series_pq_series_time ON time_series_pq USING btree (time_series, time);

 INSERT INTO
     public.time_series_pq (uuid, time_series, time, p, q)
diff --git a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_pqh.sql b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_pqh.sql
index 8bd3a4890..fa23010ce 100644
--- a/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_pqh.sql
+++ b/src/test/resources/edu/ie3/datamodel/io/source/sql/_timeseries/time_series_pqh.sql
@@ -12,7 +12,10 @@ CREATE TABLE public.time_series_pqh

 CREATE INDEX time_series_pqh_series_id ON time_series_pqh USING hash (time_series);

-CREATE UNIQUE INDEX time_series_pqh_series_time ON time_series_pqh USING btree (time_series, time);
+-- Order of columns is important when using btree: https://www.postgresql.org/docs/14/indexes-multicolumn.html
+-- Column time_series needs to be placed as the first argument since we at most use an equality constraint on
+-- time_series and a range query on time.
+CREATE UNIQUE INDEX time_series_pqh_series_time ON time_series_pqh USING btree (time_series, time);

 INSERT INTO
     public.time_series_pqh (uuid, time_series, time, p, q, heat_demand)
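
Usage sketch (not part of the patch): the snippet below wires up the new `CsvTimeSeriesMetaInformationSource` introduced in this diff and queries it, first for the complete UUID-to-meta-information mapping and then for a single time series. The separator, folder path and UUID are placeholders (the UUID is taken from the test fixtures); constructor and method signatures follow the classes added above.

```java
import edu.ie3.datamodel.io.naming.FileNamingStrategy;
import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation;
import edu.ie3.datamodel.io.source.TimeSeriesMetaInformationSource;
import edu.ie3.datamodel.io.source.csv.CsvTimeSeriesMetaInformationSource;
import java.util.Optional;
import java.util.UUID;

public class MetaInformationSourceSketch {
  public static void main(String[] args) {
    // Placeholder separator and folder; point this at a directory holding its_* csv files
    TimeSeriesMetaInformationSource source =
        new CsvTimeSeriesMetaInformationSource(";", "input/time_series", new FileNamingStrategy());

    // Full mapping: time series UUID -> meta information (uuid + column scheme)
    source
        .getTimeSeriesMetaInformation()
        .forEach((uuid, meta) -> System.out.println(uuid + " -> " + meta.getColumnScheme()));

    // Targeted lookup; empty optional if the UUID is unknown
    Optional<IndividualTimeSeriesMetaInformation> meta =
        source.getTimeSeriesMetaInformation(
            UUID.fromString("2fcb3e53-b94a-4b96-bea4-c469e499f1a1"));
    meta.ifPresent(m -> System.out.println("Column scheme: " + m.getColumnScheme().getScheme()));
  }
}
```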
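A second sketch at connector level, illustrating the reshaped `getCsvIndividualTimeSeriesMetaInformation(ColumnScheme...)`: it now returns a flat UUID-keyed map filtered by the given column schemes (all schemes when none are passed), mirroring how `CsvWeatherSource` consumes it in this diff. The `CsvFileConnector` constructor arguments (base directory plus naming strategy) are assumed and the directory is a placeholder.

```java
import edu.ie3.datamodel.io.connectors.CsvFileConnector;
import edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation;
import edu.ie3.datamodel.io.naming.FileNamingStrategy;
import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme;
import java.util.Map;
import java.util.UUID;

public class ConnectorFilterSketch {
  public static void main(String[] args) {
    // Assumed constructor: base directory and file naming strategy
    CsvFileConnector connector =
        new CsvFileConnector("input/time_series", new FileNamingStrategy());

    // Restrict the result to energy price ("c") and active power ("p") series;
    // calling the method without arguments would return every individual time series
    Map<UUID, CsvIndividualTimeSeriesMetaInformation> filtered =
        connector.getCsvIndividualTimeSeriesMetaInformation(
            ColumnScheme.ENERGY_PRICE, ColumnScheme.ACTIVE_POWER);

    filtered.forEach((uuid, meta) -> System.out.println(uuid + " -> " + meta.getColumnScheme()));
  }
}
```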