diff --git a/CHANGELOG.md b/CHANGELOG.md
index fc76df9e8..f7b0423fc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -19,6 +19,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Attributes `housingType` and `numberInhabitants` for `ThermalHouse`s [#1253](https://github.com/ie3-institute/PowerSystemDataModel/issues/1253)
- Added domestic hot water storage model [#1257](https://github.com/ie3-institute/PowerSystemDataModel/issues/1257)
- Validation for BDEW load profile values [#1243](https://github.com/ie3-institute/PowerSystemDataModel/issues/1243)
+- Added load profiles sources [#1106](https://github.com/ie3-institute/PowerSystemDataModel/issues/1106)
### Fixed
- Removing opened `SwitchInput` during connectivity check [#1221](https://github.com/ie3-institute/PowerSystemDataModel/issues/1221)
diff --git a/docs/readthedocs/io/csvfiles.md b/docs/readthedocs/io/csvfiles.md
index 3e96eb59d..67dadf6b3 100644
--- a/docs/readthedocs/io/csvfiles.md
+++ b/docs/readthedocs/io/csvfiles.md
@@ -94,11 +94,13 @@ Csv id coordinate sources can have two different ways to represent their coordin
* - Model
- File Name
* - individual time series
- - *prefix_* its *_columnScheme_UUID_suffix*
+ - *prefix_* its *_columnScheme_UUID* *_suffix*
* - load profile input
- - *prefix_* rts *_profileKey_UUID_suffix*
+ - *prefix_* lpts *_profileKey* *_suffix*
```
+#### Individual Time Series
+
Let's spend a few more words on the individual time series:
Those files are meant to carry different types of content - one might give information about wholesale market prices,
the other is a record of power values provided by a real system.
@@ -154,6 +156,27 @@ The following keys are supported until now:
```
+
+##### Load Profile Time Series
+
+The following profiles are supported until now:
+```{list-table}
+ :widths: auto
+ :class: wrapping
+ :header-rows: 1
+
+ * - Key
+ - Information
+  - Supported headline
+ * - e.g.: H0
+ - BDEW standard load profiles ([source](https://www.bdew.de/energie/standardlastprofile-strom/))
+  - Permissible headline: ``SuSa,SuSu,SuWd,TrSa,TrSu,TrWd,WiSa,WiSu,WiWd,quarterHour``
+ * - random
+  - A random load profile based on: ``Kays - Agent-based simulation environment for improving the planning of distribution grids``
+  - Permissible headline: ``kSa,kSu,kWd,mySa,mySu,myWd,sigmaSa,sigmaSu,sigmaWd,quarterHour``
+
+```
+
### Results
```{list-table}
diff --git a/docs/readthedocs/models/input/additionaldata/timeseries.md b/docs/readthedocs/models/input/additionaldata/timeseries.md
index 991568b4b..eb24eec12 100644
--- a/docs/readthedocs/models/input/additionaldata/timeseries.md
+++ b/docs/readthedocs/models/input/additionaldata/timeseries.md
@@ -55,4 +55,9 @@ The following different values are available:
* - `WeatherValue`
- Combination of irradiance, temperature and wind information
+ * - `BdewLoadValues`
+ - Values for combination of seasons and day types
+
+ * - `RandomLoadValues`
+ - Parameters for a probability density function to draw random power consumptions
```
diff --git a/src/main/java/edu/ie3/datamodel/io/csv/CsvLoadProfileMetaInformation.java b/src/main/java/edu/ie3/datamodel/io/csv/CsvLoadProfileMetaInformation.java
new file mode 100644
index 000000000..905f1ee3f
--- /dev/null
+++ b/src/main/java/edu/ie3/datamodel/io/csv/CsvLoadProfileMetaInformation.java
@@ -0,0 +1,55 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+*/
+package edu.ie3.datamodel.io.csv;
+
+import edu.ie3.datamodel.io.naming.timeseries.LoadProfileMetaInformation;
+import java.nio.file.Path;
+import java.util.Objects;
+
+public class CsvLoadProfileMetaInformation extends LoadProfileMetaInformation {
+ private final Path fullFilePath;
+
+ public CsvLoadProfileMetaInformation(String profile, Path fullFilePath) {
+ super(profile);
+ this.fullFilePath = fullFilePath;
+ }
+
+ public CsvLoadProfileMetaInformation(
+ LoadProfileMetaInformation metaInformation, Path fullFilePath) {
+ this(metaInformation.getProfile(), fullFilePath);
+ }
+
+ public Path getFullFilePath() {
+ return fullFilePath;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (!(o instanceof CsvLoadProfileMetaInformation that)) return false;
+ if (!super.equals(o)) return false;
+ return fullFilePath.equals(that.fullFilePath);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(super.hashCode(), fullFilePath);
+ }
+
+ @Override
+ public String toString() {
+ return "CsvLoadProfileMetaInformation{"
+ + "uuid='"
+ + getUuid()
+ + '\''
+ + ", profile='"
+ + getProfile()
+ + '\''
+ + ", fullFilePath="
+ + fullFilePath
+ + '}';
+ }
+}
diff --git a/src/main/java/edu/ie3/datamodel/io/source/LoadProfileSource.java b/src/main/java/edu/ie3/datamodel/io/source/LoadProfileSource.java
new file mode 100644
index 000000000..60e30dd4e
--- /dev/null
+++ b/src/main/java/edu/ie3/datamodel/io/source/LoadProfileSource.java
@@ -0,0 +1,130 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+*/
+package edu.ie3.datamodel.io.source;
+
+import static edu.ie3.datamodel.models.profile.LoadProfile.RandomLoadProfile.RANDOM_LOAD_PROFILE;
+
+import edu.ie3.datamodel.exceptions.FactoryException;
+import edu.ie3.datamodel.exceptions.SourceException;
+import edu.ie3.datamodel.io.csv.CsvLoadProfileMetaInformation;
+import edu.ie3.datamodel.io.factory.timeseries.BdewLoadProfileFactory;
+import edu.ie3.datamodel.io.factory.timeseries.LoadProfileData;
+import edu.ie3.datamodel.io.factory.timeseries.LoadProfileFactory;
+import edu.ie3.datamodel.io.factory.timeseries.RandomLoadProfileFactory;
+import edu.ie3.datamodel.io.source.csv.CsvDataSource;
+import edu.ie3.datamodel.io.source.csv.CsvLoadProfileSource;
+import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile;
+import edu.ie3.datamodel.models.profile.LoadProfile;
+import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileEntry;
+import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileTimeSeries;
+import edu.ie3.datamodel.models.timeseries.repetitive.RandomLoadProfileTimeSeries;
+import edu.ie3.datamodel.models.value.PValue;
+import edu.ie3.datamodel.models.value.Value;
+import edu.ie3.datamodel.models.value.load.BdewLoadValues;
+import edu.ie3.datamodel.models.value.load.LoadValues;
+import edu.ie3.datamodel.models.value.load.RandomLoadValues;
+import edu.ie3.datamodel.utils.Try;
+import java.time.ZonedDateTime;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import javax.measure.quantity.Energy;
+import javax.measure.quantity.Power;
+import tech.units.indriya.ComparableQuantity;
+
+public abstract class LoadProfileSource
+ extends EntitySource {
+ protected final Class entryClass;
+ protected final LoadProfileFactory entryFactory;
+
+ protected LoadProfileSource(Class entryClass, LoadProfileFactory entryFactory) {
+ this.entryClass = entryClass;
+ this.entryFactory = entryFactory;
+ }
+
+ /**
+ * Build a list of type {@code E}, whereas the underlying {@link Value} does not need any
+ * additional information.
+ *
+ * @param fieldToValues Mapping from field id to values
+ * @return {@link Try} of simple time based value
+ */
+ protected Try, FactoryException> createEntries(
+ Map fieldToValues) {
+ LoadProfileData factoryData = new LoadProfileData<>(fieldToValues, entryClass);
+ return entryFactory.get(factoryData);
+ }
+
+ public abstract LoadProfileTimeSeries getTimeSeries();
+
+ /**
+ * Method to return all time keys after a given timestamp.
+ *
+ * @param time given time
+ * @return a list of time keys
+ */
+ public abstract List getTimeKeysAfter(ZonedDateTime time);
+
+ /**
+ * Method to get the value for a given time.
+ *
+ * @param time for which a value is needed
+ * @return an optional
+ * @throws SourceException if an exception occurred
+ */
+ public abstract Optional getValue(ZonedDateTime time) throws SourceException;
+
+ /** Returns the load profile of this source. */
+ public abstract P getLoadProfile();
+
+ /** Returns the maximal power value of the time series */
+ public abstract Optional> getMaxPower();
+
+ /** Returns the load profile energy scaling for this load profile time series. */
+ public abstract Optional> getLoadProfileEnergyScaling();
+
+ /**
+ * Method to read in the built-in {@link BdewStandardLoadProfile}s.
+ *
+ * @return a map: load profile to load profile source
+ */
+ public static Map<
+ BdewStandardLoadProfile, CsvLoadProfileSource>
+ getBdewLoadProfiles() throws SourceException {
+ CsvDataSource buildInSource = getBuildInSource(LoadProfileSource.class, "/load");
+
+ BdewLoadProfileFactory factory = new BdewLoadProfileFactory();
+
+ return buildInSource
+ .getCsvLoadProfileMetaInformation(BdewStandardLoadProfile.values())
+ .values()
+ .stream()
+ .map(
+ metaInformation ->
+ new CsvLoadProfileSource<>(
+ buildInSource, metaInformation, BdewLoadValues.class, factory))
+ .collect(Collectors.toMap(CsvLoadProfileSource::getLoadProfile, Function.identity()));
+ }
+
+ /**
+ * Method to read in the built-in {@link RandomLoadProfileTimeSeries}.
+ *
+ * @return the random load profile source
+ */
+ public static CsvLoadProfileSource
+ getRandomLoadProfile() throws SourceException {
+ CsvDataSource buildInSource = getBuildInSource(LoadProfileSource.class, "/load");
+
+ CsvLoadProfileMetaInformation metaInformation =
+ buildInSource.getCsvLoadProfileMetaInformation(RANDOM_LOAD_PROFILE).values().stream()
+ .findAny()
+ .orElseThrow();
+ return new CsvLoadProfileSource<>(
+ buildInSource, metaInformation, RandomLoadValues.class, new RandomLoadProfileFactory());
+ }
+}
diff --git a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java
index d97c032ba..7bf442bca 100644
--- a/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java
+++ b/src/main/java/edu/ie3/datamodel/io/source/TimeSeriesMetaInformationSource.java
@@ -7,12 +7,17 @@
import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme;
import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation;
+import edu.ie3.datamodel.io.naming.timeseries.LoadProfileMetaInformation;
+import edu.ie3.datamodel.models.profile.LoadProfile;
+import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
/** Source for all available time series with their {@link UUID} and {@link ColumnScheme} */
-public interface TimeSeriesMetaInformationSource {
+public abstract class TimeSeriesMetaInformationSource {
+
+ protected Map loadProfileMetaInformation;
/**
* Get a mapping from time series {@link UUID} to its meta information {@link
@@ -20,7 +25,7 @@ public interface TimeSeriesMetaInformationSource {
*
* @return that mapping
*/
- Map getTimeSeriesMetaInformation();
+ public abstract Map getTimeSeriesMetaInformation();
/**
* Get an option on the given time series meta information
@@ -28,5 +33,26 @@ public interface TimeSeriesMetaInformationSource {
* @param timeSeriesUuid Unique identifier of the time series in question
* @return An Option on the meta information
*/
- Optional getTimeSeriesMetaInformation(UUID timeSeriesUuid);
+ public abstract Optional getTimeSeriesMetaInformation(
+ UUID timeSeriesUuid);
+
+ /**
+ * Get a mapping from load profile to {@link LoadProfileMetaInformation}.
+ *
+ * @return that mapping
+ */
+ public Map getLoadProfileMetaInformation() {
+ return Collections.unmodifiableMap(loadProfileMetaInformation);
+ }
+
+ /**
+ * Get an option on the given load profile meta information
+ *
+ * @param loadProfile load profile of the time series in question
+ * @return An Option on the meta information
+ */
+ public Optional getLoadProfileMetaInformation(
+ LoadProfile loadProfile) {
+ return Optional.ofNullable(loadProfileMetaInformation.get(loadProfile.getKey()));
+ }
}
diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java
index 3603aaa7e..78aa77f75 100644
--- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java
+++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvDataSource.java
@@ -8,12 +8,15 @@
import edu.ie3.datamodel.exceptions.SourceException;
import edu.ie3.datamodel.io.connectors.CsvFileConnector;
import edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation;
+import edu.ie3.datamodel.io.csv.CsvLoadProfileMetaInformation;
import edu.ie3.datamodel.io.naming.FileNamingStrategy;
import edu.ie3.datamodel.io.naming.TimeSeriesMetaInformation;
import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme;
import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation;
+import edu.ie3.datamodel.io.naming.timeseries.LoadProfileMetaInformation;
import edu.ie3.datamodel.io.source.DataSource;
import edu.ie3.datamodel.models.Entity;
+import edu.ie3.datamodel.models.profile.LoadProfile;
import edu.ie3.datamodel.utils.Try;
import edu.ie3.datamodel.utils.Try.Failure;
import edu.ie3.datamodel.utils.Try.Success;
@@ -25,6 +28,7 @@
import java.nio.file.Path;
import java.util.*;
import java.util.function.Function;
+import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
@@ -118,7 +122,8 @@ public FileNamingStrategy getNamingStrategy() {
*/
public Map
getCsvIndividualTimeSeriesMetaInformation(final ColumnScheme... columnSchemes) {
- return getIndividualTimeSeriesFilePaths().parallelStream()
+ return getTimeSeriesFilePaths(fileNamingStrategy.getIndividualTimeSeriesPattern())
+ .parallelStream()
.map(
filePath -> {
/* Extract meta information from file path and enhance it with the file path itself */
@@ -136,13 +141,41 @@ public FileNamingStrategy getNamingStrategy() {
.collect(Collectors.toMap(TimeSeriesMetaInformation::getUuid, Function.identity()));
}
+ /**
+ * Receive the information for specific load profile time series. They are given back mapped to
+ * their profile key.
+ *
+ * @return A mapping from profile to the load profile time series meta information
+ */
+ public Map getCsvLoadProfileMetaInformation(
+ LoadProfile... profiles) {
+ return getTimeSeriesFilePaths(fileNamingStrategy.getLoadProfileTimeSeriesPattern())
+ .parallelStream()
+ .map(
+ filePath -> {
+ /* Extract meta information from file path and enhance it with the file path itself */
+ LoadProfileMetaInformation metaInformation =
+ fileNamingStrategy.loadProfileTimeSeriesMetaInformation(filePath.toString());
+ return new CsvLoadProfileMetaInformation(
+ metaInformation, FileNamingStrategy.removeFileNameEnding(filePath.getFileName()));
+ })
+ .filter(
+ metaInformation ->
+ profiles == null
+ || profiles.length == 0
+ || Stream.of(profiles)
+ .anyMatch(profile -> profile.getKey().equals(metaInformation.getProfile())))
+ .collect(Collectors.toMap(LoadProfileMetaInformation::getProfile, Function.identity()));
+ }
+
/**
* Returns a set of relative paths strings to time series files, with respect to the base folder
* path
*
+ * @param pattern for matching the time series
* @return A set of relative paths to time series files, with respect to the base folder path
*/
- protected Set getIndividualTimeSeriesFilePaths() {
+ protected Set getTimeSeriesFilePaths(Pattern pattern) {
Path baseDirectory = connector.getBaseDirectory();
try (Stream pathStream = Files.walk(baseDirectory)) {
return pathStream
@@ -151,10 +184,7 @@ protected Set getIndividualTimeSeriesFilePaths() {
path -> {
Path withoutEnding =
Path.of(FileNamingStrategy.removeFileNameEnding(path.toString()));
- return fileNamingStrategy
- .getIndividualTimeSeriesPattern()
- .matcher(withoutEnding.toString())
- .matches();
+ return pattern.matcher(withoutEnding.toString()).matches();
})
.collect(Collectors.toSet());
} catch (IOException e) {
diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvLoadProfileSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvLoadProfileSource.java
new file mode 100644
index 000000000..f3b271617
--- /dev/null
+++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvLoadProfileSource.java
@@ -0,0 +1,126 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+*/
+package edu.ie3.datamodel.io.source.csv;
+
+import edu.ie3.datamodel.exceptions.FactoryException;
+import edu.ie3.datamodel.exceptions.SourceException;
+import edu.ie3.datamodel.exceptions.ValidationException;
+import edu.ie3.datamodel.io.csv.CsvLoadProfileMetaInformation;
+import edu.ie3.datamodel.io.factory.timeseries.LoadProfileFactory;
+import edu.ie3.datamodel.io.source.LoadProfileSource;
+import edu.ie3.datamodel.models.profile.LoadProfile;
+import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileEntry;
+import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileTimeSeries;
+import edu.ie3.datamodel.models.value.PValue;
+import edu.ie3.datamodel.models.value.load.LoadValues;
+import edu.ie3.datamodel.utils.Try;
+import java.nio.file.Path;
+import java.time.ZonedDateTime;
+import java.util.*;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import javax.measure.quantity.Energy;
+import javax.measure.quantity.Power;
+import tech.units.indriya.ComparableQuantity;
+
+/**
+ * Source that is capable of providing information around load profile time series from csv files.
+ */
+public class CsvLoadProfileSource
+ extends LoadProfileSource
{
+ private final LoadProfileTimeSeries loadProfileTimeSeries;
+ private final CsvDataSource dataSource;
+ private final Path filePath;
+
+ public CsvLoadProfileSource(
+ CsvDataSource source,
+ CsvLoadProfileMetaInformation metaInformation,
+ Class entryClass,
+ LoadProfileFactory entryFactory) {
+ super(entryClass, entryFactory);
+ this.dataSource = source;
+ this.filePath = metaInformation.getFullFilePath();
+
+ /* Read in the full time series */
+ try {
+ this.loadProfileTimeSeries = buildLoadProfileTimeSeries(metaInformation, this::createEntries);
+ } catch (SourceException e) {
+ throw new IllegalArgumentException(
+ "Unable to obtain time series with UUID '"
+ + metaInformation.getUuid()
+ + "'. Please check arguments!",
+ e);
+ }
+ }
+
+ @Override
+ public void validate() throws ValidationException {
+ validate(entryClass, () -> dataSource.getSourceFields(filePath), entryFactory);
+ }
+
+ @Override
+ public LoadProfileTimeSeries getTimeSeries() {
+ return loadProfileTimeSeries;
+ }
+
+ @Override
+ public List getTimeKeysAfter(ZonedDateTime time) {
+ return loadProfileTimeSeries.getTimeKeysAfter(time);
+ }
+
+ @Override
+ public Optional getValue(ZonedDateTime time) throws SourceException {
+ return loadProfileTimeSeries.getValue(time);
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public P getLoadProfile() {
+ return (P) getTimeSeries().getLoadProfile();
+ }
+
+ @Override
+ public Optional> getMaxPower() {
+ return loadProfileTimeSeries.maxPower();
+ }
+
+ @Override
+ public Optional> getLoadProfileEnergyScaling() {
+ return loadProfileTimeSeries.loadProfileScaling();
+ }
+
+ // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
+
+ /**
+ * Attempts to read a load profile time series with given unique identifier and file path. Single
+ * entries are obtained with the help of {@code fieldToValueFunction}. If the file does
+ * not exist, an empty Stream is returned.
+ *
+ * @param metaInformation containing a unique identifier of the time series, a path to the file
+ * to read as well as the profile
+ * @param fieldToValueFunction function, that is able to transfer a mapping (from field to value)
+ * onto a specific instance of the targeted entry class
+ * @throws SourceException If the file cannot be read properly
+ * @return a load profile time series
+ */
+ protected LoadProfileTimeSeries buildLoadProfileTimeSeries(
+ CsvLoadProfileMetaInformation metaInformation,
+ Function, Try, FactoryException>>
+ fieldToValueFunction)
+ throws SourceException {
+ Set> entries =
+ dataSource
+ .buildStreamWithFieldsToAttributesMap(filePath, false)
+ .flatMap(
+ stream ->
+ Try.scanStream(stream.map(fieldToValueFunction), "LoadProfileEntry")
+ .transformF(SourceException::new))
+ .getOrThrow()
+ .collect(Collectors.toSet());
+
+ return entryFactory.build(metaInformation, entries);
+ }
+}
diff --git a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java
index dbf5114cf..786474e87 100644
--- a/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java
+++ b/src/main/java/edu/ie3/datamodel/io/source/csv/CsvTimeSeriesMetaInformationSource.java
@@ -9,19 +9,22 @@
import edu.ie3.datamodel.io.naming.FileNamingStrategy;
import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme;
import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation;
+import edu.ie3.datamodel.io.naming.timeseries.LoadProfileMetaInformation;
import edu.ie3.datamodel.io.source.TimeSeriesMetaInformationSource;
import edu.ie3.datamodel.utils.TimeSeriesUtils;
import java.nio.file.Path;
+import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
+import java.util.function.Function;
import java.util.stream.Collectors;
/**
* CSV implementation for retrieving {@link TimeSeriesMetaInformationSource} from input directory
* structures
*/
-public class CsvTimeSeriesMetaInformationSource implements TimeSeriesMetaInformationSource {
+public class CsvTimeSeriesMetaInformationSource extends TimeSeriesMetaInformationSource {
protected final CsvDataSource dataSource;
@@ -36,17 +39,29 @@ public class CsvTimeSeriesMetaInformationSource implements TimeSeriesMetaInforma
*/
public CsvTimeSeriesMetaInformationSource(
String csvSep, Path folderPath, FileNamingStrategy fileNamingStrategy) {
- this.dataSource = new CsvDataSource(csvSep, folderPath, fileNamingStrategy);
+ this(new CsvDataSource(csvSep, folderPath, fileNamingStrategy));
+ }
+
+ /**
+ * Creates a time series type source
+ *
+ * @param dataSource a csv data source
+ */
+ public CsvTimeSeriesMetaInformationSource(CsvDataSource dataSource) {
+ this.dataSource = dataSource;
// retrieve only the desired time series
this.timeSeriesMetaInformation =
dataSource.getCsvIndividualTimeSeriesMetaInformation(
TimeSeriesUtils.getAcceptedColumnSchemes().toArray(new ColumnScheme[0]));
+
+ this.loadProfileMetaInformation =
+ dataSource.getCsvLoadProfileMetaInformation().values().stream()
+ .collect(Collectors.toMap(LoadProfileMetaInformation::getProfile, Function.identity()));
}
@Override
public Map getTimeSeriesMetaInformation() {
- return timeSeriesMetaInformation.entrySet().stream()
- .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+ return Collections.unmodifiableMap(timeSeriesMetaInformation);
}
@Override
diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlLoadProfileSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlLoadProfileSource.java
new file mode 100644
index 000000000..28544f37e
--- /dev/null
+++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlLoadProfileSource.java
@@ -0,0 +1,205 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+*/
+package edu.ie3.datamodel.io.source.sql;
+
+import static edu.ie3.datamodel.io.source.sql.SqlDataSource.createBaseQueryString;
+
+import edu.ie3.datamodel.exceptions.SourceException;
+import edu.ie3.datamodel.exceptions.ValidationException;
+import edu.ie3.datamodel.io.connectors.SqlConnector;
+import edu.ie3.datamodel.io.factory.timeseries.LoadProfileFactory;
+import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy;
+import edu.ie3.datamodel.io.naming.timeseries.LoadProfileMetaInformation;
+import edu.ie3.datamodel.io.source.LoadProfileSource;
+import edu.ie3.datamodel.models.profile.LoadProfile;
+import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileEntry;
+import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileTimeSeries;
+import edu.ie3.datamodel.models.value.PValue;
+import edu.ie3.datamodel.models.value.Value;
+import edu.ie3.datamodel.models.value.load.LoadValues;
+import edu.ie3.datamodel.utils.TimeSeriesUtils;
+import java.time.ZonedDateTime;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+import javax.measure.quantity.Energy;
+import javax.measure.quantity.Power;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import tech.units.indriya.ComparableQuantity;
+
+/**
+ * Sql source for {@link LoadProfileTimeSeries}.
+ *
+ * @param type of load profile
+ * @param type of load values
+ */
+public class SqlLoadProfileSource
+ extends LoadProfileSource
{
+ protected static final Logger log = LoggerFactory.getLogger(SqlTimeSeriesSource.class);
+ private final SqlDataSource dataSource;
+ private final String tableName;
+
+ private final LoadProfileMetaInformation metaInformation;
+ private final P loadProfile;
+
+ // General fields
+ private static final String WHERE = " WHERE ";
+ private static final String LOAD_PROFILE = "load_profile";
+
+ /**
+ * Queries that are available within this source. Motivation to have them as field value is to
+ * avoid creating a new string each time, bc they're always the same.
+ */
+ private final String queryFull;
+
+ private final String queryTime;
+
+ public SqlLoadProfileSource(
+ SqlDataSource dataSource,
+ LoadProfileMetaInformation metaInformation,
+ Class entryClass,
+ LoadProfileFactory entryFactory) {
+ super(entryClass, entryFactory);
+ this.dataSource = dataSource;
+
+ this.tableName = "load_profiles";
+ this.metaInformation = metaInformation;
+ this.loadProfile = entryFactory.parseProfile(metaInformation.getProfile());
+
+ String dbTimeColumnName =
+ dataSource.getDbColumnName(entryFactory.getTimeFieldString(), tableName);
+
+ this.queryFull = createQueryFull(dataSource.schemaName, tableName);
+ this.queryTime = createQueryForTime(dataSource.schemaName, tableName, dbTimeColumnName);
+ }
+
+ public SqlLoadProfileSource(
+ SqlConnector connector,
+ String schemaName,
+ DatabaseNamingStrategy namingStrategy,
+ LoadProfileMetaInformation metaInformation,
+ Class entryClass,
+ LoadProfileFactory entryFactory) {
+ this(
+ new SqlDataSource(connector, schemaName, namingStrategy),
+ metaInformation,
+ entryClass,
+ entryFactory);
+ }
+
+ @Override
+ public void validate() throws ValidationException {
+ validate(entryClass, () -> dataSource.getSourceFields(tableName), entryFactory);
+ }
+
+ // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
+
+ @Override
+ public LoadProfileTimeSeries getTimeSeries() {
+ Set> entries = getEntries(queryFull, ps -> {});
+ return entryFactory.build(metaInformation, entries);
+ }
+
+ @Override
+ public List getTimeKeysAfter(ZonedDateTime time) {
+ return List.of(time.plusMinutes(15));
+ }
+
+ @Override
+ public Optional getValue(ZonedDateTime time) throws SourceException {
+ Set> entries =
+ getEntries(queryTime, ps -> ps.setInt(1, TimeSeriesUtils.calculateQuarterHourOfDay(time)));
+ if (entries.isEmpty()) return Optional.empty();
+ if (entries.size() > 1) log.warn("Retrieved more than one result value, using the first");
+ return Optional.of(entries.stream().toList().get(0).getValue().getValue(time, loadProfile));
+ }
+
+ @Override
+ public P getLoadProfile() {
+ return loadProfile;
+ }
+
+ @Override
+ public Optional> getMaxPower() {
+ return Optional.ofNullable(
+ entryFactory.calculateMaxPower(loadProfile, getEntries(queryFull, ps -> {})));
+ }
+
+ @Override
+ public Optional> getLoadProfileEnergyScaling() {
+ return Optional.ofNullable(entryFactory.getLoadProfileEnergyScaling(loadProfile));
+ }
+
+ // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
+
+ /**
+ * Creates a set of {@link LoadProfileEntry} from database.
+ *
+ * @param query to execute
+ * @param addParams additional parameters
+ * @return a set of {@link LoadProfileEntry}
+ */
+ private Set> getEntries(String query, SqlDataSource.AddParams addParams) {
+ return dataSource
+ .executeQuery(query, addParams)
+ .map(this::createEntity)
+ .flatMap(Optional::stream)
+ .collect(Collectors.toSet());
+ }
+
+ /**
+ * Build a {@link LoadProfileEntry} of type {@code V}, whereas the underlying {@link Value} does
+ * not need any additional information.
+ *
+ * @param fieldToValues Mapping from field id to values
+ * @return optional {@link LoadProfileEntry}
+ */
+ private Optional> createEntity(Map fieldToValues) {
+ fieldToValues.remove("timeSeries");
+ return createEntries(fieldToValues).getData();
+ }
+
+ /**
+ * Creates a base query to retrieve all entities for this load profile:
+ * {@code <base query> WHERE load_profile = $loadProfileKey;}
+ *
+ * @param schemaName the name of the database schema
+ * @param tableName the name of the database table
+ * @return the query string
+ */
+ private String createQueryFull(String schemaName, String tableName) {
+ return createBaseQueryString(schemaName, tableName)
+ + WHERE
+ + LOAD_PROFILE
+ + " = '"
+ + loadProfile.getKey()
+ + "'";
+ }
+
+ /**
+ * Creates a basic query to retrieve an entry for the given load profile and time with the
+ * following pattern:
+ * {@code <base query> WHERE load_profile = $loadProfileKey AND <time column>=?;}
+ *
+ * @param schemaName the name of the database schema
+ * @param tableName the name of the database table
+ * @param timeColumnName the name of the column holding the timestamp info
+ * @return the query string
+ */
+ private String createQueryForTime(String schemaName, String tableName, String timeColumnName) {
+ return createBaseQueryString(schemaName, tableName)
+ + WHERE
+ + LOAD_PROFILE
+ + " = '"
+ + loadProfile.getKey()
+ + "' AND "
+ + timeColumnName
+ + "=?;";
+ }
+}
diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java
index 7d05bb462..ffdc47c7a 100644
--- a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java
+++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSource.java
@@ -11,8 +11,10 @@
import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy;
import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme;
import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation;
+import edu.ie3.datamodel.io.naming.timeseries.LoadProfileMetaInformation;
import edu.ie3.datamodel.io.source.TimeSeriesMetaInformationSource;
import edu.ie3.datamodel.utils.TimeSeriesUtils;
+import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
@@ -20,14 +22,14 @@
import java.util.stream.Collectors;
/** SQL implementation for retrieving {@link TimeSeriesMetaInformationSource} from the SQL scheme */
-public class SqlTimeSeriesMetaInformationSource implements TimeSeriesMetaInformationSource {
+public class SqlTimeSeriesMetaInformationSource extends TimeSeriesMetaInformationSource {
- private static final TimeSeriesMetaInformationFactory mappingFactory =
+ private static final TimeSeriesMetaInformationFactory metaInformationFactory =
new TimeSeriesMetaInformationFactory();
- private final DatabaseNamingStrategy namingStrategy;
- private final Map mapping;
+ private final Map timeSeriesMetaInformation;
+ private final DatabaseNamingStrategy namingStrategy;
private final SqlDataSource dataSource;
public SqlTimeSeriesMetaInformationSource(
@@ -36,8 +38,9 @@ public SqlTimeSeriesMetaInformationSource(
this.namingStrategy = databaseNamingStrategy;
String queryComplete = createQueryComplete(schemaName);
+ String loadMetaInformationQuery = createLoadProfileQueryComplete(schemaName);
- this.mapping =
+ this.timeSeriesMetaInformation =
dataSource
.executeQuery(queryComplete)
.map(this::createEntity)
@@ -45,6 +48,13 @@ public SqlTimeSeriesMetaInformationSource(
.collect(
Collectors.toMap(
IndividualTimeSeriesMetaInformation::getUuid, Function.identity()));
+
+ this.loadProfileMetaInformation =
+ dataSource
+ .executeQuery(loadMetaInformationQuery)
+ .map(this::createLoadProfileEntity)
+ .flatMap(Optional::stream)
+ .collect(Collectors.toMap(LoadProfileMetaInformation::getProfile, Function.identity()));
}
/**
@@ -60,7 +70,7 @@ private String createQueryComplete(String schemaName) {
Collectors.toMap(
namingStrategy::getTimeSeriesEntityName, columnScheme -> columnScheme));
- Iterable<String> selectQueries =
+ List<String> selectQueries =
dataSource.getDbTables(schemaName, namingStrategy.getTimeSeriesPrefix() + "%").stream()
.map(
tableName ->
@@ -76,27 +86,51 @@ private String createQueryComplete(String schemaName) {
.flatMap(Optional::stream)
.toList();
- return String.join("\nUNION\n", selectQueries) + ";";
+ return selectQueries.isEmpty() ? ";" : String.join("\nUNION\n", selectQueries) + ";";
+ }
+
+ /**
+ * Creates a query that retrieves all distinct load profiles from the load profile table.
+ *
+ * @param schemaName schema that the load profile time series reside in
+ * @return query String
+ */
+ private String createLoadProfileQueryComplete(String schemaName) {
+ String tableName = namingStrategy.getLoadProfileTableName();
+ boolean isNotPresent = dataSource.getDbTables(schemaName, tableName).isEmpty();
+
+ return isNotPresent
+ ? ";"
+ : "SELECT DISTINCT load_profile FROM " + schemaName + "." + tableName + ";";
}
@Override
public Map<UUID, IndividualTimeSeriesMetaInformation> getTimeSeriesMetaInformation() {
- return this.mapping;
+ return timeSeriesMetaInformation;
}
@Override
public Optional<IndividualTimeSeriesMetaInformation> getTimeSeriesMetaInformation(
UUID timeSeriesUuid) {
- return Optional.ofNullable(this.mapping.get(timeSeriesUuid));
+ return Optional.ofNullable(timeSeriesMetaInformation.get(timeSeriesUuid));
}
private Optional<IndividualTimeSeriesMetaInformation> createEntity(
Map<String, String> fieldToValues) {
EntityData entityData =
new EntityData(fieldToValues, IndividualTimeSeriesMetaInformation.class);
- return mappingFactory
+ return metaInformationFactory
.get(entityData)
.map(IndividualTimeSeriesMetaInformation.class::cast)
.getData();
}
+
+ private Optional<LoadProfileMetaInformation> createLoadProfileEntity(
+ Map<String, String> fieldToValues) {
+ EntityData entityData = new EntityData(fieldToValues, LoadProfileMetaInformation.class);
+ return metaInformationFactory
+ .get(entityData)
+ .map(LoadProfileMetaInformation.class::cast)
+ .getData();
+ }
}
diff --git a/src/main/java/edu/ie3/datamodel/models/value/load/RandomLoadValues.java b/src/main/java/edu/ie3/datamodel/models/value/load/RandomLoadValues.java
index 01f1f2c1b..3e9684e56 100644
--- a/src/main/java/edu/ie3/datamodel/models/value/load/RandomLoadValues.java
+++ b/src/main/java/edu/ie3/datamodel/models/value/load/RandomLoadValues.java
@@ -1,5 +1,5 @@
/*
- * © 2021. TU Dortmund University,
+ * © 2024. TU Dortmund University,
* Institute of Energy Systems, Energy Efficiency and Energy Economics,
* Research group Distribution grid planning and operation
*/
diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/LoadProfileSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/LoadProfileSourceTest.groovy
new file mode 100644
index 000000000..013e93e67
--- /dev/null
+++ b/src/test/groovy/edu/ie3/datamodel/io/source/LoadProfileSourceTest.groovy
@@ -0,0 +1,33 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+package edu.ie3.datamodel.io.source
+
+import static edu.ie3.datamodel.models.profile.LoadProfile.RandomLoadProfile.RANDOM_LOAD_PROFILE
+
+import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile
+import spock.lang.Specification
+
+class LoadProfileSourceTest extends Specification {
+
+ def "A LoadProfileSource should read in all built-in BdewStandardLoadProfiles"() {
+ when:
+ def profiles = LoadProfileSource.bdewLoadProfiles
+
+ then:
+ profiles.size() == 11
+ BdewStandardLoadProfile.values().every { profiles.keySet().contains(it) }
+ profiles.values().every { it.timeSeries.entries.size() == 96 }
+ }
+
+ def "A LoadProfileSource should read in the built-in RandomLoadProfile"() {
+ when:
+ def random = LoadProfileSource.randomLoadProfile.timeSeries
+
+ then:
+ random.loadProfile == RANDOM_LOAD_PROFILE
+ random.entries.size() == 96
+ }
+}
diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy
index 2bd412ade..9e4a8a614 100644
--- a/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy
+++ b/src/test/groovy/edu/ie3/datamodel/io/source/csv/CsvDataSourceTest.groovy
@@ -8,9 +8,11 @@ package edu.ie3.datamodel.io.source.csv
import edu.ie3.datamodel.exceptions.SourceException
import edu.ie3.datamodel.io.connectors.CsvFileConnector
import edu.ie3.datamodel.io.csv.CsvIndividualTimeSeriesMetaInformation
+import edu.ie3.datamodel.io.csv.CsvLoadProfileMetaInformation
import edu.ie3.datamodel.io.naming.FileNamingStrategy
import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme
import edu.ie3.datamodel.models.input.system.LoadInput
+import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile
import spock.lang.Shared
import spock.lang.Specification
@@ -51,6 +53,8 @@ class CsvDataSourceTest extends Specification implements CsvTestDataMeta {
FileNamingStrategy fileNamingStrategy
@Shared
Set timeSeriesPaths
+ @Shared
+ Set loadProfileTimeSeriesPaths
@Shared
DummyCsvSource dummyCsvSource
@@ -70,6 +74,13 @@ class CsvDataSourceTest extends Specification implements CsvTestDataMeta {
"its_c_c7b0d9d6-5044-4f51-80b4-f221d8b1f14b.csv"
].stream().map { file -> Path.of(file) }.collect(Collectors.toSet())
timeSeriesPaths.forEach { path -> Files.createFile(testBaseFolderPath.resolve(path)) }
+
+ loadProfileTimeSeriesPaths = [
+ "lpts_r1.csv",
+ "lpts_r2.csv",
+ "lpts_g0.csv"
+ ].stream().map { file -> Path.of(file) }.collect(Collectors.toSet())
+ loadProfileTimeSeriesPaths.forEach { path -> Files.createFile(testBaseFolderPath.resolve(path)) }
}
def "A DataSource should contain a valid connector after initialization"() {
@@ -346,13 +357,17 @@ class CsvDataSourceTest extends Specification implements CsvTestDataMeta {
def "The CsvDataSource is able to provide correct paths to time series files"() {
when:
- def actual = dummyCsvSource.getIndividualTimeSeriesFilePaths()
+ def actualIndividual = dummyCsvSource.getTimeSeriesFilePaths(fileNamingStrategy.individualTimeSeriesPattern)
+ def actualLoad = dummyCsvSource.getTimeSeriesFilePaths(fileNamingStrategy.loadProfileTimeSeriesPattern)
then:
noExceptionThrown()
- actual.size() == timeSeriesPaths.size()
- actual.containsAll(timeSeriesPaths)
+ actualIndividual.size() == timeSeriesPaths.size()
+ actualIndividual.containsAll(timeSeriesPaths)
+
+ actualLoad.size() == loadProfileTimeSeriesPaths.size()
+ actualLoad.containsAll(loadProfileTimeSeriesPaths)
}
def "The CsvDataSource is able to build correct uuid to meta information mapping"() {
@@ -389,4 +404,24 @@ class CsvDataSourceTest extends Specification implements CsvTestDataMeta {
then:
actual == expected
}
+
+ def "The CsvDataSource is able to build correct load profile meta information"() {
+ when:
+ def actual = dummyCsvSource.getCsvLoadProfileMetaInformation()
+
+ then:
+ actual.size() == 3
+ actual.get("r1").fullFilePath == Path.of("lpts_r1")
+ actual.get("r2").fullFilePath == Path.of("lpts_r2")
+ actual.get("g0").fullFilePath == Path.of("lpts_g0")
+ }
+
+ def "The CsvDataSource is able to build correct load profile meta information when restricting load profile"() {
+ when:
+ def actual = dummyCsvSource.getCsvLoadProfileMetaInformation(BdewStandardLoadProfile.G0)
+
+ then:
+ actual.size() == 1
+ actual.get("g0").fullFilePath == Path.of("lpts_g0")
+ }
}
diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlLoadProfileSourceIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlLoadProfileSourceIT.groovy
new file mode 100644
index 000000000..c29dcca76
--- /dev/null
+++ b/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlLoadProfileSourceIT.groovy
@@ -0,0 +1,96 @@
+/*
+ * © 2024. TU Dortmund University,
+ * Institute of Energy Systems, Energy Efficiency and Energy Economics,
+ * Research group Distribution grid planning and operation
+ */
+package edu.ie3.datamodel.io.source.sql
+
+
+import static edu.ie3.test.common.TimeSeriesSourceTestData.G3_VALUE_00MIN
+import static edu.ie3.test.common.TimeSeriesSourceTestData.TIME_00MIN
+
+import edu.ie3.datamodel.io.connectors.SqlConnector
+import edu.ie3.datamodel.io.factory.timeseries.BdewLoadProfileFactory
+import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy
+import edu.ie3.datamodel.io.naming.timeseries.LoadProfileMetaInformation
+import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile
+import edu.ie3.datamodel.models.value.load.BdewLoadValues
+import edu.ie3.test.helper.TestContainerHelper
+import edu.ie3.util.TimeUtil
+import org.testcontainers.containers.Container
+import org.testcontainers.containers.PostgreSQLContainer
+import org.testcontainers.spock.Testcontainers
+import org.testcontainers.utility.MountableFile
+import spock.lang.Shared
+import spock.lang.Specification
+
+@Testcontainers
+class SqlLoadProfileSourceIT extends Specification implements TestContainerHelper {
+
+ @Shared
+ PostgreSQLContainer postgreSQLContainer = new PostgreSQLContainer("postgres:14.2")
+
+ @Shared
+ SqlConnector connector
+
+ @Shared
+ SqlLoadProfileSource loadSource
+
+ @Shared
+ DatabaseNamingStrategy namingStrategy
+
+ static String schemaName = "public"
+
+ static UUID timeSeriesUuid = UUID.fromString("9b880468-309c-43c1-a3f4-26dd26266216")
+
+ def setupSpec() {
+ // Copy sql import scripts into docker
+ MountableFile sqlImportFile = getMountableFile("_timeseries/")
+ postgreSQLContainer.copyFileToContainer(sqlImportFile, "/home/")
+
+ // Execute import script
+ Iterable importFiles = Arrays.asList("load_profiles.sql")
+ for (String file: importFiles) {
+ Container.ExecResult res = postgreSQLContainer.execInContainer("psql", "-Utest", "-f/home/" + file)
+ assert res.stderr.empty
+ }
+
+ connector = new SqlConnector(postgreSQLContainer.jdbcUrl, postgreSQLContainer.username, postgreSQLContainer.password)
+ def metaInformation = new LoadProfileMetaInformation(timeSeriesUuid, "g3")
+
+ namingStrategy = new DatabaseNamingStrategy()
+
+ loadSource = new SqlLoadProfileSource<>(connector, schemaName, namingStrategy, metaInformation, BdewLoadValues, new BdewLoadProfileFactory())
+ }
+
+ def "A SqlLoadProfileSource can read and correctly parse a single value for a specific date"() {
+ when:
+ def value = loadSource.getValue(TIME_00MIN)
+
+ then:
+ value.present
+ value.get().p.get() == G3_VALUE_00MIN.p.get()
+ }
+
+ def "A SqlLoadProfileSource can read all value data"() {
+ when:
+ def timeSeries = loadSource.timeSeries
+
+ then:
+ timeSeries.uuid == timeSeriesUuid
+ timeSeries.entries.size() == 3
+ }
+
+ def "The SqlLoadProfileSource returns the time keys after a given key correctly"() {
+ given:
+ def time = TimeUtil.withDefaults.toZonedDateTime("2020-01-01T00:00:00Z")
+
+ when:
+ def actual = loadSource.getTimeKeysAfter(time)
+
+ then:
+ actual == [
+ TimeUtil.withDefaults.toZonedDateTime("2020-01-01T00:15:00Z")
+ ]
+ }
+}
diff --git a/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSourceIT.groovy b/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSourceIT.groovy
index 413328fa0..4f26fa36c 100644
--- a/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSourceIT.groovy
+++ b/src/test/groovy/edu/ie3/datamodel/io/source/sql/SqlTimeSeriesMetaInformationSourceIT.groovy
@@ -9,6 +9,7 @@ import edu.ie3.datamodel.io.connectors.SqlConnector
import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy
import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme
import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation
+import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile
import edu.ie3.test.helper.TestContainerHelper
import org.testcontainers.containers.Container
import org.testcontainers.containers.PostgreSQLContainer
@@ -41,7 +42,8 @@ class SqlTimeSeriesMetaInformationSourceIT extends Specification implements Test
"time_series_p.sql",
"time_series_ph.sql",
"time_series_pq.sql",
- "time_series_pqh.sql")
+ "time_series_pqh.sql",
+ "load_profiles.sql")
for (String file: importFiles) {
Container.ExecResult res = postgreSQLContainer.execInContainer("psql", "-Utest", "-f/home/" + file)
assert res.stderr.empty
@@ -101,4 +103,36 @@ class SqlTimeSeriesMetaInformationSourceIT extends Specification implements Test
then:
result.empty
}
+
+ def "The SQL time series meta information source returns correct meta information load profiles" () {
+ when:
+ def result = source.loadProfileMetaInformation
+
+ then:
+ result.size() == 2
+
+ result.keySet() == ["g2", "g3"] as Set
+ }
+
+ def "The SQL time series meta information source returns correct meta information for a given load profile"() {
+ when:
+ def result = source.getLoadProfileMetaInformation(profile)
+
+ then:
+ result.present == expected
+
+ where:
+ profile || expected
+ BdewStandardLoadProfile.G2 || true
+ BdewStandardLoadProfile.G3 || true
+ BdewStandardLoadProfile.L0 || false
+ }
+
+ def "The SQL time series meta information source returns an empty optional for an unknown load profile"() {
+ when:
+ def result = source.getLoadProfileMetaInformation(BdewStandardLoadProfile.G1)
+
+ then:
+ result.empty
+ }
}