/**
* A MapReducer accepts a list of filters and transformation `map` functions and produces a result
* when calling the `reduce` method (or one of its shorthand versions like `sum`, `count`, etc.).
*
* <p>You can set a list of filters that are applied on the raw OSM data, for example you can
* filter:</p>
* <ul>
* <li>geometrically by an area of interest (bbox or polygon)</li>
* <li>by osm tags (key only or key/value)</li>
* <li>by OSM type</li>
* <li>custom filter callback</li>
* </ul>
*
* <p>Depending on the used data "view", the MapReducer produces either "snapshots" or all
* modifications ("contributions") of the matching raw OSM data.</p>
*
* <p>These data can then be transformed arbitrarily by user defined `map` functions (which take one
* of these entity snapshots or modifications as input and produce an arbitrary output) or `flatMap`
* functions (which can return an arbitrary number of results per entity snapshot/contribution). It
* is possible to chain together any number of transformation functions.</p>
*
* <p>Finally, one can either use one of the pre-defined result-generating functions (e.g. `sum`,
* `count`, `average`, `uniq`), or specify a custom `reduce` procedure.</p>
*
* <p>If one wants to get results that are aggregated by timestamp (or some other index), one can
* use the `aggregateByTimestamp` or `aggregateBy` functionality that automatically handles the
* grouping of the output data.</p>
*
* <p>For more complex analyses, it is also possible to enable the grouping of the input data by
* the respective OSM ID. This can be used to view the whole history of entities at once.</p>
*
* @param <X> the type that is returned by the currently set mapper function. The next added
* mapper function will be called with a parameter of this type as input
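*
* <p>Example (a minimal usage sketch, not part of the original documentation; it assumes an
* {@code OSHDBDatabase} connection {@code oshdb} and the {@code OSMEntitySnapshotView} of the
* oshdb-api module):</p>
* <pre>{@code
* Integer buildingCount = OSMEntitySnapshotView.on(oshdb)
*     .areaOfInterest(bboxWgs84Coordinates(8.65, 49.38, 8.72, 49.42))
*     .timestamps("2020-01-01")
*     .filter("type:way and building=*")
*     .count();
* }</pre>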
*/
public abstract class MapReducer<X> implements
MapReducerSettings<MapReducer<X>>, Mappable<X>, MapReducerAggregations<X>,
MapAggregatable<MapAggregator<? extends Comparable<?>, X>, X>, Serializable {
private static final Logger LOG = LoggerFactory.getLogger(MapReducer.class);
protected static final String TAG_KEY_NOT_FOUND =
"Tag key {} not found. No data will match this filter.";
protected static final String TAG_NOT_FOUND =
"Tag {}={} not found. No data will match this filter.";
protected static final String EMPTY_TAG_LIST =
"Empty tag value list. No data will match this filter.";
protected static final String UNIMPLEMENTED_DATA_VIEW = "Unimplemented data view: %s";
protected static final String UNSUPPORTED_GROUPING = "Unsupported grouping: %s";
protected transient OSHDBDatabase oshdb;
protected Long timeout = null;
/** the class representing the used OSHDB view: either {@link OSMContribution} or
* {@link OSMEntitySnapshot}. */
Class<? extends OSHDBMapReducible> viewClass;
enum Grouping {
NONE, BY_ID
}
Grouping grouping = Grouping.NONE;
/**
* Returns whether the current backend can be canceled (e.g. on a query timeout).
*/
public boolean isCancelable() {
return false;
}
// utility objects
private TagInterpreter tagInterpreter = null;
// settings and filters
protected OSHDBTimestampList tstamps = new OSHDBTimestamps(
"2008-01-01",
currentDate(),
OSHDBTimestamps.Interval.MONTHLY
);
protected OSHDBBoundingBox bboxFilter = bboxWgs84Coordinates(-180.0, -90.0, 180.0, 90.0);
private Geometry polyFilter = null;
protected EnumSet<OSMType> typeFilter = EnumSet.of(OSMType.NODE, OSMType.WAY, OSMType.RELATION);
private final List<SerializablePredicate<OSHEntity>> preFilters = new ArrayList<>();
private final List<SerializablePredicate<OSMEntity>> filters = new ArrayList<>();
final LinkedList<MapFunction> mappers = new LinkedList<>();
// basic constructor
protected MapReducer(OSHDBDatabase oshdb, Class<? extends OSHDBMapReducible> viewClass) {
this.oshdb = oshdb;
this.viewClass = viewClass;
}
// copy constructor
protected MapReducer(MapReducer<?> obj) {
this.oshdb = obj.oshdb;
this.viewClass = obj.viewClass;
this.grouping = obj.grouping;
this.tagInterpreter = obj.tagInterpreter;
this.tstamps = obj.tstamps;
this.bboxFilter = obj.bboxFilter;
this.polyFilter = obj.polyFilter;
this.typeFilter = obj.typeFilter.clone();
this.preFilters.addAll(obj.preFilters);
this.filters.addAll(obj.filters);
this.mappers.addAll(obj.mappers);
}
@NotNull
protected abstract MapReducer<X> copy();
// -----------------------------------------------------------------------------------------------
// "Setting" methods and associated internal helpers
// -----------------------------------------------------------------------------------------------
/**
* Sets the tagInterpreter to use in the analysis. The tagInterpreter is used internally to
* determine the geometry type of osm entities (e.g. an osm way can become either a LineString or
* a Polygon, depending on its tags). Normally, this is generated automatically for the user. But,
* for example, if one doesn't want to use the DefaultTagInterpreter, this function can be used to
* supply a custom tagInterpreter.
*
* @param tagInterpreter the tagInterpreter object to use in the processing of osm entities
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
*/
@SuppressWarnings("unused")
@Contract(pure = true)
public MapReducer<X> tagInterpreter(TagInterpreter tagInterpreter) {
MapReducer<X> ret = this.copy();
ret.tagInterpreter = tagInterpreter;
return ret;
}
// -----------------------------------------------------------------------------------------------
// Filtering methods
// -----------------------------------------------------------------------------------------------
/**
* Set the area of interest to the given bounding box. Only objects inside or clipped by this bbox
* will be passed on to the analysis' `mapper` function.
*
* @param bboxFilter the bounding box to query the data in
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
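*
* <p>Example (a minimal sketch; the coordinate values are arbitrary and given in minLon, minLat,
* maxLon, maxLat order, using the statically imported {@code bboxWgs84Coordinates} helper also
* used elsewhere in this class):</p>
* <pre>{@code
* mapReducer = mapReducer.areaOfInterest(bboxWgs84Coordinates(8.65, 49.38, 8.72, 49.42));
* }</pre>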
*/
@Override
@Contract(pure = true)
public MapReducer<X> areaOfInterest(@NotNull OSHDBBoundingBox bboxFilter) {
MapReducer<X> ret = this.copy();
if (this.polyFilter == null) {
ret.bboxFilter = ret.bboxFilter.intersection(bboxFilter);
} else {
ret.polyFilter = Geo.clip(ret.polyFilter, bboxFilter);
ret.bboxFilter = OSHDBGeometryBuilder.boundingBoxOf(ret.polyFilter.getEnvelopeInternal());
}
return ret;
}
/**
* Set the area of interest to the given polygon. Only objects inside or clipped by this polygon
* will be passed on to the analysis' `mapper` function.
*
* @param polygonFilter the polygon to query the data in
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
*/
@Override
@Contract(pure = true)
public <P extends Geometry & Polygonal>
MapReducer<X> areaOfInterest(@NotNull P polygonFilter) {
MapReducer<X> ret = this.copy();
if (this.polyFilter == null) {
ret.polyFilter = Geo.clip(polygonFilter, ret.bboxFilter);
} else {
ret.polyFilter = Geo.clip(polygonFilter, ret.getPolyFilter());
}
ret.bboxFilter = OSHDBGeometryBuilder.boundingBoxOf(ret.polyFilter.getEnvelopeInternal());
return ret;
}
/**
* Set the timestamps for which to perform the analysis.
*
* <p>Depending on the used data *View*, this has slightly different semantics:</p>
* <ul>
* <li>For the OSMEntitySnapshotView it will set the time slices at which to take the
* "snapshots"</li>
* <li>For the OSMContributionView it will set the time interval in which to look for
* osm contributions (only the first and last timestamp of this list are contributing).</li>
* </ul>
*
* <p>Additionally, these timestamps are used in the `aggregateByTimestamp` functionality.</p>
*
* @param tstamps an object (implementing the OSHDBTimestampList interface) which provides the
* timestamps to do the analysis for
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
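*
* <p>Example (a minimal sketch; uses the {@code OSHDBTimestamps} helper, which implements this
* interface, with arbitrary dates):</p>
* <pre>{@code
* mapReducer = mapReducer.timestamps(
*     new OSHDBTimestamps("2014-01-01", "2020-01-01", OSHDBTimestamps.Interval.YEARLY));
* }</pre>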
*/
@Contract(pure = true)
public MapReducer<X> timestamps(OSHDBTimestampList tstamps) {
MapReducer<X> ret = this.copy();
ret.tstamps = tstamps;
return ret;
}
/**
* Set the timestamps for which to perform the analysis in a regular interval between a start and
* end date.
*
* <p>See {@link #timestamps(OSHDBTimestampList)} for further information.</p>
*
* <p>Supplied times are assumed to be in UTC (and the only allowed timezone designator is 'Z').
* If a date parameter does not include a time part, midnight (00:00:00Z) of the respective
* date is used.</p>
*
* @param isoDateStart an ISO 8601 date string representing the start date of the analysis
* @param isoDateEnd an ISO 8601 date string representing the end date of the analysis
* @param interval the interval between the timestamps to be used in the analysis
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
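*
* <p>Example (a minimal sketch with arbitrary dates, producing monthly timestamps over one
* year):</p>
* <pre>{@code
* mapReducer = mapReducer.timestamps("2019-01-01", "2020-01-01", OSHDBTimestamps.Interval.MONTHLY);
* }</pre>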
*/
@Contract(pure = true)
public MapReducer<X> timestamps(
String isoDateStart, String isoDateEnd, OSHDBTimestamps.Interval interval
) {
return this.timestamps(new OSHDBTimestamps(isoDateStart, isoDateEnd, interval));
}
/**
* Sets a single timestamp at which to perform the analysis.
*
* <p>Useful in combination with the OSMEntitySnapshotView when not performing further aggregation
* by timestamp.</p>
*
* <p>See {@link #timestamps(OSHDBTimestampList)} for further information.</p>
*
* <p>Supplied times are assumed to be in UTC (and the only allowed timezone designator is 'Z').
* If a date parameter does not include a time part, midnight (00:00:00Z) of the respective
* date is used.</p>
*
* @param isoDate an ISO 8601 date string representing the date of the analysis
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
*/
@Contract(pure = true)
public MapReducer<X> timestamps(String isoDate) {
if (isOSMContributionViewQuery()) {
LOG.warn("OSMContributionView requires two or more timestamps, but only one was supplied.");
}
return this.timestamps(isoDate, isoDate, new String[] {});
}
/**
* Sets two timestamps (start and end date) for which to perform the analysis.
*
* <p>Useful in combination with the OSMContributionView when not performing further aggregation
* by timestamp.</p>
*
* <p>See {@link #timestamps(OSHDBTimestampList)} for further information.</p>
*
* <p>Supplied times are assumed to be in UTC (and the only allowed timezone designator is 'Z').
* If a date parameter does not include a time part, midnight (00:00:00Z) of the respective
* date is used.</p>
*
* @param isoDateStart an ISO 8601 date string representing the start date of the analysis
* @param isoDateEnd an ISO 8601 date string representing the end date of the analysis
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
*/
@Contract(pure = true)
public MapReducer<X> timestamps(String isoDateStart, String isoDateEnd) {
return this.timestamps(isoDateStart, isoDateEnd, new String[] {});
}
/**
* Sets multiple arbitrary timestamps for which to perform the analysis.
*
* <p>Note for programmers wanting to use this method to supply an arbitrary number (n>=1) of
* timestamps: You may supply the same time string multiple times, which will be de-duplicated
* internally. E.g. you can call the method like this:
* {@code .timestamps(dateArr[0], dateArr[0], dateArr)}</p>
*
* <p>See {@link #timestamps(OSHDBTimestampList)} for further information.</p>
*
* <p>Supplied times are assumed to be in UTC (and the only allowed timezone designator is 'Z').
* If a date parameter does not include a time part, midnight (00:00:00Z) of the respective
* date is used.</p>
*
* @param isoDateFirst an ISO 8601 date string representing the start date of the analysis
* @param isoDateSecond an ISO 8601 date string representing the second date of the analysis
* @param isoDateMore more ISO 8601 date strings representing the remaining timestamps of the
* analysis
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
*/
@Contract(pure = true)
public MapReducer<X> timestamps(
String isoDateFirst, String isoDateSecond, String... isoDateMore) {
SortedSet<OSHDBTimestamp> timestamps = new TreeSet<>();
timestamps.add(
new OSHDBTimestamp(IsoDateTimeParser.parseIsoDateTime(isoDateFirst).toEpochSecond()));
timestamps.add(
new OSHDBTimestamp(IsoDateTimeParser.parseIsoDateTime(isoDateSecond).toEpochSecond()));
for (String isoDate : isoDateMore) {
timestamps.add(
new OSHDBTimestamp(IsoDateTimeParser.parseIsoDateTime(isoDate).toEpochSecond()));
}
return this.timestamps(() -> timestamps);
}
@Contract(pure = true)
private MapReducer<X> osmTypeInternal(Set<OSMType> typeFilter) {
MapReducer<X> ret = this.copy();
typeFilter = Sets.intersection(ret.typeFilter, typeFilter);
if (typeFilter.isEmpty()) {
ret.typeFilter = EnumSet.noneOf(OSMType.class);
} else {
ret.typeFilter = EnumSet.copyOf(typeFilter);
}
return ret;
}
@Contract(pure = true)
private MapReducer<X> osmTag(OSHDBTag tag) {
MapReducer<X> ret = this.copy();
ret.preFilters.add(oshEntity -> oshEntity.hasTagKey(tag.getKey()));
ret.filters.add(osmEntity -> osmEntity.getTags().hasTag(tag));
return ret;
}
@Contract(pure = true)
private MapReducer<X> osmTag(OSHDBTagKey tagKey) {
MapReducer<X> ret = this.copy();
ret.preFilters.add(oshEntity -> oshEntity.hasTagKey(tagKey));
ret.filters.add(osmEntity -> osmEntity.getTags().hasTagKey(tagKey));
return ret;
}
// -----------------------------------------------------------------------------------------------
// "map", "flatMap" transformation methods
// -----------------------------------------------------------------------------------------------
/**
* Set an arbitrary `map` transformation function.
*
* @param mapper function that will be applied to each data entry (osm entity snapshot or
* contribution)
* @param <R> an arbitrary data type which is the return type of the transformation `map` function
* @return a modified copy of this MapReducer object operating on the transformed type (&lt;R&gt;)
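*
* <p>Example (a minimal sketch; assumes a MapReducer of {@code OSMEntitySnapshot} objects named
* {@code snapshotMapReducer}):</p>
* <pre>{@code
* MapReducer<Long> entityIds = snapshotMapReducer.map(snapshot -> snapshot.getEntity().getId());
* }</pre>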
*/
@Override
@Contract(pure = true)
public <R> MapReducer<R> map(SerializableFunction<X, R> mapper) {
return map((o, ignored) -> mapper.apply(o));
}
// Some internal methods can also map the "root" object of the mapreducer's view.
@Contract(pure = true)
protected <R> MapReducer<R> map(SerializableBiFunction<X, Object, R> mapper) {
MapReducer<?> ret = this.copy();
ret.mappers.add(new MapFunction(mapper, false));
@SuppressWarnings("unchecked") // after applying this mapper, we have a mapreducer of type R
MapReducer<R> result = (MapReducer<R>) ret;
return result;
}
/**
* Set an arbitrary `flatMap` transformation function, which returns list with an arbitrary number
* of results per input data entry. The results of this function will be "flattened", meaning that
* they can be for example transformed again by setting additional `map` functions.
*
* @param flatMapper function that will be applied to each data entry (osm entity snapshot or
* contribution) and returns a list of results
* @param <R> an arbitrary data type which is the return type of the transformation `flatMap` function
* @return a modified copy of this MapReducer object operating on the transformed type (&lt;R&gt;)
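*
* <p>Example (a minimal sketch; assumes a MapReducer of {@code OSMEntitySnapshot} objects named
* {@code snapshotMapReducer}, and that the entity's {@code getTags()} result can be iterated as
* {@code OSHDBTag} objects):</p>
* <pre>{@code
* MapReducer<OSHDBTag> tags = snapshotMapReducer.flatMap(
*     snapshot -> snapshot.getEntity().getTags());
* }</pre>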
*/
@Override
@Contract(pure = true)
public <R> MapReducer<R> flatMap(SerializableFunction<X, Iterable<R>> flatMapper) {
return flatMap((o, ignored) -> flatMapper.apply(o));
}
// Some internal methods can also flatMap the "root" object of the mapreducer's view.
@Contract(pure = true)
protected <R> MapReducer<R> flatMap(SerializableBiFunction<X, Object, Iterable<R>> flatMapper) {
MapReducer<?> ret = this.copy();
ret.mappers.add(new MapFunction(flatMapper, true));
@SuppressWarnings("unchecked") // after applying this mapper, we have a mapreducer of type R
MapReducer<R> result = (MapReducer<R>) ret;
return result;
}
/**
* Adds a custom arbitrary filter that gets executed in the current transformation chain.
*
* @param f the filter function that determines if the respective data should be passed on (when f
* returns true) or discarded (when f returns false)
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
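*
* <p>Example (a minimal sketch; assumes a MapReducer of {@code OSMEntitySnapshot} objects named
* {@code snapshotMapReducer} and keeps only entities that have been edited at least once after
* their creation):</p>
* <pre>{@code
* snapshotMapReducer = snapshotMapReducer.filter(s -> s.getEntity().getVersion() > 1);
* }</pre>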
*/
@Override
@Contract(pure = true)
public MapReducer<X> filter(SerializablePredicate<X> f) {
MapReducer<X> ret = this.copy();
ret.mappers.add(new FilterFunction(f));
return ret;
}
/**
* Apply a custom filter expression to this query.
*
* @see org.heigit.ohsome.oshdb.filter the oshdb-filter readme and package documentation for
* further information about how to create such a filter expression object
*
* @param f the {@link org.heigit.ohsome.oshdb.filter.FilterExpression} to apply
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
*/
@Override
@Contract(pure = true)
public MapReducer<X> filter(FilterExpression f) {
MapReducer<X> ret = this.copy();
ret.preFilters.add(f::applyOSH);
ret.filters.add(f::applyOSM);
// apply geometry filter as first map function
final List<MapFunction> remainingMappers = List.copyOf(ret.mappers);
ret.mappers.clear();
if (this.grouping == Grouping.NONE) {
// no grouping -> directly filter using the geometries of the snapshot / contribution
if (isOSMEntitySnapshotViewQuery()) {
ret = ret.filter(x -> {
OSMEntitySnapshot s = (OSMEntitySnapshot) x;
return f.applyOSMEntitySnapshot(s);
});
} else if (isOSMContributionViewQuery()) {
ret = ret.filter(x -> {
OSMContribution c = (OSMContribution) x;
return f.applyOSMContribution(c);
});
}
} else if (this.grouping == Grouping.BY_ID) {
// grouping by entity -> filter each list entry individually
if (isOSMEntitySnapshotViewQuery()) {
@SuppressWarnings("unchecked") MapReducer filteredListMapper = (MapReducer)
ret.map(x -> (Collection) x)
.map(snapshots -> snapshots.stream()
.filter(f::applyOSMEntitySnapshot)
.collect(Collectors.toCollection(ArrayList::new)))
.filter(snapshots -> !snapshots.isEmpty());
ret = filteredListMapper;
} else if (isOSMContributionViewQuery()) {
@SuppressWarnings("unchecked") MapReducer filteredListMapper = (MapReducer)
ret.map(x -> (Collection) x)
.map(contributions -> contributions.stream()
.filter(f::applyOSMContribution)
.collect(Collectors.toCollection(ArrayList::new)))
.filter(contributions -> !contributions.isEmpty());
ret = filteredListMapper;
}
} else {
throw new UnsupportedOperationException(
"filtering not implemented in grouping mode " + this.grouping.toString());
}
ret.mappers.addAll(remainingMappers);
return optimizeFilters(ret, f);
}
/**
* Apply a textual filter to this query.
*
* @see org.heigit.ohsome.oshdb.filter the oshdb-filter readme for a description of the filter
* syntax
*
* @param f the filter string to apply
* @return a modified copy of this mapReducer (can be used to chain multiple commands together)
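*
* <p>Example (a minimal sketch using the oshdb-filter syntax with an arbitrary tag filter):</p>
* <pre>{@code
* mapReducer = mapReducer.filter("type:way and highway=residential");
* }</pre>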
*/
@Override
@Contract(pure = true)
public MapReducer<X> filter(String f) {
return this.filter(new FilterParser(oshdb.getTagTranslator()).parse(f));
}
// -----------------------------------------------------------------------------------------------
// Grouping and Aggregation
// Sets how the input data is "grouped", or the output data is "aggregated" into separate chunks.
// -----------------------------------------------------------------------------------------------
/**
* Groups the input data (osm entity snapshot or contributions) by their respective entity's ids
* before feeding them into further transformation functions. This can be used to do more complex
* analyses on the osm data that require one to know about the full editing history of
* individual osm entities, e.g., when looking for contributions which got reverted at a later
* point in time.
*
* <p>The values in the returned lists of snapshot or contribution objects are returned in their
* natural order: i.e. sorted ascending by timestamp.</p>
*
* <p>This needs to be called before any `map` or `flatMap` transformation functions have been
* set. Otherwise a runtime exception will be thrown.</p>
*
* @return the MapReducer object which applies its transformations on (by entity id grouped) lists
* of the input data
* @throws UnsupportedOperationException if this is called after some map (or flatMap) functions
* have already been set
* @throws UnsupportedOperationException if this is called when a grouping has already been
* activated
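*
* <p>Example (a minimal sketch; assumes a MapReducer of {@code OSMEntitySnapshot} objects named
* {@code snapshotMapReducer} and counts entities with more than one snapshot in the queried
* period):</p>
* <pre>{@code
* Integer result = snapshotMapReducer
*     .groupByEntity()
*     .filter(snapshots -> snapshots.size() > 1)
*     .count();
* }</pre>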
*/
@Contract(pure = true)
public MapReducer<List<X>> groupByEntity() throws UnsupportedOperationException {
if (this.grouping != Grouping.NONE) {
throw new UnsupportedOperationException("A grouping is already active on this MapReducer");
}
if (!this.mappers.isEmpty()) {
// for convenience, we allow one to set this function even after some map functions were set.
// if some map / flatMap functions were already set:
// "rewind" them first, apply the grouping and then re-apply the map/flatMap functions
// accordingly
MapReducer<X> ret = this.copy();
List<MapFunction> mapFunctions = new ArrayList<>(ret.mappers);
ret.mappers.clear();
ret.grouping = Grouping.BY_ID;
@SuppressWarnings("unchecked")
// now in the reduce step the backend will return a list of items
MapReducer<List<Object>> listMapReducer = (MapReducer<List<Object>>) ret;
for (MapFunction action : mapFunctions) {
if (action.isFlatMapper()) {
listMapReducer = listMapReducer.map((list, root) -> list.stream()
.flatMap(s -> Streams.stream((Iterable<?>) action.apply(s, root)))
.collect(Collectors.toList()));
} else {
@SuppressWarnings("StaticPseudoFunctionalStyleMethod")
MapReducer<List<Object>> mappedResult = listMapReducer.map((list, root) ->
Lists.transform(list, x -> action.apply(x, root)));
listMapReducer = mappedResult;
}
}
@SuppressWarnings("unchecked") // now in the reduce step the backend will return a list of X
MapReducer<List<X>> result = listMapReducer.map(List.class::cast);
return result;
} else {
MapReducer<X> ret = this.copy();
ret.grouping = Grouping.BY_ID;
@SuppressWarnings("unchecked") // now in the reduce step the backend will return a list of X
MapReducer<List<X>> result = (MapReducer<List<X>>) ret;
return result;
}
}
/**
* Sets a custom aggregation function that is used to group the output results.
*
* @param indexer a function that will be called for each input element and returns a value that
* will be used to group the results by
* @param <U> the data type of the values used to aggregate the output. has to be a comparable
* type
* @param zerofill a collection of values that are expected to be present in the result
* @return a MapAggregator object with the equivalent state (settings, filters, map function,
* etc.) of the current MapReducer object
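*
* <p>Example (a minimal sketch; assumes a MapReducer of {@code OSMEntitySnapshot} objects named
* {@code snapshotMapReducer} and aggregates counts by OSM type, zero-filling all types):</p>
* <pre>{@code
* SortedMap<OSMType, Integer> counts = snapshotMapReducer
*     .aggregateBy(snapshot -> snapshot.getEntity().getType(), EnumSet.allOf(OSMType.class))
*     .count();
* }</pre>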
*/
@Contract(pure = true)
public <U extends Comparable<U> & Serializable> MapAggregator<U, X> aggregateBy(
SerializableFunction<X, U> indexer,
Collection<U> zerofill
) {
return new MapAggregator<>(this, (data, ignored) -> indexer.apply(data), zerofill);
}
/**
* Sets a custom aggregation function that is used to group the output results.
*
* @param indexer a function that will be called for each input element and returns a value that
* will be used to group the results by
* @param <U> the data type of the values used to aggregate the output. has to be a comparable
* type
* @return a MapAggregator object with the equivalent state (settings, filters, map function,
* etc.) of the current MapReducer object
*/
@Override
@Contract(pure = true)
public <U extends Comparable<U> & Serializable> MapAggregator<U, X> aggregateBy(
SerializableFunction<X, U> indexer
) {
return this.aggregateBy(indexer, Collections.emptyList());
}
/**
* Sets up automatic aggregation by timestamp.
*
* <p>In the OSMEntitySnapshotView, the snapshots' timestamp will be used directly to aggregate
* results into. In the OSMContributionView, the timestamps of the respective data modifications
* will be matched to corresponding time intervals (that are defined by the `timestamps` setting
* here).</p>
*
* <p>Cannot be used together with the `groupByEntity()` setting enabled.</p>
*
* @return a MapAggregator object with the equivalent state (settings, filters, map function,
* etc.) of the current MapReducer object
* @throws UnsupportedOperationException if this is called when the `groupByEntity()` mode has
* been activated
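*
* <p>Example (a minimal sketch; assumes a MapReducer of {@code OSMEntitySnapshot} objects named
* {@code snapshotMapReducer} and returns one count per time slice of the `timestamps`
* setting):</p>
* <pre>{@code
* SortedMap<OSHDBTimestamp, Integer> counts = snapshotMapReducer
*     .aggregateByTimestamp()
*     .count();
* }</pre>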
*/
@Contract(pure = true)
public MapAggregator<OSHDBTimestamp, X> aggregateByTimestamp()
throws UnsupportedOperationException {
if (this.grouping != Grouping.NONE) {
throw new UnsupportedOperationException(
"automatic aggregateByTimestamp() cannot be used together with the groupByEntity() "
+ "functionality -> try using aggregateByTimestamp(customTimestampIndex) instead");
}
// by timestamp indexing function -> for some views we need to match the input data to the list
SerializableBiFunction<X, Object, OSHDBTimestamp> indexer;
if (isOSMContributionViewQuery()) {
final TreeSet<OSHDBTimestamp> timestamps = new TreeSet<>(this.tstamps.get());
indexer = (ignored, root) -> timestamps.floor(((OSMContribution) root).getTimestamp());
} else if (isOSMEntitySnapshotViewQuery()) {
indexer = (ignored, root) -> ((OSMEntitySnapshot) root).getTimestamp();
} else {
throw new UnsupportedOperationException(
"automatic aggregateByTimestamp() only implemented for OSMContribution and "
+ "OSMEntitySnapshot -> try using aggregateByTimestamp(customTimestampIndex) instead"
);
}
return new MapAggregator<>(this, indexer, this.getZerofillTimestamps());
}
/**
* Sets up aggregation by a custom time index.
*
* <p>The timestamps returned by the supplied indexing function are matched to the corresponding
* time intervals.</p>
*
* @param indexer a callback function that returns a timestamp object for each data element. Note
* that if this function returns timestamps outside of the supplied timestamps()
* interval results may be undefined
* @return a MapAggregator object with the equivalent state (settings,
* filters, map function, etc.) of the current MapReducer object
*/
public MapAggregator<OSHDBTimestamp, X> aggregateByTimestamp(
SerializableFunction<X, OSHDBTimestamp> indexer
) throws UnsupportedOperationException {
final TreeSet<OSHDBTimestamp> timestamps = new TreeSet<>(this.tstamps.get());
final OSHDBTimestamp minTime = timestamps.first();
final OSHDBTimestamp maxTime = timestamps.last();
return new MapAggregator<>(this, (data, ignored) -> {
// match timestamps to the given timestamp list
OSHDBTimestamp aggregationTimestamp = indexer.apply(data);
if (aggregationTimestamp == null
|| aggregationTimestamp.compareTo(minTime) < 0
|| aggregationTimestamp.compareTo(maxTime) > 0) {
throw new OSHDBInvalidTimestampException(
"Aggregation timestamp outside of time query interval.");
}
return timestamps.floor(aggregationTimestamp);
}, getZerofillTimestamps());
}
/**
* Sets up automatic aggregation by geometries.
*
* <p>Cannot be used together with the `groupByEntity()` setting enabled.</p>
*
* @param geometries an associated list of polygons and identifiers
* @param <U> the type of the identifiers used to aggregate
* @param <P> a polygonal geometry type
* @return a MapAggregator object with the equivalent state (settings, filters, map function,
* etc.) of the current MapReducer object
* @throws UnsupportedOperationException if this is called when the `groupByEntity()` mode has
* been activated
* @throws UnsupportedOperationException when called after any map or flatMap functions are set
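*
* <p>Example (a minimal sketch; assumes a MapReducer of {@code OSMEntitySnapshot} objects named
* {@code snapshotMapReducer} and two hypothetical JTS {@code Polygon} objects {@code polygonA}
* and {@code polygonB}):</p>
* <pre>{@code
* SortedMap<String, Integer> counts = snapshotMapReducer
*     .aggregateByGeometry(Map.of("a", polygonA, "b", polygonB))
*     .count();
* }</pre>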
*/
@Contract(pure = true)
public <U extends Comparable<U> & Serializable, P extends Geometry & Polygonal>
MapAggregator<U, X> aggregateByGeometry(Map<U, P> geometries)
throws UnsupportedOperationException {
if (this.grouping != Grouping.NONE) {
throw new UnsupportedOperationException(
"aggregateByGeometry() cannot be used together with the groupByEntity() functionality");
}
GeometrySplitter<U> gs = new GeometrySplitter<>(geometries);
var prevMapper = this.getMapper();
SerializableFunction