/*
 * Apache HTTPD & NGINX Access log parsing made easy
 * Copyright (C) 2011-2019 Niels Basjes
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package nl.basjes.pig.input.apachehttpdlog;

import nl.basjes.hadoop.input.ApacheHttpdLogfileInputFormat;
import nl.basjes.hadoop.input.ApacheHttpdLogfileRecordReader;
import nl.basjes.hadoop.input.ParsedRecord;
import nl.basjes.parse.core.Casts;
import nl.basjes.parse.core.Dissector;
import nl.basjes.parse.core.Parser;
import nl.basjes.parse.core.exceptions.InvalidDissectorException;
import nl.basjes.parse.core.exceptions.MissingDissectorsException;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.pig.Expression;
import org.apache.pig.LoadFunc;
import org.apache.pig.LoadMetadata;
import org.apache.pig.LoadPushDown;
import org.apache.pig.ResourceSchema;
import org.apache.pig.ResourceSchema.ResourceFieldSchema;
import org.apache.pig.ResourceStatistics;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.util.UDFContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

public class Loader
        extends LoadFunc
        implements LoadMetadata,
                   LoadPushDown {

    private static final Logger LOG = LoggerFactory.getLogger(Loader.class);

    @SuppressWarnings("rawtypes")
    private ApacheHttpdLogfileRecordReader reader;

    // If we ONLY want the example or the list of fields, we set this to true.
    private boolean onlyWantListOfFields = false;
    private boolean isBuildingExample = false;

    private String logformat;
    private List<String> requestedFields = new ArrayList<>();
    private List<String> originalRequestedFields = null;
    private RequiredFieldList requiredFieldList = null;
    private final Map<String, Set<String>> typeRemappings = new HashMap<>();
    private final List<Dissector> additionalDissectors = new ArrayList<>();
    private final TupleFactory tupleFactory;

    private ApacheHttpdLogfileInputFormat theInputFormat;

    // These are purely retained to make it possible to create a working example
    private final ArrayList<String> specialParameters = new ArrayList<>();

    private static final String PRUNE_PROJECTION_INFO = "prune.projection.info";
    // ------------------------------------------
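    // A hedged usage sketch (not part of the original source): the logformat
    // string and field names below are illustrative; pass the single parameter
    // 'fields' or 'example' to get the real list for your logformat.
    //
    //   Clicks =
    //     LOAD 'access.log'
    //     USING nl.basjes.pig.input.apachehttpdlog.Loader(
    //         '%h %l %u %t "%r" %>s %b',
    //         'IP:connection.client.host',
    //         'HTTP.URI:request.firstline.uri')
    //     AS (ClientIP:chararray, RequestURI:chararray);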
    /**
     * Pig Loaders only take string parameters. The constructor is really the
     * only interaction the user has with the Loader from the script.
     *
     * @param parameters specified in the call from the Pig script
     */
    public Loader(String... parameters) {
        for (String param : parameters) {
            if (logformat == null) {
                logformat = param;
                LOG.debug("Using logformat: {}", logformat);
                continue;
            }

            if (param.startsWith("-map:")) {
                specialParameters.add(param);
                String[] mapParams = param.split(":");
                if (mapParams.length != 3) {
                    throw new IllegalArgumentException("Found map with wrong number of parameters: " + param);
                }
                String mapField = mapParams[1];
                String mapType  = mapParams[2];

                Set<String> remapping = typeRemappings.computeIfAbsent(mapField, k -> new HashSet<>());
                remapping.add(mapType);
                LOG.debug("Add mapping for field \"{}\" to type \"{}\"", mapField, mapType);
                continue;
            }
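            // Hedged illustration of the '-map:' form handled above (the
            // field and type are examples, not from the original source):
            //   -map:request.firstline.uri.query.g:HTTP.URI
            // would remap the query string parameter 'g' to the type HTTP.URI
            // so it can be dissected further as a URI.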
if (param.startsWith("-load:")) {
specialParameters.add(param);
String[] loadParams = param.split(":", 3);
if (loadParams.length != 3) {
throw new IllegalArgumentException("Found load with wrong number of parameters:" + param);
}
String dissectorClassName = loadParams[1];
String dissectorParam = loadParams[2];
try {
Class> clazz = Class.forName(dissectorClassName);
Constructor> constructor = clazz.getConstructor();
Dissector instance = (Dissector) constructor.newInstance();
if (!instance.initializeFromSettingsParameter(dissectorParam)) {
throw new IllegalArgumentException("Initialization failed of dissector instance of class " + dissectorClassName);
}
additionalDissectors.add(instance);
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("Found load with bad specification: No such class:" + param, e);
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("Found load with bad specification: Class does not have the required constructor", e);
} catch (InvocationTargetException | InstantiationException e) {
throw new IllegalArgumentException("Unable to load specified dissector", e);
} catch (IllegalAccessException e) {
throw new IllegalArgumentException("Found load with bad specification: Required constructor is not public", e);
}
LOG.debug("Loaded additional dissector: {}(\"{}\")", dissectorClassName, dissectorParam);
continue;
}
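            // Hedged illustration of the '-load:' form handled above; the
            // class name and settings string are hypothetical:
            //   -load:nl.example.dissectors.MyDissector:some-settings
            // The named class must be on the classpath, expose a public
            // no-argument constructor, and accept the settings string via
            // initializeFromSettingsParameter(String).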
            if (ApacheHttpdLogfileRecordReader.FIELDS.equals(param.toLowerCase(Locale.ENGLISH))) {
                onlyWantListOfFields = true;
                requestedFields.add(ApacheHttpdLogfileRecordReader.FIELDS);
                LOG.debug("Requested ONLY the possible field values");
                continue;
            }

            if ("example".equals(param.toLowerCase(Locale.ENGLISH))) {
                isBuildingExample = true;
                requestedFields.add(ApacheHttpdLogfileRecordReader.FIELDS);
                LOG.debug("Requested ONLY the possible field values in EXAMPLE format");
                continue;
            }
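            // Hedged illustration of the two special parameters handled above
            // (the 'common' logformat nickname is an assumption):
            //   Fields  = LOAD 'dummy' USING Loader('common', 'fields');
            //   Example = LOAD 'dummy' USING Loader('common', 'example');
            // Both make getNext() emit metadata instead of parsed log lines.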
            String cleanedFieldValue = Parser.cleanupFieldValue(param);
            LOG.debug("Add requested field: {}", cleanedFieldValue);
            requestedFields.add(cleanedFieldValue);
        }

        if (logformat == null) {
            throw new IllegalArgumentException("Must specify the logformat");
        }

        if (requestedFields.isEmpty()) {
            isBuildingExample = true;
            requestedFields.add(ApacheHttpdLogfileRecordReader.FIELDS);
            LOG.debug("Requested ONLY the possible field values in EXAMPLE format");
        }

        theInputFormat = new ApacheHttpdLogfileInputFormat(
            getLogformat(), getRequestedFields(), getTypeRemappings(), getAdditionalDissectors());
        reader = theInputFormat.getRecordReader();
        tupleFactory = TupleFactory.getInstance();
    }
    // ------------------------------------------

    @Override
    public InputFormat<?, ?> getInputFormat() {
        return theInputFormat;
    }
    // ------------------------------------------

    public final String getLogformat() {
        return logformat;
    }

    public final List<String> getRequestedFields() {
        return requestedFields;
    }
    // ------------------------------------------

    @Override
    public Tuple getNext()
            throws IOException {
        Tuple tuple = null;
        try {
            if (isBuildingExample) {
                isBuildingExample = false; // Terminate on the next iteration
                return tupleFactory.newTuple(createPigExample());
            }

            boolean notDone = reader.nextKeyValue();
            if (!notDone) {
                return null;
            }

            ParsedRecord value = reader.getCurrentValue();
            if (value != null) {
List