package org.etlunit.io.file;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
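/**
 * Flat file implementation of {@link DataFile}, backed by a {@link File} on disk.
 * The optional {@link DataFileSchema} determines which writer implementation is
 * used and supplies the order columns from which order keys are built.
 */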
class FlatFile implements DataFile
{
private final File source;
private DataFileSchema dataFileSchema;
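// Sentinel and separator values used when order keys are assembled.  They are
// composed of control characters that are unlikely to occur in real column data.
// KEY_PADDING (built below) is a 1024-character run of spaces, presumably used to
// pad key segments to a fixed width so that keys compare consistently.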
private static final String ORDER_KEY = "\0\0ORDER_KEY\0\0";
public static final String KEY_NULL = "\0\0\0~~~\0\0\0";
public static final String KEY_SEPARATOR = "\1\2\3";
public static final String KEY_PADDING;
private static final int KEY_PADDING_LENGTH;
static
{
StringBuilder stb = new StringBuilder();
for (int i = 0; i < 1024; i++)
{
stb.append(" ");
}
KEY_PADDING = stb.toString();
KEY_PADDING_LENGTH = KEY_PADDING.length();
}
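/**
 * Creates a flat file over the given source file.
 *
 * @param schema the schema describing the file layout; may be null, in which case
 *               writers fall back to the relational (schema-less) implementation
 * @param source the file on disk backing this data file
 */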
public FlatFile(DataFileSchema schema, File source)
{
dataFileSchema = schema;
this.source = source;
}
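/**
 * Returns a writer for this file using the default {@link PlainTextDataConverter}.
 */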
public DataFileWriter getWriter() throws IOException
{
return getWriter(new PlainTextDataConverter());
}
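/**
 * Returns a writer for this file using the supplied converter: an ordered writer
 * when a schema is present, otherwise a relational writer.
 */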
public DataFileWriter getWriter(DataConverter converter) throws IOException
{
// if there is a flat file schema, return an ordered writer
return dataFileSchema != null
? new OrderedDataFileWriterImpl(converter, this)
: new RelationalDataFileWriterImpl(converter, this);
}
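/**
 * Reads a single row from the reader, treating {@code rowDelimiter} (which may span
 * multiple characters) as the row terminator.  Returns {@code null} if the end of the
 * stream is reached before any characters are read; a final row without a trailing
 * delimiter is returned as-is.  Throws {@link IllegalStateException} if the stream
 * ends part-way through the delimiter.
 *
 * <p>A minimal usage sketch (the reader and file below are hypothetical):</p>
 * <pre>
 * BufferedReader reader = new BufferedReader(new FileReader(extractFile));
 * String line;
 * while ((line = FlatFile.readLine(reader, "\r\n")) != null)
 * {
 *     // handle one row of the extract
 * }
 * reader.close();
 * </pre>
 */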
public static String readLine(BufferedReader bread, String rowDelimiter) throws IOException
{
char[] rowDelimiterChars = rowDelimiter.toCharArray();
StringBuffer lineBuffer = new StringBuffer();
int delimOffset = 0;
int charsRead = 0;
while (true)
{
int i = bread.read();
if (i == -1)
{
if (charsRead == 0)
{
return null;
}
if (delimOffset == 0)
{
break;
}
else
{
throw new IllegalStateException("Line ended in the middle of the delimiter");
}
}
charsRead++;
char d = (char) i;
if (rowDelimiterChars[delimOffset] == d)
{
delimOffset++;
if (delimOffset == rowDelimiterChars.length)
{
break;
}
}
else
{
if (delimOffset > 0)
{
// a partial delimiter match turned out to be ordinary data; flush it
lineBuffer.append(rowDelimiterChars, 0, delimOffset);
}
// restart matching; the current character may itself begin the delimiter
delimOffset = rowDelimiterChars[0] == d ? 1 : 0;
if (delimOffset == 0)
{
lineBuffer.append(d);
}
}
}
return lineBuffer.toString();
}
/*
private void loadTypes(String typeHeaders)
{
String[] names = typeHeaders.split(dataFileSchema.getColumnDelimiter());
if (names.length != dataFileSchema.getColumns().size())
{
throw new IllegalArgumentException("Differing number of column types and column headers: types[" + names.length + "] headers[" + columnNames.size() + "]");
}
for (int i = 0; i < names.length; i++)
{
// look up the column type against static fields in java.sql.Types
columnTypes.put(columnNames.get(i), getTypeValue(names[i]));
}
}
*/
/*
private void loadHeader(String columnHeaders)
{
// informatica hack to drop leading #
if (columnHeaders.startsWith("#"))
{
columnHeaders = columnHeaders.substring(1);
}
String[] names = columnHeaders.split(dataFileSchema.getColumnDelimiter());
for (int i = 0; i < names.length; i++)
{
String n = names[i];
columnNames.add(n);
}
}
*/
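/**
 * Flat files maintain no index, so an empty map is always returned.
 */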
public Map getIndex()
{
return Collections.EMPTY_MAP;
}
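/**
 * Returns the file data for all columns; equivalent to {@code getFileData(null)}.
 */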
public FileData getFileData() throws IOException
{
return getFileData(null);
}
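/**
 * Returns the file data restricted to the named columns.  A {@code null} column list
 * presumably selects every column, as it does when order keys are built.
 */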
public FileData getFileData(List<String> columns) throws IOException
{
return new FileDataImpl(this, columns);
}
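/**
 * Builds the order key for a parsed row.  Each of the schema's order columns that also
 * appears in {@code columnList} (all columns when the list is null) contributes its
 * value to the key, with {@link #KEY_NULL} substituted for missing values.
 */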
protected static OrderKey addOrderKey(DataFileSchema schema, Map<String, Object> plineData, List<String> columnList)
{
if (columnList == null)
{
columnList = schema.getColumnNames();
}
// add the new row to the rowData, with a precomputed orderKey
OrderKey ok = new OrderKey();
for (String col : schema.getOrderColumnNames())
{
// only include columns referenced in the columnList
if (!columnList.contains(col))
{
continue;
}
Object key = plineData.get(col);
if (key == null)
{
key = KEY_NULL;
}
DataFileSchema.Column schCol = schema.getColumn(col);
ok.addColumn(schCol, key);
}
return ok;
}
public File getSource()
{
return source;
}
public DataFileSchema getDataFileSchema()
{
return dataFileSchema;
}
public void setFlatFileSchema(FlatFileSchema flatFileSchema)
{
this.dataFileSchema = flatFileSchema;
}
}