Skip to content
This repository has been archived by the owner on Jan 22, 2019. It is now read-only.

Commit

Permalink
Add a unit test, minor renaming of internal constants
Browse files Browse the repository at this point in the history
  • Loading branch information
cowtowncoder committed Jun 9, 2015
1 parent ff1626c commit 18dd546
Show file tree
Hide file tree
Showing 2 changed files with 53 additions and 26 deletions.
52 changes: 26 additions & 26 deletions src/main/java/com/fasterxml/jackson/dataformat/csv/CsvSchema.java
Original file line number Diff line number Diff line change
Expand Up @@ -78,11 +78,11 @@ public class CsvSchema
/**********************************************************************
*/

protected final static int FEATURE_USE_HEADER = 0x0001;
protected final static int FEATURE_SKIP_FIRST_DATA_ROW = 0x0002;
protected final static int FEATURE_ALLOW_COMMENTS = 0x0004;
protected final static int ENCODING_FEATURE_USE_HEADER = 0x0001;
protected final static int ENCODING_FEATURE_SKIP_FIRST_DATA_ROW = 0x0002;
protected final static int ENCODING_FEATURE_ALLOW_COMMENTS = 0x0004;

protected final static int DEFAULT_FEATURES = 0;
protected final static int DEFAULT_ENCODING_FEATURES = 0;

/*
/**********************************************************************
Expand Down Expand Up @@ -330,7 +330,7 @@ public static class Builder
*
* @since 2.5
*/
protected int _features = DEFAULT_FEATURES;
protected int _encodingFeatures = DEFAULT_ENCODING_FEATURES;

protected char _columnSeparator = DEFAULT_COLUMN_SEPARATOR;

Expand All @@ -349,7 +349,7 @@ public static class Builder
* @since 2.5
*/
protected char[] _nullValue = DEFAULT_NULL_VALUE;

public Builder() { }

/**
Expand All @@ -361,7 +361,7 @@ public Builder(CsvSchema src)
for (Column col : src._columns) {
_columns.add(col);
}
_features = src._features;
_encodingFeatures = src._features;
_columnSeparator = src._columnSeparator;
_arrayElementSeparator = src._arrayElementSeparator;
_quoteChar = src._quoteChar;
Expand Down Expand Up @@ -441,7 +441,7 @@ public Iterator<Column> getColumns() {
* used for reading and writing or not.
*/
public Builder setUseHeader(boolean b) {
_feature(FEATURE_USE_HEADER, b);
_feature(ENCODING_FEATURE_USE_HEADER, b);
return this;
}

Expand All @@ -451,7 +451,7 @@ public Builder setUseHeader(boolean b) {
* should be skipped in its entirety.
*/
public Builder setSkipFirstDataRow(boolean b) {
_feature(FEATURE_SKIP_FIRST_DATA_ROW, b);
_feature(ENCODING_FEATURE_SKIP_FIRST_DATA_ROW, b);
return this;
}

Expand All @@ -463,12 +463,12 @@ public Builder setSkipFirstDataRow(boolean b) {
* @since 2.5
*/
public Builder setAllowComments(boolean b) {
_feature(FEATURE_ALLOW_COMMENTS, b);
_feature(ENCODING_FEATURE_ALLOW_COMMENTS, b);
return this;
}

protected final void _feature(int feature, boolean state) {
_features = state ? (_features | feature) : (_features & ~feature);
_encodingFeatures = state ? (_encodingFeatures | feature) : (_encodingFeatures & ~feature);
}

/**
Expand Down Expand Up @@ -556,7 +556,7 @@ public Builder setNullValue(char[] nvl) {
public CsvSchema build()
{
Column[] cols = _columns.toArray(new Column[_columns.size()]);
return new CsvSchema(cols, _features,
return new CsvSchema(cols, _encodingFeatures,
_columnSeparator, _quoteChar, _escapeChar,
_lineSeparator, _arrayElementSeparator,
_nullValue);
Expand Down Expand Up @@ -588,7 +588,7 @@ protected void _checkIndex(int index) {
*
* @since 2.5
*/
protected int _features = DEFAULT_FEATURES;
protected int _features = DEFAULT_ENCODING_FEATURES;

protected final char _columnSeparator;

Expand All @@ -612,7 +612,7 @@ public CsvSchema(Column[] columns,
char[] lineSeparator)
{
this(columns,
(useHeader ? FEATURE_USE_HEADER : 0) + (skipFirstDataRow ? FEATURE_SKIP_FIRST_DATA_ROW : 0),
(useHeader ? ENCODING_FEATURE_USE_HEADER : 0) + (skipFirstDataRow ? ENCODING_FEATURE_SKIP_FIRST_DATA_ROW : 0),
columnSeparator, quoteChar, escapeChar, lineSeparator,
DEFAULT_ARRAY_ELEMENT_SEPARATOR, DEFAULT_NULL_VALUE);
}
Expand Down Expand Up @@ -760,27 +760,27 @@ public Builder rebuild() {
*/

public CsvSchema withUseHeader(boolean state) {
return _withFeature(FEATURE_USE_HEADER, state);
return _withFeature(ENCODING_FEATURE_USE_HEADER, state);
}

/**
* Helper method for constructing and returning schema instance that
* is similar to this one, except that it will be using header line.
*/
public CsvSchema withHeader() {
return _withFeature(FEATURE_USE_HEADER, true);
return _withFeature(ENCODING_FEATURE_USE_HEADER, true);
}

/**
* Helper method for constructing and returning schema instance that
* is similar to this one, except that it will not be using header line.
*/
public CsvSchema withoutHeader() {
return _withFeature(FEATURE_USE_HEADER, false);
return _withFeature(ENCODING_FEATURE_USE_HEADER, false);
}

public CsvSchema withSkipFirstDataRow(boolean state) {
return _withFeature(FEATURE_SKIP_FIRST_DATA_ROW, state);
return _withFeature(ENCODING_FEATURE_SKIP_FIRST_DATA_ROW, state);
}

/**
Expand All @@ -790,7 +790,7 @@ public CsvSchema withSkipFirstDataRow(boolean state) {
* @since 2.5
*/
public CsvSchema withAllowComments(boolean state) {
return _withFeature(FEATURE_ALLOW_COMMENTS, state);
return _withFeature(ENCODING_FEATURE_ALLOW_COMMENTS, state);
}

/**
Expand All @@ -800,7 +800,7 @@ public CsvSchema withAllowComments(boolean state) {
* @since 2.5
*/
public CsvSchema withComments() {
return _withFeature(FEATURE_ALLOW_COMMENTS, true);
return _withFeature(ENCODING_FEATURE_ALLOW_COMMENTS, true);
}

/**
Expand All @@ -810,7 +810,7 @@ public CsvSchema withComments() {
* @since 2.5
*/
public CsvSchema withoutComments() {
return _withFeature(FEATURE_ALLOW_COMMENTS, false);
return _withFeature(ENCODING_FEATURE_ALLOW_COMMENTS, false);
}

protected CsvSchema _withFeature(int feature, boolean state) {
Expand Down Expand Up @@ -956,21 +956,21 @@ public String getSchemaType() {
/**********************************************************************
*/

public boolean usesHeader() { return (_features & FEATURE_USE_HEADER) != 0; }
public boolean skipsFirstDataRow() { return (_features & FEATURE_SKIP_FIRST_DATA_ROW) != 0; }
public boolean allowsComments() { return (_features & FEATURE_ALLOW_COMMENTS) != 0; }
public boolean usesHeader() { return (_features & ENCODING_FEATURE_USE_HEADER) != 0; }
public boolean skipsFirstDataRow() { return (_features & ENCODING_FEATURE_SKIP_FIRST_DATA_ROW) != 0; }
public boolean allowsComments() { return (_features & ENCODING_FEATURE_ALLOW_COMMENTS) != 0; }

/**
* @deprecated Use {@link #usesHeader()} instead
*/
@Deprecated // since 2.5
public boolean useHeader() { return (_features & FEATURE_USE_HEADER) != 0; }
public boolean useHeader() { return (_features & ENCODING_FEATURE_USE_HEADER) != 0; }

/**
* @deprecated Use {@link #skipsFirstDataRow()} instead
*/
@Deprecated // since 2.5
public boolean skipFirstDataRow() { return (_features & FEATURE_SKIP_FIRST_DATA_ROW) != 0; }
public boolean skipFirstDataRow() { return (_features & ENCODING_FEATURE_SKIP_FIRST_DATA_ROW) != 0; }

public char getColumnSeparator() { return _columnSeparator; }
public int getArrayElementSeparator() { return _arrayElementSeparator; }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,33 @@ public void testUntypedAsStringArray() throws Exception
assertEquals("", row[1]);
}

public void testUntypedViaReadValues() throws Exception
{
CsvMapper mapper = mapperForCsv();
mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
MappingIterator<String[]> it = mapper.readerFor(String[].class)
.readValues("1,\"xyz\"\n\ntrue,\n");
assertTrue(it.hasNextValue());
String[] row = it.nextValue();
assertEquals(2, row.length);
assertEquals("1",row[0]);
assertEquals("xyz", row[1]);

assertTrue(it.hasNextValue());
row = it.nextValue();
assertEquals(1, row.length);
assertEquals("", row[0]);

assertTrue(it.hasNextValue());
row = it.nextValue();
assertEquals(2, row.length);
assertEquals("true", row[0]);
assertEquals("", row[1]);

assertFalse(it.hasNextValue());
it.close();
}

public void testUntypedWithHeaderAsMap() throws Exception
{
CsvMapper mapper = mapperForCsv();
Expand Down

0 comments on commit 18dd546

Please sign in to comment.