Skip to content
This repository has been archived by the owner on Jan 22, 2019. It is now read-only.

Commit

Permalink
Fix #93
Browse files Browse the repository at this point in the history
  • Loading branch information
cowtowncoder committed Dec 15, 2015
1 parent c0b2d33 commit 93dfe48
Show file tree
Hide file tree
Showing 5 changed files with 133 additions and 11 deletions.
6 changes: 6 additions & 0 deletions release-notes/CREDITS
Original file line number Diff line number Diff line change
Expand Up @@ -55,3 +55,9 @@ Oliver Röss (cloudyday@github)

* Reported #98: Escape char is not being escaped during serialization
(2.6.4)

Rob Baily (rob-baily@github)

* Contributed fix for #93: CSV mapper does not support Views or filtering correctly
for serialization
(2.6.5)
5 changes: 5 additions & 0 deletions release-notes/VERSION
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,11 @@ Project: jackson-dataformat-csv
=== Releases ===
------------------------------------------------------------------------

2.6.5 (not yet released)

#93: CSV mapper does not support Views or filtering correctly for serialization
(reported by triviski@github; fix contributed by Rob B)

2.6.4 (07-Dec-2015)

#90: Unexpected output with arrays starting with a null/empty element
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -793,16 +793,16 @@ public void writeNumber(String encodedValue) throws IOException
@Override
public void writeOmittedField(String fieldName) throws IOException
{
// basically combination of "writeFieldName()" and "writeNull()"
if (_writeContext.writeFieldName(fieldName) == JsonWriteContext.STATUS_EXPECT_VALUE) {
_reportError("Can not skip a field, expecting a value");
}
// Hmmh. Should we require a match? Actually, let's use logic: if field found,
// assumption is we must add a placeholder; if not, we can merely ignore
CsvSchema.Column col = _schema.column(fieldName);
if (col == null) {
// assumed to have been removed from schema too
} else {
// basically combination of "writeFieldName()" and "writeNull()"
if (_writeContext.writeFieldName(fieldName) == JsonWriteContext.STATUS_EXPECT_VALUE) {
_reportError("Can not skip a field, expecting a value");
}
// and all we do is just note index to use for following value write
_nextColumnByName = col.getIndex();
// We can basically copy what 'writeNull()' does...
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,11 +55,12 @@ public static class Company {
/**********************************************************
*/

private final CsvMapper MAPPER = mapperForCsv();

public void testWithJsonView() throws Exception
{
CsvMapper mapper = mapperForCsv();
CsvSchema schema = mapper.schemaFor(Bean.class).withLineSeparator("\n").withHeader();
String actual = mapper.writer(schema).withView(ViewB.class).writeValueAsString(new Bean());
CsvSchema schema = MAPPER.schemaFor(Bean.class).withLineSeparator("\n").withHeader();
String actual = MAPPER.writer(schema).withView(ViewB.class).writeValueAsString(new Bean());
// System.out.println(actual);

BufferedReader br = new BufferedReader(new StringReader(actual.trim()));
Expand All @@ -69,7 +70,7 @@ public void testWithJsonView() throws Exception

// plus read back?
final String INPUT = "a,aa,b\n5,6,7\n";
Bean result = mapper.readerFor(Bean.class).with(schema).withView(ViewB.class).readValue(INPUT);
Bean result = MAPPER.readerFor(Bean.class).with(schema).withView(ViewB.class).readValue(INPUT);
assertEquals("5", result.a);
// due to filtering, ought to use default
assertEquals("2", result.aa);
Expand All @@ -78,8 +79,7 @@ public void testWithJsonView() throws Exception

public void testWithJsonFilter() throws Exception
{
CsvMapper mapper = mapperForCsv();
CsvSchema schema = mapper.schemaFor(Company.class).withLineSeparator("\n").withHeader();
CsvSchema schema = MAPPER.schemaFor(Company.class).withLineSeparator("\n").withHeader();

SimpleFilterProvider filterProvider = new SimpleFilterProvider()
.addFilter(COMPANY_FILTER, FilterExceptFilter.filterOutAllExcept("name", "ticker"));
Expand All @@ -88,7 +88,7 @@ public void testWithJsonFilter() throws Exception
new Company(1, "name1", "ticker1")
, new Company(2, "name2", "ticker2")
, new Company(3, "name3", "ticker3"));
String actual = mapper.writer(filterProvider).with(schema).writeValueAsString(companies);
String actual = MAPPER.writer(filterProvider).with(schema).writeValueAsString(companies);
// System.out.println(actual);

BufferedReader br = new BufferedReader(new StringReader(actual.trim()));
Expand All @@ -98,4 +98,29 @@ public void testWithJsonFilter() throws Exception
assertEquals(",name3,ticker3", br.readLine());
assertNull(br.readLine());
}

public void testWithJsonFilterFieldSuppressed() throws Exception
{
final CsvSchema schema = new CsvSchema.Builder()
.addColumn("name")
.addColumn("ticker")
.setLineSeparator("\n").setUseHeader(true)
.build();

SimpleFilterProvider filterProvider = new SimpleFilterProvider()
.addFilter(COMPANY_FILTER, FilterExceptFilter.filterOutAllExcept("name", "ticker"));

List<Company> companies = Arrays.asList(
new Company(1, "name1", "ticker1")
, new Company(2, "name2", "ticker2")
, new Company(3, "name3", "ticker3"));
String actual = MAPPER.writer(filterProvider).with(schema).writeValueAsString(companies);

BufferedReader br = new BufferedReader(new StringReader(actual.trim()));
assertEquals("name,ticker", br.readLine());
assertEquals("name1,ticker1", br.readLine());
assertEquals("name2,ticker2", br.readLine());
assertEquals("name3,ticker3", br.readLine());
assertNull(br.readLine());
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
package com.fasterxml.jackson.dataformat.csv.ser;

import java.io.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.introspect.Annotated;
import com.fasterxml.jackson.databind.introspect.JacksonAnnotationIntrospector;
import com.fasterxml.jackson.databind.ser.FilterProvider;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
import com.fasterxml.jackson.dataformat.csv.*;

@SuppressWarnings("serial")
public class FilteringTest extends ModuleTestBase
{
    /**
     * Simple POJO with more public properties than the CSV schema declares,
     * so that serialization must filter some of them out.
     */
    static class Entity {
        public String name;
        public String unusedFieldBetween;
        public String description;
        public String unusedField;

        public Entity(String name, String description, String unusedField) {
            this.name = name;
            this.description = description;
            this.unusedField = unusedField;
        }
    }

    // Filter id returned for every type by CsvAnnotationIntrospector below
    private final static String CSV_FILTER_NAME = "csvFilter";

    /**
     * Helper that serializes objects as CSV, restricting the serialized
     * properties to exactly the columns declared by the given schema.
     */
    static class CsvJacksonWriter {
        /**
         * Writes {@code objects} to {@code outputStream} as CSV per
         * {@code csvSchema}, filtering out any bean properties that have
         * no matching column in the schema.
         *
         * @throws IOException if writing to the stream fails
         */
        public void writeObjects(OutputStream outputStream,
                List<?> objects, CsvSchema csvSchema) throws IOException
        {
            // Collect the set of column names the schema declares; only
            // these properties may be serialized.
            HashSet<String> columnNames = new HashSet<String>();
            for (CsvSchema.Column column : csvSchema) {
                columnNames.add(column.getName());
            }

            // was: "csvReponseFilter" (typo in local name)
            SimpleBeanPropertyFilter csvResponseFilter =
                    new SimpleBeanPropertyFilter.FilterExceptFilter(columnNames);
            FilterProvider filterProvider = new SimpleFilterProvider()
                    .addFilter(CSV_FILTER_NAME, csvResponseFilter);

            CsvMapper csvMapper = new CsvMapper();
            csvMapper.setFilterProvider(filterProvider);
            // Ensure every serialized type picks up CSV_FILTER_NAME
            csvMapper.setAnnotationIntrospector(new CsvAnnotationIntrospector());

            ObjectWriter objectWriter = csvMapper.writer(csvSchema);
            objectWriter.writeValue(outputStream, objects);
        }
    }

    /**
     * Introspector that assigns {@link #CSV_FILTER_NAME} as the filter id
     * for every annotated type, so the schema-based filter always applies.
     */
    static class CsvAnnotationIntrospector extends JacksonAnnotationIntrospector {
        @Override
        public Object findFilterId(Annotated a) {
            return CSV_FILTER_NAME;
        }
    }

    /**
     * Verifies that properties without a matching schema column
     * ("unusedFieldBetween", "unusedField") are omitted from CSV output.
     */
    public void testWriteObjects() throws Exception {
        List<Entity> entities = new ArrayList<Entity>();
        entities.add(new Entity("Test entity 1", "Test description 1", "Test unused field"));
        entities.add(new Entity("Test entity 2", "Test description 2", "Test unused field"));

        CsvSchema csvSchema = CsvSchema.builder()
                .addColumn("name")
                .addColumn("description")
                .setUseHeader(true)
                .build()
                .withLineSeparator("\r\n");

        CsvJacksonWriter csvWriter = new CsvJacksonWriter();
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        csvWriter.writeObjects(outputStream, entities, csvSchema);

        // StringBuilder: no shared mutation, so no need for StringBuffer's locking
        StringBuilder expectedResults = new StringBuilder();
        expectedResults.append("name,description\r\n");
        expectedResults.append("\"Test entity 1\",\"Test description 1\"\r\n");
        expectedResults.append("\"Test entity 2\",\"Test description 2\"\r\n");

        assertEquals(expectedResults.toString(), outputStream.toString());
    }
}

0 comments on commit 93dfe48

Please sign in to comment.