=== modified file 'dhis-2/dhis-dxf2/pom.xml'
--- dhis-2/dhis-dxf2/pom.xml	2013-05-31 08:27:38 +0000
+++ dhis-2/dhis-dxf2/pom.xml	2013-06-26 15:48:29 +0000
@@ -45,10 +45,9 @@
       <artifactId>dhis-support-xml</artifactId>
     </dependency>
     <dependency>
-      <groupId>net.sf.opencsv</groupId>
-      <artifactId>opencsv</artifactId>
+      <groupId>net.sourceforge.javacsv</groupId>
+      <artifactId>javacsv</artifactId>
     </dependency>
-
     <dependency>
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-core</artifactId>

=== modified file 'dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalue/StreamingCsvDataValue.java'
--- dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalue/StreamingCsvDataValue.java	2013-05-23 10:27:27 +0000
+++ dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalue/StreamingCsvDataValue.java	2013-06-26 15:48:29 +0000
@@ -29,20 +29,21 @@
 
 import static org.hisp.dhis.system.util.TextUtils.valueOf;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import au.com.bytecode.opencsv.CSVWriter;
+import com.csvreader.CsvWriter;
 
 public class StreamingCsvDataValue
     extends DataValue
 {
-    private CSVWriter writer;
+    private CsvWriter writer;
 
     private List<String> values;
 
-    public StreamingCsvDataValue( CSVWriter writer )
+    public StreamingCsvDataValue( CsvWriter writer )
     {
         this.writer = writer;
         this.values = new ArrayList<String>();
@@ -183,7 +184,14 @@
     {
         String[] row = new String[values.size()];
 
-        writer.writeNext( values.toArray( row ) );
+        try
+        {
+            writer.writeRecord( values.toArray( row ) );
+        }
+        catch ( IOException ex )
+        {
+            throw new RuntimeException( "Failed to write CSV record", ex );
+        }
     }
 
     public static String[] getHeaders()

=== modified file 'dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java'
--- dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java	2013-05-25 06:19:23 +0000
+++ dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetService.java	2013-06-26 15:48:29 +0000
@@ -27,17 +27,16 @@
  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-import org.hisp.dhis.dxf2.importsummary.ImportSummary;
-import org.hisp.dhis.dxf2.metadata.ImportOptions;
-import org.hisp.dhis.scheduling.TaskId;
-
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.io.Reader;
 import java.io.Writer;
 import java.util.Date;
 import java.util.Set;
 
+import org.hisp.dhis.dxf2.importsummary.ImportSummary;
+import org.hisp.dhis.dxf2.metadata.ImportOptions;
+import org.hisp.dhis.scheduling.TaskId;
+
 public interface DataValueSetService
 {
     void writeDataValueSet( String dataSet, String period, String orgUnit, OutputStream out );
@@ -58,7 +57,7 @@
 
     ImportSummary saveDataValueSetJson( InputStream in, ImportOptions importOptions, TaskId taskId );
 
-    ImportSummary saveDataValueSetCsv( Reader reader, ImportOptions importOptions, TaskId id );
+    ImportSummary saveDataValueSetCsv( InputStream in, ImportOptions importOptions, TaskId id );
 
     ImportSummary saveDataValueSetPdf( InputStream in, ImportOptions importOptions, TaskId id );
 }

=== modified file 'dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java'
--- dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java	2013-06-26 12:31:29 +0000
+++ dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/DefaultDataValueSetService.java	2013-06-26 15:48:29 +0000
@@ -27,7 +27,25 @@
  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-import au.com.bytecode.opencsv.CSVReader;
+import static org.hisp.dhis.importexport.ImportStrategy.NEW;
+import static org.hisp.dhis.importexport.ImportStrategy.NEW_AND_UPDATES;
+import static org.hisp.dhis.importexport.ImportStrategy.UPDATES;
+import static org.hisp.dhis.system.notification.NotificationLevel.ERROR;
+import static org.hisp.dhis.system.notification.NotificationLevel.INFO;
+import static org.hisp.dhis.system.util.ConversionUtils.wrap;
+import static org.hisp.dhis.system.util.DateUtils.getDefaultDate;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.nio.charset.Charset;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
 import org.amplecode.quick.BatchHandler;
 import org.amplecode.quick.BatchHandlerFactory;
 import org.amplecode.staxwax.factory.XMLFactory;
@@ -65,22 +83,7 @@
 import org.hisp.dhis.user.CurrentUserService;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Reader;
-import java.io.Writer;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import static org.hisp.dhis.importexport.ImportStrategy.*;
-import static org.hisp.dhis.system.notification.NotificationLevel.ERROR;
-import static org.hisp.dhis.system.notification.NotificationLevel.INFO;
-import static org.hisp.dhis.system.util.ConversionUtils.wrap;
-import static org.hisp.dhis.system.util.DateUtils.getDefaultDate;
+import com.csvreader.CsvReader;
 
 /**
  * @author Lars Helge Overland
@@ -124,7 +127,7 @@
 
     @Autowired
     private Notifier notifier;
-    
+
    //--------------------------------------------------------------------------
    // DataValueSet implementation
    //--------------------------------------------------------------------------
@@ -223,11 +226,11 @@
        }
    }
 
-    public ImportSummary saveDataValueSetCsv( Reader reader, ImportOptions importOptions, TaskId id )
+    public ImportSummary saveDataValueSetCsv( InputStream in, ImportOptions importOptions, TaskId id )
    {
        try
        {
-            DataValueSet dataValueSet = new StreamingCsvDataValueSet( new CSVReader( reader ) );
+            DataValueSet dataValueSet = new StreamingCsvDataValueSet( new CsvReader( in, Charset.forName( "UTF-8" ) ) );
            return saveDataValueSet( importOptions, id, dataValueSet );
        }
        catch ( RuntimeException ex )
@@ -317,6 +320,8 @@
 
        DataElementCategoryOptionCombo fallbackCategoryOptionCombo = categoryService.getDefaultDataElementCategoryOptionCombo();
 
+        String currentUser = currentUserService.getCurrentUsername();
+
        BatchHandler<DataValue> batchHandler = batchHandlerFactory.createBatchHandler( DataValueBatchHandler.class ).init();
 
        int importCount = 0;
@@ -374,7 +379,7 @@
                summary.getConflicts().add( new ImportConflict( DataValue.class.getSimpleName(), valueValid ) );
                continue;
            }
-            
+
            String commentValid = ValidationUtils.commentIsValid( dataValue.getComment() );
 
            if ( commentValid != null )
@@ -401,7 +406,7 @@
 
            if ( dataValue.getStoredBy() == null || dataValue.getStoredBy().trim().isEmpty() )
            {
-                internalValue.setStoredBy( currentUserService.getCurrentUsername() );
+                internalValue.setStoredBy( currentUser );
            }
            else
            {
@@ -448,6 +453,8 @@
 
        notifier.notify( id, INFO, "Import done", true ).addTaskSummary( id, summary );
 
+        dataValueSet.close();
+
        return summary;
    }
=== modified file 'dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java'
--- dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java	2012-04-17 21:38:26 +0000
+++ dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/SpringDataValueSetStore.java	2013-06-26 15:48:29 +0000
@@ -49,11 +49,13 @@
 import org.springframework.jdbc.core.JdbcTemplate;
 import org.springframework.jdbc.support.rowset.SqlRowSet;
 
-import au.com.bytecode.opencsv.CSVWriter;
+import com.csvreader.CsvWriter;
 
 public class SpringDataValueSetStore
     implements DataValueSetStore
 {
+    private static final char CSV_DELIM = ',';
+
     @Autowired
     private JdbcTemplate jdbcTemplate;
@@ -73,7 +75,7 @@
 
     public void writeDataValueSetCsv( Set<DataElement> dataElements, Set<Period> periods, Set<OrganisationUnit> orgUnits, Writer writer )
     {
-        DataValueSet dataValueSet = new StreamingCsvDataValueSet( new CSVWriter( writer ) );
+        DataValueSet dataValueSet = new StreamingCsvDataValueSet( new CsvWriter( writer, CSV_DELIM ) );
 
         writeDataValueSet( null, null, null, null, dataElements, periods, orgUnits, dataValueSet );
     }

=== modified file 'dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/StreamingCsvDataValueSet.java'
--- dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/StreamingCsvDataValueSet.java	2012-04-15 14:52:54 +0000
+++ dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/StreamingCsvDataValueSet.java	2013-06-26 15:48:29 +0000
@@ -32,36 +32,33 @@
 import org.hisp.dhis.dxf2.datavalue.DataValue;
 import org.hisp.dhis.dxf2.datavalue.StreamingCsvDataValue;
 
-import au.com.bytecode.opencsv.CSVReader;
-import au.com.bytecode.opencsv.CSVWriter;
+import com.csvreader.CsvReader;
+import com.csvreader.CsvWriter;
 
 public class StreamingCsvDataValueSet
     extends DataValueSet
 {
-    private CSVWriter writer;
-
-    private CSVReader reader;
-
-    private String[] nextRow;
-
-    public StreamingCsvDataValueSet( CSVWriter writer )
+    private CsvWriter writer;
+
+    private CsvReader reader;
+
+    public StreamingCsvDataValueSet( CsvWriter writer )
     {
-        this.writer = writer;
-        this.writer.writeNext( StreamingCsvDataValue.getHeaders() ); // Write headers
+        this.writer = writer;
+
+        try
+        {
+            this.writer.writeRecord( StreamingCsvDataValue.getHeaders() ); // Write headers
+        }
+        catch ( IOException ex )
+        {
+            throw new RuntimeException( "Failed to write CSV headers", ex );
+        }
     }
 
-    public StreamingCsvDataValueSet( CSVReader reader )
+    public StreamingCsvDataValueSet( CsvReader reader )
    {
        this.reader = reader;
-
-        try
-        {
-            this.reader.readNext(); // Skip first row / headers
-        }
-        catch ( IOException ex )
-        {
-            throw new RuntimeException( ex );
-        }
    }
 
    @Override
@@ -69,7 +66,7 @@
    {
        try
        {
-            return ( nextRow = reader.readNext() ) != null;
+            return reader.readRecord();
        }
        catch ( IOException ex )
        {
@@ -80,7 +77,14 @@
 
    @Override
    public DataValue getNextDataValue()
    {
-        return new StreamingCsvDataValue( nextRow );
+        try
+        {
+            return new StreamingCsvDataValue( reader.getValues() );
+        }
+        catch ( IOException ex )
+        {
+            throw new RuntimeException( "Failed to get CSV values", ex );
+        }
    }
 
@@ -92,13 +96,14 @@
    @Override
    public void close()
    {
-        try
+        if ( writer != null )
        {
            writer.close();
        }
-        catch ( IOException ex )
+
+        if ( reader != null )
        {
-            throw new RuntimeException( ex );
+            reader.close();
        }
    }
 }
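For reference, the JavaCSV calls the patched streaming classes above rely on behave as follows: CsvWriter.writeRecord() takes the place of opencsv's writeNext() and throws a checked IOException, while CsvReader.readRecord() advances to the next row and getValues() returns its columns, replacing readNext(). Below is a minimal round-trip sketch under those assumptions; the CsvRoundTrip class name and the sample column names and values are illustrative only and are not part of this patch.

import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;

import com.csvreader.CsvReader;
import com.csvreader.CsvWriter;

public class CsvRoundTrip
{
    public static void main( String[] args ) throws IOException
    {
        // Write a header row and one data row, mirroring how StreamingCsvDataValueSet
        // writes headers in its constructor and StreamingCsvDataValue writes each record.
        StringWriter out = new StringWriter();
        CsvWriter writer = new CsvWriter( out, ',' );
        writer.writeRecord( new String[] { "dataelement", "period", "orgunit", "value" } );
        writer.writeRecord( new String[] { "DE_A", "201301", "OU_A", "10" } );
        writer.close();

        // Read it back: readHeaders() consumes the header row, readRecord() advances
        // to the next row and getValues() returns that row's columns.
        CsvReader reader = new CsvReader( new StringReader( out.toString() ) );
        reader.readHeaders();

        while ( reader.readRecord() )
        {
            String[] row = reader.getValues();
            System.out.println( row[0] + " = " + row[3] );
        }

        reader.close();
    }
}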
=== modified file 'dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/StreamingDataValueSet.java'
--- dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/StreamingDataValueSet.java	2012-04-15 20:48:08 +0000
+++ dhis-2/dhis-dxf2/src/main/java/org/hisp/dhis/dxf2/datavalueset/StreamingDataValueSet.java	2013-06-26 15:48:29 +0000
@@ -187,7 +187,10 @@
 
     public void close()
     {
-        writer.closeElement();
-        writer.closeDocument();
+        if ( writer != null )
+        {
+            writer.closeElement();
+            writer.closeDocument();
+        }
     }
 }

=== modified file 'dhis-2/dhis-dxf2/src/test/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetServiceTest.java'
--- dhis-2/dhis-dxf2/src/test/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetServiceTest.java	2013-03-15 16:33:34 +0000
+++ dhis-2/dhis-dxf2/src/test/java/org/hisp/dhis/dxf2/datavalueset/DataValueSetServiceTest.java	2013-06-26 15:48:29 +0000
@@ -27,15 +27,14 @@
  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
 import static org.hisp.dhis.common.IdentifiableObject.IdentifiableProperty.CODE;
 import static org.hisp.dhis.common.IdentifiableObject.IdentifiableProperty.UID;
 import static org.hisp.dhis.importexport.ImportStrategy.NEW_AND_UPDATES;
 import static org.hisp.dhis.importexport.ImportStrategy.UPDATES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
-import java.io.InputStreamReader;
 import java.util.Collection;
 
 import org.hisp.dhis.DhisTest;
@@ -190,7 +189,7 @@
         throws Exception
     {
         ImportSummary summary = dataValueSetService.saveDataValueSetCsv(
-            new InputStreamReader( new ClassPathResource( "datavalueset/dataValueSetB.csv" ).getInputStream() ), null, null );
+            new ClassPathResource( "datavalueset/dataValueSetB.csv" ).getInputStream(), null, null );
 
         assertImportDataValues( summary );
     }

=== modified file 'dhis-2/dhis-web/dhis-web-commons-resources/src/main/webapp/WEB-INF/classes/log4j.properties'
--- dhis-2/dhis-web/dhis-web-commons-resources/src/main/webapp/WEB-INF/classes/log4j.properties	2012-12-06 20:06:23 +0000
+++ dhis-2/dhis-web/dhis-web-commons-resources/src/main/webapp/WEB-INF/classes/log4j.properties	2013-06-26 15:48:29 +0000
@@ -31,3 +31,6 @@
 # Spring framework logging level
 log4j.logger.org.springframework = WARN
 log4j.logger.org.springframework.web = WARN
+
+# Quick logging level
+# log4j.logger.org.amplecode.quick = DEBUG
\ No newline at end of file

=== modified file 'dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ImportDataValueAction.java'
--- dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ImportDataValueAction.java	2013-03-15 16:33:34 +0000
+++ dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/datavalue/ImportDataValueAction.java	2013-06-26 15:48:29 +0000
@@ -27,14 +27,9 @@
  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-import static org.hisp.dhis.importexport.action.util.ImportDataValueTask.FORMAT_CSV;
-
-import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -150,13 +145,11 @@
 
         in = StreamUtils.wrapAndCheckCompressionFormat( in );
 
-        Reader reader = FORMAT_CSV.equals( importFormat ) ? new BufferedReader( new InputStreamReader( in ) ) : null;
-
         ImportOptions options = new ImportOptions( dataElementIdScheme, orgUnitIdScheme, dryRun, strategy, skipExistingCheck );
 
         log.info( options );
 
-        scheduler.executeTask( new ImportDataValueTask( dataValueSetService, in, reader, options, taskId, importFormat ) );
+        scheduler.executeTask( new ImportDataValueTask( dataValueSetService, in, options, taskId, importFormat ) );
 
         return SUCCESS;
     }

=== modified file 'dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/util/ImportDataValueTask.java'
--- dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/util/ImportDataValueTask.java	2013-05-25 06:19:23 +0000
+++ dhis-2/dhis-web/dhis-web-importexport/src/main/java/org/hisp/dhis/importexport/action/util/ImportDataValueTask.java	2013-06-26 15:48:29 +0000
@@ -27,13 +27,12 @@
  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
+import java.io.InputStream;
+
 import org.hisp.dhis.dxf2.datavalueset.DataValueSetService;
 import org.hisp.dhis.dxf2.metadata.ImportOptions;
 import org.hisp.dhis.scheduling.TaskId;
 
-import java.io.InputStream;
-import java.io.Reader;
-
 /**
  * @author Lars Helge Overland
  */
@@ -46,16 +45,14 @@
     private DataValueSetService dataValueSetService;
 
     private InputStream in;
-    private Reader reader;
     private ImportOptions options;
     private TaskId taskId;
     private String format;
 
-    public ImportDataValueTask( DataValueSetService dataValueSetService, InputStream in, Reader reader, ImportOptions options, TaskId taskId, String format )
+    public ImportDataValueTask( DataValueSetService dataValueSetService, InputStream in, ImportOptions options, TaskId taskId, String format )
     {
         this.dataValueSetService = dataValueSetService;
         this.in = in;
-        this.reader = reader;
         this.options = options;
         this.taskId = taskId;
         this.format = format;
@@ -66,7 +63,7 @@
     {
         if ( FORMAT_CSV.equals( format ) )
         {
-            dataValueSetService.saveDataValueSetCsv( reader, options, taskId );
+            dataValueSetService.saveDataValueSetCsv( in, options, taskId );
         }
         else if ( FORMAT_PDF.equals( format ) )
         {

=== modified file 'dhis-2/pom.xml'
--- dhis-2/pom.xml	2013-05-31 09:14:39 +0000
+++ dhis-2/pom.xml	2013-06-26 15:48:29 +0000
@@ -630,10 +630,11 @@
         <version>1.2</version>
       </dependency>
       <dependency>
-        <groupId>net.sf.opencsv</groupId>
-        <artifactId>opencsv</artifactId>
-        <version>2.3</version>
+        <groupId>net.sourceforge.javacsv</groupId>
+        <artifactId>javacsv</artifactId>
+        <version>2.0</version>
       </dependency>
+
       <dependency>
         <groupId>com.h2database</groupId>
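On the caller side, CSV import now passes the raw InputStream straight through and the service builds its own CsvReader with an explicit UTF-8 charset, so the InputStreamReader and BufferedReader wrapping removed above is no longer needed. A rough usage sketch follows; the surrounding class and file path are illustrative, and the null ImportOptions and TaskId arguments are passed the same way the updated test does.

import java.io.FileInputStream;
import java.io.InputStream;

import org.hisp.dhis.dxf2.datavalueset.DataValueSetService;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;

public class CsvImportCaller
{
    private DataValueSetService dataValueSetService;

    public ImportSummary importCsvFile( String path ) throws Exception
    {
        // Pass the stream directly; charset handling happens inside
        // saveDataValueSetCsv, which wraps the stream in a UTF-8 CsvReader.
        InputStream in = new FileInputStream( path );

        try
        {
            return dataValueSetService.saveDataValueSetCsv( in, null, null );
        }
        finally
        {
            in.close();
        }
    }
}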