Skip to content

Commit

Permalink
Merge pull request #54 from companieshouse/IDVA5-1808-csv-header
Browse files Browse the repository at this point in the history
Make CSV headers case-insensitive
  • Loading branch information
swalke-ch authored Feb 18, 2025
2 parents 0e274d9 + fa31b37 commit c75701f
Show file tree
Hide file tree
Showing 7 changed files with 25 additions and 24 deletions.
13 changes: 0 additions & 13 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
<private-api-sdk-java.version>4.0.248</private-api-sdk-java.version>
<api-sdk-manager-java-library.version>3.0.6</api-sdk-manager-java-library.version>
<api-security-java.version>2.0.7</api-security-java.version>
<org.mapstruct.version>1.6.0.RC1</org.mapstruct.version>
<spring-boot-dependencies.version>3.4.0</spring-boot-dependencies.version>
<spring-boot-maven-plugin.version>3.3.5</spring-boot-maven-plugin.version>
<dependency-check-plugin.version>8.3.1</dependency-check-plugin.version>
Expand Down Expand Up @@ -103,11 +102,6 @@
<artifactId>api-security-java</artifactId>
<version>${api-security-java.version}</version>
</dependency>
<dependency>
<groupId>org.mapstruct</groupId>
<artifactId>mapstruct</artifactId>
<version>${org.mapstruct.version}</version>
</dependency>
<dependency>
<groupId>uk.gov.companieshouse</groupId>
<artifactId>api-helper-java</artifactId>
Expand Down Expand Up @@ -223,13 +217,6 @@
<source>21</source>
<target>21</target>
<parameters>true</parameters>
<annotationProcessorPaths>
<path>
<groupId>org.mapstruct</groupId>
<artifactId>mapstruct-processor</artifactId>
<version>${org.mapstruct.version}</version>
</path>
</annotationProcessorPaths>
</configuration>
</plugin>
</plugins>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import static uk.gov.companieshouse.filevalidationservice.utils.Constants.VALID_HEADERS;



@Component
public class CsvProcessor {

Expand Down Expand Up @@ -76,7 +75,13 @@ public void parseRecords(byte[] bytesToParse) {


/**
 * Validates that the given header record matches the expected CSV column
 * headers, ignoring case, surrounding whitespace and double quotes.
 * Each incoming header is normalised (quotes removed, trimmed, lower-cased)
 * and the resulting list is compared, in order, against VALID_HEADERS
 * (which is declared in lower case in Constants).
 *
 * @param csvRecord the first record of the CSV file, containing the headers
 * @throws CSVDataValidationException if the normalised headers do not equal
 *         VALID_HEADERS exactly (same values, same order, same count)
 */
private void isValidFieldHeaders(CSVRecord csvRecord) {
    List<String> actualHeaders = csvRecord.stream()
            // Normalise each header: strip double quotes, trim whitespace,
            // then lower-case with a fixed locale. Locale.ROOT is required:
            // the default-locale toLowerCase() is locale-sensitive (e.g. in
            // a Turkish locale "ID" lower-cases to "ıd", not "id"), which
            // would make valid files fail validation on some JVMs.
            .map(header -> header.replace("\"", "")
                    .trim()
                    .toLowerCase(java.util.Locale.ROOT))
            .toList();
    if (!actualHeaders.equals(VALID_HEADERS)) {
        throw new CSVDataValidationException("Headers did not match expected headers");
    }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
import java.util.List;

public class Constants {
public static final List<String> VALID_HEADERS = List.of("Unique ID", "Registered Company Name", "Company Number", "Trading Name", "First Name", "Last Name", "Date of Birth",
"Property Name or Number","Address Line 1","Address Line 2","City or Town","Postcode","Country");
public static final List<String> VALID_HEADERS = List.of("unique id", "registered company name", "company number", "trading name", "first name", "last name", "date of birth",
"property name or number","address line 1","address line 2","city or town","postcode","country");
public static final Integer NUMBER_OF_COLUMNS = 13 ;
public static final int INDEX_OF_UNIQUE_ID = 0;
public static final int INDEX_OF_COMPANY_NAME = 1;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,9 @@ void csvRecordWithTooManyColumnsMustFailToParse() throws IOException {
@ValueSource( strings = {
"src/test/resources/oneGoodRecord.csv",
"src/test/resources/noUniqueId.csv",
"src/test/resources/good_multiple_records.csv"
"src/test/resources/good_multiple_records.csv",
"src/test/resources/good_multiple_records_with_case_mismatch_column_names.csv",
"src/test/resources/good_multiple_records_with_column_names_in_quotes.csv"
})
void validRecordsMustParse(String filePath) throws IOException{
File file = new File(filePath);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
Unique ID,Registered Company Name,Company number,Trading name,First Name,Last Name,Date of Birth,Property Name or Number,Address Line 1,Address Line 2,City or Town,Postcode,Country
XGML00000190000,,,David's Trading,Jane,Brown,,46 Pine Lane,Maple Drive,,Sheffield,L1 4DD,United Kingdom
XGML00000190001,Charlie Taylor LP,12345678,Charlie's Trading,David,Doe,11111991,35 Maple Drive,Pine Lane,,Manchester,S1 5EE,United Kingdom
XGML00000190002,Emily Taylor Plc,12345678,Emily's Trading,Charlie,Brown,11111991,,Pine Lane,,Leeds,S1 5EE,United Kingdom
XGML00000190003,Jane Johnson LLP,12345678,Jane's Trading,Jane,Johnson,,21 Maple Drive,High Street,,Bristol,,United Kingdom
XGML00000190004,John Doe LP,,John's Trading,Alice,Doe,11111991,,Pine Lane,,Leeds,M1 2BB,United Kingdom
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"unique id","registered company name","company number","trading name","first name","last name","date of birth", "property name or number" , "address line 1","address line 2","city or town","postcode","country"
XGML00000190000,,,David's Trading,Jane,Brown,,46 Pine Lane,Maple Drive,,Sheffield,L1 4DD,United Kingdom
XGML00000190001,Charlie Taylor LP,12345678,Charlie's Trading,David,Doe,11111991,35 Maple Drive,Pine Lane,,Manchester,S1 5EE,United Kingdom
XGML00000190002,Emily Taylor Plc,12345678,Emily's Trading,Charlie,Brown,11111991,,Pine Lane,,Leeds,S1 5EE,United Kingdom
XGML00000190003,Jane Johnson LLP,12345678,Jane's Trading,Jane,Johnson,,21 Maple Drive,High Street,,Bristol,,United Kingdom
XGML00000190004,John Doe LP,,John's Trading,Alice,Doe,11111991,,Pine Lane,,Leeds,M1 2BB,United Kingdom
7 changes: 1 addition & 6 deletions suppress.xml
Original file line number Diff line number Diff line change
@@ -1,9 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.3.xsd">
<suppress until="2025-12-01Z">
<notes><![CDATA[
file name: logback-core-1.5.11.jar
]]></notes>
<cve>CVE-2024-12798</cve>
</suppress>

</suppressions>

0 comments on commit c75701f

Please sign in to comment.