Skip to content

Commit

Permalink
Uppercase
Browse files Browse the repository at this point in the history
  • Loading branch information
2b3c511 committed May 24, 2024
1 parent 49e24ac commit 44466c8
Show file tree
Hide file tree
Showing 3 changed files with 22 additions and 31 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@

import org.apache.iotdb.cli.utils.IoTPrinter;
import org.apache.iotdb.exception.ArgsErrorException;
import org.apache.iotdb.rpc.IoTDBConnectionException;
import org.apache.iotdb.rpc.StatementExecutionException;
import org.apache.iotdb.session.Session;

import org.apache.commons.cli.CommandLine;
Expand All @@ -37,7 +35,6 @@

import java.io.IOException;
import java.io.PrintWriter;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
Expand Down Expand Up @@ -74,14 +71,11 @@ public abstract class AbstractSchemaTool {
protected static String port;
protected static String username;
protected static String password;
protected static ZoneId zoneId;

protected static String timeZoneID;
protected static String aligned;
protected static Session session;

protected static final String systemPathPrefix = "root.__system";
protected static final List<String> headColumns =
protected static final List<String> HEAD_COLUMNS =
Arrays.asList("Timeseries", "Alias", "DataType", "Encoding", "Compression");
private static final IoTPrinter ioTPrinter = new IoTPrinter(System.out);
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractSchemaTool.class);
Expand Down Expand Up @@ -156,13 +150,6 @@ protected static Options createNewOptions() {
return options;
}

/**
 * Applies the user-supplied time zone (if any) to the session and caches the
 * session's effective zone in the {@code zoneId} field.
 *
 * <p>When {@code timeZoneID} is {@code null}, no zone is pushed to the server and
 * {@code zoneId} ends up holding whatever zone the session already reports.
 *
 * @throws IoTDBConnectionException if communication with the server fails
 * @throws StatementExecutionException if the server rejects the time-zone change
 */
protected static void setTimeZone() throws IoTDBConnectionException, StatementExecutionException {
if (timeZoneID != null) {
session.setTimeZone(timeZoneID);
}
// Read the zone back from the session (rather than using timeZoneID directly)
// so zoneId reflects the server-side default when no explicit zone was given.
zoneId = ZoneId.of(session.getTimeZone());
}

/**
* write data to CSV file.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,8 @@
import java.io.IOException;
import java.util.List;

import static org.apache.iotdb.commons.schema.SchemaConstant.SYSTEM_DATABASE;

/** Export Schema CSV file. */
public class ExportSchema extends AbstractSchemaTool {

Expand Down Expand Up @@ -76,9 +78,9 @@ public class ExportSchema extends AbstractSchemaTool {

private static final IoTPrinter ioTPrinter = new IoTPrinter(System.out);

private static final String baseViewType = "BASE";
private static final String headerViewType = "ViewType";
private static final String headerTimeseries = "Timeseries";
private static final String BASE_VIEW_TYPE = "BASE";
private static final String HEADER_VIEW_TYPE = "ViewType";
private static final String HEADER_TIMESERIES = "Timeseries";

@SuppressWarnings({
"squid:S3776",
Expand Down Expand Up @@ -285,25 +287,25 @@ private static void dumpResult(String pattern, int index) {
public static void writeCsvFile(
SessionDataSet sessionDataSet, String filePath, List<String> headers, int linesPerFile)
throws IOException, IoTDBConnectionException, StatementExecutionException {
int viewTypeIndex = headers.indexOf(headerViewType);
int timeseriesIndex = headers.indexOf(headerTimeseries);
int viewTypeIndex = headers.indexOf(HEADER_VIEW_TYPE);
int timeseriesIndex = headers.indexOf(HEADER_TIMESERIES);

int fileIndex = 0;
boolean hasNext = true;
while (hasNext) {
int i = 0;
final String finalFilePath = filePath + "_" + fileIndex + ".csv";
final CSVPrinterWrapper csvPrinterWrapper = new CSVPrinterWrapper(finalFilePath);
csvPrinterWrapper.printRecord(headColumns);
csvPrinterWrapper.printRecord(HEAD_COLUMNS);
while (i++ < linesPerFile) {
if (sessionDataSet.hasNext()) {
RowRecord rowRecord = sessionDataSet.next();
List<Field> fields = rowRecord.getFields();
if (fields.get(timeseriesIndex).getStringValue().startsWith(systemPathPrefix)
|| !fields.get(viewTypeIndex).getStringValue().equals(baseViewType)) {
if (fields.get(timeseriesIndex).getStringValue().startsWith(SYSTEM_DATABASE)
|| !fields.get(viewTypeIndex).getStringValue().equals(BASE_VIEW_TYPE)) {
continue;
}
headColumns.forEach(
HEAD_COLUMNS.forEach(
column -> {
Field field = fields.get(headers.indexOf(column));
String fieldStringValue = field.getStringValue();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,8 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.apache.iotdb.commons.schema.SchemaConstant.SYSTEM_DATABASE;

/** Import Schema CSV file. */
public class ImportSchema extends AbstractSchemaTool {

Expand Down Expand Up @@ -357,15 +359,15 @@ private static void writeScheme(
compressors.clear();
measurementAlias.clear();
}
String path = recordObj.get(headerNames.indexOf(headColumns.get(0)));
String alias = recordObj.get(headerNames.indexOf(headColumns.get(1)));
String dataTypeRaw = recordObj.get(headerNames.indexOf(headColumns.get(2)));
String path = recordObj.get(headerNames.indexOf(HEAD_COLUMNS.get(0)));
String alias = recordObj.get(headerNames.indexOf(HEAD_COLUMNS.get(1)));
String dataTypeRaw = recordObj.get(headerNames.indexOf(HEAD_COLUMNS.get(2)));
TSDataType dataType = typeInfer(dataTypeRaw);
String encodingTypeRaw = recordObj.get(headerNames.indexOf(headColumns.get(3)));
String encodingTypeRaw = recordObj.get(headerNames.indexOf(HEAD_COLUMNS.get(3)));
TSEncoding encodingType = encodingInfer(encodingTypeRaw);
String compressionTypeRaw = recordObj.get(headerNames.indexOf(headColumns.get(4)));
String compressionTypeRaw = recordObj.get(headerNames.indexOf(HEAD_COLUMNS.get(4)));
CompressionType compressionType = compressInfer(compressionTypeRaw);
if (StringUtils.isBlank(path) || path.trim().startsWith(systemPathPrefix)) {
if (StringUtils.isBlank(path) || path.trim().startsWith(SYSTEM_DATABASE)) {
ioTPrinter.println(
String.format(
"Line '%s', column '%s': illegal path %s",
Expand Down Expand Up @@ -429,9 +431,9 @@ private static void writeScheme(

private static boolean checkHeader(List<String> headerNames) {
if (CollectionUtils.isNotEmpty(headerNames)
&& new HashSet<>(headerNames).size() == headColumns.size()) {
&& new HashSet<>(headerNames).size() == HEAD_COLUMNS.size()) {
List<String> strangers =
headerNames.stream().filter(t -> !headColumns.contains(t)).collect(Collectors.toList());
headerNames.stream().filter(t -> !HEAD_COLUMNS.contains(t)).collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(strangers)) {
ioTPrinter.println(
"The header of the CSV file to be imported is illegal. The correct format is \"Timeseries, Alias, DataType, Encoding, Compression\"!");
Expand Down

0 comments on commit 44466c8

Please sign in to comment.