Skip to content

Commit

Permalink
[Refactor] Remove useless analyze method and minor code formatting in RowDelimiter (StarRocks#13060)
Browse files Browse the repository at this point in the history
  • Loading branch information
HangyuanLiu authored Nov 24, 2022
1 parent be97f6b commit 0545ab3
Show file tree
Hide file tree
Showing 34 changed files with 107 additions and 177 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@

import com.starrocks.catalog.Table;
import com.starrocks.common.AnalysisException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
* An actual table, such as OLAP table or a MySQL table.
Expand Down

This file was deleted.

12 changes: 6 additions & 6 deletions fe/fe-core/src/main/java/com/starrocks/analysis/Delimiter.java
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
package com.starrocks.analysis;

import com.google.common.base.Strings;
import com.starrocks.common.AnalysisException;
import com.starrocks.sql.analyzer.SemanticException;

import java.io.StringWriter;

Expand All @@ -26,23 +26,23 @@ private static byte charToByte(char c) {
return (byte) HEX_STRING.indexOf(c);
}

public static String convertDelimiter(String originStr) throws AnalysisException {
public static String convertDelimiter(String originStr) {
if (Strings.isNullOrEmpty(originStr)) {
throw new AnalysisException("Delimiter cannot be empty or null");
throw new SemanticException("Delimiter cannot be empty or null");
}

if (originStr.toUpperCase().startsWith("\\X") || originStr.toUpperCase().startsWith("0X")) {
String hexStr = originStr.substring(2);
// check hex str
if (hexStr.isEmpty()) {
throw new AnalysisException("Invalid delimiter '" + originStr + ": empty hex string");
throw new SemanticException("Invalid delimiter '" + originStr + ": empty hex string");
}
if (hexStr.length() % 2 != 0) {
throw new AnalysisException("Invalid delimiter '" + originStr + ": hex length must be a even number");
throw new SemanticException("Invalid delimiter '" + originStr + ": hex length must be a even number");
}
for (char hexChar : hexStr.toUpperCase().toCharArray()) {
if (HEX_STRING.indexOf(hexChar) == -1) {
throw new AnalysisException("Invalid delimiter '" + originStr + "': invalid hex format");
throw new SemanticException("Invalid delimiter '" + originStr + "': invalid hex format");
}
}

Expand Down
3 changes: 2 additions & 1 deletion fe/fe-core/src/main/java/com/starrocks/analysis/Expr.java
Original file line number Diff line number Diff line change
Expand Up @@ -298,7 +298,7 @@ public final void analyze(Analyzer analyzer) throws AnalysisException {
/**
* Does subclass-specific analysis. Subclasses should override analyzeImpl().
*/
protected void analyzeImpl(Analyzer analyzer) throws AnalysisException {
protected void analyzeImpl(Analyzer analyzer) throws AnalysisException {
}

/**
Expand Down Expand Up @@ -727,6 +727,7 @@ public TExpr normalize(FragmentNormalizer normalizer) {
public interface ExprVisitor {
void visit(Expr expr, TExprNode texprNode);
}

// Append a flattened version of this expr, including all children, to 'container'.
final void treeToThriftHelper(TExpr container, ExprVisitor visitor) {
TExprNode msg = new TExprNode();
Expand Down
2 changes: 0 additions & 2 deletions fe/fe-core/src/main/java/com/starrocks/analysis/ExprId.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@

import com.starrocks.common.Id;
import com.starrocks.common.IdGenerator;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class ExprId extends Id<ExprId> {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ private void init(Double value) {
}

private void checkValue(Double value) throws AnalysisException {
if (value.isInfinite()||value.isNaN()) {
if (value.isInfinite() || value.isNaN()) {
throw new AnalysisException("Invalid literal:" + value);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -464,7 +464,7 @@ public boolean isSelfMonotonic() {
@Override
public Expr uncheckedCastTo(Type targetType) throws AnalysisException {
Type type = getFn().getReturnType();
if (!type.equals(targetType)) {
if (!type.equals(targetType)) {
return super.uncheckedCastTo(targetType);
} else {
return this;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,7 @@ public void analyze(ConnectContext connCtx, Expr predicate, List<OrderByElement>
// analyze limit
analyzeLimit(limit);
}

/**
* Generated and connected predicates that is used in `List.stream().filter()` to filter instances(`<U>`).
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,14 @@

package com.starrocks.load;

import com.starrocks.analysis.ColumnSeparator;
import com.starrocks.analysis.Expr;
import com.starrocks.analysis.RowDelimiter;
import com.starrocks.analysis.SlotRef;
import com.starrocks.sql.ast.ColumnSeparator;
import com.starrocks.sql.ast.ImportColumnDesc;
import com.starrocks.sql.ast.ImportColumnsStmt;
import com.starrocks.sql.ast.ImportWhereStmt;
import com.starrocks.sql.ast.PartitionNames;
import com.starrocks.sql.ast.RowDelimiter;

import java.util.ArrayList;
import java.util.List;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,8 @@
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.starrocks.analysis.ColumnSeparator;
import com.starrocks.analysis.Expr;
import com.starrocks.analysis.RoutineLoadDataSourceProperties;
import com.starrocks.analysis.RowDelimiter;
import com.starrocks.catalog.Database;
import com.starrocks.catalog.MaterializedView;
import com.starrocks.catalog.OlapTable;
Expand Down Expand Up @@ -63,11 +61,13 @@
import com.starrocks.qe.SessionVariable;
import com.starrocks.qe.SqlModeHelper;
import com.starrocks.server.GlobalStateMgr;
import com.starrocks.sql.ast.ColumnSeparator;
import com.starrocks.sql.ast.CreateRoutineLoadStmt;
import com.starrocks.sql.ast.ImportColumnDesc;
import com.starrocks.sql.ast.ImportColumnsStmt;
import com.starrocks.sql.ast.LoadStmt;
import com.starrocks.sql.ast.PartitionNames;
import com.starrocks.sql.ast.RowDelimiter;
import com.starrocks.system.SystemInfoService;
import com.starrocks.thrift.TExecPlanFragmentParams;
import com.starrocks.thrift.TUniqueId;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,22 +2,21 @@
package com.starrocks.load.streamload;

import com.google.common.collect.Lists;
import com.starrocks.analysis.ColumnSeparator;
import com.starrocks.analysis.Expr;
import com.starrocks.analysis.RowDelimiter;
import com.starrocks.catalog.Database;
import com.starrocks.common.AnalysisException;
import com.starrocks.common.Config;
import com.starrocks.common.UserException;
import com.starrocks.common.util.CompressionUtils;
import com.starrocks.common.util.TimeUtils;
import com.starrocks.load.routineload.RoutineLoadJob;
import com.starrocks.qe.SessionVariable;
import com.starrocks.qe.SqlModeHelper;
import com.starrocks.sql.ast.ColumnSeparator;
import com.starrocks.sql.ast.ImportColumnDesc;
import com.starrocks.sql.ast.ImportColumnsStmt;
import com.starrocks.sql.ast.ImportWhereStmt;
import com.starrocks.sql.ast.PartitionNames;
import com.starrocks.sql.ast.RowDelimiter;
import com.starrocks.sql.parser.ParsingException;
import com.starrocks.thrift.TCompressionType;
import com.starrocks.thrift.TFileFormatType;
Expand Down Expand Up @@ -198,10 +197,10 @@ private void setOptionalFromStreamLoadContext(StreamLoadParam context) throws Us
setWhereExpr(context.whereExpr);
}
if (context.columnSeparator != null) {
setColumnSeparator(context.columnSeparator);
columnSeparator = new ColumnSeparator(context.columnSeparator);
}
if (context.rowDelimiter != null) {
setRowDelimiter(context.rowDelimiter);
rowDelimiter = new RowDelimiter(context.rowDelimiter);
}
if (context.partitions != null) {
String[] partNames = context.partitions.trim().split("\\s*,\\s*");
Expand Down Expand Up @@ -272,10 +271,10 @@ private void setOptionalFromTSLPutRequest(TStreamLoadPutRequest request, Databas
setWhereExpr(request.getWhere());
}
if (request.isSetColumnSeparator()) {
setColumnSeparator(request.getColumnSeparator());
columnSeparator = new ColumnSeparator(request.getColumnSeparator());
}
if (request.isSetRowDelimiter()) {
setRowDelimiter(request.getRowDelimiter());
rowDelimiter = new RowDelimiter(request.getRowDelimiter());
}
if (request.isSetPartitions()) {
String[] partNames = request.getPartitions().trim().split("\\s*,\\s*");
Expand Down Expand Up @@ -325,7 +324,7 @@ private void setOptionalFromTSLPutRequest(TStreamLoadPutRequest request, Databas
if (request.isSetLoad_dop()) {
loadParallelRequestNum = request.getLoad_dop();
}

if (request.isSetEnable_replicated_storage()) {
enableReplicatedStorage = request.isEnable_replicated_storage();
}
Expand Down Expand Up @@ -419,16 +418,6 @@ private void setMergeConditionExpr(String mergeConditionStr) throws UserExceptio
// TODO:(caneGuy) use expr for update condition
}

private void setColumnSeparator(String oriSeparator) throws AnalysisException {
columnSeparator = new ColumnSeparator(oriSeparator);
columnSeparator.analyze();
}

private void setRowDelimiter(String orgDelimiter) throws AnalysisException {
rowDelimiter = new RowDelimiter(orgDelimiter);
rowDelimiter.analyze();
}

public long getLoadMemLimit() {
return loadMemLimit;
}
Expand Down
22 changes: 11 additions & 11 deletions fe/fe-core/src/main/java/com/starrocks/sql/LoadPlanner.java
Original file line number Diff line number Diff line change
Expand Up @@ -108,18 +108,18 @@ public class LoadPlanner {
List<ImportColumnDesc> columnDescs;
private StreamLoadInfo streamLoadInfo;
private boolean routimeStreamLoadNegative;

// Stream load related structs
private String label;
// Routine load related structs
TRoutineLoadTask routineLoadTask;

public LoadPlanner(long loadJobId, TUniqueId loadId, long txnId, long dbId, OlapTable destTable,
boolean strictMode, String timezone, long timeoutS,
long startTime, boolean partialUpdate, ConnectContext context,
Map<String, String> sessionVariables, long loadMemLimit, long execMemLimit,
BrokerDesc brokerDesc, List<BrokerFileGroup> brokerFileGroups,
List<List<TBrokerFileStatus>> fileStatusesList, int filesAdded) {
boolean strictMode, String timezone, long timeoutS,
long startTime, boolean partialUpdate, ConnectContext context,
Map<String, String> sessionVariables, long loadMemLimit, long execMemLimit,
BrokerDesc brokerDesc, List<BrokerFileGroup> brokerFileGroups,
List<List<TBrokerFileStatus>> fileStatusesList, int filesAdded) {
this.loadJobId = loadJobId;
this.loadId = loadId;
this.txnId = txnId;
Expand Down Expand Up @@ -182,13 +182,13 @@ public LoadPlanner(long loadJobId, TUniqueId loadId, long txnId, long dbId, Stri
this.descTable = analyzer.getDescTbl();
this.enableDictOptimize = Config.enable_dict_optimize_stream_load;
this.startTime = System.currentTimeMillis();
this.sessionVariables = sessionVariables;
this.sessionVariables = sessionVariables;
}

public LoadPlanner(long loadJobId, TUniqueId loadId, long txnId, long dbId, String dbName, OlapTable destTable,
public LoadPlanner(long loadJobId, TUniqueId loadId, long txnId, long dbId, String dbName, OlapTable destTable,
boolean strictMode, String timezone, boolean partialUpdate, ConnectContext context,
Map<String, String> sessionVariables, long loadMemLimit, long execMemLimit,
boolean routimeStreamLoadNegative, int parallelInstanceNum, List<ImportColumnDesc> columnDescs,
boolean routimeStreamLoadNegative, int parallelInstanceNum, List<ImportColumnDesc> columnDescs,
StreamLoadInfo streamLoadInfo, String label, long timeoutS) {
this(loadJobId, loadId, txnId, dbId, dbName, destTable, strictMode, timezone, partialUpdate, context,
sessionVariables, loadMemLimit, execMemLimit, routimeStreamLoadNegative, parallelInstanceNum,
Expand Down Expand Up @@ -319,7 +319,7 @@ private void generateTupleDescriptor(List<Column> destColumns, boolean isPrimary

if (col.getType().isVarchar() && enableDictOptimize
&& IDictManager.getInstance().hasGlobalDict(destTable.getId(),
col.getName())) {
col.getName())) {
Optional<ColumnDict> dict = IDictManager.getInstance().getGlobalDict(destTable.getId(), col.getName());
dict.ifPresent(columnDict -> globalDicts.add(new Pair<>(slotDesc.getId().asInt(), columnDict)));
}
Expand Down Expand Up @@ -360,7 +360,7 @@ private ScanNode prepareScanNodes() throws UserException {
}

private void prepareSinkFragment(PlanFragment sinkFragment, List<Long> partitionIds, boolean canUsePipeLine,
boolean completeTabletSink) throws UserException {
boolean completeTabletSink) throws UserException {
DataSink dataSink = null;
if (destTable instanceof OlapTable) {
// 4. Olap table sink
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,7 @@ public void setObjectList(List<PEntryObject> objectList) {
public boolean hasPrivilegeObject() {
return this.objects != null;
}

@Override
public <R, C> R accept(AstVisitor<R, C> visitor, C context) {
return visitor.visitGrantRevokePrivilegeStatement(this, context);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
// This file is licensed under the Elastic License 2.0. Copyright 2021-present, StarRocks Inc.

package com.starrocks.sql.ast;

import com.starrocks.analysis.Delimiter;
import com.starrocks.analysis.ParseNode;

/**
 * AST node holding a column separator supplied in a load statement.
 * Keeps both the user's raw text and the decoded form produced by
 * {@link Delimiter#convertDelimiter(String)} (e.g. hex forms such as "\x01").
 */
public class ColumnSeparator implements ParseNode {
    // Separator exactly as written by the user; echoed back in toSql().
    private final String oriSeparator;
    // Decoded separator actually used to split columns.
    private final String separator;

    public ColumnSeparator(String separator) {
        // NOTE(review): convertDelimiter throws SemanticException on empty or
        // malformed hex input — construction fails fast on a bad separator.
        this.oriSeparator = separator;
        this.separator = Delimiter.convertDelimiter(separator);
    }

    /** Returns the decoded separator string. */
    public String getColumnSeparator() {
        return separator;
    }

    /** Renders the original user-supplied text, single-quoted. */
    public String toSql() {
        return "'" + oriSeparator + "'";
    }

    @Override
    public String toString() {
        return toSql();
    }
}
Loading

0 comments on commit 0545ab3

Please sign in to comment.