[HOPSWORKS-2818] Serialize Date objects correctly in Query Constructor
moritzmeister authored Nov 16, 2021
1 parent 58cc2a6 commit 674e791
Showing 2 changed files with 63 additions and 6 deletions.
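The change swaps bare SQL identifiers for typed Calcite literals when a filter's feature is a date or timestamp. Below is a minimal, self-contained sketch (not part of the commit; the class name and sample values are illustrative) of how Calcite renders those literals, which is the behavior the new getSQLNode branches and the tests in this commit rely on:

import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimestampString;

public class CalciteLiteralSketch {
  public static void main(String[] args) {
    SqlDialect spark = new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT);

    // A date value now becomes a typed DATE literal instead of a bare identifier.
    SqlNode date = SqlLiteral.createDate(new DateString("2021-11-12"), SqlParserPos.ZERO);
    System.out.println(date.toSqlString(spark).getSql()); // DATE '2021-11-12'

    // Timestamp literals are created with precision 3 (milliseconds), so any
    // sub-millisecond digits in the input are truncated when the literal is rendered.
    SqlNode ts = SqlLiteral.createTimestamp(
        new TimestampString("2021-11-12 09:55:32.084354"), 3, SqlParserPos.ZERO);
    System.out.println(ts.toSqlString(spark).getSql()); // TIMESTAMP '2021-11-12 09:55:32.084'
  }
}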
First changed file:
@@ -30,15 +30,17 @@
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.json.JSONTokener;
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimestampString;
import org.json.JSONArray;
import org.json.JSONTokener;

import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -169,15 +171,20 @@ public SqlNode generateFilterNode(Filter filter, boolean online) {
filterValue = new SqlNodeList(operandList, SqlParserPos.ZERO);
} else {
// Value
filterValue = getSQLNode(filter.getFeature().getType(), json.toString());
filterValue = getSQLNode(filter.getFeature().getType(), filter.getValue());
}

return filter.getCondition().operator.createCall(SqlParserPos.ZERO, feature, filterValue);
}

private SqlNode getSQLNode(String type, String value){
protected SqlNode getSQLNode(String type, String value){
if (type.equalsIgnoreCase("string")) {
return SqlLiteral.createCharString(value, SqlParserPos.ZERO);
} else if (type.equalsIgnoreCase("date")) {
return SqlLiteral.createDate(new DateString(value), SqlParserPos.ZERO);
} else if (type.equalsIgnoreCase("timestamp")) {
// precision 3 corresponds to milliseconds, since we don't support higher precision in Parquet files
return SqlLiteral.createTimestamp(new TimestampString(value), 3, SqlParserPos.ZERO);
} else {
return new SqlIdentifier(value, SqlParserPos.ZERO);
}
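For context, a hypothetical end-to-end illustration (the feature name and the >= condition are invented for the example) of what generateFilterNode produces with the new date branch, combining the feature identifier and the typed literal via the filter condition's operator:

import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.util.DateString;

public class DateFilterSketch {
  public static void main(String[] args) {
    // The feature identifier and the typed literal value of the filter.
    SqlNode feature = new SqlIdentifier("event_date", SqlParserPos.ZERO);
    SqlNode value = SqlLiteral.createDate(new DateString("2021-11-12"), SqlParserPos.ZERO);

    // generateFilterNode combines the two operands via the condition's operator.
    SqlNode call = SqlStdOperatorTable.GREATER_THAN_OR_EQUAL.createCall(SqlParserPos.ZERO, feature, value);

    // Spark dialect renders: `event_date` >= DATE '2021-11-12'
    System.out.println(call.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql());
  }
}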
Second changed file:
@@ -19,9 +19,9 @@
import io.hops.hopsworks.common.featurestore.feature.FeatureGroupFeatureDTO;
import io.hops.hopsworks.common.featurestore.query.ConstructorController;
import io.hops.hopsworks.common.featurestore.query.Feature;
import io.hops.hopsworks.common.featurestore.query.Query;
import io.hops.hopsworks.common.featurestore.query.SqlCondition;
import io.hops.hopsworks.common.featurestore.query.join.Join;
import io.hops.hopsworks.common.featurestore.query.Query;
import io.hops.hopsworks.exceptions.FeaturestoreException;
import io.hops.hopsworks.persistence.entity.featurestore.Featurestore;
import io.hops.hopsworks.persistence.entity.featurestore.featuregroup.Featuregroup;
@@ -30,6 +30,8 @@
import io.hops.hopsworks.persistence.entity.project.Project;
import org.apache.calcite.sql.JoinType;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.dialect.HiveSqlDialect;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.junit.Assert;
import org.junit.Before;
@@ -477,4 +479,52 @@ public void testBuildFilterNodeTripleJoin() throws Exception {

Assert.assertEquals(expected, result);
}
}

@Test
public void testGetSQLNodeString() throws Exception {
SqlNode node = filterController.getSQLNode("string", "value_string");
String result = node.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
String expected = "'value_string'";

Assert.assertEquals(expected, result);

result = node.toSqlString(new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
Assert.assertEquals(expected, result);
}

@Test
public void testGetSQLNodeDate() throws Exception {
SqlNode node = filterController.getSQLNode("date", "2021-11-12");
String result = node.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
String expected = "DATE '2021-11-12'";

Assert.assertEquals(expected, result);

result = node.toSqlString(new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
Assert.assertEquals(expected, result);
}

@Test
public void testGetSQLNodeTimestamp() throws Exception {
SqlNode node = filterController.getSQLNode("timestamp", "2021-11-12 09:55:32.084354");
String result = node.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
String expected = "TIMESTAMP '2021-11-12 09:55:32.084'";

Assert.assertEquals(expected, result);

result = node.toSqlString(new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
Assert.assertEquals(expected, result);
}

@Test
public void testGetSQLNodeOther() throws Exception {
SqlNode node = filterController.getSQLNode("int", "5");
String result = node.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
String expected = "5";

Assert.assertEquals(expected, result);

result = node.toSqlString(new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
Assert.assertEquals(expected, result);
}
}
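Note how the timestamp test feeds a microsecond-precision value ("09:55:32.084354") but expects millisecond output ("09:55:32.084"), matching the precision-3 literal created in getSQLNode, and that each test asserts the same rendering for both the Spark and Hive dialects.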
