Skip to content

Commit

Permalink
Merge latest trunk into branch. (Gunther Hagleitner)
Browse files Browse the repository at this point in the history
git-svn-id: https://svn.apache.org/repos/asf/hive/branches/tez@1541190 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information
hagleitn committed Nov 12, 2013
2 parents 36d4cc9 + ca3a1a1 commit cda5ceb
Show file tree
Hide file tree
Showing 481 changed files with 16,346 additions and 1,752 deletions.
134 changes: 78 additions & 56 deletions ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion ant/src/org/apache/hadoop/hive/ant/GenVectorTestCode.java
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

package org.apache.hadoop.hive.ant;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;

Expand Down Expand Up @@ -235,7 +236,7 @@ public void generateTestSuites() throws IOException {

String outputFile = GenVectorCode.joinPath(this.testOutputDir, testClass + ".java");

GenVectorCode.writeFile(outputFile, templateString);
GenVectorCode.writeFile(new File(outputFile), templateString);
}
}

Expand Down
6 changes: 5 additions & 1 deletion beeline/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,14 @@
<groupId>org.apache.hive</groupId>
<artifactId>hive-shims</artifactId>
<version>${project.version}</version>
<classifier>uberjar</classifier>
<scope>runtime</scope>
</dependency>
<!-- inter-project -->
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>${commons-cli.version}</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
Expand Down
6 changes: 5 additions & 1 deletion beeline/src/java/org/apache/hive/beeline/Commands.java
Original file line number Diff line number Diff line change
Expand Up @@ -674,7 +674,10 @@ private boolean execute(String line, boolean call) {

// use multiple lines for statements not terminated by ";"
try {
while (!(line.trim().endsWith(";")) && beeLine.getOpts().isAllowMultiLineCommand()) {
//When using -e, console reader is not initialized and command is a single line
while (beeLine.getConsoleReader() != null && !(line.trim().endsWith(";"))
&& beeLine.getOpts().isAllowMultiLineCommand()) {

StringBuilder prompt = new StringBuilder(beeLine.getPrompt());
for (int i = 0; i < prompt.length() - 1; i++) {
if (prompt.charAt(i) != '>') {
Expand All @@ -691,6 +694,7 @@ private boolean execute(String line, boolean call) {
beeLine.handleException(e);
}


if (line.endsWith(";")) {
line = line.substring(0, line.length() - 1);
}
Expand Down
2 changes: 1 addition & 1 deletion beeline/src/java/org/apache/hive/beeline/SQLCompletor.java
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ public SQLCompletor(BeeLine beeLine, boolean skipmeta)
// add the default SQL completions
String keywords = new BufferedReader(new InputStreamReader(
SQLCompletor.class.getResourceAsStream(
"sql-keywords.properties"))).readLine();
"/sql-keywords.properties"))).readLine();

// now add the keywords from the current connection
try {
Expand Down
1 change: 0 additions & 1 deletion cli/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,6 @@
<groupId>org.apache.hive</groupId>
<artifactId>hive-shims</artifactId>
<version>${project.version}</version>
<classifier>uberjar</classifier>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
Expand Down
2 changes: 1 addition & 1 deletion cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
Original file line number Diff line number Diff line change
Expand Up @@ -716,7 +716,7 @@ private int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor o
}

// CLI remote mode is a thin client: only load auxJars in local mode
if (!ss.isRemoteMode() && !ShimLoader.getHadoopShims().usesJobShell()) {
if (!ss.isRemoteMode()) {
// hadoop-20 and above - we need to augment classpath using hiveconf
// components
// see also: code in ExecDriver.java
Expand Down
1 change: 0 additions & 1 deletion common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@
<groupId>org.apache.hive</groupId>
<artifactId>hive-shims</artifactId>
<version>${project.version}</version>
<classifier>uberjar</classifier>
</dependency>
<!-- inter-project -->
<dependency>
Expand Down
4 changes: 4 additions & 0 deletions common/src/java/org/apache/hadoop/hive/common/ObjectPair.java
Original file line number Diff line number Diff line change
Expand Up @@ -74,4 +74,8 @@ public boolean equals(ObjectPair<F, S> that) {
return this.getFirst().equals(that.getFirst()) &&
this.getSecond().equals(that.getSecond());
}

/**
 * Renders the pair as "first:second" using plain string concatenation,
 * so a null component prints as the text "null".
 */
@Override
public String toString() {
  return first + ":" + second;
}
}
43 changes: 22 additions & 21 deletions common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hive.common;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;



/**
Expand Down Expand Up @@ -66,33 +66,34 @@ public class StatsSetupConst {
/**
* @return List of all supported statistics
*/
public static List<String> getSupportedStats() {
List<String> supportedStats = new ArrayList<String>();
supportedStats.add(NUM_FILES);
supportedStats.add(ROW_COUNT);
supportedStats.add(TOTAL_SIZE);
supportedStats.add(RAW_DATA_SIZE);
return supportedStats;
}
public static final String[] supportedStats = new String[]
{NUM_FILES,ROW_COUNT,TOTAL_SIZE,RAW_DATA_SIZE};

/**
* @return List of all statistics that need to be collected during query execution. These are
* statistics that inherently require a scan of the data.
*/
public static List<String> getStatsToBeCollected() {
List<String> collectableStats = new ArrayList<String>();
collectableStats.add(ROW_COUNT);
collectableStats.add(RAW_DATA_SIZE);
return collectableStats;
}
public static final String[] statsRequireCompute = new String[] {ROW_COUNT,RAW_DATA_SIZE};

/**
* @return List of statistics that can be collected quickly without requiring a scan of the data.
*/
public static List<String> getStatsFastCollection() {
List<String> fastStats = new ArrayList<String>();
fastStats.add(NUM_FILES);
fastStats.add(TOTAL_SIZE);
return fastStats;
public static final String[] fastStats = new String[] {NUM_FILES,TOTAL_SIZE};

// This string constant is used by stats task to indicate to AlterHandler that
// alterPartition/alterTable is happening via statsTask.
public static final String STATS_GENERATED_VIA_STATS_TASK = "STATS_GENERATED_VIA_STATS_TASK";

// This string constant will be persisted in metastore to indicate whether corresponding
// table or partition's statistics are accurate or not.
public static final String COLUMN_STATS_ACCURATE = "COLUMN_STATS_ACCURATE";

public static final String TRUE = "true";

public static final String FALSE = "false";

/**
 * Checks whether the statistics recorded in the given table/partition
 * parameter map are marked as accurate.
 *
 * @param params table or partition parameters as stored in the metastore
 * @return true iff the COLUMN_STATS_ACCURATE parameter is present and equals "true"
 */
public static boolean areStatsUptoDate(Map<String,String> params) {
  // Constant-first equals handles an absent key (null value) without an explicit check.
  return TRUE.equals(params.get(COLUMN_STATS_ACCURATE));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@

public abstract class HiveBaseChar {
protected String value;
protected int characterLength = -1;

protected HiveBaseChar() {
}
Expand All @@ -31,20 +30,11 @@ protected HiveBaseChar() {
* @param val new value
*/
public void setValue(String val, int maxLength) {
characterLength = -1;
value = HiveBaseChar.enforceMaxLength(val, maxLength);
}

public void setValue(HiveBaseChar val, int maxLength) {
if ((maxLength < 0)
|| (val.characterLength > 0 && val.characterLength <= maxLength)) {
// No length enforcement required, or source length is less than max length.
// We can copy the source value as-is.
value = val.value;
this.characterLength = val.characterLength;
} else {
setValue(val.value, maxLength);
}
setValue(val.value, maxLength);
}

public static String enforceMaxLength(String val, int maxLength) {
Expand All @@ -53,22 +43,48 @@ public static String enforceMaxLength(String val, int maxLength) {
if (maxLength > 0) {
int valLength = val.codePointCount(0, val.length());
if (valLength > maxLength) {
// Truncate the excess trailing spaces to fit the character length.
// Truncate the excess chars to fit the character length.
// Also make sure we take supplementary chars into account.
value = val.substring(0, val.offsetByCodePoints(0, maxLength));
}
}
return value;
}

/**
 * Returns val adjusted to exactly maxLength characters, where length is
 * measured in Unicode code points: longer values are truncated, shorter
 * values are right-padded with spaces. A negative maxLength disables both
 * and returns val unchanged.
 */
public static String getPaddedValue(String val, int maxLength) {
  if (maxLength < 0) {
    return val;
  }

  int codePoints = val.codePointCount(0, val.length());
  if (codePoints > maxLength) {
    // Too long: truncation (supplementary-char aware) is handled by enforceMaxLength.
    return enforceMaxLength(val, maxLength);
  }
  if (codePoints == maxLength) {
    return val;
  }

  // rightPad works in java chars, while the deficit is in code points; widen the
  // target width by the chars already consumed by surrogate pairs.
  int targetChars = val.length() + (maxLength - codePoints);
  return StringUtils.rightPad(val, targetChars);
}

/** @return the underlying string value; null if never set. */
public String getValue() {
  return this.value;
}

/**
 * Returns the length of the value in Unicode code points (not java chars),
 * so a supplementary character counts as one. Computed on each call; the
 * old cached characterLength field was removed in this revision.
 */
public int getCharacterLength() {
  return value.codePointCount(0, value.length());
}

/** Delegates to the underlying string's hash code. */
@Override
public int hashCode() {
  String v = getValue();
  return v.hashCode();
}

/** The string form is exactly the stored value. */
@Override
public String toString() {
  return this.getValue();
}
}
91 changes: 91 additions & 0 deletions common/src/java/org/apache/hadoop/hive/common/type/HiveChar.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.common.type;

import org.apache.commons.lang.StringUtils;

/**
* HiveChar.
* String values will be padded to full char length.
* Character legnth, comparison, hashCode should ignore trailing spaces.
*/
public class HiveChar extends HiveBaseChar
implements Comparable<HiveChar> {

public static final int MAX_CHAR_LENGTH = 255;

public HiveChar() {
}

public HiveChar(String val, int len) {
setValue(val, len);
}

public HiveChar(HiveChar hc, int len) {
setValue(hc.value, len);
}

/**
* Set char value, padding or truncating the value to the size of len parameter.
*/
public void setValue(String val, int len) {
super.setValue(HiveBaseChar.getPaddedValue(val, len), -1);
}

public void setValue(String val) {
setValue(val, -1);
}

public String getStrippedValue() {
return StringUtils.stripEnd(value, " ");
}

protected String getPaddedValue() {
return value;
}

public int getCharacterLength() {
String strippedValue = getStrippedValue();
return strippedValue.codePointCount(0, strippedValue.length());
}

public String toString() {
return getPaddedValue();
}

public int compareTo(HiveChar rhs) {
if (rhs == this) {
return 0;
}
return this.getStrippedValue().compareTo(rhs.getStrippedValue());
}

public boolean equals(Object rhs) {
if (rhs == this) {
return true;
}
if (rhs == null || rhs.getClass() != getClass()) {
return false;
}
return this.getStrippedValue().equals(((HiveChar) rhs).getStrippedValue());
}

public int hashCode() {
return getStrippedValue().hashCode();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,13 @@ private static BigDecimal normalize(BigDecimal bd, boolean allowRounding) {

int maxScale = Math.min(MAX_SCALE, Math.min(MAX_PRECISION - intDigits, bd.scale()));
if (bd.scale() > maxScale ) {
bd = allowRounding ? bd.setScale(maxScale, RoundingMode.HALF_UP) : null;
if (allowRounding) {
bd = bd.setScale(maxScale, RoundingMode.HALF_UP);
// Trimming is again necessary, because rounding may introduce new trailing 0's.
bd = trim(bd);
} else {
bd = null;
}
}

return bd;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,11 +51,6 @@ public void setValue(HiveVarchar hc) {
super.setValue(hc.getValue(), -1);
}

@Override
public String toString() {
return getValue();
}

public int compareTo(HiveVarchar rhs) {
if (rhs == this) {
return 0;
Expand All @@ -69,9 +64,4 @@ public boolean equals(HiveVarchar rhs) {
}
return this.getValue().equals(rhs.getValue());
}

@Override
public int hashCode() {
return getValue().hashCode();
}
}
Loading

0 comments on commit cda5ceb

Please sign in to comment.