@@ -40,8 +40,13 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.serde.SerDe;
-import org.apache.hadoop.hive.serde.SerDeField;
+import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.InputFormat;
@@ -57,7 +62,7 @@ public class Table {
   static final private Log LOG = LogFactory.getLog("hive.ql.metadata.Table");
 
   private Properties schema;
-  private SerDe serDe;
+  private Deserializer deserializer;
   private URI uri;
   private Class<? extends InputFormat> inputFormatClass;
   private Class<? extends OutputFormat> outputFormatClass;
@@ -83,73 +88,77 @@ public class Table {
    *
    * @exception HiveException on internal error. Note not possible now, but in the future reserve the right to throw an exception
    */
-  public Table(String name, Properties schema, SerDe serDe,
+  public Table(String name, Properties schema, Deserializer deserializer,
       Class<? extends InputFormat<?, ?>> inputFormatClass,
       Class<? extends OutputFormat<?, ?>> outputFormatClass,
       URI dataLocation, Hive hive) throws HiveException {
     initEmpty();
-    this.getTTable().setTableName(name);
-    this.getTTable().getSd().setLocation(dataLocation.toASCIIString());
-    this.setInputFormatClass(inputFormatClass);
-    this.setOutputFormatClass(outputFormatClass);
-    this.setDataLocation(dataLocation);
     this.schema = schema;
-    this.serDe = serDe; //TODO: convert to SerDeInfo format
-    this.getTTable().getSd().getSerdeInfo().setSerializationLib(serDe.getClass().getName());
+    this.deserializer = deserializer; //TODO: convert to SerDeInfo format
+    this.getTTable().getSd().getSerdeInfo().setSerializationLib(deserializer.getShortName());
+    getTTable().setTableName(name);
+    getSerdeInfo().setSerializationLib(deserializer.getClass().getName());
+    setInputFormatClass(inputFormatClass);
+    setOutputFormatClass(outputFormatClass);
+    setDataLocation(dataLocation);
   }
 
   public Table(String name) {
     // fill in defaults
     initEmpty();
-    this.getTTable().setTableName(name);
-    this.getTTable().setDatabase(MetaStoreUtils.DEFAULT_DATABASE_NAME);
-    this.getTTable().getSd().getSerdeInfo().setSerializationLib(org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe.class.getName());
-    this.getTTable().getSd().getSerdeInfo().setSerializationFormat("1");
+    getTTable().setTableName(name);
+    getTTable().setDbName(MetaStoreUtils.DEFAULT_DATABASE_NAME);
+    getSerdeInfo().setSerializationLib(MetadataTypedColumnsetSerDe.shortName());
+    getSerdeInfo().getParameters().put(Constants.SERIALIZATION_FORMAT, "1");
   }
 
   void initEmpty() {
-    this.setTTable(new org.apache.hadoop.hive.metastore.api.Table());
-    this.getTTable().setSd(new StorageDescriptor());
-    this.getTTable().getSd().setSerdeInfo(new SerDeInfo());
-    this.getTTable().getSd().setNumBuckets(-1);
-    this.getTTable().getSd().setBucketCols(new ArrayList<String>());
-    this.getTTable().getSd().setCols(new ArrayList<FieldSchema>());
-    this.getTTable().setPartitionKeys(new ArrayList<FieldSchema>());
-    this.getTTable().setParameters(new HashMap<String, String>());
-    this.getTTable().getSd().setParameters(new HashMap<String, String>());
-    this.getTTable().getSd().setSortCols(new ArrayList<Order>());
+    setTTable(new org.apache.hadoop.hive.metastore.api.Table());
+    getTTable().setSd(new StorageDescriptor());
+    getTTable().setPartitionKeys(new ArrayList<FieldSchema>());
+    getTTable().setParameters(new HashMap<String, String>());
+
+    StorageDescriptor sd = getTTable().getSd();
+    sd.setSerdeInfo(new SerDeInfo());
+    sd.setNumBuckets(-1);
+    sd.setBucketCols(new ArrayList<String>());
+    sd.setCols(new ArrayList<FieldSchema>());
+    sd.setParameters(new HashMap<String, String>());
+    sd.setSortCols(new ArrayList<Order>());
+
+    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
   }
 
   protected void initSerDe() throws HiveException {
-    if(this.serDe == null) {
+    if (deserializer == null) {
       try {
-        this.serDe = MetaStoreUtils.getSerDe(Hive.get().getConf(), this.getTTable());
+        deserializer = MetaStoreUtils.getDeserializer(Hive.get().getConf(), this.getTTable());
       } catch (MetaException e) {
         throw new HiveException(e);
       }
     }
   }
 
-  public void checkValidity() throws HiveException {
+  public void checkValidity() throws HiveException {
     // check for validity
-    String name = this.getTTable().getTableName();
+    String name = getTTable().getTableName();
     if (null == name || name.length() == 0 || !MetaStoreUtils.validateName(name)) {
       throw new HiveException("[" + name + "]: is not a valid table name");
     }
-    if (null == this.getSerDe()) {
+    if (null == getDeserializer()) {
       throw new HiveException("must specify a non-null serDe");
     }
-    if (null == this.getInputFormatClass()) {
+    if (null == getInputFormatClass()) {
       throw new HiveException("must specify an InputFormat class");
     }
-    if (null == this.getOutputFormatClass()) {
+    if (null == getOutputFormatClass()) {
       throw new HiveException("must specify an OutputFormat class");
     }
     return;
   }
 
   /**
-   * @param inputFormatClass the inputFormatClass to set
+   * @param inputFormatClass
    */
   public void setInputFormatClass(Class<? extends InputFormat> inputFormatClass) {
     this.inputFormatClass = inputFormatClass;
@@ -157,7 +166,7 @@ public class Table {
   }
 
   /**
-   * @param outputFormatClass the outputFormatClass to set
+   * @param outputFormatClass
    */
   public void setOutputFormatClass(Class<? extends OutputFormat> outputFormatClass) {
     this.outputFormatClass = outputFormatClass;
@@ -165,37 +174,37 @@ public class Table {
   }
 
   final public Properties getSchema() {
-    return this.schema;
+    return schema;
   }
 
   final public Path getPath() {
-    return new Path(this.getTTable().getSd().getLocation());
+    return new Path(getTTable().getSd().getLocation());
   }
 
   final public String getName() {
-    return this.getTTable().getTableName();
+    return getTTable().getTableName();
   }
 
   final public URI getDataLocation() {
-    return this.uri;
+    return uri;
   }
 
-  final public SerDe getSerDe() {
-    return this.serDe;
+  final public Deserializer getDeserializer() {
+    return deserializer;
   }
 
   final public Class<? extends InputFormat> getInputFormatClass() {
-    return this.inputFormatClass;
+    return inputFormatClass;
   }
 
   final public Class<? extends OutputFormat> getOutputFormatClass() {
-    return this.outputFormatClass;
+    return outputFormatClass;
   }
 
   final public boolean isValidSpec(AbstractMap<String, String> spec) throws HiveException {
 
     // TODO - types need to be checked.
-    List<FieldSchema> partCols = this.getTTable().getPartitionKeys();
+    List<FieldSchema> partCols = getTTable().getPartitionKeys();
     if(partCols== null || (partCols.size() == 0)) {
       if (spec != null)
         throw new HiveException("table is not partitioned but partition spec exists: " + spec);
@@ -217,7 +226,7 @@ public class Table {
   }
 
   public void setProperty(String name, String value) {
-    this.getTTable().getParameters().put(name, value);
+    getTTable().getParameters().put(name, value);
   }
 
   /**
@@ -225,36 +234,31 @@ public class Table {
    *
    */
   public String getProperty(String name) {
-    return this.getTTable().getParameters().get(name);
+    return getTTable().getParameters().get(name);
   }
 
-  public Vector<SerDeField> getFields(String [] components) {
-
-    Vector<SerDeField> fields = new Vector<SerDeField> ();
+  public Vector<StructField> getFields() {
+    Vector<StructField> fields = new Vector<StructField> ();
     try {
-      SerDe decoder = getSerDe();
-      if (components == null || components.length == 0) {
-        // Expand out all the columns of the table
-        List<SerDeField> fld_lst = decoder.getFields(null);
-        for(SerDeField field: fld_lst) {
-          fields.add(field);
-        }
-      }
-      else {
-        for (int i = 0; i < components.length; i++) {
-          fields.add(decoder.getFieldFromExpression(null, components[i]));
-        }
+      Deserializer decoder = getDeserializer();
+
+      // Expand out all the columns of the table
+      StructObjectInspector structObjectInspector = (StructObjectInspector)decoder.getObjectInspector();
+      List<? extends StructField> fld_lst = structObjectInspector.getAllStructFieldRefs();
+      for(StructField field: fld_lst) {
+        fields.add(field);
       }
-    } catch (Exception e) {
+    } catch (SerDeException e) {
       throw new RuntimeException(e);
     }
     return fields;
   }
 
-  public SerDeField getField(String fld) {
+  public StructField getField(String fld) {
     try {
-      return getSerDe().getFieldFromExpression(null, fld);
+      StructObjectInspector structObjectInspector = (StructObjectInspector)getDeserializer().getObjectInspector();
+      return structObjectInspector.getStructFieldRef(fld);
     }
     catch (Exception e) {
       throw new RuntimeException(e);
@@ -271,25 +275,25 @@ public class Table {
   /**
    * @param serDe the serDe to set
    */
-  public void setSerDe(SerDe serDe) {
-    this.serDe = serDe;
+  public void setDeserializer(Deserializer deserializer) {
+    this.deserializer = deserializer;
   }
 
   public String toString() {
-    return this.getTTable().getTableName();
+    return getTTable().getTableName();
   }
 
   public List<FieldSchema> getPartCols() {
-    List<FieldSchema> partKeys = this.getTTable().getPartitionKeys();
+    List<FieldSchema> partKeys = getTTable().getPartitionKeys();
     if(partKeys == null) {
       partKeys = new ArrayList<FieldSchema>();
-      this.getTTable().setPartitionKeys(partKeys);
+      getTTable().setPartitionKeys(partKeys);
     }
     return partKeys;
   }
 
   public boolean isPartitionKey(String colName) {
-    for (FieldSchema key : this.getPartCols()) {
+    for (FieldSchema key : getPartCols()) {
       if(key.getName().toLowerCase().equals(colName)) {
         return true;
       }
@@ -299,7 +303,7 @@ public class Table {
 
   //TODO merge this with getBucketCols function
   public String getBucketingDimensionId() {
-    List<String> bcols = this.getTTable().getSd().getBucketCols();
+    List<String> bcols = getTTable().getSd().getBucketCols();
     if(bcols == null || bcols.size() == 0) {
       return null;
     }
@@ -311,14 +315,6 @@ public class Table {
     return bcols.get(0);
   }
 
-  public String getSerializationFormat() {
-    return this.getTTable().getSd().getSerdeInfo().getSerializationFormat();
-  }
-
-  public void setSerializationFormat(String f) {
-    this.getTTable().getSd().getSerdeInfo().setSerializationFormat(f);
-  }
-
   /**
    * @return the tTable
    */
@@ -334,16 +330,8 @@ public class Table {
   }
 
   public void setDataLocation(URI uri2) {
-    this.uri = uri2;
-    this.getTTable().getSd().setLocation(uri2.toString());
-  }
-
-  public void setSerializationClass(String cls) {
-    this.getTTable().getSd().getSerdeInfo().setSerializationClass(cls);
-  }
-
-  public void setSerializationLib(String lib) {
-    this.getTTable().getSd().getSerdeInfo().setSerializationLib(lib);
+    uri = uri2;
+    getTTable().getSd().setLocation(uri2.toString());
   }
 
   public void setBucketCols(List<String> bucketCols) throws HiveException {
@@ -352,22 +340,18 @@ public class Table {
     }
 
     for (String col : bucketCols) {
-      if(!this.isField(col))
+      if(!isField(col))
        throw new HiveException("Bucket columns " + col + " is not part of the table columns" );
    }
-    this.getTTable().getSd().setBucketCols(bucketCols);
+    getTTable().getSd().setBucketCols(bucketCols);
   }
 
-  public void setSortCols(List<String> sortCols) throws HiveException {
-    List<Order> sortOrder = new ArrayList<Order>();
-    for (String col : sortCols) {
-      sortOrder.add(new Order(col, 1));
-    }
-    this.getTTable().getSd().setSortCols(sortOrder);
+  public void setSortCols(List<Order> sortOrder) throws HiveException {
+    getTTable().getSd().setSortCols(sortOrder);
   }
 
   private boolean isField(String col) {
-    for (FieldSchema field : this.getCols()) {
+    for (FieldSchema field : getCols()) {
       if(field.getName().equals(col)) {
         return true;
       }
@@ -376,31 +360,30 @@ public class Table {
   }
 
   public List<FieldSchema> getCols() {
-    return this.getTTable().getSd().getCols();
+    return getTTable().getSd().getCols();
   }
 
   public void setPartCols(List<FieldSchema> partCols) {
-    this.getTTable().setPartitionKeys(partCols);
+    getTTable().setPartitionKeys(partCols);
   }
 
   public String getDbName() {
-    return this.getTTable().getDatabase();
+    return getTTable().getDbName();
   }
 
   public int getNumBuckets() {
-    return this.getTTable().getSd().getNumBuckets();
+    return getTTable().getSd().getNumBuckets();
  }
 
   /**
-   * Replaces files in the partition with new data set specifed by srcf. Works by moving files
-   *
-   * @param srcf Files to be moved. Leaf Directories or Globbed File Paths
+   * Replaces files in the partition with new data set specified by srcf. Works by moving files
+   * @param srcf Files to be replaced. Leaf directories or globbed file paths
    */
   protected void replaceFiles(Path srcf) throws HiveException {
     FileSystem fs;
     try {
-      fs = FileSystem.get(this.getDataLocation(), Hive.get().getConf());
-      Hive.get().replaceFiles(srcf, new Path(this.getDataLocation().getPath()), fs);
+      fs = FileSystem.get(getDataLocation(), Hive.get().getConf());
+      Hive.get().replaceFiles(srcf, new Path(getDataLocation().getPath()), fs);
     } catch (IOException e) {
       throw new HiveException("addFiles: filesystem error in check phase", e);
     }
@@ -408,14 +391,13 @@ public class Table {
 
   /**
    * Inserts files specified into the partition. Works by moving files
-   *
-   * @param srcf Files to be moved. Leaf Directories or Globbed File Paths
+   * @param srcf Files to be moved. Leaf directories or globbed file paths
    */
   protected void copyFiles(Path srcf) throws HiveException {
     FileSystem fs;
     try {
-      fs = FileSystem.get(this.getDataLocation(), Hive.get().getConf());
-      Hive.get().copyFiles(srcf, new Path(this.getDataLocation().getPath()), fs);
+      fs = FileSystem.get(getDataLocation(), Hive.get().getConf());
+      Hive.get().copyFiles(srcf, new Path(getDataLocation().getPath()), fs);
     } catch (IOException e) {
       throw new HiveException("addFiles: filesystem error in check phase", e);
     }
@@ -423,7 +405,7 @@ public class Table {
 
   public void setInputFormatClass(String name) throws HiveException {
     try {
-      this.setInputFormatClass((Class<? extends InputFormat<WritableComparable, Writable>>)Class.forName(name));
+      setInputFormatClass((Class<? extends InputFormat<WritableComparable, Writable>>)Class.forName(name));
     } catch (ClassNotFoundException e) {
       throw new HiveException("Class not found: " + name, e);
     }
@@ -431,7 +413,7 @@ public class Table {
 
   public void setOutputFormatClass(String name) throws HiveException {
     try {
-      this.setOutputFormatClass((Class<? extends OutputFormat<WritableComparable, Writable>>)Class.forName(name));
+      setOutputFormatClass((Class<? extends OutputFormat<WritableComparable, Writable>>)Class.forName(name));
     } catch (ClassNotFoundException e) {
       throw new HiveException("Class not found: " + name, e);
     }
@@ -439,18 +421,18 @@ public class Table {
 
 
   public boolean isPartitioned() {
-    if(this.getPartCols() == null) {
+    if(getPartCols() == null) {
       return false;
     }
-    return (this.getPartCols().size() != 0);
+    return (getPartCols().size() != 0);
   }
 
   public void setFields(List<FieldSchema> fields) {
-    this.getTTable().getSd().setCols(fields);
+    getTTable().getSd().setCols(fields);
   }
 
   public void setNumBuckets(int nb) {
-    this.getTTable().getSd().setNumBuckets(nb);
+    getTTable().getSd().setNumBuckets(nb);
   }
 
   /**
@@ -493,56 +475,32 @@ public class Table {
     tTable.setRetention(retention);
   }
 
-  public String getSerializationLib() {
-    return this.getTTable().getSd().getSerdeInfo().getSerializationLib();
-  }
-
-  public String getSerializationClass() {
-    return this.getTTable().getSd().getSerdeInfo().getSerializationClass();
-  }
-
-  public void setIsCompressed(boolean b) {
-    this.getTTable().getSd().setIsCompressed(b);
-  }
-
-  public void setFieldDelim(String string) {
-    this.getTTable().getSd().getSerdeInfo().setFieldDelim(string);
+  private SerDeInfo getSerdeInfo() {
+    return getTTable().getSd().getSerdeInfo();
   }
 
-  public void setCollectionItemDelim(String string) {
-    this.getTTable().getSd().getSerdeInfo().setCollectionItemDelim(string);
-  }
-
-  public void setLineDelim(String string) {
-    this.getTTable().getSd().getSerdeInfo().setLineDelim(string);
-  }
-
-  public void setMapKeyDelim(String string) {
-    this.getTTable().getSd().getSerdeInfo().setMapKeyDelim(string);
-  }
-
-  public String getFieldDelim() {
-    return this.getTTable().getSd().getSerdeInfo().getFieldDelim();
+  public void setSerializationLib(String lib) {
+    getSerdeInfo().setSerializationLib(lib);
   }
 
-  public String getCollectionItemDelim() {
-    return this.getTTable().getSd().getSerdeInfo().getCollectionItemDelim();
+  public String getSerializationLib() {
+    return getSerdeInfo().getSerializationLib();
   }
 
-  public String getLineDelim() {
-    return this.getTTable().getSd().getSerdeInfo().getLineDelim();
+  public String getSerdeParam(String param) {
+    return getSerdeInfo().getParameters().get(param);
   }
 
-  public String getMapKeyDelim() {
-    return this.getTTable().getSd().getSerdeInfo().getMapKeyDelim();
+  public String setSerdeParam(String param, String value) {
+    return getSerdeInfo().getParameters().put(param, value);
   }
 
   public List<String> getBucketCols() {
-    return this.getTTable().getSd().getBucketCols();
+    return getTTable().getSd().getBucketCols();
   }
 
   public List<Order> getSortCols() {
-    return this.getTTable().getSd().getSortCols();
+    return getTTable().getSd().getSortCols();
   }
 
   private static void getPartPaths(FileSystem fs, Path p, Vector<String> partPaths) throws IOException {
@@ -572,14 +530,14 @@ public class Table {
   }
 
   static final Pattern pat = Pattern.compile("([^/]+)=([^/]+)");
-  protected List<Partition> getPartitionsFromHDFS() throws HiveException {
+  public List<Partition> getPartitionsFromHDFS() throws HiveException {
    ArrayList<Partition> ret = new ArrayList<Partition> ();
    FileSystem fs = null;
    Vector<String> partPaths = new Vector<String>();
 
    try {
-      fs = FileSystem.get(this.getDataLocation(), Hive.get().getConf());
-      getPartPaths(fs, new Path(this.getDataLocation().getPath()), partPaths);
+      fs = FileSystem.get(getDataLocation(), Hive.get().getConf());
+      getPartPaths(fs, new Path(getDataLocation().getPath()), partPaths);
      for(String partPath: partPaths) {
        Path tmpPath = new Path(partPath);
        if(!fs.getFileStatus(tmpPath).isDir()) {
@@ -589,11 +547,10 @@ public class Table {
       }
     } catch (IOException e) {
       LOG.error(StringUtils.stringifyException(e));
-      throw new HiveException("DB Error: Table " + this.getDataLocation() + " message: " + e.getMessage());
+      throw new HiveException("DB Error: Table " + getDataLocation() + " message: " + e.getMessage());
     }
 
     return ret;
   }
 
-
 };
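
Note (not part of the patch): a minimal sketch of how calling code uses Table after this change. It assumes the methods introduced above (getFields(), setSerdeParam(...)) plus serde2's StructField.getFieldName() accessor; the class and method names below are illustrative only, not code from this change.

```java
import java.util.Vector;

import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;

public class TableUsageSketch {
  static void describe(Table tbl) {
    // Column metadata now flows through the deserializer's ObjectInspector
    // (StructObjectInspector.getAllStructFieldRefs) rather than the removed
    // SerDe.getFields()/getFieldFromExpression() calls.
    Vector<StructField> fields = tbl.getFields();
    for (StructField field : fields) {
      System.out.println(field.getFieldName());
    }

    // Per-serde settings such as the serialization format are now generic
    // key/value parameters on SerDeInfo, replacing dedicated fields like
    // setSerializationFormat()/setFieldDelim().
    tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, "1");
  }
}
```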