
Java Constants Class Code Examples


This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde.Constants. If you have been wondering what the Constants class does, how to use it, or where to find examples of it in use, the curated code examples below should help.



The Constants class belongs to the org.apache.hadoop.hive.serde package. A total of 19 code examples of the class are shown below, sorted by popularity by default.
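
Nearly all of the examples below share one pattern: a SerDe's initialize method reads the comma-separated column names and column types that Hive supplies through the table properties, keyed by Constants.LIST_COLUMNS and Constants.LIST_COLUMN_TYPES. The following minimal sketch is not taken from any of the projects below and uses made-up column definitions; it just shows that recurring pattern in isolation:

import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ConstantsUsageSketch {
    public static void main(String[] args) {
        // Hive normally populates these properties from the table DDL; we fake them here.
        Properties tbl = new Properties();
        tbl.setProperty(Constants.LIST_COLUMNS, "id,name,score");
        tbl.setProperty(Constants.LIST_COLUMN_TYPES, "bigint,string,double");

        // The same two calls appear in almost every SerDe example below.
        List<String> colNames =
                Arrays.asList(tbl.getProperty(Constants.LIST_COLUMNS).split(","));
        List<TypeInfo> colTypes =
                TypeInfoUtils.getTypeInfosFromTypeString(tbl.getProperty(Constants.LIST_COLUMN_TYPES));

        System.out.println(colNames); // [id, name, score]
        System.out.println(colTypes); // [bigint, string, double]
    }
}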

Example 1: getNumberObjectList

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
public Object getNumberObjectList(List<String> data, String hiveType) {
  List<Object> doubleValues = new ArrayList<Object>();
  if (data == null) {
    return null;
  }
  String hiveSubType;
  if (hiveType.equals(DerivedHiveTypeConstants.DOUBLE_ARRAY_TYPE_NAME)) {
    hiveSubType = Constants.DOUBLE_TYPE_NAME;
  } else {
    hiveSubType = Constants.BIGINT_TYPE_NAME;
  }
  for (String dataElement : data) {
    doubleValues.add(getNumberObject(dataElement, hiveSubType));
  }
  return doubleValues;
}
 
Developer: awslabs, Project: emr-dynamodb-connector, Lines of code: 17, Source file: DynamoDBDataParser.java


Example 2: initialize

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
@Override
public void initialize(final Configuration conf, final Properties tbl)
		throws SerDeException {
	log.debug("conf="+conf);
	log.debug("tblProperties="+tbl);
	final String facetType = tbl.getProperty(ConfigurationUtil.SOLR_FACET_MAPPING);
	final String columnString = tbl.getProperty(ConfigurationUtil.SOLR_COLUMN_MAPPING);
	if (StringUtils.isBlank(facetType)) {
		if (StringUtils.isBlank(columnString)) {
			throw new SerDeException("No facet mapping found, using "+ ConfigurationUtil.SOLR_COLUMN_MAPPING);
		}
		final String[] columnNamesArray = ConfigurationUtil.getAllColumns(columnString);
		colNames = Arrays.asList(columnNamesArray);
		log.debug(ConfigurationUtil.SOLR_COLUMN_MAPPING+" = " + colNames);
		row = new ArrayList<Object>(columnNamesArray.length);
	} else {
		row = new ArrayList<Object>(2);
		colNames = Arrays.asList(StringUtils.split(tbl.getProperty(Constants.LIST_COLUMNS),","));
	}
	
	colTypes = TypeInfoUtils.getTypeInfosFromTypeString(tbl.getProperty(Constants.LIST_COLUMN_TYPES));
	rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(colNames, colTypes);
	rowOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(rowTypeInfo);
	log.debug("colNames="+colNames+" rowIO="+rowOI);
}
 
Developer: vroyer, Project: hive-solr-search, Lines of code: 26, Source file: SolrSerDe.java


Example 3: readColumnarStruct

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
private ColumnarStruct readColumnarStruct(BytesRefArrayWritable buff, String schema) throws SerDeException {
     Pattern pcols = Pattern.compile("[a-zA-Z_0-9]*[ ]");
     List<String> types = HiveRCSchemaUtil.parseSchemaTypes(schema);
     List<String> cols = HiveRCSchemaUtil.parseSchema(pcols, schema);

     List<FieldSchema> fieldSchemaList = new ArrayList<FieldSchema>(
         cols.size());

     for (int i = 0; i < cols.size(); i++) {
         fieldSchemaList.add(new FieldSchema(cols.get(i), HiveRCSchemaUtil
             .findPigDataType(types.get(i))));
     }

     Properties props = new Properties();

     props.setProperty(Constants.LIST_COLUMNS,
         HiveRCSchemaUtil.listToString(cols));
     props.setProperty(Constants.LIST_COLUMN_TYPES,
         HiveRCSchemaUtil.listToString(types));

     Configuration hiveConf = new HiveConf(conf, SessionState.class);
     ColumnarSerDe serde = new ColumnarSerDe();
     serde.initialize(hiveConf, props);

     return (ColumnarStruct) serde.deserialize(buff);
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines of code: 27, Source file: TestHiveColumnarStorage.java


Example 4: parseOrCreateColumnMapping

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
/**
 * Parse the column mapping from table properties. If
 * cassandra.columns.mapping is defined in the property, use it to create
 * the mapping. Otherwise, create the mapping from table columns using the
 * default mapping.
 *
 * @param tbl table properties
 * @return A list of column names
 * @throws org.apache.hadoop.hive.serde2.SerDeException
 *
 */
protected List<String> parseOrCreateColumnMapping(Properties tbl) throws SerDeException {
    String prop = tbl.getProperty(AbstractCassandraSerDe.CASSANDRA_COL_MAPPING);

    if (prop != null) {
        return parseColumnMapping(prop);
    } else {
        String tblColumnStr = tbl.getProperty(Constants.LIST_COLUMNS);

        if (tblColumnStr != null) {
            //auto-create
            String mappingStr = createColumnMappingString(tblColumnStr);

            if (LOG.isDebugEnabled()) {
                LOG.debug("table column string: " + tblColumnStr);
                LOG.debug("Auto-created mapping string: " + mappingStr);
            }

            return Arrays.asList(mappingStr.split(","));

        } else {
            throw new SerDeException("Can't find table column definitions");
        }
    }
}
 
Developer: 2013Commons, Project: hive-cassandra, Lines of code: 36, Source file: CqlSerDe.java


Example 5: parseOrCreateColumnMapping

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
/**
 * Parse the column mapping from table properties. If
 * cassandra.columns.mapping is defined in the property, use it to create
 * the mapping. Otherwise, create the mapping from table columns using the
 * default mapping.
 *
 * @param tbl table properties
 * @return A list of column names
 * @throws SerDeException
 */
protected List<String> parseOrCreateColumnMapping(Properties tbl) throws SerDeException {
    String prop = tbl.getProperty(AbstractCassandraSerDe.CASSANDRA_COL_MAPPING);

    if (prop != null) {
        return parseColumnMapping(prop);
    } else {
        String tblColumnStr = tbl.getProperty(Constants.LIST_COLUMNS);

        if (tblColumnStr != null) {
            //auto-create
            String mappingStr = createColumnMappingString(tblColumnStr);

            if (LOG.isDebugEnabled()) {
                LOG.debug("table column string: " + tblColumnStr);
                LOG.debug("Auto-created mapping string: " + mappingStr);
            }

            return Arrays.asList(mappingStr.split(","));

        } else {
            throw new SerDeException("Can't find table column definitions");
        }
    }
}
 
Developer: 2013Commons, Project: hive-cassandra, Lines of code: 35, Source file: CassandraColumnSerDe.java


Example 6: getCassandraColumnFamily

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
/**
 * Parse cassandra column family name from table properties.
 *
 * @param tbl table properties
 * @return cassandra column family name
 * @throws SerDeException error parsing column family name
 */
protected String getCassandraColumnFamily(Properties tbl) throws SerDeException {
  String result = tbl.getProperty(CASSANDRA_CF_NAME);

  if (result == null) {

    result = tbl
        .getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME);

    if (result == null) {
      throw new SerDeException("CassandraColumnFamily not defined" + tbl.toString());
    }

    if (result.indexOf(".") != -1) {
      result = result.substring(result.indexOf(".") + 1);
    }
  }

  return result;
}
 
Developer: dvasilen, Project: Hive-Cassandra, Lines of code: 27, Source file: AbstractColumnSerDe.java
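
To make the fallback concrete, here is a hedged usage sketch (it assumes, as in the project above, that CASSANDRA_CF_NAME is the serde property key and that this protected method is reachable, e.g. from a subclass):

Properties tbl = new Properties();
// No CASSANDRA_CF_NAME property is set, so the method falls back to the
// metastore table name and strips everything up to the first '.':
tbl.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "mykeyspace.users");
String cf = getCassandraColumnFamily(tbl); // returns "users"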


Example 7: parseOrCreateColumnMapping

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
/**
 * Parse the column mapping from table properties. If cassandra.columns.mapping
 * is defined in the property, use it to create the mapping. Otherwise, create the mapping from table
 * columns using the default mapping.
 *
 * @param tbl table properties
 * @return A list of column names
 * @throws SerDeException
 */
protected List<String> parseOrCreateColumnMapping(Properties tbl) throws SerDeException {
  String prop = tbl.getProperty(CASSANDRA_COL_MAPPING);

  if (prop != null) {
    return parseColumnMapping(prop);
  } else {
    String tblColumnStr = tbl.getProperty(Constants.LIST_COLUMNS);

    if (tblColumnStr != null) {
      //auto-create
      String mappingStr = createColumnMappingString(tblColumnStr);

      if (LOG.isDebugEnabled()) {
        LOG.debug("table column string: " + tblColumnStr);
        LOG.debug("Auto-created mapping string: " + mappingStr);
      }

      return Arrays.asList(mappingStr.split(","));

    } else {
      throw new SerDeException("Can't find table column definitions");
    }
  }
}
 
Developer: dvasilen, Project: Hive-Cassandra, Lines of code: 34, Source file: AbstractColumnSerDe.java


Example 8: getNumberObject

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
public Object getNumberObject(String data, String hiveType) {
  if (data != null) {
    if (hiveType.equals(Constants.BIGINT_TYPE_NAME)) {
      return Long.parseLong(data);
    } else {
      return Double.parseDouble(data);
    }
  } else {
    return null;
  }
}
 
Developer: awslabs, Project: emr-dynamodb-connector, Lines of code: 12, Source file: DynamoDBDataParser.java
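
A quick, hedged usage sketch for the DynamoDBDataParser helpers shown in Examples 1 and 8 (it assumes the class has an accessible no-argument constructor):

DynamoDBDataParser parser = new DynamoDBDataParser();
Object asLong   = parser.getNumberObject("42", Constants.BIGINT_TYPE_NAME);  // -> Long 42
Object asDouble = parser.getNumberObject("3.5", Constants.DOUBLE_TYPE_NAME); // any non-bigint type takes the double branch -> Double 3.5
Object missing  = parser.getNumberObject(null, Constants.BIGINT_TYPE_NAME);  // null in, null out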


Example 9: initialize

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
@Override
public void initialize(Configuration conf, Properties tblProperties) throws SerDeException {
  colNames = Arrays.asList(tblProperties.getProperty(Constants.LIST_COLUMNS).split(","));
  colTypes = TypeInfoUtils.getTypeInfosFromTypeString(tblProperties.getProperty(Constants.LIST_COLUMN_TYPES));
  typeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(colNames, colTypes);
  inspector = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
  row = new ArrayList<>();
  enableFieldMapping = Boolean.valueOf(tblProperties.getProperty(ENABLE_FIELD_MAPPING, "false"));
}
 
Developer: lucidworks, Project: hive-solr, Lines of code: 10, Source file: LWSerDe.java


Example 10: initialize

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
@Override
public void initialize(Configuration conf, Properties tblProperties) throws SerDeException {

  colNames = Arrays.asList(tblProperties.getProperty(Constants.LIST_COLUMNS).split(","));
  colTypes = TypeInfoUtils
      .getTypeInfosFromTypeString(tblProperties.getProperty(Constants.LIST_COLUMN_TYPES));
  typeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(colNames, colTypes);
  inspector = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
  row = new ArrayList<>();
}
 
Developer: lucidworks, Project: hive-solr, Lines of code: 11, Source file: LWSerDe.java


Example 11: createTable

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
public void createTable() throws TException {
    HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(hiveLocalMetaStore.getHiveConf());

    LOG.info("HIVE: Dropping hive table: " + propertyParser.getProperty(ConfigVars.HIVE_BOLT_TABLE_KEY));
    hiveClient.dropTable(propertyParser.getProperty(ConfigVars.HIVE_BOLT_DATABASE_KEY),
            propertyParser.getProperty(ConfigVars.HIVE_BOLT_TABLE_KEY),
            true, true);

    // Define the cols
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("id", Constants.INT_TYPE_NAME, ""));
    cols.add(new FieldSchema("msg", Constants.STRING_TYPE_NAME, ""));

    // Values for the StorageDescriptor
    String location = new File(propertyParser.getProperty(
            ConfigVars.HIVE_TEST_TABLE_LOCATION_KEY)).getAbsolutePath();
    String inputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat";
    String outputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat";
    int numBuckets = 16;
    Map<String,String> orcProps = new HashMap<String, String>();
    orcProps.put("orc.compress", "NONE");
    SerDeInfo serDeInfo = new SerDeInfo(OrcSerde.class.getSimpleName(), OrcSerde.class.getName(), orcProps);
    List<String> bucketCols = new ArrayList<String>();
    bucketCols.add("id");

    // Build the StorageDescriptor
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(cols);
    sd.setLocation(location);
    sd.setInputFormat(inputFormat);
    sd.setOutputFormat(outputFormat);
    sd.setNumBuckets(numBuckets);
    sd.setSerdeInfo(serDeInfo);
    sd.setBucketCols(bucketCols);
    sd.setSortCols(new ArrayList<Order>());
    sd.setParameters(new HashMap<String, String>());

    // Define the table
    Table tbl = new Table();
    tbl.setDbName(propertyParser.getProperty(ConfigVars.HIVE_BOLT_DATABASE_KEY));
    tbl.setTableName(propertyParser.getProperty(ConfigVars.HIVE_BOLT_TABLE_KEY));
    tbl.setSd(sd);
    tbl.setOwner(System.getProperty("user.name"));
    tbl.setParameters(new HashMap<String, String>());
    tbl.setViewOriginalText("");
    tbl.setViewExpandedText("");
    tbl.setTableType(TableType.MANAGED_TABLE.name());
    List<FieldSchema> partitions = new ArrayList<FieldSchema>();
    partitions.add(new FieldSchema("dt", Constants.STRING_TYPE_NAME, ""));
    tbl.setPartitionKeys(partitions);

    // Create the table
    hiveClient.createTable(tbl);

    // Describe the table
    Table createdTable = hiveClient.getTable(propertyParser.getProperty(ConfigVars.HIVE_BOLT_DATABASE_KEY),
            propertyParser.getProperty(ConfigVars.HIVE_BOLT_TABLE_KEY));
    LOG.info("HIVE: Created Table: " + createdTable.toString());
}
 
Developer: sakserv, Project: storm-topology-examples, Lines of code: 60, Source file: KafkaHiveHdfsTopologyTest.java
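
The column definitions above use the primitive type-name constants from Constants. To the best of my knowledge these resolve to the plain lowercase Hive type strings, so the FieldSchema calls could equivalently be written with literals (a hedged note; verify the values against your Hive version):

// Believed equivalences: Constants.INT_TYPE_NAME = "int", Constants.STRING_TYPE_NAME = "string"
cols.add(new FieldSchema("id", "int", ""));
cols.add(new FieldSchema("msg", "string", ""));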


Example 12: SolrReader

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
public SolrReader(JobConf conf, SolrSplit split) throws IOException {
	log.debug("jobConf=" + conf);

	List<Integer> readColIDs = getReadColumnIDs(conf);
	facetMapping = conf.get(ConfigurationUtil.SOLR_FACET_MAPPING);
	if (StringUtils.isBlank(facetMapping)) {
		String columnString = conf.get(ConfigurationUtil.SOLR_COLUMN_MAPPING);
		if (StringUtils.isBlank(columnString)) {
			throw new IOException("no column mapping found!");
		}
		solrColumns = ConfigurationUtil.getAllColumns(columnString);
		if (readColIDs.size() > solrColumns.length) {
			throw new IOException("read column count larger than that in column mapping string!");
		}
	} else {
		if (readColIDs.size() != 2) {
			throw new IOException("read column should be 2 with facet mapping");
		}
		solrColumns = conf.get(Constants.LIST_COLUMNS).split(",");
	}

	if (conf.get(Constants.LIST_COLUMNS) != null) {
		hiveColNames = Arrays.asList(StringUtils.split(conf.get(Constants.LIST_COLUMNS), ","));
	}

	if (conf.get(Constants.LIST_COLUMN_TYPES) != null) {
		hiveColTypes = TypeInfoUtils.getTypeInfosFromTypeString(conf.get(Constants.LIST_COLUMN_TYPES));
		for (TypeInfo ti : hiveColTypes) {
			rowOI.add(TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(ti));
		}
	}

	this.split = split;
	SolrTable table = new SolrTable(conf);
	cursor = table.getCursor((int) split.getStart(), (int) split.getLength());
}
 
Developer: vroyer, Project: hive-solr-search, Lines of code: 37, Source file: SolrReader.java


Example 13: initialize

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
@Override
public void initialize(final Configuration conf, final Properties tbl) throws SerDeException {
    final String columnString = HiveCassandraUtils.getPropertyValue(tbl, HiveCassandraUtils.COLUMN_MAPPINGS, Constants.LIST_COLUMNS);
    if (!Strings.isNullOrEmpty(columnString))
        columnNames = Arrays.asList(columnString.split("[,:;]"));
    LOG.debug("column names in hive table: {}", columnNames);
    fieldCount = columnNames.size();

    String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
    if (!Strings.isNullOrEmpty(columnTypeProperty))
        columnTypesArray = Arrays.asList(columnTypeProperty.split("[,:;]"));
    LOG.debug("column types in hive table: {}", columnTypesArray);

    final List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(fieldCount);
    for (int i = 0; i < fieldCount; i++) {
        if (HIVE_TYPE_INT.equalsIgnoreCase(columnTypesArray.get(i))) {
            fieldOIs.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        } else if (Cql3SerDe.HIVE_TYPE_SMALLINT.equalsIgnoreCase(columnTypesArray.get(i))) {
            fieldOIs.add(PrimitiveObjectInspectorFactory.javaShortObjectInspector);
        } else if (Cql3SerDe.HIVE_TYPE_TINYINT.equalsIgnoreCase(columnTypesArray.get(i))) {
            fieldOIs.add(PrimitiveObjectInspectorFactory.javaByteObjectInspector);
        } else if (Cql3SerDe.HIVE_TYPE_BIGINT.equalsIgnoreCase(columnTypesArray.get(i))) {
            fieldOIs.add(PrimitiveObjectInspectorFactory.javaLongObjectInspector);
        } else if (Cql3SerDe.HIVE_TYPE_BOOLEAN.equalsIgnoreCase(columnTypesArray.get(i))) {
            fieldOIs.add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
        } else if (Cql3SerDe.HIVE_TYPE_FLOAT.equalsIgnoreCase(columnTypesArray.get(i))) {
            fieldOIs.add(PrimitiveObjectInspectorFactory.javaFloatObjectInspector);
        } else if (Cql3SerDe.HIVE_TYPE_DOUBLE.equalsIgnoreCase(columnTypesArray.get(i))) {
            fieldOIs.add(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
        } else {
            // treat as string
            fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        }
    }
    objectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, fieldOIs);
    row = new ArrayList<Object>(fieldCount);
}
 
Developer: kernel164, Project: hive-cassandra-dsc, Lines of code: 38, Source file: Cql3SerDe.java


Example 14: setup

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
/**
 * Does the configuration setup and schema parsing and setup.
 *
 * @param table_schema
 *            String
 */
private void setup(String table_schema) {

    if (table_schema == null)
        throw new RuntimeException(
                "The table schema must be defined as colname type, colname type. All types are hive types");

    // create basic configuration for hdfs and hive
    conf = new Configuration();
    hiveConf = new HiveConf(conf, SessionState.class);

    // parse the table_schema string
    List<String> types = HiveRCSchemaUtil.parseSchemaTypes(table_schema);
    List<String> cols = HiveRCSchemaUtil.parseSchema(pcols, table_schema);

    List<FieldSchema> fieldSchemaList = new ArrayList<FieldSchema>(cols.size());

    for (int i = 0; i < cols.size(); i++) {
        fieldSchemaList.add(new FieldSchema(cols.get(i),
                HiveRCSchemaUtil.findPigDataType(types.get(i))));
    }

    pigSchema = new ResourceSchema(new Schema(fieldSchemaList));

    props = new Properties();

    // setting table schema properties for ColumnarSerDe
    // these properties are never changed by the columns-to-read filter,
    // because the columnar serde needs to know the
    // complete format of each record.
    props.setProperty(Constants.LIST_COLUMNS,
            HiveRCSchemaUtil.listToString(cols));
    props.setProperty(Constants.LIST_COLUMN_TYPES,
            HiveRCSchemaUtil.listToString(types));
}
 
Developer: sigmoidanalytics, Project: spork-streaming, Lines of code: 45, Source file: HiveColumnarLoader.java


Example 15: setup

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
/**
 * Does the configuration setup and schema parsing and setup.
 *
 * @param table_schema
 *            String
 */
private void setup(String table_schema) {

    if (table_schema == null)
        throw new RuntimeException(
                "The table schema must be defined as colname type, colname type. All types are hive types");

    // create basic configuration for hdfs and hive
    conf = new Configuration();
    hiveConf = new HiveConf(conf, SessionState.class);

    // parse the table_schema string
    List<String> types = HiveRCSchemaUtil.parseSchemaTypes(table_schema);
    List<String> cols = HiveRCSchemaUtil.parseSchema(pcols, table_schema);

    List<FieldSchema> fieldSchemaList = new ArrayList<FieldSchema>(cols.size());

    for (int i = 0; i < cols.size(); i++) {
        fieldSchemaList.add(new FieldSchema(cols.get(i),
                HiveRCSchemaUtil.findPigDataType(types.get(i))));
    }

    pigSchema = new ResourceSchema(new Schema(fieldSchemaList));

    props = new Properties();

    // setting table schema properties for ColumnarSerDe
    // these properties are never changed by the columns-to-read filter,
    // because the columnar serde needs to know the
    // complete format of each record.
    props.setProperty(Constants.LIST_COLUMNS,
            HiveRCSchemaUtil.listToString(cols));
    props.setProperty(Constants.LIST_COLUMN_TYPES,
            HiveRCSchemaUtil.listToString(types));
}
 
Developer: kaituo, Project: sedge, Lines of code: 45, Source file: HiveColumnarLoader.java


Example 16: initCassandraSerDeParameters

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
/**
 * Initialize the cassandra serialization and deserialization parameters
 * from table properties and configuration.
 *
 * @param job
 * @param tbl
 * @param serdeName
 * @throws org.apache.hadoop.hive.serde2.SerDeException
 *
 */
@Override
protected void initCassandraSerDeParameters(Configuration job, Properties tbl, String serdeName)
        throws SerDeException {
    cassandraKeyspace = parseCassandraKeyspace(tbl);
    cassandraColumnFamily = parseCassandraColumnFamily(tbl);
    cassandraColumnNames = parseOrCreateColumnMapping(tbl);

    cassandraColumnNamesText = new ArrayList<Text>();
    for (String columnName : cassandraColumnNames) {
        cassandraColumnNamesText.add(new Text(columnName));
    }

    serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);

    validatorType = parseOrCreateValidatorType(tbl);

    setTableMapping();

    if (cassandraColumnNames.size() != serdeParams.getColumnNames().size()) {
        throw new SerDeException(serdeName + ": columns has "
                + serdeParams.getColumnNames().size()
                + " elements while cassandra.columns.mapping has "
                + cassandraColumnNames.size() + " elements"
                + " (counting the key if implicit)");
    }

    // we just can make sure that "StandardColumn:" is mapped to MAP<String,?>
    for (int i = 0; i < cassandraColumnNames.size(); i++) {
        String cassandraColName = cassandraColumnNames.get(i);
        if (cassandraColName.endsWith(":")) {
            TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
            if ((typeInfo.getCategory() != Category.MAP)
                    || (((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName()
                    != Constants.STRING_TYPE_NAME)) {

                throw new SerDeException(
                        serdeName + ": Cassandra column family '"
                        + cassandraColName
                        + "' should be mapped to map<string,?> but is mapped to "
                        + typeInfo.getTypeName());
            }
        }
    }
}
 
Developer: 2013Commons, Project: hive-cassandra, Lines of code: 55, Source file: CqlSerDe.java


Example 17: initCassandraSerDeParameters

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
/**
 * Initialize the cassandra serialization and deserialization parameters
 * from table properties and configuration.
 *
 * @param job
 * @param tbl
 * @param serdeName
 * @throws SerDeException
 */
@Override
protected void initCassandraSerDeParameters(Configuration job, Properties tbl, String serdeName)
        throws SerDeException {
    cassandraKeyspace = parseCassandraKeyspace(tbl);
    cassandraColumnFamily = parseCassandraColumnFamily(tbl);
    cassandraColumnNames = parseOrCreateColumnMapping(tbl);

    cassandraColumnNamesBytes = new ArrayList<BytesWritable>();
    for (String columnName : cassandraColumnNames) {
        cassandraColumnNamesBytes.add(new BytesWritable(columnName.getBytes()));
    }

    iKey = cassandraColumnNames.indexOf(CassandraColumnSerDe.CASSANDRA_KEY_COLUMN);

    serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);

    validatorType = parseOrCreateValidatorType(tbl);

    setTableMapping();

    if (cassandraColumnNames.size() != serdeParams.getColumnNames().size()) {
        throw new SerDeException(serdeName + ": columns has "
                + serdeParams.getColumnNames().size()
                + " elements while cassandra.columns.mapping has "
                + cassandraColumnNames.size() + " elements"
                + " (counting the key if implicit)");
    }

    // we just can make sure that "StandardColumn:" is mapped to MAP<String,?>
    for (int i = 0; i < cassandraColumnNames.size(); i++) {
        String cassandraColName = cassandraColumnNames.get(i);
        if (cassandraColName.endsWith(":")) {
            TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
            if ((typeInfo.getCategory() != Category.MAP)
                    || (((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName()
                    != Constants.STRING_TYPE_NAME)) {

                throw new SerDeException(
                        serdeName + ": Cassandra column family '"
                        + cassandraColName
                        + "' should be mapped to map<string,?> but is mapped to "
                        + typeInfo.getTypeName());
            }
        }
    }
}
 
Developer: 2013Commons, Project: hive-cassandra, Lines of code: 56, Source file: CassandraColumnSerDe.java


Example 18: initCassandraSerDeParameters

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
/**
 * Initialize the cassandra serialization and deserialization parameters from table properties and configuration.
 *
 * @param job
 * @param tbl
 * @param serdeName
 * @throws SerDeException
 */
@Override
protected void initCassandraSerDeParameters(Configuration job, Properties tbl, String serdeName)
    throws SerDeException {
  cassandraColumnFamily = getCassandraColumnFamily(tbl);
  cassandraColumnNames = parseOrCreateColumnMapping(tbl);

  cassandraColumnNamesBytes = new ArrayList<BytesWritable>();
  for (String columnName : cassandraColumnNames) {
    cassandraColumnNamesBytes.add(new BytesWritable(columnName.getBytes()));
  }

  iKey = cassandraColumnNames.indexOf(AbstractColumnSerDe.CASSANDRA_KEY_COLUMN);

  serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);

  validatorType = parseOrCreateValidatorType(tbl);

  setTableMapping();

  if (cassandraColumnNames.size() != serdeParams.getColumnNames().size()) {
    throw new SerDeException(serdeName + ": columns has " +
        serdeParams.getColumnNames().size() +
        " elements while cassandra.columns.mapping has " +
        cassandraColumnNames.size() + " elements" +
        " (counting the key if implicit)");
  }

  // we just can make sure that "StandardColumn:" is mapped to MAP<String,?>
  for (int i = 0; i < cassandraColumnNames.size(); i++) {
    String cassandraColName = cassandraColumnNames.get(i);
    if (cassandraColName.endsWith(":")) {
      TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
      if ((typeInfo.getCategory() != Category.MAP) ||
          (((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName()
              != Constants.STRING_TYPE_NAME)) {

        throw new SerDeException(
            serdeName + ": Cassandra column family '"
                + cassandraColName
                + "' should be mapped to map<string,?> but is mapped to "
                + typeInfo.getTypeName());
      }
    }
  }
}
 
Developer: dvasilen, Project: Hive-Cassandra, Lines of code: 54, Source file: CassandraColumnSerDe.java
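
A side note on the type check repeated verbatim in Examples 16 through 18 (my observation, not from the original projects): comparing the map key's type name to Constants.STRING_TYPE_NAME with != relies on the returned type name being the interned constant; an equals-based comparison is the defensive equivalent:

// Defensive form of the reference comparison used in the examples above:
boolean isStringKey = Constants.STRING_TYPE_NAME.equals(
        ((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName());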


Example 19: initialize

import org.apache.hadoop.hive.serde.Constants; // import the required package/class
/**
 * An initialization function used to gather information about the table.
 * Typically, a SerDe implementation will be interested in the list of
 * column names and their types. That information will be used to help
 * perform actual serialization and deserialization of data.
 */
//@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
    if (Log.isDebugEnabled())
        SpliceLogUtils.debug(Log, "initialize with conf=%s, tbl=%s", conf, tbl);
    // Get a list of the table's column names.
    tableName = tbl.getProperty(MRConstants.SPLICE_TABLE_NAME);
    String hbaseDir = null;
    if (conf != null) {
        hbaseDir = conf.get(HConstants.HBASE_DIR);
    }
    if (hbaseDir == null)
        hbaseDir = System.getProperty(HConstants.HBASE_DIR);
    if (hbaseDir == null)
        throw new SerDeException("hbase root directory not set, please include hbase.rootdir in config or via -D system property ...");
    if (conf != null) {
        conf.set(MRConstants.SPLICE_INPUT_TABLE_NAME, tableName);
        conf.set(MRConstants.SPLICE_JDBC_STR, tbl.getProperty(MRConstants.SPLICE_JDBC_STR));
        conf.set(HConstants.HBASE_DIR, hbaseDir);
        if (conf.get(HiveConf.ConfVars.POSTEXECHOOKS.varname) == null) {
            conf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "com.splicemachine.mrio.api.hive.PostExecHook");
        }
        if (conf.get(HiveConf.ConfVars.ONFAILUREHOOKS.varname) == null) {
            conf.set(HiveConf.ConfVars.ONFAILUREHOOKS.varname, "com.splicemachine.mrio.api.hive.FailureExecHook");
        }
    }

    if (sqlUtil == null)
        sqlUtil = SMSQLUtil.getInstance(tbl.getProperty(MRConstants.SPLICE_JDBC_STR));
    String colNamesStr = tbl.getProperty(Constants.LIST_COLUMNS);
    colNames.clear();
    for (String split : colNamesStr.split(","))
        colNames.add(split.toUpperCase());
    String colTypesStr = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
    colTypes = TypeInfoUtils.getTypeInfosFromTypeString(colTypesStr);
    objectCache = new ArrayList<Object>(colTypes.size());
    if (tableName != null) {
        tableName = tableName.trim().toUpperCase();
        try {
            if (!sqlUtil.checkTableExists(tableName))
                throw new SerDeException(String.format("table %s does not exist...", tableName));
            if (conf != null) {
                ScanSetBuilder tableScannerBuilder = sqlUtil.getTableScannerBuilder(tableName, colNames);
                conf.set(MRConstants.SPLICE_SCAN_INFO, tableScannerBuilder.base64Encode());

                // TableContext tableContext = sqlUtil.createTableContext(tableName, tableScannerBuilder);
                // conf.set(MRConstants.SPLICE_TBLE_CONTEXT, tableContext.getTableContextBase64String());
            }
        } catch (Exception e) {
            throw new SerDeException(e);
        }
    }

    if (Log.isDebugEnabled())
        SpliceLogUtils.debug(Log, "generating hive info colNames=%s, colTypes=%s", colNames, colTypes);

    rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(colNames, colTypes);
    rowOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(rowTypeInfo);
    //serdeParams = LazySimpleSerDe.initSerdeParams(conf, tbl, getClass().getName());
    Log.info("--------Finished initialize");
}
 
Developer: splicemachine, Project: spliceengine, Lines of code: 68, Source file: SMSerDe.java



Note: The org.apache.hadoop.hive.serde.Constants examples in this article were collected from source-code and documentation platforms such as GitHub/MSDocs. The code snippets were selected from open-source projects contributed by their developers, and copyright remains with the original authors. Refer to each project's license before distributing or using the code; do not republish without permission.

