# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Name=Hive
name=hive
version=0.13.0-SNAPSHOT
hcatalog.version=${version}
year=2012
javac.debug=on
javac.optimize=on
javac.deprecation=off
javac.args=
javac.args.warnings=
hadoop-0.20.version=0.20.2
hadoop-0.20S.version=1.1.2
hadoop-0.23.version=2.0.5-alpha
hadoop.version=${hadoop-0.20.version}
hadoop.security.version=${hadoop-0.20S.version}
# Used to determine which set of Hadoop artifacts we depend on:
# - 20/20S: hadoop-core, hadoop-test
# - 23: hadoop-common, hadoop-mapreduce-*, etc.
hadoop.mr.rev=20S
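# Any of these properties can be overridden on the Ant command line with
# -Dname=value (command-line definitions take precedence over property files
# in Ant). For example, to build against the 0.23 artifact set (a sketch;
# match the version to your tree):
#   ant clean package -Dhadoop.mr.rev=23 -Dhadoop.version=2.0.5-alpha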
build.dir.hive=${hive.root}/build
build.dir.hadoop=${build.dir.hive}/hadoopcore
hadoop.version.ant-internal=${hadoop.version}
hadoop.root.default=${build.dir.hadoop}/hadoop-${hadoop.version.ant-internal}
hadoop.root=${hadoop.root.default}
test.hadoop.bin.path=${hive.root}/testutils/hadoop
java.net.preferIPv4Stack=true
# Newer versions of Hadoop name the jars hadoop-{core,test}-VERSION instead of hadoop-VERSION-{core,test}.
# Both styles are added to the classpath; whichever is present gets picked up.
hadoop.oldstyle-name.jar=${hadoop.root}/hadoop-${hadoop.version.ant-internal}-core.jar
hadoop.oldstyle-name.tools.jar=${hadoop.root}/hadoop-${hadoop.version.ant-internal}-tools.jar
hadoop.oldstyle-name.test.jar=${hadoop.root}/hadoop-${hadoop.version.ant-internal}-test.jar
hadoop.newstyle-name.jar=${hadoop.root}/hadoop-core-${hadoop.version.ant-internal}.jar
hadoop.newstyle-name.test.jar=${hadoop.root}/hadoop-test-${hadoop.version.ant-internal}.jar
hadoop.newstyle-name.tools.jar=${hadoop.root}/hadoop-tools-${hadoop.version.ant-internal}.jar
# The following are used for versions of Hadoop that are split into separate jars;
# they are ignored if not present.
hadoop.common.jar=${hadoop.root}/share/hadoop/common/hadoop-common-${hadoop.version.ant-internal}.jar
hadoop.common.test.jar=${hadoop.root}/share/hadoop/common/hadoop-common-${hadoop.version.ant-internal}-tests.jar
hadoop.hdfs.jar=${hadoop.root}/share/hadoop/hdfs/hadoop-hdfs-${hadoop.version.ant-internal}.jar
hadoop.hdfs.test.jar=${hadoop.root}/share/hadoop/hdfs/hadoop-hdfs-${hadoop.version.ant-internal}-tests.jar
hadoop.mapreduce.jar=${hadoop.root}/modules/hadoop-mapreduce-client-core-${hadoop.version.ant-internal}.jar
hadoop.mapreduce.test.jar=${hadoop.root}/hadoop-mapreduce-test-${hadoop.version.ant-internal}.jar
hadoop.mapreduce.tools.jar=${hadoop.root}/hadoop-mapreduce-tools-${hadoop.version.ant-internal}.jar
jetty.test.jar=${hadoop.root}/lib/jetty-5.1.4.jar
servlet.test.jar=${hadoop.root}/lib/servlet-api.jar
jasper.test.jar=${hadoop.root}/lib/jetty-ext/jasper-runtime.jar
jasperc.test.jar=${hadoop.root}/lib/jetty-ext/jasper-compiler.jar
jsp.test.jar=${hadoop.root}/lib/jetty-ext/jsp-api.jar
common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
# module names needed by the build process (see the iteration sketch after the profiles)
# full profile
iterate.hive.full.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
iterate.hive.full.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
iterate.hive.full.tests=ql,contrib,hbase-handler,hwi,jdbc,beeline,metastore,odbc,serde,service,hcatalog
iterate.hive.full.thrift=ql,service,metastore,serde
iterate.hive.full.protobuf=ql
iterate.hive.full.cpp=odbc
# no hcatalog profile
iterate.hive.nohcat.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils
iterate.hive.nohcat.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils
iterate.hive.nohcat.tests=ql,contrib,hbase-handler,hwi,jdbc,beeline,metastore,odbc,serde,service
iterate.hive.nohcat.thrift=ql,service,metastore,serde
iterate.hive.nohcat.protobuf=ql
iterate.hive.nohcat.cpp=odbc
# core profile
iterate.hive.core.all=ant,shims,common,serde,metastore,ql,cli
iterate.hive.core.modules=shims,common,serde,metastore,ql,cli
iterate.hive.core.tests=ql
iterate.hive.core.thrift=ql
iterate.hive.core.protobuf=ql
iterate.hive.core.cpp=
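# These comma-separated lists drive per-module iteration in the top-level
# build file. A minimal sketch of the pattern, assuming the ant-contrib
# <for> task (the real iteration lives in build.xml):
#   <for list="${iterate.hive.core.modules}" param="module">
#     <sequential>
#       <ant dir="@{module}" target="jar"/>
#     </sequential>
#   </for>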
#
# Test Properties
#
# Cancel an individual test if it does not finish in the given time
# (measured in milliseconds). Ignored if fork is disabled. When running
# multiple tests inside the same Java VM (see forkMode), the timeout
# applies to the total time all tests take together, not to an individual test.
test.junit.timeout=86400000
# Use this property to selectively disable tests from the command line:
# ant test -Dtest.junit.exclude="**/TestCliDriver.class"
# ant test -Dtest.junit.exclude="**/Test*CliDriver.class,**/TestPartitions.class"
test.junit.exclude=
test.continue.on.failure=true
test.submodule.exclude=
test.junit.maxmemory=512m
test.concurrency.num.threads=1
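# The test properties can likewise be overridden per run, e.g. (illustrative
# values, not recommendations):
#   ant test -Dtest.junit.maxmemory=1024m -Dtest.concurrency.num.threads=4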
#test.beelinepositive.exclude=add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rename.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_overwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q
#
# Ivy Properties
#
build.ivy.dir=${build.dir.hive}/ivy
build.ivy.lib.dir=${build.ivy.dir}/lib
build.ivy.report.dir=${build.ivy.dir}/report
build.ivy.maven.dir=${build.ivy.dir}/maven
ivy.conf.dir=${hive.root}/ivy
ivy.version=2.3.0
ivy.jar=${build.ivy.lib.dir}/ivy-${ivy.version}.jar
ivy.changingPattern=.*SNAPSHOT
ivy.publish.pattern=[artifact]-[revision].[ext]
ivy.artifact.retrieve.pattern=[conf]/[artifact]-[revision](-[classifier]).[ext]
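# With Ivy's pattern tokens, the retrieve pattern above resolves to, e.g.,
# default/hadoop-core-1.1.2.jar; the (-[classifier]) group is dropped for
# artifacts without a classifier.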
ivysettings.xml=${ivy.conf.dir}/ivysettings.xml
ivyresolvelog=default
ivy.mvn.repo=http://repo2.maven.org/maven2
ivy_repo_url=${ivy.mvn.repo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar
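# With the defaults above, ivy_repo_url expands to
# http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.3.0/ivy-2.3.0.jar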
hive.ivy.org=org.apache.hive
mvn.publish.repo=snapshots
mvn.jar.dir=${build.dir.hive}/maven/jars
mvn.pom.dir=${build.dir.hive}/maven/poms
mvn.license.dir=${build.dir.hive}/maven/licenses
mvn.deploy.id=apache.snapshots.https
mvn.deploy.url=https://repository.apache.org/content/repositories/snapshots
mvn.staging.repo.id=apache.staging.https
mvn.staging.repo.url=https://repository.apache.org/service/local/staging/deploy/maven2
#
# Data nucleus repository - needed for jdo2-api-2.3-ec.jar download
#
datanucleus.repo=http://www.datanucleus.org/downloads/maven2
# JVM arguments
jvm.args=-XX:-UseSplitVerifier
# JUnit JVM arguments
junit.jvm.args=-XX:-UseSplitVerifier -XX:+CMSClassUnloadingEnabled -XX:MaxPermSize=128M
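# Note: -XX:MaxPermSize and -XX:-UseSplitVerifier are pre-Java-8 flags; Java 8
# and later JVMs ignore them with a warning.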
#
# Eclipse Properties
#
# JVM arguments for Eclipse launch configurations
eclipse.launch.jvm.args=-Xms256m -Xmx1024m ${jvm.args}