Commit f3da34d

Author: Ning Zhang (committed)
HIVE-1475. .gitignore files being placed in test warehouse directories causing build failure (Joydeep Sen Sarma via Ning Zhang)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hive/trunk@966909 13f79535-47bb-0310-9956-ffa450edef68
1 parent: c4bfdd8

File tree

444 files changed (+2804, -8494 lines)

Diff for: CHANGES.txt (+4)

@@ -79,6 +79,10 @@ Trunk - Unreleased
     HIVE-1464. improve test query performance
     (Joydeep Sen Sarma via Ning Zhang)

+    HIVE-1475. .gitignore files being placed in test warehouse directories
+    causing build failure
+    (Joydeep Sen Sarma via Ning Zhang)
+
 Release 0.6.0 - Unreleased

   INCOMPATIBLE CHANGES

Diff for: build-common.xml (+5, -8)

@@ -58,7 +58,7 @@
   <path id="test.classpath">
     <pathelement location="${test.build.classes}" />
     <pathelement location="" />
-    <pathelement location="${test.data.dir}/conf"/>
+    <pathelement location="${test.src.data.dir}/conf"/>
     <pathelement location="${hive.conf.dir}"/>
     <fileset dir="${hive.root}" includes="testlibs/*.jar"/>
     <path refid="classpath"/>
@@ -254,11 +254,8 @@
     <mkdir dir="${test.log.dir}/clientnegative"/>
     <mkdir dir="${test.log.dir}/positive"/>
     <mkdir dir="${test.log.dir}/negative"/>
-    <copy todir="${test.data.dir}">
-      <fileset dir="${test.src.data.dir}">
-        <exclude name="**/.svn"/>
-      </fileset>
-    </copy>
+    <mkdir dir="${test.data.dir}/warehouse"/>
+    <mkdir dir="${test.data.dir}/metadb"/>
   </target>

   <target name="setup"/>
@@ -407,11 +404,11 @@
     <jvmarg value="-Xdebug"/>
     <jvmarg value="-Xrunjdwp:transport=dt_socket,address=8000,server=y,suspend=y"/> -->
     <env key="HADOOP_HOME" value="${hadoop.root}"/>
-    <env key="HADOOP_CLASSPATH" value="${test.data.dir}/conf"/>
+    <env key="HADOOP_CLASSPATH" value="${test.src.data.dir}/conf"/>
     <env key="TZ" value="US/Pacific"/>
     <sysproperty key="test.output.overwrite" value="${overwrite}"/>
     <sysproperty key="test.service.standalone.server" value="${standalone}"/>
-    <sysproperty key="log4j.configuration" value="file://${test.data.dir}/conf/hive-log4j.properties"/>
+    <sysproperty key="log4j.configuration" value="file://${test.src.data.dir}/conf/hive-log4j.properties"/>
     <sysproperty key="derby.stream.error.file" value="${test.build.dir}/derby.log"/>
     <sysproperty key="hive.aux.jars.path" value="file://${test.build.dir}/test-udfs.jar"/>
     <sysproperty key="ql.test.query.clientpositive.dir" value="${ql.test.query.clientpositive.dir}"/>
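The build-common.xml change above is the core of the fix: rather than copying the checked-in ${test.src.data.dir} tree into ${test.data.dir} (which also copied the dummy .gitignore placeholder into the test warehouse directory), the test setup now creates the empty warehouse and metadb directories directly, and the conf-related classpath entries point at ${test.src.data.dir}. Below is a minimal, self-contained Ant sketch of that idea; the project wrapper, target name, and property value are illustrative assumptions and are not taken from the commit.

<!-- Sketch only: project/target names and the property value are assumed for illustration;
     the mkdir-instead-of-copy approach mirrors the build-common.xml hunk above. -->
<project name="test-data-setup-sketch" default="init-test-dirs">
  <!-- In the real build this property is defined elsewhere in build-common.xml. -->
  <property name="test.data.dir" value="${basedir}/build/test/data"/>

  <target name="init-test-dirs">
    <!-- Create the empty directories the tests expect, instead of copying the
         source data tree (whose .gitignore placeholder would otherwise land
         inside the test warehouse directory and break the build). -->
    <mkdir dir="${test.data.dir}/warehouse"/>
    <mkdir dir="${test.data.dir}/metadb"/>
  </target>
</project>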

Diff for: contrib/build.xml (+1, -1)

@@ -33,7 +33,7 @@
   <pathelement location="${test.build.classes}" />
   <pathelement location="" />
   <pathelement location="${hadoop.conf.dir}"/>
-  <pathelement location="${test.data.dir}/conf"/>
+  <pathelement location="${test.src.data.dir}/conf"/>
   <pathelement location="${hive.conf.dir}"/>
   <pathelement location="${hive.root}/cli/lib/jline-0.9.94.jar"/>
   <pathelement location="${hadoop.test.jar}"/>

Diff for: data/warehouse/src/.gitignore (-1)

@@ -1 +0,0 @@
-# Dummy file to make Git recognize this empty directory

Diff for: hbase-handler/build.xml (+1, -1)

@@ -34,7 +34,7 @@
   <pathelement location="${test.build.classes}" />
   <pathelement location="" />
   <pathelement location="${hadoop.conf.dir}"/>
-  <pathelement location="${test.data.dir}/conf"/>
+  <pathelement location="${test.src.data.dir}/conf"/>
   <pathelement location="${hive.conf.dir}"/>
   <pathelement location="${hive.root}/cli/lib/jline-0.9.94.jar"/>
   <pathelement location="${hadoop.test.jar}"/>

Diff for: hwi/build.xml (+1, -1)

@@ -34,7 +34,7 @@
   <path id="test.classpath">
     <pathelement location="${test.build.classes}" />
     <pathelement location="" />
-    <pathelement location="${test.data.dir}/conf"/>
+    <pathelement location="${test.src.data.dir}/conf"/>
     <pathelement location="${hive.conf.dir}"/>
     <!-- We are running unit tests like the one inside ql -->
     <pathelement location="${build.dir.hive}/ql/test/classes"/>

Diff for: jdbc/build.xml (+1, -1)

@@ -32,7 +32,7 @@
   <path id="test.classpath">
     <pathelement location="${test.build.classes}" />
     <pathelement location="" />
-    <pathelement location="${test.data.dir}/conf"/>
+    <pathelement location="${test.src.data.dir}/conf"/>
     <pathelement location="${hive.conf.dir}"/>
     <fileset dir="${test.src.data.dir}" includes="files/*.jar"/>
     <fileset dir="${hive.root}" includes="testlibs/*.jar"/>

Diff for: odbc/build.xml (+1, -1)

@@ -32,7 +32,7 @@
   <path id="test.classpath">
     <pathelement location="${test.build.classes}" />
     <pathelement location="" />
-    <pathelement location="${test.data.dir}/conf"/>
+    <pathelement location="${test.src.data.dir}/conf"/>
     <pathelement location="${hive.conf.dir}"/>
     <fileset dir="${test.src.data.dir}" includes="files/*.jar"/>
     <fileset dir="${hive.root}" includes="testlibs/*.jar"/>

Diff for: ql/build.xml (+1, -1)

@@ -36,7 +36,7 @@
   <pathelement location="${test.build.classes}" />
   <pathelement location="" />
   <pathelement location="${hadoop.conf.dir}"/>
-  <pathelement location="${test.data.dir}/conf"/>
+  <pathelement location="${test.src.data.dir}/conf"/>
   <pathelement location="${hive.conf.dir}"/>
   <pathelement location="${hive.root}/cli/lib/jline-0.9.94.jar"/>
   <pathelement location="${hadoop.test.jar}"/>

Diff for: ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (+1, -1)

@@ -528,7 +528,7 @@ public void cliInit(String tname, boolean recreate) throws Exception {
       createSources();
     }

-    CliSessionState ss = new CliSessionState(new HiveConf(Driver.class));
+    CliSessionState ss = new CliSessionState(conf);
     assert ss != null;
     ss.in = System.in;

Diff for: ql/src/test/queries/clientnegative/addpart1.q (+1, -2)

@@ -1,4 +1,4 @@
-drop table addpart1;
+
 create table addpart1 (a int) partitioned by (b string, c string);

 alter table addpart1 add partition (b='f', c='s');
@@ -9,4 +9,3 @@ alter table addpart1 add partition (b='f', c='');

 show prtitions addpart1;

-drop table addpart1;

Diff for: ql/src/test/queries/clientnegative/alter_non_native.q (+1, -1)

@@ -1,4 +1,4 @@
-DROP TABLE non_native1;
+
 CREATE TABLE non_native1(key int, value string)
 STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler';

Diff for: ql/src/test/queries/clientnegative/altern1.q (+2, -2)

@@ -1,4 +1,4 @@
-drop table altern1;
+
 create table altern1(a int, b int) partitioned by (ds string);
 alter table altern1 replace columns(a int, b int, ds string);
-drop table altern1;
+

Diff for: (file name not shown in source)

@@ -1,4 +1,4 @@
-DROP TABLE table_test_output_fomat;
+

 CREATE TABLE table_test_output_format(key INT, value STRING) STORED AS
 INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
@@ -8,4 +8,4 @@ FROM src
 INSERT OVERWRITE TABLE table_test_output_format SELECT src.key, src.value LIMIT 10;

 describe table_test_output_format;
-DROP TABLE table_test_output_format;
+

Diff for: ql/src/test/queries/clientnegative/create_view_failure1.q (+1, -1)

@@ -1,4 +1,4 @@
-DROP TABLE xxx12;
+
 DROP VIEW xxx12;

 -- views and tables share the same namespace

Diff for: ql/src/test/queries/clientnegative/create_view_failure2.q (+1, -1)

@@ -1,4 +1,4 @@
-DROP TABLE xxx4;
+
 DROP VIEW xxx4;

 -- views and tables share the same namespace

Diff for: ql/src/test/queries/clientnegative/ctas.q (+2, -2)

@@ -1,5 +1,5 @@
-drop table nzhang_ctas4;
+

 create external table nzhang_ctas4 as select key, value from src;

-drop table nzhang_ctas4;
+

Diff for: ql/src/test/queries/clientnegative/ddltime.q (+2, -2)

@@ -1,6 +1,6 @@
-drop table T2;
+
 create table T2 like srcpart;

 insert overwrite table T2 partition (ds = '2010-06-21', hr='1') select /*+ HOLD_DDLTIME */ key, value from src where key > 10;

-drop table T2;
+

Diff for: ql/src/test/queries/clientnegative/deletejar.q (+1, -1)

@@ -1,4 +1,4 @@
-DROP TABLE DELETEJAR;
+
 ADD JAR ../data/files/TestSerDe.jar;
 DELETE JAR ../data/files/TestSerDe.jar;
 CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;

Diff for: ql/src/test/queries/clientnegative/drop_view_failure1.q (+1, -1)

@@ -1,4 +1,4 @@
-DROP TABLE xxx1;
+

 CREATE TABLE xxx1(key int);

Diff for: ql/src/test/queries/clientnegative/dyn_part1.q (+2, -2)

@@ -2,10 +2,10 @@ set hive.exec.dynamic.partition=true;
 set hive.exec.dynamic.partition.mode=nostrict;
 set hive.exec.max.dynamic.partitions=2;

-drop table dynamic_partition;
+
 create table dynamic_partition (key string) partitioned by (value string);

 insert overwrite table dynamic_partition partition(hr) select key, value from src;

-drop table dynamic_partition;
+

Diff for: ql/src/test/queries/clientnegative/dyn_part2.q (+2, -2)

@@ -1,4 +1,4 @@
-drop table nzhang_part1;
+
 create table nzhang_part1 (key string, value string) partitioned by (ds string, hr string);

 set hive.exec.dynamic.partition=true;
@@ -7,5 +7,5 @@ insert overwrite table nzhang_part1 partition(ds='11', hr) select key, value fro

 show partitions nzhang_part1;

-drop table nzhang_part1;
+

Diff for: ql/src/test/queries/clientnegative/external1.q (+1, -1)

@@ -1,4 +1,4 @@
 set hive.cli.errors.ignore=true;
-drop table external1;
+
 create external table external1(a int, b int) location 'invalidscheme://data.s3ndemo.hive/kv';
 describe external1;

Diff for: ql/src/test/queries/clientnegative/external2.q (+1, -1)

@@ -1,5 +1,5 @@
 set hive.cli.errors.ignore=true;
-drop table external2;
+
 create external table external2(a int, b int) partitioned by (ds string);
 alter table external2 add partition (ds='2008-01-01') location 'invalidscheme://data.s3ndemo.hive/pkv/2008-01-01';
 describe external2 partition (ds='2008-01-01');

Diff for: ql/src/test/queries/clientnegative/invalid_create_tbl1.q (+1, -1)

@@ -1,4 +1,4 @@
-DROP TABLE inv_valid_tbl1;
+
 CREATE TABLE inv_valid_tbl1 COMMENT 'This is a thrift based table'
 PARTITIONED BY(aint DATETIME, country STRING)
 CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS

Diff for: ql/src/test/queries/clientnegative/invalidate_view1.q (+1, -1)

@@ -1,6 +1,6 @@
 DROP VIEW xxx8;
 DROP VIEW xxx9;
-DROP TABLE xxx10;
+
 -- create two levels of view reference, then invalidate intermediate view
 -- by dropping a column from underlying table, and verify that
 -- querying outermost view results in full error context

Diff for: ql/src/test/queries/clientnegative/load_non_native.q (+1, -1)

@@ -1,4 +1,4 @@
-DROP TABLE non_native2;
+
 CREATE TABLE non_native2(key int, value string)
 STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler';

Diff for: (file name not shown in source) (+1, -1)

@@ -1,6 +1,6 @@
 -- test for loading into tables with the correct file format
 -- test for loading into partitions with the correct file format

-DROP TABLE load_wrong_fileformat_T1;
+
 CREATE TABLE load_wrong_fileformat_T1(name STRING) STORED AS SEQUENCEFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE load_wrong_fileformat_T1;

Diff for: (file name not shown in source)

@@ -1,6 +1,6 @@
 -- test for loading into tables with the correct file format
 -- test for loading into partitions with the correct file format

-DROP TABLE T1;
+
 CREATE TABLE T1(name STRING) STORED AS RCFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T1;

Diff for: (file name not shown in source)

@@ -1,6 +1,6 @@
 -- test for loading into tables with the correct file format
 -- test for loading into partitions with the correct file format

-DROP TABLE T1;
+
 CREATE TABLE T1(name STRING) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T1;

Diff for: ql/src/test/queries/clientnegative/nopart_insert.q (+1, -2)

@@ -1,8 +1,7 @@
-DROP TABLE nopart_insert;
+
 CREATE TABLE nopart_insert(a STRING, b STRING) PARTITIONED BY (ds STRING);

 INSERT OVERWRITE TABLE nopart_insert
 SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
 FROM src;

-DROP TABLE nopart_insert;

Diff for: ql/src/test/queries/clientnegative/nopart_load.q (+1, -2)

@@ -1,6 +1,5 @@
-DROP TABLE nopart_load;
+
 CREATE TABLE nopart_load(a STRING, b STRING) PARTITIONED BY (ds STRING);

 load data local inpath '../data/files/kv1.txt' overwrite into table nopart_load ;

-DROP TABLE nopart_load;

Diff for: ql/src/test/queries/clientnegative/smb_bucketmapjoin.q (+4, -4)

@@ -2,10 +2,10 @@ set hive.enforce.bucketing = true;
 set hive.enforce.sorting = true;
 set hive.exec.reducers.max = 1;

-drop table smb_bucket4_1;
+
 CREATE TABLE smb_bucket4_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS;

-drop table smb_bucket4_2;
+
 CREATE TABLE smb_bucket4_2(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS;

 insert overwrite table smb_bucket4_1
@@ -19,5 +19,5 @@ set hive.optimize.bucketmapjoin.sortedmerge = true;

 select /*+mapjoin(a)*/ * from smb_bucket4_1 a left outer join smb_bucket4_2 b on a.key = b.key;

-drop table smb_bucket4_1;
-drop table smb_bucket4_2;
+
+

Diff for: ql/src/test/queries/clientnegative/union2.q (+2, -2)

@@ -1,5 +1,5 @@
-drop table union2_t1;
-drop table union2_t2;
+
+
 create table if not exists union2_t1(r string, c string, v string);
 create table if not exists union2_t2(s string, c string, v string);

Diff for: ql/src/test/queries/clientpositive/add_part_exist.q (-2)

@@ -12,5 +12,3 @@ SHOW PARTITIONS add_part_test;

 ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03');
 SHOW PARTITIONS add_part_test;
-
-DROP TABLE add_part_test;

Diff for: ql/src/test/queries/clientpositive/alter1.q (-3)

@@ -1,4 +1,3 @@
-drop table alter1;
 create table alter1(a int, b int);
 describe extended alter1;
 alter table alter1 set tblproperties ('a'='1', 'c'='3');
@@ -25,5 +24,3 @@ describe extended alter1;

 alter table alter1 replace columns (a int, b int, c string);
 describe alter1;
-
-drop table alter1;

Diff for: ql/src/test/queries/clientpositive/alter2.q (-2)

@@ -1,4 +1,3 @@
-drop table alter2;
 create table alter2(a int, b int) partitioned by (insertdate string);
 describe extended alter2;
 show partitions alter2;
@@ -19,4 +18,3 @@ show partitions alter2;
 alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02';
 describe extended alter2;
 show partitions alter2;
-drop table alter2;

Diff for: ql/src/test/queries/clientpositive/alter3.q (-10)

@@ -1,8 +1,3 @@
-drop table alter3_src;
-drop table alter3;
-drop table alter3_renamed;
-drop table alter3_like_renamed;
-
 create table alter3_src ( col1 string ) stored as textfile ;
 load data local inpath '../data/files/test.dat' overwrite into table alter3_src ;

@@ -24,8 +19,3 @@ select col1 from alter3_src;
 alter table alter3_like rename to alter3_like_renamed;

 describe extended alter3_like_renamed;
-
-drop table alter3_src;
-drop table alter3;
-drop table alter3_renamed;
-drop table alter3_like_renamed;

Diff for: ql/src/test/queries/clientpositive/alter4.q (-4)

@@ -1,9 +1,5 @@
-DROP TABLE set_bucketing_test;
-
 CREATE TABLE set_bucketing_test (key INT, value STRING) CLUSTERED BY (key) INTO 10 BUCKETS;
 DESCRIBE EXTENDED set_bucketing_test;

 ALTER TABLE set_bucketing_test NOT CLUSTERED;
 DESCRIBE EXTENDED set_bucketing_test;
-
-DROP TABLE set_bucketing_test;

Diff for: ql/src/test/queries/clientpositive/archive.q (-3)

@@ -33,7 +33,6 @@ SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key;
 ALTER TABLE srcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12');
 SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key;

-DROP TABLE harbucket;

 CREATE TABLE old_name(key INT)
 PARTITIONED by (ds STRING);
@@ -45,5 +44,3 @@ FROM (SELECT * FROM old_name WHERE ds='1') subq1) subq2;
 ALTER TABLE old_name RENAME TO new_name;
 SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col
 FROM (SELECT * FROM new_name WHERE ds='1') subq1) subq2;
-
-DROP TABLE new_name;
