Commit e5e1fc1

Merge pull request #1 from ramach/master
Updated patch using Rest

2 parents: eea2070 + 2173dd7

16 files changed: +1142, -2157 lines changed

hive-plugin/src/main/java/edu/berkeley/ground/plugins/hive/GroundDatabase.java (+161, -283)
Large diffs are not rendered by default.

hive-plugin/src/main/java/edu/berkeley/ground/plugins/hive/GroundMetaStore.java (-517)
This file was deleted.

hive-plugin/src/main/java/edu/berkeley/ground/plugins/hive/GroundPartition.java (+42, -97)
@@ -1,32 +1,18 @@
 /**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
  */
 
 package edu.berkeley.ground.plugins.hive;
 
-import edu.berkeley.ground.exceptions.GroundException;
-import edu.berkeley.ground.model.models.Edge;
-import edu.berkeley.ground.model.models.Node;
-import edu.berkeley.ground.model.models.NodeVersion;
-import edu.berkeley.ground.model.models.Structure;
-import edu.berkeley.ground.model.models.StructureVersion;
-import edu.berkeley.ground.model.models.Tag;
-import edu.berkeley.ground.model.versions.GroundType;
-import edu.berkeley.ground.plugins.hive.util.JsonUtil;
-
-import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.hive.common.ObjectPair;
@@ -37,118 +23,77 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import edu.berkeley.ground.api.models.Edge;
+import edu.berkeley.ground.api.models.Node;
+import edu.berkeley.ground.api.models.NodeVersion;
+import edu.berkeley.ground.api.models.Structure;
+import edu.berkeley.ground.api.models.StructureVersion;
+import edu.berkeley.ground.api.models.Tag;
+import edu.berkeley.ground.api.versions.GroundType;
+import edu.berkeley.ground.exceptions.GroundException;
+import edu.berkeley.ground.plugins.hive.util.PluginUtil;
+
 public class GroundPartition {
 
-  private static final Logger LOG = LoggerFactory.getLogger(GroundTable.class.getName());
+  static final private Logger logger = LoggerFactory.getLogger(GroundTable.class.getName());
 
-  private GroundReadWrite groundReadWrite = null;
+  private static final long DUMMY_NOT_USED = 1L;
+  private final GroundReadWrite groundReadWrite;
 
   public GroundPartition(GroundReadWrite ground) {
     groundReadWrite = ground;
   }
 
-  /**
-   * Retrieve the partition node.
-   *
-   * @param partitionName the name of the partition
-   * @return the partition node
-   * @throws GroundException an error while retrieving the node
-   */
   public Node getNode(String partitionName) throws GroundException {
-    try {
-      LOG.debug("Fetching partition node: " + partitionName);
-      return groundReadWrite.getNodeFactory().retrieveFromDatabase(partitionName);
-    } catch (GroundException ge1) {
-      LOG.debug("Not found - Creating partition node: {}", partitionName);
-
-      Node node = groundReadWrite.getNodeFactory().create(partitionName, null, new HashMap<>());
-      Structure nodeStruct = groundReadWrite.getStructureFactory().create(node.getName(), null,
-          new HashMap<>());
-
-      return node;
-    }
+    logger.debug("Fetching partition node: " + partitionName);
+    return groundReadWrite.getGroundReadWriteNodeResource().getNode(partitionName);
   }
 
-  /**
-   * Retrieve the structure for partitions.
-   *
-   * @param partitionName the name of the partition
-   * @return the structure for partitions
-   * @throws GroundException an error while retrieving the structure
-   */
   public Structure getNodeStructure(String partitionName) throws GroundException {
     try {
       Node node = this.getNode(partitionName);
-      return groundReadWrite.getStructureFactory().retrieveFromDatabase(partitionName);
+      return this.groundReadWrite.getGroundReadWriteStructureResource()
+          .getStructure(node.getName());
     } catch (GroundException e) {
-      LOG.error("Unable to fetch partition node structure");
-      throw e;
+      logger.error("Unable to fetch parition node structure");
+      throw new GroundException(e);
     }
   }
 
-  /**
-   * Retrieve a partition's edge.
-   *
-   * @param partitionName the name of the partition
-   * @return the partition's edge
-   * @throws GroundException an error retrieving the edge
-   */
   public Edge getEdge(String partitionName) throws GroundException {
-    try {
-      LOG.debug("Fetching table partition edge: " + partitionName);
-      return groundReadWrite.getEdgeFactory().retrieveFromDatabase(partitionName);
-    } catch (GroundException ge1) {
-      LOG.debug("Not found - Creating table partition edge: {}", partitionName);
+    logger.debug("Fetching table partition edge: " + partitionName);
+    return groundReadWrite.getGroundReadWriteEdgeResource().getEdge(partitionName);
+  }
 
-      Edge edge = groundReadWrite.getEdgeFactory().create(partitionName, null, 1, 2,
-          new HashMap<>());
-      Structure edgeStruct = groundReadWrite.getStructureFactory().create(partitionName, null,
-          new HashMap<>());
-      return edge;
-    }
+  public Structure getEdgeStructure(String partitionName) throws GroundException {
+    Edge edge = getEdge(partitionName);
+    return groundReadWrite.getGroundReadWriteStructureResource().getStructure(edge.getName());
   }
 
-  /**
-   * Create a new partition.
-   *
-   * @param dbName the name of the database
-   * @param tableName the name of the table
-   * @param part the partition's data
-   * @return the node version corresponding the partition
-   * @throws InvalidObjectException an invalid partition
-   * @throws MetaException an exception while creating the partition
-   */
   public NodeVersion createPartition(String dbName, String tableName, Partition part)
      throws InvalidObjectException, MetaException {
    try {
-      ObjectPair<String, String> objectPair = new ObjectPair<>(HiveStringUtils
-          .normalizeIdentifier(dbName), HiveStringUtils.normalizeIdentifier(tableName));
+      ObjectPair<String, String> objectPair =
+          new ObjectPair<>(HiveStringUtils.normalizeIdentifier(dbName),
+              HiveStringUtils.normalizeIdentifier(tableName));
      String partId = objectPair.toString();
      for (String value : part.getValues()) {
        partId += ":" + value;
      }
 
-      Tag partTag = new Tag(0, partId, JsonUtil.toJson(part), GroundType.STRING);
-
-      Node node = this.getNode(partId);
-      long nodeId = node.getId();
-      Structure partStruct = this.getNodeStructure(partId);
-      Map<String, GroundType> structVersionAttribs = new HashMap<>();
-      structVersionAttribs.put(partId, GroundType.STRING);
-      StructureVersion sv = groundReadWrite.getStructureVersionFactory().create(partStruct.getId(),
-          structVersionAttribs, new ArrayList<>());
-
+      Tag partTag = new Tag(DUMMY_NOT_USED, partId, PluginUtil.toJson(part), GroundType.STRING);
+      Map<String, GroundType> structureVersionAttribs = new HashMap<>();
+      structureVersionAttribs.put(GroundStore.EntityState.ACTIVE.name(), GroundType.STRING);
+      StructureVersion sv = groundReadWrite.getGroundReadWriteStructureResource()
+          .getStructureVersion(partId, structureVersionAttribs);
      String reference = part.getSd().getLocation();
      HashMap<String, Tag> tags = new HashMap<>();
      tags.put(partId, partTag);
 
      long versionId = sv.getId();
-      List<Long> parentId = new ArrayList<>();
-
      Map<String, String> parameters = part.getParameters();
-
-      return groundReadWrite.getNodeVersionFactory().create(tags, versionId, reference, parameters,
-          nodeId, parentId);
+      return groundReadWrite.getGroundReadWriteNodeResource().createNodeVersion(1L, tags, versionId,
+          reference, parameters, partId);
    } catch (GroundException e) {
      throw new MetaException("Unable to create partition " + e.getMessage());
    }
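
Below is a small caller-side sketch (not part of this commit) meant to show how the refactored GroundPartition is driven after the switch from the old factory classes to the REST-backed resources on GroundReadWrite. The class name GroundPartitionSketch, the recordPartition helper, and the sample database/table/location values are hypothetical; GroundPartition, GroundReadWrite, createPartition, and the Hive metastore Partition/StorageDescriptor types are taken from the diff above, and obtaining a configured GroundReadWrite handle is assumed to happen elsewhere in the plugin.

import java.util.Arrays;
import java.util.HashMap;

import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;

import edu.berkeley.ground.api.models.NodeVersion;
import edu.berkeley.ground.plugins.hive.GroundPartition;
import edu.berkeley.ground.plugins.hive.GroundReadWrite;

public class GroundPartitionSketch {

  // Hypothetical helper: records one Hive partition in Ground through the
  // refactored GroundPartition. The GroundReadWrite handle is assumed to be
  // supplied by the plugin's configuration; its bootstrap is outside this diff.
  static NodeVersion recordPartition(GroundReadWrite ground)
      throws InvalidObjectException, MetaException {
    // Build a minimal Partition carrying only the fields createPartition reads:
    // the partition values and the storage descriptor location.
    StorageDescriptor sd = new StorageDescriptor();
    sd.setLocation("hdfs:///warehouse/sales/ds=2016-01-01");

    Partition part = new Partition();
    part.setDbName("default");
    part.setTableName("sales");
    part.setValues(Arrays.asList("2016-01-01"));
    part.setSd(sd);
    part.setParameters(new HashMap<String, String>());

    // createPartition keys the node version by the normalized (db, table) pair
    // plus the partition values, tags it with the JSON-serialized Partition
    // (PluginUtil.toJson), and writes it through the node and structure REST
    // resources exposed by GroundReadWrite.
    GroundPartition groundPartition = new GroundPartition(ground);
    return groundPartition.createPartition("default", "sales", part);
  }
}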
