Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions dev/replace_sources_with_relocated.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
#!/bin/sh

# This script replaces the normal sources with the ones where the packages are replaced for HBase 3.0
# The purpose is to enable debugging from an IDE without having to change the source directory settings
# Make sure to never commit the changes this script makes.

# Usage:
# 1. Activate the Hadoop3 maven profile in your IDE (and deactivate the default)
# 2. Make sure that you have no uncommitted changes
# 3. Run "mvn clean package -am -pl phoenix-core -Dhbase.profile=3.0 -DskipTests"
# 4. Run this script
# 5. Work with the source in the IDE
# 6. Get a diff of your fixes
# 7. Run "git reset --hard"
# 8. Re-apply your changes.

# Fail fast: if the cd or any cp fails (e.g. the build was not run first and
# target/generated-sources/replaced does not exist), stop instead of silently
# overwriting only part of the source tree.
set -e

orig_dir=$(pwd)
cd "$(dirname "$0")"/..
cp -r phoenix-core/target/generated-sources/replaced/* phoenix-core/src
cp -r phoenix-core-client/target/generated-sources/replaced/* phoenix-core-client/src
cp -r phoenix-core-server/target/generated-sources/replaced/* phoenix-core-server/src

# Quoted so a checkout path containing spaces does not break the return cd.
cd "$orig_dir"
184 changes: 174 additions & 10 deletions phoenix-core-client/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -68,18 +68,10 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop2-compat</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-zookeeper</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol-shaded</artifactId>
Expand Down Expand Up @@ -279,7 +271,7 @@
hbaseMinor = Integer.parseInt(versionMatcher.group(2));
hbasePatch = Integer.parseInt(versionMatcher.group(3));

hbaseMajor == 2 &amp;&amp; (
(hbaseMajor == 2 &amp;&amp; (
("${hbase.compat.version}".equals("2.4.1")
&amp;&amp; hbaseMinor == 4
&amp;&amp; hbasePatch &gt;=1)
Expand All @@ -291,7 +283,11 @@
&amp;&amp; hbasePatch &gt;=4)
|| ("${hbase.compat.version}".equals("2.6.0")
&amp;&amp; hbaseMinor == 6
&amp;&amp; hbasePatch &gt;=0)
&amp;&amp; hbasePatch &gt;=0))
|| (hbaseMajor == 3 &amp;&amp; (
"${hbase.compat.version}".equals("3.0.0")
&amp;&amp; hbaseMinor == 0
&amp;&amp; hbasePatch &gt;=0))
)</condition>
</evaluateBeanshell>
</rules>
Expand Down Expand Up @@ -439,5 +435,173 @@
</executions>
</plugin>
</plugins>
<sourceDirectory>${actualSourceDirectory}</sourceDirectory>
</build>
<profiles>
<profile>
<id>hbase-2.x</id>
<activation>
<property>
<name>hbase.profile</name>
<!-- This works until we have more than one 3.x profile -->
<value>!3.0</value>
</property>
</activation>
<properties>
<actualSourceDirectory>src/main/java</actualSourceDirectory>
</properties>
<dependencies>
<!-- <dependency>-->
<!-- <groupId>org.apache.hbase</groupId>-->
<!-- <artifactId>hbase-hadoop2-compat</artifactId>-->
<!-- </dependency>-->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<executions>
<execution>
<id>add-antlr-source</id>
<goals>
<goal>add-source</goal>
</goals>
<phase>generate-sources</phase>
<configuration>
<sources>
<source>${antlr-output.dir}</source>
<source>${antlr-input.dir}</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>hbase-3.x</id>
<activation>
<property>
<name>hbase.profile</name>
<value>3.0</value>
</property>
</activation>
<properties>
<protoc.version>4.31.1</protoc.version>
<actualSourceDirectory>target/generated-sources/replaced/main/java</actualSourceDirectory>

</properties>
<build>
<plugins>
<plugin>
<groupId>com.google.code.maven-replacer-plugin</groupId>
<artifactId>replacer</artifactId>
<executions>
<execution>
<id>replace-generated-sources</id>
<goals>
<goal>replace</goal>
</goals>
<phase>process-sources</phase>
<configuration>
<basedir>${basedir}/target/generated-sources/protobuf</basedir>
<includes>
<include>**/*.java</include>
</includes>
<ignoreErrors>true</ignoreErrors>
<replacements>
<replacement>
<token>([^\.])com.google.protobuf</token>
<value>$1org.apache.hbase.thirdparty.com.google.protobuf</value>
</replacement>
<replacement>
<token>([^\.])org.apache.hadoop.hbase.protobuf.generated</token>
<value>$1org.apache.hadoop.hbase.shaded.protobuf.generated</value>
</replacement>
</replacements>
</configuration>
</execution>
<execution>
<id>replace-sources</id>
<goals>
<goal>replace</goal>
</goals>
<phase>process-sources</phase>
<configuration>
<basedir>${basedir}/src</basedir>
<outputDir>../target/generated-sources/replaced</outputDir>
<includes>
<include>**/*.java</include>
</includes>
<!-- Omid uses unshaded protobuf 2.5 -->
<excludes>
<exclude>**/OmidTransactionContext*.java</exclude>
</excludes>
<!-- Ignore errors when missing files, because it means this build
was run with -Dprotoc.skip and there is no -Dreplacer.skip -->
<!-- <ignoreErrors>true</ignoreErrors>-->
<replacements>
<replacement>
<token>([^\.])com.google.protobuf</token>
<value>$1org.apache.hbase.thirdparty.com.google.protobuf</value>
</replacement>
<replacement>
<token>([^\.])org.apache.hadoop.hbase.protobuf.generated</token>
<value>$1org.apache.hadoop.hbase.shaded.protobuf.generated</value>
</replacement>
</replacements>
</configuration>
</execution>
<execution>
<id>copy-sources</id>
<goals>
<goal>replace</goal>
</goals>
<phase>process-sources</phase>
<configuration>
<basedir>${basedir}/src</basedir>
<outputDir>../target/generated-sources/replaced</outputDir>
<!-- Copy Omid files without change -->
<includes>
<include>**/OmidTransactionContext*.java</include>
</includes>
<!-- Ignore errors when missing files, because it means this build
was run with -Dprotoc.skip and there is no -Dreplacer.skip -->
<!-- <ignoreErrors>true</ignoreErrors>-->
<replacements>
<!-- Just copy -->
</replacements>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<executions>
<execution>
<id>add-antlr-source</id>
<goals>
<goal>add-source</goal>
</goals>
<phase>generate-sources</phase>
<configuration>
<sources>
<source>${antlr-output.dir}</source>
<source>${antlr-input.dir}</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
Original file line number Diff line number Diff line change
Expand Up @@ -17,42 +17,23 @@
*/
package org.apache.hadoop.hbase.ipc.controller;

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.phoenix.compat.hbase.CompatRPCControllerFactory;

/**
* {@link RpcControllerFactory} that sets the priority of metadata rpc calls to be processed in its
* own queue.
*/
public class ClientRpcControllerFactory extends RpcControllerFactory {
public class ClientRpcControllerFactory extends CompatRPCControllerFactory {

/** Creates the factory; configuration handling is delegated to the compat superclass. */
public ClientRpcControllerFactory(Configuration conf) {
super(conf);
}

@Override
public HBaseRpcController newController() {
HBaseRpcController delegate = super.newController();
return getController(delegate);
}

@Override
public HBaseRpcController newController(CellScanner cellScanner) {
HBaseRpcController delegate = super.newController(cellScanner);
return getController(delegate);
}

@Override
public HBaseRpcController newController(List<CellScannable> cellIterables) {
HBaseRpcController delegate = super.newController(cellIterables);
return getController(delegate);
}

private HBaseRpcController getController(HBaseRpcController delegate) {
/**
 * Wraps the delegate in a {@link MetadataRpcController} so metadata RPCs carry their
 * own priority. Protected so the compat superclass can invoke it for every
 * newController variant.
 */
protected HBaseRpcController getController(HBaseRpcController delegate) {
return new MetadataRpcController(delegate, conf);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,43 +17,23 @@
*/
package org.apache.hadoop.hbase.ipc.controller;

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.phoenix.compat.hbase.CompatRPCControllerFactory;

/**
* RpcControllerFactory that should only be used when creating Table for making remote RPCs to the
* region servers hosting global mutable index table regions. This controller factory shouldn't be
* globally configured anywhere and is meant to be used only internally by Phoenix indexing code.
*/
public class InterRegionServerIndexRpcControllerFactory extends RpcControllerFactory {
public class InterRegionServerIndexRpcControllerFactory extends CompatRPCControllerFactory {

/** Creates the factory; configuration handling is delegated to the compat superclass. */
public InterRegionServerIndexRpcControllerFactory(Configuration conf) {
super(conf);
}

@Override
public HBaseRpcController newController() {
HBaseRpcController delegate = super.newController();
return getController(delegate);
}

@Override
public HBaseRpcController newController(CellScanner cellScanner) {
HBaseRpcController delegate = super.newController(cellScanner);
return getController(delegate);
}

@Override
public HBaseRpcController newController(List<CellScannable> cellIterables) {
HBaseRpcController delegate = super.newController(cellIterables);
return getController(delegate);
}

private HBaseRpcController getController(HBaseRpcController delegate) {
protected HBaseRpcController getController(HBaseRpcController delegate) {
// construct a chain of controllers: metadata, index and standard controller
IndexRpcController indexRpcController = new IndexRpcController(delegate, conf);
return new MetadataRpcController(indexRpcController, conf);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,42 +17,22 @@
*/
package org.apache.hadoop.hbase.ipc.controller;

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.phoenix.compat.hbase.CompatRPCControllerFactory;

/**
* {@link RpcControllerFactory} that sets the appropriate priority of server-server RPC calls
* destined for Phoenix SYSTEM tables.
*/
public class ServerRpcControllerFactory extends RpcControllerFactory {
public class ServerRpcControllerFactory extends CompatRPCControllerFactory {

/** Creates the factory; configuration handling is delegated to the compat superclass. */
public ServerRpcControllerFactory(Configuration conf) {
super(conf);
}

@Override
public HBaseRpcController newController() {
HBaseRpcController delegate = super.newController();
return getController(delegate);
}

@Override
public HBaseRpcController newController(CellScanner cellScanner) {
HBaseRpcController delegate = super.newController(cellScanner);
return getController(delegate);
}

@Override
public HBaseRpcController newController(List<CellScannable> cellIterables) {
HBaseRpcController delegate = super.newController(cellIterables);
return getController(delegate);
}

private HBaseRpcController getController(HBaseRpcController delegate) {
/**
 * Wraps the delegate in a {@link ServerRpcController} so server-to-server calls destined
 * for Phoenix SYSTEM tables get the appropriate priority (see class Javadoc). Protected
 * so the compat superclass can invoke it for every newController variant.
 */
protected HBaseRpcController getController(HBaseRpcController delegate) {
return new ServerRpcController(delegate, conf);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,9 @@
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;

/**
* {@link RpcControllerFactory} that should only be used when making server-server remote RPCs to
* the region servers hosting Phoenix SYSTEM tables.
* Factory that should only be used when making server-server remote RPCs to the region servers
* hosting Phoenix SYSTEM tables. Despite the name, this does NOT implement
* {@link RpcControllerFactory}
*/
public class ServerSideRPCControllerFactory {

Expand Down
Loading