diff --git a/.gitignore b/.gitignore
index 7528e0ce722..e1b7cf62568 100644
--- a/.gitignore
+++ b/.gitignore
@@ -75,25 +75,18 @@ target
h2o-test-accuracy/testng.xml
# stuff created by sphinx-build (python docs)
-./h2o-py/docs/docs/_static
-./h2o-py/docs/docs/_sources/
-./h2o-py/docs/docs/searchindex.js
-./h2o-py/docs/docs/search*
-./h2o-py/docs/docs/py-modindex.html
-./h2o-py/docs/docs/*html
-./h2o-py/docs/docs/*js
-./h2o-py/docs/docs/.buildinfo
-./h2o-py/docs/docs/.doctrees/
-./h2o-py/docs/docs/_modules/
-./h2o-py/docs/docs/objects.inv
+h2o-py/docs/_build/
git_private_jenkins.sh
make-java6.sh
# Ignore generated code
-src-gen/
+h2o-bindings/src-gen/
+h2o-grpc/proto-gen/
h2o-3-DESCRIPTION
gradle/buildnumber.properties
+*_pb2.py
+*_pb2_grpc.py
# Doc stuff
.Rapp.history
diff --git a/build.gradle b/build.gradle
index c26a47cd709..159f32cbcee 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,35 +1,16 @@
-//
-// The top-level h2o-3 project does not have any java pieces itself, but
-// apply from the standard java.gradle so that 'gradle idea' generates IDE
-// files with the right settings.
-//
-// The top-level jar file that gets produced is empty and not usable
-// for anything. Use the jar file produced by the h2o-assembly subproject.
-//
-apply from: 'gradle/java.gradle'
-
-// For multiproject setup we have to apply release plugin here (we share same release number cross all modules)
-if (project.hasProperty("doRelease")) {
- apply from: 'gradle/release.gradle'
-}
-
-// Print out time taken for each task so we find things that are slow.
-apply from: 'gradle/timing.gradle'
-
// The build script settings to fetch plugins and put them on
// classpath
buildscript {
repositories {
- maven {
- url 'https://plugins.gradle.org/m2/'
- }
+ maven { url 'https://plugins.gradle.org/m2/' }
mavenCentral()
jcenter()
}
+ //noinspection GroovyAssignabilityCheck
dependencies {
classpath 'org.ow2.asm:asm:5.1'
- classpath 'com.github.jengelman.gradle.plugins:shadow:1.2.2'
+ classpath 'com.github.jengelman.gradle.plugins:shadow:1.2.3'
classpath 'org.gradle.api.plugins:gradle-nexus-plugin:0.7.1'
classpath 'com.github.townsfolk:gradle-release:1.2'
classpath 'de.undercouch:gradle-download-task:2.1.0'
@@ -42,6 +23,28 @@ buildscript {
}
}
+plugins {
+ id "java"
+}
+
+//
+// The top-level h2o-3 project does not have any java pieces itself, but
+// apply from the standard java.gradle so that 'gradle idea' generates IDE
+// files with the right settings.
+//
+// The top-level jar file that gets produced is empty and not usable
+// for anything. Use the jar file produced by the :h2o-assemblies:main subproject.
+//
+apply from: 'gradle/java.gradle'
+
+// For multiproject setup we have to apply release plugin here (we share the same release number across all modules)
+if (project.hasProperty("doRelease")) {
+ apply from: 'gradle/release.gradle'
+}
+
+// Print out time taken for each task so we find things that are slow.
+apply from: 'gradle/timing.gradle'
+
//
// Common configuration
//
@@ -107,10 +110,15 @@ ext {
//
// Versions of libraries shared cross all projects
+ // The version of protoc must match protobuf-java. If you don't depend on
+ // protobuf-java directly, you will be transitively depending on the
+ // protobuf-java version that grpc depends on.
//
junitVersion = '4.12'
jets3tVersion = '0.7.1'
awsJavaSdkVersion = '1.8.3'
+ protocVersion = '3.0.2'
+ grpcVersion = '1.0.3'
//
// H2O's REST API version
@@ -237,7 +245,7 @@ subprojects {
}
task wrapper(type: Wrapper) {
- gradleVersion = '2.9'
+ gradleVersion = '3.3'
}
//
diff --git a/gradle/jacoco.gradle b/gradle/jacoco.gradle
index d49ef228d3f..16f7e886576 100644
--- a/gradle/jacoco.gradle
+++ b/gradle/jacoco.gradle
@@ -57,7 +57,7 @@ jacocoTestReport {
}
// Collect class files
- FileTree classLocation = fileTree(dir: "$rootDir", include: "**/build/libs/**/*.jar", excludes: ["build/", "h2o-assembly/"])
+ FileTree classLocation = fileTree(dir: "$rootDir", include: "**/build/libs/**/*.jar", excludes: ["build/", "h2o-assemblies/"])
classDirectories = classLocation
reports {
@@ -75,4 +75,4 @@ task cleanCoverageData (type: Delete) {
delete file("${buildDir}/reports/jacoco/report.exec")
}
-jacocoTestReport.dependsOn jacocoMergeExecs
\ No newline at end of file
+jacocoTestReport.dependsOn jacocoMergeExecs
diff --git a/gradle/scala.gradle b/gradle/scala.gradle
index 2b904af29ee..1fd3ad445ba 100644
--- a/gradle/scala.gradle
+++ b/gradle/scala.gradle
@@ -21,9 +21,9 @@ sourceSets {
// Activate Zinc compiler and configure scalac
tasks.withType(ScalaCompile) {
- scalaCompileOptions.useCompileDaemon = false
- scalaCompileOptions.useAnt = false
- scalaCompileOptions.additionalParameters = ['-target:jvm-1.6']
+// scalaCompileOptions.useCompileDaemon = false
+// scalaCompileOptions.useAnt = false
+// scalaCompileOptions.additionalParameters = ['-target:jvm-1.6']
}
// Create jar
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index 13372aef5e2..26deb5f9b8b 100644
Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 818ecb510bc..0483812945e 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
-#Thu May 26 17:50:53 PDT 2016
+#Thu Feb 02 14:15:44 PST 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-2.9-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
diff --git a/gradlew b/gradlew
index 9d82f789151..4453ccea33d 100755
--- a/gradlew
+++ b/gradlew
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/usr/bin/env sh
##############################################################################
##
@@ -6,12 +6,30 @@
##
##############################################################################
-# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-DEFAULT_JVM_OPTS=""
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
@@ -30,6 +48,7 @@ die ( ) {
cygwin=false
msys=false
darwin=false
+nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
@@ -40,26 +59,11 @@ case "`uname`" in
MINGW* )
msys=true
;;
+ NONSTOP* )
+ nonstop=true
+ ;;
esac
-# Attempt to set APP_HOME
-# Resolve links: $0 may be a link
-PRG="$0"
-# Need this for relative symlinks.
-while [ -h "$PRG" ] ; do
- ls=`ls -ld "$PRG"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '/.*' > /dev/null; then
- PRG="$link"
- else
- PRG=`dirname "$PRG"`"/$link"
- fi
-done
-SAVED="`pwd`"
-cd "`dirname \"$PRG\"`/" >/dev/null
-APP_HOME="`pwd -P`"
-cd "$SAVED" >/dev/null
-
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
@@ -85,7 +89,7 @@ location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
-if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
@@ -150,11 +154,19 @@ if $cygwin ; then
esac
fi
-# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
-function splitJvmOpts() {
- JVM_OPTS=("$@")
+# Escape application args
+save ( ) {
+ for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+ echo " "
}
-eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
-JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+ cd "$(dirname "$0")"
+fi
-exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
+exec "$JAVACMD" "$@"
diff --git a/gradlew.bat b/gradlew.bat
index aec99730b4e..e95643d6a2c 100644
--- a/gradlew.bat
+++ b/gradlew.bat
@@ -8,14 +8,14 @@
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
-@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-set DEFAULT_JVM_OPTS=
-
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@@ -46,10 +46,9 @@ echo location of your Java installation.
goto fail
:init
-@rem Get command-line arguments, handling Windowz variants
+@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
-if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
@@ -60,11 +59,6 @@ set _SKIP=2
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
-goto execute
-
-:4NT_args
-@rem Get arguments from the 4NT Shell from JP Software
-set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
diff --git a/h2o-app/build.gradle b/h2o-app/build.gradle
index 263a15f3d06..2fa3c8944b0 100644
--- a/h2o-app/build.gradle
+++ b/h2o-app/build.gradle
@@ -4,12 +4,10 @@
description = "H2O Application Runner"
dependencies {
- compile project(":h2o-web")
compile project(":h2o-algos")
compile project(":h2o-core")
compile project(":h2o-genmodel")
- compile project(":h2o-avro-parser")
- // Note: orc parser is included at the assembly level for each
+ // Note: orc parser is included at the assembly level for each
// Hadoop distribution
}
diff --git a/h2o-assembly/README.md b/h2o-assemblies/main/README.md
similarity index 82%
rename from h2o-assembly/README.md
rename to h2o-assemblies/main/README.md
index ee1edd49cfa..e752ee05d42 100644
--- a/h2o-assembly/README.md
+++ b/h2o-assemblies/main/README.md
@@ -7,6 +7,6 @@ with H2O.
## Building
```
-./gradlew :h2o-assembly:build
+./gradlew :h2o-assemblies:main:build
```
diff --git a/h2o-assembly/build.gradle b/h2o-assemblies/main/build.gradle
similarity index 73%
rename from h2o-assembly/build.gradle
rename to h2o-assemblies/main/build.gradle
index 7e231771bb3..3ac36cba7c8 100644
--- a/h2o-assembly/build.gradle
+++ b/h2o-assemblies/main/build.gradle
@@ -13,14 +13,16 @@ configurations {
// Dependencies
dependencies {
- compile project(":h2o-app")
- compile project(":h2o-persist-s3")
- compile project(":h2o-persist-hdfs")
- if (project.hasProperty("doIncludeOrc") && project.doIncludeOrc == "true") {
- compile project(":h2o-orc-parser")
- }
- compile project(":h2o-parquet-parser")
- compile "org.slf4j:slf4j-log4j12:1.7.5"
+ compile project(":h2o-app")
+ compile project(":h2o-web")
+ compile project(":h2o-avro-parser")
+ compile project(":h2o-persist-s3")
+ compile project(":h2o-persist-hdfs")
+ if (project.hasProperty("doIncludeOrc") && project.doIncludeOrc == "true") {
+ compile project(":h2o-orc-parser")
+ }
+ compile project(":h2o-parquet-parser")
+ compile "org.slf4j:slf4j-log4j12:1.7.5"
}
shadowJar {
@@ -51,14 +53,14 @@ artifacts {
// project build directory
//
-def assembly = "h2o-assembly.jar"
+def assembly = "main.jar"
def allInOne = "h2o.jar"
task copyJar(type: Copy) {
from ("${project.buildDir}/libs"){
include assembly
}
- into "${project.parent.buildDir}"
+ into "${project.parent.parent.buildDir}"
rename { it.replace(assembly, allInOne) }
}
// Execute always copyJar
diff --git a/h2o-assemblies/py2o/build.gradle b/h2o-assemblies/py2o/build.gradle
new file mode 100644
index 00000000000..3a66c09debe
--- /dev/null
+++ b/h2o-assemblies/py2o/build.gradle
@@ -0,0 +1,58 @@
+apply plugin: 'java'
+apply plugin: 'com.github.johnrengelman.shadow'
+
+description = "H2O Core Java library for use with python"
+
+// Exclude unwanted dependencies
+configurations {
+ compile.exclude module: 'junit'
+ compile.exclude module: 'mockito-all'
+ compile.exclude module: 'zookeeper'
+ compile.exclude module: "javax.mail.glassfish"
+}
+
+// Dependencies
+dependencies {
+ compile project(":h2o-app")
+ compile project(":h2o-grpc")
+ compile "org.slf4j:slf4j-log4j12:1.7.5"
+}
+
+shadowJar {
+ mergeServiceFiles()
+ classifier = ''
+ exclude 'META-INF/*.DSA'
+ exclude 'META-INF/*.SF'
+ exclude 'synchronize.properties'
+ exclude 'uploader.properties'
+ exclude 'test.properties'
+ exclude 'cockpitlite.properties'
+ exclude 'devpay_products.properties'
+ manifest {
+ attributes 'Main-Class': 'water.H2OApp'
+ }
+}
+
+artifacts {
+ archives shadowJar
+}
+
+//
+// Support make infrastructure by copying the resulting assembly into parent
+// project build directory
+//
+
+def assembly = "py2o.jar"
+def allInOne = "h2o-py.jar"
+
+task copyJar(type: Copy) {
+ from ("${project.buildDir}/libs"){
+ include assembly
+ }
+ into "${project.parent.parent.buildDir}"
+ rename { it.replace(assembly, allInOne) }
+}
+// Execute always copyJar
+shadowJar.finalizedBy copyJar
+// Run shadowJar as part of the build
+jar.finalizedBy shadowJar
diff --git a/h2o-bindings/bin/bindings.py b/h2o-bindings/bin/bindings.py
index 8713791dc4a..346c238cfcd 100644
--- a/h2o-bindings/bin/bindings.py
+++ b/h2o-bindings/bin/bindings.py
@@ -243,6 +243,8 @@ def gen_rich_route():
mm = classname_pattern.match(path)
assert mm, "Cannot determine class name in URL " + path
e["class_name"] = mm.group(1)
+ if e["class_name"].islower():
+ e["class_name"] = e["class_name"].capitalize()
# Resolve input/output schemas into actual objects
assert e["input_schema"] in schmap, "Encountered unknown schema %s in %s" % (e["input_schema"], path)
diff --git a/h2o-bindings/build.gradle b/h2o-bindings/build.gradle
index 04cbcc0627e..61e2abcb184 100644
--- a/h2o-bindings/build.gradle
+++ b/h2o-bindings/build.gradle
@@ -17,10 +17,12 @@ dependencies {
compile 'com.squareup.okio:okio:1.8.0'
testCompile project(":h2o-app")
+ testCompile project(":h2o-web")
+ testCompile project(":h2o-avro-parser")
testCompile "junit:junit:${junitVersion}"
// Generator dependencies
- srcGenCompile project( path: ":h2o-assembly", configuration: "shadow")
+ srcGenCompile project(path: ":h2o-assemblies:main", configuration: "shadow")
}
// Configure idea import
diff --git a/h2o-core/build.gradle b/h2o-core/build.gradle
index a9edb13dd69..d16274249b9 100644
--- a/h2o-core/build.gradle
+++ b/h2o-core/build.gradle
@@ -1,3 +1,5 @@
+apply plugin: 'java'
+
//
// H2O Core Module
//
@@ -15,7 +17,7 @@ dependencies {
compile "ai.h2o:google-analytics-java:1.1.2-H2O-CUSTOM"
compile "org.eclipse.jetty.aggregate:jetty-servlet:8.1.17.v20150415"
compile "org.eclipse.jetty:jetty-plus:8.1.17.v20150415"
- compile "com.github.rwl:jtransforms:2.4.0"
+ compile "com.github.rwl:jtransforms:2.4.0"
compile("log4j:log4j:1.2.15") {
exclude module: "activation"
@@ -55,6 +57,7 @@ jar {
}
}
+
// Run a single small JVM under heavy memory load, and confirm spilling works
task testOOM(type: Exec) {
dependsOn cpLibs, jar, testJar
diff --git a/h2o-core/src/main/java/hex/createframe/CreateFrameRecipe.java b/h2o-core/src/main/java/hex/createframe/CreateFrameRecipe.java
index f14f2bea291..d28af6c1262 100644
--- a/h2o-core/src/main/java/hex/createframe/CreateFrameRecipe.java
+++ b/h2o-core/src/main/java/hex/createframe/CreateFrameRecipe.java
@@ -13,8 +13,8 @@
* Base class for all frame creation recipes.
*/
public abstract class CreateFrameRecipe> extends Iced {
- protected Key dest;
- protected long seed = -1;
+ public Key dest;
+ public long seed = -1;
//--------------------------------------------------------------------------------------------------------------------
// Inheritance interface
diff --git a/h2o-core/src/main/java/water/AbstractH2OExtension.java b/h2o-core/src/main/java/water/AbstractH2OExtension.java
index 1b7be598df0..5f150a6a6bf 100644
--- a/h2o-core/src/main/java/water/AbstractH2OExtension.java
+++ b/h2o-core/src/main/java/water/AbstractH2OExtension.java
@@ -12,13 +12,19 @@
/**
* Any up-front initialization that needs to happen before H2O is started.
- * This is called in H2OApp before H2O.main() is called.
+ * This is called in {@code H2OApp} before {@code H2O.main()} is called.
*/
public void init() {}
+
/**
- * Print stuff for
- * java -jar h2o.jar -help
+ * Called during the start up process of {@code H2OApp}, after the local
+ * network connections are opened.
+ */
+ public void onLocalNodeStarted() {}
+
+ /**
+ * Print stuff (into System.out) for {@code java -jar h2o.jar -help}
*/
public void printHelp() {}
@@ -51,7 +57,9 @@ public void validateArguments() {}
*
* @return build information.
*/
- public abstract AbstractBuildVersion getBuildVersion();
+ public AbstractBuildVersion getBuildVersion() {
+ return AbstractBuildVersion.UNKNOWN_VERSION;
+ }
/**
* Print a short message when the extension finishes initializing.
diff --git a/h2o-core/src/main/java/water/H2O.java b/h2o-core/src/main/java/water/H2O.java
index c4c4e5d89e4..e0bb8f9e94a 100644
--- a/h2o-core/src/main/java/water/H2O.java
+++ b/h2o-core/src/main/java/water/H2O.java
@@ -2,14 +2,21 @@
import com.brsanthu.googleanalytics.DefaultRequest;
import com.brsanthu.googleanalytics.GoogleAnalytics;
-
import jsr166y.CountedCompleter;
import jsr166y.ForkJoinPool;
import jsr166y.ForkJoinWorkerThread;
-
import org.apache.log4j.LogManager;
import org.apache.log4j.PropertyConfigurator;
import org.reflections.Reflections;
+import water.UDPRebooted.ShutdownTsk;
+import water.api.RequestServer;
+import water.exceptions.H2OFailException;
+import water.exceptions.H2OIllegalArgumentException;
+import water.init.*;
+import water.nbhm.NonBlockingHashMap;
+import water.parser.ParserService;
+import water.persist.PersistManager;
+import water.util.*;
import java.io.File;
import java.io.IOException;
@@ -17,42 +24,12 @@
import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
-import java.net.Inet4Address;
-import java.net.Inet6Address;
-import java.net.InetAddress;
-import java.net.MulticastSocket;
-import java.net.NetworkInterface;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import java.net.*;
+import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicLong;
-import water.UDPRebooted.ShutdownTsk;
-import water.api.RequestServer;
-import water.exceptions.H2OFailException;
-import water.exceptions.H2OIllegalArgumentException;
-import water.init.AbstractBuildVersion;
-import water.init.AbstractEmbeddedH2OConfig;
-import water.init.JarHash;
-import water.init.NetworkInit;
-import water.init.NodePersistentStorage;
-import water.nbhm.NonBlockingHashMap;
-import water.parser.ParserService;
-import water.persist.PersistManager;
-import water.util.GAUtils;
-import water.util.Log;
-import water.util.NetworkUtils;
-import water.util.OSUtils;
-import water.util.PrettyPrint;
-
/**
* Start point for creating or joining an H2O Cloud.
*
@@ -242,6 +219,7 @@ public static void printHelp() {
/** -user_name=user_name; Set user name */
public String user_name = System.getProperty("user.name");
+
//-----------------------------------------------------------------------------------
// Node configuration
//-----------------------------------------------------------------------------------
@@ -746,24 +724,10 @@ public static void registerExtensions() {
// Disallow schemas whose parent is in another package because it takes ~4s to do the getSubTypesOf call.
String[] packages = new String[]{"water", "hex"};
-
- for (String pkg : packages) {
- Reflections reflections = new Reflections(pkg);
- for (Class registerClass : reflections.getSubTypesOf(water.AbstractH2OExtension.class)) {
- if (!Modifier.isAbstract(registerClass.getModifiers())) {
- try {
- Object instance = registerClass.newInstance();
- water.AbstractH2OExtension e = (water.AbstractH2OExtension) instance;
- H2O.addExtension(e);
- } catch (Exception e) {
- throw H2O.fail(e.toString());
- }
- }
- }
- }
-
- for (AbstractH2OExtension e : H2O.getExtensions()) {
+ ServiceLoader extensionsLoader = ServiceLoader.load(AbstractH2OExtension.class);
+ for (AbstractH2OExtension e : extensionsLoader) {
e.init();
+ extensions.add(e);
}
extensionsRegistered = true;
@@ -1867,6 +1831,11 @@ public static void main( String[] args ) {
// Start the local node. Needed before starting logging.
startLocalNode();
+ // Allow extensions to perform initialization that requires the network.
+ for (AbstractH2OExtension ext: extensions) {
+ ext.onLocalNodeStarted();
+ }
+
try {
String logDir = Log.getLogDir();
Log.info("Log dir: '" + logDir + "'");
diff --git a/h2o-core/src/main/java/water/api/JobsHandler.java b/h2o-core/src/main/java/water/api/JobsHandler.java
index cd444fbe18c..63f0d9dd87b 100644
--- a/h2o-core/src/main/java/water/api/JobsHandler.java
+++ b/h2o-core/src/main/java/water/api/JobsHandler.java
@@ -3,6 +3,8 @@
import water.*;
import water.api.schemas3.JobV3;
import water.api.schemas3.JobsV3;
+import water.api.schemas4.input.JobIV4;
+import water.api.schemas4.output.JobV4;
import water.exceptions.H2ONotFoundArgumentException;
public class JobsHandler extends Handler {
@@ -51,4 +53,32 @@ public JobsV3 cancel(int version, JobsV3 c) {
j.stop(); // Request Job stop
return c;
}
+
+
+ public static class FetchJob extends RestApiHandler {
+
+ @Override public String name() {
+ return "getJob4";
+ }
+
+ @Override public String help() {
+ return "Retrieve information about the current state of a job.";
+ }
+
+ @Override
+ public JobV4 exec(int ignored, JobIV4 input) {
+ Key key = Key.make(input.job_id);
+ Value val = DKV.get(key);
+ if (val == null)
+ throw new IllegalArgumentException("Job " + input.job_id + " is missing");
+ Iced iced = val.get();
+ if (!(iced instanceof Job))
+ throw new IllegalArgumentException("Id " + input.job_id + " references a " + iced.getClass() + " not a Job");
+
+ Job job = (Job) iced;
+ JobV4 out = new JobV4();
+ out.fillFromImpl(job);
+ return out;
+ }
+ }
}
diff --git a/h2o-core/src/main/java/water/api/RegisterV4Api.java b/h2o-core/src/main/java/water/api/RegisterV4Api.java
index 4ecffeeac45..83684f6f651 100644
--- a/h2o-core/src/main/java/water/api/RegisterV4Api.java
+++ b/h2o-core/src/main/java/water/api/RegisterV4Api.java
@@ -38,5 +38,8 @@ public void register(String relativeResourcePath) {
//------------ Frames ----------------------------------------------------------------------------------------------
registerEndpoint("POST /4/Frames/$simple", CreateFrameHandler.CreateSimpleFrame.class);
+
+ //------------ Jobs ------------------------------------------------------------------------------------------------
+ registerEndpoint("GET /4/jobs/{job_id}", JobsHandler.FetchJob.class);
}
}
diff --git a/h2o-core/src/main/java/water/api/schemas4/input/JobIV4.java b/h2o-core/src/main/java/water/api/schemas4/input/JobIV4.java
new file mode 100644
index 00000000000..f46104e4281
--- /dev/null
+++ b/h2o-core/src/main/java/water/api/schemas4/input/JobIV4.java
@@ -0,0 +1,15 @@
+package water.api.schemas4.input;
+
+import water.Iced;
+import water.api.API;
+import water.api.schemas4.InputSchemaV4;
+
+/**
+ * Input schema for the {@code "GET /4/jobs/{job_id}"} endpoint.
+ */
+public class JobIV4 extends InputSchemaV4 {
+
+ @API(help="Id of the job to fetch.")
+ public String job_id;
+
+}
diff --git a/h2o-core/src/main/java/water/api/schemas4/output/JobV4.java b/h2o-core/src/main/java/water/api/schemas4/output/JobV4.java
index c380ccf25f9..6669232fe30 100644
--- a/h2o-core/src/main/java/water/api/schemas4/output/JobV4.java
+++ b/h2o-core/src/main/java/water/api/schemas4/output/JobV4.java
@@ -1,10 +1,8 @@
package water.api.schemas4.output;
import water.Job;
-import water.Keyed;
import water.TypeMap;
import water.api.API;
-import water.api.schemas3.KeyV3;
import water.api.schemas4.OutputSchemaV4;
import java.io.PrintWriter;
@@ -14,10 +12,8 @@
/** Schema for a single Job. */
public class JobV4 extends OutputSchemaV4, JobV4> {
- // TODO: replace all KeyV3's with KeyV4's
-
- @API(help="Job key")
- public KeyV3.JobKeyV3 key;
+ @API(help="Job id")
+ public String job_id;
@API(help="Job status", values={"RUNNING", "DONE", "STOPPING", "CANCELLED", "FAILED"})
public Status status;
@@ -34,8 +30,11 @@
@API(help="Runtime in milliseconds")
public long duration;
- @API(help="Key of the target object (being created by this Job)")
- public KeyV3 dest;
+ @API(help="Id of the target object (being created by this Job)")
+ public String target_id;
+
+ @API(help="Type of the target: Frame, Model, etc.")
+ public String target_type;
@API(help="Exception message, if an exception occurred")
public String exception;
@@ -43,9 +42,6 @@
@API(help="Stacktrace")
public String stacktrace;
- @API(help="ready for view")
- public boolean ready_for_view;
-
public enum Status {
RUNNING, DONE, STOPPING, CANCELLED, FAILED
}
@@ -54,11 +50,10 @@
@Override public JobV4 fillFromImpl(Job> job) {
if (job == null) return this;
- key = new KeyV3.JobKeyV3(job._key);
+ job_id = job._key.toString();
progress = job.progress();
progress_msg = job.progress_msg();
duration = job.msec();
- ready_for_view = job.readyForView();
if (job.isRunning()) {
status = job.stop_requested()? Status.STOPPING : Status.RUNNING;
@@ -74,8 +69,9 @@
stacktrace = sw.toString();
}
- Keyed dest_type = (Keyed) TypeMap.theFreezable(job._typeid);
- dest = job._result == null ? null : KeyV3.make(dest_type.makeSchema(), job._result);
+ target_id = job._result == null || !job.readyForView()? null : job._result.toString();
+ target_type = TypeMap.theFreezable(job._typeid).getClass().getSimpleName();
+
return this;
}
diff --git a/h2o-core/src/main/java/water/init/NetworkInit.java b/h2o-core/src/main/java/water/init/NetworkInit.java
index b7b3ae461ca..e9a41205601 100644
--- a/h2o-core/src/main/java/water/init/NetworkInit.java
+++ b/h2o-core/src/main/java/water/init/NetworkInit.java
@@ -1,43 +1,21 @@
package water.init;
-import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.net.DatagramPacket;
-import java.net.Inet4Address;
-import java.net.Inet6Address;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.MulticastSocket;
-import java.net.NetworkInterface;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.SocketException;
-import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
-import java.nio.channels.DatagramChannel;
-import java.nio.channels.ServerSocketChannel;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Enumeration;
-import java.util.HashSet;
-import java.util.List;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
import water.H2O;
import water.H2ONode;
import water.JettyHTTPD;
-import water.Paxos;
import water.util.Log;
import water.util.NetworkUtils;
import water.util.OSUtils;
+import java.io.*;
+import java.net.*;
+import java.nio.ByteBuffer;
+import java.nio.channels.DatagramChannel;
+import java.nio.channels.ServerSocketChannel;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
/**
* Data structure for holding network info specified by the user on the command line.
*/
@@ -457,6 +435,7 @@ public static void initializeNetworkSockets( ) {
apiSocket.close();
H2O.getJetty().start(H2O.ARGS.web_ip, H2O.API_PORT);
}
+
break;
} catch (Exception e) {
Log.trace("Cannot allocate API port " + H2O.API_PORT + " because of following exception: ", e);
diff --git a/h2o-docs/build.gradle b/h2o-docs/build.gradle
index e5a79460e71..591b941f1d8 100644
--- a/h2o-docs/build.gradle
+++ b/h2o-docs/build.gradle
@@ -3,7 +3,7 @@ description = "H2O Documentation"
apply plugin: 'java'
dependencies {
- compile project(":h2o-assembly")
+ compile project(":h2o-assemblies:main")
}
diff --git a/h2o-grpc/build.gradle b/h2o-grpc/build.gradle
new file mode 100644
index 00000000000..d34bdd4f2e8
--- /dev/null
+++ b/h2o-grpc/build.gradle
@@ -0,0 +1,131 @@
+import org.apache.tools.ant.taskdefs.condition.Os
+
+plugins {
+ id "com.google.protobuf" version "0.8.0"
+}
+
+apply plugin: 'java'
+apply plugin: 'com.google.protobuf'
+
+repositories {
+ mavenCentral()
+}
+
+def grpcVersion = '1.0.3'
+
+// Detect the correct version of python executable to use
+def pythonexe = "python"
+def pipexe = "pip"
+if (System.env.VIRTUAL_ENV) {
+ pythonexe = "${System.env.VIRTUAL_ENV}/bin/python"
+ pipexe = "${System.env.VIRTUAL_ENV}/bin/pip"
+}
+
+
+dependencies {
+ compile project(":h2o-core")
+ compile 'com.google.protobuf:protobuf-java:3.0.0'
+ compile "io.grpc:grpc-netty:${grpcVersion}"
+ compile "io.grpc:grpc-protobuf:${grpcVersion}"
+ compile "io.grpc:grpc-stub:${grpcVersion}"
+}
+
+
+//----------------------------------------------------------------------------------------------------------------------
+// Compile Java Protobuf+GRPC objects
+//----------------------------------------------------------------------------------------------------------------------
+
+protobuf {
+ // Configure the protoc executable (for compiling the *.proto files)
+ protoc {
+ artifact = 'com.google.protobuf:protoc:3.0.2'
+ }
+
+ // Configure the codegen plugins
+ plugins {
+ grpc {
+ artifact = "io.grpc:protoc-gen-grpc-java:${grpcVersion}"
+ }
+ }
+
+ generateProtoTasks {
+ ofSourceSet('main').each { task ->
+ task.builtins {
+ java {}
+ }
+ task.plugins {
+ // Add grpc output without any option. grpc must have been defined in the
+ // protobuf.plugins block.
+ grpc {}
+ }
+ }
+ }
+
+ generatedFilesBaseDir = "${projectDir}/proto-gen"
+}
+
+
+//----------------------------------------------------------------------------------------------------------------------
+// Compile Python Protobuf+GRPC classes
+//----------------------------------------------------------------------------------------------------------------------
+def pyProtoDir = "${projectDir}/proto-gen/main/python"
+
+static List getOsSpecificCommandLine(List args) {
+ return Os.isFamily(Os.FAMILY_WINDOWS) ? ['cmd', '/c'] + args : args
+}
+
+task installGrpciotoolsModule(type: Exec) {
+ doFirst {
+ standardOutput = new FileOutputStream("build/tmp/h2o-grpc_installGrpciotoolsModule.out")
+ }
+ commandLine getOsSpecificCommandLine([pipexe, "install", "grpcio-tools>=1.1.0"])
+}
+
+task makeProtoOutputDir(type: Exec) {
+ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+ commandLine 'cmd', '/c', 'mkdir', pyProtoDir
+ } else {
+ commandLine 'mkdir', '-p', pyProtoDir
+ }
+}
+
+task generatePyProtoFiles(type: Exec) {
+ dependsOn installGrpciotoolsModule
+ dependsOn makeProtoOutputDir
+
+ commandLine getOsSpecificCommandLine([
+ pythonexe, "-m", "grpc_tools.protoc",
+ "-I${projectDir}/src/main/proto",
+ "--python_out=${pyProtoDir}",
+ "--grpc_python_out=${pyProtoDir}",
+ // TODO: auto-detect the list of files and process them all at once
+ "${projectDir}/src/main/proto/core/common.proto",
+ "${projectDir}/src/main/proto/core/job.proto"
+ ])
+}
+
+build.dependsOn generatePyProtoFiles
+
+
+//----------------------------------------------------------------------------------------------------------------------
+// Instruct IDEA to use the generated files as sources
+//----------------------------------------------------------------------------------------------------------------------
+
+idea {
+ module {
+ sourceDirs += file("${protobuf.generatedFilesBaseDir}/main/java");
+ sourceDirs += file("${protobuf.generatedFilesBaseDir}/main/grpc");
+ sourceDirs += file("${protobuf.generatedFilesBaseDir}/main/python");
+ }
+}
+
+
+//----------------------------------------------------------------------------------------------------------------------
+// Clean up
+//----------------------------------------------------------------------------------------------------------------------
+
+task cleanGeneratedProtoClasses(type: Delete) {
+ delete protobuf.generatedFilesBaseDir
+}
+
+clean.dependsOn cleanGeneratedProtoClasses
diff --git a/h2o-grpc/src/main/java/ai/h2o/api/GrpcExtension.java b/h2o-grpc/src/main/java/ai/h2o/api/GrpcExtension.java
new file mode 100644
index 00000000000..989102129e2
--- /dev/null
+++ b/h2o-grpc/src/main/java/ai/h2o/api/GrpcExtension.java
@@ -0,0 +1,81 @@
+package ai.h2o.api;
+
+import io.grpc.Server;
+import io.grpc.ServerBuilder;
+import water.AbstractH2OExtension;
+import water.H2O;
+import water.util.Log;
+
+import java.io.IOException;
+
+/**
+ * H2O extension that starts a gRPC server on the port given by the -grpc_port argument. */
+public class GrpcExtension extends AbstractH2OExtension {
+ private int grpcPort = 0; // 0 means that GRPC service should not start
+ private Server netty;
+
+ @Override
+ public String getExtensionName() {
+ return "GRPC";
+ }
+
+ @Override
+ public void printHelp() {
+ System.out.println(
+ "\nGRPC extension:\n" +
+ " -grpc_port\n" +
+ " Port number on which to start the GRPC service. If not \n" +
+ " specified, GRPC service will not be started."
+ );
+ }
+
+ @Override
+ public String[] parseArguments(String[] args) {
+ for (int i = 0; i < args.length - 1; i++) {
+ H2O.OptString s = new H2O.OptString(args[i]);
+ if (s.matches("grpc_port")) {
+ grpcPort = s.parseInt(args[i + 1]);
+ String[] new_args = new String[args.length - 2];
+ System.arraycopy(args, 0, new_args, 0, i);
+ System.arraycopy(args, i + 2, new_args, i, args.length - (i + 2));
+ return new_args;
+ }
+ }
+ return args;
+ }
+
+ @Override
+ public void validateArguments() {
+ if (grpcPort < 0 || grpcPort >= 65536) {
+ H2O.parseFailed("Invalid port number: " + grpcPort);
+ }
+ }
+
+ @Override
+ public void onLocalNodeStarted() {
+ if (grpcPort != 0) {
+ ServerBuilder sb = ServerBuilder.forPort(grpcPort);
+ RegisterGrpcApi.registerWithServer(sb);
+ netty = sb.build();
+ try {
+        Log.info("Starting GRPC server on port " + grpcPort);
+ netty.start();
+ } catch (IOException e) {
+ netty = null;
+        throw new RuntimeException("Failed to start the GRPC server on port " + grpcPort, e);
+ }
+ Runtime.getRuntime().addShutdownHook(new Thread() {
+ @Override
+ public void run() {
+ if (netty != null) {
+ // Use stderr here since the logger may have been reset by its JVM shutdown hook.
+ System.err.println("*** shutting down gRPC server since JVM is shutting down");
+ netty.shutdown();
+ System.err.println("*** server shut down");
+ }
+ }
+ });
+ }
+ }
+
+}
diff --git a/h2o-grpc/src/main/java/ai/h2o/api/RegisterGrpcApi.java b/h2o-grpc/src/main/java/ai/h2o/api/RegisterGrpcApi.java
new file mode 100644
index 00000000000..1210f09f61c
--- /dev/null
+++ b/h2o-grpc/src/main/java/ai/h2o/api/RegisterGrpcApi.java
@@ -0,0 +1,14 @@
+package ai.h2o.api;
+
+import ai.h2o.api.proto.core.JobService;
+import io.grpc.ServerBuilder;
+
+
+/**
+ * Registers all available gRPC service implementations with a ServerBuilder. */
+abstract class RegisterGrpcApi {
+
+ static void registerWithServer(ServerBuilder sb) {
+ sb.addService(new JobService());
+ }
+}
diff --git a/h2o-grpc/src/main/java/ai/h2o/api/proto/core/GrpcCommon.java b/h2o-grpc/src/main/java/ai/h2o/api/proto/core/GrpcCommon.java
new file mode 100644
index 00000000000..23c744cabd8
--- /dev/null
+++ b/h2o-grpc/src/main/java/ai/h2o/api/proto/core/GrpcCommon.java
@@ -0,0 +1,43 @@
+package ai.h2o.api.proto.core;
+
+import com.google.protobuf.Message;
+import io.grpc.stub.StreamObserver;
+
+import java.lang.reflect.Method;
+
+/**
+ * Shared helpers for gRPC service implementations: sending error responses and
+ * converting a Throwable into a proto Error message.
+public abstract class GrpcCommon {
+
+  public static <T extends Message> void sendError(Throwable e, StreamObserver<T> responseObserver, Class<T> clz) {
+ try {
+ Method m = clz.getDeclaredMethod("newBuilder");
+ Message.Builder builder = (Message.Builder) m.invoke(null);
+
+ Method em = builder.getClass().getDeclaredMethod("setError", Error.class);
+ em.invoke(builder, buildError(e, 0));
+
+ responseObserver.onNext((T) builder.build());
+ responseObserver.onCompleted();
+ } catch (Exception e2) {
+ throw new RuntimeException(e2.getMessage(), e);
+ }
+ }
+
+
+ public static Error buildError(Throwable e, int depth) {
+ Error.Builder eb = Error.newBuilder();
+ eb.setMessage(e.getMessage());
+ StringBuilder sb = new StringBuilder();
+ for (StackTraceElement st: e.getStackTrace()) {
+ sb.append(st.toString()).append("\n");
+ }
+ eb.setStacktrace(sb.toString());
+ if (e.getCause() != null && depth < 3) {
+ eb.setCause(buildError(e.getCause(), depth + 1));
+ }
+ return eb.build();
+ }
+
+}
diff --git a/h2o-grpc/src/main/java/ai/h2o/api/proto/core/JobService.java b/h2o-grpc/src/main/java/ai/h2o/api/proto/core/JobService.java
new file mode 100644
index 00000000000..682215391a6
--- /dev/null
+++ b/h2o-grpc/src/main/java/ai/h2o/api/proto/core/JobService.java
@@ -0,0 +1,84 @@
+package ai.h2o.api.proto.core;
+
+import io.grpc.stub.StreamObserver;
+import water.*;
+
+import static ai.h2o.api.proto.core.JobInfo.Status.*;
+
+
+/**
+ * Implementation of the Job gRPC service (poll / cancel) declared in core/job.proto. */
+public class JobService extends JobGrpc.JobImplBase {
+
+ @Override
+ public void poll(JobId request, StreamObserver responseObserver) {
+ try {
+ water.Job job = resolveJob(request);
+ responseObserver.onNext(fillJobInfo(job));
+ responseObserver.onCompleted();
+ } catch (Throwable ex) {
+ GrpcCommon.sendError(ex, responseObserver, JobInfo.class);
+ }
+ }
+
+ @Override
+ public void cancel(JobId request, StreamObserver responseObserver) {
+ try {
+ water.Job job = resolveJob(request);
+ job.stop();
+ responseObserver.onNext(fillJobInfo(job));
+ responseObserver.onCompleted();
+ } catch (Throwable ex) {
+ GrpcCommon.sendError(ex, responseObserver, JobInfo.class);
+ }
+ }
+
+
+
+ //--------------------------------------------------------------------------------------------------------------------
+ // Helpers
+ //--------------------------------------------------------------------------------------------------------------------
+
+ private static water.Job resolveJob(JobId request) {
+ String strId = request.getJobId();
+ Value val = DKV.get(Key.make(strId));
+ if (val == null) {
+ throw new IllegalArgumentException("Job " + strId + " not found in the DKV");
+ }
+ Iced iced = val.get();
+ if (iced instanceof Job) {
+ return (water.Job) iced;
+ } else {
+ throw new IllegalArgumentException("Id " + strId + " does not reference a Job but a " + iced.getClass());
+ }
+ }
+
+
+ public static JobInfo fillJobInfo(water.Job job) {
+ JobInfo.Builder jb = JobInfo.newBuilder();
+ jb.setJobId(job._key.toString())
+ .setProgress(job.progress())
+ .setMessage(job.progress_msg())
+ .setDuration(job.msec());
+
+ if (job.isRunning()) {
+ jb.setStatus(job.stop_requested()? STOPPING : RUNNING);
+ } else {
+ jb.setStatus(job.stop_requested()? CANCELLED : DONE);
+ }
+
+ Throwable ex = job.ex();
+ if (ex != null) {
+ jb.setStatus(FAILED)
+ .setError(GrpcCommon.buildError(ex, 0));
+ }
+
+ if (job._result != null && !job.readyForView())
+ jb.setTargetId(job._result.toString());
+
+ String ttype = TypeMap.theFreezable(job._typeid).getClass().getSimpleName();
+    jb.setTargetType(JobInfo.TargetType.valueOf(ttype.toUpperCase()));
+
+ return jb.build();
+ }
+}
diff --git a/h2o-grpc/src/main/java/ai/h2o/api/proto/frames/CreateFrameService.java b/h2o-grpc/src/main/java/ai/h2o/api/proto/frames/CreateFrameService.java
new file mode 100644
index 00000000000..9aa00e80043
--- /dev/null
+++ b/h2o-grpc/src/main/java/ai/h2o/api/proto/frames/CreateFrameService.java
@@ -0,0 +1,55 @@
+package ai.h2o.api.proto.frames;
+
+
+import ai.h2o.api.proto.core.GrpcCommon;
+import ai.h2o.api.proto.core.JobInfo;
+import ai.h2o.api.proto.core.JobService;
+import hex.createframe.recipes.SimpleCreateFrameRecipe;
+import io.grpc.stub.StreamObserver;
+import water.Job;
+import water.Key;
+import water.fvec.Frame;
+
+/**
+ * gRPC service that builds synthetic frames using SimpleCreateFrameRecipe. */
+public class CreateFrameService extends CreateFrameGrpc.CreateFrameImplBase {
+
+ @Override
+ public void simple(CreateFrameSimpleSpec request, StreamObserver responseObserver) {
+ try {
+ SimpleCreateFrameRecipe cf = new SimpleCreateFrameRecipe();
+ cf.dest = Key.make(request.getTargetId());
+ cf.seed = request.getSeed();
+ cf.nrows = request.getNrows();
+ cf.ncols_real = request.getNcolsReal();
+ cf.ncols_int = request.getNcolsInt();
+ cf.ncols_enum = request.getNcolsEnum();
+ cf.ncols_bool = request.getNcolsBool();
+ cf.ncols_str = request.getNcolsStr();
+ cf.ncols_time = request.getNcolsTime();
+ cf.real_lb = request.getRealLb();
+ cf.real_ub = request.getRealUb();
+ cf.int_lb = request.getIntLb();
+ cf.int_ub = request.getIntUb();
+ cf.enum_nlevels = request.getEnumNlevels();
+ cf.bool_p = request.getBoolP();
+ cf.time_lb = request.getTimeLb();
+ cf.time_ub = request.getTimeUb();
+ cf.str_length = request.getStrLength();
+ cf.missing_fraction = request.getMissingFraction();
+ cf.response_type = SimpleCreateFrameRecipe.ResponseType.valueOf(request.getResponseType().toString());
+ cf.response_lb = request.getResponseLb();
+ cf.response_ub = request.getResponseUb();
+ cf.response_p = request.getResponseP();
+ cf.response_nlevels = request.getResponseNlevels();
+
+ Job job = cf.exec();
+ responseObserver.onNext(JobService.fillJobInfo(job));
+ responseObserver.onCompleted();
+
+ } catch (Throwable ex) {
+ GrpcCommon.sendError(ex, responseObserver, JobInfo.class);
+ }
+ }
+
+}
diff --git a/h2o-grpc/src/main/proto/core/common.proto b/h2o-grpc/src/main/proto/core/common.proto
new file mode 100644
index 00000000000..1341724efbb
--- /dev/null
+++ b/h2o-grpc/src/main/proto/core/common.proto
@@ -0,0 +1,20 @@
+syntax = "proto3";
+
+package core;
+option java_package = "ai.h2o.api.proto.core";
+option java_multiple_files = true;
+
+
+message Empty {
+ // Empty message that can be reused for any RPC that takes / returns
+ // no values
+}
+
+
+message Error {
+  // Human-readable error description (the exception's message)
+ string message = 1;
+ string stacktrace = 2;
+ Error cause = 3;
+}
+
diff --git a/h2o-grpc/src/main/proto/core/create_frame.proto b/h2o-grpc/src/main/proto/core/create_frame.proto
new file mode 100644
index 00000000000..69ad8a91ce1
--- /dev/null
+++ b/h2o-grpc/src/main/proto/core/create_frame.proto
@@ -0,0 +1,81 @@
+syntax = "proto3";
+
+import "core/job.proto";
+
+option java_package = "ai.h2o.api.proto.frames";
+option java_multiple_files = true;
+
+package core;
+
+
+service CreateFrame {
+ rpc simple (CreateFrameSimpleSpec) returns (JobInfo);
+}
+
+
+message CreateFrameSimpleSpec {
+ enum ResponseType {
+ NONE = 0;
+ REAL = 1;
+ INT = 2;
+ BOOL = 3;
+ ENUM = 4;
+ TIME = 5;
+ }
+
+ // Id for the frame to be created
+ string target_id = 1;
+
+ // Seed for the random number generator. Providing same seed should produce exactly same frames
+ int64 seed = 2;
+
+ // Number of rows
+ int32 nrows = 3;
+
+ // Number of real-valued columns. Values in these columns will be uniformly distributed between
+ // real_lb and real_ub
+ int32 ncols_real = 4;
+
+ // Number of integer columns
+ int32 ncols_int = 5;
+
+ // Number of categorical (enum) columns
+ int32 ncols_enum = 6;
+
+ // Number of binary (boolean) columns
+ int32 ncols_bool = 7;
+
+ // Number of string columns
+ int32 ncols_str = 8;
+
+ // Number of columns of "time" type
+ int32 ncols_time = 9;
+
+ double real_lb = 10;
+ double real_ub = 11;
+ int32 int_lb = 12;
+ int32 int_ub = 13;
+ int32 enum_nlevels = 14;
+ float bool_p = 15;
+ int64 time_lb = 16;
+ int64 time_ub = 17;
+ int32 str_length = 18;
+
+ // Fraction of missing values
+ float missing_fraction = 19;
+
+ // Type of the response column to add
+ ResponseType response_type = 20;
+
+ // Lower bound for the response variable (real/int/time types)
+ double response_lb = 21;
+
+ // Upper bound for the response variable (real/int/time types)
+ double response_ub = 22;
+
+ // Frequency of 1s for the bool (binary) response column
+ double response_p = 23;
+
+ // Number of categorical levels for the enum response column
+ int32 response_nlevels = 24;
+}
diff --git a/h2o-grpc/src/main/proto/core/job.proto b/h2o-grpc/src/main/proto/core/job.proto
new file mode 100644
index 00000000000..faee5db8849
--- /dev/null
+++ b/h2o-grpc/src/main/proto/core/job.proto
@@ -0,0 +1,49 @@
+syntax = "proto3";
+
+import "core/common.proto";
+
+option java_package = "ai.h2o.api.proto.core";
+//option java_outer_classname = "JobProto";
+option java_multiple_files = true;
+
+package core;
+
+
+service Job {
+ rpc poll (JobId) returns (JobInfo);
+
+ rpc cancel(JobId) returns (JobInfo);
+}
+
+
+message JobId {
+ string job_id = 1;
+}
+
+message JobInfo {
+ Error error = 1;
+
+ enum Status {
+ RUNNING = 0;
+ STOPPING = 1;
+ DONE = 2;
+ CANCELLED = 3;
+ FAILED = 4;
+ }
+
+ enum TargetType {
+ FRAME = 0;
+ MODEL = 1;
+ }
+
+ string job_id = 2;
+ Status status = 3;
+ float progress = 4;
+ string message = 5;
+ string target_id = 6;
+ TargetType target_type = 7;
+
+ int64 start_time = 8;
+ int64 duration = 9;
+ repeated string warning = 10;
+}
diff --git a/h2o-grpc/src/main/resources/META-INF/services/water.AbstractH2OExtension b/h2o-grpc/src/main/resources/META-INF/services/water.AbstractH2OExtension
new file mode 100644
index 00000000000..9da5cadc194
--- /dev/null
+++ b/h2o-grpc/src/main/resources/META-INF/services/water.AbstractH2OExtension
@@ -0,0 +1 @@
+ai.h2o.api.GrpcExtension
diff --git a/h2o-hadoop/assemblyjar.gradle b/h2o-hadoop/assemblyjar.gradle
index ae5ee01d426..d47bef8910e 100644
--- a/h2o-hadoop/assemblyjar.gradle
+++ b/h2o-hadoop/assemblyjar.gradle
@@ -15,6 +15,8 @@ dependencies {
}
compile project(':h2o-hadoop:h2o-' + hadoopVersion)
compile project(':h2o-app')
+ compile project(":h2o-web")
+ compile project(":h2o-avro-parser")
// Include S3 persist layer
compile(project(":h2o-persist-s3"))
// Include HDFS persist layer
diff --git a/h2o-hadoop/driverjar.gradle b/h2o-hadoop/driverjar.gradle
index 21d00efc295..560bda5ec54 100644
--- a/h2o-hadoop/driverjar.gradle
+++ b/h2o-hadoop/driverjar.gradle
@@ -30,5 +30,7 @@ dependencies {
compile('org.apache.hadoop:hadoop-client:' + hadoopMavenArtifactVersion)
}
compile project(':h2o-app')
+ compile project(":h2o-web")
+ compile project(":h2o-avro-parser")
}
diff --git a/h2o-hadoop/h2o-mapreduce-generic/build.gradle b/h2o-hadoop/h2o-mapreduce-generic/build.gradle
index da5b7e9bbc2..d561dc64667 100644
--- a/h2o-hadoop/h2o-mapreduce-generic/build.gradle
+++ b/h2o-hadoop/h2o-mapreduce-generic/build.gradle
@@ -17,4 +17,6 @@ compileJava {
dependencies {
compile('org.apache.hadoop:hadoop-client:' + hadoopMavenArtifactVersion)
compile project(':h2o-app')
+ compile project(":h2o-web")
+ compile project(":h2o-avro-parser")
}
diff --git a/h2o-hadoop/h2o-yarn-generic/build.gradle b/h2o-hadoop/h2o-yarn-generic/build.gradle
index c438e459407..2e663683dd1 100644
--- a/h2o-hadoop/h2o-yarn-generic/build.gradle
+++ b/h2o-hadoop/h2o-yarn-generic/build.gradle
@@ -17,4 +17,6 @@ compileJava {
dependencies {
compile('org.apache.hadoop:hadoop-client:' + hadoopMavenArtifactVersion)
compile project(':h2o-app')
+ compile project(":h2o-web")
+ compile project(":h2o-avro-parser")
}
diff --git a/h2o-py/build.gradle b/h2o-py/build.gradle
index e2195237f99..e8932b0d4e4 100644
--- a/h2o-py/build.gradle
+++ b/h2o-py/build.gradle
@@ -1,4 +1,3 @@
-import org.apache.commons.io.output.ByteArrayOutputStream
import org.apache.tools.ant.taskdefs.condition.Os
import java.nio.file.FileSystems
import java.nio.file.Files
@@ -9,16 +8,20 @@ import static java.nio.file.StandardCopyOption.*;
defaultTasks 'build_python'
description = "H2O Python Package"
-dependencies {
- compile project(":h2o-assembly")
+java.lang.String pythonexe = "python"
+java.lang.String pipexe = "pip"
+if (System.env.VIRTUAL_ENV) {
+ pythonexe = "${System.env.VIRTUAL_ENV}/bin/python".toString()
+ pipexe = "${System.env.VIRTUAL_ENV}/bin/pip".toString()
}
-def getOS() {
- String os = [Os.FAMILY_WINDOWS, Os.FAMILY_MAC, Os.FAMILY_UNIX].find {String family -> Os.isFamily(family) }
- return os
+dependencies {
+ compile project(":h2o-assemblies:main")
}
-def getOsSpecificCommandLine(args) { return Os.isFamily(Os.FAMILY_WINDOWS) ? [ 'cmd', '/c' ] + args : args }
+static List getOsSpecificCommandLine(List args) {
+ return Os.isFamily(Os.FAMILY_WINDOWS) ? [ 'cmd', '/c' ] + args : args
+}
def bv = new H2OBuildVersion(rootDir, version)
@@ -27,19 +30,20 @@ ext {
T = getProjectDir().toString()
}
+
task makeOutputDir(type: Exec) {
- if(Os.isFamily(Os.FAMILY_WINDOWS)){
+ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
commandLine getOsSpecificCommandLine(['if not exist "build\\tmp" mkdir', 'build\\tmp'])
} else {
commandLine getOsSpecificCommandLine(['mkdir', '-p', 'build/tmp'])
}
}
+
task setProjectVersion << {
File INIT = new File([T, "h2o", "__init__.py"].join(File.separator))
println " INIT.path = " + INIT.path
- def txt=""
- txt = INIT.text
+ def txt = INIT.text
txt = txt.replaceAll("SUBST_PROJECT_VERSION", PROJECT_VERSION)
INIT.write(txt)
}
@@ -47,35 +51,45 @@ task setProjectVersion << {
task resetProjectVersion << {
File INIT = new File([T, "h2o", "__init__.py"].join(File.separator))
println " INIT.path = " + INIT.path
- def txt=""
- txt = INIT.text
+ def txt = INIT.text
txt = txt.replaceAll(PROJECT_VERSION, "SUBST_PROJECT_VERSION")
INIT.write(txt)
}
-task upgradeOrInstallTabulate(type: Exec) {
- commandLine getOsSpecificCommandLine(["pip", "install", "tabulate", "--user", "--upgrade"])
+task installPythonDependencies(type: Exec) {
+ doFirst {
+ standardOutput = new FileOutputStream("build/tmp/h2o-py_installPythonDependencies.out")
+ }
+ commandLine getOsSpecificCommandLine([
+ pipexe, "install",
+ "tabulate>=0.7.5",
+ "requests>=2.10",
+ "colorama>=0.3.7",
+ "future>=0.15.2"
+ ])
}
-task upgradeOrInstallWheel(type: Exec) {
- commandLine getOsSpecificCommandLine(["pip", "install", "wheel", "--user", "--upgrade", "--ignore-installed"])
-}
-task buildDist(type: Exec, dependsOn: makeOutputDir) {
+
+task buildDist(type: Exec) {
+ dependsOn installPythonDependencies
+ dependsOn setProjectVersion
+ dependsOn makeOutputDir
+
doFirst {
standardOutput = new FileOutputStream("build/tmp/h2o-py_buildDist.out")
}
- commandLine getOsSpecificCommandLine(["python", "setup.py", "bdist_wheel"])
+ commandLine getOsSpecificCommandLine([pythonexe, "setup.py", "bdist_wheel"])
}
-def testsPath = new File("./tests")
+File testsPath = new File("./tests")
task smokeTest(type: Exec) {
dependsOn build
println "PyUnit smoke test (run.py --wipeall --testsize s)..."
workingDir testsPath
commandLine 'pwd'
- def args = ['python', '../../scripts/run.py', '--wipeall', '--testsize', 's']
+ List args = [pythonexe, '../../scripts/run.py', '--wipeall', '--testsize', 's']
if (project.hasProperty("jacocoCoverage")) {
args << '--jacoco'
}
@@ -100,10 +114,6 @@ task cleaner << {
}
clean.dependsOn cleaner, cleanUpSmokeTest
-upgradeOrInstallWheel.dependsOn cleaner
-// buildDist.dependsOn upgradeOrInstallTabulate
-// buildDist.dependsOn upgradeOrInstallWheel
-buildDist.dependsOn setProjectVersion
resetProjectVersion.dependsOn buildDist
task build_python(dependsOn: resetProjectVersion)
build.dependsOn build_python
diff --git a/h2o-py/h2o/h2o.py b/h2o-py/h2o/h2o.py
index 84fa11b0bce..117f6ba590f 100644
--- a/h2o-py/h2o/h2o.py
+++ b/h2o-py/h2o/h2o.py
@@ -105,11 +105,11 @@ def connection():
def version_check():
"""Used to verify that h2o-python module and the H2O server are compatible with each other."""
+ from .__init__ import __version__ as ver_pkg
ci = h2oconn.cluster
if not ci:
raise H2OConnectionError("Connection not initialized. Did you run h2o.connect()?")
ver_h2o = ci.version
- from .__init__ import __version__ as ver_pkg
if ver_pkg == "SUBST_PROJECT_VERSION": ver_pkg = "UNKNOWN"
if str(ver_h2o) != str(ver_pkg):
branch_name_h2o = ci.branch_name
diff --git a/h2o-r/build.gradle b/h2o-r/build.gradle
index 1f7d629378a..594bb7e4970 100644
--- a/h2o-r/build.gradle
+++ b/h2o-r/build.gradle
@@ -12,7 +12,7 @@ description = "H2O R Package"
//apply plugin: 'water.gradle.plugins.manageLocalClouds'
dependencies {
- compile project(":h2o-assembly")
+ compile project(":h2o-assemblies:main")
}
def getOS() {
diff --git a/h2o-test-integ/build.gradle b/h2o-test-integ/build.gradle
index b640254db7e..b0c84fb6e62 100644
--- a/h2o-test-integ/build.gradle
+++ b/h2o-test-integ/build.gradle
@@ -1,7 +1,7 @@
description = "H2O Integration Testing"
dependencies {
- compile project(":h2o-assembly")
+ compile project(":h2o-assemblies:main")
}
def runner = new File("$rootDir/scripts/run.py").canonicalPath
diff --git a/scripts/jenkins/PR_py3unit_small.sh b/scripts/jenkins/PR_py3unit_small.sh
index 893533402f1..4d22d619046 100644
--- a/scripts/jenkins/PR_py3unit_small.sh
+++ b/scripts/jenkins/PR_py3unit_small.sh
@@ -50,6 +50,7 @@ echo "* Activating Python virtualenv"
echo "*********************************************"
echo ""
source $WORKSPACE/../h2o_venv3/bin/activate
+pip install --upgrade pip
# Use the Jenkins-user shared R library; already sync'd no need to sync again
export R_LIBS_USER=${WORKSPACE}/../Rlibrary
diff --git a/scripts/jenkins/PR_pybooklets.sh b/scripts/jenkins/PR_pybooklets.sh
index 6300f9eb921..69c271587ac 100644
--- a/scripts/jenkins/PR_pybooklets.sh
+++ b/scripts/jenkins/PR_pybooklets.sh
@@ -49,6 +49,7 @@ echo "* Activating Python virtualenv"
echo "*********************************************"
echo ""
source $WORKSPACE/../h2o_venv/bin/activate
+pip install --upgrade pip
# Use the Jenkins-user shared R library; already sync'd no need to sync again
export R_LIBS_USER=${WORKSPACE}/../Rlibrary
diff --git a/scripts/jenkins/PR_python_demos.sh b/scripts/jenkins/PR_python_demos.sh
index 1c9ec523e69..7cddf1a58af 100644
--- a/scripts/jenkins/PR_python_demos.sh
+++ b/scripts/jenkins/PR_python_demos.sh
@@ -49,6 +49,7 @@ echo "* Activating Python virtualenv"
echo "*********************************************"
echo ""
source $WORKSPACE/../h2o_venv/bin/activate
+pip install --upgrade pip
python --version
diff --git a/scripts/jenkins/PR_pyunit_medium_large.sh b/scripts/jenkins/PR_pyunit_medium_large.sh
index 68f9303aea6..fc11e68f4b6 100644
--- a/scripts/jenkins/PR_pyunit_medium_large.sh
+++ b/scripts/jenkins/PR_pyunit_medium_large.sh
@@ -49,6 +49,7 @@ echo "* Activating Python virtualenv"
echo "*********************************************"
echo ""
source $WORKSPACE/../h2o_venv/bin/activate
+pip install --upgrade pip
# Use the Jenkins-user shared R library; already sync'd no need to sync again
export R_LIBS_USER=${WORKSPACE}/../Rlibrary
diff --git a/scripts/jenkins/PR_pyunit_small.sh b/scripts/jenkins/PR_pyunit_small.sh
index c3bb7f47bf3..da0d49d45ad 100644
--- a/scripts/jenkins/PR_pyunit_small.sh
+++ b/scripts/jenkins/PR_pyunit_small.sh
@@ -53,6 +53,7 @@ echo "* Activating Python virtualenv"
echo "*********************************************"
echo ""
source $WORKSPACE/../h2o_venv/bin/activate
+pip install --upgrade pip
which python
which pip
diff --git a/scripts/jenkins/PR_pyunit_small_J6.sh b/scripts/jenkins/PR_pyunit_small_J6.sh
index fd45510c9ee..2e6a4a0dd72 100644
--- a/scripts/jenkins/PR_pyunit_small_J6.sh
+++ b/scripts/jenkins/PR_pyunit_small_J6.sh
@@ -53,6 +53,7 @@ echo "* Activating Python virtualenv"
echo "*********************************************"
echo ""
source $WORKSPACE/../h2o_venv/bin/activate
+pip install --upgrade pip
# Use the Jenkins-user shared R library; already sync'd no need to sync again
export R_LIBS_USER=${WORKSPACE}/../Rlibrary
diff --git a/scripts/jenkins/PR_smoke_pyunit.sh b/scripts/jenkins/PR_smoke_pyunit.sh
index 903dcba1c0c..204069233a8 100644
--- a/scripts/jenkins/PR_smoke_pyunit.sh
+++ b/scripts/jenkins/PR_smoke_pyunit.sh
@@ -50,6 +50,7 @@ echo "* Activating Python virtualenv"
echo "*********************************************"
echo ""
source $WORKSPACE/../h2o_venv/bin/activate
+pip install --upgrade pip
# Use the Jenkins-user shared R library; already sync'd no need to sync again
export R_LIBS_USER=${WORKSPACE}/../Rlibrary
diff --git a/scripts/jenkins/PR_startup_checks.sh b/scripts/jenkins/PR_startup_checks.sh
index f2b64eef616..a41865eee76 100644
--- a/scripts/jenkins/PR_startup_checks.sh
+++ b/scripts/jenkins/PR_startup_checks.sh
@@ -51,6 +51,9 @@ echo "*********************************************"
echo ""
virtualenv $WORKSPACE/h2o_venv --python=python2.7
source $WORKSPACE/h2o_venv/bin/activate
+pip install --upgrade pip
+
+# This should be done in gradle...
pip install numpy --upgrade
pip install scipy --upgrade
pip install -r h2o-py/requirements.txt
diff --git a/settings.gradle b/settings.gradle
index 64589daa7c3..732b19020c7 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -6,7 +6,8 @@ include 'h2o-web'
include 'h2o-app'
include 'h2o-r'
include 'h2o-py'
-include 'h2o-assembly'
+include 'h2o-assemblies:main'
+include 'h2o-assemblies:py2o'
include 'h2o-persist-hdfs'
include 'h2o-persist-s3'
include 'h2o-docs'
@@ -19,6 +20,7 @@ include 'h2o-test-accuracy'
include 'h2o-avro-parser'
include 'h2o-orc-parser'
include 'h2o-parquet-parser'
+include 'h2o-grpc'
// Reconfigure scala projects to support cross compilation
// The following code will create two projects for each included item: