diff --git a/.gitignore b/.gitignore
index 3efc25aafb112ede07b130e699e8fcde7fed58fa..63332d1b16bf9c1c72fccf477a1e77875d6c0919 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,3 +17,5 @@ coverage
 # cache for run_tests.py
 .run_tests_cache
 
+# emacs temp files
+*~
\ No newline at end of file
diff --git a/src/node/common.js b/src/node/common.js
index 656a4aca9534de414b978eba9d56738c72a3aab1..54247e3fa119280a14a7f7b6557ab631de4bed72 100644
--- a/src/node/common.js
+++ b/src/node/common.js
@@ -31,6 +31,8 @@
  *
  */
 
+var capitalize = require('underscore.string/capitalize');
+
 /**
  * Get a function that deserializes a specific type of protobuf.
  * @param {function()} cls The constructor of the message type to deserialize
@@ -73,6 +75,9 @@ function fullyQualifiedName(value) {
     return '';
   }
   var name = value.name;
+  if (value.className === 'Service.RPCMethod') {
+    name = capitalize(name);
+  }
   if (value.hasOwnProperty('parent')) {
     var parent_name = fullyQualifiedName(value.parent);
     if (parent_name !== '') {
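
Note: with this change, fullyQualifiedName emits the capitalized, wire-level method name even when the reflection object carries a camelCase name (the surface layers below decapitalize names going the other way). A minimal sketch of the mapping, using hand-built stand-ins for ProtoBuf.js reflection objects rather than a loaded .proto:

```js
var common = require('./common.js');

// Hand-built stand-ins for ProtoBuf.js reflection objects.
var ns = {name: 'math', className: 'Namespace'};
var mathService = {name: 'Math', className: 'Service', parent: ns};
var divMethod = {name: 'div', className: 'Service.RPCMethod',
                 parent: mathService};

// Prints 'math.Math.Div': the method name is capitalized, matching the name
// used on the wire.
console.log(common.fullyQualifiedName(divMethod));
```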
diff --git a/src/node/examples/math_server.js b/src/node/examples/math_server.js
index 366513dc17db261e4daf94c3495ae30cfdcc612f..d649b4fd6d0749ad74c2ce319c9bcf0941843f17 100644
--- a/src/node/examples/math_server.js
+++ b/src/node/examples/math_server.js
@@ -119,10 +119,10 @@ function mathDivMany(stream) {
 
 var server = new Server({
   'math.Math' : {
-    Div: mathDiv,
-    Fib: mathFib,
-    Sum: mathSum,
-    DivMany: mathDivMany
+    div: mathDiv,
+    fib: mathFib,
+    sum: mathSum,
+    divMany: mathDivMany
   }
 });
 
diff --git a/src/node/interop/empty.proto b/src/node/interop/empty.proto
new file mode 100644
index 0000000000000000000000000000000000000000..c9920a22eec0801bc1d8ce4e3c027253358936eb
--- /dev/null
+++ b/src/node/interop/empty.proto
@@ -0,0 +1,19 @@
+syntax = "proto2";
+
+package grpc.testing;
+
+// An empty message that you can re-use to avoid defining duplicated empty
+// messages in your project. A typical example is to use it as the argument or the
+// return value of a service API. For instance:
+//
+//   service Foo {
+//     rpc Bar (grpc.testing.Empty) returns (grpc.testing.Empty) { };
+//   };
+//
+// MOE:begin_strip
+// The difference between this one and net/rpc/empty-message.proto is that
+// 1) The generated message here is in proto2 C++ API.
+// 2) The proto2.Empty has minimum dependencies
+//    (no message_set or net/rpc dependencies)
+// MOE:end_strip
+message Empty {}
diff --git a/src/node/interop/interop_client.js b/src/node/interop/interop_client.js
new file mode 100644
index 0000000000000000000000000000000000000000..cf75b9a77ac91e419d835652af1cdbe8b6e7cecb
--- /dev/null
+++ b/src/node/interop/interop_client.js
@@ -0,0 +1,274 @@
+/*
+ *
+ * Copyright 2014, Google Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+var fs = require('fs');
+var path = require('path');
+var grpc = require('..');
+var testProto = grpc.load(__dirname + '/test.proto').grpc.testing;
+
+var assert = require('assert');
+
+/**
+ * Create a buffer of the given size, filled with zeroes
+ * @param {number} size The length of the buffer
+ * @return {Buffer} The new buffer
+ */
+function zeroBuffer(size) {
+  var zeros = new Buffer(size);
+  zeros.fill(0);
+  return zeros;
+}
+
+/**
+ * Run the empty_unary test
+ * @param {Client} client The client to test against
+ * @param {function} done Callback to call when the test is completed. Included
+ *     primarily for use with mocha
+ */
+function emptyUnary(client, done) {
+  var call = client.emptyCall({}, function(err, resp) {
+    assert.ifError(err);
+  });
+  call.on('status', function(status) {
+    assert.strictEqual(status.code, grpc.status.OK);
+    if (done) {
+      done();
+    }
+  });
+}
+
+/**
+ * Run the large_unary test
+ * @param {Client} client The client to test against
+ * @param {function} done Callback to call when the test is completed. Included
+ *     primarily for use with mocha
+ */
+function largeUnary(client, done) {
+  var arg = {
+    response_type: testProto.PayloadType.COMPRESSABLE,
+    response_size: 314159,
+    payload: {
+      body: zeroBuffer(271828)
+    }
+  };
+  var call = client.unaryCall(arg, function(err, resp) {
+    assert.ifError(err);
+    assert.strictEqual(resp.payload.type, testProto.PayloadType.COMPRESSABLE);
+    assert.strictEqual(resp.payload.body.limit - resp.payload.body.offset,
+                       314159);
+  });
+  call.on('status', function(status) {
+    assert.strictEqual(status.code, grpc.status.OK);
+    if (done) {
+      done();
+    }
+  });
+}
+
+/**
+ * Run the client_streaming test
+ * @param {Client} client The client to test against
+ * @param {function} done Callback to call when the test is completed. Included
+ *     primarily for use with mocha
+ */
+function clientStreaming(client, done) {
+  var call = client.streamingInputCall(function(err, resp) {
+    assert.ifError(err);
+    assert.strictEqual(resp.aggregated_payload_size, 74922);
+  });
+  call.on('status', function(status) {
+    assert.strictEqual(status.code, grpc.status.OK);
+    if (done) {
+      done();
+    }
+  });
+  var payload_sizes = [27182, 8, 1828, 45904];
+  for (var i = 0; i < payload_sizes.length; i++) {
+    call.write({payload: {body: zeroBuffer(payload_sizes[i])}});
+  }
+  call.end();
+}
+
+/**
+ * Run the server_streaming test
+ * @param {Client} client The client to test against
+ * @param {function} done Callback to call when the test is completed. Included
+ *     primarily for use with mocha
+ */
+function serverStreaming(client, done) {
+  var arg = {
+    response_type: testProto.PayloadType.COMPRESSABLE,
+    response_parameters: [
+      {size: 31415},
+      {size: 9},
+      {size: 2653},
+      {size: 58979}
+    ]
+  };
+  var call = client.streamingOutputCall(arg);
+  var resp_index = 0;
+  call.on('data', function(value) {
+    assert(resp_index < 4);
+    assert.strictEqual(value.payload.type, testProto.PayloadType.COMPRESSABLE);
+    assert.strictEqual(value.payload.body.limit - value.payload.body.offset,
+                       arg.response_parameters[resp_index].size);
+    resp_index += 1;
+  });
+  call.on('status', function(status) {
+    assert.strictEqual(resp_index, 4);
+    assert.strictEqual(status.code, grpc.status.OK);
+    if (done) {
+      done();
+    }
+  });
+}
+
+/**
+ * Run the ping_pong test
+ * @param {Client} client The client to test against
+ * @param {function} done Callback to call when the test is completed. Included
+ *     primarily for use with mocha
+ */
+function pingPong(client, done) {
+  var payload_sizes = [27182, 8, 1828, 45904];
+  var response_sizes = [31415, 9, 2653, 58979];
+  var call = client.fullDuplexCall();
+  call.on('status', function(status) {
+    assert.strictEqual(status.code, grpc.status.OK);
+    if (done) {
+      done();
+    }
+  });
+  var index = 0;
+  call.write({
+      response_type: testProto.PayloadType.COMPRESSABLE,
+      response_parameters: [
+        {size: response_sizes[index]}
+      ],
+      payload: {body: zeroBuffer(payload_sizes[index])}
+  });
+  call.on('data', function(response) {
+    assert.strictEqual(response.payload.type,
+                       testProto.PayloadType.COMPRESSABLE);
+    assert.equal(response.payload.body.limit - response.payload.body.offset,
+                 response_sizes[index]);
+    index += 1;
+    if (index == 4) {
+      call.end();
+    } else {
+      call.write({
+        response_type: testProto.PayloadType.COMPRESSABLE,
+        response_parameters: [
+          {size: response_sizes[index]}
+        ],
+        payload: {body: zeroBuffer(payload_sizes[index])}
+      });
+    }
+  });
+}
+
+/**
+ * Run the empty_stream test.
+ * NOTE: This does not currently work, but it should once the new invoke API lands
+ * @param {Client} client The client to test against
+ * @param {function} done Callback to call when the test is completed. Included
+ *     primarily for use with mocha
+ */
+function emptyStream(client, done) {
+  var call = client.fullDuplexCall();
+  call.on('status', function(status) {
+    assert.strictEqual(status.code, grpc.status.OK);
+    if (done) {
+      done();
+    }
+  });
+  call.on('data', function(value) {
+    assert.fail(value, null, 'No data should have been received', '!==');
+  });
+  call.end();
+}
+
+/**
+ * Map from test case names to test functions
+ */
+var test_cases = {
+  empty_unary: emptyUnary,
+  large_unary: largeUnary,
+  client_streaming: clientStreaming,
+  server_streaming: serverStreaming,
+  ping_pong: pingPong,
+  empty_stream: emptyStream
+};
+
+/**
+ * Execute a single test case.
+ * @param {string} address The address of the server to connect to, in the
+ *     format "hostname:port"
+ * @param {string} host_override The hostname of the server to use as an SSL
+ *     override
+ * @param {string} test_case The name of the test case to run
+ * @param {bool} tls Indicates that a secure channel should be used
+ * @param {function} done Callback to call when the test is completed. Included
+ *     primarily for use with mocha
+ */
+function runTest(address, host_override, test_case, tls, done) {
+  // TODO(mlumish): enable TLS functionality
+  var options = {};
+  if (tls) {
+    var ca_path = path.join(__dirname, '../test/data/ca.pem');
+    var ca_data = fs.readFileSync(ca_path);
+    var creds = grpc.Credentials.createSsl(ca_data);
+    options.credentials = creds;
+    if (host_override) {
+      options['grpc.ssl_target_name_override'] = host_override;
+    }
+  }
+  var client = new testProto.TestService(address, options);
+
+  test_cases[test_case](client, done);
+}
+
+if (require.main === module) {
+  var parseArgs = require('minimist');
+  var argv = parseArgs(process.argv, {
+    string: ['server_host', 'server_host_override', 'server_port', 'test_case',
+             'use_tls', 'use_test_ca']
+  });
+  runTest(argv.server_host + ':' + argv.server_port, argv.server_host_override,
+          argv.test_case, argv.use_tls === 'true');
+}
+
+/**
+ * See docs for runTest
+ */
+exports.runTest = runTest;
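
Note: runTest is exported so other harnesses (such as the mocha sanity test added below) can drive the same test cases programmatically. A minimal sketch, assuming an interop server is already listening with the test certificates; the address, port, and host override here are placeholders:

```js
var interop_client = require('./interop_client.js');

// Assumes a TLS interop server is already listening on localhost:8080.
interop_client.runTest('localhost:8080', 'foo.test.google.com', 'large_unary',
                       true, function() {
  console.log('large_unary completed');
});
```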
diff --git a/src/node/interop/interop_server.js b/src/node/interop/interop_server.js
new file mode 100644
index 0000000000000000000000000000000000000000..735b7a6d18be4eb808814d25dd76bf68409ad2e1
--- /dev/null
+++ b/src/node/interop/interop_server.js
@@ -0,0 +1,202 @@
+/*
+ *
+ * Copyright 2014, Google Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+var fs = require('fs');
+var path = require('path');
+var _ = require('underscore');
+var grpc = require('..');
+var testProto = grpc.load(__dirname + '/test.proto').grpc.testing;
+var Server = grpc.buildServer([testProto.TestService.service]);
+
+/**
+ * Create a buffer of the given size, filled with zeroes
+ * @param {number} size The length of the buffer
+ * @return {Buffer} The new buffer
+ */
+function zeroBuffer(size) {
+  var zeros = new Buffer(size);
+  zeros.fill(0);
+  return zeros;
+}
+
+/**
+ * Respond to an empty parameter with an empty response.
+ * NOTE: this currently does not work due to issue #137
+ * @param {Call} call Call to handle
+ * @param {function(Error, Object)} callback Callback to call with result
+ *     or error
+ */
+function handleEmpty(call, callback) {
+  callback(null, {});
+}
+
+/**
+ * Handle a unary request by sending the requested payload
+ * @param {Call} call Call to handle
+ * @param {function(Error, Object)} callback Callback to call with result or
+ *     error
+ */
+function handleUnary(call, callback) {
+  var req = call.request;
+  var zeros = zeroBuffer(req.response_size);
+  var payload_type = req.response_type;
+  if (payload_type === testProto.PayloadType.RANDOM) {
+    payload_type = [
+      testProto.PayloadType.COMPRESSABLE,
+      testProto.PayloadType.UNCOMPRESSABLE][Math.random() < 0.5 ? 0 : 1];
+  }
+  callback(null, {payload: {type: payload_type, body: zeros}});
+}
+
+/**
+ * Respond to a streaming call with the total size of all payloads
+ * @param {Call} call Call to handle
+ * @param {function(Error, Object)} callback Callback to call with result or
+ *     error
+ */
+function handleStreamingInput(call, callback) {
+  var aggregate_size = 0;
+  call.on('data', function(value) {
+    aggregate_size += value.payload.body.limit - value.payload.body.offset;
+  });
+  call.on('end', function() {
+    callback(null, {aggregated_payload_size: aggregate_size});
+  });
+}
+
+/**
+ * Respond to a payload request with a stream of the requested payloads
+ * @param {Call} call Call to handle
+ */
+function handleStreamingOutput(call) {
+  var req = call.request;
+  var payload_type = req.response_type;
+  if (payload_type === testProto.PayloadType.RANDOM) {
+    payload_type = [
+      testProto.PayloadType.COMPRESSABLE,
+      testProto.PayloadType.UNCOMPRESSABLE][Math.random() < 0.5 ? 0 : 1];
+  }
+  _.each(req.response_parameters, function(resp_param) {
+    call.write({
+      payload: {
+        body: zeroBuffer(resp_param.size),
+        type: payload_type
+      }
+    });
+  });
+  call.end();
+}
+
+/**
+ * Respond to a stream of payload requests with a stream of payload responses as
+ * they arrive.
+ * @param {Call} call Call to handle
+ */
+function handleFullDuplex(call) {
+  call.on('data', function(value) {
+    var payload_type = value.response_type;
+    if (payload_type === testProto.PayloadType.RANDOM) {
+      payload_type = [
+        testProto.PayloadType.COMPRESSABLE,
+        testProto.PayloadType.UNCOMPRESSABLE][Math.random() < 0.5 ? 0 : 1];
+    }
+    _.each(value.response_parameters, function(resp_param) {
+      call.write({
+        payload: {
+          body: zeroBuffer(resp_param.size),
+          type: payload_type
+        }
+      });
+    });
+  });
+  call.on('end', function() {
+    call.end();
+  });
+}
+
+/**
+ * Respond to a stream of payload requests with a stream of payload responses
+ * after all requests have arrived
+ * @param {Call} call Call to handle
+ */
+function handleHalfDuplex(call) {
+  throw new Error('HalfDuplexCall not yet implemented');
+}
+
+/**
+ * Get a server object bound to the given port
+ * @param {string} port Port to which to bind
+ * @param {boolean} tls Indicates that the bound port should use TLS
+ * @return {Server} Server object bound to the given port
+ */
+function getServer(port, tls) {
+  // TODO(mlumish): enable TLS functionality
+  var options = {};
+  if (tls) {
+    var key_path = path.join(__dirname, '../test/data/server1.key');
+    var pem_path = path.join(__dirname, '../test/data/server1.pem');
+
+    var key_data = fs.readFileSync(key_path);
+    var pem_data = fs.readFileSync(pem_path);
+    var server_creds = grpc.ServerCredentials.createSsl(null,
+                                                        key_data,
+                                                        pem_data);
+    options.credentials = server_creds;
+  }
+  var server = new Server({
+    'grpc.testing.TestService' : {
+      emptyCall: handleEmpty,
+      unaryCall: handleUnary,
+      streamingOutputCall: handleStreamingOutput,
+      streamingInputCall: handleStreamingInput,
+      fullDuplexCall: handleFullDuplex,
+      halfDuplexCall: handleHalfDuplex
+    }
+  }, options);
+  server.bind('0.0.0.0:' + port, tls);
+  return server;
+}
+
+if (require.main === module) {
+  var parseArgs = require('minimist');
+  var argv = parseArgs(process.argv, {
+    string: ['port', 'use_tls']
+  });
+  var server = getServer(argv.port, argv.use_tls === 'true');
+  server.start();
+}
+
+/**
+ * See docs for getServer
+ */
+exports.getServer = getServer;
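
Note: getServer is exported for the same reason; a minimal sketch of starting a TLS server outside the command-line entry point (the port is a placeholder, and the key/certificate paths are resolved relative to this file, as in getServer above):

```js
var interop_server = require('./interop_server.js');

// Bind an arbitrary port with TLS using the test key and certificate.
var server = interop_server.getServer('50051', true);
server.start();
```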
diff --git a/src/node/interop/messages.proto b/src/node/interop/messages.proto
new file mode 100644
index 0000000000000000000000000000000000000000..29db0dd8b1aa0f4445bfc8b85e90d5304c9d2160
--- /dev/null
+++ b/src/node/interop/messages.proto
@@ -0,0 +1,94 @@
+// Message definitions to be used by integration test service definitions.
+
+syntax = "proto2";
+
+package grpc.testing;
+
+// The type of payload that should be returned.
+enum PayloadType {
+  // Compressable text format.
+  COMPRESSABLE = 0;
+
+  // Uncompressable binary format.
+  UNCOMPRESSABLE = 1;
+
+  // Randomly chosen from all other formats defined in this enum.
+  RANDOM = 2;
+}
+
+// A block of data, to simply increase gRPC message size.
+message Payload {
+  // The type of data in body.
+  optional PayloadType type = 1;
+  // Primary contents of payload.
+  optional bytes body = 2;
+}
+
+// Unary request.
+message SimpleRequest {
+  // Desired payload type in the response from the server.
+  // If response_type is RANDOM, server randomly chooses one from other formats.
+  optional PayloadType response_type = 1;
+
+  // Desired payload size in the response from the server.
+  // If response_type is COMPRESSABLE, this denotes the size before compression.
+  optional int32 response_size = 2;
+
+  // Optional input payload sent along with the request.
+  optional Payload payload = 3;
+}
+
+// Unary response, as configured by the request.
+message SimpleResponse {
+  // Payload to increase message size.
+  optional Payload payload = 1;
+  // The user the request came from, for verifying authentication was
+  // successful when the client expected it.
+  optional int64 effective_gaia_user_id = 2;
+}
+
+// Client-streaming request.
+message StreamingInputCallRequest {
+  // Optional input payload sent along with the request.
+  optional Payload payload = 1;
+
+  // Not expecting any payload from the response.
+}
+
+// Client-streaming response.
+message StreamingInputCallResponse {
+  // Aggregated size of payloads received from the client.
+  optional int32 aggregated_payload_size = 1;
+}
+
+// Configuration for a particular response.
+message ResponseParameters {
+  // Desired payload sizes in responses from the server.
+  // If response_type is COMPRESSABLE, this denotes the size before compression.
+  optional int32 size = 1;
+
+  // Desired interval between consecutive responses in the response stream in
+  // microseconds.
+  optional int32 interval_us = 2;
+}
+
+// Server-streaming request.
+message StreamingOutputCallRequest {
+  // Desired payload type in the response from the server.
+  // If response_type is RANDOM, the payload from each response in the stream
+  // might be of different types. This is to simulate a mixed type of payload
+  // stream.
+  optional PayloadType response_type = 1;
+
+  // Configuration for each expected response message.
+  repeated ResponseParameters response_parameters = 2;
+
+  // Optional input payload sent along with the request.
+  optional Payload payload = 3;
+}
+
+// Server-streaming response, as configured by the request and parameters.
+message StreamingOutputCallResponse {
+  // Payload to increase response size.
+  optional Payload payload = 1;
+}
diff --git a/src/node/interop/test.proto b/src/node/interop/test.proto
new file mode 100644
index 0000000000000000000000000000000000000000..8380ebb31de74fc7e68a83ac5fc7dbd4485372fb
--- /dev/null
+++ b/src/node/interop/test.proto
@@ -0,0 +1,42 @@
+// An integration test service that covers all the method signature permutations
+// of unary/streaming requests/responses.
+syntax = "proto2";
+
+import "empty.proto";
+import "messages.proto";
+
+package grpc.testing;
+
+// A simple service to test the various types of RPCs and experiment with
+// performance with various types of payload.
+service TestService {
+  // One empty request followed by one empty response.
+  rpc EmptyCall(grpc.testing.Empty) returns (grpc.testing.Empty);
+
+  // One request followed by one response.
+  // The server returns the client payload as-is.
+  rpc UnaryCall(SimpleRequest) returns (SimpleResponse);
+
+  // One request followed by a sequence of responses (streamed download).
+  // The server returns the payload with client desired type and sizes.
+  rpc StreamingOutputCall(StreamingOutputCallRequest)
+      returns (stream StreamingOutputCallResponse);
+
+  // A sequence of requests followed by one response (streamed upload).
+  // The server returns the aggregated size of client payload as the result.
+  rpc StreamingInputCall(stream StreamingInputCallRequest)
+      returns (StreamingInputCallResponse);
+
+  // A sequence of requests with each request served by the server immediately.
+  // As one request could lead to multiple responses, this interface
+  // demonstrates the idea of full duplexing.
+  rpc FullDuplexCall(stream StreamingOutputCallRequest)
+      returns (stream StreamingOutputCallResponse);
+
+  // A sequence of requests followed by a sequence of responses.
+  // The server buffers all the client requests and then serves them in order. A
+  // stream of responses is returned to the client when the server starts with
+  // the first request.
+  rpc HalfDuplexCall(stream StreamingOutputCallRequest)
+      returns (stream StreamingOutputCallResponse);
+}
diff --git a/src/node/main.js b/src/node/main.js
index a8dfa200245d056b72dbced8bf86771395865ba1..751c3525d37c91d3d36ef4fedd2f76e80328e59d 100644
--- a/src/node/main.js
+++ b/src/node/main.js
@@ -55,7 +55,7 @@ function loadObject(value) {
     return result;
   } else if (value.className === 'Service') {
     return surface_client.makeClientConstructor(value);
-  } else if (value.className === 'Service.Message') {
+  } else if (value.className === 'Message' || value.className === 'Enum') {
     return value.build();
   } else {
     return value;
@@ -96,3 +96,13 @@ exports.status = grpc.status;
  * Call error name to code number mapping
  */
 exports.callError = grpc.callError;
+
+/**
+ * Credentials factories
+ */
+exports.Credentials = grpc.Credentials;
+
+/**
+ * ServerCredentials factories
+ */
+exports.ServerCredentials = grpc.ServerCredentials;
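
Note: with the factories exported, callers can construct SSL credentials without touching the native module directly, mirroring the interop client above. A sketch of the client side, assuming a CA certificate on disk; the paths, address, and the use of the interop test.proto are placeholders for illustration only:

```js
var fs = require('fs');
var grpc = require('grpc');  // or a relative require of src/node

// Load a service definition; the interop test.proto is used purely as an
// example here.
var testProto = grpc.load('interop/test.proto').grpc.testing;

var ca_data = fs.readFileSync('test/data/ca.pem');  // placeholder path
var options = {
  credentials: grpc.Credentials.createSsl(ca_data),
  'grpc.ssl_target_name_override': 'foo.test.google.com'
};
var client = new testProto.TestService('localhost:443', options);
```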
diff --git a/src/node/package.json b/src/node/package.json
index ed93c4ff41eb6e02b57f7d55a69ece2608135469..5f3c6fa3455d29719fae7f212457531bbe95641b 100644
--- a/src/node/package.json
+++ b/src/node/package.json
@@ -8,12 +8,14 @@
   "dependencies": {
     "bindings": "^1.2.1",
     "nan": "~1.3.0",
+    "protobufjs": "murgatroid99/ProtoBuf.js",
     "underscore": "^1.7.0",
-    "protobufjs": "murgatroid99/ProtoBuf.js"
+    "underscore.string": "^3.0.0"
   },
   "devDependencies": {
+    "highland": "~2.2.0",
     "mocha": "~1.21.0",
-    "highland": "~2.0.0"
+    "minimist": "^1.1.0"
   },
   "main": "main.js"
 }
diff --git a/src/node/surface_client.js b/src/node/surface_client.js
index 77dab5ca6f8ac77fa51d83d9bbe1d4fb6dd78a88..996e3d101fcd94556585aa722940bc80520196b3 100644
--- a/src/node/surface_client.js
+++ b/src/node/surface_client.js
@@ -33,6 +33,9 @@
 
 var _ = require('underscore');
 
+var capitalize = require('underscore.string/capitalize');
+var decapitalize = require('underscore.string/decapitalize');
+
 var client = require('./client.js');
 
 var common = require('./common.js');
@@ -352,10 +355,11 @@ function makeClientConstructor(service) {
         method_type = 'unary';
       }
     }
-    SurfaceClient.prototype[method.name] = requester_makers[method_type](
-        prefix + method.name,
-        common.serializeCls(method.resolvedRequestType.build()),
-        common.deserializeCls(method.resolvedResponseType.build()));
+    SurfaceClient.prototype[decapitalize(method.name)] =
+        requester_makers[method_type](
+            prefix + capitalize(method.name),
+            common.serializeCls(method.resolvedRequestType.build()),
+            common.deserializeCls(method.resolvedResponseType.build()));
   });
 
   SurfaceClient.service = service;
diff --git a/src/node/surface_server.js b/src/node/surface_server.js
index 55f3583b09177021da14f09439201b8dc9a68ab4..bc688839fe56ed017e2b4d8eb738891382d0b5a0 100644
--- a/src/node/surface_server.js
+++ b/src/node/surface_server.js
@@ -33,6 +33,9 @@
 
 var _ = require('underscore');
 
+var capitalize = require('underscore.string/capitalize');
+var decapitalize = require('underscore.string/decapitalize');
+
 var Server = require('./server.js');
 
 var stream = require('stream');
@@ -332,15 +335,16 @@ function makeServerConstructor(services) {
             method_type = 'unary';
           }
         }
-        if (service_handlers[service_name][method.name] === undefined) {
+        if (service_handlers[service_name][decapitalize(method.name)] ===
+            undefined) {
           throw new Error('Method handler for ' +
               common.fullyQualifiedName(method) + ' not provided.');
         }
         var binary_handler = handler_makers[method_type](
-            service_handlers[service_name][method.name],
+            service_handlers[service_name][decapitalize(method.name)],
             common.serializeCls(method.resolvedResponseType.build()),
             common.deserializeCls(method.resolvedRequestType.build()));
-        server.register(prefix + method.name, binary_handler);
+        server.register(prefix + capitalize(method.name), binary_handler);
       });
     }, this);
   }
diff --git a/src/node/test/interop_sanity_test.js b/src/node/test/interop_sanity_test.js
new file mode 100644
index 0000000000000000000000000000000000000000..9959a165ad1d87f122f237c5f078e34a525427e7
--- /dev/null
+++ b/src/node/test/interop_sanity_test.js
@@ -0,0 +1,74 @@
+/*
+ *
+ * Copyright 2014, Google Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+var interop_server = require('../interop/interop_server.js');
+var interop_client = require('../interop/interop_client.js');
+
+var port_picker = require('../port_picker');
+
+var server;
+
+var port;
+
+var name_override = 'foo.test.google.com';
+
+describe('Interop tests', function() {
+  before(function(done) {
+    port_picker.nextAvailablePort(function(addr) {
+      server = interop_server.getServer(addr.substring(addr.indexOf(':') + 1), true);
+      server.listen();
+      port = addr;
+      done();
+    });
+  });
+  // This depends on not using a binary stream
+  it.skip('should pass empty_unary', function(done) {
+    interop_client.runTest(port, name_override, 'empty_unary', true, done);
+  });
+  it('should pass large_unary', function(done) {
+    interop_client.runTest(port, name_override, 'large_unary', true, done);
+  });
+  it('should pass client_streaming', function(done) {
+    interop_client.runTest(port, name_override, 'client_streaming', true, done);
+  });
+  it('should pass server_streaming', function(done) {
+    interop_client.runTest(port, name_override, 'server_streaming', true, done);
+  });
+  it('should pass ping_pong', function(done) {
+    interop_client.runTest(port, name_override, 'ping_pong', true, done);
+  });
+  // This depends on the new invoke API
+  it.skip('should pass empty_stream', function(done) {
+    interop_client.runTest(port, name_override, 'empty_stream', true, done);
+  });
+});
diff --git a/src/node/test/math_client_test.js b/src/node/test/math_client_test.js
index 29d5648d467046e0a786c80172669c37715e6d42..0e365bf87060677810e6a9fcce2ceac76f534c89 100644
--- a/src/node/test/math_client_test.js
+++ b/src/node/test/math_client_test.js
@@ -59,7 +59,7 @@ describe('Math client', function() {
   });
   it('should handle a single request', function(done) {
     var arg = {dividend: 7, divisor: 4};
-    var call = math_client.Div(arg, function handleDivResult(err, value) {
+    var call = math_client.div(arg, function handleDivResult(err, value) {
       assert.ifError(err);
       assert.equal(value.quotient, 1);
       assert.equal(value.remainder, 3);
@@ -70,7 +70,7 @@ describe('Math client', function() {
     });
   });
   it('should handle a server streaming request', function(done) {
-    var call = math_client.Fib({limit: 7});
+    var call = math_client.fib({limit: 7});
     var expected_results = [1, 1, 2, 3, 5, 8, 13];
     var next_expected = 0;
     call.on('data', function checkResponse(value) {
@@ -83,7 +83,7 @@ describe('Math client', function() {
     });
   });
   it('should handle a client streaming request', function(done) {
-    var call = math_client.Sum(function handleSumResult(err, value) {
+    var call = math_client.sum(function handleSumResult(err, value) {
       assert.ifError(err);
       assert.equal(value.num, 21);
     });
@@ -101,7 +101,7 @@ describe('Math client', function() {
       assert.equal(value.quotient, index);
       assert.equal(value.remainder, 1);
     }
-    var call = math_client.DivMany();
+    var call = math_client.divMany();
     var response_index = 0;
     call.on('data', function(value) {
       checkResponse(response_index, value);
diff --git a/src/node/test/surface_test.js b/src/node/test/surface_test.js
index 8d0d8ec3bc942f29ea99334a8c09b8c0078c7359..34f1a156eb99c62b19d2ca4f6c0feddb9adb5264 100644
--- a/src/node/test/surface_test.js
+++ b/src/node/test/surface_test.js
@@ -59,9 +59,9 @@ describe('Surface server constructor', function() {
     assert.throws(function() {
       new Server({
         'math.Math': {
-          'Div': function() {},
-          'DivMany': function() {},
-          'Fib': function() {}
+          'div': function() {},
+          'divMany': function() {},
+          'fib': function() {}
         }
       });
     }, /math.Math.Sum/);
diff --git a/tools/dockerfile/grpc_base/Dockerfile b/tools/dockerfile/grpc_base/Dockerfile
index 45be17259355ac5230e7506d19b7d7bcfbf84ac2..be1b69b0dc552153d5b164eb65ec0611786597c3 100644
--- a/tools/dockerfile/grpc_base/Dockerfile
+++ b/tools/dockerfile/grpc_base/Dockerfile
@@ -13,6 +13,7 @@ RUN apt-get update && apt-get install -y \
   libc6 \
   libc6-dbg \
   libc6-dev \
+  libgtest-dev \
   libtool \
   make \
   strace \
@@ -34,23 +35,13 @@ ENV CLOUD_SDK /google-cloud-sdk
 RUN $CLOUD_SDK/install.sh --usage-reporting=true --path-update=true --bash-completion=true --rc-path=/.bashrc --disable-installation-options
 ENV PATH $CLOUD_SDK/bin:$PATH
 
-# Install gcompute-tools to allow access to private git-on-borg repos
-RUN git clone https://gerrit.googlesource.com/gcompute-tools /var/local/git/gcompute-tools
-
-# Start the daemon that allows access to private git-on-borg repos
-RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
-
-# Install the grpc-tools scripts dir from git
-RUN git clone https://team.googlesource.com/one-platform-grpc-team/grpc-tools /var/local/git/grpc-tools
-
-# Install the grpc-protobuf dir that has the protoc patch
-RUN git clone https://team.googlesource.com/one-platform-grpc-team/protobuf /var/local/git/protobuf
-
-# Install the patched version of protoc
-RUN cd /var/local/git/protobuf && \
-  ./autogen.sh && \
-  ./configure --prefix=/usr && \
-  make && make check && make install && make clean
+# Install a GitHub SSH service credential that gives access to the GitHub repo while it's private
+# TODO: remove this once the repo is public
+ADD .ssh .ssh
+RUN chmod 600 .ssh/github.rsa
+RUN mkdir -p $HOME/.ssh && echo 'Host github.com' > $HOME/.ssh/config
+RUN echo "    IdentityFile /.ssh/github.rsa" >> $HOME/.ssh/config
+RUN echo 'StrictHostKeyChecking no' >> $HOME/.ssh/config
 
 # Define the default command.
 CMD ["bash"]
diff --git a/tools/dockerfile/grpc_cxx/Dockerfile b/tools/dockerfile/grpc_cxx/Dockerfile
index cf38e976b14429a7707577b26f15d2c200d36ee2..ea3a1dba8f1793067001757ed2509bf2ce2837d0 100644
--- a/tools/dockerfile/grpc_cxx/Dockerfile
+++ b/tools/dockerfile/grpc_cxx/Dockerfile
@@ -1,15 +1,18 @@
 # Dockerfile for gRPC C++
 FROM grpc/base
 
-# Start the daemon that allows access to the protected git-on-borg repos
-RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
-
-RUN git clone https://team.googlesource.com/one-platform-grpc-team/grpc /var/local/git/grpc
-RUN cd /var/local/git/grpc \
-  && git pull --recurse-submodules \
-  && git submodule update --init --recursive
+# Get the source from GitHub
+RUN git clone git@github.com:google/grpc.git /var/local/git/grpc
+RUN cd /var/local/git/grpc && \
+  git pull --recurse-submodules && \
+  git submodule update --init --recursive
 
+# Build the protobuf library; then the C core.
+RUN cd /var/local/git/grpc/third_party/protobuf && \
+  ./autogen.sh && \
+  ./configure --prefix=/usr && \
+  make -j12 && make check && make install && make clean
 RUN make install -C /var/local/git/grpc
 
 # Define the default command.
-CMD ["bash"]
\ No newline at end of file
+CMD ["bash"]
diff --git a/tools/dockerfile/grpc_php/Dockerfile b/tools/dockerfile/grpc_php/Dockerfile
index 0e50af70a225da3cdcde8869ed29bc238aa31e27..177587669c6a7532797be70af7cefcfe23134dc3 100644
--- a/tools/dockerfile/grpc_php/Dockerfile
+++ b/tools/dockerfile/grpc_php/Dockerfile
@@ -1,9 +1,6 @@
 # Dockerfile for gRPC PHP
 FROM grpc/php_base
 
-# Start the daemon that allows access to the protected git-on-borg repos
-RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
-
 RUN cd /var/local/git/grpc \
   && git pull --recurse-submodules \
   && git submodule update --init --recursive
@@ -15,4 +12,4 @@ RUN cd /var/local/git/grpc/src/php/ext/grpc && git pull && phpize
 # Build the grpc PHP extension
 RUN cd /var/local/git/grpc/src/php/ext/grpc \
   && ./configure \
-  && make
\ No newline at end of file
+  && make
diff --git a/tools/dockerfile/grpc_php_base/Dockerfile b/tools/dockerfile/grpc_php_base/Dockerfile
index 8ec90f48b8f99d189ed7ca94387100b320a41117..47266a310e381e6633744e90ae6e09da26b9710d 100644
--- a/tools/dockerfile/grpc_php_base/Dockerfile
+++ b/tools/dockerfile/grpc_php_base/Dockerfile
@@ -43,9 +43,10 @@ RUN cd /var/local \
   && tar -xf php-5.5.17.tar.gz \
   && cd php-5.5.17 \
   && ./configure --with-zlib=/usr --with-libxml-dir=ext/libxml \
-  && make && make install
+  && make -j12 && make install
 
 # Start the daemon that allows access to the protected git-on-borg repos
+RUN git clone https://gerrit.googlesource.com/gcompute-tools /var/local/git/gcompute-tools
 RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
 
 # Download the patched PHP protobuf so that PHP gRPC clients can be generated
@@ -64,6 +65,18 @@ ENV PATH /usr/local/rvm/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/s
 # rake: a ruby version of make used to build the PHP Protobuf extension
 RUN rvm all do gem install ronn rake
 
+# Get the source from GitHub, this gets the protobuf library as well
+RUN git clone git@github.com:google/grpc.git /var/local/git/grpc
+RUN cd /var/local/git/grpc && \
+  git pull --recurse-submodules && \
+  git submodule update --init --recursive
+
+# Build and install the protobuf library
+RUN cd /var/local/git/grpc/third_party/protobuf && \
+  ./autogen.sh && \
+  ./configure --prefix=/usr && \
+  make -j12 && make check && make install && make clean
+
 # Install the patched PHP protobuf so that PHP gRPC clients can be generated
 # from proto3 schemas.
 RUN cd /var/local/git/protobuf-php \
@@ -75,10 +88,7 @@ RUN wget https://phar.phpunit.de/phpunit.phar \
   && chmod +x phpunit.phar \
   && mv phpunit.phar /usr/local/bin/phpunit
 
-RUN git clone https://team.googlesource.com/one-platform-grpc-team/grpc /var/local/git/grpc
-RUN cd /var/local/git/grpc \
-  && git submodule update --init --recursive
-
+# Build the C core
 RUN make static_c shared_c -j12 -C /var/local/git/grpc
 
 # Define the default command.
diff --git a/tools/dockerfile/grpc_ruby/Dockerfile b/tools/dockerfile/grpc_ruby/Dockerfile
index 9aa34bfcc99a94d0e02452a1fdecaeeb577250c5..43ec0183ca04bc436f8601e1675f839d259dcf1d 100644
--- a/tools/dockerfile/grpc_ruby/Dockerfile
+++ b/tools/dockerfile/grpc_ruby/Dockerfile
@@ -1,19 +1,14 @@
 # Dockerfile for gRPC Ruby
 FROM grpc/ruby_base
 
-# Start the daemon that allows access to the protected git-on-borg repos
-RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
-
+# Update the gRPC source and submodules
 RUN cd /var/local/git/grpc \
   && git pull --recurse-submodules \
   && git submodule update --init --recursive
 
+# Build the C core.
 RUN make install_c -C /var/local/git/grpc
 
-# Install the grpc gem locally with its dependencies and build the extension.
-RUN /bin/bash -l -c 'cd /var/local/git/beefcake && bundle && gem build beefcake.gemspec && gem install beefcake'
-RUN /bin/bash -l -c 'cd /var/local/git/grpc/src/ruby && bundle && rake compile:grpc && gem build grpc.gemspec && gem install grpc'
-
 # TODO add a command to run the unittest tests when the bug below is fixed
 # - the tests fail due to an error in the C threading library:
 #   they fail with 'ruby: __pthread_mutex_cond_lock_adjust for unknown reasons' at the end of a testcase
diff --git a/tools/dockerfile/grpc_ruby_base/Dockerfile b/tools/dockerfile/grpc_ruby_base/Dockerfile
index ad14e43ec7b6730ce2caaf1c1ea72770a228527d..b2af9d716069112381dae472cb37327599fd3b12 100644
--- a/tools/dockerfile/grpc_ruby_base/Dockerfile
+++ b/tools/dockerfile/grpc_ruby_base/Dockerfile
@@ -31,15 +31,6 @@ RUN apt-get update && apt-get install -y \
     sqlite3 \
     zlib1g-dev
 
-
-# Start the daemon that allows access to the protected git-on-borg repos
-RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
-
-# Download the patched Ruby protobuf (beefcake) so that Ruby gRPC clients can
-# be generated from proto3 schemas.
-RUN git clone https://team.googlesource.com/one-platform-grpc-team/grpc-ruby-beefcake \
-              /var/local/git/beefcake
-
 # Install RVM, use this to install ruby
 RUN gpg --keyserver hkp://keys.gnupg.net --recv-keys D39DC0E3  # Needed for RVM
 RUN /bin/bash -l -c "curl -L get.rvm.io | bash -s stable"
@@ -52,8 +43,17 @@ RUN /bin/bash -l -c "echo 'source /home/grpc_ruby/.rvm/scripts/rvm' >> ~/.bashrc
 RUN /bin/bash -l -c "echo 'rvm --default use ruby-2.1' >> ~/.bashrc"
 RUN /bin/bash -l -c "gem install bundler --no-ri --no-rdoc"
 
-RUN git clone https://team.googlesource.com/one-platform-grpc-team/grpc /var/local/git/grpc
-RUN cd /var/local/git/grpc \
-  && git submodule update --init --recursive
+# Get the source from GitHub
+RUN git clone git@github.com:google/grpc.git /var/local/git/grpc
+RUN cd /var/local/git/grpc && \
+  git pull --recurse-submodules && \
+  git submodule update --init --recursive
+
+# Build and install the protobuf library
+RUN cd /var/local/git/grpc/third_party/protobuf && \
+  ./autogen.sh && \
+  ./configure --prefix=/usr && \
+  make -j12 && make check && make install && make clean
 
-RUN make static_c shared_c -C /var/local/git/grpc
\ No newline at end of file
+# Build the C core
+RUN make static_c shared_c -j12 -C /var/local/git/grpc
diff --git a/tools/gce_setup/shared_startup_funcs.sh b/tools/gce_setup/shared_startup_funcs.sh
index 9ea6eca46197fc43585f42861e1b1d7652534650..9c747466a911118908b637ad2aa8e17c571fdd79 100755
--- a/tools/gce_setup/shared_startup_funcs.sh
+++ b/tools/gce_setup/shared_startup_funcs.sh
@@ -388,7 +388,7 @@ grpc_docker_pull_known() {
 # grpc_dockerfile_install "grpc/image" /var/local/dockerfile/grpc_image
 grpc_dockerfile_install() {
   local image_label=$1
-  [[ -n $image_label ]] || { echo "missing arg: image_label" >&2; return 1; }
+  [[ -n $image_label ]] || { echo "$FUNCNAME: missing arg: image_label" >&2; return 1; }
   local docker_img_url=0.0.0.0:5000/$image_label
 
   local dockerfile_dir=$2
@@ -400,19 +400,25 @@ grpc_dockerfile_install() {
   [[ $cache == "cache=1" ]] && { cache_opt=''; }
   [[ $cache == "cache=true" ]] && { cache_opt=''; }
 
-  [[ -d $dockerfile_dir ]] || { echo "not a valid dir: $dockerfile_dir"; return 1; }
+  [[ -d $dockerfile_dir ]] || { echo "$FUNCNAME: not a valid dir: $dockerfile_dir"; return 1; }
+
+  # For grpc/base, sync the ssh key into the .ssh dir in the dockerfile context
+
+  [[ $image_label == "grpc/base" ]] && {
+    grpc_docker_sync_github_key $dockerfile_dir/.ssh || return 1;
+  }
 
   # TODO(temiola): maybe make cache/no-cache a func option?
   sudo docker build $cache_opt -t $image_label $dockerfile_dir || {
-    echo "docker op error: build of $image_label <- $dockerfile_dir"
+    echo "$FUNCNAME:: build of $image_label <- $dockerfile_dir"
     return 1
   }
   sudo docker tag $image_label $docker_img_url || {
-    echo "docker op error: tag of $docker_img_url"
+    echo "$FUNCNAME: failed to tag $docker_img_url as $image_label"
     return 1
   }
   sudo docker push $docker_img_url || {
-    echo "docker op error: push of $docker_img_url"
+    echo "$FUNCNAME: failed to push $docker_img_url"
     return 1
   }
 }
@@ -428,3 +434,31 @@ grpc_dockerfile_install() {
 grpc_dockerfile_refresh() {
   grpc_dockerfile_install "$@"
 }
+
+# grpc_docker_sync_github_key.
+#
+# Copies the docker github key from GCS to the target dir
+#
+# call-seq:
+#   grpc_docker_sync_github_key <target_dir>
+grpc_docker_sync_github_key() {
+  local target_dir=$1
+  [[ -n $target_dir ]] || { echo "$FUNCNAME: missing arg: target_dir" >&2; return 1; }
+
+  # determine the admin root, i.e. the parent of the dockerfile root
+  local gs_dockerfile_root=$(load_metadata "attributes/gs_dockerfile_root")
+  [[ -n $gs_dockerfile_root ]] || {
+    echo "$FUNCNAME: missing metadata: gs_dockerfile_root" >&2
+    return 1
+  }
+  local gcs_admin_root=$(dirname $gs_dockerfile_root)
+
+  # cp the key from GCS to a known local path
+  local gcs_key_path=$gcs_admin_root/github/ssh_key
+  local local_key_path=$target_dir/github.rsa
+  mkdir -p $target_dir || {
+    echo "$FUNCNAME: could not create dir: $target_dir" 1>&2
+    return 1
+  }
+  gsutil cp $gcs_key_path $local_key_path
+}