Commit eeccd21f authored by murgatroid99

Merge branch 'master' of github.com:google/grpc

parents 9e14ead3 7e3d7120
Showing 790 additions and 62 deletions
@@ -17,3 +17,5 @@ coverage
# cache for run_tests.py
.run_tests_cache
+# emacs temp files
+*~
\ No newline at end of file
@@ -31,6 +31,8 @@
 *
 */
+var capitalize = require('underscore.string/capitalize');
/**
 * Get a function that deserializes a specific type of protobuf.
 * @param {function()} cls The constructor of the message type to deserialize
@@ -73,6 +75,9 @@ function fullyQualifiedName(value) {
    return '';
  }
  var name = value.name;
+  if (value.className === 'Service.RPCMethod') {
+    name = capitalize(name);
+  }
  if (value.hasOwnProperty('parent')) {
    var parent_name = fullyQualifiedName(value.parent);
    if (parent_name !== '') {
...
@@ -119,10 +119,10 @@ function mathDivMany(stream) {
var server = new Server({
  'math.Math' : {
-    Div: mathDiv,
-    Fib: mathFib,
-    Sum: mathSum,
-    DivMany: mathDivMany
+    div: mathDiv,
+    fib: mathFib,
+    sum: mathSum,
+    divMany: mathDivMany
  }
});
...
syntax = "proto2";
package grpc.testing;
// An empty message that you can re-use to avoid defining duplicated empty
// messages in your project. A typical example is to use it as argument or the
// return value of a service API. For instance:
//
// service Foo {
// rpc Bar (grpc.testing.Empty) returns (grpc.testing.Empty) { };
// };
//
// MOE:begin_strip
// The difference between this one and net/rpc/empty-message.proto is that
// 1) The generated message here is in proto2 C++ API.
// 2) The proto2.Empty has minimum dependencies
// (no message_set or net/rpc dependencies)
// MOE:end_strip
message Empty {}
/*
*
* Copyright 2014, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
var fs = require('fs');
var path = require('path');
var grpc = require('..');
var testProto = grpc.load(__dirname + '/test.proto').grpc.testing;
var assert = require('assert');
/**
* Create a buffer filled with size zeroes
* @param {number} size The length of the buffer
* @return {Buffer} The new buffer
*/
function zeroBuffer(size) {
var zeros = new Buffer(size);
zeros.fill(0);
return zeros;
}
/**
* Run the empty_unary test
* @param {Client} client The client to test against
* @param {function} done Callback to call when the test is completed. Included
* primarily for use with mocha
*/
function emptyUnary(client, done) {
var call = client.emptyCall({}, function(err, resp) {
assert.ifError(err);
});
call.on('status', function(status) {
assert.strictEqual(status.code, grpc.status.OK);
if (done) {
done();
}
});
}
/**
* Run the large_unary test
* @param {Client} client The client to test against
* @param {function} done Callback to call when the test is completed. Included
* primarily for use with mocha
*/
function largeUnary(client, done) {
var arg = {
response_type: testProto.PayloadType.COMPRESSABLE,
response_size: 314159,
payload: {
body: zeroBuffer(271828)
}
};
var call = client.unaryCall(arg, function(err, resp) {
assert.ifError(err);
assert.strictEqual(resp.payload.type, testProto.PayloadType.COMPRESSABLE);
assert.strictEqual(resp.payload.body.limit - resp.payload.body.offset,
314159);
});
call.on('status', function(status) {
assert.strictEqual(status.code, grpc.status.OK);
if (done) {
done();
}
});
}
/**
* Run the client_streaming test
* @param {Client} client The client to test against
* @param {function} done Callback to call when the test is completed. Included
* primarily for use with mocha
*/
function clientStreaming(client, done) {
var call = client.streamingInputCall(function(err, resp) {
assert.ifError(err);
assert.strictEqual(resp.aggregated_payload_size, 74922);
});
call.on('status', function(status) {
assert.strictEqual(status.code, grpc.status.OK);
if (done) {
done();
}
});
var payload_sizes = [27182, 8, 1828, 45904];
for (var i = 0; i < payload_sizes.length; i++) {
call.write({payload: {body: zeroBuffer(payload_sizes[i])}});
}
call.end();
}
/**
* Run the server_streaming test
* @param {Client} client The client to test against
* @param {function} done Callback to call when the test is completed. Included
* primarily for use with mocha
*/
function serverStreaming(client, done) {
var arg = {
response_type: testProto.PayloadType.COMPRESSABLE,
response_parameters: [
{size: 31415},
{size: 9},
{size: 2653},
{size: 58979}
]
};
var call = client.streamingOutputCall(arg);
var resp_index = 0;
call.on('data', function(value) {
assert(resp_index < 4);
assert.strictEqual(value.payload.type, testProto.PayloadType.COMPRESSABLE);
assert.strictEqual(value.payload.body.limit - value.payload.body.offset,
arg.response_parameters[resp_index].size);
resp_index += 1;
});
call.on('status', function(status) {
assert.strictEqual(resp_index, 4);
assert.strictEqual(status.code, grpc.status.OK);
if (done) {
done();
}
});
}
/**
* Run the ping_pong test
* @param {Client} client The client to test against
* @param {function} done Callback to call when the test is completed. Included
* primarily for use with mocha
*/
function pingPong(client, done) {
var payload_sizes = [27182, 8, 1828, 45904];
var response_sizes = [31415, 9, 2653, 58979];
var call = client.fullDuplexCall();
call.on('status', function(status) {
assert.strictEqual(status.code, grpc.status.OK);
if (done) {
done();
}
});
var index = 0;
call.write({
response_type: testProto.PayloadType.COMPRESSABLE,
response_parameters: [
{size: response_sizes[index]}
],
payload: {body: zeroBuffer(payload_sizes[index])}
});
call.on('data', function(response) {
assert.strictEqual(response.payload.type,
testProto.PayloadType.COMPRESSABLE);
assert.equal(response.payload.body.limit - response.payload.body.offset,
response_sizes[index]);
index += 1;
if (index == 4) {
call.end();
} else {
call.write({
response_type: testProto.PayloadType.COMPRESSABLE,
response_parameters: [
{size: response_sizes[index]}
],
payload: {body: zeroBuffer(payload_sizes[index])}
});
}
});
}
/**
* Run the empty_stream test.
* NOTE: This does not work, but should with the new invoke API
* @param {Client} client The client to test against
* @param {function} done Callback to call when the test is completed. Included
* primarily for use with mocha
*/
function emptyStream(client, done) {
var call = client.fullDuplexCall();
call.on('status', function(status) {
assert.strictEqual(status.code, grpc.status.OK);
if (done) {
done();
}
});
call.on('data', function(value) {
assert.fail(value, null, 'No data should have been received', '!==');
});
call.end();
}
/**
* Map from test case names to test functions
*/
var test_cases = {
empty_unary: emptyUnary,
large_unary: largeUnary,
client_streaming: clientStreaming,
server_streaming: serverStreaming,
ping_pong: pingPong,
empty_stream: emptyStream
};
/**
* Execute a single test case.
* @param {string} address The address of the server to connect to, in the
* format "hostname:port"
* @param {string} host_override The hostname of the server to use as an SSL
* override
* @param {string} test_case The name of the test case to run
* @param {bool} tls Indicates that a secure channel should be used
* @param {function} done Callback to call when the test is completed. Included
* primarily for use with mocha
*/
function runTest(address, host_override, test_case, tls, done) {
// TODO(mlumish): enable TLS functionality
var options = {};
if (tls) {
var ca_path = path.join(__dirname, '../test/data/ca.pem');
var ca_data = fs.readFileSync(ca_path);
var creds = grpc.Credentials.createSsl(ca_data);
options.credentials = creds;
if (host_override) {
options['grpc.ssl_target_name_override'] = host_override;
}
}
var client = new testProto.TestService(address, options);
test_cases[test_case](client, done);
}
if (require.main === module) {
var parseArgs = require('minimist');
var argv = parseArgs(process.argv, {
string: ['server_host', 'server_host_override', 'server_port', 'test_case',
'use_tls', 'use_test_ca']
});
runTest(argv.server_host + ':' + argv.server_port, argv.server_host_override,
argv.test_case, argv.use_tls === 'true');
}
/**
* See docs for runTest
*/
exports.runTest = runTest;
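
For orientation, a minimal sketch of how this exported runTest helper can be driven from a mocha spec, mirroring the interop test file later in this commit; the address below is a placeholder and assumes a matching interop server is already listening there:

var interop_client = require('../interop/interop_client.js');

describe('interop smoke test', function() {
  it('passes large_unary', function(done) {
    // Arguments: address, host_override, test_case, tls, done.
    interop_client.runTest('localhost:8080', 'foo.test.google.com',
                           'large_unary', true, done);
  });
});
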
/*
*
* Copyright 2014, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
var fs = require('fs');
var path = require('path');
var _ = require('underscore');
var grpc = require('..');
var testProto = grpc.load(__dirname + '/test.proto').grpc.testing;
var Server = grpc.buildServer([testProto.TestService.service]);
/**
* Create a buffer filled with size zeroes
* @param {number} size The length of the buffer
* @return {Buffer} The new buffer
*/
function zeroBuffer(size) {
var zeros = new Buffer(size);
zeros.fill(0);
return zeros;
}
/**
* Respond to an empty parameter with an empty response.
* NOTE: this currently does not work due to issue #137
* @param {Call} call Call to handle
* @param {function(Error, Object)} callback Callback to call with result
* or error
*/
function handleEmpty(call, callback) {
callback(null, {});
}
/**
* Handle a unary request by sending the requested payload
* @param {Call} call Call to handle
* @param {function(Error, Object)} callback Callback to call with result or
* error
*/
function handleUnary(call, callback) {
var req = call.request;
var zeros = zeroBuffer(req.response_size);
var payload_type = req.response_type;
if (payload_type === testProto.PayloadType.RANDOM) {
payload_type = [
testProto.PayloadType.COMPRESSABLE,
testProto.PayloadType.UNCOMPRESSABLE][Math.random() < 0.5 ? 0 : 1];
}
callback(null, {payload: {type: payload_type, body: zeros}});
}
/**
* Respond to a streaming call with the total size of all payloads
* @param {Call} call Call to handle
* @param {function(Error, Object)} callback Callback to call with result or
* error
*/
function handleStreamingInput(call, callback) {
var aggregate_size = 0;
call.on('data', function(value) {
aggregate_size += value.payload.body.limit - value.payload.body.offset;
});
call.on('end', function() {
callback(null, {aggregated_payload_size: aggregate_size});
});
}
/**
* Respond to a payload request with a stream of the requested payloads
* @param {Call} call Call to handle
*/
function handleStreamingOutput(call) {
var req = call.request;
var payload_type = req.response_type;
if (payload_type === testProto.PayloadType.RANDOM) {
payload_type = [
testProto.PayloadType.COMPRESSABLE,
testProto.PayloadType.UNCOMPRESSABLE][Math.random() < 0.5 ? 0 : 1];
}
_.each(req.response_parameters, function(resp_param) {
call.write({
payload: {
body: zeroBuffer(resp_param.size),
type: payload_type
}
});
});
call.end();
}
/**
* Respond to a stream of payload requests with a stream of payload responses as
* they arrive.
* @param {Call} call Call to handle
*/
function handleFullDuplex(call) {
call.on('data', function(value) {
var payload_type = value.response_type;
if (payload_type === testProto.PayloadType.RANDOM) {
payload_type = [
testProto.PayloadType.COMPRESSABLE,
testProto.PayloadType.UNCOMPRESSABLE][Math.random() < 0.5 ? 0 : 1];
}
_.each(value.response_parameters, function(resp_param) {
call.write({
payload: {
body: zeroBuffer(resp_param.size),
type: payload_type
}
});
});
});
call.on('end', function() {
call.end();
});
}
/**
* Respond to a stream of payload requests with a stream of payload responses
* after all requests have arrived
* @param {Call} call Call to handle
*/
function handleHalfDuplex(call) {
throw new Error('HalfDuplexCall not yet implemented');
}
/**
* Get a server object bound to the given port
* @param {string} port Port to which to bind
* @param {boolean} tls Indicates that the bound port should use TLS
* @return {Server} Server object bound to the given port
*/
function getServer(port, tls) {
// TODO(mlumish): enable TLS functionality
var options = {};
if (tls) {
var key_path = path.join(__dirname, '../test/data/server1.key');
var pem_path = path.join(__dirname, '../test/data/server1.pem');
var key_data = fs.readFileSync(key_path);
var pem_data = fs.readFileSync(pem_path);
var server_creds = grpc.ServerCredentials.createSsl(null,
key_data,
pem_data);
options.credentials = server_creds;
}
var server = new Server({
'grpc.testing.TestService' : {
emptyCall: handleEmpty,
unaryCall: handleUnary,
streamingOutputCall: handleStreamingOutput,
streamingInputCall: handleStreamingInput,
fullDuplexCall: handleFullDuplex,
halfDuplexCall: handleHalfDuplex
}
}, options);
server.bind('0.0.0.0:' + port, tls);
return server;
}
if (require.main === module) {
var parseArgs = require('minimist');
var argv = parseArgs(process.argv, {
string: ['port', 'use_tls']
});
var server = getServer(argv.port, argv.use_tls === 'true');
server.start();
}
/**
* See docs for getServer
*/
exports.getServer = getServer;
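
As a quick reference, a sketch of starting this server from another script, based on the require.main block above; the port value and require path are illustrative:

var interop_server = require('../interop/interop_server.js');

// getServer(port, tls) binds the test service to 0.0.0.0:<port>, attaching the
// server1 test key/certificate pair when tls is true, and returns the server.
var server = interop_server.getServer('8080', true);
server.start();
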
// Message definitions to be used by integration test service definitions.
syntax = "proto2";
package grpc.testing;
// The type of payload that should be returned.
enum PayloadType {
// Compressable text format.
COMPRESSABLE = 0;
// Uncompressable binary format.
UNCOMPRESSABLE = 1;
// Randomly chosen from all other formats defined in this enum.
RANDOM = 2;
}
// A block of data, to simply increase gRPC message size.
message Payload {
// The type of data in body.
optional PayloadType type = 1;
// Primary contents of payload.
optional bytes body = 2;
}
// Unary request.
message SimpleRequest {
// Desired payload type in the response from the server.
// If response_type is RANDOM, server randomly chooses one from other formats.
optional PayloadType response_type = 1;
// Desired payload size in the response from the server.
// If response_type is COMPRESSABLE, this denotes the size before compression.
optional int32 response_size = 2;
// Optional input payload sent along with the request.
optional Payload payload = 3;
}
// Unary response, as configured by the request.
message SimpleResponse {
// Payload to increase message size.
optional Payload payload = 1;
// The user the request came from, for verifying authentication was
// successful when the client expected it.
optional int64 effective_gaia_user_id = 2;
}
// Client-streaming request.
message StreamingInputCallRequest {
// Optional input payload sent along with the request.
optional Payload payload = 1;
// Not expecting any payload from the response.
}
// Client-streaming response.
message StreamingInputCallResponse {
// Aggregated size of payloads received from the client.
optional int32 aggregated_payload_size = 1;
}
// Configuration for a particular response.
message ResponseParameters {
// Desired payload sizes in responses from the server.
// If response_type is COMPRESSABLE, this denotes the size before compression.
optional int32 size = 1;
// Desired interval between consecutive responses in the response stream in
// microseconds.
optional int32 interval_us = 2;
}
// Server-streaming request.
message StreamingOutputCallRequest {
// Desired payload type in the response from the server.
// If response_type is RANDOM, the payload from each response in the stream
// might be of different types. This is to simulate a mixed type of payload
// stream.
optional PayloadType response_type = 1;
// Configuration for each expected response message.
repeated ResponseParameters response_parameters = 2;
// Optional input payload sent along with the request.
optional Payload payload = 3;
}
// Server-streaming response, as configured by the request and parameters.
message StreamingOutputCallResponse {
// Payload to increase response size.
optional Payload payload = 1;
}
// An integration test service that covers all the method signature permutations
// of unary/streaming requests/responses.
syntax = "proto2";
import "empty.proto";
import "messages.proto";
package grpc.testing;
// A simple service to test the various types of RPCs and experiment with
// performance with various types of payload.
service TestService {
// One empty request followed by one empty response.
rpc EmptyCall(grpc.testing.Empty) returns (grpc.testing.Empty);
// One request followed by one response.
// The server returns the client payload as-is.
rpc UnaryCall(SimpleRequest) returns (SimpleResponse);
// One request followed by a sequence of responses (streamed download).
// The server returns the payload with client desired type and sizes.
rpc StreamingOutputCall(StreamingOutputCallRequest)
returns (stream StreamingOutputCallResponse);
// A sequence of requests followed by one response (streamed upload).
// The server returns the aggregated size of client payload as the result.
rpc StreamingInputCall(stream StreamingInputCallRequest)
returns (StreamingInputCallResponse);
// A sequence of requests with each request served by the server immediately.
// As one request could lead to multiple responses, this interface
// demonstrates the idea of full duplexing.
rpc FullDuplexCall(stream StreamingOutputCallRequest)
returns (stream StreamingOutputCallResponse);
// A sequence of requests followed by a sequence of responses.
// The server buffers all the client requests and then serves them in order. A
// stream of responses is returned to the client when the server starts with
// the first request.
rpc HalfDuplexCall(stream StreamingOutputCallRequest)
returns (stream StreamingOutputCallResponse);
}
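
A brief sketch, based on the grpc.load calls in the interop files above, of how this service definition is consumed from Node; the address is a placeholder:

var grpc = require('..');
var testProto = grpc.load(__dirname + '/test.proto').grpc.testing;

// With the surface changes later in this commit, generated stub methods are
// exposed with a leading lower-case letter (emptyCall, unaryCall, ...).
var client = new testProto.TestService('localhost:8080', {});
client.emptyCall({}, function(err, resp) {
  if (err) { throw err; }
});
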
@@ -55,7 +55,7 @@ function loadObject(value) {
    return result;
  } else if (value.className === 'Service') {
    return surface_client.makeClientConstructor(value);
-  } else if (value.className === 'Service.Message') {
+  } else if (value.className === 'Message' || value.className === 'Enum') {
    return value.build();
  } else {
    return value;
@@ -96,3 +96,13 @@ exports.status = grpc.status;
 * Call error name to code number mapping
 */
exports.callError = grpc.callError;
+/**
+ * Credentials factories
+ */
+exports.Credentials = grpc.Credentials;
+/**
+ * ServerCredentials factories
+ */
+exports.ServerCredentials = grpc.ServerCredentials;
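
The interop client and server above show how these newly exported factories are intended to be used; a condensed sketch (certificate paths are illustrative):

var fs = require('fs');
var grpc = require('..');

// Client side: trust a test CA and pin the expected server name.
var options = {
  credentials: grpc.Credentials.createSsl(fs.readFileSync('test/data/ca.pem')),
  'grpc.ssl_target_name_override': 'foo.test.google.com'
};

// Server side: present a private key and certificate chain.
var server_creds = grpc.ServerCredentials.createSsl(
    null,
    fs.readFileSync('test/data/server1.key'),
    fs.readFileSync('test/data/server1.pem'));
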
@@ -8,12 +8,14 @@
  "dependencies": {
    "bindings": "^1.2.1",
    "nan": "~1.3.0",
-    "underscore": "^1.7.0",
-    "protobufjs": "murgatroid99/ProtoBuf.js"
+    "protobufjs": "murgatroid99/ProtoBuf.js",
+    "underscore": "^1.7.0",
+    "underscore.string": "^3.0.0"
  },
  "devDependencies": {
-    "mocha": "~1.21.0",
-    "highland": "~2.0.0"
+    "highland": "~2.2.0",
+    "mocha": "~1.21.0",
+    "minimist": "^1.1.0"
  },
  "main": "main.js"
}
@@ -33,6 +33,9 @@
var _ = require('underscore');
+var capitalize = require('underscore.string/capitalize');
+var decapitalize = require('underscore.string/decapitalize');
var client = require('./client.js');
var common = require('./common.js');
@@ -352,8 +355,9 @@ function makeClientConstructor(service) {
        method_type = 'unary';
      }
    }
-    SurfaceClient.prototype[method.name] = requester_makers[method_type](
-        prefix + method.name,
+    SurfaceClient.prototype[decapitalize(method.name)] =
+        requester_makers[method_type](
+            prefix + capitalize(method.name),
        common.serializeCls(method.resolvedRequestType.build()),
        common.deserializeCls(method.resolvedResponseType.build()));
  });
...
@@ -33,6 +33,9 @@
var _ = require('underscore');
+var capitalize = require('underscore.string/capitalize');
+var decapitalize = require('underscore.string/decapitalize');
var Server = require('./server.js');
var stream = require('stream');
@@ -332,15 +335,16 @@ function makeServerConstructor(services) {
        method_type = 'unary';
      }
    }
-    if (service_handlers[service_name][method.name] === undefined) {
+    if (service_handlers[service_name][decapitalize(method.name)] ===
+        undefined) {
      throw new Error('Method handler for ' +
          common.fullyQualifiedName(method) + ' not provided.');
    }
    var binary_handler = handler_makers[method_type](
-        service_handlers[service_name][method.name],
+        service_handlers[service_name][decapitalize(method.name)],
        common.serializeCls(method.resolvedResponseType.build()),
        common.deserializeCls(method.resolvedRequestType.build()));
-    server.register(prefix + method.name, binary_handler);
+    server.register(prefix + capitalize(method.name), binary_handler);
  });
}, this);
}
...
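
Together with the client-side hunk above, this changes the JavaScript surface so that stub methods and handler keys use a leading lower-case letter, while the name registered on the wire keeps the capitalized proto method name. A hedged client-side illustration, assuming a math.proto defining the math.Math service exercised in the tests below (load path and address are placeholders):

var grpc = require('..');
var math = grpc.load(__dirname + '/math.proto').math;
var math_client = new math.Math('localhost:7070', {});

// The stub method is now div(); the registered route still uses the
// capitalized proto name Div.
math_client.div({dividend: 7, divisor: 4}, function(err, value) {
  // For this input the example server answers quotient 1, remainder 3.
});
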
/*
*
* Copyright 2014, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
var interop_server = require('../interop/interop_server.js');
var interop_client = require('../interop/interop_client.js');
var port_picker = require('../port_picker');
var server;
var port;
var name_override = 'foo.test.google.com';
describe('Interop tests', function() {
before(function(done) {
port_picker.nextAvailablePort(function(addr) {
server = interop_server.getServer(addr.substring(addr.indexOf(':') + 1), true);
server.listen();
port = addr;
done();
});
});
// This depends on not using a binary stream
it.skip('should pass empty_unary', function(done) {
interop_client.runTest(port, name_override, 'empty_unary', true, done);
});
it('should pass large_unary', function(done) {
interop_client.runTest(port, name_override, 'large_unary', true, done);
});
it('should pass client_streaming', function(done) {
interop_client.runTest(port, name_override, 'client_streaming', true, done);
});
it('should pass server_streaming', function(done) {
interop_client.runTest(port, name_override, 'server_streaming', true, done);
});
it('should pass ping_pong', function(done) {
interop_client.runTest(port, name_override, 'ping_pong', true, done);
});
// This depends on the new invoke API
it.skip('should pass empty_stream', function(done) {
interop_client.runTest(port, name_override, 'empty_stream', true, done);
});
});
@@ -61,7 +61,7 @@ describe('Math client', function() {
  });
  it('should handle a single request', function(done) {
    var arg = {dividend: 7, divisor: 4};
-    var call = math_client.Div(arg, function handleDivResult(err, value) {
+    var call = math_client.div(arg, function handleDivResult(err, value) {
      assert.ifError(err);
      assert.equal(value.quotient, 1);
      assert.equal(value.remainder, 3);
@@ -72,7 +72,7 @@ describe('Math client', function() {
    });
  });
  it('should handle a server streaming request', function(done) {
-    var call = math_client.Fib({limit: 7});
+    var call = math_client.fib({limit: 7});
    var expected_results = [1, 1, 2, 3, 5, 8, 13];
    var next_expected = 0;
    call.on('data', function checkResponse(value) {
@@ -85,7 +85,7 @@ describe('Math client', function() {
    });
  });
  it('should handle a client streaming request', function(done) {
-    var call = math_client.Sum(function handleSumResult(err, value) {
+    var call = math_client.sum(function handleSumResult(err, value) {
      assert.ifError(err);
      assert.equal(value.num, 21);
    });
@@ -103,7 +103,7 @@ describe('Math client', function() {
      assert.equal(value.quotient, index);
      assert.equal(value.remainder, 1);
    }
-    var call = math_client.DivMany();
+    var call = math_client.divMany();
    var response_index = 0;
    call.on('data', function(value) {
      checkResponse(response_index, value);
...
@@ -59,9 +59,9 @@ describe('Surface server constructor', function() {
    assert.throws(function() {
      new Server({
        'math.Math': {
-          'Div': function() {},
-          'DivMany': function() {},
-          'Fib': function() {}
+          'div': function() {},
+          'divMany': function() {},
+          'fib': function() {}
        }
      });
    }, /math.Math.Sum/);
...
@@ -13,6 +13,7 @@ RUN apt-get update && apt-get install -y \
  libc6 \
  libc6-dbg \
  libc6-dev \
+  libgtest-dev \
  libtool \
  make \
  strace \
@@ -34,23 +35,13 @@ ENV CLOUD_SDK /google-cloud-sdk
RUN $CLOUD_SDK/install.sh --usage-reporting=true --path-update=true --bash-completion=true --rc-path=/.bashrc --disable-installation-options
ENV PATH $CLOUD_SDK/bin:$PATH
-# Install gcompute-tools to allow access to private git-on-borg repos
-RUN git clone https://gerrit.googlesource.com/gcompute-tools /var/local/git/gcompute-tools
-# Start the daemon that allows access to private git-on-borg repos
-RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
-# Install the grpc-tools scripts dir from git
-RUN git clone https://team.googlesource.com/one-platform-grpc-team/grpc-tools /var/local/git/grpc-tools
-# Install the grpc-protobuf dir that has the protoc patch
-RUN git clone https://team.googlesource.com/one-platform-grpc-team/protobuf /var/local/git/protobuf
-# Install the patched version of protoc
-RUN cd /var/local/git/protobuf && \
-  ./autogen.sh && \
-  ./configure --prefix=/usr && \
-  make && make check && make install && make clean
+# Install a GitHub SSH service credential that gives access to the GitHub repo while it's private
+# TODO: remove this once the repo is public
+ADD .ssh .ssh
+RUN chmod 600 .ssh/github.rsa
+RUN mkdir -p $HOME/.ssh && echo 'Host github.com' > $HOME/.ssh/config
+RUN echo " IdentityFile /.ssh/github.rsa" >> $HOME/.ssh/config
+RUN echo 'StrictHostKeyChecking no' >> $HOME/.ssh/config
# Define the default command.
CMD ["bash"]

# Dockerfile for gRPC C++
FROM grpc/base
-# Start the daemon that allows access to the protected git-on-borg repos
-RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
-RUN git clone https://team.googlesource.com/one-platform-grpc-team/grpc /var/local/git/grpc
-RUN cd /var/local/git/grpc \
-  && git pull --recurse-submodules \
-  && git submodule update --init --recursive
+# Get the source from GitHub
+RUN git clone git@github.com:google/grpc.git /var/local/git/grpc
+RUN cd /var/local/git/grpc && \
+  git pull --recurse-submodules && \
+  git submodule update --init --recursive
+# Build the protobuf library; then the C core.
+RUN cd /var/local/git/grpc/third_party/protobuf && \
+  ./autogen.sh && \
+  ./configure --prefix=/usr && \
+  make -j12 && make check && make install && make clean
RUN make install -C /var/local/git/grpc
# Define the default command.
...
# Dockerfile for gRPC PHP
FROM grpc/php_base
-# Start the daemon that allows access to the protected git-on-borg repos
-RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
RUN cd /var/local/git/grpc \
  && git pull --recurse-submodules \
  && git submodule update --init --recursive
...
@@ -43,9 +43,10 @@ RUN cd /var/local \
  && tar -xf php-5.5.17.tar.gz \
  && cd php-5.5.17 \
  && ./configure --with-zlib=/usr --with-libxml-dir=ext/libxml \
-  && make && make install
+  && make -j12 && make install
# Start the daemon that allows access to the protected git-on-borg repos
+RUN git clone https://gerrit.googlesource.com/gcompute-tools /var/local/git/gcompute-tools
RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
# Download the patched PHP protobuf so that PHP gRPC clients can be generated
@@ -64,6 +65,18 @@ ENV PATH /usr/local/rvm/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/s
# rake: a ruby version of make used to build the PHP Protobuf extension
RUN rvm all do gem install ronn rake
+# Get the source from GitHub, this gets the protobuf library as well
+RUN git clone git@github.com:google/grpc.git /var/local/git/grpc
+RUN cd /var/local/git/grpc && \
+  git pull --recurse-submodules && \
+  git submodule update --init --recursive
+# Build and install the protobuf library
+RUN cd /var/local/git/grpc/third_party/protobuf && \
+  ./autogen.sh && \
+  ./configure --prefix=/usr && \
+  make -j12 && make check && make install && make clean
# Install the patched PHP protobuf so that PHP gRPC clients can be generated
# from proto3 schemas.
RUN cd /var/local/git/protobuf-php \
@@ -75,10 +88,7 @@ RUN wget https://phar.phpunit.de/phpunit.phar \
  && chmod +x phpunit.phar \
  && mv phpunit.phar /usr/local/bin/phpunit
-RUN git clone https://team.googlesource.com/one-platform-grpc-team/grpc /var/local/git/grpc
-RUN cd /var/local/git/grpc \
-  && git submodule update --init --recursive
+# Build the C core
RUN make static_c shared_c -j12 -C /var/local/git/grpc
# Define the default command.
...
# Dockerfile for gRPC Ruby
FROM grpc/ruby_base
-# Start the daemon that allows access to the protected git-on-borg repos
-RUN /var/local/git/gcompute-tools/git-cookie-authdaemon
+# Build the C libary
RUN cd /var/local/git/grpc \
  && git pull --recurse-submodules \
  && git submodule update --init --recursive
+# Build the C core.
RUN make install_c -C /var/local/git/grpc
+# Install the grpc gem locally with its dependencies and build the extension.
+RUN /bin/bash -l -c 'cd /var/local/git/beefcake && bundle && gem build beefcake.gemspec && gem install beefcake'
+RUN /bin/bash -l -c 'cd /var/local/git/grpc/src/ruby && bundle && rake compile:grpc && gem build grpc.gemspec && gem install grpc'
+# TODO add a command to run the unittest tests when the bug below is fixed
+# - the tests fail due to an error in the C threading library:
+#   they fail with 'ruby: __pthread_mutex_cond_lock_adjust for unknown reasons' at the end of a testcase
...