Writing/reading protocol buffers
For sending complex JSON/JavaScript objects to C++ binaries over the wire, I am using Protocol Buffers. There is native protobuf support for Node.js these days, so I'm not using any third-party bindings.
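For reference, I compiled the message definition shown further down with protoc's JavaScript plugin and require the generated module like this (the output file name model_pb.js follows protoc's convention for a model.proto input with the commonjs import style):

// Compiled with: protoc --js_out=import_style=commonjs,binary:. model.proto
var protocols = require('./model_pb.js'); // generated message classes
var fs = require('fs'); // used further down to write/read the serialized model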
// Set maximum execution time of binary to equal the
// remainder of processing time, minus a second to allow
// for parsing.
var timeLimit = context.getRemainingTimeInMillis() - 1000;
// Check if meta parameters are given in the request.
// Assign default values if they are not.
var model = new protocols.Model();
// Sort the resolutions.
function descending(a, b) {
  if (a.width > b.width) {
    return -1;
  } else if (a.width < b.width) {
    return 1;
  }
  return 0;
}
// Construct image objects.
var images = request.images.map(function(image) {
  // Perform the sort.
  image.resolutions.sort(descending);
  // Create an image protobuffer.
  var imageProto = new protocols.Model.Image();
  // Assign the original's resolution to the image.
  imageProto.setWidth(image.resolutions[0].width);
  imageProto.setHeight(image.resolutions[0].height);
  // Return the result.
  return imageProto;
});
// Construct flag enumeration references.
var flags = request.flags.map(function(flag) {
  return protocols.Model.Flag[flag];
});
// Assign request properties to protobuf.
model.setImagesList (images );
model.setFlagsList (flags );
model.setMinNoOfPages (request.minNoOfPages ? request.minNoOfPages : 1 );
model.setMaxNoOfPages (request.maxNoOfPages ? request.maxNoOfPages : 1 );
model.setMaxPerPage (request.maxPerPage ? request.maxPerPage : 5 );
model.setPageWidth (request.pageWidth ? request.pageWidth : 3508 );
model.setPageHeight (request.pageHeight ? request.pageHeight : 2480 );
model.setTimeLimit (request.timeLimit ? request.timeLimit : timeLimit );
model.setBorderWidth (request.borderWidth ? request.borderWidth : 0 );
model.setMinDim (request.minDim ? request.minDim : 0 );
// This is where things go wrong.
var serialized = model.serializeBinary();
fs.writeFileSync('model.pb', serialized);
var read = fs.readFileSync('model.pb'),
    model2 = protocols.Model.deserializeBinary(read);
console.log(model.toObject());
console.log(model2.toObject());
Above is the piece of code I'm stuck on. I managed to compile this protobuf message definition:
syntax = "proto3";
package layout;
message Model {
enum Flag {
FILL_PAGE = 0;
BORDERS = 1;
}
message Image {
int32 width = 1;
int32 height = 2;
}
repeated Flag flags = 1;
repeated Image images = 2;
string avoid_layout = 3;
int32 min_no_of_pages = 4;
int32 max_no_of_pages = 5;
int32 max_per_page = 6;
int32 page_width = 7;
int32 page_height = 8;
int32 time_limit = 9;
int32 border_width = 10;
int32 min_dim = 11;
}
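A note on naming: the snake_case field names above map to camelCase accessors in the generated JavaScript, and repeated fields get a List suffix, which is why the code calls setters like setMinNoOfPages and setFlagsList. A quick sketch of the generated accessors (the values here are only illustrative):

var model = new protocols.Model();
model.setMinNoOfPages(2); // min_no_of_pages = 4
model.setFlagsList([protocols.Model.Flag.BORDERS]); // repeated Flag flags = 1
console.log(model.getMinNoOfPages()); // -> 2
console.log(model.getFlagsList()); // -> [1]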
However, the documentation on JavaScript support for protobuf is minimal (https://developers.google.com/protocol-buffers/docs/reference/javascript-generated#message) and I can't figure out how to write my messages to a file and then read them back in. Can someone explain how to do this?
I imagine the solution is some variation on the last few lines of my code, but currently I'm getting this error:
AssertionError: Failure: Type not convertible to Uint8Array.
You are probably witnessing an obscure case of a Node Buffer not being recognized as a Uint8Array. I found a relevant issue report about exactly this. So try enforcing the type with

protocols.Model.deserializeBinary(new Uint8Array(read));

or replacing the constructor, as suggested in that report. The case described there is particularly similar to yours - it reads from a binary file too.
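Putting this together, a minimal sketch of the full write/read round trip with that fix applied (the module path ./model_pb.js is an assumption; adjust it to wherever protoc put the generated file):

var fs = require('fs');
var protocols = require('./model_pb.js');

var model = new protocols.Model();
model.setPageWidth(3508);
model.setPageHeight(2480);

// serializeBinary() returns a Uint8Array, which writeFileSync accepts as-is.
fs.writeFileSync('model.pb', model.serializeBinary());

// readFileSync() returns a Node Buffer; wrap it in a Uint8Array before decoding.
var read = fs.readFileSync('model.pb');
var model2 = protocols.Model.deserializeBinary(new Uint8Array(read));

console.log(model2.toObject()); // should match model.toObject()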