nghttpx: Add response mruby hook

Tatsuhiro Tsujikawa 2015-09-03 01:32:15 +09:00
parent d20229d9b9
commit baadec5ef4
12 changed files with 100 additions and 22 deletions

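For orientation before the per-file hunks: every upstream's on_downstream_header_complete() now runs the new response hook, which can let the response pass through, fail (mapped to a 500 reply), or produce the complete response itself, in which case the backend stream is cancelled rather than forwarded. A minimal self-contained sketch of that contract, using hypothetical simplified types rather than the real shrpx classes:

```cpp
#include <functional>
#include <iostream>

// Toy model of the response-hook contract introduced by this commit.
enum class ResponseState { Incomplete, Complete };

struct Downstream {
  ResponseState response_state = ResponseState::Incomplete;
};

// Returns 0 on success, non-zero if the mruby script raised an exception.
using ResponseHook = std::function<int(Downstream &)>;

int on_downstream_header_complete(Downstream &d, const ResponseHook &hook) {
  if (hook(d) != 0) {
    std::cout << "hook raised -> reply with 500, stop forwarding\n";
    return -1;
  }
  if (d.response_state == ResponseState::Complete) {
    std::cout << "hook hijacked the response -> cancel backend stream\n";
    return -1;
  }
  std::cout << "hook passed -> forward backend response as usual\n";
  return 0;
}

int main() {
  Downstream d;
  // A hook that leaves the response alone.
  on_downstream_header_complete(d, [](Downstream &) { return 0; });
  // A hook that generates the whole response itself (early return).
  on_downstream_header_complete(d, [](Downstream &dn) {
    dn.response_state = ResponseState::Complete;
    return 0;
  });
}
```

The Http2Upstream, HttpsUpstream and SpdyUpstream hunks below follow this shape.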
View File

@@ -99,6 +99,7 @@ OPTIONS = [
     "tls-ticket-key-memcached-max-retry",
     "tls-ticket-key-memcached-max-fail",
     "on-request-mruby-file",
+    "on-response-mruby-file",
     "conf",
 ]

View File

@@ -1904,6 +1904,7 @@ int main(int argc, char **argv) {
       {SHRPX_OPT_TLS_TICKET_KEY_MEMCACHED_MAX_FAIL, required_argument, &flag,
        90},
       {SHRPX_OPT_ON_REQUEST_MRUBY_FILE, required_argument, &flag, 91},
+      {SHRPX_OPT_ON_RESPONSE_MRUBY_FILE, required_argument, &flag, 92},
       {nullptr, 0, nullptr, 0}};
 
   int option_index = 0;
@@ -2305,6 +2306,9 @@ int main(int argc, char **argv) {
       // --on-request-mruby-file
       cmdcfgs.emplace_back(SHRPX_OPT_ON_REQUEST_MRUBY_FILE, optarg);
       break;
+    case 92:
+      // --on-response-mruby-file
+      cmdcfgs.emplace_back(SHRPX_OPT_ON_RESPONSE_MRUBY_FILE, optarg);
     default:
      break;
    }

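The second hunk above is pure option plumbing: entries in the long-option table store their id (91, 92, ...) through the &flag pointer, getopt_long() then returns 0, and main() switches on flag to record the value. A stripped-down, runnable miniature of that mechanism (a hypothetical standalone program, not the real main()):

```cpp
#include <getopt.h>

#include <cstdio>

// Each long option without a short form writes its id through &flag;
// getopt_long() returns 0 for such options, and we dispatch on flag.
int main(int argc, char **argv) {
  int flag = 0;
  struct option long_options[] = {
      {"on-request-mruby-file", required_argument, &flag, 91},
      {"on-response-mruby-file", required_argument, &flag, 92},
      {nullptr, 0, nullptr, 0}};

  int c;
  while ((c = getopt_long(argc, argv, "", long_options, nullptr)) != -1) {
    if (c != 0) {
      continue; // not one of the flag-dispatched long options
    }
    switch (flag) {
    case 91:
      std::printf("on-request-mruby-file=%s\n", optarg);
      break;
    case 92:
      std::printf("on-response-mruby-file=%s\n", optarg);
      break;
    default:
      break;
    }
  }
  return 0;
}
```

Invoked as `./a.out --on-response-mruby-file=hook.rb`, it prints the recorded value.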
View File

@@ -691,6 +691,7 @@ enum {
   SHRPX_OPTID_NPN_LIST,
   SHRPX_OPTID_OCSP_UPDATE_INTERVAL,
   SHRPX_OPTID_ON_REQUEST_MRUBY_FILE,
+  SHRPX_OPTID_ON_RESPONSE_MRUBY_FILE,
   SHRPX_OPTID_PADDING,
   SHRPX_OPTID_PID_FILE,
   SHRPX_OPTID_PRIVATE_KEY_FILE,
@@ -1112,6 +1113,11 @@ int option_lookup_token(const char *name, size_t namelen) {
     break;
   case 22:
     switch (name[21]) {
+    case 'e':
+      if (util::strieq_l("on-response-mruby-fil", name, 21)) {
+        return SHRPX_OPTID_ON_RESPONSE_MRUBY_FILE;
+      }
+      break;
     case 'i':
       if (util::strieq_l("backend-http-proxy-ur", name, 21)) {
         return SHRPX_OPTID_BACKEND_HTTP_PROXY_URI;
@@ -1947,6 +1953,10 @@ int parse_config(const char *opt, const char *optarg,
   case SHRPX_OPTID_ON_REQUEST_MRUBY_FILE:
     mod_config()->on_request_mruby_file = strcopy(optarg);
 
+    return 0;
+  case SHRPX_OPTID_ON_RESPONSE_MRUBY_FILE:
+    mod_config()->on_response_mruby_file = strcopy(optarg);
+
     return 0;
   case SHRPX_OPTID_CONF:
     LOG(WARN) << "conf: ignored";

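option_lookup_token(), extended above, is a hand-rolled lookup: it switches on the option name's length, then on one distinguishing character, then confirms the remaining prefix case-insensitively, which is why the 22-character "on-response-mruby-file" only needs a new `case 'e'` branch under `case 22`. A simplified self-contained sketch (strieq_n stands in for util::strieq_l):

```cpp
#include <cctype>
#include <cstddef>
#include <cstring>
#include <iostream>

enum {
  OPTID_ON_REQUEST_MRUBY_FILE,
  OPTID_ON_RESPONSE_MRUBY_FILE,
  OPTID_UNKNOWN = -1,
};

// Case-insensitive comparison of the first n bytes.
static bool strieq_n(const char *lit, const char *name, std::size_t n) {
  for (std::size_t i = 0; i < n; ++i) {
    if (std::tolower(static_cast<unsigned char>(lit[i])) !=
        std::tolower(static_cast<unsigned char>(name[i]))) {
      return false;
    }
  }
  return true;
}

int option_lookup_token(const char *name, std::size_t namelen) {
  switch (namelen) {
  case 21: // "on-request-mruby-file"
    if (name[20] == 'e' && strieq_n("on-request-mruby-fil", name, 20)) {
      return OPTID_ON_REQUEST_MRUBY_FILE;
    }
    break;
  case 22: // "on-response-mruby-file"
    if (name[21] == 'e' && strieq_n("on-response-mruby-fil", name, 21)) {
      return OPTID_ON_RESPONSE_MRUBY_FILE;
    }
    break;
  }
  return OPTID_UNKNOWN;
}

int main() {
  const char *opt = "on-response-mruby-file";
  std::cout << option_lookup_token(opt, std::strlen(opt)) << "\n"; // 1
}
```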
View File

@@ -184,6 +184,7 @@ constexpr char SHRPX_OPT_TLS_TICKET_KEY_MEMCACHED_MAX_RETRY[] =
 constexpr char SHRPX_OPT_TLS_TICKET_KEY_MEMCACHED_MAX_FAIL[] =
     "tls-ticket-key-memcached-max-fail";
 constexpr char SHRPX_OPT_ON_REQUEST_MRUBY_FILE[] = "on-request-mruby-file";
+constexpr char SHRPX_OPT_ON_RESPONSE_MRUBY_FILE[] = "on-response-mruby-file";
 
 union sockaddr_union {
   sockaddr_storage storage;

View File

@@ -36,6 +36,7 @@ enum ErrorCode {
   SHRPX_ERR_NETWORK = -100,
   SHRPX_ERR_EOF = -101,
   SHRPX_ERR_INPROGRESS = -102,
+  SHRPX_ERR_DCONN_CANCELED = -103,
 };
 
 } // namespace shrpx

View File

@@ -903,10 +903,16 @@ int on_response_headers(Http2Session *http2session, Downstream *downstream,
   rv = upstream->on_downstream_header_complete(downstream);
   if (rv != 0) {
-    http2session->submit_rst_stream(frame->hd.stream_id,
-                                    NGHTTP2_PROTOCOL_ERROR);
-    downstream->set_response_state(Downstream::MSG_RESET);
+    // Handling early return (in other words, response was hijacked by
+    // mruby scripting).
+    if (downstream->get_response_state() == Downstream::MSG_COMPLETE) {
+      http2session->submit_rst_stream(frame->hd.stream_id, NGHTTP2_CANCEL);
+    } else {
+      http2session->submit_rst_stream(frame->hd.stream_id,
+                                      NGHTTP2_INTERNAL_ERROR);
+      downstream->set_response_state(Downstream::MSG_RESET);
+    }
   }
 
   return 0;
 }

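The reset-code choice above maps onto RFC 7540 section 7: when the mruby script has already produced the complete response, the backend stream is merely no longer wanted, hence CANCEL; any other failure of on_downstream_header_complete() is reported as INTERNAL_ERROR (previously PROTOCOL_ERROR). A tiny sketch of just that decision, with the error-code values taken from the RFC and a hypothetical helper name:

```cpp
#include <cstdint>
#include <iostream>

// RST_STREAM error codes from RFC 7540, section 7.
constexpr uint32_t H2_INTERNAL_ERROR = 0x2;
constexpr uint32_t H2_CANCEL = 0x8;

// Code used when forwarding the backend response headers fails.
uint32_t rst_stream_error_code(bool response_hijacked_by_mruby) {
  return response_hijacked_by_mruby ? H2_CANCEL : H2_INTERNAL_ERROR;
}

int main() {
  std::cout << std::hex << rst_stream_error_code(true) << " "
            << rst_stream_error_code(false) << "\n"; // 8 2
}
```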
View File

@@ -1209,6 +1209,22 @@ int Http2Upstream::on_downstream_header_complete(Downstream *downstream) {
         downstream->get_request_http2_scheme());
   }
 
+  if (!downstream->get_non_final_response()) {
+    auto worker = handler_->get_worker();
+    auto mruby_ctx = worker->get_mruby_context();
+
+    if (mruby_ctx->run_on_response_proc(downstream) != 0) {
+      if (error_reply(downstream, 500) != 0) {
+        return -1;
+      }
+
+      return -1;
+    }
+
+    if (downstream->get_response_state() == Downstream::MSG_COMPLETE) {
+      return -1;
+    }
+  }
+
   size_t nheader = downstream->get_response_headers().size();
   auto nva = std::vector<nghttp2_nv>();
   // 3 means :status and possible server and via header field.

View File

@@ -774,6 +774,12 @@ int HttpDownstreamConnection::on_read() {
   auto htperr = HTTP_PARSER_ERRNO(&response_htp_);
 
   if (htperr != HPE_OK) {
+    // Handling early return (in other words, response was hijacked
+    // by mruby scripting).
+    if (downstream_->get_response_state() == Downstream::MSG_COMPLETE) {
+      return SHRPX_ERR_DCONN_CANCELED;
+    }
+
     if (LOG_ENABLED(INFO)) {
       DCLOG(INFO, this) << "HTTP parser failure: "
                         << "(" << http_errno_name(htperr) << ") "

View File

@@ -611,6 +611,11 @@ int HttpsUpstream::downstream_read(DownstreamConnection *dconn) {
     return downstream_eof(dconn);
   }
 
+  if (rv == SHRPX_ERR_DCONN_CANCELED) {
+    downstream->pop_downstream_connection();
+    goto end;
+  }
+
   if (rv < 0) {
     return downstream_error(dconn, Downstream::EVENT_ERROR);
   }
@@ -782,6 +787,20 @@ int HttpsUpstream::on_downstream_header_complete(Downstream *downstream) {
     }
   }
 
+  if (!downstream->get_non_final_response()) {
+    auto worker = handler_->get_worker();
+    auto mruby_ctx = worker->get_mruby_context();
+
+    if (mruby_ctx->run_on_response_proc(downstream) != 0) {
+      error_reply(500);
+      return -1;
+    }
+
+    if (downstream->get_response_state() == Downstream::MSG_COMPLETE) {
+      return -1;
+    }
+  }
+
   auto connect_method = downstream->get_request_method() == HTTP_CONNECT;
 
   std::string hdrs = "HTTP/";

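The HttpDownstreamConnection and HttpsUpstream hunks above work as a pair: once the script has completed the response, whatever the origin server still sends will make the response parser fail, so on_read() reports the new SHRPX_ERR_DCONN_CANCELED instead of a generic error, and downstream_read() simply detaches the backend connection. A reduced, self-contained model of that signalling (all names are hypothetical; only the -103 value is taken from the error enum above):

```cpp
#include <iostream>

constexpr int ERR_GENERIC = -1;
constexpr int ERR_DCONN_CANCELED = -103; // SHRPX_ERR_DCONN_CANCELED

struct Downstream {
  bool response_complete = false; // set when mruby hijacked the response
  bool backend_attached = true;
};

// Backend read path: a parser failure is fatal unless the response was
// already completed by the hook, in which case it is a benign cancel.
int backend_on_read(const Downstream &d, bool parser_failed) {
  if (parser_failed) {
    return d.response_complete ? ERR_DCONN_CANCELED : ERR_GENERIC;
  }
  return 0;
}

// Upstream read loop: a cancel just drops the backend connection.
int downstream_read(Downstream &d, bool parser_failed) {
  int rv = backend_on_read(d, parser_failed);
  if (rv == ERR_DCONN_CANCELED) {
    d.backend_attached = false; // pop_downstream_connection()
    return 0;
  }
  if (rv < 0) {
    return -1; // treat as a downstream error
  }
  return 0;
}

int main() {
  Downstream d;
  d.response_complete = true;
  std::cout << downstream_read(d, /*parser_failed=*/true) << " "
            << d.backend_attached << "\n"; // 0 0
}
```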
View File

@@ -40,39 +40,40 @@ namespace mruby {
 MRubyContext::MRubyContext(mrb_state *mrb, RProc *on_request_proc,
                            RProc *on_response_proc)
     : mrb_(mrb), on_request_proc_(on_request_proc),
-      on_response_proc_(on_response_proc) {}
+      on_response_proc_(on_response_proc), running_(false) {}
 
 MRubyContext::~MRubyContext() { mrb_close(mrb_); }
 
-namespace {
-int run_request_proc(mrb_state *mrb, Downstream *downstream, RProc *proc) {
-  if (!proc) {
+int MRubyContext::run_request_proc(Downstream *downstream, RProc *proc) {
+  if (!proc || running_) {
     return 0;
   }
 
+  running_ = true;
+
   MRubyAssocData data{downstream};
 
-  mrb->ud = &data;
+  mrb_->ud = &data;
 
   int rv = 0;
-  auto ai = mrb_gc_arena_save(mrb);
+  auto ai = mrb_gc_arena_save(mrb_);
 
-  auto res = mrb_run(mrb, proc, mrb_top_self(mrb));
+  auto res = mrb_run(mrb_, proc, mrb_top_self(mrb_));
   (void)res;
 
-  if (mrb->exc) {
+  if (mrb_->exc) {
     rv = -1;
     auto error =
-        mrb_str_ptr(mrb_funcall(mrb, mrb_obj_value(mrb->exc), "inspect", 0));
+        mrb_str_ptr(mrb_funcall(mrb_, mrb_obj_value(mrb_->exc), "inspect", 0));
 
     LOG(ERROR) << "Exception caught while executing mruby code: "
                << error->as.heap.ptr;
 
-    mrb->exc = 0;
+    mrb_->exc = 0;
   }
 
-  mrb->ud = nullptr;
-  mrb_gc_arena_restore(mrb, ai);
+  mrb_->ud = nullptr;
+  mrb_gc_arena_restore(mrb_, ai);
 
   if (data.request_headers_dirty) {
     downstream->index_request_headers();
@@ -82,21 +83,17 @@ int run_request_proc(mrb_state *mrb, Downstream *downstream, RProc *proc) {
     downstream->index_response_headers();
   }
 
-  if (downstream->get_response_state() == Downstream::MSG_COMPLETE) {
-    downstream->pop_downstream_connection();
-  }
+  running_ = false;
 
   return rv;
 }
-} // namespace
 
 int MRubyContext::run_on_request_proc(Downstream *downstream) {
-  return run_request_proc(mrb_, downstream, on_request_proc_);
+  return run_request_proc(downstream, on_request_proc_);
 }
 
 int MRubyContext::run_on_response_proc(Downstream *downstream) {
-  // TODO not implemented yet
-  return 0;
+  return run_request_proc(downstream, on_response_proc_);
 }
 
 // Based on

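The MRubyContext changes above also turn run_request_proc() into a member guarded by the new running_ flag, so a hook cannot re-enter the shared mrb_state while another hook is still executing. A tiny model of that guard (a hypothetical class, not the real MRubyContext):

```cpp
#include <iostream>

struct HookRunner {
  bool running_ = false;

  // Mirrors the `if (!proc || running_) return 0;` early-out added above:
  // refuse to run a hook while another one is already on the stack.
  template <typename Proc> int run(Proc &&proc) {
    if (running_) {
      return 0;
    }
    running_ = true;
    int rv = proc();
    running_ = false;
    return rv;
  }
};

int main() {
  HookRunner r;
  int rv = r.run([&r] {
    // A nested invocation from inside the script is a no-op.
    return r.run([] {
      std::cout << "inner hook ran\n";
      return -1;
    });
  });
  std::cout << "outer rv=" << rv << "\n"; // outer rv=0, inner never runs
}
```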
View File

@@ -46,10 +46,13 @@ public:
   int run_on_request_proc(Downstream *downstream);
   int run_on_response_proc(Downstream *downstream);
 
+  int run_request_proc(Downstream *downstream, RProc *proc);
+
 private:
   mrb_state *mrb_;
   RProc *on_request_proc_;
   RProc *on_response_proc_;
+  bool running_;
 };
 
 struct MRubyAssocData {

View File

@@ -865,6 +865,20 @@ int SpdyUpstream::on_downstream_header_complete(Downstream *downstream) {
     return 0;
   }
 
+  auto worker = handler_->get_worker();
+  auto mruby_ctx = worker->get_mruby_context();
+
+  if (mruby_ctx->run_on_response_proc(downstream) != 0) {
+    if (error_reply(downstream, 500) != 0) {
+      return -1;
+    }
+
+    return -1;
+  }
+
+  if (downstream->get_response_state() == Downstream::MSG_COMPLETE) {
+    return -1;
+  }
+
   if (LOG_ENABLED(INFO)) {
     DLOG(INFO, downstream) << "HTTP response header completed";
   }