http: remove legacy parser

Remove the legacy `http_parser` dependency and all code that uses it,
in favor of llhttp, given that the latter has been the default
throughout Node.js 12 with no outstanding issues.

PR-URL: https://github.com/nodejs/node/pull/29589
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Reviewed-By: Ruben Bridgewater <ruben@bridgewater.de>
Reviewed-By: Minwoo Jung <minwoo@nodesource.com>
Reviewed-By: Trivikram Kamat <trivikr.dev@gmail.com>
Anna Henningsen 2019-09-17 14:17:08 +02:00
parent ba3be578d8
commit ac59dc42ed
39 changed files with 99 additions and 8657 deletions

LICENSE
View File

@ -114,29 +114,6 @@ The externally maintained libraries used by Node.js are:
purpose. It is provided "as is" without express or implied warranty.
"""
- HTTP Parser, located at deps/http_parser, is licensed as follows:
"""
Copyright Joyent, Inc. and other Node contributors.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
- ICU, located at deps/icu-small, is licensed as follows:
"""
COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)

View File

@ -141,7 +141,7 @@ test-code-cache: with-code-cache
echo "'test-code-cache' target is a noop"
out/Makefile: config.gypi common.gypi node.gyp \
deps/uv/uv.gyp deps/http_parser/http_parser.gyp deps/zlib/zlib.gyp \
deps/uv/uv.gyp deps/llhttp/llhttp.gyp deps/zlib/zlib.gyp \
tools/v8_gypfiles/toolchain.gypi tools/v8_gypfiles/features.gypi \
tools/v8_gypfiles/inspector.gypi tools/v8_gypfiles/v8.gyp
$(PYTHON) tools/gyp_node.py -f make

View File

@ -1,30 +0,0 @@
/out/
core
tags
*.o
test
test_g
test_fast
bench
url_parser
parsertrace
parsertrace_g
*.mk
*.Makefile
*.so.*
*.exe.*
*.exe
*.a
# Visual Studio uglies
*.suo
*.sln
*.vcxproj
*.vcxproj.filters
*.vcxproj.user
*.opensdf
*.ncrunchsolution*
*.sdf
*.vsp
*.psess

View File

@ -1,8 +0,0 @@
# update AUTHORS with:
# git log --all --reverse --format='%aN <%aE>' | perl -ne 'BEGIN{print "# Authors ordered by first contribution.\n"} print unless $h{$_}; $h{$_} = 1' > AUTHORS
Ryan Dahl <ry@tinyclouds.org>
Salman Haq <salman.haq@asti-usa.com>
Simon Zimmermann <simonz05@gmail.com>
Thomas LE ROUX <thomas@november-eleven.fr> LE ROUX Thomas <thomas@procheo.fr>
Thomas LE ROUX <thomas@november-eleven.fr> Thomas LE ROUX <thomas@procheo.fr>
Fedor Indutny <fedor@indutny.com>

View File

@ -1,13 +0,0 @@
language: c
compiler:
- clang
- gcc
script:
- "make"
notifications:
email: false
irc:
- "irc.freenode.net#node-ci"

View File

@ -1,68 +0,0 @@
# Authors ordered by first contribution.
Ryan Dahl <ry@tinyclouds.org>
Jeremy Hinegardner <jeremy@hinegardner.org>
Sergey Shepelev <temotor@gmail.com>
Joe Damato <ice799@gmail.com>
tomika <tomika_nospam@freemail.hu>
Phoenix Sol <phoenix@burninglabs.com>
Cliff Frey <cliff@meraki.com>
Ewen Cheslack-Postava <ewencp@cs.stanford.edu>
Santiago Gala <sgala@apache.org>
Tim Becker <tim.becker@syngenio.de>
Jeff Terrace <jterrace@gmail.com>
Ben Noordhuis <info@bnoordhuis.nl>
Nathan Rajlich <nathan@tootallnate.net>
Mark Nottingham <mnot@mnot.net>
Aman Gupta <aman@tmm1.net>
Tim Becker <tim.becker@kuriositaet.de>
Sean Cunningham <sean.cunningham@mandiant.com>
Peter Griess <pg@std.in>
Salman Haq <salman.haq@asti-usa.com>
Cliff Frey <clifffrey@gmail.com>
Jon Kolb <jon@b0g.us>
Fouad Mardini <f.mardini@gmail.com>
Paul Querna <pquerna@apache.org>
Felix Geisendörfer <felix@debuggable.com>
koichik <koichik@improvement.jp>
Andre Caron <andre.l.caron@gmail.com>
Ivo Raisr <ivosh@ivosh.net>
James McLaughlin <jamie@lacewing-project.org>
David Gwynne <loki@animata.net>
Thomas LE ROUX <thomas@november-eleven.fr>
Randy Rizun <rrizun@ortivawireless.com>
Andre Louis Caron <andre.louis.caron@usherbrooke.ca>
Simon Zimmermann <simonz05@gmail.com>
Erik Dubbelboer <erik@dubbelboer.com>
Martell Malone <martellmalone@gmail.com>
Bertrand Paquet <bpaquet@octo.com>
BogDan Vatra <bogdan@kde.org>
Peter Faiman <peter@thepicard.org>
Corey Richardson <corey@octayn.net>
Tóth Tamás <tomika_nospam@freemail.hu>
Cam Swords <cam.swords@gmail.com>
Chris Dickinson <christopher.s.dickinson@gmail.com>
Uli Köhler <ukoehler@btronik.de>
Charlie Somerville <charlie@charliesomerville.com>
Patrik Stutz <patrik.stutz@gmail.com>
Fedor Indutny <fedor.indutny@gmail.com>
runner <runner.mei@gmail.com>
Alexis Campailla <alexis@janeasystems.com>
David Wragg <david@wragg.org>
Vinnie Falco <vinnie.falco@gmail.com>
Alex Butum <alexbutum@linux.com>
Rex Feng <rexfeng@gmail.com>
Alex Kocharin <alex@kocharin.ru>
Mark Koopman <markmontymark@yahoo.com>
Helge Heß <me@helgehess.eu>
Alexis La Goutte <alexis.lagoutte@gmail.com>
George Miroshnykov <george.miroshnykov@gmail.com>
Maciej Małecki <me@mmalecki.com>
Marc O'Morain <github.com@marcomorain.com>
Jeff Pinner <jpinner@twitter.com>
Timothy J Fontaine <tjfontaine@gmail.com>
Akagi201 <akagi201@gmail.com>
Romain Giraud <giraud.romain@gmail.com>
Jay Satiro <raysatiro@yahoo.com>
Arne Steen <Arne.Steen@gmx.de>
Kjell Schubert <kjell.schubert@gmail.com>
Olivier Mengué <dolmen@cpan.org>

View File

@ -1,19 +0,0 @@
Copyright Joyent, Inc. and other Node contributors.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.

View File

@ -1,160 +0,0 @@
# Copyright Joyent, Inc. and other Node contributors. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
PLATFORM ?= $(shell sh -c 'uname -s | tr "[A-Z]" "[a-z]"')
HELPER ?=
BINEXT ?=
SOLIBNAME = libhttp_parser
SOMAJOR = 2
SOMINOR = 8
SOREV = 0
ifeq (darwin,$(PLATFORM))
SOEXT ?= dylib
SONAME ?= $(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOEXT)
LIBNAME ?= $(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOREV).$(SOEXT)
else ifeq (wine,$(PLATFORM))
CC = winegcc
BINEXT = .exe.so
HELPER = wine
else
SOEXT ?= so
SONAME ?= $(SOLIBNAME).$(SOEXT).$(SOMAJOR).$(SOMINOR)
LIBNAME ?= $(SOLIBNAME).$(SOEXT).$(SOMAJOR).$(SOMINOR).$(SOREV)
endif
CC?=gcc
AR?=ar
CPPFLAGS ?=
LDFLAGS ?=
CPPFLAGS += -I.
CPPFLAGS_DEBUG = $(CPPFLAGS) -DHTTP_PARSER_STRICT=1
CPPFLAGS_DEBUG += $(CPPFLAGS_DEBUG_EXTRA)
CPPFLAGS_FAST = $(CPPFLAGS) -DHTTP_PARSER_STRICT=0
CPPFLAGS_FAST += $(CPPFLAGS_FAST_EXTRA)
CPPFLAGS_BENCH = $(CPPFLAGS_FAST)
CFLAGS += -Wall -Wextra -Werror
CFLAGS_DEBUG = $(CFLAGS) -O0 -g $(CFLAGS_DEBUG_EXTRA)
CFLAGS_FAST = $(CFLAGS) -O3 $(CFLAGS_FAST_EXTRA)
CFLAGS_BENCH = $(CFLAGS_FAST) -Wno-unused-parameter
CFLAGS_LIB = $(CFLAGS_FAST) -fPIC
LDFLAGS_LIB = $(LDFLAGS) -shared
INSTALL ?= install
PREFIX ?= /usr/local
LIBDIR = $(PREFIX)/lib
INCLUDEDIR = $(PREFIX)/include
ifeq (darwin,$(PLATFORM))
LDFLAGS_LIB += -Wl,-install_name,$(LIBDIR)/$(SONAME)
else
# TODO(bnoordhuis) The native SunOS linker expects -h rather than -soname...
LDFLAGS_LIB += -Wl,-soname=$(SONAME)
endif
test: test_g test_fast
$(HELPER) ./test_g$(BINEXT)
$(HELPER) ./test_fast$(BINEXT)
test_g: http_parser_g.o test_g.o
$(CC) $(CFLAGS_DEBUG) $(LDFLAGS) http_parser_g.o test_g.o -o $@
test_g.o: test.c http_parser.h Makefile
$(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c test.c -o $@
http_parser_g.o: http_parser.c http_parser.h Makefile
$(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c http_parser.c -o $@
test_fast: http_parser.o test.o http_parser.h
$(CC) $(CFLAGS_FAST) $(LDFLAGS) http_parser.o test.o -o $@
test.o: test.c http_parser.h Makefile
$(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c test.c -o $@
bench: http_parser.o bench.o
$(CC) $(CFLAGS_BENCH) $(LDFLAGS) http_parser.o bench.o -o $@
bench.o: bench.c http_parser.h Makefile
$(CC) $(CPPFLAGS_BENCH) $(CFLAGS_BENCH) -c bench.c -o $@
http_parser.o: http_parser.c http_parser.h Makefile
$(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c http_parser.c
test-run-timed: test_fast
while(true) do time $(HELPER) ./test_fast$(BINEXT) > /dev/null; done
test-valgrind: test_g
valgrind ./test_g
libhttp_parser.o: http_parser.c http_parser.h Makefile
$(CC) $(CPPFLAGS_FAST) $(CFLAGS_LIB) -c http_parser.c -o libhttp_parser.o
library: libhttp_parser.o
$(CC) $(LDFLAGS_LIB) -o $(LIBNAME) $<
package: http_parser.o
$(AR) rcs libhttp_parser.a http_parser.o
url_parser: http_parser.o contrib/url_parser.c
$(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o $@
url_parser_g: http_parser_g.o contrib/url_parser.c
$(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o $@
parsertrace: http_parser.o contrib/parsertrace.c
$(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o parsertrace$(BINEXT)
parsertrace_g: http_parser_g.o contrib/parsertrace.c
$(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o parsertrace_g$(BINEXT)
tags: http_parser.c http_parser.h test.c
ctags $^
install: library
$(INSTALL) -D http_parser.h $(DESTDIR)$(INCLUDEDIR)/http_parser.h
$(INSTALL) -D $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(LIBNAME)
ln -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SONAME)
ln -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT)
install-strip: library
$(INSTALL) -D http_parser.h $(DESTDIR)$(INCLUDEDIR)/http_parser.h
$(INSTALL) -D -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(LIBNAME)
ln -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SONAME)
ln -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT)
uninstall:
rm $(DESTDIR)$(INCLUDEDIR)/http_parser.h
rm $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT)
rm $(DESTDIR)$(LIBDIR)/$(SONAME)
rm $(DESTDIR)$(LIBDIR)/$(LIBNAME)
clean:
rm -f *.o *.a tags test test_fast test_g \
http_parser.tar libhttp_parser.so.* \
url_parser url_parser_g parsertrace parsertrace_g \
*.exe *.exe.so
contrib/url_parser.c: http_parser.h
contrib/parsertrace.c: http_parser.h
.PHONY: clean package test-run test-run-timed test-valgrind install install-strip uninstall

View File

@ -1,246 +0,0 @@
HTTP Parser
===========
[![Build Status](https://api.travis-ci.org/nodejs/http-parser.svg?branch=master)](https://travis-ci.org/nodejs/http-parser)
This is a parser for HTTP messages written in C. It parses both requests and
responses. The parser is designed to be used in high-performance HTTP
applications. It does not make any syscalls or allocations, it does not
buffer data, and it can be interrupted at any time. Depending on your
architecture, it only requires about 40 bytes of data per message
stream (in a web server that is per connection).
Features:
* No dependencies
* Handles persistent streams (keep-alive).
* Decodes chunked encoding.
* Upgrade support
* Defends against buffer overflow attacks.
The parser extracts the following information from HTTP messages:
* Header fields and values
* Content-Length
* Request method
* Response status code
* Transfer-Encoding
* HTTP version
* Request URL
* Message body
Usage
-----
One `http_parser` object is used per TCP connection. Initialize the struct
using `http_parser_init()` and set the callbacks. That might look something
like this for a request parser:
```c
http_parser_settings settings;
settings.on_url = my_url_callback;
settings.on_header_field = my_header_field_callback;
/* ... */
http_parser *parser = malloc(sizeof(http_parser));
http_parser_init(parser, HTTP_REQUEST);
parser->data = my_socket;
```
When data is received on the socket execute the parser and check for errors.
```c
size_t len = 80*1024, nparsed;
char buf[len];
ssize_t recved;
recved = recv(fd, buf, len, 0);
if (recved < 0) {
/* Handle error. */
}
/* Start up / continue the parser.
* Note we pass recved==0 to signal that EOF has been received.
*/
nparsed = http_parser_execute(parser, &settings, buf, recved);
if (parser->upgrade) {
/* handle new protocol */
} else if (nparsed != recved) {
/* Handle error. Usually just close the connection. */
}
```
`http_parser` needs to know where the end of the stream is. For example, sometimes
servers send responses without Content-Length and expect the client to
consume input (for the body) until EOF. To tell `http_parser` about EOF, give
`0` as the fourth parameter to `http_parser_execute()`. Callbacks and errors
can still be encountered during an EOF, so one must still be prepared
to receive them.
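For instance, once `recv()` reports that the peer has closed the connection, the
EOF can be forwarded to the parser like this (a minimal sketch; `parser`,
`settings`, and `buf` are assumed to be set up as in the snippets above):
```c
if (recved == 0) {
  /* A zero length tells http_parser that EOF has been reached. */
  size_t nparsed = http_parser_execute(parser, &settings, buf, 0);
  (void) nparsed;

  if (HTTP_PARSER_ERRNO(parser) != HPE_OK) {
    /* The message ended prematurely (e.g. mid-body); handle the error. */
  }
}
```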
Scalar-valued message information such as `status_code`, `method`, and the
HTTP version is stored in the parser structure. This data is only
temporarily stored in `http_parser` and gets reset on each new message. If
this information is needed later, copy it out of the structure during the
`headers_complete` callback.
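For example, a request handler might copy the method and HTTP version out of
the parser in its `on_headers_complete` callback (a sketch; the `request_info`
struct is an illustrative assumption, not part of the library):
```c
#include "http_parser.h"

/* Illustrative storage for data that must outlive the current message. */
struct request_info {
  unsigned int method;        /* one of the HTTP_* request method values */
  unsigned short http_major;  /* e.g. 1 and 1 for HTTP/1.1 */
  unsigned short http_minor;
};

int my_headers_complete_callback(http_parser* parser) {
  /* parser->data is assumed to point at a caller-allocated request_info. */
  struct request_info* info = (struct request_info*) parser->data;
  info->method = parser->method;         /* reset by the parser on the next message */
  info->http_major = parser->http_major;
  info->http_minor = parser->http_minor;
  return 0;                              /* 0 = continue parsing */
}
```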
The parser decodes the transfer-encoding for both requests and responses
transparently. That is, a chunked encoding is decoded before being sent to
the on_body callback.
The Special Problem of Upgrade
------------------------------
`http_parser` supports upgrading the connection to a different protocol. An
increasingly common example of this is the WebSocket protocol which sends
a request like
GET /demo HTTP/1.1
Upgrade: WebSocket
Connection: Upgrade
Host: example.com
Origin: http://example.com
WebSocket-Protocol: sample
followed by non-HTTP data.
(See [RFC6455](https://tools.ietf.org/html/rfc6455) for more information about the
WebSocket protocol.)
To support this, the parser will treat this as a normal HTTP message without a
body, issuing both on_headers_complete and on_message_complete callbacks. However,
http_parser_execute() will stop parsing at the end of the headers and return.
The user is expected to check if `parser->upgrade` has been set to 1 after
`http_parser_execute()` returns. Non-HTTP data begins in the supplied buffer at
the offset given by the return value of `http_parser_execute()`.
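That check might look roughly like this (a sketch reusing `buf`, `recved`,
`parser`, and `settings` from the earlier snippets):
```c
size_t nparsed = http_parser_execute(parser, &settings, buf, recved);

if (parser->upgrade) {
  /* Headers are done; everything after the first `nparsed` bytes is
   * non-HTTP data (e.g. WebSocket frames) for the upgraded protocol. */
  const char* upgrade_data = buf + nparsed;
  size_t upgrade_len = (size_t) recved - nparsed;
  /* ... hand (upgrade_data, upgrade_len) to the new protocol's handler ... */
} else if (nparsed != (size_t) recved) {
  /* Parse error: usually just close the connection. */
}
```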
Callbacks
---------
During the `http_parser_execute()` call, the callbacks set in
`http_parser_settings` will be executed. The parser maintains state and
never looks behind, so buffering the data is not necessary. If you need to
save certain data for later usage, you can do that from the callbacks.
There are two types of callbacks:
* notification `typedef int (*http_cb) (http_parser*);`
Callbacks: on_message_begin, on_headers_complete, on_message_complete.
* data `typedef int (*http_data_cb) (http_parser*, const char *at, size_t length);`
Callbacks: (requests only) on_url,
(common) on_header_field, on_header_value, on_body;
Callbacks must return 0 on success. Returning a non-zero value indicates an
error to the parser, making it exit immediately.
For cases where it is necessary to pass local information to/from a callback,
the `http_parser` object's `data` field can be used.
An example of such a case is when using threads to handle a socket connection,
parse a request, and then give a response over that socket. By instantiating
a thread-local struct containing relevant data (e.g. the accepted socket,
memory allocated for callbacks to write into, etc.), a parser's callbacks are
able to communicate data between the scope of the thread and the scope of the
callback in a thread-safe manner. This allows `http_parser` to be used in
multi-threaded contexts.
Example:
```
typedef struct {
socket_t sock;
void* buffer;
int buf_len;
} custom_data_t;
int my_url_callback(http_parser* parser, const char *at, size_t length) {
/* Access the thread-local custom_data_t struct through parser->data.
Use it to save parsed data for later use in the thread-local
buffer, or to communicate over the socket.
*/
parser->data;
...
return 0;
}
...
void http_parser_thread(socket_t sock) {
int nparsed = 0;
/* allocate memory for user data */
custom_data_t *my_data = malloc(sizeof(custom_data_t));
/* some information for use by callbacks.
* achieves thread -> callback information flow */
my_data->sock = sock;
/* instantiate a thread-local parser */
http_parser *parser = malloc(sizeof(http_parser));
http_parser_init(parser, HTTP_REQUEST); /* initialise parser */
/* this custom data reference is accessible through the reference to the
parser supplied to callback functions */
parser->data = my_data;
http_parser_settings settings; /* set up callbacks */
settings.on_url = my_url_callback;
/* execute parser */
nparsed = http_parser_execute(parser, &settings, buf, recved);
...
/* parsed information copied from callback.
can now perform action on data copied into thread-local memory from callbacks.
achieves callback -> thread information flow */
my_data->buffer;
...
}
```
If you parse an HTTP message in chunks (i.e. `read()` the request line
from the socket, parse, read half of the headers, parse, etc.), your data callbacks
may be called more than once. `http_parser` guarantees that the data pointer is only
valid for the lifetime of the callback. You can also `read()` into a heap-allocated
buffer to avoid copying memory around if this fits your application.
Reading headers may be a tricky task if you read/parse headers partially.
Basically, you need to remember whether the last header callback was a field or a value
and apply the following logic:
(on_header_field and on_header_value shortened to on_h_*)
------------------------ ------------ --------------------------------------------
| State (prev. callback) | Callback | Description/action |
------------------------ ------------ --------------------------------------------
| nothing (first call) | on_h_field | Allocate new buffer and copy callback data |
| | | into it |
------------------------ ------------ --------------------------------------------
| value | on_h_field | New header started. |
| | | Copy current name,value buffers to headers |
| | | list and allocate new buffer for new name |
------------------------ ------------ --------------------------------------------
| field | on_h_field | Previous name continues. Reallocate name |
| | | buffer and append callback data to it |
------------------------ ------------ --------------------------------------------
| field | on_h_value | Value for current header started. Allocate |
| | | new buffer and copy callback data to it |
------------------------ ------------ --------------------------------------------
| value | on_h_value | Value continues. Reallocate value buffer |
| | | and append callback data to it |
------------------------ ------------ --------------------------------------------
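A sketch of that logic in the two header callbacks (the `store_header()` helper
and the fixed-size buffers are illustrative assumptions; bounds checks are
omitted for brevity):
```c
#include <string.h>
#include "http_parser.h"

enum last_header_cb { HDR_NONE, HDR_FIELD, HDR_VALUE };

struct header_state {
  enum last_header_cb last;   /* which header callback fired last */
  char field[1024];           /* current header name */
  char value[8192];           /* current header value */
  size_t field_len, value_len;
};

/* Illustrative helper that records one completed (name, value) pair. */
void store_header(const char* f, size_t flen, const char* v, size_t vlen);

static int on_h_field(http_parser* p, const char* at, size_t len) {
  struct header_state* s = (struct header_state*) p->data;
  if (s->last == HDR_VALUE) {
    /* A new header started: store the previous pair and start a new name. */
    store_header(s->field, s->field_len, s->value, s->value_len);
    s->field_len = s->value_len = 0;
  }
  memcpy(s->field + s->field_len, at, len);  /* the name may span several calls */
  s->field_len += len;
  s->last = HDR_FIELD;
  return 0;
}

static int on_h_value(http_parser* p, const char* at, size_t len) {
  struct header_state* s = (struct header_state*) p->data;
  memcpy(s->value + s->value_len, at, len);  /* the value may span several calls */
  s->value_len += len;
  s->last = HDR_VALUE;
  return 0;
}
```
The final (name, value) pair would be stored in the same way from the
`on_headers_complete` callback.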
Parsing URLs
------------
A simplistic zero-copy URL parser is provided as `http_parser_parse_url()`.
Users of this library may wish to use it to parse URLs constructed from
consecutive `on_url` callbacks.
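For example, the host and path can be pulled out of an accumulated request URL
like this (a sketch; `url`/`url_len` stand for the bytes collected from `on_url`):
```c
#include <stdio.h>
#include "http_parser.h"

static void print_host_and_path(const char* url, size_t url_len) {
  struct http_parser_url u;
  http_parser_url_init(&u);

  /* The third argument (is_connect) is non-zero only for CONNECT requests. */
  if (http_parser_parse_url(url, url_len, 0, &u) != 0)
    return;  /* not a parsable URL */

  if (u.field_set & (1 << UF_HOST))
    printf("host: %.*s\n",
           (int) u.field_data[UF_HOST].len, url + u.field_data[UF_HOST].off);

  if (u.field_set & (1 << UF_PATH))
    printf("path: %.*s\n",
           (int) u.field_data[UF_PATH].len, url + u.field_data[UF_PATH].off);
}
```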
See examples of reading in headers:
* [partial example](http://gist.github.com/155877) in C
* [from http-parser tests](http://github.com/joyent/http-parser/blob/37a0ff8/test.c#L403) in C
* [from Node library](http://github.com/joyent/node/blob/842eaf4/src/http.js#L284) in Javascript

View File

@ -1,111 +0,0 @@
/* Copyright Fedor Indutny. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#include "http_parser.h"
#include <assert.h>
#include <stdio.h>
#include <string.h>
#include <sys/time.h>
static const char data[] =
"POST /joyent/http-parser HTTP/1.1\r\n"
"Host: github.com\r\n"
"DNT: 1\r\n"
"Accept-Encoding: gzip, deflate, sdch\r\n"
"Accept-Language: ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4\r\n"
"User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/39.0.2171.65 Safari/537.36\r\n"
"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,"
"image/webp,*/*;q=0.8\r\n"
"Referer: https://github.com/joyent/http-parser\r\n"
"Connection: keep-alive\r\n"
"Transfer-Encoding: chunked\r\n"
"Cache-Control: max-age=0\r\n\r\nb\r\nhello world\r\n0\r\n\r\n";
static const size_t data_len = sizeof(data) - 1;
static int on_info(http_parser* p) {
return 0;
}
static int on_data(http_parser* p, const char *at, size_t length) {
return 0;
}
static http_parser_settings settings = {
.on_message_begin = on_info,
.on_headers_complete = on_info,
.on_message_complete = on_info,
.on_header_field = on_data,
.on_header_value = on_data,
.on_url = on_data,
.on_status = on_data,
.on_body = on_data
};
int bench(int iter_count, int silent) {
struct http_parser parser;
int i;
int err;
struct timeval start;
struct timeval end;
float rps;
if (!silent) {
err = gettimeofday(&start, NULL);
assert(err == 0);
}
for (i = 0; i < iter_count; i++) {
size_t parsed;
http_parser_init(&parser, HTTP_REQUEST);
parsed = http_parser_execute(&parser, &settings, data, data_len);
assert(parsed == data_len);
}
if (!silent) {
err = gettimeofday(&end, NULL);
assert(err == 0);
fprintf(stdout, "Benchmark result:\n");
rps = (float) (end.tv_sec - start.tv_sec) +
(end.tv_usec - start.tv_usec) * 1e-6f;
fprintf(stdout, "Took %f seconds to run\n", rps);
rps = (float) iter_count / rps;
fprintf(stdout, "%f req/sec\n", rps);
fflush(stdout);
}
return 0;
}
int main(int argc, char** argv) {
if (argc == 2 && strcmp(argv[1], "infinite") == 0) {
for (;;)
bench(5000000, 1);
return 0;
} else {
return bench(5000000, 0);
}
}

View File

@ -1,157 +0,0 @@
/* Copyright Joyent, Inc. and other Node contributors.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
/* Dump what the parser finds to stdout as it happens */
#include "http_parser.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
int on_message_begin(http_parser* _) {
(void)_;
printf("\n***MESSAGE BEGIN***\n\n");
return 0;
}
int on_headers_complete(http_parser* _) {
(void)_;
printf("\n***HEADERS COMPLETE***\n\n");
return 0;
}
int on_message_complete(http_parser* _) {
(void)_;
printf("\n***MESSAGE COMPLETE***\n\n");
return 0;
}
int on_url(http_parser* _, const char* at, size_t length) {
(void)_;
printf("Url: %.*s\n", (int)length, at);
return 0;
}
int on_header_field(http_parser* _, const char* at, size_t length) {
(void)_;
printf("Header field: %.*s\n", (int)length, at);
return 0;
}
int on_header_value(http_parser* _, const char* at, size_t length) {
(void)_;
printf("Header value: %.*s\n", (int)length, at);
return 0;
}
int on_body(http_parser* _, const char* at, size_t length) {
(void)_;
printf("Body: %.*s\n", (int)length, at);
return 0;
}
void usage(const char* name) {
fprintf(stderr,
"Usage: %s $type $filename\n"
" type: -x, where x is one of {r,b,q}\n"
" parses file as a Response, reQuest, or Both\n",
name);
exit(EXIT_FAILURE);
}
int main(int argc, char* argv[]) {
enum http_parser_type file_type;
if (argc != 3) {
usage(argv[0]);
}
char* type = argv[1];
if (type[0] != '-') {
usage(argv[0]);
}
switch (type[1]) {
/* in the case of "-", type[1] will be NUL */
case 'r':
file_type = HTTP_RESPONSE;
break;
case 'q':
file_type = HTTP_REQUEST;
break;
case 'b':
file_type = HTTP_BOTH;
break;
default:
usage(argv[0]);
}
char* filename = argv[2];
FILE* file = fopen(filename, "r");
if (file == NULL) {
perror("fopen");
goto fail;
}
fseek(file, 0, SEEK_END);
long file_length = ftell(file);
if (file_length == -1) {
perror("ftell");
goto fail;
}
fseek(file, 0, SEEK_SET);
char* data = malloc(file_length);
if (fread(data, 1, file_length, file) != (size_t)file_length) {
fprintf(stderr, "couldn't read entire file\n");
free(data);
goto fail;
}
http_parser_settings settings;
memset(&settings, 0, sizeof(settings));
settings.on_message_begin = on_message_begin;
settings.on_url = on_url;
settings.on_header_field = on_header_field;
settings.on_header_value = on_header_value;
settings.on_headers_complete = on_headers_complete;
settings.on_body = on_body;
settings.on_message_complete = on_message_complete;
http_parser parser;
http_parser_init(&parser, file_type);
size_t nparsed = http_parser_execute(&parser, &settings, data, file_length);
free(data);
if (nparsed != (size_t)file_length) {
fprintf(stderr,
"Error: %s (%s)\n",
http_errno_description(HTTP_PARSER_ERRNO(&parser)),
http_errno_name(HTTP_PARSER_ERRNO(&parser)));
goto fail;
}
return EXIT_SUCCESS;
fail:
fclose(file);
return EXIT_FAILURE;
}

View File

@ -1,47 +0,0 @@
#include "http_parser.h"
#include <stdio.h>
#include <string.h>
void
dump_url (const char *url, const struct http_parser_url *u)
{
unsigned int i;
printf("\tfield_set: 0x%x, port: %u\n", u->field_set, u->port);
for (i = 0; i < UF_MAX; i++) {
if ((u->field_set & (1 << i)) == 0) {
printf("\tfield_data[%u]: unset\n", i);
continue;
}
printf("\tfield_data[%u]: off: %u, len: %u, part: %.*s\n",
i,
u->field_data[i].off,
u->field_data[i].len,
u->field_data[i].len,
url + u->field_data[i].off);
}
}
int main(int argc, char ** argv) {
struct http_parser_url u;
int len, connect, result;
if (argc != 3) {
printf("Syntax : %s connect|get url\n", argv[0]);
return 1;
}
len = strlen(argv[2]);
connect = strcmp("connect", argv[1]) == 0 ? 1 : 0;
printf("Parsing %s, connect %d\n", argv[2], connect);
http_parser_url_init(&u);
result = http_parser_parse_url(argv[2], len, connect, &u);
if (result != 0) {
printf("Parse error : %d\n", result);
return result;
}
printf("Parse ok, result : \n");
dump_url(argv[2], &u);
return 0;
}

File diff suppressed because it is too large.

View File

@ -1,111 +0,0 @@
# This file is used with the GYP meta build system.
# http://code.google.com/p/gyp/
# To build try this:
# svn co http://gyp.googlecode.com/svn/trunk gyp
# ./gyp/gyp -f make --depth=`pwd` http_parser.gyp
# ./out/Debug/test
{
'target_defaults': {
'default_configuration': 'Debug',
'configurations': {
# TODO: hoist these out and put them somewhere common, because
# RuntimeLibrary MUST MATCH across the entire project
'Debug': {
'defines': [ 'DEBUG', '_DEBUG' ],
'cflags': [ '-Wall', '-Wextra', '-O0', '-g', '-ftrapv' ],
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeLibrary': 1, # static debug
},
},
},
'Release': {
'defines': [ 'NDEBUG' ],
'cflags': [ '-Wall', '-Wextra', '-O3' ],
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeLibrary': 0, # static release
},
},
}
},
'msvs_settings': {
'VCCLCompilerTool': {
},
'VCLibrarianTool': {
},
'VCLinkerTool': {
'GenerateDebugInformation': 'true',
},
},
'conditions': [
['OS == "win"', {
'defines': [
'WIN32'
],
}]
],
},
'targets': [
{
'target_name': 'http_parser',
'type': 'static_library',
'include_dirs': [ '.' ],
'direct_dependent_settings': {
'defines': [ 'HTTP_PARSER_STRICT=0' ],
'include_dirs': [ '.' ],
},
'defines': [ 'HTTP_MAX_HEADER_SIZE=8192', 'HTTP_PARSER_STRICT=0' ],
'sources': [ './http_parser.c', ],
'conditions': [
['OS=="win"', {
'msvs_settings': {
'VCCLCompilerTool': {
# Compile as C++. http_parser.c is actually C99, but C++ is
# close enough in this case.
'CompileAs': 2,
},
},
}]
],
},
{
'target_name': 'http_parser_strict',
'type': 'static_library',
'include_dirs': [ '.' ],
'direct_dependent_settings': {
'defines': [ 'HTTP_PARSER_STRICT=1' ],
'include_dirs': [ '.' ],
},
'defines': [ 'HTTP_MAX_HEADER_SIZE=8192', 'HTTP_PARSER_STRICT=1' ],
'sources': [ './http_parser.c', ],
'conditions': [
['OS=="win"', {
'msvs_settings': {
'VCCLCompilerTool': {
# Compile as C++. http_parser.c is actually C99, but C++ is
# close enough in this case.
'CompileAs': 2,
},
},
}]
],
},
{
'target_name': 'test-nonstrict',
'type': 'executable',
'dependencies': [ 'http_parser' ],
'sources': [ 'test.c' ]
},
{
'target_name': 'test-strict',
'type': 'executable',
'dependencies': [ 'http_parser_strict' ],
'sources': [ 'test.c' ]
}
]
}

View File

@ -1,436 +0,0 @@
/* Copyright Joyent, Inc. and other Node contributors. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef http_parser_h
#define http_parser_h
#ifdef __cplusplus
extern "C" {
#endif
/* Also update SONAME in the Makefile whenever you change these. */
#define HTTP_PARSER_VERSION_MAJOR 2
#define HTTP_PARSER_VERSION_MINOR 8
#define HTTP_PARSER_VERSION_PATCH 0
#include <stddef.h>
#if defined(_WIN32) && !defined(__MINGW32__) && \
(!defined(_MSC_VER) || _MSC_VER<1600) && !defined(__WINE__)
#include <BaseTsd.h>
typedef __int8 int8_t;
typedef unsigned __int8 uint8_t;
typedef __int16 int16_t;
typedef unsigned __int16 uint16_t;
typedef __int32 int32_t;
typedef unsigned __int32 uint32_t;
typedef __int64 int64_t;
typedef unsigned __int64 uint64_t;
#else
#include <stdint.h>
#endif
/* Compile with -DHTTP_PARSER_STRICT=0 to make fewer checks, but run
* faster
*/
#ifndef HTTP_PARSER_STRICT
# define HTTP_PARSER_STRICT 1
#endif
/* Maximum header size allowed. If the macro is not defined
* before including this header then the default is used. To
* change the maximum header size, define the macro in the build
* environment (e.g. -DHTTP_MAX_HEADER_SIZE=<value>). To remove
* the effective limit on the size of the header, define the macro
* to a very large number (e.g. -DHTTP_MAX_HEADER_SIZE=0x7fffffff)
*/
#ifndef HTTP_MAX_HEADER_SIZE
# define HTTP_MAX_HEADER_SIZE (80*1024)
#endif
typedef struct http_parser http_parser;
typedef struct http_parser_settings http_parser_settings;
/* Callbacks should return non-zero to indicate an error. The parser will
* then halt execution.
*
* The one exception is on_headers_complete. In a HTTP_RESPONSE parser
* returning '1' from on_headers_complete will tell the parser that it
* should not expect a body. This is used when receiving a response to a
* HEAD request which may contain 'Content-Length' or 'Transfer-Encoding:
* chunked' headers that indicate the presence of a body.
*
* Returning `2` from on_headers_complete will tell the parser that it should
* expect neither a body nor any further responses on this connection. This is
* useful for handling responses to a CONNECT request which may not contain
* `Upgrade` or `Connection: upgrade` headers.
*
* http_data_cb does not return data chunks. It will be called arbitrarily
* many times for each string. E.g. you might get 10 callbacks for "on_url",
* each providing just a few more characters of data.
*/
typedef int (*http_data_cb) (http_parser*, const char *at, size_t length);
typedef int (*http_cb) (http_parser*);
/* Status Codes */
#define HTTP_STATUS_MAP(XX) \
XX(100, CONTINUE, Continue) \
XX(101, SWITCHING_PROTOCOLS, Switching Protocols) \
XX(102, PROCESSING, Processing) \
XX(200, OK, OK) \
XX(201, CREATED, Created) \
XX(202, ACCEPTED, Accepted) \
XX(203, NON_AUTHORITATIVE_INFORMATION, Non-Authoritative Information) \
XX(204, NO_CONTENT, No Content) \
XX(205, RESET_CONTENT, Reset Content) \
XX(206, PARTIAL_CONTENT, Partial Content) \
XX(207, MULTI_STATUS, Multi-Status) \
XX(208, ALREADY_REPORTED, Already Reported) \
XX(226, IM_USED, IM Used) \
XX(300, MULTIPLE_CHOICES, Multiple Choices) \
XX(301, MOVED_PERMANENTLY, Moved Permanently) \
XX(302, FOUND, Found) \
XX(303, SEE_OTHER, See Other) \
XX(304, NOT_MODIFIED, Not Modified) \
XX(305, USE_PROXY, Use Proxy) \
XX(307, TEMPORARY_REDIRECT, Temporary Redirect) \
XX(308, PERMANENT_REDIRECT, Permanent Redirect) \
XX(400, BAD_REQUEST, Bad Request) \
XX(401, UNAUTHORIZED, Unauthorized) \
XX(402, PAYMENT_REQUIRED, Payment Required) \
XX(403, FORBIDDEN, Forbidden) \
XX(404, NOT_FOUND, Not Found) \
XX(405, METHOD_NOT_ALLOWED, Method Not Allowed) \
XX(406, NOT_ACCEPTABLE, Not Acceptable) \
XX(407, PROXY_AUTHENTICATION_REQUIRED, Proxy Authentication Required) \
XX(408, REQUEST_TIMEOUT, Request Timeout) \
XX(409, CONFLICT, Conflict) \
XX(410, GONE, Gone) \
XX(411, LENGTH_REQUIRED, Length Required) \
XX(412, PRECONDITION_FAILED, Precondition Failed) \
XX(413, PAYLOAD_TOO_LARGE, Payload Too Large) \
XX(414, URI_TOO_LONG, URI Too Long) \
XX(415, UNSUPPORTED_MEDIA_TYPE, Unsupported Media Type) \
XX(416, RANGE_NOT_SATISFIABLE, Range Not Satisfiable) \
XX(417, EXPECTATION_FAILED, Expectation Failed) \
XX(421, MISDIRECTED_REQUEST, Misdirected Request) \
XX(422, UNPROCESSABLE_ENTITY, Unprocessable Entity) \
XX(423, LOCKED, Locked) \
XX(424, FAILED_DEPENDENCY, Failed Dependency) \
XX(426, UPGRADE_REQUIRED, Upgrade Required) \
XX(428, PRECONDITION_REQUIRED, Precondition Required) \
XX(429, TOO_MANY_REQUESTS, Too Many Requests) \
XX(431, REQUEST_HEADER_FIELDS_TOO_LARGE, Request Header Fields Too Large) \
XX(451, UNAVAILABLE_FOR_LEGAL_REASONS, Unavailable For Legal Reasons) \
XX(500, INTERNAL_SERVER_ERROR, Internal Server Error) \
XX(501, NOT_IMPLEMENTED, Not Implemented) \
XX(502, BAD_GATEWAY, Bad Gateway) \
XX(503, SERVICE_UNAVAILABLE, Service Unavailable) \
XX(504, GATEWAY_TIMEOUT, Gateway Timeout) \
XX(505, HTTP_VERSION_NOT_SUPPORTED, HTTP Version Not Supported) \
XX(506, VARIANT_ALSO_NEGOTIATES, Variant Also Negotiates) \
XX(507, INSUFFICIENT_STORAGE, Insufficient Storage) \
XX(508, LOOP_DETECTED, Loop Detected) \
XX(510, NOT_EXTENDED, Not Extended) \
XX(511, NETWORK_AUTHENTICATION_REQUIRED, Network Authentication Required) \
enum http_status
{
#define XX(num, name, string) HTTP_STATUS_##name = num,
HTTP_STATUS_MAP(XX)
#undef XX
};
/* Request Methods */
#define HTTP_METHOD_MAP(XX) \
XX(0, DELETE, DELETE) \
XX(1, GET, GET) \
XX(2, HEAD, HEAD) \
XX(3, POST, POST) \
XX(4, PUT, PUT) \
/* pathological */ \
XX(5, CONNECT, CONNECT) \
XX(6, OPTIONS, OPTIONS) \
XX(7, TRACE, TRACE) \
/* WebDAV */ \
XX(8, COPY, COPY) \
XX(9, LOCK, LOCK) \
XX(10, MKCOL, MKCOL) \
XX(11, MOVE, MOVE) \
XX(12, PROPFIND, PROPFIND) \
XX(13, PROPPATCH, PROPPATCH) \
XX(14, SEARCH, SEARCH) \
XX(15, UNLOCK, UNLOCK) \
XX(16, BIND, BIND) \
XX(17, REBIND, REBIND) \
XX(18, UNBIND, UNBIND) \
XX(19, ACL, ACL) \
/* subversion */ \
XX(20, REPORT, REPORT) \
XX(21, MKACTIVITY, MKACTIVITY) \
XX(22, CHECKOUT, CHECKOUT) \
XX(23, MERGE, MERGE) \
/* upnp */ \
XX(24, MSEARCH, M-SEARCH) \
XX(25, NOTIFY, NOTIFY) \
XX(26, SUBSCRIBE, SUBSCRIBE) \
XX(27, UNSUBSCRIBE, UNSUBSCRIBE) \
/* RFC-5789 */ \
XX(28, PATCH, PATCH) \
XX(29, PURGE, PURGE) \
/* CalDAV */ \
XX(30, MKCALENDAR, MKCALENDAR) \
/* RFC-2068, section 19.6.1.2 */ \
XX(31, LINK, LINK) \
XX(32, UNLINK, UNLINK) \
/* icecast */ \
XX(33, SOURCE, SOURCE) \
enum http_method
{
#define XX(num, name, string) HTTP_##name = num,
HTTP_METHOD_MAP(XX)
#undef XX
};
enum http_parser_type { HTTP_REQUEST, HTTP_RESPONSE, HTTP_BOTH };
/* Flag values for http_parser.flags field */
enum flags
{ F_CHUNKED = 1 << 0
, F_CONNECTION_KEEP_ALIVE = 1 << 1
, F_CONNECTION_CLOSE = 1 << 2
, F_CONNECTION_UPGRADE = 1 << 3
, F_TRAILING = 1 << 4
, F_UPGRADE = 1 << 5
, F_SKIPBODY = 1 << 6
, F_CONTENTLENGTH = 1 << 7
};
/* Map for errno-related constants
*
* The provided argument should be a macro that takes 2 arguments.
*/
#define HTTP_ERRNO_MAP(XX) \
/* No error */ \
XX(OK, "success") \
\
/* Callback-related errors */ \
XX(CB_message_begin, "the on_message_begin callback failed") \
XX(CB_url, "the on_url callback failed") \
XX(CB_header_field, "the on_header_field callback failed") \
XX(CB_header_value, "the on_header_value callback failed") \
XX(CB_headers_complete, "the on_headers_complete callback failed") \
XX(CB_body, "the on_body callback failed") \
XX(CB_message_complete, "the on_message_complete callback failed") \
XX(CB_status, "the on_status callback failed") \
XX(CB_chunk_header, "the on_chunk_header callback failed") \
XX(CB_chunk_complete, "the on_chunk_complete callback failed") \
\
/* Parsing-related errors */ \
XX(INVALID_EOF_STATE, "stream ended at an unexpected time") \
XX(HEADER_OVERFLOW, \
"too many header bytes seen; overflow detected") \
XX(CLOSED_CONNECTION, \
"data received after completed connection: close message") \
XX(INVALID_VERSION, "invalid HTTP version") \
XX(INVALID_STATUS, "invalid HTTP status code") \
XX(INVALID_METHOD, "invalid HTTP method") \
XX(INVALID_URL, "invalid URL") \
XX(INVALID_HOST, "invalid host") \
XX(INVALID_PORT, "invalid port") \
XX(INVALID_PATH, "invalid path") \
XX(INVALID_QUERY_STRING, "invalid query string") \
XX(INVALID_FRAGMENT, "invalid fragment") \
XX(LF_EXPECTED, "LF character expected") \
XX(INVALID_HEADER_TOKEN, "invalid character in header") \
XX(INVALID_CONTENT_LENGTH, \
"invalid character in content-length header") \
XX(UNEXPECTED_CONTENT_LENGTH, \
"unexpected content-length header") \
XX(INVALID_CHUNK_SIZE, \
"invalid character in chunk size header") \
XX(INVALID_CONSTANT, "invalid constant string") \
XX(INVALID_INTERNAL_STATE, "encountered unexpected internal state")\
XX(STRICT, "strict mode assertion failed") \
XX(PAUSED, "parser is paused") \
XX(UNKNOWN, "an unknown error occurred")
/* Define HPE_* values for each errno value above */
#define HTTP_ERRNO_GEN(n, s) HPE_##n,
enum http_errno {
HTTP_ERRNO_MAP(HTTP_ERRNO_GEN)
};
#undef HTTP_ERRNO_GEN
/* Get an http_errno value from an http_parser */
#define HTTP_PARSER_ERRNO(p) ((enum http_errno) (p)->http_errno)
struct http_parser {
/** PRIVATE **/
unsigned int type : 2; /* enum http_parser_type */
unsigned int flags : 8; /* F_* values from 'flags' enum; semi-public */
unsigned int state : 7; /* enum state from http_parser.c */
unsigned int header_state : 7; /* enum header_state from http_parser.c */
unsigned int index : 7; /* index into current matcher */
unsigned int lenient_http_headers : 1;
uint32_t nread; /* # bytes read in various scenarios */
uint64_t content_length; /* # bytes in body (0 if no Content-Length header) */
/** READ-ONLY **/
unsigned short http_major;
unsigned short http_minor;
unsigned int status_code : 16; /* responses only */
unsigned int method : 8; /* requests only */
unsigned int http_errno : 7;
/* 1 = Upgrade header was present and the parser has exited because of that.
* 0 = No upgrade header present.
* Should be checked when http_parser_execute() returns in addition to
* error checking.
*/
unsigned int upgrade : 1;
/** PUBLIC **/
void *data; /* A pointer to get hook to the "connection" or "socket" object */
};
struct http_parser_settings {
http_cb on_message_begin;
http_data_cb on_url;
http_data_cb on_status;
http_data_cb on_header_field;
http_data_cb on_header_value;
http_cb on_headers_complete;
http_data_cb on_body;
http_cb on_message_complete;
/* When on_chunk_header is called, the current chunk length is stored
* in parser->content_length.
*/
http_cb on_chunk_header;
http_cb on_chunk_complete;
};
enum http_parser_url_fields
{ UF_SCHEMA = 0
, UF_HOST = 1
, UF_PORT = 2
, UF_PATH = 3
, UF_QUERY = 4
, UF_FRAGMENT = 5
, UF_USERINFO = 6
, UF_MAX = 7
};
/* Result structure for http_parser_parse_url().
*
* Callers should index into field_data[] with UF_* values iff field_set
* has the relevant (1 << UF_*) bit set. As a courtesy to clients (and
* because we probably have padding left over), we convert any port to
* a uint16_t.
*/
struct http_parser_url {
uint16_t field_set; /* Bitmask of (1 << UF_*) values */
uint16_t port; /* Converted UF_PORT string */
struct {
uint16_t off; /* Offset into buffer in which field starts */
uint16_t len; /* Length of run in buffer */
} field_data[UF_MAX];
};
/* Returns the library version. Bits 16-23 contain the major version number,
* bits 8-15 the minor version number and bits 0-7 the patch level.
* Usage example:
*
* unsigned long version = http_parser_version();
* unsigned major = (version >> 16) & 255;
* unsigned minor = (version >> 8) & 255;
* unsigned patch = version & 255;
* printf("http_parser v%u.%u.%u\n", major, minor, patch);
*/
unsigned long http_parser_version(void);
void http_parser_init(http_parser *parser, enum http_parser_type type);
/* Initialize http_parser_settings members to 0
*/
void http_parser_settings_init(http_parser_settings *settings);
/* Executes the parser. Returns number of parsed bytes. Sets
* `parser->http_errno` on error. */
size_t http_parser_execute(http_parser *parser,
const http_parser_settings *settings,
const char *data,
size_t len);
/* If http_should_keep_alive() in the on_headers_complete or
* on_message_complete callback returns 0, then this should be
* the last message on the connection.
* If you are the server, respond with the "Connection: close" header.
* If you are the client, close the connection.
*/
int http_should_keep_alive(const http_parser *parser);
/* Returns a string version of the HTTP method. */
const char *http_method_str(enum http_method m);
/* Return a string name of the given error */
const char *http_errno_name(enum http_errno err);
/* Return a string description of the given error */
const char *http_errno_description(enum http_errno err);
/* Initialize all http_parser_url members to 0 */
void http_parser_url_init(struct http_parser_url *u);
/* Parse a URL; return nonzero on failure */
int http_parser_parse_url(const char *buf, size_t buflen,
int is_connect,
struct http_parser_url *u);
/* Pause or un-pause the parser; a nonzero value pauses */
void http_parser_pause(http_parser *parser, int paused);
/* Checks if this is the final chunk of the body. */
int http_body_is_final(const http_parser *parser);
/* Change the maximum header size provided at compile time. */
void http_parser_set_max_header_size(uint32_t size);
#ifdef __cplusplus
}
#endif
#endif

deps/http_parser/test.c vendored

File diff suppressed because it is too large.

View File

@ -294,22 +294,6 @@ Specify the file name of the heap profile generated by `--heap-prof`.
Generates a heap snapshot each time the process receives the specified signal.
`signal` must be a valid signal name. Disabled by default.
### `--http-parser=library`
<!-- YAML
added: v11.4.0
-->
Chooses an HTTP parser library. Available values are:
* `llhttp` for https://llhttp.org/
* `legacy` for https://github.com/nodejs/http-parser
The default is `llhttp`, unless otherwise specified when building Node.js.
This flag exists to aid in experimentation with the internal implementation of
the Node.js http parser.
This flag is likely to become a no-op and removed at some point in the future.
### `--icu-data-dir=file`
<!-- YAML
added: v0.11.15

View File

@ -2475,16 +2475,20 @@ Module.createRequireFromPath() is deprecated. Please use
### DEP0131: Legacy HTTP parser
<!-- YAML
changes:
- version: REPLACEME
pr-url: https://github.com/nodejs/node/pull/29589
description: This feature has been removed.
- version: v12.3.0
pr-url: https://github.com/nodejs/node/pull/27498
description: Documentation-only.
-->
Type: Documentation-only
Type: End-of-Life
The legacy HTTP parser, used by default in versions of Node.js prior to 12.0.0,
is deprecated. This deprecation applies to users of the
[`--http-parser=legacy`][] command-line flag.
is deprecated and has been removed in REPLACEME. Prior to REPLACEME, the
`--http-parser=legacy` command-line flag could be used to revert to using the
legacy parser.
<a id="DEP0132"></a>
### DEP0132: worker.terminate() with callback
@ -2514,7 +2518,6 @@ Type: Documentation-only
Prefer [`response.socket`][] over [`response.connection`] and
[`request.socket`][] over [`request.connection`].
[`--http-parser=legacy`]: cli.html#cli_http_parser_library
[`--pending-deprecation`]: cli.html#cli_pending_deprecation
[`--throw-deprecation`]: cli.html#cli_throw_deprecation
[`Buffer.allocUnsafeSlow(size)`]: buffer.html#buffer_class_method_buffer_allocunsafeslow_size

View File

@ -2348,7 +2348,6 @@ Will generate an object similar to:
nghttp2: '1.34.0',
napi: '4',
llhttp: '1.1.1',
http_parser: '2.8.0',
openssl: '1.1.1b',
cldr: '34.0',
icu: '63.1',

View File

@ -53,7 +53,6 @@ is provided below for reference.
"nghttp2": "1.34.0",
"napi": "3",
"llhttp": "1.0.1",
"http_parser": "2.8.0",
"openssl": "1.1.0j"
},
"release": {

View File

@ -164,12 +164,6 @@ The default is
File name of the V8 heap profile generated with
.Fl -heap-prof
.
.It Fl -http-parser Ns = Ns Ar library
Chooses an HTTP parser library. Available values are
.Sy llhttp
or
.Sy legacy .
.
.It Fl -icu-data-dir Ns = Ns Ar file
Specify ICU data load path.
Overrides

View File

@ -24,11 +24,7 @@
const { Math } = primordials;
const { setImmediate } = require('timers');
const { getOptionValue } = require('internal/options');
const { methods, HTTPParser } =
getOptionValue('--http-parser') === 'legacy' ?
internalBinding('http_parser') : internalBinding('http_parser_llhttp');
const { methods, HTTPParser } = internalBinding('http_parser');
const FreeList = require('internal/freelist');
const incoming = require('_http_incoming');

View File

@ -528,8 +528,7 @@
'src/node_env_var.cc',
'src/node_errors.cc',
'src/node_file.cc',
'src/node_http_parser_llhttp.cc',
'src/node_http_parser_traditional.cc',
'src/node_http_parser.cc',
'src/node_http2.cc',
'src/node_i18n.cc',
'src/node_main_instance.cc',
@ -593,7 +592,6 @@
'src/handle_wrap.h',
'src/histogram.h',
'src/histogram-inl.h',
'src/http_parser_adaptor.h',
'src/js_stream.h',
'src/memory_tracker.h',
'src/memory_tracker-inl.h',
@ -608,7 +606,6 @@
'src/node_contextify.h',
'src/node_errors.h',
'src/node_file.h',
'src/node_http_parser_impl.h',
'src/node_http2.h',
'src/node_http2_state.h',
'src/node_i18n.h',
@ -660,7 +657,6 @@
'src/util.h',
'src/util-inl.h',
# Dependency headers
'deps/http_parser/http_parser.h',
'deps/v8/include/v8.h',
# javascript files to make for an even more pleasant IDE experience
'<@(library_files)',

View File

@ -139,7 +139,6 @@
[ 'node_shared_http_parser=="false"', {
'dependencies': [
'deps/http_parser/http_parser.gyp:http_parser',
'deps/llhttp/llhttp.gyp:llhttp'
],
} ],

View File

@ -1,24 +0,0 @@
#ifndef SRC_HTTP_PARSER_ADAPTOR_H_
#define SRC_HTTP_PARSER_ADAPTOR_H_
#ifdef NODE_EXPERIMENTAL_HTTP
# include "llhttp.h"
typedef llhttp_type_t parser_type_t;
typedef llhttp_errno_t parser_errno_t;
typedef llhttp_settings_t parser_settings_t;
typedef llhttp_t parser_t;
#else /* !NODE_EXPERIMENTAL_HTTP */
# include "http_parser.h"
typedef enum http_parser_type parser_type_t;
typedef enum http_errno parser_errno_t;
typedef http_parser_settings parser_settings_t;
typedef http_parser parser_t;
#define HPE_USER HPE_UNKNOWN
#endif /* NODE_EXPERIMENTAL_HTTP */
#endif /* SRC_HTTP_PARSER_ADAPTOR_H_ */

View File

@ -1,12 +1,8 @@
#include "inspector_socket.h"
#include "llhttp.h"
#define NODE_EXPERIMENTAL_HTTP
#include "http_parser_adaptor.h"
#include "util-inl.h"
#define NODE_WANT_INTERNALS 1
#include "base64.h"
#include "util-inl.h"
#include "openssl/sha.h" // Sha-1 hash
@ -479,7 +475,7 @@ class HttpHandler : public ProtocolHandler {
}
void OnData(std::vector<char>* data) override {
parser_errno_t err;
llhttp_errno_t err;
err = llhttp_execute(&parser_, data->data(), data->size());
if (err == HPE_PAUSED_UPGRADE) {
@ -524,14 +520,14 @@ class HttpHandler : public ProtocolHandler {
handler->inspector()->SwitchProtocol(nullptr);
}
static int OnHeaderValue(parser_t* parser, const char* at, size_t length) {
static int OnHeaderValue(llhttp_t* parser, const char* at, size_t length) {
HttpHandler* handler = From(parser);
handler->parsing_value_ = true;
handler->headers_[handler->current_header_].append(at, length);
return 0;
}
static int OnHeaderField(parser_t* parser, const char* at, size_t length) {
static int OnHeaderField(llhttp_t* parser, const char* at, size_t length) {
HttpHandler* handler = From(parser);
if (handler->parsing_value_) {
handler->parsing_value_ = false;
@ -541,17 +537,17 @@ class HttpHandler : public ProtocolHandler {
return 0;
}
static int OnPath(parser_t* parser, const char* at, size_t length) {
static int OnPath(llhttp_t* parser, const char* at, size_t length) {
HttpHandler* handler = From(parser);
handler->path_.append(at, length);
return 0;
}
static HttpHandler* From(parser_t* parser) {
static HttpHandler* From(llhttp_t* parser) {
return node::ContainerOf(&HttpHandler::parser_, parser);
}
static int OnMessageComplete(parser_t* parser) {
static int OnMessageComplete(llhttp_t* parser) {
// Event needs to be fired after the parser is done.
HttpHandler* handler = From(parser);
handler->events_.emplace_back(handler->path_,
@ -589,8 +585,8 @@ class HttpHandler : public ProtocolHandler {
}
bool parsing_value_;
parser_t parser_;
parser_settings_t parser_settings;
llhttp_t parser_;
llhttp_settings_t parser_settings;
std::vector<HttpEvent> events_;
std::string current_header_;
std::map<std::string, std::string> headers_;

View File

@ -54,7 +54,6 @@
V(heap_utils) \
V(http2) \
V(http_parser) \
V(http_parser_llhttp) \
V(inspector) \
V(js_stream) \
V(messaging) \

View File

@ -19,27 +19,22 @@
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// This file is included from 2 files, node_http_parser_traditional.cc
// and node_http_parser_llhttp.cc.
#pragma once
#include "node.h"
#include "node_buffer.h"
#include "util.h"
#include "async_wrap-inl.h"
#include "env-inl.h"
#include "memory_tracker-inl.h"
#include "stream_base-inl.h"
#include "v8.h"
#include "http_parser_adaptor.h"
#include "llhttp.h"
#include <cstdlib> // free()
#include <cstring> // strdup(), strchr()
// This is a binding to http_parser (https://github.com/nodejs/http-parser)
// This is a binding to llhttp (https://github.com/nodejs/llhttp)
// The goal is to decouple sockets from parsing for more javascript-level
// agility. A Buffer is read from a socket and passed to parser.execute().
// The parser then issues callbacks with slices of the data
@ -247,9 +242,7 @@ class Parser : public AsyncWrap, public StreamListener {
int on_headers_complete() {
#ifdef NODE_EXPERIMENTAL_HTTP
header_nread_ = 0;
#endif /* NODE_EXPERIMENTAL_HTTP */
// Arguments for the on-headers-complete javascript callback. This
// list needs to be kept in sync with the actual argument list for
@ -310,11 +303,7 @@ class Parser : public AsyncWrap, public StreamListener {
argv[A_VERSION_MINOR] = Integer::New(env()->isolate(), parser_.http_minor);
bool should_keep_alive;
#ifdef NODE_EXPERIMENTAL_HTTP
should_keep_alive = llhttp_should_keep_alive(&parser_);
#else /* !NODE_EXPERIMENTAL_HTTP */
should_keep_alive = http_should_keep_alive(&parser_);
#endif /* NODE_EXPERIMENTAL_HTTP */
argv[A_SHOULD_KEEP_ALIVE] =
Boolean::New(env()->isolate(), should_keep_alive);
@ -369,9 +358,7 @@ class Parser : public AsyncWrap, public StreamListener {
if (r.IsEmpty()) {
got_exception_ = true;
#ifdef NODE_EXPERIMENTAL_HTTP
llhttp_set_error_reason(&parser_, "HPE_JS_EXCEPTION:JS Exception");
#endif /* NODE_EXPERIMENTAL_HTTP */
return HPE_USER;
}
@ -404,7 +391,6 @@ class Parser : public AsyncWrap, public StreamListener {
return 0;
}
#ifdef NODE_EXPERIMENTAL_HTTP
// Reset nread for the next chunk
int on_chunk_header() {
header_nread_ = 0;
@ -417,8 +403,6 @@ class Parser : public AsyncWrap, public StreamListener {
header_nread_ = 0;
return 0;
}
#endif /* NODE_EXPERIMENTAL_HTTP */
static void New(const FunctionCallbackInfo<Value>& args) {
Environment* env = Environment::GetCurrent(args);
@ -499,8 +483,8 @@ class Parser : public AsyncWrap, public StreamListener {
CHECK(args[0]->IsInt32());
CHECK(args[1]->IsObject());
parser_type_t type =
static_cast<parser_type_t>(args[0].As<Int32>()->Value());
llhttp_type_t type =
static_cast<llhttp_type_t>(args[0].As<Int32>()->Value());
CHECK(type == HTTP_REQUEST || type == HTTP_RESPONSE);
Parser* parser;
@ -526,7 +510,6 @@ class Parser : public AsyncWrap, public StreamListener {
// Should always be called from the same context.
CHECK_EQ(env, parser->env());
#ifdef NODE_EXPERIMENTAL_HTTP
if (parser->execute_depth_) {
parser->pending_pause_ = should_pause;
return;
@ -537,9 +520,6 @@ class Parser : public AsyncWrap, public StreamListener {
} else {
llhttp_resume(&parser->parser_);
}
#else /* !NODE_EXPERIMENTAL_HTTP */
http_parser_pause(&parser->parser_, should_pause);
#endif /* NODE_EXPERIMENTAL_HTTP */
}
@ -647,9 +627,8 @@ class Parser : public AsyncWrap, public StreamListener {
current_buffer_data_ = data;
got_exception_ = false;
parser_errno_t err;
llhttp_errno_t err;
#ifdef NODE_EXPERIMENTAL_HTTP
// Do not allow re-entering `http_parser_execute()`
CHECK_EQ(execute_depth_, 0);
@ -679,31 +658,6 @@ class Parser : public AsyncWrap, public StreamListener {
pending_pause_ = false;
llhttp_pause(&parser_);
}
#else /* !NODE_EXPERIMENTAL_HTTP */
size_t nread = http_parser_execute(&parser_, &settings, data, len);
err = HTTP_PARSER_ERRNO(&parser_);
// Finish()
if (data == nullptr) {
// `http_parser_execute()` returns either `0` or `1` when `len` is 0
// (part of the finishing sequence).
CHECK_EQ(len, 0);
switch (nread) {
case 0:
err = HPE_OK;
break;
case 1:
nread = 0;
break;
default:
UNREACHABLE();
}
// Regular Execute()
} else {
Save();
}
#endif /* NODE_EXPERIMENTAL_HTTP */
// Unassign the 'buffer_' variable
current_buffer_.Clear();
@ -725,7 +679,6 @@ class Parser : public AsyncWrap, public StreamListener {
obj->Set(env()->context(),
env()->bytes_parsed_string(),
nread_obj).Check();
#ifdef NODE_EXPERIMENTAL_HTTP
const char* errno_reason = llhttp_get_error_reason(&parser_);
Local<String> code;
@ -743,12 +696,6 @@ class Parser : public AsyncWrap, public StreamListener {
obj->Set(env()->context(), env()->code_string(), code).Check();
obj->Set(env()->context(), env()->reason_string(), reason).Check();
#else /* !NODE_EXPERIMENTAL_HTTP */
obj->Set(env()->context(),
env()->code_string(),
OneByteString(env()->isolate(),
http_errno_name(err))).Check();
#endif /* NODE_EXPERIMENTAL_HTTP */
return scope.Escape(e);
}
@ -799,13 +746,9 @@ class Parser : public AsyncWrap, public StreamListener {
}
void Init(parser_type_t type) {
#ifdef NODE_EXPERIMENTAL_HTTP
void Init(llhttp_type_t type) {
llhttp_init(&parser_, type, &settings);
header_nread_ = 0;
#else /* !NODE_EXPERIMENTAL_HTTP */
http_parser_init(&parser_, type);
#endif /* NODE_EXPERIMENTAL_HTTP */
url_.Reset();
status_message_.Reset();
num_fields_ = 0;
@ -816,19 +759,16 @@ class Parser : public AsyncWrap, public StreamListener {
int TrackHeader(size_t len) {
#ifdef NODE_EXPERIMENTAL_HTTP
header_nread_ += len;
if (header_nread_ >= per_process::cli_options->max_http_header_size) {
llhttp_set_error_reason(&parser_, "HPE_HEADER_OVERFLOW:Header overflow");
return HPE_USER;
}
#endif /* NODE_EXPERIMENTAL_HTTP */
return 0;
}
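TrackHeader() above leans on llhttp's convention that a non-zero return from a callback aborts parsing, and on llhttp_set_error_reason() supplying the message later read back via llhttp_get_error_reason(). A stripped-down sketch of the same idea outside this class, with an assumed callback name and an assumed 8 KB limit:

    #include "llhttp.h"

    static size_t header_nread = 0;
    static const size_t kMaxHeaderSize = 8 * 1024;  // illustrative limit

    static int OnHeaderField(llhttp_t* parser, const char* at, size_t length) {
      header_nread += length;
      if (header_nread >= kMaxHeaderSize) {
        // The "CODE:reason" form mirrors how the reason string is split into
        // separate `code` and `reason` values when reported to JavaScript.
        llhttp_set_error_reason(parser, "HPE_HEADER_OVERFLOW:Header overflow");
        return HPE_USER;  // Any non-zero value stops llhttp_execute().
      }
      return 0;
    }

    // Registered as: settings.on_header_field = OnHeaderField;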
int MaybePause() {
#ifdef NODE_EXPERIMENTAL_HTTP
CHECK_NE(execute_depth_, 0);
if (!pending_pause_) {
@ -838,12 +778,9 @@ class Parser : public AsyncWrap, public StreamListener {
pending_pause_ = false;
llhttp_set_error_reason(&parser_, "Paused in callback");
return HPE_PAUSED;
#else /* !NODE_EXPERIMENTAL_HTTP */
return 0;
#endif /* NODE_EXPERIMENTAL_HTTP */
}
parser_t parser_;
llhttp_t parser_;
StringPtr fields_[kMaxHeaderFieldsCount]; // header fields
StringPtr values_[kMaxHeaderFieldsCount]; // header values
StringPtr url_;
@ -855,18 +792,16 @@ class Parser : public AsyncWrap, public StreamListener {
Local<Object> current_buffer_;
size_t current_buffer_len_;
const char* current_buffer_data_;
#ifdef NODE_EXPERIMENTAL_HTTP
unsigned int execute_depth_ = 0;
bool pending_pause_ = false;
uint64_t header_nread_ = 0;
#endif /* NODE_EXPERIMENTAL_HTTP */
// These are helper functions for filling `http_parser_settings`, which turn
// a member function of Parser into a C-style HTTP parser callback.
template <typename Parser, Parser> struct Proxy;
template <typename Parser, typename ...Args, int (Parser::*Member)(Args...)>
struct Proxy<int (Parser::*)(Args...), Member> {
static int Raw(parser_t* p, Args ... args) {
static int Raw(llhttp_t* p, Args ... args) {
Parser* parser = ContainerOf(&Parser::parser_, p);
int rv = (parser->*Member)(std::forward<Args>(args)...);
if (rv == 0) {
@ -879,10 +814,10 @@ class Parser : public AsyncWrap, public StreamListener {
typedef int (Parser::*Call)();
typedef int (Parser::*DataCall)(const char* at, size_t length);
static const parser_settings_t settings;
static const llhttp_settings_t settings;
};
const parser_settings_t Parser::settings = {
const llhttp_settings_t Parser::settings = {
Proxy<Call, &Parser::on_message_begin>::Raw,
Proxy<DataCall, &Parser::on_url>::Raw,
Proxy<DataCall, &Parser::on_status>::Raw,
@ -891,25 +826,11 @@ const parser_settings_t Parser::settings = {
Proxy<Call, &Parser::on_headers_complete>::Raw,
Proxy<DataCall, &Parser::on_body>::Raw,
Proxy<Call, &Parser::on_message_complete>::Raw,
#ifdef NODE_EXPERIMENTAL_HTTP
Proxy<Call, &Parser::on_chunk_header>::Raw,
Proxy<Call, &Parser::on_chunk_complete>::Raw,
#else /* !NODE_EXPERIMENTAL_HTTP */
nullptr,
nullptr,
#endif /* NODE_EXPERIMENTAL_HTTP */
};
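This initializer works because the Proxy helpers turn each member function into a plain C function pointer that llhttp can store. A self-contained sketch of the same trampoline trick under simplified assumptions (MiniParser and its members are invented for illustration; the real code uses Node's ContainerOf() helper rather than raw offsetof arithmetic):

    #include "llhttp.h"
    #include <cstddef>

    struct MiniParser {
      llhttp_t parser_;  // llhttp passes a pointer to this back to every callback.

      int on_message_complete() { return 0; }

      // C-compatible trampoline: recover the owning MiniParser from the
      // llhttp_t* embedded in it, then forward to the member function.
      static int OnMessageCompleteRaw(llhttp_t* p) {
        MiniParser* self = reinterpret_cast<MiniParser*>(
            reinterpret_cast<char*>(p) - offsetof(MiniParser, parser_));
        return self->on_message_complete();
      }
    };

    // Usage: settings.on_message_complete = MiniParser::OnMessageCompleteRaw;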
#ifndef NODE_EXPERIMENTAL_HTTP
void InitMaxHttpHeaderSizeOnce() {
const uint32_t max_http_header_size =
per_process::cli_options->max_http_header_size;
http_parser_set_max_header_size(max_http_header_size);
}
#endif /* NODE_EXPERIMENTAL_HTTP */
void InitializeHttpParser(Local<Object> target,
Local<Value> unused,
Local<Context> context,
@ -959,12 +880,9 @@ void InitializeHttpParser(Local<Object> target,
target->Set(env->context(),
FIXED_ONE_BYTE_STRING(env->isolate(), "HTTPParser"),
t->GetFunction(env->context()).ToLocalChecked()).Check();
#ifndef NODE_EXPERIMENTAL_HTTP
static uv_once_t init_once = UV_ONCE_INIT;
uv_once(&init_once, InitMaxHttpHeaderSizeOnce);
#endif /* NODE_EXPERIMENTAL_HTTP */
}
} // anonymous namespace
} // namespace node
NODE_MODULE_CONTEXT_AWARE_INTERNAL(http_parser, node::InitializeHttpParser)

@ -1,21 +0,0 @@
#define NODE_EXPERIMENTAL_HTTP 1
#include "node_http_parser_impl.h"
#include "memory_tracker-inl.h"
#include "node_metadata.h"
#include "util-inl.h"
namespace node {
namespace per_process {
const char* const llhttp_version =
NODE_STRINGIFY(LLHTTP_VERSION_MAJOR)
"."
NODE_STRINGIFY(LLHTTP_VERSION_MINOR)
"."
NODE_STRINGIFY(LLHTTP_VERSION_PATCH);
} // namespace per_process
} // namespace node
NODE_MODULE_CONTEXT_AWARE_INTERNAL(http_parser_llhttp,
node::InitializeHttpParser)

@ -1,21 +0,0 @@
#ifdef NODE_EXPERIMENTAL_HTTP
#undef NODE_EXPERIMENTAL_HTTP
#endif
#include "memory_tracker-inl.h"
#include "node_http_parser_impl.h"
#include "node_metadata.h"
#include "util-inl.h"
namespace node {
namespace per_process {
const char* const http_parser_version =
NODE_STRINGIFY(HTTP_PARSER_VERSION_MAJOR)
"."
NODE_STRINGIFY(HTTP_PARSER_VERSION_MINOR)
"."
NODE_STRINGIFY(HTTP_PARSER_VERSION_PATCH);
} // namespace per_process
} // namespace node
NODE_MODULE_CONTEXT_AWARE_INTERNAL(http_parser, node::InitializeHttpParser)

@ -1,6 +1,7 @@
#include "node_metadata.h"
#include "ares.h"
#include "brotli/encode.h"
#include "llhttp.h"
#include "nghttp2/nghttp2ver.h"
#include "node.h"
#include "util.h"
@ -72,8 +73,12 @@ Metadata::Versions::Versions() {
modules = NODE_STRINGIFY(NODE_MODULE_VERSION);
nghttp2 = NGHTTP2_VERSION;
napi = NODE_STRINGIFY(NAPI_VERSION);
llhttp = per_process::llhttp_version;
http_parser = per_process::http_parser_version;
llhttp =
NODE_STRINGIFY(LLHTTP_VERSION_MAJOR)
"."
NODE_STRINGIFY(LLHTTP_VERSION_MINOR)
"."
NODE_STRINGIFY(LLHTTP_VERSION_PATCH);
brotli =
std::to_string(BrotliEncoderVersion() >> 24) +
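The llhttp entry above is now assembled at compile time from the LLHTTP_VERSION_* macros. A small sketch of the stringify-and-concatenate mechanism this relies on, using made-up macro names and placeholder version numbers rather than the real NODE_STRINGIFY definition:

    #include <cstdio>

    // Two-level expansion: the argument must first expand to its value and
    // only then be turned into a string literal.
    #define STRINGIFY_HELPER(x) #x
    #define STRINGIFY(x) STRINGIFY_HELPER(x)

    #define FAKE_VERSION_MAJOR 2   // placeholder values, not llhttp's real ones
    #define FAKE_VERSION_MINOR 0
    #define FAKE_VERSION_PATCH 1

    // Adjacent string literals concatenate, so this is "2.0.1" at compile time.
    static const char kVersion[] =
        STRINGIFY(FAKE_VERSION_MAJOR) "." STRINGIFY(FAKE_VERSION_MINOR) "."
        STRINGIFY(FAKE_VERSION_PATCH);

    int main() {
      std::printf("%s\n", kVersion);
      return 0;
    }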

@ -31,7 +31,6 @@ namespace node {
V(nghttp2) \
V(napi) \
V(llhttp) \
V(http_parser) \
#if HAVE_OPENSSL
#define NODE_VERSIONS_KEY_CRYPTO(V) V(openssl)
@ -102,8 +101,6 @@ class Metadata {
// Per-process global
namespace per_process {
extern Metadata metadata;
extern const char* const llhttp_version;
extern const char* const http_parser_version;
}
} // namespace node

@ -154,10 +154,6 @@ void EnvironmentOptions::CheckOptions(std::vector<std::string>* errors) {
errors->push_back("either --check or --eval can be used, not both");
}
if (http_parser != "legacy" && http_parser != "llhttp") {
errors->push_back("invalid value for --http-parser");
}
if (!unhandled_rejections.empty() &&
unhandled_rejections != "strict" &&
unhandled_rejections != "warn" &&
@ -362,11 +358,7 @@ EnvironmentOptionsParser::EnvironmentOptionsParser() {
"Generate heap snapshot on specified signal",
&EnvironmentOptions::heap_snapshot_signal,
kAllowedInEnvironment);
AddOption("--http-parser",
"Select which HTTP parser to use; either 'legacy' or 'llhttp' "
"(default: llhttp).",
&EnvironmentOptions::http_parser,
kAllowedInEnvironment);
AddOption("--http-parser", "", NoOp{}, kAllowedInEnvironment);
AddOption("--input-type",
"set module type for string input",
&EnvironmentOptions::module_type,

@ -113,7 +113,6 @@ class EnvironmentOptions : public Options {
bool expose_internals = false;
bool frozen_intrinsics = false;
std::string heap_snapshot_signal;
std::string http_parser = "llhttp";
bool no_deprecation = false;
bool no_force_async_hooks_checks = false;
bool no_warnings = false;

@ -3,7 +3,6 @@
'use strict';
const common = require('../common');
const { internalBinding } = require('internal/test/binding');
const { getOptionValue } = require('internal/options');
// Monkey patch before requiring anything
class DummyParser {
@ -16,9 +15,7 @@ class DummyParser {
}
DummyParser.REQUEST = Symbol();
const binding =
getOptionValue('--http-parser') === 'legacy' ?
internalBinding('http_parser') : internalBinding('http_parser_llhttp');
const binding = internalBinding('http_parser');
binding.HTTPParser = DummyParser;
const assert = require('assert');

@ -3,8 +3,7 @@ const common = require('../common');
const assert = require('assert');
const expected_keys = ['ares', 'brotli', 'modules', 'node',
'uv', 'v8', 'zlib', 'nghttp2', 'napi',
'http_parser', 'llhttp'];
'uv', 'v8', 'zlib', 'nghttp2', 'napi', 'llhttp'];
if (common.hasCrypto) {
expected_keys.push('openssl');
@ -27,7 +26,6 @@ const commonTemplate = /^\d+\.\d+\.\d+(?:-.*)?$/;
assert(commonTemplate.test(process.versions.ares));
assert(commonTemplate.test(process.versions.brotli));
assert(commonTemplate.test(process.versions.llhttp));
assert(commonTemplate.test(process.versions.http_parser));
assert(commonTemplate.test(process.versions.node));
assert(commonTemplate.test(process.versions.uv));
assert(commonTemplate.test(process.versions.zlib));

@ -10,7 +10,6 @@ const { getOptionValue } = require('internal/options');
console.log('pid is', process.pid);
console.log('max header size is', getOptionValue('--max-http-header-size'));
console.log('current http parser is', getOptionValue('--http-parser'));
// Verify that we cannot receive more than 8KB of headers.
@ -33,12 +32,8 @@ function finished(client, callback) {
function fillHeaders(headers, currentSize, valid = false) {
// `llhttp` counts actual header name/value sizes, excluding the whitespace
// and stripped chars.
if (getOptionValue('--http-parser') === 'llhttp') {
// OK, Content-Length, 0, X-CRASH, aaa...
headers += 'a'.repeat(MAX - currentSize);
} else {
headers += 'a'.repeat(MAX - headers.length - 3);
}
// OK, Content-Length, 0, X-CRASH, aaa...
headers += 'a'.repeat(MAX - currentSize);
// Generate valid headers
if (valid) {

@ -5,7 +5,6 @@ const assert = require('assert');
const { spawn } = require('child_process');
const path = require('path');
const testName = path.join(__dirname, 'test-http-max-http-headers.js');
const parsers = ['legacy', 'llhttp'];
const timeout = common.platformTimeout(100);
@ -15,16 +14,59 @@ function test(fn) {
tests.push(fn);
}
parsers.forEach((parser) => {
test(function(cb) {
console.log('running subtest expecting failure');
// Validate that the test fails if the max header size is too small.
const args = ['--expose-internals',
'--max-http-header-size=1024',
testName];
const cp = spawn(process.execPath, args, { stdio: 'inherit' });
cp.on('close', common.mustCall((code, signal) => {
assert.strictEqual(code, 1);
assert.strictEqual(signal, null);
cb();
}));
});
test(function(cb) {
console.log('running subtest expecting success');
const env = Object.assign({}, process.env, {
NODE_DEBUG: 'http'
});
// Validate that the test fails if the max header size is too small.
// Validate that the test now passes if the same limit becomes large enough.
const args = ['--expose-internals',
'--max-http-header-size=1024',
testName,
'1024'];
const cp = spawn(process.execPath, args, {
env,
stdio: 'inherit'
});
cp.on('close', common.mustCall((code, signal) => {
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
cb();
}));
});
// Next, repeat the same checks using NODE_OPTIONS if it is supported.
if (!process.config.variables.node_without_node_options) {
const env = Object.assign({}, process.env, {
NODE_OPTIONS: '--max-http-header-size=1024'
});
test(function(cb) {
console.log('running subtest expecting failure');
// Validate that the test fails if the max header size is too small.
const args = ['--expose-internals',
`--http-parser=${parser}`,
'--max-http-header-size=1024',
testName];
const cp = spawn(process.execPath, args, { stdio: 'inherit' });
const args = ['--expose-internals', testName];
const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });
cp.on('close', common.mustCall((code, signal) => {
assert.strictEqual(code, 1);
@ -34,23 +76,10 @@ parsers.forEach((parser) => {
});
test(function(cb) {
console.log('running subtest expecting success');
const env = Object.assign({}, process.env, {
NODE_DEBUG: 'http'
});
// Validate that the test fails if the max header size is too small.
// Validate that the test now passes if the same limit becomes large enough.
const args = ['--expose-internals',
`--http-parser=${parser}`,
'--max-http-header-size=1024',
testName,
'1024'];
const cp = spawn(process.execPath, args, {
env,
stdio: 'inherit'
});
// Validate that the test now passes if the same limit
// becomes large enough.
const args = ['--expose-internals', testName, '1024'];
const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });
cp.on('close', common.mustCall((code, signal) => {
assert.strictEqual(code, 0);
@ -58,41 +87,7 @@ parsers.forEach((parser) => {
cb();
}));
});
// Next, repeat the same checks using NODE_OPTIONS if it is supported.
if (!process.config.variables.node_without_node_options) {
const env = Object.assign({}, process.env, {
NODE_OPTIONS: `--http-parser=${parser} --max-http-header-size=1024`
});
test(function(cb) {
console.log('running subtest expecting failure');
// Validate that the test fails if the max header size is too small.
const args = ['--expose-internals', testName];
const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });
cp.on('close', common.mustCall((code, signal) => {
assert.strictEqual(code, 1);
assert.strictEqual(signal, null);
cb();
}));
});
test(function(cb) {
// Validate that the test now passes if the same limit
// becomes large enough.
const args = ['--expose-internals', testName, '1024'];
const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });
cp.on('close', common.mustCall((code, signal) => {
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
cb();
}));
});
}
});
}
function runTest() {
const fn = tests.shift();

@ -32,7 +32,6 @@ fi
addlicense "Acorn" "deps/acorn" "$(cat ${rootdir}/deps/acorn/acorn/LICENSE)"
addlicense "Acorn plugins" "deps/acorn-plugins" "$(cat ${rootdir}/deps/acorn-plugins/acorn-class-fields/LICENSE)"
addlicense "c-ares" "deps/cares" "$(tail -n +3 ${rootdir}/deps/cares/LICENSE.md)"
addlicense "HTTP Parser" "deps/http_parser" "$(cat deps/http_parser/LICENSE-MIT)"
if [ -f "${rootdir}/deps/icu/LICENSE" ]; then
# ICU 57 and following. Drop the BOM
addlicense "ICU" "deps/icu" \