Modify Handler.finished API to remove assumption all responses are one-shot. #81

Open: wants to merge 4 commits into base: main

Changes from 1 commit
154 changes: 154 additions & 0 deletions examples/gradual/gradual.pony
@@ -0,0 +1,154 @@
use "../../http_server"
use "net"
use "valbytes"
use "debug"

actor Main
"""
A simple example of how to send your response body gradually. When sending
large responses you don't want the entire payload in memory at the same
time.
"""
new create(env: Env) =>
for arg in env.args.values() do
if (arg == "-h") or (arg == "--help") then
_print_help(env)
return
end
end

let port = try env.args(1)? else "50000" end
let limit = try env.args(2)?.usize()? else 100 end
let host = "localhost"

// Start the top server control actor.
let server = Server(
TCPListenAuth(env.root),
LoggingServerNotify(env), // notify for server lifecycle events
BackendMaker.create(env) // factory for session-based application backend
where config = ServerConfig( // configuration of Server
where host' = host,
port' = port,
max_concurrent_connections' = limit)
)
    // Everything is initialized. If all goes well, the server is listening
    // on the given port and is thus kept alive by the runtime, as long as
    // its listening socket is not closed.

  fun _print_help(env: Env) =>
    env.err.print(
      """
      Usage:

        gradual [<PORT> = 50000] [<MAX_CONCURRENT_CONNECTIONS> = 100]

      """
    )


class LoggingServerNotify is ServerNotify
  """
  Notification class that is notified about
  important lifecycle events for the Server.
  """
  let _env: Env

  new iso create(env: Env) =>
    _env = env

  fun ref listening(server: Server ref) =>
    """
    Called when the Server starts listening on its host:port pair via TCP.
    """
    try
      (let host, let service) = server.local_address().name()?
      _env.err.print("connected: " + host + ":" + service)
    else
      _env.err.print("Couldn't get local address.")
      _env.exitcode(1)
      server.dispose()
    end

  fun ref not_listening(server: Server ref) =>
    """
    Called when the Server was not able to start listening on its host:port pair via TCP.
    """
    _env.err.print("Failed to listen.")
    _env.exitcode(1)

  fun ref closed(server: Server ref) =>
    """
    Called when the Server is closed.
    """
    _env.err.print("Shutdown.")

class BackendMaker is HandlerFactory
  """
  Factory to instantiate a new HTTP-session-scoped backend instance.
  """
  let _env: Env

  new val create(env: Env) =>
    _env = env

  fun apply(session: Session): Handler^ =>
    BackendHandler.create(_env, session)

class BackendHandler is Handler
  """
  Backend application instance for a single HTTP session.

  Executed on an actor representing the HTTP Session.
  That means we have 1 actor per TCP connection
  (to be exact, it is 2, as the TCPConnection is also an actor).
  """
  let _env: Env
  let _session: Session

  var _response: BuildableResponse
  var stage: (ExHdrs | ExHello | ExWorld) = ExHdrs

  new ref create(env: Env, session: Session) =>
    _env = env
    _session = session
    _response = BuildableResponse(where status' = StatusOK)

  fun ref finished(request_id: RequestID): Bool =>
    """
    Start processing a request.

    Called when the request line, all headers and the full body have been
    received. It will be invoked again for the same request as long as we
    return false, which is what drives the state machine below.

    In this example we use a simple state machine to demonstrate how a
    reply can be sent in chunks, trading some speed for memory efficiency.

    This tradeoff is needed when sending huge files.
    """
    match stage
    | ExHdrs =>
      var response: BuildableResponse iso = BuildableResponse(where status' = StatusOK)
      response.add_header("Content-Type", "text/plain")
      response.add_header("Server", "http_server.pony/0.2.1")
      response.add_header("Content-Length", "12")

      _session.send_start(consume response, request_id)
      stage = ExHello
      return false
    | ExHello =>
      _session.send_chunk("Hello ", request_id)
      stage = ExWorld
      return false
    | ExWorld =>
      _session.send_chunk("World!", request_id)
      _session.send_finished(request_id)
      return true
    end
    true // Never Reached

primitive ExHdrs
primitive ExHello
primitive ExWorld
2 changes: 1 addition & 1 deletion examples/hello_world/main.pony
@@ -103,5 +103,5 @@ class BackendHandler is Handler
     _session.send_raw(_response, request_id)
     _session.send_finished(request_id)

-  fun ref finished(request_id: RequestID) => None
+  // fun ref finished(request_id: RequestID): Bool => true
Review comment (Member):
why is this commented out?


3 changes: 2 additions & 1 deletion examples/httpserver/httpserver.pony
@@ -206,7 +206,7 @@ class BackendHandler is Handler
       )
     end

-  fun ref finished(request_id: RequestID) =>
+  fun ref finished(request_id: RequestID): Bool =>
     """
     Called when the last chunk has been handled and the full request has been received.

@@ -229,4 +229,5 @@ class BackendHandler is Handler
     // Required call to finish request handling
     // if missed out, the server will misbehave
     _session.send_finished(request_id)
+    true

4 changes: 3 additions & 1 deletion http_server/_server_connection.pony
@@ -81,7 +81,9 @@ actor _ServerConnection is (Session & HTTP11RequestHandler)
     Indicates that the last *inbound* body chunk has been sent to
     `_chunk`. This is passed on to the back end.
     """
-    _backend.finished(request_id)
+    if not _backend.finished(request_id) then
+      this._receive_finished(request_id)
+    end

   be _receive_failed(parse_error: RequestParseError, request_id: RequestID) =>
     _backend.failed(parse_error, request_id)
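To make the new control flow concrete, here is a small self-contained toy model in Pony. It is an illustrative sketch, not code from this diff: ToyBackend, ToyStreamer and ToyConnection are invented names, and the only idea taken from the change above is that finished() now returns Bool, with false meaning "not done yet, notify me again". The connection actor re-sends the notification to itself, so each partial step of a multi-shot response runs as its own behaviour dispatch.

// Toy model only; no http_server types are used.
interface ToyBackend
  fun ref finished(): Bool

class ToyStreamer is ToyBackend
  """Pretends to emit one chunk per call and is done after three calls."""
  let _out: OutStream
  var _left: USize = 3

  new iso create(out: OutStream) =>
    _out = out

  fun ref finished(): Bool =>
    _out.print("chunk " + _left.string())
    _left = _left - 1
    _left == 0

actor ToyConnection
  let _backend: ToyBackend

  new create(backend: ToyBackend iso) =>
    _backend = consume backend

  be receive_finished() =>
    // Mirrors the change above: keep re-scheduling ourselves, one behaviour
    // run per step, until the backend reports completion.
    if not _backend.finished() then
      this.receive_finished()
    end

actor Main
  new create(env: Env) =>
    ToyConnection(ToyStreamer(env.out)).receive_finished()

Running this prints three chunks, and the program exits once the backend finally returns true.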
3 changes: 2 additions & 1 deletion http_server/_test_pipelining.pony
@@ -26,7 +26,7 @@ class \nodoc\ val _PipeliningOrderHandlerFactory is HandlerFactory
     object ref is Handler
       let _session: Session = session

-      fun ref finished(request_id: RequestID) =>
+      fun ref finished(request_id: RequestID): Bool =>
         let rid = request_id.string()
         let res = Responses.builder()
           .set_status(StatusOK)
@@ -48,6 +48,7 @@
             0
           )
         )
+        true
     end

class \nodoc\ iso _PipeliningOrderTest is UnitTest
6 changes: 4 additions & 2 deletions http_server/handler.pony
@@ -81,7 +81,7 @@ interface Handler
     fun ref chunk(data: ByteSeq val, request_id: RequestID) =>
       _body = _body + data

-    fun ref finished(request_id: RequestID) =>
+    fun ref finished(request_id: RequestID): Bool =>
       _session.send_raw(
         Responses.builder()
           .set_status(StatusOk)
@@ -96,6 +96,7 @@
         request_id
       )
       _session.send_finished(request_id)
+      true
   ```

   """
@@ -113,11 +114,12 @@ interface Handler
     recent `Request` delivered by an `apply` notification.
     """

-  fun ref finished(request_id: RequestID) =>
+  fun ref finished(request_id: RequestID): Bool =>
     """
     Notification that no more body chunks are coming. Delivery of this HTTP
     message is complete.
     """
+    true

   fun ref cancelled(request_id: RequestID) =>
     """
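For comparison with the default above, here is a condensed sketch of a handler that actually uses the new return value. This is a sketch under assumptions: TwoStepHandler, the header values and the body text are made up, and the Session calls mirror those used in the gradual example earlier in this diff.

use "http_server" // adjust the path; the bundled examples use "../../http_server"

class TwoStepHandler is Handler
  """
  Sketch only: answer a request over two finished() calls instead of one.
  """
  let _session: Session
  var _started: Bool = false

  new ref create(session: Session) =>
    _session = session

  fun ref finished(request_id: RequestID): Bool =>
    if not _started then
      // First call: send status line and headers, then ask to be called
      // again by returning false.
      var res: BuildableResponse iso = BuildableResponse(where status' = StatusOK)
      res.add_header("Content-Type", "text/plain")
      res.add_header("Content-Length", "12")
      _session.send_start(consume res, request_id)
      _started = true
      false
    else
      // Second call: send the body and report completion.
      _session.send_chunk("Hello World!", request_id)
      _session.send_finished(request_id)
      true
    end

A HandlerFactory would hand out one such handler per session, just as BackendMaker does in the gradual example.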
3 changes: 2 additions & 1 deletion http_server/sync_handler.pony
@@ -57,13 +57,14 @@ class SyncHandlerWrapper is Handler
   fun ref chunk(data: ByteSeq val, request_id: RequestID) =>
     _body_buffer = _body_buffer + data

-  fun ref finished(request_id: RequestID) =>
+  fun ref finished(request_id: RequestID): Bool =>
     if not _sent then
       // resetting _body_buffer
       let res = _run_handler(_request, _body_buffer = ByteArrays)
       _session.send_raw(res, request_id)
     end
     _session.send_finished(request_id)
+    true


