@@ -103,12 +103,12 @@ Task<bool> Connection::do_read() {
     auto gzip_encoding_requested = accept_encoding.contains(kGzipEncoding) && http_compression_;
 
     RequestData request_data{
-        .request_keep_alive_ = parser.get().keep_alive(),
-        .request_http_version_ = parser.get().version(),
-        .gzip_encoding_requested_ = gzip_encoding_requested,
-        .vary_ = req[boost::beast::http::field::vary],
-        .origin_ = req[boost::beast::http::field::origin],
-        .method_ = req.method(),
+        .request_keep_alive = parser.get().keep_alive(),
+        .request_http_version = parser.get().version(),
+        .gzip_encoding_requested = gzip_encoding_requested,
+        .vary = req[boost::beast::http::field::vary],
+        .origin = req[boost::beast::http::field::origin],
+        .method = req.method(),
     };
 
     if (boost::beast::websocket::is_upgrade(parser.get())) {
@@ -153,7 +153,7 @@ Task<void> Connection::handle_request(const RequestWithStringBody& req, RequestD
 }
 
 Task<void> Connection::handle_preflight(const RequestWithStringBody& req, RequestData& request_data) {
-    boost::beast::http::response<boost::beast::http::string_body> res{boost::beast::http::status::no_content, request_data.request_http_version_};
+    boost::beast::http::response<boost::beast::http::string_body> res{boost::beast::http::status::no_content, request_data.request_http_version};
     std::string vary = req[boost::beast::http::field::vary];
 
     if (vary.empty()) {
@@ -193,7 +193,7 @@ Task<void> Connection::handle_actual_request(const RequestWithStringBody& req, R
         co_return;
     }
 
-    if (http_compression_ && !accept_encoding.empty() && !accept_encoding.contains(kIdentity) && !request_data.gzip_encoding_requested_) {
+    if (http_compression_ && !accept_encoding.empty() && !accept_encoding.contains(kIdentity) && !request_data.gzip_encoding_requested) {
         co_await do_write("unsupported requested compression\n", boost::beast::http::status::unsupported_media_type, request_data, kGzipEncoding);
         co_return;
     }
@@ -217,28 +217,25 @@ Task<void> Connection::handle_actual_request(const RequestWithStringBody& req, R
         co_return;
     }
 
-    request_map_.emplace(request_id_, std::move(request_data));
     auto rsp_content = co_await handler_->handle(req.body(), request_id_);
     if (rsp_content) {
-        // no streaming api
-        co_await do_write(rsp_content->append("\n"), boost::beast::http::status::ok, request_data, request_data.gzip_encoding_requested_ ? kGzipEncoding : "", request_data.gzip_encoding_requested_);
-        auto it = request_map_.find(request_id_);
-        if (it != request_map_.end()) {
-            request_map_.erase(it);
-        }
+        // no streaming
+        co_await do_write(rsp_content->append("\n"), boost::beast::http::status::ok, request_data, request_data.gzip_encoding_requested ? kGzipEncoding : "", request_data.gzip_encoding_requested);
+    } else {
+        request_map_.emplace(request_id_, std::move(request_data));
     }
     request_id_++;
 }
 
 //! Write chunked response headers
 Task<void> Connection::create_chunk_header(RequestData& request_data) {
     try {
-        boost::beast::http::response<boost::beast::http::empty_body> rsp{boost::beast::http::status::ok, request_data.request_http_version_};
+        boost::beast::http::response<boost::beast::http::empty_body> rsp{boost::beast::http::status::ok, request_data.request_http_version};
         rsp.set(boost::beast::http::field::content_type, "application/json");
         rsp.set(boost::beast::http::field::date, get_date_time());
         rsp.chunked(true);
 
-        if (request_data.gzip_encoding_requested_) {
+        if (request_data.gzip_encoding_requested) {
             rsp.set(boost::beast::http::field::content_encoding, kGzipEncoding);
         }
 
@@ -266,7 +263,7 @@ Task<void> Connection::open_stream(uint64_t request_id) {
     auto& request_data = request_data_it->second;
 
     // add chunking supports
-    request_data.chunk_ = std::make_unique<Chunker>();
+    request_data.chunk = std::make_unique<Chunker>();
 
     co_return;
 }
@@ -280,17 +277,17 @@ Task<void> Connection::close_stream(uint64_t request_id) {
     auto& request_data = request_data_it->second;
 
     try {
-        // get chunk remainder and flush it
-        auto [chunk, first_chunk] = request_data.chunk_->get_remainder();
+        // Get remaining chunk and flush it
+        auto [chunk, first_chunk] = request_data.chunk->get_remainder();
         if (first_chunk) {
             if (!chunk.empty()) {
-                // it is first chunk so send full msg without chunking
-                co_await do_write(chunk, boost::beast::http::status::ok, request_data, request_data.gzip_encoding_requested_ ? kGzipEncoding : "", /* to_be_compressed */ false);  // data already compressed if nec
+                // If it is the first chunk, send without chunking
+                co_await do_write(chunk, boost::beast::http::status::ok, request_data, request_data.gzip_encoding_requested ? kGzipEncoding : "", /* to_be_compressed */ false);  // data already compressed if nec
             }
         } else {
-            // already a chunk is generated
+            // A previous chunk was already generated
             if (!chunk.empty()) {
-                // send new one
+                // Send the new one
                 co_await send_chunk(chunk);
             }
             co_await boost::asio::async_write(socket_, boost::beast::http::make_chunk_last(), boost::asio::use_awaitable);
@@ -321,19 +318,19 @@ Task<size_t> Connection::write(uint64_t request_id, std::string_view content, bo
         response.append("\n");
     }
 
-    if (request_data.gzip_encoding_requested_) {
+    if (request_data.gzip_encoding_requested) {
         std::string compressed_content;
-        co_await compress(response.data(), compressed_content);
+        co_await compress(response, compressed_content);
         // queued compressed buffer
-        request_data.chunk_->queue_data(std::move(compressed_content));
+        request_data.chunk->queue_data(compressed_content);
     } else {
         // queued clear buffer
-        request_data.chunk_->queue_data(std::move(response));
+        request_data.chunk->queue_data(response);
     }
 
     // until completed chunk are present
-    while (request_data.chunk_->has_chunks()) {
-        auto [complete_chunk, first_chunk] = request_data.chunk_->get_complete_chunk();
+    while (request_data.chunk->has_chunks()) {
+        auto [complete_chunk, first_chunk] = request_data.chunk->get_complete_chunk();
 
         if (first_chunk) {
             co_await create_chunk_header(request_data);
@@ -363,7 +360,7 @@ Task<size_t> Connection::send_chunk(const std::string& content) {
 Task<void> Connection::do_write(const std::string& content, boost::beast::http::status http_status, RequestData& request_data, std::string_view content_encoding, bool to_be_compressed) {
     try {
         SILK_TRACE << "Connection::do_write response: " << http_status << " content: " << content;
-        boost::beast::http::response<boost::beast::http::string_body> res{http_status, request_data.request_http_version_};
+        boost::beast::http::response<boost::beast::http::string_body> res{http_status, request_data.request_http_version};
 
         if (http_status != boost::beast::http::status::ok) {
             res.set(boost::beast::http::field::content_type, "text/plain");
@@ -373,7 +370,7 @@ Task<void> Connection::do_write(const std::string& content, boost::beast::http::
 
         res.set(boost::beast::http::field::date, get_date_time());
         res.erase(boost::beast::http::field::host);
-        res.keep_alive(request_data.request_keep_alive_);
+        res.keep_alive(request_data.request_keep_alive);
         if (http_status == boost::beast::http::status::ok && !content_encoding.empty()) {
             // Positive response w/ compression required
             res.set(boost::beast::http::field::content_encoding, content_encoding);
@@ -385,7 +382,7 @@ Task<void> Connection::do_write(const std::string& content, boost::beast::http::
                 res.body() = std::move(compressed_content);
             } else {
                 res.content_length(content.size());
-                res.body() = std::move(content);
+                res.body() = content;
             }
 
         } else {
@@ -394,7 +391,7 @@ Task<void> Connection::do_write(const std::string& content, boost::beast::http::
                 res.set(boost::beast::http::field::accept_encoding, content_encoding);  // Indicate the supported encoding
             }
             res.content_length(content.size());
-            res.body() = std::move(content);
+            res.body() = content;
         }
 
         set_cors<boost::beast::http::string_body>(res, request_data);
@@ -464,29 +461,29 @@ Connection::AuthorizationResult Connection::is_request_authorized(const RequestW
 
 template <class Body>
 void Connection::set_cors(boost::beast::http::response<Body>& res, RequestData& request_data) {
-    if (request_data.vary_.empty()) {
+    if (request_data.vary.empty()) {
         res.set(boost::beast::http::field::vary, "Origin");
     } else {
-        auto vary{request_data.vary_};
+        auto vary{request_data.vary};
         res.set(boost::beast::http::field::vary, vary.append("Origin"));
     }
 
-    if (request_data.origin_.empty()) {
+    if (request_data.origin.empty()) {
         return;
     }
 
-    if (!is_origin_allowed(allowed_origins_, request_data.origin_)) {
+    if (!is_origin_allowed(allowed_origins_, request_data.origin)) {
         return;
     }
 
-    if (!is_method_allowed(request_data.method_)) {
+    if (!is_method_allowed(request_data.method)) {
         return;
     }
 
     if (allowed_origins_.at(0) == "*") {
         res.set(boost::beast::http::field::access_control_allow_origin, "*");
     } else {
-        res.set(boost::beast::http::field::access_control_allow_origin, request_data.origin_);
+        res.set(boost::beast::http::field::access_control_allow_origin, request_data.origin);
     }
 }
 