
Hello, I've read the asio docs and was really impressed by the library's design. Thank you, Christopher, for this contribution. However, the error handling was very annoying throughout the examples. Consider the handle_read_body function from chat_client.cpp:

  void handle_read_body(const boost::asio::error& error)
  {
    if (!error)
    {
      std::cout.write(read_msg_.body(), read_msg_.body_length());
      std::cout << "\n";
      boost::asio::async_read(socket_,
          boost::asio::buffer(read_msg_.data(), chat_message::header_length),
          boost::bind(&chat_client::handle_read_header, this,
              boost::asio::placeholders::error));
    }
    else
    {
      do_close();
    }
  }

This is a common idiom in the current asio library:

  void some_handler(const boost::asio::error& error)
  {
    if (!error)
    {
      // asynchronous operation succeeded
    }
    else
    {
      // asynchronous operation failed
    }
  }

This reminds me of the motivation for the Null Object pattern: the object has a trivial state that must be checked explicitly in every method. It is often better to apply the degenerate case of the State pattern (the Null Object pattern) and eliminate all these annoying checks from the object's methods. If we apply this to asio's async_* methods, we would provide two handlers: the first for the success case and the second for failure, for example:

  socket_.async_connect(endpoint,
      boost::bind(&chat_client::handle_connect, this),        // success
      boost::bind(&chat_client::handle_connect_error, this,   // fail
          boost::asio::placeholders::error, ++endpoint_iterator));
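Note that this two-handler style would not even require changes inside asio to try out. Below is a minimal sketch of an adapter that hides the success/failure branch behind the current single-handler interface. The names split_handler and make_split_handler are my own invention, not part of the library; the second operator() overload is there because read/write handlers are also passed a bytes_transferred argument.

  #include <cstddef>
  #include <boost/asio.hpp>

  template <typename SuccessHandler, typename ErrorHandler>
  class split_handler
  {
  public:
    split_handler(SuccessHandler on_success, ErrorHandler on_error)
      : on_success_(on_success),
        on_error_(on_error)
    {
    }

    // Connect-style operations invoke handler(error).
    void operator()(const boost::asio::error& error)
    {
      if (!error)
        on_success_();
      else
        on_error_(error);
    }

    // Read/write-style operations invoke handler(error, bytes_transferred).
    void operator()(const boost::asio::error& error, std::size_t /*bytes*/)
    {
      (*this)(error);
    }

  private:
    SuccessHandler on_success_;
    ErrorHandler on_error_;
  };

  // Deduces the handler types, in the spirit of std::make_pair.
  template <typename SuccessHandler, typename ErrorHandler>
  split_handler<SuccessHandler, ErrorHandler>
  make_split_handler(SuccessHandler on_success, ErrorHandler on_error)
  {
    return split_handler<SuccessHandler, ErrorHandler>(on_success, on_error);
  }

Since boost::bind silently ignores extra arguments, an error handler such as boost::bind(&chat_client::do_close, this) can still be passed here even though on_error_ is invoked with the error object.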
I've reworked the chat_client.cpp example under the assumption that such two-handler methods are provided; the result is below. I want to ask the people on this list whether they like this approach better or not.

  class chat_client
  {
  public:
    chat_client(boost::asio::io_service& io_service,
        tcp::resolver::iterator endpoint_iterator)
      : io_service_(io_service),
        socket_(io_service)
    {
      do_async_connect(endpoint_iterator);
    }

    void write(const chat_message& msg)
    {
      io_service_.post(boost::bind(&chat_client::do_write, this, msg));
    }

    void close()
    {
      io_service_.post(boost::bind(&chat_client::do_close, this));
    }

  private:
    void handle_connect()
    {
      do_async_read_header();
    }

    void handle_connect_error(const boost::asio::error& /*error*/,
        tcp::resolver::iterator endpoint_iterator)
    {
      if (endpoint_iterator != tcp::resolver::iterator())
      {
        do_close();
        do_async_connect(endpoint_iterator);
      }
    }

    // helper function to eliminate code duplication
    void do_async_connect(tcp::resolver::iterator endpoint_iterator)
    {
      tcp::endpoint endpoint = *endpoint_iterator;
      socket_.async_connect(endpoint,
          boost::bind(&chat_client::handle_connect, this),
          boost::bind(&chat_client::handle_connect_error, this, // Note here
              boost::asio::placeholders::error, ++endpoint_iterator));
    }

    void handle_read_header()
    {
      if (read_msg_.decode_header())
      {
        boost::asio::async_read(socket_,
            boost::asio::buffer(read_msg_.body(), read_msg_.body_length()),
            boost::bind(&chat_client::handle_read_body, this),
            boost::bind(&chat_client::do_close, this)); // Note here
      }
      else
      {
        do_close();
      }
    }

    void handle_read_body()
    {
      std::cout.write(read_msg_.body(), read_msg_.body_length());
      std::cout << "\n";
      do_async_read_header();
    }

    // helper function to eliminate code duplication
    void do_async_read_header()
    {
      boost::asio::async_read(socket_,
          boost::asio::buffer(read_msg_.data(), chat_message::header_length),
          boost::bind(&chat_client::handle_read_header, this),
          boost::bind(&chat_client::do_close, this)); // Note here
    }

    void do_write(chat_message msg)
    {
      bool write_in_progress = !write_msgs_.empty();
      write_msgs_.push_back(msg);
      if (!write_in_progress)
      {
        do_async_write();
      }
    }

    void handle_write()
    {
      write_msgs_.pop_front();
      if (!write_msgs_.empty())
      {
        do_async_write();
      }
    }

    // helper function to eliminate code duplication
    void do_async_write()
    {
      boost::asio::async_write(socket_,
          boost::asio::buffer(write_msgs_.front().data(),
              write_msgs_.front().length()),
          boost::bind(&chat_client::handle_write, this),
          boost::bind(&chat_client::do_close, this)); // Note here
    }

    void do_close()
    {
      socket_.close();
    }

  private:
    boost::asio::io_service& io_service_;
    tcp::socket socket_;
    chat_message read_msg_;
    chat_message_queue write_msgs_;
  };
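And if anyone wants to experiment with this style against today's interface, the helper functions above could be written with the hypothetical make_split_handler from my earlier sketch, for example:

  // Same behaviour as the proposed two-handler async_read above,
  // but expressed with the adapter on the current interface.
  void do_async_read_header()
  {
    boost::asio::async_read(socket_,
        boost::asio::buffer(read_msg_.data(), chat_message::header_length),
        make_split_handler(
            boost::bind(&chat_client::handle_read_header, this),
            boost::bind(&chat_client::do_close, this)));
  }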