
Stream builder notation for strings.

This should be less boilerplate than repeated `lexical_cast`
or `stringify` calls when building strings.
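
For example, a caller can build a string roughly like this (a sketch based on the new IOStreams/Stream.h header, not code from this commit):

    #include <Alepha/IOStreams/Stream.h>

    using Alepha::IOStreams::Stream;
    using Alepha::IOStreams::FinishString;

    // Insert pieces with `operator <<`; `FinishString` ends the expression
    // and yields the accumulated std::string.
    const std::string message= Stream{} << "found " << 3 << " items in " << 2.5 << "s" << FinishString;

    // An rvalue Stream also converts implicitly to std::string.
    const std::string label= Stream{} << "id-" << 42;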
2023-10-25 04:03:39 -04:00
parent bacaa9fcee
commit c2bc8dae2e
5 changed files with 98 additions and 13 deletions

View File

@@ -10,6 +10,7 @@ static_assert( __cplusplus > 2020'00 );
 #include <Alepha/auto_comparable.h>
 #include <Alepha/IOStreams/delimiters.h>
+#include <Alepha/IOStreams/Stream.h>
 namespace
 {
@@ -27,7 +28,7 @@ namespace
 	auto
-	stringify( const Agg &agg, const std::string delim )
+	stringify_specific( const Agg &agg, const std::string delim )
 	{
 		std::ostringstream oss;
 		Alepha::IOStreams::setGlobalFieldDelimiter( "YOU SHOULD NOT SEE THIS" );
@@ -58,6 +59,7 @@ static auto init= Alepha::Utility::enroll <=[]
 {
 	using namespace Alepha::Testing::exports;
 	using namespace Alepha::Testing::literals::test_literals;
+	using namespace Alepha::IOStreams::exports::stream;
 	"Simple OStream (default delimiter)"_test <=TableTest< stringify_default >
 		::Cases
@@ -65,7 +67,24 @@ static auto init= Alepha::Utility::enroll <=[]
 		{ "smoke test", { { 1, 2, 3 } }, { "1\t2\t3" } },
 	};
-	"Simple OStream (specific delimiter)"_test <=TableTest< stringify >
+	"Simple OStream (specific delimiter)"_test <=TableTest< stringify_specific >
+		::Cases
+	{
+		{ "smoke test", { { 1, 2, 3 }, "\t" }, { "1\t2\t3" } },
+		{ "smoke test", { { 1, 2, 3 }, "," }, { "1,2,3" } },
+		{ "smoke test", { { 1, 2, 3 }, ";;" }, { "1;;2;;3" } },
+		{ "smoke test", { { 1, 2, 3 }, ", " }, { "1, 2, 3" } },
+	};
+	"Simple OStream (stream builder)"_test <=TableTest
+	<
+		[]( const Agg agg, const std::string delim )
+		{
+			using Alepha::IOStreams::Stream;
+			using Alepha::IOStreams::setFieldDelimiter;
+			return Stream{} << setFieldDelimiter( delim ) << agg << FinishString;
+		}
+	>
 		::Cases
 	{
 		{ "smoke test", { { 1, 2, 3 }, "\t" }, { "1\t2\t3" } },

IOStreams/Stream.h (new file, 61 lines)
View File

@@ -0,0 +1,61 @@
+static_assert( __cplusplus > 2020'00 );
+#pragma once
+
+#include <Alepha/Alepha.h>
+
+#include <sstream>
+
+#include <Alepha/Concepts.h>
+
+namespace Alepha::Hydrogen::IOStreams ::detail:: stream
+{
+	inline namespace exports
+	{
+		class Stream;
+		enum { FinishString };
+
+		std::string stringify( const Alepha::OStreamable auto &item, Alepha::OStreamable auto && ... params );
+	}
+
+	class exports::Stream
+	{
+		private:
+			// TODO: We need the exception throwing capabilities of the
+			// `boost::lexical_cast` operation. But this stream technique
+			// lets us build strings using stream modifiers and manipulators,
+			// which `boost::lexical_cast` doesn't support.
+			std::ostringstream oss;
+
+		public:
+			Stream &&
+			operator << ( const Alepha::OStreamable auto &t ) &&
+			{
+				oss << t;
+				return std::move( *this );
+			}
+
+			std::string
+			operator << ( decltype( FinishString ) ) &&
+			{
+				return std::move( oss ).str();
+			}
+
+			operator std::string () &&
+			{
+				return std::move( *this ) << FinishString;
+			}
+	};
+
+	inline std::string
+	exports::stringify( const Alepha::OStreamable auto &item, Alepha::OStreamable auto && ... params )
+	{
+		return ( Stream{} << ... << params ) << item << FinishString;
+	}
+}
+
+namespace Alepha::Hydrogen::IOStreams::inline exports::inline stream
+{
+	using namespace detail::stream::exports;
+}
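
A hypothetical use of the variadic `stringify` helper above, assuming the `OStreamable` concept admits standard stream manipulators (manipulator support is the point of the TODO comment in the header):

    #include <iomanip>
    #include <Alepha/IOStreams/Stream.h>

    // The trailing `params` are inserted before `item`, so manipulators
    // passed as extra arguments affect how `item` is formatted.
    const std::string hex= Alepha::IOStreams::stringify( 255, std::hex );                       // "ff"
    const std::string rounded= Alepha::IOStreams::stringify( 3.14159, std::setprecision( 3 ) ); // "3.14"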

View File

@@ -89,10 +89,11 @@ static_assert( __cplusplus > 2020'00 );
 #include <boost/lexical_cast.hpp>
+#include <Alepha/Alepha.h>
 #include <Alepha/Concepts.h>
 #include <Alepha/string_algorithms.h>
+#include <Alepha/IOStreams/Stream.h>
 #include <Alepha/Utility/evaluation_helpers.h>
 namespace Alepha::inline Cavorite ::detail:: program_options
@@ -265,7 +266,7 @@ namespace Alepha::inline Cavorite ::detail:: program_options
 				// variable's value in C++ at runtime.
 				auto defaultBuilder= [&value]
 				{
-					auto text= boost::lexical_cast< std::string >( value );
+					auto text= IOStreams::stringify( value );
 					if( text.find_first_of( " \n\t" ) != std::string::npos )
 					{

View File

@@ -29,6 +29,8 @@ static_assert( __cplusplus > 2020'00 );
 #include <Alepha/function_traits.h>
 #include <Alepha/template_for_each.h>
+#include <Alepha/IOStreams/Stream.h>
 #include <Alepha/Utility/evaluation_helpers.h>
 #include <Alepha/TotalOrder.h>
@@ -122,7 +124,7 @@ namespace Alepha::Hydrogen::Testing ::detail:: table_test
 			}
 			else if constexpr( Meta::is_ostreamable_v< T > )
 			{
-				return boost::lexical_cast< std::string >( v );
+				return IOStreams::stringify( v );
 			}
 			else if constexpr( Meta::is_optional_v< T > )
 			{
@@ -269,11 +271,11 @@ namespace Alepha::Hydrogen::Testing ::detail:: table_test
 			const auto result= witness == expected;
 			if( not result )
 			{
 				std::cout << " " << C::testFail << "FAILED CASE" << resetStyle << ": " << comment << std::endl;
 				++failureCount;
 				printDebugging< outputMode >( witness, expected );
 			}
 			else std::cout << " " << C::testPass << "PASSED CASE" << resetStyle << ": " << comment << std::endl;
 		}
 		return failureCount;

View File

@@ -12,10 +12,12 @@ static_assert( __cplusplus > 2020'00 );
 #include <vector>
 #include <string>
 #include <optional>
-#include <tuplle>
+#include <tuple>
 #include <boost/lexical_cast.hpp>
+#include <Alepha/IOStreams/Stream.h>
 #include "meta.h"
 #include "error.h"
 #include "Concepts.h"
@@ -48,10 +50,10 @@ namespace Alepha::Hydrogen ::detail:: tuplize_args
 		explicit
 		ArityMismatchError( const std::size_t remaining, const std::size_t processed, const std::string &clarification= "" )
 			: remaining_( remaining ), processed_( processed ), clarification( clarification ),
-				message( ( clarification.empty() ? "" : ( clarification + ": " ) )
-						+ "Argument count mismatch. "
-						+ boost::lexical_cast< std::string >( remaining ) + " remaining "
-						+ boost::lexical_cast< std::string >( processed ) + " processed" ) {}
+				message( IOStreams::Stream{} << ( clarification.empty() ? "" : ( clarification + ": " ) )
+						<< "Argument count mismatch. "
+						<< remaining << " remaining "
+						<< processed << " processed" ) {}
 		const char *
@@ -109,7 +111,7 @@ namespace Alepha::Hydrogen ::detail:: tuplize_args
 				const std::vector< std::string > rv;
 				std::transform( begin( args ) + offset, end( args ), back_inserter( rv ),
-						boost::lexical_cast< type, std::string > );
+						IOStreams::stringify< type > );
 				return std::tuple_cat( std::tuple{ arv }, tuplizeArgsBackend( tail{}, args, offset + rv.size() ) );
 			}
 			else