static_assert( __cplusplus > 2020'99 );

#pragma once

#include <Alepha/Alepha.h>

#include <istream>
#include <sstream>
#include <stdexcept>
#include <string>

#include <Alepha/Capabilities.h>
#include <Alepha/template_for.h>
#include <Alepha/string_algorithms.h>
#include <Alepha/Concepts.h>

#include <Alepha/Reflection/tuplizeAggregate.h>

#include "delimiters.h"

namespace Alepha::Hydrogen::IOStreams ::detail:: IStreamable_m
{
    inline namespace exports
    {
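        // Capability tag: a type granted this capability (see <Alepha/Capabilities.h>)
        // opts into the generic line-oriented `operator >>` defined below.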
        struct IStreamable {};
    }

    template< typename T >
    concept IStreamableAggregate= Aggregate< T > and Capability< T, IStreamable >;

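    // Reads one line from `is`, strips any trailing `#` comment, splits the remainder on the
    // stream's configured field delimiter, and streams each token into the corresponding
    // member of the aggregate, in declaration order.  The token count must match the number
    // of aggregate members.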
    std::istream &
    operator >> ( std::istream &is, IStreamableAggregate auto &istreamable )
    {
        std::istream::sentry sentry{ is };

        if( not sentry ) return is;

        std::string line;
        std::getline( is, line );
        const auto commentChar= line.find( "#" );
        if( commentChar != std::string::npos ) line= line.substr( 0, commentChar );  // Strip everything from the '#' onward.

        const auto delim= getDelimiter( fieldDelimiter, is );
        const auto tokens= split( line, delim );

        auto decomposed= Alepha::Reflection::tuplizeAggregate( istreamable );

        if( tokens.size() != std::tuple_size_v< std::decay_t< decltype( decomposed ) > > )
        {
            throw std::runtime_error{ "Field count does not match the number of aggregate members." };
        }

        int index= 0;
        // TODO: Consider the lens system here... but the basic use case seems to be for
        // aggregates, so we'll go with this simple case for now...
        template_for( decomposed ) <=[&]( auto &element )
        {
            std::istringstream iss{ tokens.at( index++ ) };
            iss >> element;
        };

        return is;
    }
}

namespace Alepha::Hydrogen::IOStreams::inline exports::inline IStreamable_m
{
    using namespace detail::IStreamable_m::exports;
}
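
// Usage sketch (illustrative only): given an aggregate that has been granted the IStreamable
// capability through <Alepha/Capabilities.h> (the opt-in mechanism itself is not shown in this
// header), a delimited record line can be read directly into it, assuming the operator is
// visible in the calling scope and ',' is the configured field delimiter:
//
//     struct Point { int x; int y; };  // hypothetical example type, assumed IStreamable-capable
//
//     Point p;
//     std::istringstream record{ "3,4  # trailing comment is ignored" };
//     record >> p;  // p.x == 3, p.y == 4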