Custom streambuf doesn't write anything to the file

I am trying to make a custom streambuf that takes 32-bit characters (UTF-32 code points) and writes them to another ostream object as raw bytes. For example, the code point for 'a' (U+0061) should come out as the four bytes 61 00 00 00 on a little-endian machine.

I suspect the problem is in how I have hooked everything up in main.cpp, though I can't pinpoint the error. My understanding is that ofile_cp allocates its own buffer during construction, and that set_rdbuf() should replace that buffer with my utf32_buffer object mybuf; after that, any output should go through my buffer to the underlying ofstream ofile_c1. But nothing ends up in the file. Can anyone point out the bug?

Here is the code:

main.cpp (uses my custom streambuf)

#include "utf8.h"
#include <fstream>
#include <cstdint>
#include <iostream>

int main()
{
    std::basic_ofstream<my::code_point> ofile_cp("temp1"); // stream whose buffer I want to replace
    std::ofstream ofile_c1("temp3");                       // byte-oriented sink for my buffer
    utf32_buffer mybuf(ofile_c1, 4096);
    ofile_cp.set_rdbuf(&mybuf);                            // should route all output through mybuf

    std::ofstream ofile_c2("temp2");

    char c = 'a';
    ofile_cp << c;
    ofile_c2 << c;

    return 0;
}
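
As a sanity check, the buffer can also be driven without set_rdbuf() at all, by constructing a basic_ostream directly on top of it. The following is only a sketch of mine, not part of the original program; like the code above, it relies on the standard library accepting std::uint32_t as a character type (libstdc++ provides a generic char_traits for this, but the standard only guarantees the char and wchar_t instantiations), and "temp_direct" is a made-up file name:

#include "utf8.h"
#include <fstream>
#include <ostream>

int main()
{
    std::ofstream sink("temp_direct", std::ios::binary);
    utf32_buffer mybuf(sink, 4096);

    // A basic_ostream writes into whatever streambuf it is handed at
    // construction, so no buffer replacement is involved.
    std::basic_ostream<my::code_point> out(&mybuf);
    out.put(my::code_point('a'));
    out.flush();   // flush() calls pubsync(), which reaches utf32_buffer::sync()
}

If this version produces four bytes in the file, the buffer itself works and the problem lies in how it is attached to ofile_cp.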

utf.cpp (defines my custom streambuf)

#include <streambuf>
#include <vector>       //as buffer in utf32_buffer
#include <functional>   //for less_equal()
#include <cassert>      //for assert()
#include <ostream>
#include <cstdint>      //for uint32_t

namespace my
{ 
    typedef std::uint32_t code_point;
}

using namespace my;

class utf32_buffer : public std::basic_streambuf<code_point>
{
public:
    explicit utf32_buffer(std::ostream &sink, std::size_t buff_sz = 256);

private:
    int_type overflow(int_type ch);   // overrides basic_streambuf::overflow
    int sync();                       // overrides basic_streambuf::sync
    bool flush();                     // helper: writes out everything in the put area
    utf32_buffer(const utf32_buffer &);             // non-copyable (C++03 style)
    utf32_buffer &operator=(const utf32_buffer &);

private:
    std::ostream &sink_;
    std::vector<code_point> buffer_;
};

utf32_buffer::utf32_buffer(std::ostream &sink, std::size_t buff_sz) :
    sink_(sink),
    buffer_(buff_sz + 1)    // one extra slot so overflow() always has room for its argument
{
    sink_.clear();
    code_point *base = &buffer_.front();
    setp(base, base + buffer_.size() - 1);  // keep the reserved slot out of the put area
}

utf32_buffer::int_type utf32_buffer::overflow(utf32_buffer::int_type ch)
{
    if (sink_ && ch != traits_type::eof())
    {
        assert(std::less_equal<code_point *>()(pptr(), epptr()));

        // put ch into the slot reserved by the constructor, then flush everything
        *pptr() = traits_type::to_char_type(ch);
        pbump(1);

        if (flush())
        {
            return 0;
        }
    }

    return traits_type::eof();
}

int utf32_buffer::sync()
{
    return flush() ? 0 : -1;
}

bool utf32_buffer::flush()
{
    assert(std::less_equal<code_point *>()(pptr(), epptr()));

    std::ptrdiff_t n = pptr() - pbase();
    pbump(-n);   // reset pptr() back to pbase()

    // size of one code point in bytes: sizeof(code_point), not
    // sizeof(code_point *) -- the latter is the size of a pointer
    const std::size_t cpsz = sizeof(code_point);

    // view the pending code points as raw bytes for the byte-oriented sink;
    // this avoids the undefined behaviour of reading the inactive member of a union
    const char *bytes = reinterpret_cast<const char *>(pbase());

    return bool(sink_.write(bytes, n * cpsz));
}
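
The buffer can likewise be exercised through the public streambuf interface alone, with no stream object on top, which narrows a test down to overflow()/sync()/flush(). Again, this is just a test sketch of mine, assuming the class above is visible (e.g. via the utf8.h header) and with "temp_direct2" as a made-up file name:

#include <fstream>

int main()
{
    std::ofstream sink("temp_direct2", std::ios::binary);
    utf32_buffer buf(sink, 8);

    buf.sputc(my::code_point('a')); // goes into the put area, nothing written yet
    buf.pubsync();                  // forces sync() -> flush() -> sink.write()
}

On a little-endian machine the file should then contain the four bytes 61 00 00 00.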