Parse a file using C++ and load the values into a structure

I have a file with lines like the following:

pc=1 ct=1 av=112 cv=1100 cp=1700 rec=2 p=10001 g=0 a=0 sz=5 cr=200
pc=1 ct=1 av=113 cv=1110 cp=1800 rec=2 p=10001 g=0 a=10 sz=5 cr=200

etc. I want to parse this, take the key/value pairs and put them into a structure:

struct pky
{
    // member fields; types assumed here: plain ints for the ids/counters, a char for g
    int  a_id;
    int  sz_id;
    int  cr_id;
    int  cp_id;
    int  cv_id;
    int  ct_id;
    int  fr;
    char g;
    int  a;
    int  pc;
    int  p_id;

    pky() :
      a_id(0),
      sz_id(0),
      cr_id(0),
      cp_id(0),
      cv_id(0),
      ct_id(0),
      fr(0),
      g('U'),
      a(0),
      pc(0),
      p_id(0)
    { }
};

in which either all of the fields are set from a line, or some may be omitted.

How can I write a C++ class that does this? I am new to C++ and do not know of any functions or libraries that would do the job.

Each line must be processed; the structure is filled from one line at a time and used before it is cleared for the next line. The structure will later be passed as a parameter to a function.
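Roughly, the loop I have in mind looks like this (process() and the file name are just placeholders for my real function and input file):

#include <fstream>
#include <iostream>
#include <string>

// assumes the pky struct shown above is visible here

void process(const pky& rec)          // placeholder for the real consumer function
{
    std::cout << rec.pc << ' ' << rec.p_id << '\n';
}

int main()
{
    std::ifstream file("data.txt");   // placeholder file name
    std::string line;
    while (std::getline(file, line))
    {
        pky rec;                      // fresh, default-initialized structure per line
        // ... parsing of `line` into `rec` goes here ...
        process(rec);                 // used before the next line replaces it
    }
}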

+3
5 answers

You can do something like this:

#include <fstream>
#include <map>
#include <sstream>
#include <string>

std::string line;
std::map<std::string, std::string> props;
std::ifstream file("foo.txt");
while(std::getline(file, line)) {
    std::string token;
    std::istringstream tokens(line);
    while(tokens >> token) {
        std::size_t pos = token.find('=');
        if(pos != std::string::npos) {
            props[token.substr(0, pos)] = token.substr(pos + 1);
        }
    }

    /* work with those keys/values by doing props["name"] */
    Line l(props["pc"], props["ct"], ...);

    /* clear the map for the next line */
    props.clear();
}

Hope this is helpful. The Line struct could look like this:

struct Line { 
    std::string pc, ct; 
    Line(std::string const& pc, std::string const& ct):pc(pc), ct(ct) {

    }
};

A couple of notes on the tokenizing. The loop

while(tokens >> token) {

reads whitespace-separated tokens. If your tokens were instead separated by some other character, say a semicolon, you would use:

while(std::getline(tokens, token, ';')) {

If the values are always integers, you may also want to convert them while parsing instead of storing them as strings. In that case the inner loop

    std::string token;
    std::istringstream tokens(line);
    while(tokens >> token) {
        std::size_t pos = token.find('=');
        if(pos != std::string::npos) {
            props[token.substr(0, pos)] = token.substr(pos + 1);
        }
    }

could be replaced with:

    int value;
    std::string key;
    std::istringstream tokens(line);
    while(tokens >> std::ws && std::getline(tokens, key, '=') && 
          tokens >> std::ws >> value) {
            props[key] = value;
    }

std::ws skips leading whitespace. The map would then be declared as

std::map<std::string, int> props;

that is, with int as the mapped type instead of std::string, since the values are converted while parsing.
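To connect this back to the pky structure from the question, a small helper can copy the parsed values out of the map; keys missing from a line simply keep the constructor defaults. This is only a sketch assuming the int version of the map above and guessing the same key-to-member mapping as the answer further down (e.g. av -> a, rec -> fr); adjust it to your real field names:

#include <map>
#include <string>

pky to_pky(const std::map<std::string, int>& props)
{
    pky v;
    // return the parsed value for key, or fallback if the key was omitted on this line
    auto get = [&](const char* key, int fallback) {
        auto it = props.find(key);
        return it != props.end() ? it->second : fallback;
    };
    v.pc    = get("pc", 0);
    v.ct_id = get("ct", 0);
    v.a     = get("av", 0);   // guessing av= maps to a
    v.cv_id = get("cv", 0);
    v.cp_id = get("cp", 0);
    v.fr    = get("rec", 0);  // guessing rec= maps to fr
    v.p_id  = get("p", 0);
    v.a_id  = get("a", 0);
    v.sz_id = get("sz", 0);
    v.cr_id = get("cr", 0);
    // g is a char in the struct and is left at its default here
    return v;
}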

+10

Another option is to give pky its own stream extraction operator:

#include <string>
#include <fstream>
#include <sstream>
#include <istream>
#include <limits>
#include <vector>
#include <algorithm>
#include <iterator>

std::istream& operator>> (std::istream& str,pky& value)
{
    std::string line;
    std::getline(str,line);

    std::stringstream dataStr(line);

    static const std::streamsize max = std::numeric_limits<std::streamsize>::max();

    // Code assumes the ordering is always as follows
    // pc=1 ct=1 av=112 cv=1100 cp=1700 rec=2 p=10001 g=0 a=0 sz=5 cr=200
    dataStr.ignore(max,'=') >> value.pc;
    dataStr.ignore(max,'=') >> value.ct_id;
    dataStr.ignore(max,'=') >> value.a; // Guessing av=
    dataStr.ignore(max,'=') >> value.cv_id;
    dataStr.ignore(max,'=') >> value.cp_id;
    dataStr.ignore(max,'=') >> value.fr; // Guessing rec=
    dataStr.ignore(max,'=') >> value.p_id;
    dataStr.ignore(max,'=') >> value.g;
    dataStr.ignore(max,'=') >> value.a_id;
    dataStr.ignore(max,'=') >> value.sz_id;
    dataStr.ignore(max,'=') >> value.cr_id;

    return str;
}

int main()
{
    std::ifstream  file("plop");

    std::vector<pky>  v;
    pky data;

    while(file >> data)
    {
        // Do something with data
        v.push_back(data);
    }

    // Even use the istream_iterators
    std::ifstream    file2("plop2");
    std::vector<pky> v2;

    std::copy(std::istream_iterator<pky>(file2),
              std::istream_iterator<pky>(),
              std::back_inserter(v2)
             );
}
+4

Here is a generic tokenizer you can reuse for this kind of task. Split the input on spaces, then split each token on '=' and convert the value part to an int before storing it in a map:

#include <sstream>
#include <string>
#include <vector>
#include <map>

using namespace std;

vector<string> Tokenize(const string &str, const string &delim)
{
    vector<string> tokens;

    size_t p0 = 0, p1 = string::npos;
    while(p0 != string::npos)
    {
        p1 = str.find_first_of(delim, p0);
        if(p1 != p0)
        {
            string token = str.substr(p0, p1 - p0);
            tokens.push_back(token);
        }
        p0 = str.find_first_not_of(delim, p1);
    }

    return tokens;
}

int main()
{
    string data = "pc=1 ct=1 av=112 cv=1100 cp=1700 rec=2 p=10001 g=0 a=0 sz=5 cr=200 pc=1 ct=1 av=113 cv=1110 cp=1800 rec=2 p=10001 g=0 a=10 sz=5 cr=200";
    vector<string> entries = Tokenize(data, " ");
    map<string, int> items;

    for (size_t i = 0; i < entries.size(); ++i)
    {
        string item = entries[i];

        size_t pos = item.find_first_of('=');
        if(pos == string::npos)
            continue;

        string key = item.substr(0, pos);
        int value;
        stringstream stream(item.substr(pos + 1));
        stream >> value;
        items.insert (pair<string, int>(key, value));
    }

}
+2

If you do not mind a more C-style approach, read the file into a buffer, for example with fread(), or one line at a time.

Then scan the buffer "manually": use strchr() to find each '=', convert the text that follows it with atoi() to an int, and look at the key in front of the '=' to decide which value you are dealing with.

Finally, assign each converted value to the corresponding member of the structure.
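A minimal sketch of that idea (reading one line at a time with fgets() rather than the whole file with fread(), just to keep it short; buffer size and file name are arbitrary):

#include <cctype>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <map>
#include <string>

int main()
{
    std::FILE* f = std::fopen("foo.txt", "r");
    if (!f)
        return 1;

    char line[512];
    while (std::fgets(line, sizeof line, f))
    {
        std::map<std::string, int> props;
        char* p = line;
        while ((p = std::strchr(p, '=')) != 0)
        {
            // walk back from '=' to the start of the key
            char* keyBegin = p;
            while (keyBegin > line && !std::isspace(static_cast<unsigned char>(keyBegin[-1])))
                --keyBegin;
            props[std::string(keyBegin, p)] = std::atoi(p + 1);  // number right after '='
            ++p;                                                 // move past this '='
        }
        // props now holds the key/value pairs of this line; fill the structure from it
    }
    std::fclose(f);
}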

+1

What you are being taught here are monsters.

http://en.wikipedia.org/wiki/Scanf

Do not use this family of functions to extract strings from untrusted data, but as long as you either trust the data or only extract numbers, why not.

If you are familiar with regular expressions from another language, use std::tr1::regex or boost::regex; they are much the same. If you are not, you will do yourself a service by reading up on them.
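For completeness, a sketch of the sscanf() route, under the assumption that the fields always appear in exactly this order and that g is a single character (neither of which may hold for your real data); the pky_line struct here is only an illustration, not the questioner's pky:

#include <cstdio>

struct pky_line            // illustrative only; field names follow the file's keys
{
    int pc, ct, av, cv, cp, rec, p, a, sz, cr;
    char g;
};

bool parse_line(const char* line, pky_line& out)
{
    int n = std::sscanf(line,
        "pc=%d ct=%d av=%d cv=%d cp=%d rec=%d p=%d g=%c a=%d sz=%d cr=%d",
        &out.pc, &out.ct, &out.av, &out.cv, &out.cp, &out.rec,
        &out.p, &out.g, &out.a, &out.sz, &out.cr);
    return n == 11;        // all eleven fields must match
}

int main()
{
    pky_line rec;
    const char* sample = "pc=1 ct=1 av=112 cv=1100 cp=1700 rec=2 p=10001 g=0 a=0 sz=5 cr=200";
    if (parse_line(sample, rec))
        std::printf("pc=%d p=%d g=%c\n", rec.pc, rec.p, rec.g);
}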

+1

Source: https://habr.com/ru/post/1699263/
