Reading a large JSON file in Python (raw_decode)

I am trying to read a large JSON file (data.json) in Python. The file contains several JSON objects, so parsing it will produce multiple Python dictionaries (how many is not known in advance), which is why I used decoder.raw_decode() and a generator. Below is the code:

import json
import pprint
import io

def parse():

    with open('data.json',encoding='utf-8') as jfile:
        try:
            while True:
                decoder = json.JSONDecoder()
                obj, idx = decoder.raw_decode(jfile)
                yield obj
        except ValueError as e:
            print(e)
            pass
        else:
            print("aha")


def main():
    imputd=parse()
    if imputd: 
        while True:
            try:
                print(str(next(imputd)).readlines())
            except StopIteration as e:
                print(e)
                break

main()

I get an error:

Traceback (most recent call last):
  File "H:\Document\Python\j10.py", line 57, in <module>
    main()
  File "H:\Document\Python\j10.py", line 36, in main
    print(str(next(imputd)).readlines())
  File "H:\Document\Python\j10.py", line 21, in parse
    obj, idx = decoder.raw_decode(jfile)
  File "C:\Python34\lib\json\decoder.py", line 360, in raw_decode
    obj, end = self.scan_once(s, idx)
TypeError: first argument must be a string, not _io.TextIOWrapper

I edited the code based on Martin's answer:

import json
import io



file=open('data.json.txt')
def readin():
    return file.read(2000)


def parse():
    decoder = json.JSONDecoder()
    buffer = ''    
    for chunk in iter(readin, ''):
        buffer += chunk
        while buffer:
            try:
                result, index = decoder.raw_decode(buffer)
                yield result
                buffer = buffer[index:]
            except ValueError:
                 # Not enough data to decode, read more
                break

def main():
    imputd=parse()
    if imputd: 
        while True:
            try:
                print(str(next(imputd)).readlines())
            except StopIteration as e:
                print(e)
                break

main()

and I get a UnicodeDecodeError:

Traceback (most recent call last):
  File "H:\Document\Python\j11.py", line 35, in <module>
    main()
  File "H:\Document\Python\j11.py", line 30, in main
    print(str(next(imputd)).readlines())
  File "H:\Document\Python\j11.py", line 14, in parse
    for chunk in iter(readin, ''):
  File "H:\Document\Python\j11.py", line 8, in readin
    return file.read(2000)
  File "C:\Python34\lib\encodings\cp1252.py", line 23, in decode
    return codecs.charmap_decode(input,self.errors,decoding_table)[0]
UnicodeDecodeError: 'charmap' codec can't decode byte 0x9d in position 4217: character maps to <undefined>
1 answer

You are passing in a file object, but decoder.raw_decode() only accepts text data. You need to do the reading yourself:

obj, idx = decoder.raw_decode(jfile.read())

You then get Python objects created from the JSON data, so the .readlines() call in your main() loop will also fail.
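For example, a minimal sketch of a main() that just consumes the generator (each yielded value is already a dict or list, so there is nothing to call .readlines() on):

def main():
    # iterate the generator directly; StopIteration is handled by the for loop
    for obj in parse():
        print(obj)  # or pprint.pprint(obj) for nicer output

main()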

raw_decode() is not really meant to be used this way, however; it expects the JSON text to be available up front. Instead, read the file in chunks and keep feeding a buffer to the decoder; whenever there is not yet enough data for a complete JSON document, read some more:

from functools import partial

decoder = json.JSONDecoder()
buffer = ''
# buffersize is the number of characters to read per chunk, e.g. 2000
for chunk in iter(partial(jfile.read, buffersize), ''):
     buffer += chunk
     while buffer:
         try:
             result, index = decoder.raw_decode(buffer)
             yield result
             buffer = buffer[index:]
         except ValueError:
             # Not enough data to decode, read more
             break

This reads the file in chunks; whenever the buffer holds at least one complete JSON document, it is decoded, yielded, and removed from the front of the buffer. If the documents are separated by whitespace, you may also need to strip it from the start of the buffer before decoding, otherwise raw_decode() raises a ValueError.
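Putting it together, here is a minimal sketch. The file name, buffer size, and the encoding='utf-8' argument are assumptions; passing an explicit encoding also avoids the UnicodeDecodeError shown above, which comes from opening the file with the default cp1252 codec on Windows.

import json
from functools import partial

def parse(path='data.json', buffersize=2000):
    decoder = json.JSONDecoder()
    buffer = ''
    # encoding='utf-8' is an assumption about the file; the default locale
    # codec (cp1252) caused the UnicodeDecodeError in the question.
    with open(path, encoding='utf-8') as jfile:
        for chunk in iter(partial(jfile.read, buffersize), ''):
            buffer += chunk
            while buffer:
                # raw_decode() does not skip whitespace between documents
                buffer = buffer.lstrip()
                if not buffer:
                    break
                try:
                    result, index = decoder.raw_decode(buffer)
                except ValueError:
                    # not enough data for a complete object yet; read more
                    break
                yield result
                buffer = buffer[index:]

for obj in parse():
    print(obj)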

