 
 NEW_LINE = "\n"
 
+
 class SourceFile(object):
     format = None  # set by subclasses
 
@@ -136,10 +137,10 @@ def __init__(
         try:
             self._source = codecs.open(source, mode="r", encoding="utf-8")
         except IOError as exc:
-            default_logger.exception(exc)
+            # default_logger.exception(exc)
             raise ValueError(
-                f'Unable to open source file "{str(source)}": ' \
-                f'{str(exc)}'
+                f'Unable to open source file "{str(source)}": '
+                f'{str(exc)}'
             )
 
         if (
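Every string change in this commit is the same cleanup, shown here first: adjacent string literals inside a parenthesized expression are concatenated by the compiler, so the trailing backslash line continuations were redundant. A minimal sketch with made-up values:

# Inside parentheses, adjacent f-strings concatenate at compile time,
# so the trailing "\" added nothing; values below are illustrative.
source = "data.json"
exc = IOError("No such file or directory")
message = (
    f'Unable to open source file "{str(source)}": '
    f'{str(exc)}'
)
# message == 'Unable to open source file "data.json": No such file or directory'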
@@ -298,8 +299,8 @@ def setup_table(self):
             self.primary_key = primary_key
         elif primary_key != self.primary_key:
             raise RuntimeError(
-                f"Error: table {self.db}.{self.table} primary key was " \
-                f"`{primary_key}` rather than the expected: {self.primary_key}"
+                f"Error: table {self.db}.{self.table} primary key was "
+                f"`{primary_key}` rather than the expected: {self.primary_key}"
             )
 
     def restore_indexes(self, warning_queue):
@@ -411,7 +412,7 @@ def batches(self, batch_size=None, warning_queue=None):
                     yield batch
                     batch = []
 
-        except StopIteration as e:
+        except StopIteration:
             # yield any final batch
             if batch:
                 yield batch
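The `as e` binding was dropped because the exception object was never used. The surrounding generator follows a common batching pattern: accumulate rows until the source signals exhaustion, then flush whatever is left. A stripped-down sketch (the real method pulls rows via `get_line()` and tracks more state):

def batches(rows, batch_size=200):
    """Yield lists of up to batch_size items, flushing any partial batch at EOF."""
    batch = []
    try:
        while True:
            batch.append(next(rows))  # raises StopIteration once exhausted
            if len(batch) == batch_size:
                yield batch
                batch = []
    except StopIteration:
        # yield any final batch
        if batch:
            yield batch

# list(batches(iter(range(5)), batch_size=2)) == [[0, 1], [2, 3], [4]]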
@@ -476,7 +477,7 @@ def read_to_queue(
 
         # - report relevant errors
         except Exception as exc:
-            default_logger.exception(exc)
+            # default_logger.exception(exc)
             error_queue.put(Error(str(exc), traceback.format_exc(), self.name))
             exit_event.set()
             raise
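Commenting out `default_logger.exception(exc)` here (and in `__init__` above) appears to leave the queue as the single reporting path; the handler still records the full traceback, wakes the other workers, and re-raises. The shape of that hand-off, sketched with a generic callable in place of the reader (`Error` in the diff is a record of message, traceback, and file name):

import traceback

def run_reader(job, error_queue, exit_event, name):
    """Run job(), reporting any failure through the shared queue and event."""
    try:
        job()
    except Exception as exc:
        # hand the failure to the coordinating process
        error_queue.put((str(exc), traceback.format_exc(), name))
        # ask the other workers to wind down
        exit_event.set()
        # re-raise so this worker also exits with the error
        raise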
@@ -509,9 +510,9 @@ def fill_buffer(self):
             # double the buffer under the assumption that the documents are too large to fit
             if self._buffer_size == JSON_MAX_BUFFER_SIZE:
                 raise Exception(
-                    f"Error: JSON max buffer size exceeded on file " \
-                    f"{self.name} (from position {self.bytes_processed}). " \
-                    f"Use '--max-document-size' to extend your buffer."
+                    f"Error: JSON max buffer size exceeded on file "
+                    f"{self.name} (from position {self.bytes_processed}). "
+                    f"Use '--max-document-size' to extend your buffer."
                 )
             self._buffer_size = min(self._buffer_size * 2, JSON_MAX_BUFFER_SIZE)
 
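The growth policy here is doubling with a hard cap: a read that cannot fit a single document doubles the buffer, and only a failure at the cap itself is fatal. In isolation, with an illustrative cap (the real `JSON_MAX_BUFFER_SIZE` is defined elsewhere in the file):

JSON_MAX_BUFFER_SIZE = 128 * 1024 * 1024  # illustrative cap, not the real constant

def grow_buffer(size):
    """Double the buffer; fail only if the cap had already been reached."""
    if size == JSON_MAX_BUFFER_SIZE:
        raise Exception("Error: JSON max buffer size exceeded")
    return min(size * 2, JSON_MAX_BUFFER_SIZE)

# 64 KiB -> 128 KiB -> 256 KiB -> ... -> capped, then the next call raises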
@@ -614,8 +615,8 @@ def teardown(self):
614615 else f" and { len (snippit ) - 100 } more characters"
615616 )
616617 raise ValueError (
617- f"Error: JSON array did not end cleanly, " \
618- f"rather with: <<{ snippit [:100 ]} >>{ extra } "
618+ f"Error: JSON array did not end cleanly, "
619+ f"rather with: <<{ snippit [:100 ]} >>{ extra } "
619620 )
620621 self ._buffer_pos += 1
621622
@@ -631,8 +632,8 @@ def teardown(self):
631632 else f" and { len (snippit ) - 100 } more characters"
632633 )
633634 raise ValueError (
634- f"Error: extra data after JSON data: <<{ snippit [:100 ]} >>" \
635- f"{ extra } "
635+ f"Error: extra data after JSON data: <<{ snippit [:100 ]} >>"
636+ f"{ extra } "
636637 )
637638
638639
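This hunk and the previous one build the same diagnostic: quote at most 100 characters of whatever the parser left behind and summarize the remainder instead of dumping it. The truncation logic on its own (`snippit` keeps the spelling used in the diff):

def leftover_message(snippit, prefix="Error: extra data after JSON data"):
    """Quote up to 100 chars of unparsed input, noting how much was cut off."""
    extra = (
        ""
        if len(snippit) <= 100
        else f" and {len(snippit) - 100} more characters"
    )
    return f"{prefix}: <<{snippit[:100]}>>{extra}"

# leftover_message("x" * 250) ends with ">> and 150 more characters"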
@@ -680,8 +681,8 @@ def setup_file(self, warning_queue=None):
         if self.custom_header is not None:
             if not self.no_header_row:
                 warning_queue.put(
-                    f"Ignoring header row on {self.name}: " \
-                    f"{str(self._columns)}"
+                    f"Ignoring header row on {self.name}: "
+                    f"{str(self._columns)}"
                 )
             self._columns = self.custom_header
         elif self.no_header_row:
@@ -691,8 +692,8 @@ def get_line(self):
         raw_row = next(self._reader)
         if len(self._columns) != len(raw_row):
             raise Exception(
-                f"Error: '{self.name}' line {self._reader.line_num} " \
-                f"has an inconsistent number of columns: {str(raw_row)}"
+                f"Error: '{self.name}' line {self._reader.line_num} "
+                f"has an inconsistent number of columns: {str(raw_row)}"
             )
 
         row = {}
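`csv.reader` yields each record as a plain list and exposes a 1-based `line_num`, which is all this validation needs: any row whose length disagrees with the header is rejected before being zipped into a dict. Roughly, without the buffering the real class does:

import csv
import io

def read_rows(name, text, columns):
    """Yield one dict per CSV record, rejecting rows with the wrong arity."""
    reader = csv.reader(io.StringIO(text))
    for raw_row in reader:
        if len(columns) != len(raw_row):
            raise Exception(
                f"Error: '{name}' line {reader.line_num} "
                f"has an inconsistent number of columns: {str(raw_row)}"
            )
        yield dict(zip(columns, raw_row))

# list(read_rows("demo.csv", "1,2\n3\n", ["a", "b"])) raises at line 2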