Skip to content

Commit

Permalink
Fix performance overhead due to logging (#21)
Browse files Browse the repository at this point in the history
* fix: properly detect logging level

* perf: eliminate string formatting overhead in logging calls

* style: apply autopep8
  • Loading branch information
kmnhan authored Nov 6, 2024
1 parent 43fccf5 commit 2f64d27
Show file tree
Hide file tree
Showing 5 changed files with 81 additions and 80 deletions.
68 changes: 34 additions & 34 deletions igor2/binarywave.py
Original file line number Diff line number Diff line change
Expand Up @@ -419,8 +419,8 @@ def unpack(self, stream):
)
except BaseException:
logger.error(
'could not reshape data from {} to {}'.format(
self.shape, data_b))
'could not reshape data from %s to %s',
self.shape, data_b)
raise
return data

Expand Down Expand Up @@ -602,8 +602,8 @@ def post_unpack(self, parents, data):
wdata = wdata.reshape(shape)
except ValueError:
logger.error(
'could not reshape strings from {} to {}'.format(
shape, wdata.shape))
'could not reshape strings from %s to %s',
shape, wdata.shape)
raise
wave_data['wData'] = wdata

Expand All @@ -620,8 +620,8 @@ def post_unpack(self, parents, data):
need_to_reorder_bytes = _need_to_reorder_bytes(version)
wave_structure.byte_order = _byte_order(need_to_reorder_bytes)
logger.debug(
'get byte order from version: {} (reorder? {})'.format(
wave_structure.byte_order, need_to_reorder_bytes))
'get byte order from version: %s (reorder? %s)',
wave_structure.byte_order, need_to_reorder_bytes)
else:
need_to_reorder_bytes = False

Expand All @@ -639,8 +639,8 @@ def post_unpack(self, parents, data):
'invalid binary wave version: {}'.format(version))

if wave_structure.fields[-1].format != old_format:
logger.debug('change wave headers from {} to {}'.format(
old_format, wave_structure.fields[-1].format))
logger.debug('change wave headers from %s to %s',
old_format, wave_structure.fields[-1].format)
wave_structure.setup()
elif need_to_reorder_bytes:
wave_structure.setup()
Expand Down Expand Up @@ -758,40 +758,40 @@ def post_unpack(self, parents, data):
name='Wave5',
fields=[
_Field(
BinHeader5, 'bin_header',
help='Binary wave header'),
BinHeader5, 'bin_header',
help='Binary wave header'),
_Field(
WaveHeader5, 'wave_header',
help='Wave header'),
WaveHeader5, 'wave_header',
help='Wave header'),
DynamicWaveDataField5(
'f', 'wData',
help='The start of the array of waveform data.',
count=0, array=True),
'f', 'wData',
help='The start of the array of waveform data.',
count=0, array=True),
DynamicDependencyFormulaField(
'c', 'formula',
help='Optional wave dependency formula.',
count=0, array=True),
'c', 'formula',
help='Optional wave dependency formula.',
count=0, array=True),
DynamicWaveNoteField(
'c', 'note',
help='Optional wave note data.', count=0,
array=True),
'c', 'note',
help='Optional wave note data.', count=0,
array=True),
DynamicDataUnitsField(
'c', 'data_units',
help='Optional extended data units data.',
count=0, array=True),
'c', 'data_units',
help='Optional extended data units data.',
count=0, array=True),
DynamicDimensionUnitsField(
'c', 'dimension_units',
help='Optional dimension label data',
count=0, array=True),
'c', 'dimension_units',
help='Optional dimension label data',
count=0, array=True),
DynamicLabelsField(
'c', 'labels',
help="Optional dimension label data",
count=0, array=True),
'c', 'labels',
help="Optional dimension label data",
count=0, array=True),
DynamicStringIndicesDataField(
'P', 'sIndices',
help='Dynamic string indices for text waves.',
count=0, array=True),
])
'P', 'sIndices',
help='Dynamic string indices for text waves.',
count=0, array=True),
])

Wave = _DynamicStructure(
name='Wave',
Expand Down
2 changes: 1 addition & 1 deletion igor2/cli/script.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def plot_wave(self, args, wave, title=None):
try:
axes.plot(wave['wave']['wData'], 'r.')
except ValueError as error:
logger.error('error plotting {}: {}'.format(title, error))
logger.error('error plotting %s: %s', title, error)
pass
self._num_plots += 1

Expand Down
16 changes: 8 additions & 8 deletions igor2/packed.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def load(filename, strict=True, ignore_unknown=True, initial_byte_order=None):
filesystem : dict
The filesystem structure of the packed experiment file.
"""
logger.debug('loading a packed experiment file from {}'.format(filename))
logger.debug('loading a packed experiment file from %s', filename)
records = []
if hasattr(filename, 'read'):
f = filename # filename is actually a stream object
Expand All @@ -92,14 +92,14 @@ def load(filename, strict=True, ignore_unknown=True, initial_byte_order=None):
need_to_reorder = _need_to_reorder_bytes(header['version'])
byte_order = initial_byte_order = _byte_order(need_to_reorder)
logger.debug(
'get byte order from version: {} (reorder? {})'.format(
byte_order, need_to_reorder))
'get byte order from version: %s (reorder? %s)',
byte_order, need_to_reorder)
if need_to_reorder:
PackedFileRecordHeader.byte_order = byte_order
PackedFileRecordHeader.setup()
header = PackedFileRecordHeader.unpack_from(b)
logger.debug(
'reordered version: {}'.format(header['version']))
'reordered version: %s', header['version'])
data = bytes(f.read(header['numDataBytes']))
if len(data) < header['numDataBytes']:
raise ValueError(
Expand All @@ -108,16 +108,16 @@ def load(filename, strict=True, ignore_unknown=True, initial_byte_order=None):
).format(len(b), header['numDataBytes']))
record_type = _RECORD_TYPE.get(
header['recordType'] & PACKEDRECTYPE_MASK, _UnknownRecord)
logger.debug('the new record has type {} ({}).'.format(
record_type, header['recordType']))
logger.debug('the new record has type %s (%s).',
record_type, header['recordType'])
if record_type in [_UnknownRecord, _UnusedRecord
] and not ignore_unknown:
raise KeyError('unknown record type {}'.format(
header['recordType']))
records.append(record_type(header, data, byte_order=byte_order))
finally:
logger.debug('finished loading {} records from {}'.format(
len(records), filename))
logger.debug('finished loading %s records from %s',
len(records), filename)
if not hasattr(filename, 'read'):
f.close()

Expand Down
13 changes: 7 additions & 6 deletions igor2/record/variables.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,8 +292,8 @@ def post_unpack(self, parents, data):
need_to_reorder_bytes = _need_to_reorder_bytes(version)
variables_structure.byte_order = _byte_order(need_to_reorder_bytes)
logger.debug(
'get byte order from version: {} (reorder? {})'.format(
variables_structure.byte_order, need_to_reorder_bytes))
'get byte order from version: %s (reorder? %s)',
variables_structure.byte_order, need_to_reorder_bytes)
else:
need_to_reorder_bytes = False

Expand All @@ -307,8 +307,9 @@ def post_unpack(self, parents, data):
'invalid variables record version: {}'.format(version))

if variables_structure.fields[-1].format != old_format:
logger.debug('change variables record from {} to {}'.format(
old_format, variables_structure.fields[-1].format))
logger.debug('change variables record from %s to %s',
old_format, variables_structure.fields[-1].format)

variables_structure.setup()
elif need_to_reorder_bytes:
variables_structure.setup()
Expand Down Expand Up @@ -340,6 +341,6 @@ def __init__(self, *args, **kwargs):
self.namespace = {}
for key, value in self.variables['variables'].items():
if key not in ['var_header']:
logger.debug('update namespace {} with {} for {}'.format(
self.namespace, value, key))
logger.debug('update namespace %s with %s for %s',
self.namespace, value, key)
self.namespace.update(value)
62 changes: 31 additions & 31 deletions igor2/struct.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
from __future__ import absolute_import
import io as _io
import logging
import pprint as _pprint
import struct as _struct

import numpy as _numpy
Expand Down Expand Up @@ -147,7 +146,7 @@ def setup(self):
Use this method to recalculate dynamic properties after
changing the basic properties set during initialization.
"""
logger.debug('setup {}'.format(self))
logger.debug('setup %s', self)
self.item_count = _numpy.prod(self.count) # number of item repeats
if not self.array and self.item_count != 1:
raise ValueError(
Expand Down Expand Up @@ -244,7 +243,7 @@ def pack_item(self, item=None):

def unpack_data(self, data):
"""Inverse of .pack_data"""
logger.debug('unpack {} for {} {}'.format(data, self, self.format))
logger.debug('unpack %s for %s %s', data, self, self.format)
iterator = iter(data)
try:
items = [next(iterator) for i in range(self.arg_count)]
Expand Down Expand Up @@ -279,8 +278,8 @@ def unpack_data(self, data):
raise NotImplementedError('reshape Structure field')
else:
unpacked = _numpy.array(unpacked)
logger.debug('reshape {} data from {} to {}'.format(
self, unpacked.shape, count))
logger.debug('reshape %s data from %s to %s',
self, unpacked.shape, count)
unpacked = unpacked.reshape(count)
return unpacked

Expand Down Expand Up @@ -492,14 +491,14 @@ def setup(self):
Use this method to recalculate dynamic properties after
changing the basic properties set during initialization.
"""
logger.debug('setup {!r}'.format(self))
logger.debug('setup %r', self)
self.set_byte_order(self.byte_order)
self.get_format()

def set_byte_order(self, byte_order):
"""Allow changing the format byte_order on the fly.
"""
logger.debug('set byte order for {!r} to {}'.format(self, byte_order))
logger.debug('set byte order for %r to %s', self, byte_order)
self.byte_order = byte_order
for field in self.fields:
if isinstance(field.format, Structure):
Expand All @@ -517,7 +516,7 @@ def get_format(self):
return format

def sub_format(self):
logger.debug('calculate sub-format for {!r}'.format(self))
logger.debug('calculate sub-format for %r', self)
for field in self.fields:
if isinstance(field.format, Structure):
field_format = list(
Expand Down Expand Up @@ -579,8 +578,8 @@ def unpack(self, *args, **kwargs):

def unpack_from(self, buffer, offset=0, *args, **kwargs):
logger.debug(
'unpack {!r} for {!r} ({}, offset={}) with {} ({})'.format(
buffer, self, len(buffer), offset, self.format, self.size))
'unpack %r for %r (%s, offset=%s) with %s (%s)',
buffer, self, len(buffer), offset, self.format, self.size)
args = super(Structure, self).unpack_from(
buffer, offset, *args, **kwargs)
return self._unpack_item(args)
Expand All @@ -595,8 +594,8 @@ def __init__(self, stream):

def read(self, size):
data = self.stream.read(size)
logger.debug('read {} from {}: ({}) {!r}'.format(
size, self.stream, len(data), data))
logger.debug('read %s from %s: (%s) %r',
size, self.stream, len(data), data)
return data


Expand Down Expand Up @@ -698,10 +697,10 @@ def _pre_pack(self, parents=None, data=None):
parents = parents + [self]
for f in self.fields:
if hasattr(f, 'pre_pack'):
logger.debug('pre-pack {}'.format(f))
logger.debug('pre-pack %s', f)
f.pre_pack(parents=parents, data=data)
if isinstance(f.format, DynamicStructure):
logger.debug('pre-pack {!r}'.format(f.format))
logger.debug('pre-pack %r', f.format)
f._pre_pack(parents=parents, data=data)

def pack(self, data):
Expand All @@ -720,22 +719,23 @@ def unpack_stream(self, stream, parents=None, data=None, d=None):
if data is None:
parents = [self]
data = d = {}
if logger.level <= logging.DEBUG:
if logger.isEnabledFor(logging.DEBUG):
stream = DebuggingStream(stream)
else:
parents = parents + [self]

for f in self.fields:
logger.debug('parsing {!r}.{} (count={}, item_count={})'.format(
self, f, f.count, f.item_count))
if logger.level <= logging.DEBUG:
logger.debug('data:\n{}'.format(_pprint.pformat(data)))
logger.debug('parsing %r.%s (count=%s, item_count=%s)',
self, f, f.count, f.item_count)
if logger.isEnabledFor(logging.DEBUG):
import pprint as _pprint
logger.debug('data:\n%s', _pprint.pformat(data))
if hasattr(f, 'pre_unpack'):
logger.debug('pre-unpack {}'.format(f))
logger.debug('pre-unpack %s', f)
f.pre_unpack(parents=parents, data=data)

if hasattr(f, 'unpack'): # override default unpacking
logger.debug('override unpack for {}'.format(f))
logger.debug('override unpack for %s', f)
d[f.name] = f.unpack(stream)
continue

Expand All @@ -758,15 +758,15 @@ def unpack_stream(self, stream, parents=None, data=None, d=None):
f.format.unpack_stream(
stream, parents=parents, data=data, d=d[f.name])
if hasattr(f, 'post_unpack'):
logger.debug('post-unpack {}'.format(f))
logger.debug('post-unpack %s', f)
repeat = f.post_unpack(parents=parents, data=data)
if repeat:
raise NotImplementedError(
'cannot repeat unpack for dynamic structures')
continue
if isinstance(f.format, Structure):
logger.debug('parsing {} bytes for {}'.format(
f.format.size, f.format.format))
logger.debug('parsing %s bytes for %s',
f.format.size, f.format.format)
bs = [stream.read(f.format.size) for i in range(f.item_count)]

def unpack():
Expand All @@ -785,10 +785,10 @@ def unpack():
size = _struct.calcsize(field_format)
except _struct.error as e:
logger.error(e)
logger.error('{}.{}: {}'.format(self, f, field_format))
logger.error('%s.%s: %s', self, f, field_format)
raise
logger.debug('parsing {} bytes for preliminary {}'.format(
size, field_format))
logger.debug('parsing %s bytes for preliminary %s',
size, field_format)
raw = stream.read(size)
if len(raw) < size:
raise ValueError(
Expand All @@ -798,8 +798,8 @@ def unpack():
def unpack():
field_format = self.byte_order + f.format * f.item_count
field_format = field_format.replace('P', 'I')
logger.debug('parse previous bytes using {}'.format(
field_format))
logger.debug('parse previous bytes using %s',
field_format)
struct = _struct.Struct(field_format)
items = struct.unpack(raw)
return f.unpack_data(items)
Expand All @@ -809,12 +809,12 @@ def unpack():
while repeat:
d[f.name] = unpack()
if hasattr(f, 'post_unpack'):
logger.debug('post-unpack {}'.format(f))
logger.debug('post-unpack %s', f)
repeat = f.post_unpack(parents=parents, data=data)
else:
repeat = False
if repeat:
logger.debug('repeat unpack for {}'.format(f))
logger.debug('repeat unpack for %s', f)

return data

Expand Down

0 comments on commit 2f64d27

Please sign in to comment.