From 97b874bd0b6913ed52df1b8aac5e7491479fac9a Mon Sep 17 00:00:00 2001
From: Uwe Hermann
Date: Thu, 11 May 2017 23:20:58 +0200
Subject: Add srd_inst_initial_pins_set_all() and support code.

This allows frontends to set the assumed initial pins (i.e., the assumed
state of the pins before the first sample of a capture) to user-specified
values. The assumed initial pins can be either low, or high, or "use same
value as the first sample of the capture".

The special self.initial_pins decoder attribute is now removed.
---
 decoders/i2c/pd.py | 4 ----
 1 file changed, 4 deletions(-)

(limited to 'decoders/i2c')

diff --git a/decoders/i2c/pd.py b/decoders/i2c/pd.py
index d2f8bc4..0e7f769 100644
--- a/decoders/i2c/pd.py
+++ b/decoders/i2c/pd.py
@@ -130,10 +130,6 @@ class Decoder(srd.Decoder):
         self.out_bitrate = self.register(srd.OUTPUT_META,
             meta=(int, 'Bitrate', 'Bitrate from Start bit to Stop bit'))

-        # Assume that the initial SCL/SDA pin state is high (logic 1).
-        # This is a good default, since both pins have pullups as per spec.
-        self.initial_pins = [1, 1]
-
     def putx(self, data):
         self.put(self.ss, self.es, self.out_ann, data)

--
cgit v1.2.3-70-g09d2
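
The commit message describes the new frontend-facing call,
srd_inst_initial_pins_set_all(), while the diff above only shows the
decoder-side cleanup (the hard-coded self.initial_pins default being
dropped from the i2c decoder). Below is a minimal sketch, not taken from
this patch, of how a frontend might use the new call. It assumes the
function takes the decoder instance plus a GArray with one entry per
channel, and that the per-pin values are named SRD_INITIAL_PIN_LOW,
SRD_INITIAL_PIN_HIGH and SRD_INITIAL_PIN_SAME_AS_SAMPLE0, matching the
three choices ("low", "high", "same as the first sample") listed in the
commit message; check libsigrokdecode.h in your version for the exact
names and signature.

/*
 * Sketch only: set the assumed initial pin states for a decoder
 * instance before feeding samples. The enum names and the exact
 * signature of srd_inst_initial_pins_set_all() are assumptions based
 * on the commit message; verify against your libsigrokdecode headers.
 */
#include <stdint.h>
#include <glib.h>
#include <libsigrokdecode/libsigrokdecode.h>

static int set_i2c_initial_pins(struct srd_decoder_inst *di)
{
	GArray *initial_pins;
	uint8_t scl, sda;
	int ret;

	/* One entry per channel; the i2c decoder has two (SCL, SDA). */
	scl = SRD_INITIAL_PIN_HIGH;            /* Pullups as per I2C spec. */
	sda = SRD_INITIAL_PIN_SAME_AS_SAMPLE0; /* Copy the first sample. */

	initial_pins = g_array_sized_new(FALSE, TRUE, sizeof(uint8_t), 2);
	g_array_append_val(initial_pins, scl);
	g_array_append_val(initial_pins, sda);

	ret = srd_inst_initial_pins_set_all(di, initial_pins);
	g_array_free(initial_pins, TRUE);

	return ret;
}

With the self.initial_pins default removed from pd.py, the "both pins
high because of the pullups" assumption mentioned in the deleted comment
becomes frontend policy rather than something baked into the decoder.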