Skip to content

Commit

Permalink
Fix SNMP dropping some of the queue counters when create_only_config_d…
Browse files Browse the repository at this point in the history
…b_buffers is set to true (#303)

This happened because the MIB assumed that half of the configured queues are for mcast. When create_only_config_db_buffers is set to true, this is no longer the case.

**- What I did**
The ciscoSwitchQosMIB MIB assumed that all the counters are configured and that half of the configured queues are for mcast.
This is no longer true: the "polling only configured ports buffer queue" feature makes it possible for a port to not have MC counters.
This wrong assumption caused issue sonic-net/sonic-buildimage#17448

To fix this, I instead used the BUFFER_MAX_PARAM_TABLE to find the max possible queues
**- How I did it**

**- How to verify it**
Inside the SNMP docker run snmp walk:
$ snmpwalk -v2c -c msft 10.64.247.240 1.3.6.1.4.1.9.9.580.1.5.5.1.4.1
Check that the results are not missing counters from any queue
  • Loading branch information
DavidZagury authored and mssonicbld committed Jan 9, 2024
1 parent f9849f0 commit e5fd192
Show file tree
Hide file tree
Showing 15 changed files with 632 additions and 19 deletions.
13 changes: 12 additions & 1 deletion src/sonic_ax_impl/mibs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@

COUNTERS_PORT_NAME_MAP = 'COUNTERS_PORT_NAME_MAP'
COUNTERS_QUEUE_NAME_MAP = 'COUNTERS_QUEUE_NAME_MAP'
BUFFER_MAX_PARAM_TABLE = 'BUFFER_MAX_PARAM_TABLE'

LAG_TABLE = 'LAG_TABLE'
LAG_MEMBER_TABLE = 'LAG_MEMBER_TABLE'
LOC_CHASSIS_TABLE = 'LLDP_LOC_CHASSIS'
Expand Down Expand Up @@ -60,6 +62,16 @@ def chassis_info_table(chassis_name):

return "CHASSIS_INFO" + TABLE_NAME_SEPARATOR_VBAR + chassis_name


def buffer_max_parm_table(port_name):
    """
    Build the STATE_DB key for a port's buffer maximum-parameter entry.

    :param port_name: port name (e.g. "Ethernet0")
    :return: STATE_DB key "BUFFER_MAX_PARAM_TABLE|<port_name>" holding the
             max buffer parameters (such as max_queues) for this port
    """

    return "BUFFER_MAX_PARAM_TABLE" + TABLE_NAME_SEPARATOR_VBAR + port_name


def fan_info_table(fan_name):
"""
:param: fan_name: fan name
Expand Down Expand Up @@ -439,7 +451,6 @@ def init_sync_d_queue_tables(db_conn):
port_index = get_index_from_str(port_name)
key = queue_key(port_index, queue_index)
port_queues_map[key] = sai_id

queue_stat_name = queue_table(sai_id)
queue_stat = db_conn.get_all(COUNTERS_DB, queue_stat_name, blocking=False)
if queue_stat is not None:
Expand Down
9 changes: 7 additions & 2 deletions src/sonic_ax_impl/mibs/vendor/cisco/ciscoSwitchQosMIB.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,9 @@ def __init__(self):
"""
super().__init__()
self.db_conn = Namespace.init_namespace_dbs()
# establish connection to state database.
Namespace.connect_all_dbs(self.db_conn, mibs.STATE_DB)

self.lag_name_if_name_map = {}
self.if_name_lag_name_map = {}
self.oid_lag_name_map = {}
Expand Down Expand Up @@ -129,8 +132,10 @@ def update_stats(self):
namespace = self.port_index_namespace[if_index]

# The first half of queue id is for ucast, and second half is for mcast
# To simulate vendor OID, we wrap queues by half distance
pq_count = math.ceil((max(if_queues) + 1) / 2)
# To simulate vendor OID, we wrap queues by max priority groups
port_max_queues = Namespace.dbs_get_all(self.db_conn, mibs.STATE_DB,
mibs.buffer_max_parm_table(self.oid_name_map[if_index]))['max_queues']
pq_count = math.ceil(int(port_max_queues) / 2)

for queue in if_queues:
# Get queue type and statistics
Expand Down
4 changes: 4 additions & 0 deletions tests/mock_tables/asic0/appl_db.json
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,10 @@
"alias": "etp2",
"speed": 100000
},
"PORT_TABLE:Ethernet24": {
"alias": "etp17",
"speed": 100000
},
"PORT_TABLE:Ethernet-BP0": {
"description": "snowflake",
"alias": "etp3",
Expand Down
70 changes: 66 additions & 4 deletions tests/mock_tables/asic0/counters_db.json
Original file line number Diff line number Diff line change
Expand Up @@ -311,7 +311,8 @@
"Ethernet-BP0": "oid:0x1000000000005",
"Ethernet-BP4": "oid:0x1000000000006",
"Ethernet-IB0": "oid:0x1000000000080",
"Ethernet-Rec0": "oid:0x1000000000081"
"Ethernet-Rec0": "oid:0x1000000000081",
"Ethernet24": "oid:0x1000000000014"
},
"COUNTERS_LAG_NAME_MAP": {
"PortChannel01": "oid:0x1000000000007"
Expand Down Expand Up @@ -592,7 +593,13 @@
"Ethernet4:12": "oid:0x15000000010244",
"Ethernet4:13": "oid:0x15000000010245",
"Ethernet4:14": "oid:0x15000000010246",
"Ethernet4:15": "oid:0x15000000010247"
"Ethernet4:15": "oid:0x15000000010247",
"Ethernet24:0": "oid:0x15000000000260",
"Ethernet24:1": "oid:0x15000000000261",
"Ethernet24:2": "oid:0x15000000000262",
"Ethernet24:3": "oid:0x15000000000263",
"Ethernet24:4": "oid:0x15000000000264",
"Ethernet24:6": "oid:0x15000000000266"
},
"COUNTERS_QUEUE_TYPE_MAP": {
"oid:0x15000000000230": "SAI_QUEUE_TYPE_UNICAST",
Expand Down Expand Up @@ -626,7 +633,13 @@
"oid:0x15000000010244": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010245": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010246": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010247": "SAI_QUEUE_TYPE_MULTICAST"
"oid:0x15000000010247": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000000260": "SAI_QUEUE_TYPE_UNICAST",
"oid:0x15000000000261": "SAI_QUEUE_TYPE_UNICAST",
"oid:0x15000000000262": "SAI_QUEUE_TYPE_UNICAST",
"oid:0x15000000000263": "SAI_QUEUE_TYPE_UNICAST",
"oid:0x15000000000264": "SAI_QUEUE_TYPE_UNICAST",
"oid:0x15000000000266": "SAI_QUEUE_TYPE_UNICAST"
},
"COUNTERS:oid:0x15000000000230": {
"SAI_QUEUE_STAT_PACKETS": "1",
Expand Down Expand Up @@ -883,4 +896,53 @@
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
}}
},
"COUNTERS:oid:0x15000000000260": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "23492723984237432",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
},
"COUNTERS:oid:0x15000000000261": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
},
"COUNTERS:oid:0x15000000000262": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
},
"COUNTERS:oid:0x15000000000263": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
},
"COUNTERS:oid:0x15000000000264": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
},
"COUNTERS:oid:0x15000000000266": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
}
}
9 changes: 9 additions & 0 deletions tests/mock_tables/asic0/state_db.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,5 +33,14 @@
},
"NEIGH_STATE_TABLE|fec0::ffff:afa:07": {
"state": "Active"
},
"BUFFER_MAX_PARAM_TABLE|Ethernet0": {
"max_queues": "16"
},
"BUFFER_MAX_PARAM_TABLE|Ethernet4": {
"max_queues": "16"
},
"BUFFER_MAX_PARAM_TABLE|Ethernet24": {
"max_queues": "16"
}
}
4 changes: 4 additions & 0 deletions tests/mock_tables/asic1/appl_db.json
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,10 @@
"speed": 1000,
"alias": "etp16"
},
"PORT_TABLE:Ethernet32": {
"speed": 1000,
"alias": "etp18"
},
"PORT_TABLE:Ethernet-BP8": {
"alias": "etp7"
},
Expand Down
70 changes: 66 additions & 4 deletions tests/mock_tables/asic1/counters_db.json
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,8 @@
"Ethernet12": "oid:0x1000000000004",
"Ethernet-BP8": "oid:0x1000000000005",
"Ethernet-BP12": "oid:0x1000000000006",
"Ethernet16": "oid:0x1000000000010"
"Ethernet16": "oid:0x1000000000010",
"Ethernet32": "oid:0x1000000000011"
},
"COUNTERS_LAG_NAME_MAP": {
"PortChannel02": "oid:0x1000000000007"
Expand Down Expand Up @@ -697,7 +698,13 @@
"Ethernet12:12": "oid:0x15000000010244",
"Ethernet12:13": "oid:0x15000000010245",
"Ethernet12:14": "oid:0x15000000010246",
"Ethernet12:15": "oid:0x15000000010247"
"Ethernet12:15": "oid:0x15000000010247",
"Ethernet32:8": "oid:0x15000000010270",
"Ethernet32:10": "oid:0x15000000010272",
"Ethernet32:11": "oid:0x15000000010273",
"Ethernet32:13": "oid:0x15000000010275",
"Ethernet32:14": "oid:0x15000000010276",
"Ethernet32:15": "oid:0x15000000010277"
},
"COUNTERS_QUEUE_TYPE_MAP": {
"oid:0x15000000000230": "SAI_QUEUE_TYPE_UNICAST",
Expand Down Expand Up @@ -731,7 +738,13 @@
"oid:0x15000000010244": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010245": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010246": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010247": "SAI_QUEUE_TYPE_MULTICAST"
"oid:0x15000000010247": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010270": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010272": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010273": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010275": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010276": "SAI_QUEUE_TYPE_MULTICAST",
"oid:0x15000000010277": "SAI_QUEUE_TYPE_MULTICAST"
},
"COUNTERS:oid:0x15000000000230": {
"SAI_QUEUE_STAT_PACKETS": "1",
Expand Down Expand Up @@ -988,4 +1001,53 @@
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
}}
},
"COUNTERS:oid:0x15000000010270": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
},
"COUNTERS:oid:0x15000000010272": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
},
"COUNTERS:oid:0x15000000010273": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
},
"COUNTERS:oid:0x15000000010275": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
},
"COUNTERS:oid:0x15000000010276": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
},
"COUNTERS:oid:0x15000000010277": {
"SAI_QUEUE_STAT_PACKETS": "1",
"SAI_QUEUE_STAT_BYTES": "2",
"SAI_QUEUE_STAT_DROPPED_PACKETS": "3",
"SAI_QUEUE_STAT_DISCARD_DROPPED_PACKETS": "4",
"SAI_QUEUE_STAT_CURR_OCCUPANCY_BYTES": "5",
"SAI_QUEUE_STAT_WATERMARK_BYTES": "6"
}
}
9 changes: 9 additions & 0 deletions tests/mock_tables/asic1/state_db.json
Original file line number Diff line number Diff line change
Expand Up @@ -30,5 +30,14 @@
},
"NEIGH_STATE_TABLE|10.10.0.4": {
"state": "Established"
},
"BUFFER_MAX_PARAM_TABLE|Ethernet8": {
"max_queues": "16"
},
"BUFFER_MAX_PARAM_TABLE|Ethernet12": {
"max_queues": "16"
},
"BUFFER_MAX_PARAM_TABLE|Ethernet32": {
"max_queues": "16"
}
}
5 changes: 5 additions & 0 deletions tests/mock_tables/asic2/appl_db.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,11 @@
"alias": "etp12",
"speed": 100000
},
"PORT_TABLE:Ethernet40": {
"description": "snowflake",
"alias": "etp19",
"speed": 100000
},
"ROUTE_TABLE:10.1.0.32": {
"nexthop": "",
"ifname": "lo"
Expand Down
Loading

0 comments on commit e5fd192

Please sign in to comment.