Remove the retransmits from the files
petya-vasileva committed Nov 13, 2023
1 parent 93f979f commit e44ac38
Showing 6 changed files with 35 additions and 59 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -129,3 +129,7 @@ dmypy.json

# Pyre type checker
.pyre/

# preloaded datasets
src/parquet/
src/*.ipynb
34 changes: 18 additions & 16 deletions src/model/Updater.py
@@ -34,7 +34,7 @@ def __init__(self):
@timer
def queryData(self, idx, dateFrom, dateTo):
intv = int(hp.CalcMinutes4Period(dateFrom, dateTo)/30)
if idx in ['ps_throughput','ps_retransmits']:
if idx in ['ps_throughput']:
dateFrom, dateTo = hp.defaultTimeRange(21)
intv = 42 # 12 hour bins
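A quick check of that constant, assuming hp.defaultTimeRange(21) spans 21 days: splitting the window into 12-hour bins gives exactly 42 intervals.

# Sketch only: verifies the bin count used for ps_throughput above.
days = 21          # window assumed to be returned by defaultTimeRange(21)
bin_hours = 12     # "12 hour bins"
intv = int(days * 24 / bin_hours)
print(intv)        # 42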

@@ -51,7 +51,7 @@ def queryData(self, idx, dateFrom, dateTo):
def cacheIndexData(self):
location = 'parquet/raw/'
dateFrom, dateTo = hp.defaultTimeRange(1)
INDICES = ['ps_packetloss', 'ps_owd', 'ps_retransmits', 'ps_throughput']
INDICES = ['ps_packetloss', 'ps_owd', 'ps_throughput']
measures = pd.DataFrame()
for idx in INDICES:
df = pd.DataFrame(self.queryData(idx, dateFrom, dateTo))
@@ -80,7 +80,7 @@ def cacheIndexData(self):
@timer
def storeAlarms(self):
dateFrom, dateTo = hp.defaultTimeRange(60)
print("Update data. Get all alrms for the past 60 days...", dateFrom, dateTo)
print("Update data. Get all alarms for the past 60 days...", dateFrom, dateTo)
oa = Alarms()
frames, pivotFrames = oa.getAllAlarms(dateFrom, dateTo)

@@ -114,9 +114,11 @@ def descChange(chdf, posDf):
for newASN in atPos:
if newASN not in [0, -1]:
if P < 1:
if str(newASN) in owners.keys():
owner = owners[str(newASN)]
howPathChanged.append({'diff': diff,
'diffOwner': owners[str(diff)], 'atPos': pos,
'jumpedFrom': newASN, 'jumpedFromOwner': owners[str(newASN)]})
'jumpedFrom': newASN, 'jumpedFromOwner': owner})
# the following check is covering the cases when the change happened at the very end of the path
# i.e. the only ASN that appears at that position is the diff detected
if len(atPos) == 0:
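The membership check added above prevents a KeyError when an ASN has no entry in the owners mapping; the record is simply skipped instead. A minimal standalone sketch of the pattern (the owners dict and ASN values here are invented for illustration):

owners = {'3257': 'GTT', '1299': 'Arelion'}   # hypothetical ASN-to-owner mapping
howPathChanged = []
for newASN in [3257, 65000]:                  # 65000 has no known owner
    if str(newASN) in owners.keys():
        owner = owners[str(newASN)]
        howPathChanged.append({'jumpedFrom': newASN, 'jumpedFromOwner': owner})
print(howPathChanged)                         # only the ASN with a known owner is kept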
Expand All @@ -134,20 +136,20 @@ def descChange(chdf, posDf):
def storePathChangeDescDf(self):
dateFrom, dateTo = hp.defaultTimeRange(days=3)
chdf, posDf, baseline = qrs.queryTraceChanges(dateFrom, dateTo)[:3]
posDf['asn'] = posDf['asn'].astype(int)

df = pd.DataFrame()
for p in posDf['pair'].unique():
# print(p)
temp = self.descChange(chdf[chdf['pair'] == p], posDf[posDf['pair'] == p])
temp['src_site'] = baseline[baseline['pair'] == p]['src_site'].values[0]
temp['dest_site'] = baseline[baseline['pair'] == p]['dest_site'].values[0]
temp['count'] = len(chdf[chdf['pair'] == p])
df = pd.concat([df, temp])

df['jumpedFrom'] = df['jumpedFrom'].astype(int)
df['diff'] = df['diff'].astype(int)
self.pq.writeToFile(df, f"parquet/frames/prev_next_asn")
if len(chdf) > 0:
for p in posDf['pair'].unique():
# print(p)
temp = self.descChange(chdf[chdf['pair'] == p], posDf[posDf['pair'] == p])
temp['src_site'] = baseline[baseline['pair'] == p]['src_site'].values[0]
temp['dest_site'] = baseline[baseline['pair'] == p]['dest_site'].values[0]
temp['count'] = len(chdf[chdf['pair'] == p])
df = pd.concat([df, temp])

df['jumpedFrom'] = df['jumpedFrom'].astype(int)
df['diff'] = df['diff'].astype(int)
self.pq.writeToFile(df, f"parquet/frames/prev_next_asn")


@staticmethod
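The len(chdf) > 0 guard introduced in storePathChangeDescDf matters because, when no path changes were detected, the concatenated frame stays empty and has no 'jumpedFrom' or 'diff' columns, so the astype(int) casts would raise a KeyError before anything is written. A small sketch of the failure mode and the guard (data invented):

import pandas as pd

df = pd.DataFrame()                      # nothing was concatenated
try:
    df['jumpedFrom'] = df['jumpedFrom'].astype(int)
except KeyError as e:
    print('unguarded cast on an empty frame fails:', e)

chdf = pd.DataFrame()                    # no detected changes
if len(chdf) > 0:                        # mirrors the committed guard
    df['jumpedFrom'] = df['jumpedFrom'].astype(int)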
26 changes: 3 additions & 23 deletions src/pages/home.py
@@ -83,8 +83,7 @@ def SitesOverviewPlots(site_name, direction, metaDf, measures):
units = {
'ps_packetloss': 'packets',
'ps_throughput': 'MBps',
'ps_owd': 'ms',
'ps_retransmits': 'packets'
'ps_owd': 'ms'
}

colors = ['#720026', '#e4ac05', '#00bcd4', '#1768AC', '#ffa822', '#134e6f', '#ff6150', '#1ac0c6', '#492b7c', '#9467bd',
@@ -103,23 +102,20 @@ def SitesOverviewPlots(site_name, direction, metaDf, measures):
measures.loc[measures['idx']=='ps_throughput', 'value'] = measures[measures['idx']=='ps_throughput']['value'].apply(lambda x: round(x/1e+6, 2))

fig = go.Figure()
fig = make_subplots(rows=2, cols=2, subplot_titles=("Packet loss", "Throughput", 'One-way delay', 'Retransmits'))
fig = make_subplots(rows=2, cols=2, subplot_titles=("Packet loss", "Throughput", 'One-way delay'))
for i, ip in enumerate(ips):

# The following code sets the visibility to True only for the first occurrence of an IP
visible = {'ps_packetloss': False,
'ps_throughput': False,
'ps_owd': False,
'ps_retransmits': False}
'ps_owd': False}

if ip in measures[measures['idx']=='ps_packetloss'][direction].unique():
visible['ps_packetloss'] = True
elif ip in measures[measures['idx']=='ps_throughput'][direction].unique():
visible['ps_throughput'] = True
elif ip in measures[measures['idx']=='ps_owd'][direction].unique():
visible['ps_owd'] = True
elif ip in measures[measures['idx']=='ps_retransmits'][direction].unique():
visible['ps_retransmits'] = True


fig.add_trace(
@@ -164,21 +160,6 @@ def SitesOverviewPlots(site_name, direction, metaDf, measures):
row=2, col=1
)

fig.add_trace(
go.Scattergl(
x=measures[(measures[direction]==ip) & (measures['idx']=='ps_retransmits')]['dt'],
y=measures[(measures[direction]==ip) & (measures['idx']=='ps_retransmits')]['value'],
mode='markers',
marker=dict(
color=colors[i]),
name=ip,
yaxis="y1",
legendgroup=ip,
showlegend = visible['ps_retransmits'],
),
row=2, col=2
)


fig.update_layout(
showlegend=True,
@@ -198,7 +179,6 @@ def SitesOverviewPlots(site_name, direction, metaDf, measures):
fig.update_yaxes(title_text=units['ps_packetloss'], row=1, col=1)
fig.update_yaxes(title_text=units['ps_throughput'], row=1, col=2)
fig.update_yaxes(title_text=units['ps_owd'], row=2, col=1)
fig.update_yaxes(title_text=units['ps_retransmits'], row=2, col=2)

fig.layout.template = 'plotly_white'
# py.offline.plot(fig)
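With the retransmits panel gone, the figure above keeps its 2x2 grid but only fills three cells; each IP still gets one trace per index, with legendgroup tying the traces together and showlegend raised only on the IP's first occurrence. A reduced sketch of that pattern (two invented IPs, synthetic values):

import plotly.graph_objects as go
from plotly.subplots import make_subplots

fig = make_subplots(rows=2, cols=2,
                    subplot_titles=("Packet loss", "Throughput", "One-way delay"))
ips = ['192.0.2.1', '192.0.2.2']
colors = ['#720026', '#e4ac05']

for i, ip in enumerate(ips):
    for j, (row, col) in enumerate([(1, 1), (1, 2), (2, 1)]):
        fig.add_trace(
            go.Scattergl(x=[1, 2, 3], y=[j, j + 1, j + 2],
                         mode='markers',
                         marker=dict(color=colors[i]),
                         name=ip,
                         legendgroup=ip,          # one legend group per IP
                         showlegend=(j == 0)),    # legend entry only once per IP
            row=row, col=col)

fig.layout.template = 'plotly_white'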
12 changes: 6 additions & 6 deletions src/pages/paths_site.py
@@ -36,22 +36,22 @@ def description(q=None):


@timer
def getStats(fromDate, toDate, site):
def getStats(dateFrom, dateTo, site):
q = {
"query": {
"bool": {
"must": [
{
"range": {
"from_date.keyword": {
"gte": fromDate
"from_date": {
"gte": dateFrom
}
}
},
{
"range": {
"to_date.keyword": {
"lte": toDate
"to_date": {
"lte": dateTo
}
}
},
@@ -77,7 +77,7 @@ }
}

# print(str(q).replace("\'", "\""))
result = scan(client=hp.es,index='ps_trace_changes',query=q)
result = scan(client=hp.es,index='ps_traces_changes',query=q)
data, positions, baseline, altPaths = [],[],[],[]
positions = []
for item in result:
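Two things change in the query above: the index name becomes ps_traces_changes, and the range clauses target from_date / to_date directly rather than their .keyword sub-fields, presumably because a range over a keyword field compares strings lexicographically while a date-mapped field compares actual timestamps. A minimal sketch of such a query (connection details and date values are placeholders):

from elasticsearch import Elasticsearch
from elasticsearch.helpers import scan

es = Elasticsearch('http://localhost:9200')   # placeholder connection
q = {
    "query": {
        "bool": {
            "must": [
                {"range": {"from_date": {"gte": "2023-11-10T00:01:00.000Z"}}},
                {"range": {"to_date": {"lte": "2023-11-13T23:59:59.000Z"}}}
            ]
        }
    }
}
for item in scan(client=es, index='ps_traces_changes', query=q):
    print(item['_source'])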
12 changes: 3 additions & 9 deletions src/pages/search.py
@@ -35,12 +35,6 @@ def description(q=None):
title=title,
description=description,
)


def convertTime(ts):
stripped = datetime.strptime(ts, '%Y-%m-%d %H:%M')
return int((stripped - datetime(1970, 1, 1)).total_seconds()*1000)



def layout(**other_unknown_query_strings):
@@ -141,11 +135,11 @@ def colorMap(eventTypes):
def update_output(start_date, end_date, sites, all, events, allevents, sitesState, eventsState ):

if start_date and end_date:
period = [f'{start_date} 00:01', f'{end_date} 23:59']
else: period = hp.defaultTimeRange(1)
start_date, end_date = [f'{start_date}T00:01:00.000Z', f'{end_date}T23:59:59.000Z']
else: start_date, end_date = hp.defaultTimeRange(1)

alarmsInst = Alarms()
frames, pivotFrames = alarmsInst.loadData(period[0], period[1])
frames, pivotFrames = alarmsInst.loadData(start_date, end_date)

scntdf = pd.DataFrame()
for e, df in pivotFrames.items():
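The callback now hands full ISO 8601 timestamps to Alarms().loadData instead of 'YYYY-MM-DD HH:MM' strings. The formatting itself is a one-liner (dates invented):

start_date, end_date = '2023-11-12', '2023-11-13'
start_date, end_date = [f'{start_date}T00:01:00.000Z', f'{end_date}T23:59:59.000Z']
print(start_date, end_date)   # 2023-11-12T00:01:00.000Z 2023-11-13T23:59:59.000Z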
6 changes: 1 addition & 5 deletions src/pages/throughput.py
@@ -43,7 +43,7 @@ def description(q=None):


def convertTime(ts):
stripped = datetime.strptime(ts, '%Y-%m-%d %H:%M')
stripped = datetime.strptime(ts, '%Y-%m-%dT%H:%M:%S.000Z')
return int((stripped - datetime(1970, 1, 1)).total_seconds()*1000)


@@ -103,10 +103,6 @@ def getRawDataFromES(src, dest, ipv6, dateFrom, dateTo):
return df


@timer



@timer
def buildPlot(df):
fig = go.Figure(data=px.scatter(
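convertTime now expects the same ISO format used elsewhere in this commit; a quick check of the conversion to epoch milliseconds (input value invented):

from datetime import datetime

def convertTime(ts):
    stripped = datetime.strptime(ts, '%Y-%m-%dT%H:%M:%S.000Z')
    return int((stripped - datetime(1970, 1, 1)).total_seconds() * 1000)

print(convertTime('2023-11-13T00:01:00.000Z'))   # 1699833660000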
