Merge branch 'main' into chrome-generator
Smejky338 authored Jun 25, 2024
2 parents 66aa65a + 7fb348c commit 367648c
Showing 28 changed files with 441 additions and 121 deletions.
3 changes: 2 additions & 1 deletion .flake8
@@ -1,3 +1,4 @@
[flake8]
ignore = E501,W503,E203
exclude = .git,__pycache__,venv
disable = W391
exclude = .git,__pycache__,venv,.venv
2 changes: 1 addition & 1 deletion README.md
@@ -53,7 +53,7 @@ Install with:

If you have cloned the git repository and want to develop locally, replace the last step with:

python -m pip install --editable .
python -m pip install --editable .[dev]

Running unit tests
------------------
9 changes: 7 additions & 2 deletions core/opl/args.py
@@ -90,7 +90,12 @@ def add_kafka_opts(parser):
parser.add_argument(
"--kafka-host",
default=os.getenv("KAFKA_HOST", "localhost"),
help="Kafka host (also use env variable KAFKA_HOST)",
help="Kafka host (also use env variable KAFKA_HOST). Can get overriden by --kafka-hosts arg or KAFKA_HOSTS envvar.",
)
parser.add_argument(
"--kafka-hosts",
default=os.getenv("KAFKA_HOSTS", ""),
help="Comma-separated list of hosts, including their ports (also use env variable KAFKA_HOSTS). Takes precedence over --kafka-host and --kafka-port or their envvar variants.",
)
parser.add_argument(
"--kafka-port",
@@ -131,7 +136,7 @@ def add_kafka_opts(parser):
help="The client is going to wait this much time for the server to respond to a request (also use env variable KAFKA_REQUEST_TIMEOUT_MS)",
)
parser.add_argument(
"--kafka-max_block_ms",
"--kafka-max-block-ms",
type=int,
default=int(os.getenv("KAFKA_MAX_BLOCK_MS", 60000)),
help="Max time to block send e.g. because buffer is full (also use env variable KAFKA_MAX_BLOCK_MS)",
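For reference, a minimal sketch of how a caller might resolve the final broker list from these options, following the precedence described in the help texts above (--kafka-hosts / KAFKA_HOSTS wins over --kafka-host and --kafka-port); the helper name resolve_bootstrap_servers is hypothetical and not part of opl:

    # Hypothetical helper, not part of opl: pick the broker list from parsed args,
    # giving --kafka-hosts / KAFKA_HOSTS precedence over --kafka-host/--kafka-port.
    def resolve_bootstrap_servers(args):
        if args.kafka_hosts:  # e.g. "broker1:9092,broker2:9092"
            return args.kafka_hosts.split(",")
        return [f"{args.kafka_host}:{args.kafka_port}"]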
23 changes: 14 additions & 9 deletions core/opl/cluster_read.py
@@ -11,10 +11,12 @@
import jinja2.exceptions
import boto3
import urllib3
import tempfile

from . import data
from . import date
from tenacity import * # noqa: F403
from . import status_data
from . import retry


def execute(command):
@@ -179,8 +181,7 @@ def _sanitize_target(self, target):
target = target.replace("$Cloud", self.args.grafana_prefix)
return target

# pylint: disable-next=undefined-variable
@retry(stop=(stop_after_delay(10) | stop_after_attempt(10))) # noqa: F405
@retry.retry_on_traceback(max_attempts=10, wait_seconds=1)
def measure(self, ri, name, grafana_target):
assert (
ri.start is not None and ri.end is not None
@@ -378,10 +379,10 @@ def measure(self, ri, name, copy_from):
"""
Just return value from previously answered item
"""
try:
return name, dict(ri._responses)[copy_from]
except KeyError:
if ri.sd is None:
return name, None
else:
return name, ri.sd.get(copy_from)


class TestFailMePlugin(BasePlugin):
@@ -455,7 +456,9 @@ def get_source(self, environment, path):


class RequestedInfo:
def __init__(self, config, start=None, end=None, args=argparse.Namespace()):
def __init__(
self, config, start=None, end=None, args=argparse.Namespace(), sd=None
):
"""
"config" is input for config_stuff function
"start" and "end" are datetimes needed if config file contains some
@@ -466,9 +469,9 @@ def __init__(self, config, start=None, end=None, args=argparse.Namespace()):
self.start = start
self.end = end
self.args = args
self.sd = sd

self._index = 0 # which config item are we processing?
self._responses = [] # what responses we have gave so far
self._token = None # OCP token - we will take it from `oc whoami -t` if needed
self.measurement_plugins = (
{}
@@ -512,7 +515,6 @@ def __next__(self):
f"Failed to measure {self.config[i]['name']}: {e}"
)
output = (None, None)
self._responses.append(output)
return output
else:
raise Exception(f"Unknown config '{self.config[i]}'")
@@ -530,11 +532,14 @@ def doit(args):
else:
config = args.requested_info_config

sd = status_data.StatusData(tempfile.NamedTemporaryFile().name)

requested_info = RequestedInfo(
config,
args.monitoring_start,
args.monitoring_end,
args=args,
sd=sd,
)

if args.render_config:
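Taken together, the changes in this file thread a StatusData object through RequestedInfo so that plugins (such as the copy_from one above) can look up previously collected values via ri.sd.get() instead of the removed _responses list. A rough, hedged sketch of the new wiring; the import path and the empty config are illustrative:

    import argparse
    import tempfile

    from opl import cluster_read, status_data  # import path is an assumption

    config = []  # illustrative; normally loaded from the requested-info config file
    sd = status_data.StatusData(tempfile.NamedTemporaryFile().name)

    ri = cluster_read.RequestedInfo(
        config,
        start=None,
        end=None,
        args=argparse.Namespace(),
        sd=sd,  # new parameter: lets plugins read earlier values via ri.sd.get()
    )
    for name, value in ri:
        sd.set(name, value)  # assumption: the caller persists measured values into sd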
12 changes: 6 additions & 6 deletions core/opl/data.py
@@ -217,13 +217,13 @@ def data_stats(data):
"max": max(data),
"sum": sum(data),
"mean": statistics.mean(data),
"non_zero_mean": statistics.mean(non_zero_data)
if len(non_zero_data) > 0
else 0.0,
"non_zero_mean": (
statistics.mean(non_zero_data) if len(non_zero_data) > 0 else 0.0
),
"median": statistics.median(data),
"non_zero_median": statistics.median(non_zero_data)
if len(non_zero_data) > 0
else 0.0,
"non_zero_median": (
statistics.median(non_zero_data) if len(non_zero_data) > 0 else 0.0
),
"stdev": statistics.stdev(data) if len(data) > 1 else 0.0,
"range": max(data) - min(data),
"percentile25": q25,
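The non_zero_mean / non_zero_median keys only differ from their plain counterparts when the samples contain zeros; a small worked example, assuming non_zero_data is simply the input with zeros filtered out:

    import statistics

    data = [0, 0, 4, 6]
    non_zero_data = [d for d in data if d != 0]  # assumption: how data_stats derives it

    statistics.mean(data)                                        # 2.5 -> "mean"
    statistics.mean(non_zero_data) if non_zero_data else 0.0     # 5.0 -> "non_zero_mean"
    statistics.median(non_zero_data) if non_zero_data else 0.0   # 5.0 -> "non_zero_median"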
14 changes: 14 additions & 0 deletions core/opl/date.py
@@ -30,3 +30,17 @@ def my_fromisoformat(string):
out = datetime.datetime.strptime(string, "%Y-%m-%dT%H:%M:%S")
out = out.replace(tzinfo=string_tz)
return out


def get_now_str() -> str:
"""
return current datetime in UTC string format
"""
return datetime.datetime.now(datetime.timezone.utc).isoformat()


def get_now() -> datetime.datetime:
"""
return current datetime in UTC datetime format
"""
return datetime.datetime.now(datetime.timezone.utc)
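Both helpers return timezone-aware UTC values; an illustrative interactive session (the import path and the timestamps are made up for the example):

    >>> from opl import date
    >>> date.get_now()
    datetime.datetime(2024, 6, 25, 12, 34, 56, 789012, tzinfo=datetime.timezone.utc)
    >>> date.get_now_str()
    '2024-06-25T12:34:56.789012+00:00'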
43 changes: 43 additions & 0 deletions core/opl/retry.py
@@ -0,0 +1,43 @@
import logging
import time
from functools import wraps


def retry_on_traceback(max_attempts=10, wait_seconds=1):
"""
Retries a function until it succeeds or the maximum number of attempts
or wait time is reached.
This is to mimic `@retry` decorator from Tenacity so we do not depend
on it.
Args:
max_attempts: The maximum number of attempts to retry the function.
wait_seconds: The number of seconds to wait between retries.
Returns:
A decorator that retries the wrapped function.
"""
assert max_attempts >= 0, "It does not make sense to have less than 0 retries"
assert wait_seconds >= 0, "It does not make sense to wait less than 0 seconds"

def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
attempt = 0
while True:
try:
return func(*args, **kwargs)
except Exception as e:
if attempt >= max_attempts:
raise # Reraise the exception after all retries are exhausted

attempt += 1
logging.debug(
f"Retrying in {wait_seconds} seconds. Attempt {attempt}/{max_attempts} failed with: {e}"
)
time.sleep(wait_seconds)

return wrapper

return decorator
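Usage mirrors Tenacity's decorator, exactly as the cluster_read.py hunk above switches to @retry.retry_on_traceback(max_attempts=10, wait_seconds=1). A small self-contained sketch with a made-up flaky function:

    import random

    from opl import retry  # within the package itself it is imported as "from . import retry"

    @retry.retry_on_traceback(max_attempts=5, wait_seconds=1)
    def flaky():
        # Made-up example: fails roughly half of the time; the decorator
        # retries up to 5 more times, sleeping 1 second between attempts.
        if random.random() < 0.5:
            raise RuntimeError("transient failure")
        return "ok"

    print(flaky())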
19 changes: 15 additions & 4 deletions core/opl/shovel.py
@@ -213,7 +213,9 @@ def upload(self):
elif self.args.test_end is None:
raise Exception("Test end is required to work with --horreum-upload")
elif self.args.test_job_matcher_label is None:
raise Exception("Test job matcher Horreum label name is required to work with --horreum-upload")
raise Exception(
"Test job matcher Horreum label name is required to work with --horreum-upload"
)

self.logger.debug(f"Loading file {self.args.horreum_data_file}")
with open(self.args.horreum_data_file, "r") as fd:
@@ -313,7 +315,9 @@ def result(self):
change_detected = True
break

print(f"Writing result to {self.args.horreum_data_file}: {'FAIL' if change_detected else 'PASS'}")
print(
f"Writing result to {self.args.horreum_data_file}: {'FAIL' if change_detected else 'PASS'}"
)
if change_detected:
sd.set("result", "FAIL")
else:
@@ -351,8 +355,15 @@ def set_args(parser, group_actions):
group.add_argument(
"--test-name-horreum", default="load-tests-result", help="Test Name"
)
group.add_argument("--test-job-matcher", default="jobName", help="Field name in JSON with unique enough value we use to detect if document is already in Horreum")
group.add_argument("--test-job-matcher-label", help="Label name in Horreum with unique enough value we use to detect if document is already in Horreum")
group.add_argument(
"--test-job-matcher",
default="jobName",
help="Field name in JSON with unique enough value we use to detect if document is already in Horreum",
)
group.add_argument(
"--test-job-matcher-label",
help="Label name in Horreum with unique enough value we use to detect if document is already in Horreum",
)
group.add_argument("--test-owner", default="rhtap-perf-test-team")
group.add_argument("--test-access", default="PUBLIC")
group.add_argument("--test-start", help="time when the test started")
11 changes: 8 additions & 3 deletions core/opl/skelet.py
@@ -12,7 +12,9 @@ def setup_logger(app_name, stderr_log_lvl):
Create logger that logs to both stderr and log file but with different log levels
"""
# Remove all handlers from root logger if any
logging.basicConfig(level=logging.NOTSET, handlers=[]) # `force=True` was added in Python 3.8 :-(
logging.basicConfig(
level=logging.NOTSET, handlers=[]
) # `force=True` was added in Python 3.8 :-(
# Change root logger level from WARNING (default) to NOTSET in order for all messages to be delegated
logging.getLogger().setLevel(logging.NOTSET)

@@ -46,6 +48,7 @@ def setup_logger(app_name, stderr_log_lvl):

return logging.getLogger(app_name)


@contextmanager
def test_setup(parser, logger_name="root"):
parser.add_argument(
@@ -54,12 +57,14 @@
help='File where we maintain metadata, results, parameters and measurements for this test run (also use env variable STATUS_DATA_FILE, default to "/tmp/status-data.json")',
)
parser.add_argument(
"-v", "--verbose",
"-v",
"--verbose",
action="store_true",
help="Show verbose output",
)
parser.add_argument(
"-d", "--debug",
"-d",
"--debug",
action="store_true",
help="Show debug output",
)
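A hedged sketch of how test_setup is typically consumed; the option names follow this hunk, but the exact value it yields (assumed here to be an (args, status_data) pair) is not visible in this diff:

    import argparse

    from opl import skelet  # import path is an assumption

    parser = argparse.ArgumentParser(description="My perf test")
    parser.add_argument("--requests", type=int, default=100)

    # Assumption: test_setup parses the CLI (including the -v/-d and
    # --status-data-file options it adds) and yields (args, status_data).
    with skelet.test_setup(parser) as (args, status_data):
        status_data.set("parameters.requests", args.requests)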
15 changes: 6 additions & 9 deletions core/opl/status_data.py
@@ -231,7 +231,7 @@ def clear(self):
"""
self._data = {
"name": None,
"started": get_now_str(),
"started": date.get_now_str(),
"ended": None,
"owner": None,
"result": None,
@@ -263,7 +263,9 @@ def save(self, filename=None):
if self._filename_mtime != current_mtime:
tmp = tempfile.mktemp()
self._save(tmp)
raise Exception(f"Status data file {self._filename} was modified since we loaded it so I do not want to overwrite it. Instead, saved to {tmp}")
raise Exception(
f"Status data file {self._filename} was modified since we loaded it so I do not want to overwrite it. Instead, saved to {tmp}"
)
else:
self._filename = filename

@@ -278,12 +280,6 @@ def _save(self, filename):
logging.debug(f"Saved status data to {filename}")


def get_now_str():
now = datetime.datetime.utcnow()
now = now.replace(tzinfo=datetime.timezone.utc)
return now.isoformat()


def doit_set(status_data, set_this):
for item in set_this:
if item == "":
@@ -296,7 +292,7 @@ def doit_set(status_data, set_this):
value = value[1:-1]

if value == "%NOW%":
value = get_now_str()
value = date.get_now_str()
else:
try:
value = int(value)
@@ -345,6 +341,7 @@ def doit_additional(status_data, additional, monitoring_start, monitoring_end, a
start=monitoring_start,
end=monitoring_end,
args=args,
sd=status_data,
)

counter_ok = 0
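For context, a hedged example of driving these helpers; the "key=value" item syntax and the result field names are assumptions based on the surrounding code, not shown in full in this hunk:

    from opl import status_data  # import path is an assumption

    sd = status_data.StatusData("/tmp/status-data.json")

    # "%NOW%" expands to the current UTC timestamp via date.get_now_str()
    status_data.doit_set(sd, ["results.ended=%NOW%", "results.rps=1234"])
    sd.save()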
