Merge pull request #2 from LamaAni/create_python_package
Added package info
LamaAni authored Feb 20, 2020
2 parents f934f91 + 2f9fce0 commit ed74ffd
Showing 25 changed files with 66 additions and 92 deletions.
1 change: 1 addition & 0 deletions CHANGES
@@ -0,0 +1 @@
v0.1.0, Thu Feb 20 16:52:38 2020 -- Initial Release.
1 change: 1 addition & 0 deletions MANIFEST.in
@@ -0,0 +1 @@
include README.rst
3 changes: 1 addition & 2 deletions README.md
@@ -1,7 +1,6 @@
# Airflow KubernetesJobOperator

An airflow job operator that executes a task as a Kubernetes job on a cluster, given
a job yaml configuration or an image uri.
An airflow job operator that executes a task as a Kubernetes job on a cluster, given a job yaml configuration or an image uri.

### Two operators are available:

6 changes: 6 additions & 0 deletions README.rst
@@ -0,0 +1,6 @@
airflow_kubernetes_job_operator
===============================

An airflow job operator that executes a task as a Kubernetes job on a cluster, given a job yaml configuration or an image uri.

Please see README.md for more info.
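
For orientation (illustrative only, not part of the commit): a minimal sketch of a DAG that uses the operator through the package import path introduced here. The import and default_args mirror tests/dags/test_job_operator.py later in this diff; the image and command constructor arguments are assumed names for the sketch, not confirmed API.

# Illustrative sketch -- the import path and default_args come from this commit's
# test DAG; the `image` and `command` arguments are assumed parameter names.
from airflow import DAG
from airflow.utils.dates import days_ago
from airflow_kubernetes_job_operator.kubernetes_job_operator import KubernetesJobOperator

default_args = {"owner": "tester", "start_date": days_ago(2), "retries": 0}

with DAG("job-operator-example", default_args=default_args, schedule_interval=None) as dag:
    run_job = KubernetesJobOperator(
        task_id="example-kubernetes-job",
        image="ubuntu:latest",              # assumed: run the job from an image uri
        command=["bash", "-c", "echo ok"],  # assumed parameter name
    )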
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion experimental/core_tester/test_job_runner.py
@@ -3,7 +3,7 @@
import yaml
from utils import logging, load_raw_formatted_file
from datetime import datetime
from src.job_runner import JobRunner
from airflow_kubernetes_job_operator.job_runner import JobRunner

logging.basicConfig(level="INFO")
CUR_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
@@ -2,7 +2,7 @@
import os
import yaml
from utils import logging
from src.watchers.threaded_kubernetes_resource_watchers import (
from airflow_kubernetes_job_operator.watchers.threaded_kubernetes_resource_watchers import (
ThreadedKubernetesNamespaceResourcesWatcher,
)

2 changes: 1 addition & 1 deletion experimental/core_tester/test_threaded_kubernetes_watch.py
@@ -2,7 +2,7 @@
import os
import yaml
from utils import logging
from src.watchers.threaded_kubernetes_watch import ThreadedKubernetesWatchNamspeace
from airflow_kubernetes_job_operator.watchers.threaded_kubernetes_watch import ThreadedKubernetesWatchNamspeace

logging.basicConfig(level="INFO")
CUR_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
@@ -1,7 +1,7 @@
import kubernetes
import os
from .utils import logging
from src.watchers.threaded_kubernetes_watch import (
from airflow_kubernetes_job_operator.watchers.threaded_kubernetes_watch import (
ThreadedKubernetesWatchPodLog,
ThreadedKubernetesWatchNamspeace,
)
2 changes: 1 addition & 1 deletion experimental/core_tester/utils.py
@@ -9,6 +9,6 @@
def load_raw_formatted_file(fpath):
text = ""
with open(fpath, "r", encoding="utf-8") as src:
text = src.read()
text = airflow_kubernetes_job_operator.read()
return text

2 changes: 2 additions & 0 deletions setup.cfg
@@ -0,0 +1,2 @@
[bdist_wheel]
universal=1
36 changes: 36 additions & 0 deletions setup.py
@@ -0,0 +1,36 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
setup.py
~~~~~~~~
An airflow job operator that executes a task as a Kubernetes job on a cluster,
given a job yaml configuration or an image uri.
:copyright: (c) 2020 by zav.
:license: see LICENSE for more details.
"""

import codecs
import os
import re
from setuptools import setup

here = os.path.abspath(os.path.dirname(__file__))

setup(
name="airflow_kubernetes_job_operator",
version="0.1.0",
description="An airflow job operator that executes a task as a Kubernetes job on a cluster, given a job yaml configuration or an image uri.",
long_description="Please see readme.md",
classifiers=[],
author="Zav Shotan",
author_email="",
url="https://github.com/LamaAni/KubernetesJobOperator",
packages=["airflow_kubernetes_job_operator"],
platforms="any",
license="LICENSE",
install_requires=["PyYAML>=5.0", "kubernetes>=9.0.0", "urllib3>=1.25.0"],
python_requires=">=3.6",
)
76 changes: 0 additions & 76 deletions src/utils.py

This file was deleted.

1 change: 0 additions & 1 deletion tests/airflow-webserver.pid

This file was deleted.

2 changes: 1 addition & 1 deletion tests/dags/test_job_operator.py
@@ -1,5 +1,5 @@
from airflow import DAG
from src.kubernetes_job_operator import KubernetesJobOperator
from airflow_kubernetes_job_operator.kubernetes_job_operator import KubernetesJobOperator
from airflow.utils.dates import days_ago

default_args = {"owner": "tester", "start_date": days_ago(2), "retries": 0}
2 changes: 1 addition & 1 deletion tests/dags/test_legacy_job_operator.py
@@ -1,5 +1,5 @@
from airflow import DAG
from src.kubernetes_legacy_job_operator import KubernetesLegacyJobOperator
from airflow_kubernetes_job_operator.kubernetes_legacy_job_operator import KubernetesLegacyJobOperator

# from airflow.operators.bash_operator import BashOperator
from airflow.utils.dates import days_ago
18 changes: 12 additions & 6 deletions tests/unittests.cfg
@@ -1,25 +1,26 @@
[core]
unit_test_mode = True
dags_folder = /c/Code/repos/KubernetesJobOperator/tests/dags
plugins_folder = /c/Code/repos/KubernetesJobOperator/tests/plugins
base_log_folder = /c/Code/repos/KubernetesJobOperator/tests/logs
dags_folder = /mnt/c/code/zav_public/KubernetesJobOperator/tests/dags
plugins_folder = /mnt/c/code/zav_public/KubernetesJobOperator/tests/plugins
base_log_folder = /mnt/c/code/zav_public/KubernetesJobOperator/tests/logs
logging_level = INFO
fab_logging_level = WARN
log_filename_template = {{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log
log_processor_filename_template = {{ filename }}.log
dag_processor_manager_log_location = /c/Code/repos/KubernetesJobOperator/tests/logs/dag_processor_manager/dag_processor_manager.log
dag_processor_manager_log_location = /mnt/c/code/zav_public/KubernetesJobOperator/tests/logs/dag_processor_manager/dag_processor_manager.log
executor = SequentialExecutor
sql_alchemy_conn = sqlite:////c/Code/repos/KubernetesJobOperator/tests/unittests.db
sql_alchemy_conn = sqlite:////mnt/c/code/zav_public/KubernetesJobOperator/tests/unittests.db
load_examples = True
donot_pickle = False
dag_concurrency = 16
dags_are_paused_at_creation = False
fernet_key = iGswVuTlT6qH8ybxuPPiof7RzaHFg5PSf6_moHYXyyU=
fernet_key = huuYOpfjB9zLrQG1Hk2K-HxdwiHDRbXshbAtqrsQSIk=
enable_xcom_pickling = False
killed_task_cleanup_time = 5
secure_mode = False
hostname_callable = socket:getfqdn
worker_precheck = False
default_task_retries = 0

[cli]
api_client = airflow.api.client.local_client
@@ -94,5 +95,10 @@ host =
log_id_template = {dag_id}-{task_id}-{execution_date}-{try_number}
end_of_log_mark = end_of_log

[elasticsearch_configs]

use_ssl = False
verify_certs = True

[kubernetes]
dags_volume_claim = default
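
The unittests.cfg hunk above mainly repoints the absolute test paths from /c/Code/repos/... to /mnt/c/code/zav_public/... and updates the SQLite connection string accordingly. An optional way to confirm the edited file still parses and the new paths are picked up (a sketch, assuming it runs from the repository root):

# Optional check, not part of the commit: parse the updated test config with the
# standard library; interpolation is disabled so template values are read back verbatim.
from configparser import ConfigParser

cfg = ConfigParser(interpolation=None)
cfg.read("tests/unittests.cfg")
print(cfg.get("core", "dags_folder"))        # /mnt/c/code/zav_public/KubernetesJobOperator/tests/dags
print(cfg.get("core", "sql_alchemy_conn"))   # sqlite:////mnt/c/code/zav_public/KubernetesJobOperator/tests/unittests.db
print(cfg.get("kubernetes", "dags_volume_claim"))  # default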
